diff --git a/doc/langref.html.in b/doc/langref.html.in
index bd3fb41340..c070686c56 100644
--- a/doc/langref.html.in
+++ b/doc/langref.html.in
@@ -1281,7 +1281,7 @@ const ptr = &x;
x() x[] x.y
!x -x -%x ~x *x &x ?x %x ??x
x{}
-* / % ** *%
+! * / % ** *%
+ - ++ +% -%
<< >>
&
@@ -5566,14 +5566,12 @@ fn readU32Be() u32 {}
{#header_open|Grammar#}
Root = many(TopLevelItem) EOF
-TopLevelItem = ErrorValueDecl | CompTimeExpression(Block) | TopLevelDecl | TestDecl
+TopLevelItem = CompTimeExpression(Block) | TopLevelDecl | TestDecl
TestDecl = "test" String Block
TopLevelDecl = option("pub") (FnDef | ExternDecl | GlobalVarDecl | UseDecl)
-ErrorValueDecl = "error" Symbol ";"
-
GlobalVarDecl = option("export") VariableDeclaration ";"
LocalVarDecl = option("comptime") VariableDeclaration
@@ -5588,7 +5586,7 @@ UseDecl = "use" Expression ";"
ExternDecl = "extern" option(String) (FnProto | VariableDeclaration) ";"
-FnProto = option("nakedcc" | "stdcallcc" | "extern") "fn" option(Symbol) ParamDeclList option("align" "(" Expression ")") option("section" "(" Expression ")") TypeExpr
+FnProto = option("nakedcc" | "stdcallcc" | "extern") "fn" option(Symbol) ParamDeclList option("align" "(" Expression ")") option("section" "(" Expression ")") option("!") TypeExpr
FnDef = option("inline" | "export") FnProto Block
@@ -5682,7 +5680,7 @@ MultiplyExpression = CurlySuffixExpression MultiplyOperator MultiplyExpression |
CurlySuffixExpression = TypeExpr option(ContainerInitExpression)
-MultiplyOperator = "*" | "/" | "%" | "**" | "*%"
+MultiplyOperator = "!" | "*" | "/" | "%" | "**" | "*%"
PrefixOpExpression = PrefixOp PrefixOpExpression | SuffixOpExpression
@@ -5702,9 +5700,9 @@ ContainerInitBody = list(StructLiteralField, ",") | list(Expression, ",")
StructLiteralField = "." Symbol "=" Expression
-PrefixOp = "!" | "-" | "~" | "*" | ("&" option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "%" | "??" | "-%" | "try"
+PrefixOp = "!" | "-" | "~" | "*" | ("&" option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "??" | "-%" | "try"
-PrimaryExpression = Integer | Float | String | CharLiteral | KeywordLiteral | GroupedExpression | BlockExpression(BlockOrExpression) | Symbol | ("@" Symbol FnCallExpression) | ArrayType | FnProto | AsmExpression | ("error" "." Symbol) | ContainerDecl | ("continue" option(":" Symbol))
+PrimaryExpression = Integer | Float | String | CharLiteral | KeywordLiteral | GroupedExpression | BlockExpression(BlockOrExpression) | Symbol | ("@" Symbol FnCallExpression) | ArrayType | FnProto | AsmExpression | ContainerDecl | ("continue" option(":" Symbol)) | ErrorSetDecl
ArrayType : "[" option(Expression) "]" option("align" "(" Expression option(":" Integer ":" Integer) ")")) option("const") option("volatile") TypeExpr
@@ -5712,6 +5710,8 @@ GroupedExpression = "(" Expression ")"
KeywordLiteral = "true" | "false" | "null" | "undefined" | "error" | "this" | "unreachable"
+ErrorSetDecl = "error" "{" list(Symbol, ",") "}"
+
ContainerDecl = option("extern" | "packed")
("struct" option(GroupedExpression) | "union" option("enum" option(GroupedExpression) | GroupedExpression) | ("enum" option(GroupedExpression)))
"{" many(ContainerMember) "}"
diff --git a/src/all_types.hpp b/src/all_types.hpp
index 0f41760718..304fa5651a 100644
--- a/src/all_types.hpp
+++ b/src/all_types.hpp
@@ -236,7 +236,7 @@ struct ConstExprValue {
TypeTableEntry *x_type;
ConstExprValue *x_maybe;
ConstErrValue x_err_union;
- ErrorTableEntry *x_pure_err;
+ ErrorTableEntry *x_err_set;
BigInt x_enum_tag;
ConstStructValue x_struct;
ConstUnionValue x_union;
@@ -353,7 +353,6 @@ enum NodeType {
NodeTypeReturnExpr,
NodeTypeDefer,
NodeTypeVariableDeclaration,
- NodeTypeErrorValueDecl,
NodeTypeTestDecl,
NodeTypeBinOpExpr,
NodeTypeUnwrapErrorExpr,
@@ -393,6 +392,7 @@ enum NodeType {
NodeTypeVarLiteral,
NodeTypeIfErrorExpr,
NodeTypeTestExpr,
+ NodeTypeErrorSetDecl,
};
struct AstNodeRoot {
@@ -424,6 +424,8 @@ struct AstNodeFnProto {
AstNode *align_expr;
// populated if the "section(S)" is present
AstNode *section_expr;
+
+ bool auto_err_set;
};
struct AstNodeFnDef {
@@ -486,12 +488,6 @@ struct AstNodeVariableDeclaration {
AstNode *section_expr;
};
-struct AstNodeErrorValueDecl {
- Buf *name;
-
- ErrorTableEntry *err;
-};
-
struct AstNodeTestDecl {
Buf *name;
@@ -540,6 +536,7 @@ enum BinOpType {
BinOpTypeUnwrapMaybe,
BinOpTypeArrayCat,
BinOpTypeArrayMult,
+ BinOpTypeErrorUnion,
};
struct AstNodeBinOpExpr {
@@ -595,7 +592,6 @@ enum PrefixOp {
PrefixOpNegationWrap,
PrefixOpDereference,
PrefixOpMaybe,
- PrefixOpError,
PrefixOpUnwrapMaybe,
};
@@ -762,6 +758,10 @@ struct AstNodeContainerDecl {
bool auto_enum; // union(enum)
};
+struct AstNodeErrorSetDecl {
+ ZigList<AstNode *> decls;
+};
+
struct AstNodeStructField {
VisibMod visib_mod;
Buf *name;
@@ -858,7 +858,6 @@ struct AstNode {
AstNodeReturnExpr return_expr;
AstNodeDefer defer;
AstNodeVariableDeclaration variable_declaration;
- AstNodeErrorValueDecl error_value_decl;
AstNodeTestDecl test_decl;
AstNodeBinOpExpr bin_op_expr;
AstNodeCatchExpr unwrap_err_expr;
@@ -899,6 +898,7 @@ struct AstNode {
AstNodeArrayType array_type;
AstNodeErrorType error_type;
AstNodeVarLiteral var_literal;
+ AstNodeErrorSetDecl err_set_decl;
} data;
};
@@ -993,8 +993,15 @@ struct TypeTableEntryMaybe {
TypeTableEntry *child_type;
};
-struct TypeTableEntryError {
- TypeTableEntry *child_type;
+struct TypeTableEntryErrorUnion {
+ TypeTableEntry *err_set_type;
+ TypeTableEntry *payload_type;
+};
+
+struct TypeTableEntryErrorSet {
+ uint32_t err_count;
+ ErrorTableEntry **errors;
+ FnTableEntry *infer_fn;
};
struct TypeTableEntryEnum {
@@ -1097,7 +1104,7 @@ enum TypeTableEntryId {
TypeTableEntryIdNullLit,
TypeTableEntryIdMaybe,
TypeTableEntryIdErrorUnion,
- TypeTableEntryIdPureError,
+ TypeTableEntryIdErrorSet,
TypeTableEntryIdEnum,
TypeTableEntryIdUnion,
TypeTableEntryIdFn,
@@ -1126,7 +1133,8 @@ struct TypeTableEntry {
TypeTableEntryArray array;
TypeTableEntryStruct structure;
TypeTableEntryMaybe maybe;
- TypeTableEntryError error;
+ TypeTableEntryErrorUnion error_union;
+ TypeTableEntryErrorSet error_set;
TypeTableEntryEnum enumeration;
TypeTableEntryUnion unionation;
TypeTableEntryFn fn;
@@ -1136,7 +1144,6 @@ struct TypeTableEntry {
// use these fields to make sure we don't duplicate type table entries for the same type
TypeTableEntry *pointer_parent[2]; // [0 - mut, 1 - const]
TypeTableEntry *maybe_parent;
- TypeTableEntry *error_parent;
// If we generate a constant name value for this type, we memoize it here.
// The type of this is array
ConstExprValue *cached_const_name_val;
@@ -1340,6 +1347,10 @@ struct TypeId {
bool is_signed;
uint32_t bit_count;
} integer;
+ struct {
+ TypeTableEntry *err_set_type;
+ TypeTableEntry *payload_type;
+ } error_union;
} data;
};
@@ -1481,7 +1492,7 @@ struct CodeGen {
TypeTableEntry *entry_undef;
TypeTableEntry *entry_null;
TypeTableEntry *entry_var;
- TypeTableEntry *entry_pure_error;
+ TypeTableEntry *entry_global_error_set;
TypeTableEntry *entry_arg_tuple;
} builtin_types;
@@ -1570,7 +1581,6 @@ struct CodeGen {
LLVMValueRef return_address_fn_val;
LLVMValueRef frame_address_fn_val;
bool error_during_imports;
- TypeTableEntry *err_tag_type;
const char **clang_argv;
size_t clang_argv_len;
@@ -1584,7 +1594,9 @@ struct CodeGen {
bool each_lib_rpath;
- ZigList<AstNode *> error_decls;
+ TypeTableEntry *err_tag_type;
+ ZigList<ZigLLVMDIEnumerator *> err_enumerators;
+ ZigList<ErrorTableEntry *> errors_by_index;
bool generate_error_name_table;
LLVMValueRef err_name_table;
size_t largest_err_name_len;
@@ -1617,6 +1629,8 @@ struct CodeGen {
TypeTableEntry *align_amt_type;
TypeTableEntry *stack_trace_type;
TypeTableEntry *ptr_to_stack_trace_type;
+
+ ZigList<ZigLLVMDIType **> error_di_types;
};
enum VarLinkage {
@@ -1653,6 +1667,7 @@ struct ErrorTableEntry {
Buf name;
uint32_t value;
AstNode *decl_node;
+ TypeTableEntry *set_with_only_this_in_it;
// If we generate a constant error name value for this error, we memoize it here.
// The type of this is array
ConstExprValue *cached_error_name_val;
@@ -1920,6 +1935,7 @@ enum IrInstructionId {
IrInstructionIdArgType,
IrInstructionIdExport,
IrInstructionIdErrorReturnTrace,
+ IrInstructionIdErrorUnion,
};
struct IrInstruction {
@@ -1996,7 +2012,6 @@ enum IrUnOp {
IrUnOpNegation,
IrUnOpNegationWrap,
IrUnOpDereference,
- IrUnOpError,
IrUnOpMaybe,
};
@@ -2750,6 +2765,13 @@ struct IrInstructionErrorReturnTrace {
IrInstruction base;
};
+struct IrInstructionErrorUnion {
+ IrInstruction base;
+
+ IrInstruction *err_set;
+ IrInstruction *payload;
+};
+
static const size_t slice_ptr_index = 0;
static const size_t slice_len_index = 1;
diff --git a/src/analyze.cpp b/src/analyze.cpp
index 4fa8dad7ce..3b98999081 100644
--- a/src/analyze.cpp
+++ b/src/analyze.cpp
@@ -224,7 +224,7 @@ bool type_is_complete(TypeTableEntry *type_entry) {
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
@@ -260,7 +260,7 @@ bool type_has_zero_bits_known(TypeTableEntry *type_entry) {
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
@@ -514,29 +514,39 @@ TypeTableEntry *get_maybe_type(CodeGen *g, TypeTableEntry *child_type) {
}
}
-TypeTableEntry *get_error_type(CodeGen *g, TypeTableEntry *child_type) {
- if (child_type->error_parent)
- return child_type->error_parent;
+TypeTableEntry *get_error_union_type(CodeGen *g, TypeTableEntry *err_set_type, TypeTableEntry *payload_type) {
+ assert(err_set_type->id == TypeTableEntryIdErrorSet);
+
+ TypeId type_id = {};
+ type_id.id = TypeTableEntryIdErrorUnion;
+ type_id.data.error_union.err_set_type = err_set_type;
+ type_id.data.error_union.payload_type = payload_type;
+
+ auto existing_entry = g->type_table.maybe_get(type_id);
+ if (existing_entry) {
+ return existing_entry->value;
+ }
TypeTableEntry *entry = new_type_table_entry(TypeTableEntryIdErrorUnion);
entry->is_copyable = true;
- assert(child_type->type_ref);
- assert(child_type->di_type);
- ensure_complete_type(g, child_type);
+ assert(payload_type->type_ref);
+ assert(payload_type->di_type);
+ ensure_complete_type(g, payload_type);
buf_resize(&entry->name, 0);
- buf_appendf(&entry->name, "%%%s", buf_ptr(&child_type->name));
+ buf_appendf(&entry->name, "%s!%s", buf_ptr(&err_set_type->name), buf_ptr(&payload_type->name));
- entry->data.error.child_type = child_type;
+ entry->data.error_union.err_set_type = err_set_type;
+ entry->data.error_union.payload_type = payload_type;
- if (!type_has_bits(child_type)) {
- entry->type_ref = g->err_tag_type->type_ref;
- entry->di_type = g->err_tag_type->di_type;
+ if (!type_has_bits(payload_type)) {
+ entry->type_ref = err_set_type->type_ref;
+ entry->di_type = err_set_type->di_type;
} else {
LLVMTypeRef elem_types[] = {
- g->err_tag_type->type_ref,
- child_type->type_ref,
+ err_set_type->type_ref,
+ payload_type->type_ref,
};
entry->type_ref = LLVMStructType(elem_types, 2, false);
@@ -547,12 +557,12 @@ TypeTableEntry *get_error_type(CodeGen *g, TypeTableEntry *child_type) {
ZigLLVMTag_DW_structure_type(), buf_ptr(&entry->name),
compile_unit_scope, di_file, line);
- uint64_t tag_debug_size_in_bits = 8*LLVMStoreSizeOfType(g->target_data_ref, g->err_tag_type->type_ref);
- uint64_t tag_debug_align_in_bits = 8*LLVMABISizeOfType(g->target_data_ref, g->err_tag_type->type_ref);
+ uint64_t tag_debug_size_in_bits = 8*LLVMStoreSizeOfType(g->target_data_ref, err_set_type->type_ref);
+ uint64_t tag_debug_align_in_bits = 8*LLVMABISizeOfType(g->target_data_ref, err_set_type->type_ref);
uint64_t tag_offset_in_bits = 8*LLVMOffsetOfElement(g->target_data_ref, entry->type_ref, err_union_err_index);
- uint64_t value_debug_size_in_bits = 8*LLVMStoreSizeOfType(g->target_data_ref, child_type->type_ref);
- uint64_t value_debug_align_in_bits = 8*LLVMABISizeOfType(g->target_data_ref, child_type->type_ref);
+ uint64_t value_debug_size_in_bits = 8*LLVMStoreSizeOfType(g->target_data_ref, payload_type->type_ref);
+ uint64_t value_debug_align_in_bits = 8*LLVMABISizeOfType(g->target_data_ref, payload_type->type_ref);
uint64_t value_offset_in_bits = 8*LLVMOffsetOfElement(g->target_data_ref, entry->type_ref,
err_union_payload_index);
@@ -565,13 +575,13 @@ TypeTableEntry *get_error_type(CodeGen *g, TypeTableEntry *child_type) {
tag_debug_size_in_bits,
tag_debug_align_in_bits,
tag_offset_in_bits,
- 0, child_type->di_type),
+ 0, err_set_type->di_type),
ZigLLVMCreateDebugMemberType(g->dbuilder, ZigLLVMTypeToScope(entry->di_type),
"value", di_file, line,
value_debug_size_in_bits,
value_debug_align_in_bits,
value_offset_in_bits,
- 0, child_type->di_type),
+ 0, payload_type->di_type),
};
ZigLLVMDIType *replacement_di_type = ZigLLVMCreateDebugStructType(g->dbuilder,
@@ -587,7 +597,7 @@ TypeTableEntry *get_error_type(CodeGen *g, TypeTableEntry *child_type) {
entry->di_type = replacement_di_type;
}
- child_type->error_parent = entry;
+ g->type_table.put(type_id, entry);
return entry;
}
@@ -937,7 +947,7 @@ TypeTableEntry *get_fn_type(CodeGen *g, FnTypeId *fn_type_id) {
handle_is_ptr(fn_type_id->return_type);
bool prefix_arg_error_return_trace = g->have_err_ret_tracing &&
(fn_type_id->return_type->id == TypeTableEntryIdErrorUnion ||
- fn_type_id->return_type->id == TypeTableEntryIdPureError);
+ fn_type_id->return_type->id == TypeTableEntryIdErrorSet);
// +1 for maybe making the first argument the return value
// +1 for maybe last argument the error return trace
LLVMTypeRef *gen_param_types = allocate<LLVMTypeRef>(2 + fn_type_id->param_count);
@@ -1177,7 +1187,7 @@ static bool type_allowed_in_packed_struct(TypeTableEntry *type_entry) {
case TypeTableEntryIdUndefLit:
case TypeTableEntryIdNullLit:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
@@ -1218,7 +1228,7 @@ static bool type_allowed_in_extern(CodeGen *g, TypeTableEntry *type_entry) {
case TypeTableEntryIdUndefLit:
case TypeTableEntryIdNullLit:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
@@ -1263,7 +1273,23 @@ static bool type_allowed_in_extern(CodeGen *g, TypeTableEntry *type_entry) {
zig_unreachable();
}
-static TypeTableEntry *analyze_fn_type(CodeGen *g, AstNode *proto_node, Scope *child_scope) {
+static TypeTableEntry *get_auto_err_set_type(CodeGen *g, FnTableEntry *fn_entry) {
+ TypeTableEntry *err_set_type = new_type_table_entry(TypeTableEntryIdErrorSet);
+ buf_resize(&err_set_type->name, 0);
+ buf_appendf(&err_set_type->name, "%s.errors", buf_ptr(&fn_entry->symbol_name));
+ err_set_type->is_copyable = true;
+ err_set_type->type_ref = g->builtin_types.entry_global_error_set->type_ref;
+ err_set_type->di_type = g->builtin_types.entry_global_error_set->di_type;
+ err_set_type->data.error_set.err_count = 0;
+ err_set_type->data.error_set.errors = nullptr;
+ err_set_type->data.error_set.infer_fn = fn_entry;
+
+ g->error_di_types.append(&err_set_type->di_type);
+
+ return err_set_type;
+}
+
+static TypeTableEntry *analyze_fn_type(CodeGen *g, AstNode *proto_node, Scope *child_scope, FnTableEntry *fn_entry) {
assert(proto_node->type == NodeTypeFnProto);
AstNodeFnProto *fn_proto = &proto_node->data.fn_proto;
@@ -1359,7 +1385,7 @@ static TypeTableEntry *analyze_fn_type(CodeGen *g, AstNode *proto_node, Scope *c
case TypeTableEntryIdStruct:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -1382,8 +1408,13 @@ static TypeTableEntry *analyze_fn_type(CodeGen *g, AstNode *proto_node, Scope *c
}
}
- fn_type_id.return_type = (fn_proto->return_type == nullptr) ?
- g->builtin_types.entry_void : analyze_type_expr(g, child_scope, fn_proto->return_type);
+ TypeTableEntry *specified_return_type = analyze_type_expr(g, child_scope, fn_proto->return_type);
+ if (fn_proto->auto_err_set) {
+ TypeTableEntry *inferred_err_set_type = get_auto_err_set_type(g, fn_entry);
+ fn_type_id.return_type = get_error_union_type(g, inferred_err_set_type, specified_return_type);
+ } else {
+ fn_type_id.return_type = specified_return_type;
+ }
if (type_is_invalid(fn_type_id.return_type)) {
return g->builtin_types.entry_invalid;
@@ -1434,7 +1465,7 @@ static TypeTableEntry *analyze_fn_type(CodeGen *g, AstNode *proto_node, Scope *c
case TypeTableEntryIdStruct:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -2756,7 +2787,8 @@ TypeTableEntry *get_test_fn_type(CodeGen *g) {
return g->test_fn_type;
FnTypeId fn_type_id = {0};
- fn_type_id.return_type = get_error_type(g, g->builtin_types.entry_void);
+ fn_type_id.return_type = get_error_union_type(g, g->builtin_types.entry_global_error_set,
+ g->builtin_types.entry_void);
g->test_fn_type = get_fn_type(g, &fn_type_id);
return g->test_fn_type;
}
@@ -2824,7 +2856,7 @@ static void resolve_decl_fn(CodeGen *g, TldFn *tld_fn) {
Scope *child_scope = fn_table_entry->fndef_scope ? &fn_table_entry->fndef_scope->base : tld_fn->base.parent_scope;
- fn_table_entry->type_entry = analyze_fn_type(g, source_node, child_scope);
+ fn_table_entry->type_entry = analyze_fn_type(g, source_node, child_scope, fn_table_entry);
if (fn_proto->section_expr != nullptr) {
if (fn_table_entry->body_node == nullptr) {
@@ -2949,29 +2981,6 @@ static void preview_test_decl(CodeGen *g, AstNode *node, ScopeDecls *decls_scope
g->resolve_queue.append(&tld_fn->base);
}
-static void preview_error_value_decl(CodeGen *g, AstNode *node) {
- assert(node->type == NodeTypeErrorValueDecl);
-
- ErrorTableEntry *err = allocate<ErrorTableEntry>(1);
-
- err->decl_node = node;
- buf_init_from_buf(&err->name, node->data.error_value_decl.name);
-
- auto existing_entry = g->error_table.maybe_get(&err->name);
- if (existing_entry) {
- // duplicate error definitions allowed and they get the same value
- err->value = existing_entry->value->value;
- } else {
- size_t error_value_count = g->error_decls.length;
- assert((uint32_t)error_value_count < (((uint32_t)1) << (uint32_t)g->err_tag_type->data.integral.bit_count));
- err->value = (uint32_t)error_value_count;
- g->error_decls.append(node);
- g->error_table.put(&err->name, err);
- }
-
- node->data.error_value_decl.err = err;
-}
-
static void preview_comptime_decl(CodeGen *g, AstNode *node, ScopeDecls *decls_scope) {
assert(node->type == NodeTypeCompTime);
@@ -3045,10 +3054,6 @@ void scan_decls(CodeGen *g, ScopeDecls *decls_scope, AstNode *node) {
import->use_decls.append(node);
break;
}
- case NodeTypeErrorValueDecl:
- // error value declarations do not depend on other top level decls
- preview_error_value_decl(g, node);
- break;
case NodeTypeTestDecl:
preview_test_decl(g, node, decls_scope);
break;
@@ -3097,6 +3102,7 @@ void scan_decls(CodeGen *g, ScopeDecls *decls_scope, AstNode *node) {
case NodeTypeVarLiteral:
case NodeTypeIfErrorExpr:
case NodeTypeTestExpr:
+ case NodeTypeErrorSetDecl:
zig_unreachable();
}
}
@@ -3147,7 +3153,7 @@ TypeTableEntry *validate_var_type(CodeGen *g, AstNode *source_node, TypeTableEnt
case TypeTableEntryIdStruct:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -3403,13 +3409,16 @@ bool types_match_const_cast_only(TypeTableEntry *expected_type, TypeTableEntry *
actual_type->data.maybe.child_type);
}
- // error
+ // error union
if (expected_type->id == TypeTableEntryIdErrorUnion &&
actual_type->id == TypeTableEntryIdErrorUnion)
{
return types_match_const_cast_only(
- expected_type->data.error.child_type,
- actual_type->data.error.child_type);
+ expected_type->data.error_union.payload_type,
+ actual_type->data.error_union.payload_type) &&
+ types_match_const_cast_only(
+ expected_type->data.error_union.err_set_type,
+ actual_type->data.error_union.err_set_type);
}
// fn
@@ -3625,7 +3634,7 @@ static bool is_container(TypeTableEntry *type_entry) {
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
@@ -3673,7 +3682,7 @@ void resolve_container_type(CodeGen *g, TypeTableEntry *type_entry) {
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
@@ -3774,14 +3783,36 @@ void analyze_fn_ir(CodeGen *g, FnTableEntry *fn_table_entry, AstNode *return_typ
&fn_table_entry->analyzed_executable, fn_type_id->return_type, return_type_node);
fn_table_entry->implicit_return_type = block_return_type;
- if (block_return_type->id == TypeTableEntryIdInvalid ||
- fn_table_entry->analyzed_executable.invalid)
- {
+ if (type_is_invalid(block_return_type) || fn_table_entry->analyzed_executable.invalid) {
assert(g->errors.length > 0);
fn_table_entry->anal_state = FnAnalStateInvalid;
return;
}
+ if (fn_type_id->return_type->id == TypeTableEntryIdErrorUnion) {
+ TypeTableEntry *return_err_set_type = fn_type_id->return_type->data.error_union.err_set_type;
+ if (return_err_set_type->data.error_set.infer_fn != nullptr) {
+ TypeTableEntry *inferred_err_set_type;
+ if (fn_table_entry->implicit_return_type->id == TypeTableEntryIdErrorSet) {
+ inferred_err_set_type = fn_table_entry->implicit_return_type;
+ } else if (fn_table_entry->implicit_return_type->id == TypeTableEntryIdErrorUnion) {
+ inferred_err_set_type = fn_table_entry->implicit_return_type->data.error_union.err_set_type;
+ } else {
+ add_node_error(g, return_type_node,
+ buf_sprintf("function with inferred error set must return at least one possible error"));
+ fn_table_entry->anal_state = FnAnalStateInvalid;
+ return;
+ }
+
+ return_err_set_type->data.error_set.infer_fn = nullptr;
+ return_err_set_type->data.error_set.err_count = inferred_err_set_type->data.error_set.err_count;
+ return_err_set_type->data.error_set.errors = allocate<ErrorTableEntry *>(inferred_err_set_type->data.error_set.err_count);
+ for (uint32_t i = 0; i < inferred_err_set_type->data.error_set.err_count; i += 1) {
+ return_err_set_type->data.error_set.errors[i] = inferred_err_set_type->data.error_set.errors[i];
+ }
+ }
+ }
+
if (g->verbose_ir) {
fprintf(stderr, "{ // (analyzed)\n");
ir_print(g, stderr, &fn_table_entry->analyzed_executable, 4);
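A minimal Zig sketch (illustrative, not from this patch) of what the inferred-error-set analysis above does: the set attached to a "!T" return type is filled in from the errors the body can actually return, and a body that cannot fail triggers the new diagnostic.

    // The inferred error set of mightFail ends up containing only error.ItBroke.
    fn mightFail(bad: bool) !void {
        if (bad) return error.ItBroke;
    }

    // Rejected by the check above:
    // "function with inferred error set must return at least one possible error"
    fn neverFails() !i32 {
        return 1;
    }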
@@ -3791,7 +3822,7 @@ void analyze_fn_ir(CodeGen *g, FnTableEntry *fn_table_entry, AstNode *return_typ
fn_table_entry->anal_state = FnAnalStateComplete;
}
-static void analyze_fn_body(CodeGen *g, FnTableEntry *fn_table_entry) {
+void analyze_fn_body(CodeGen *g, FnTableEntry *fn_table_entry) {
assert(fn_table_entry->anal_state != FnAnalStateProbing);
if (fn_table_entry->anal_state != FnAnalStateReady)
return;
@@ -4022,7 +4053,8 @@ void semantic_analyze(CodeGen *g) {
for (; g->resolve_queue_index < g->resolve_queue.length; g->resolve_queue_index += 1) {
Tld *tld = g->resolve_queue.at(g->resolve_queue_index);
bool pointer_only = false;
- resolve_top_level_decl(g, tld, pointer_only, nullptr);
+ AstNode *source_node = nullptr;
+ resolve_top_level_decl(g, tld, pointer_only, source_node);
}
for (; g->fn_defs_index < g->fn_defs.length; g->fn_defs_index += 1) {
@@ -4114,7 +4146,7 @@ bool handle_is_ptr(TypeTableEntry *type_entry) {
case TypeTableEntryIdInt:
case TypeTableEntryIdFloat:
case TypeTableEntryIdPointer:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
case TypeTableEntryIdEnum:
return false;
@@ -4122,7 +4154,7 @@ bool handle_is_ptr(TypeTableEntry *type_entry) {
case TypeTableEntryIdStruct:
return type_has_bits(type_entry);
case TypeTableEntryIdErrorUnion:
- return type_has_bits(type_entry->data.error.child_type);
+ return type_has_bits(type_entry->data.error_union.payload_type);
case TypeTableEntryIdMaybe:
return type_has_bits(type_entry->data.maybe.child_type) &&
type_entry->data.maybe.child_type->id != TypeTableEntryIdPointer &&
@@ -4386,9 +4418,9 @@ static uint32_t hash_const_val(ConstExprValue *const_val) {
case TypeTableEntryIdErrorUnion:
// TODO better hashing algorithm
return 3415065496;
- case TypeTableEntryIdPureError:
- // TODO better hashing algorithm
- return 2630160122;
+ case TypeTableEntryIdErrorSet:
+ assert(const_val->data.x_err_set != nullptr);
+ return const_val->data.x_err_set->value ^ 2630160122;
case TypeTableEntryIdFn:
return 4133894920 ^ hash_ptr(const_val->data.x_fn.fn_entry);
case TypeTableEntryIdNamespace:
@@ -4515,7 +4547,7 @@ bool type_requires_comptime(TypeTableEntry *type_entry) {
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdEnum:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
case TypeTableEntryIdBool:
case TypeTableEntryIdInt:
@@ -4894,8 +4926,8 @@ bool const_values_equal(ConstExprValue *a, ConstExprValue *b) {
return a->data.x_type == b->data.x_type;
case TypeTableEntryIdVoid:
return true;
- case TypeTableEntryIdPureError:
- return a->data.x_pure_err == b->data.x_pure_err;
+ case TypeTableEntryIdErrorSet:
+ return a->data.x_err_set->value == b->data.x_err_set->value;
case TypeTableEntryIdFn:
return a->data.x_fn.fn_entry == b->data.x_fn.fn_entry;
case TypeTableEntryIdBool:
@@ -5256,9 +5288,9 @@ void render_const_value(CodeGen *g, Buf *buf, ConstExprValue *const_val) {
buf_appendf(buf, "(union %s constant)", buf_ptr(&type_entry->name));
return;
}
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
{
- buf_appendf(buf, "(pure error constant)");
+ buf_appendf(buf, "%s.%s", buf_ptr(&type_entry->name), buf_ptr(&const_val->data.x_err_set->name));
return;
}
case TypeTableEntryIdArgTuple:
@@ -5319,8 +5351,7 @@ uint32_t type_id_hash(TypeId x) {
case TypeTableEntryIdUndefLit:
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
- case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -5329,6 +5360,8 @@ uint32_t type_id_hash(TypeId x) {
case TypeTableEntryIdBoundFn:
case TypeTableEntryIdArgTuple:
zig_unreachable();
+ case TypeTableEntryIdErrorUnion:
+ return hash_ptr(x.data.error_union.err_set_type) ^ hash_ptr(x.data.error_union.payload_type);
case TypeTableEntryIdPointer:
return hash_ptr(x.data.pointer.child_type) +
(x.data.pointer.is_const ? (uint32_t)2749109194 : (uint32_t)4047371087) +
@@ -5363,8 +5396,7 @@ bool type_id_eql(TypeId a, TypeId b) {
case TypeTableEntryIdUndefLit:
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
- case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -5374,6 +5406,10 @@ bool type_id_eql(TypeId a, TypeId b) {
case TypeTableEntryIdArgTuple:
case TypeTableEntryIdOpaque:
zig_unreachable();
+ case TypeTableEntryIdErrorUnion:
+ return a.data.error_union.err_set_type == b.data.error_union.err_set_type &&
+ a.data.error_union.payload_type == b.data.error_union.payload_type;
+
case TypeTableEntryIdPointer:
return a.data.pointer.child_type == b.data.pointer.child_type &&
a.data.pointer.is_const == b.data.pointer.is_const &&
@@ -5478,7 +5514,7 @@ static const TypeTableEntryId all_type_ids[] = {
TypeTableEntryIdNullLit,
TypeTableEntryIdMaybe,
TypeTableEntryIdErrorUnion,
- TypeTableEntryIdPureError,
+ TypeTableEntryIdErrorSet,
TypeTableEntryIdEnum,
TypeTableEntryIdUnion,
TypeTableEntryIdFn,
@@ -5533,7 +5569,7 @@ size_t type_id_index(TypeTableEntryId id) {
return 13;
case TypeTableEntryIdErrorUnion:
return 14;
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
return 15;
case TypeTableEntryIdEnum:
return 16;
@@ -5590,8 +5626,8 @@ const char *type_id_name(TypeTableEntryId id) {
return "Nullable";
case TypeTableEntryIdErrorUnion:
return "ErrorUnion";
- case TypeTableEntryIdPureError:
- return "Error";
+ case TypeTableEntryIdErrorSet:
+ return "ErrorSet";
case TypeTableEntryIdEnum:
return "Enum";
case TypeTableEntryIdUnion:
diff --git a/src/analyze.hpp b/src/analyze.hpp
index dab6d17d0c..06c582fb77 100644
--- a/src/analyze.hpp
+++ b/src/analyze.hpp
@@ -30,7 +30,7 @@ TypeTableEntry *get_slice_type(CodeGen *g, TypeTableEntry *ptr_type);
TypeTableEntry *get_partial_container_type(CodeGen *g, Scope *scope, ContainerKind kind,
AstNode *decl_node, const char *name, ContainerLayout layout);
TypeTableEntry *get_smallest_unsigned_int_type(CodeGen *g, uint64_t x);
-TypeTableEntry *get_error_type(CodeGen *g, TypeTableEntry *child_type);
+TypeTableEntry *get_error_union_type(CodeGen *g, TypeTableEntry *err_set_type, TypeTableEntry *payload_type);
TypeTableEntry *get_bound_fn_type(CodeGen *g, FnTableEntry *fn_entry);
TypeTableEntry *get_opaque_type(CodeGen *g, Scope *scope, AstNode *source_node, const char *name);
TypeTableEntry *get_struct_type(CodeGen *g, const char *type_name, const char *field_names[],
@@ -46,7 +46,6 @@ bool type_has_bits(TypeTableEntry *type_entry);
ImportTableEntry *add_source_file(CodeGen *g, PackageTableEntry *package, Buf *abs_full_path, Buf *source_code);
-// TODO move these over, these used to be static
bool types_match_const_cast_only(TypeTableEntry *expected_type, TypeTableEntry *actual_type);
VariableTableEntry *find_variable(CodeGen *g, Scope *orig_context, Buf *name);
Tld *find_decl(CodeGen *g, Scope *scope, Buf *name);
@@ -188,6 +187,7 @@ void add_fn_export(CodeGen *g, FnTableEntry *fn_table_entry, Buf *symbol_name, G
ConstExprValue *get_builtin_value(CodeGen *codegen, const char *name);
TypeTableEntry *get_ptr_to_stack_trace_type(CodeGen *g);
+void analyze_fn_body(CodeGen *g, FnTableEntry *fn_table_entry);
#endif
diff --git a/src/ast_render.cpp b/src/ast_render.cpp
index 79cbc1b49a..88895d2d36 100644
--- a/src/ast_render.cpp
+++ b/src/ast_render.cpp
@@ -54,6 +54,7 @@ static const char *bin_op_str(BinOpType bin_op) {
case BinOpTypeUnwrapMaybe: return "??";
case BinOpTypeArrayCat: return "++";
case BinOpTypeArrayMult: return "**";
+ case BinOpTypeErrorUnion: return "!";
}
zig_unreachable();
}
@@ -67,7 +68,6 @@ static const char *prefix_op_str(PrefixOp prefix_op) {
case PrefixOpBinNot: return "~";
case PrefixOpDereference: return "*";
case PrefixOpMaybe: return "?";
- case PrefixOpError: return "%";
case PrefixOpUnwrapMaybe: return "??";
}
zig_unreachable();
@@ -174,8 +174,6 @@ static const char *node_type_str(NodeType node_type) {
return "Defer";
case NodeTypeVariableDeclaration:
return "VariableDeclaration";
- case NodeTypeErrorValueDecl:
- return "ErrorValueDecl";
case NodeTypeTestDecl:
return "TestDecl";
case NodeTypeIntLiteral:
@@ -244,6 +242,8 @@ static const char *node_type_str(NodeType node_type) {
return "IfErrorExpr";
case NodeTypeTestExpr:
return "TestExpr";
+ case NodeTypeErrorSetDecl:
+ return "ErrorSetDecl";
}
zig_unreachable();
}
@@ -396,7 +396,6 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
if (child->type == NodeTypeUse ||
child->type == NodeTypeVariableDeclaration ||
- child->type == NodeTypeErrorValueDecl ||
child->type == NodeTypeFnProto)
{
fprintf(ar->f, ";");
@@ -452,6 +451,9 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
AstNode *return_type_node = node->data.fn_proto.return_type;
assert(return_type_node != nullptr);
fprintf(ar->f, " ");
+ if (node->data.fn_proto.auto_err_set) {
+ fprintf(ar->f, "!");
+ }
render_node_grouped(ar, return_type_node);
break;
}
@@ -1017,9 +1019,26 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
render_node_ungrouped(ar, node->data.unwrap_err_expr.op2);
break;
}
+ case NodeTypeErrorSetDecl:
+ {
+ fprintf(ar->f, "error {\n");
+ ar->indent += ar->indent_size;
+
+ for (size_t i = 0; i < node->data.err_set_decl.decls.length; i += 1) {
+ AstNode *field_node = node->data.err_set_decl.decls.at(i);
+ assert(field_node->type == NodeTypeSymbol);
+ print_indent(ar);
+ print_symbol(ar, field_node->data.symbol_expr.symbol);
+ fprintf(ar->f, ",\n");
+ }
+
+ ar->indent -= ar->indent_size;
+ print_indent(ar);
+ fprintf(ar->f, "}");
+ break;
+ }
case NodeTypeFnDecl:
case NodeTypeParamDecl:
- case NodeTypeErrorValueDecl:
case NodeTypeTestDecl:
case NodeTypeStructField:
case NodeTypeUse:
diff --git a/src/codegen.cpp b/src/codegen.cpp
index b1412b2b59..ea82b3fcd6 100644
--- a/src/codegen.cpp
+++ b/src/codegen.cpp
@@ -92,9 +92,6 @@ CodeGen *codegen_create(Buf *root_src_path, const ZigTarget *target, OutType out
g->want_h_file = (out_type == OutTypeObj || out_type == OutTypeLib);
buf_resize(&g->global_asm, 0);
- // reserve index 0 to indicate no error
- g->error_decls.append(nullptr);
-
if (root_src_path) {
Buf *src_basename = buf_alloc();
Buf *src_dir = buf_alloc();
@@ -410,7 +407,7 @@ static uint32_t get_err_ret_trace_arg_index(CodeGen *g, FnTableEntry *fn_table_e
}
TypeTableEntry *fn_type = fn_table_entry->type_entry;
TypeTableEntry *return_type = fn_type->data.fn.fn_type_id.return_type;
- if (return_type->id != TypeTableEntryIdErrorUnion && return_type->id != TypeTableEntryIdPureError) {
+ if (return_type->id != TypeTableEntryIdErrorUnion && return_type->id != TypeTableEntryIdErrorSet) {
return UINT32_MAX;
}
bool first_arg_ret = type_has_bits(return_type) && handle_is_ptr(return_type);
@@ -1442,7 +1439,7 @@ static LLVMValueRef ir_render_return(CodeGen *g, IrExecutable *executable, IrIns
is_err_return = return_instruction->value->value.data.rh_error_union == RuntimeHintErrorUnionError;
// TODO: emit a branch to check if the return value is an error
}
- } else if (return_type->id == TypeTableEntryIdPureError) {
+ } else if (return_type->id == TypeTableEntryIdErrorSet) {
is_err_return = true;
}
if (is_err_return) {
@@ -1823,7 +1820,7 @@ static LLVMValueRef ir_render_bin_op(CodeGen *g, IrExecutable *executable,
} else if (type_entry->id == TypeTableEntryIdEnum) {
LLVMIntPredicate pred = cmp_op_to_int_predicate(op_id, false);
return LLVMBuildICmp(g->builder, pred, op1_value, op2_value, "");
- } else if (type_entry->id == TypeTableEntryIdPureError ||
+ } else if (type_entry->id == TypeTableEntryIdErrorSet ||
type_entry->id == TypeTableEntryIdPointer ||
type_entry->id == TypeTableEntryIdBool)
{
@@ -2139,7 +2136,7 @@ static LLVMValueRef ir_render_int_to_enum(CodeGen *g, IrExecutable *executable,
static LLVMValueRef ir_render_int_to_err(CodeGen *g, IrExecutable *executable, IrInstructionIntToErr *instruction) {
TypeTableEntry *wanted_type = instruction->base.value.type;
- assert(wanted_type->id == TypeTableEntryIdPureError);
+ assert(wanted_type->id == TypeTableEntryIdErrorSet);
TypeTableEntry *actual_type = instruction->target->value.type;
assert(actual_type->id == TypeTableEntryIdInt);
@@ -2156,11 +2153,11 @@ static LLVMValueRef ir_render_int_to_err(CodeGen *g, IrExecutable *executable, I
eval_min_max_value_int(g, actual_type, &biggest_possible_err_val, true);
if (bigint_fits_in_bits(&biggest_possible_err_val, 64, false) &&
- bigint_as_unsigned(&biggest_possible_err_val) < g->error_decls.length)
+ bigint_as_unsigned(&biggest_possible_err_val) < g->errors_by_index.length)
{
ok_bit = neq_zero_bit;
} else {
- LLVMValueRef error_value_count = LLVMConstInt(actual_type->type_ref, g->error_decls.length, false);
+ LLVMValueRef error_value_count = LLVMConstInt(actual_type->type_ref, g->errors_by_index.length, false);
LLVMValueRef in_bounds_bit = LLVMBuildICmp(g->builder, LLVMIntULT, target_val, error_value_count, "");
ok_bit = LLVMBuildAnd(g->builder, neq_zero_bit, in_bounds_bit, "");
}
@@ -2187,15 +2184,15 @@ static LLVMValueRef ir_render_err_to_int(CodeGen *g, IrExecutable *executable, I
TypeTableEntry *actual_type = instruction->target->value.type;
LLVMValueRef target_val = ir_llvm_value(g, instruction->target);
- if (actual_type->id == TypeTableEntryIdPureError) {
+ if (actual_type->id == TypeTableEntryIdErrorSet) {
return gen_widen_or_shorten(g, ir_want_runtime_safety(g, &instruction->base),
g->err_tag_type, wanted_type, target_val);
} else if (actual_type->id == TypeTableEntryIdErrorUnion) {
- if (!type_has_bits(actual_type->data.error.child_type)) {
+ if (!type_has_bits(actual_type->data.error_union.payload_type)) {
return gen_widen_or_shorten(g, ir_want_runtime_safety(g, &instruction->base),
g->err_tag_type, wanted_type, target_val);
} else {
- zig_panic("TODO");
+ zig_panic("TODO err to int when error union payload type not void");
}
} else {
zig_unreachable();
@@ -2235,7 +2232,6 @@ static LLVMValueRef ir_render_un_op(CodeGen *g, IrExecutable *executable, IrInst
switch (op_id) {
case IrUnOpInvalid:
- case IrUnOpError:
case IrUnOpMaybe:
case IrUnOpDereference:
zig_unreachable();
@@ -2489,7 +2485,7 @@ static LLVMValueRef ir_render_call(CodeGen *g, IrExecutable *executable, IrInstr
TypeTableEntry *src_return_type = fn_type_id->return_type;
bool ret_has_bits = type_has_bits(src_return_type);
bool first_arg_ret = ret_has_bits && handle_is_ptr(src_return_type);
- bool prefix_arg_err_ret_stack = g->have_err_ret_tracing && (src_return_type->id == TypeTableEntryIdErrorUnion || src_return_type->id == TypeTableEntryIdPureError);
+ bool prefix_arg_err_ret_stack = g->have_err_ret_tracing && (src_return_type->id == TypeTableEntryIdErrorUnion || src_return_type->id == TypeTableEntryIdErrorSet);
size_t actual_param_count = instruction->arg_count + (first_arg_ret ? 1 : 0) + (prefix_arg_err_ret_stack ? 1 : 0);
bool is_var_args = fn_type_id->is_var_args;
LLVMValueRef *gen_param_values = allocate(actual_param_count);
@@ -2907,7 +2903,7 @@ static LLVMValueRef ir_render_ref(CodeGen *g, IrExecutable *executable, IrInstru
static LLVMValueRef ir_render_err_name(CodeGen *g, IrExecutable *executable, IrInstructionErrName *instruction) {
assert(g->generate_error_name_table);
- if (g->error_decls.length == 1) {
+ if (g->errors_by_index.length == 1) {
LLVMBuildUnreachable(g->builder);
return nullptr;
}
@@ -2915,7 +2911,7 @@ static LLVMValueRef ir_render_err_name(CodeGen *g, IrExecutable *executable, IrI
LLVMValueRef err_val = ir_llvm_value(g, instruction->value);
if (ir_want_runtime_safety(g, &instruction->base)) {
LLVMValueRef zero = LLVMConstNull(LLVMTypeOf(err_val));
- LLVMValueRef end_val = LLVMConstInt(LLVMTypeOf(err_val), g->error_decls.length, false);
+ LLVMValueRef end_val = LLVMConstInt(LLVMTypeOf(err_val), g->errors_by_index.length, false);
add_bounds_check(g, err_val, LLVMIntNE, zero, LLVMIntULT, end_val);
}
@@ -3393,11 +3389,11 @@ static LLVMValueRef ir_render_overflow_op(CodeGen *g, IrExecutable *executable,
static LLVMValueRef ir_render_test_err(CodeGen *g, IrExecutable *executable, IrInstructionTestErr *instruction) {
TypeTableEntry *err_union_type = instruction->value->value.type;
- TypeTableEntry *child_type = err_union_type->data.error.child_type;
+ TypeTableEntry *payload_type = err_union_type->data.error_union.payload_type;
LLVMValueRef err_union_handle = ir_llvm_value(g, instruction->value);
LLVMValueRef err_val;
- if (type_has_bits(child_type)) {
+ if (type_has_bits(payload_type)) {
LLVMValueRef err_val_ptr = LLVMBuildStructGEP(g->builder, err_union_handle, err_union_err_index, "");
err_val = gen_load_untyped(g, err_val_ptr, 0, false, "");
} else {
@@ -3412,11 +3408,11 @@ static LLVMValueRef ir_render_unwrap_err_code(CodeGen *g, IrExecutable *executab
TypeTableEntry *ptr_type = instruction->value->value.type;
assert(ptr_type->id == TypeTableEntryIdPointer);
TypeTableEntry *err_union_type = ptr_type->data.pointer.child_type;
- TypeTableEntry *child_type = err_union_type->data.error.child_type;
+ TypeTableEntry *payload_type = err_union_type->data.error_union.payload_type;
LLVMValueRef err_union_ptr = ir_llvm_value(g, instruction->value);
LLVMValueRef err_union_handle = get_handle_value(g, err_union_ptr, err_union_type, ptr_type);
- if (type_has_bits(child_type)) {
+ if (type_has_bits(payload_type)) {
LLVMValueRef err_val_ptr = LLVMBuildStructGEP(g->builder, err_union_handle, err_union_err_index, "");
return gen_load_untyped(g, err_val_ptr, 0, false, "");
} else {
@@ -3428,13 +3424,13 @@ static LLVMValueRef ir_render_unwrap_err_payload(CodeGen *g, IrExecutable *execu
TypeTableEntry *ptr_type = instruction->value->value.type;
assert(ptr_type->id == TypeTableEntryIdPointer);
TypeTableEntry *err_union_type = ptr_type->data.pointer.child_type;
- TypeTableEntry *child_type = err_union_type->data.error.child_type;
+ TypeTableEntry *payload_type = err_union_type->data.error_union.payload_type;
LLVMValueRef err_union_ptr = ir_llvm_value(g, instruction->value);
LLVMValueRef err_union_handle = get_handle_value(g, err_union_ptr, err_union_type, ptr_type);
- if (ir_want_runtime_safety(g, &instruction->base) && instruction->safety_check_on && g->error_decls.length > 1) {
+ if (ir_want_runtime_safety(g, &instruction->base) && instruction->safety_check_on && g->errors_by_index.length > 1) {
LLVMValueRef err_val;
- if (type_has_bits(child_type)) {
+ if (type_has_bits(payload_type)) {
LLVMValueRef err_val_ptr = LLVMBuildStructGEP(g->builder, err_union_handle, err_union_err_index, "");
err_val = gen_load_untyped(g, err_val_ptr, 0, false, "");
} else {
@@ -3452,7 +3448,7 @@ static LLVMValueRef ir_render_unwrap_err_payload(CodeGen *g, IrExecutable *execu
LLVMPositionBuilderAtEnd(g->builder, ok_block);
}
- if (type_has_bits(child_type)) {
+ if (type_has_bits(payload_type)) {
return LLVMBuildStructGEP(g->builder, err_union_handle, err_union_payload_index, "");
} else {
return nullptr;
@@ -3493,10 +3489,10 @@ static LLVMValueRef ir_render_err_wrap_code(CodeGen *g, IrExecutable *executable
assert(wanted_type->id == TypeTableEntryIdErrorUnion);
- TypeTableEntry *child_type = wanted_type->data.error.child_type;
+ TypeTableEntry *payload_type = wanted_type->data.error_union.payload_type;
LLVMValueRef err_val = ir_llvm_value(g, instruction->value);
- if (!type_has_bits(child_type))
+ if (!type_has_bits(payload_type))
return err_val;
assert(instruction->tmp_ptr);
@@ -3512,11 +3508,11 @@ static LLVMValueRef ir_render_err_wrap_payload(CodeGen *g, IrExecutable *executa
assert(wanted_type->id == TypeTableEntryIdErrorUnion);
- TypeTableEntry *child_type = wanted_type->data.error.child_type;
+ TypeTableEntry *payload_type = wanted_type->data.error_union.payload_type;
LLVMValueRef ok_err_val = LLVMConstNull(g->err_tag_type->type_ref);
- if (!type_has_bits(child_type))
+ if (!type_has_bits(payload_type))
return ok_err_val;
assert(instruction->tmp_ptr);
@@ -3527,7 +3523,7 @@ static LLVMValueRef ir_render_err_wrap_payload(CodeGen *g, IrExecutable *executa
gen_store_untyped(g, ok_err_val, err_tag_ptr, 0, false);
LLVMValueRef payload_ptr = LLVMBuildStructGEP(g->builder, instruction->tmp_ptr, err_union_payload_index, "");
- gen_assign_raw(g, payload_ptr, get_pointer_to_type(g, child_type, false), payload_val);
+ gen_assign_raw(g, payload_ptr, get_pointer_to_type(g, payload_type, false), payload_val);
return instruction->tmp_ptr;
}
@@ -3700,6 +3696,7 @@ static LLVMValueRef ir_render_instruction(CodeGen *g, IrExecutable *executable,
case IrInstructionIdArgType:
case IrInstructionIdTagType:
case IrInstructionIdExport:
+ case IrInstructionIdErrorUnion:
zig_unreachable();
case IrInstructionIdReturn:
return ir_render_return(g, executable, (IrInstructionReturn *)instruction);
@@ -3933,7 +3930,7 @@ static LLVMValueRef pack_const_int(CodeGen *g, LLVMTypeRef big_int_type_ref, Con
case TypeTableEntryIdUndefLit:
case TypeTableEntryIdNullLit:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
@@ -4026,10 +4023,10 @@ static LLVMValueRef gen_const_val(CodeGen *g, ConstExprValue *const_val) {
switch (type_entry->id) {
case TypeTableEntryIdInt:
return bigint_to_llvm_const(type_entry->type_ref, &const_val->data.x_bigint);
- case TypeTableEntryIdPureError:
- assert(const_val->data.x_pure_err);
- return LLVMConstInt(g->builtin_types.entry_pure_error->type_ref,
- const_val->data.x_pure_err->value, false);
+ case TypeTableEntryIdErrorSet:
+ assert(const_val->data.x_err_set != nullptr);
+ return LLVMConstInt(g->builtin_types.entry_global_error_set->type_ref,
+ const_val->data.x_err_set->value, false);
case TypeTableEntryIdFloat:
switch (type_entry->data.floating.bit_count) {
case 32:
@@ -4330,8 +4327,8 @@ static LLVMValueRef gen_const_val(CodeGen *g, ConstExprValue *const_val) {
}
case TypeTableEntryIdErrorUnion:
{
- TypeTableEntry *child_type = type_entry->data.error.child_type;
- if (!type_has_bits(child_type)) {
+ TypeTableEntry *payload_type = type_entry->data.error_union.payload_type;
+ if (!type_has_bits(payload_type)) {
uint64_t value = const_val->data.x_err_union.err ? const_val->data.x_err_union.err->value : 0;
return LLVMConstInt(g->err_tag_type->type_ref, value, false);
} else {
@@ -4340,7 +4337,7 @@ static LLVMValueRef gen_const_val(CodeGen *g, ConstExprValue *const_val) {
bool make_unnamed_struct;
if (const_val->data.x_err_union.err) {
err_tag_value = LLVMConstInt(g->err_tag_type->type_ref, const_val->data.x_err_union.err->value, false);
- err_payload_value = LLVMConstNull(child_type->type_ref);
+ err_payload_value = LLVMConstNull(payload_type->type_ref);
make_unnamed_struct = false;
} else {
err_tag_value = LLVMConstNull(g->err_tag_type->type_ref);
@@ -4410,21 +4407,20 @@ static void render_const_val_global(CodeGen *g, ConstExprValue *const_val, const
}
static void generate_error_name_table(CodeGen *g) {
- if (g->err_name_table != nullptr || !g->generate_error_name_table || g->error_decls.length == 1) {
+ if (g->err_name_table != nullptr || !g->generate_error_name_table || g->errors_by_index.length == 1) {
return;
}
- assert(g->error_decls.length > 0);
+ assert(g->errors_by_index.length > 0);
TypeTableEntry *u8_ptr_type = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
TypeTableEntry *str_type = get_slice_type(g, u8_ptr_type);
- LLVMValueRef *values = allocate<LLVMValueRef>(g->error_decls.length);
+ LLVMValueRef *values = allocate<LLVMValueRef>(g->errors_by_index.length);
values[0] = LLVMGetUndef(str_type->type_ref);
- for (size_t i = 1; i < g->error_decls.length; i += 1) {
- AstNode *error_decl_node = g->error_decls.at(i);
- assert(error_decl_node->type == NodeTypeErrorValueDecl);
- Buf *name = error_decl_node->data.error_value_decl.name;
+ for (size_t i = 1; i < g->errors_by_index.length; i += 1) {
+ ErrorTableEntry *err_entry = g->errors_by_index.at(i);
+ Buf *name = &err_entry->name;
g->largest_err_name_len = max(g->largest_err_name_len, buf_len(name));
@@ -4443,7 +4439,7 @@ static void generate_error_name_table(CodeGen *g) {
values[i] = LLVMConstNamedStruct(str_type->type_ref, fields, 2);
}
- LLVMValueRef err_name_table_init = LLVMConstArray(str_type->type_ref, values, (unsigned)g->error_decls.length);
+ LLVMValueRef err_name_table_init = LLVMConstArray(str_type->type_ref, values, (unsigned)g->errors_by_index.length);
g->err_name_table = LLVMAddGlobal(g->module, LLVMTypeOf(err_name_table_init),
buf_ptr(get_mangled_name(g, buf_create_from_str("__zig_err_name_table"), false)));
@@ -4573,6 +4569,28 @@ static void validate_inline_fns(CodeGen *g) {
static void do_code_gen(CodeGen *g) {
assert(!g->errors.length);
+ {
+ // create debug type for error sets
+ assert(g->err_enumerators.length == g->errors_by_index.length);
+ uint64_t tag_debug_size_in_bits = 8*LLVMStoreSizeOfType(g->target_data_ref, g->err_tag_type->type_ref);
+ uint64_t tag_debug_align_in_bits = 8*LLVMABIAlignmentOfType(g->target_data_ref, g->err_tag_type->type_ref);
+ ZigLLVMDIFile *err_set_di_file = nullptr;
+ ZigLLVMDIType *err_set_di_type = ZigLLVMCreateDebugEnumerationType(g->dbuilder,
+ ZigLLVMCompileUnitToScope(g->compile_unit), buf_ptr(&g->builtin_types.entry_global_error_set->name),
+ err_set_di_file, 0,
+ tag_debug_size_in_bits,
+ tag_debug_align_in_bits,
+ g->err_enumerators.items, g->err_enumerators.length,
+ g->err_tag_type->di_type, "");
+ ZigLLVMReplaceTemporary(g->dbuilder, g->builtin_types.entry_global_error_set->di_type, err_set_di_type);
+ g->builtin_types.entry_global_error_set->di_type = err_set_di_type;
+
+ for (size_t i = 0; i < g->error_di_types.length; i += 1) {
+ ZigLLVMDIType **di_type_ptr = g->error_di_types.at(i);
+ *di_type_ptr = err_set_di_type;
+ }
+ }
+
codegen_add_time_event(g, "Code Generation");
generate_error_name_table(g);
@@ -5176,16 +5194,23 @@ static void define_builtin_types(CodeGen *g) {
}
{
- TypeTableEntry *entry = new_type_table_entry(TypeTableEntryIdPureError);
+ TypeTableEntry *entry = new_type_table_entry(TypeTableEntryIdErrorSet);
buf_init_from_str(&entry->name, "error");
// TODO allow overriding this type and keep track of max value and emit an
// error if there are too many errors declared
g->err_tag_type = g->builtin_types.entry_u16;
- g->builtin_types.entry_pure_error = entry;
+ g->builtin_types.entry_global_error_set = entry;
entry->type_ref = g->err_tag_type->type_ref;
- entry->di_type = g->err_tag_type->di_type;
+
+ entry->di_type = ZigLLVMCreateReplaceableCompositeType(g->dbuilder,
+ ZigLLVMTag_DW_enumeration_type(), "error",
+ ZigLLVMCompileUnitToScope(g->compile_unit), nullptr, 0);
+
+ // reserve index 0 to indicate no error
+ g->err_enumerators.append(ZigLLVMCreateDebugEnumerator(g->dbuilder, "(none)", 0));
+ g->errors_by_index.append(nullptr);
g->primitive_type_table.put(&entry->name, entry);
}
@@ -5815,7 +5840,7 @@ static void prepend_c_type_to_decl_list(CodeGen *g, GenH *gen_h, TypeTableEntry
case TypeTableEntryIdBoundFn:
case TypeTableEntryIdArgTuple:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
zig_unreachable();
case TypeTableEntryIdVoid:
case TypeTableEntryIdUnreachable:
@@ -5988,7 +6013,7 @@ static void get_c_type(CodeGen *g, GenH *gen_h, TypeTableEntry *type_entry, Buf
return;
}
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
zig_panic("TODO implement get_c_type for more types");
case TypeTableEntryIdInvalid:
@@ -6155,7 +6180,7 @@ static void gen_h_file(CodeGen *g) {
case TypeTableEntryIdUndefLit:
case TypeTableEntryIdNullLit:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
diff --git a/src/ir.cpp b/src/ir.cpp
index 9d5f59d187..cdae4b1511 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -580,6 +580,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionErrorReturnTrace
return IrInstructionIdErrorReturnTrace;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionErrorUnion *) {
+ return IrInstructionIdErrorUnion;
+}
+
template<typename T>
static T *ir_create_instruction(IrBuilder *irb, Scope *scope, AstNode *source_node) {
T *special_instruction = allocate<T>(1);
@@ -2326,6 +2330,19 @@ static IrInstruction *ir_build_error_return_trace(IrBuilder *irb, Scope *scope,
return &instruction->base;
}
+static IrInstruction *ir_build_error_union(IrBuilder *irb, Scope *scope, AstNode *source_node,
+ IrInstruction *err_set, IrInstruction *payload)
+{
+ IrInstructionErrorUnion *instruction = ir_build_instruction<IrInstructionErrorUnion>(irb, scope, source_node);
+ instruction->err_set = err_set;
+ instruction->payload = payload;
+
+ ir_ref_instruction(err_set, irb->current_basic_block);
+ ir_ref_instruction(payload, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
static void ir_count_defers(IrBuilder *irb, Scope *inner_scope, Scope *outer_scope, size_t *results) {
results[ReturnKindUnconditional] = 0;
results[ReturnKindError] = 0;
@@ -2800,6 +2817,23 @@ static IrInstruction *ir_gen_maybe_ok_or(IrBuilder *irb, Scope *parent_scope, As
return ir_build_phi(irb, parent_scope, node, 2, incoming_blocks, incoming_values);
}
+static IrInstruction *ir_gen_error_union(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
+ assert(node->type == NodeTypeBinOpExpr);
+
+ AstNode *op1_node = node->data.bin_op_expr.op1;
+ AstNode *op2_node = node->data.bin_op_expr.op2;
+
+ IrInstruction *err_set = ir_gen_node(irb, op1_node, parent_scope);
+ if (err_set == irb->codegen->invalid_instruction)
+ return irb->codegen->invalid_instruction;
+
+ IrInstruction *payload = ir_gen_node(irb, op2_node, parent_scope);
+ if (payload == irb->codegen->invalid_instruction)
+ return irb->codegen->invalid_instruction;
+
+ return ir_build_error_union(irb, parent_scope, node, err_set, payload);
+}
+
static IrInstruction *ir_gen_bin_op(IrBuilder *irb, Scope *scope, AstNode *node) {
assert(node->type == NodeTypeBinOpExpr);
@@ -2887,6 +2921,8 @@ static IrInstruction *ir_gen_bin_op(IrBuilder *irb, Scope *scope, AstNode *node)
return ir_gen_bin_op_id(irb, scope, node, IrBinOpArrayMult);
case BinOpTypeUnwrapMaybe:
return ir_gen_maybe_ok_or(irb, scope, node);
+ case BinOpTypeErrorUnion:
+ return ir_gen_error_union(irb, scope, node);
}
zig_unreachable();
}
@@ -3990,8 +4026,6 @@ static IrInstruction *ir_gen_prefix_op_expr(IrBuilder *irb, Scope *scope, AstNod
return ir_gen_prefix_op_id_lval(irb, scope, node, IrUnOpDereference, lval);
case PrefixOpMaybe:
return ir_lval_wrap(irb, scope, ir_gen_prefix_op_id(irb, scope, node, IrUnOpMaybe), lval);
- case PrefixOpError:
- return ir_lval_wrap(irb, scope, ir_gen_prefix_op_id(irb, scope, node, IrUnOpError), lval);
case PrefixOpUnwrapMaybe:
return ir_gen_maybe_assert_ok(irb, scope, node, lval);
}
@@ -5165,7 +5199,7 @@ static IrInstruction *ir_gen_continue(IrBuilder *irb, Scope *continue_scope, Ast
static IrInstruction *ir_gen_error_type(IrBuilder *irb, Scope *scope, AstNode *node) {
assert(node->type == NodeTypeErrorType);
- return ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_pure_error);
+ return ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_global_error_set);
}
static IrInstruction *ir_gen_defer(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
@@ -5249,8 +5283,6 @@ static IrInstruction *ir_gen_err_ok_or(IrBuilder *irb, Scope *parent_scope, AstN
Scope *err_scope;
if (var_node) {
assert(var_node->type == NodeTypeSymbol);
- IrInstruction *var_type = ir_build_const_type(irb, parent_scope, node,
- irb->codegen->builtin_types.entry_pure_error);
Buf *var_name = var_node->data.symbol_expr.symbol;
bool is_const = true;
bool is_shadowable = false;
@@ -5258,7 +5290,7 @@ static IrInstruction *ir_gen_err_ok_or(IrBuilder *irb, Scope *parent_scope, AstN
is_const, is_const, is_shadowable, is_comptime);
err_scope = var->child_scope;
IrInstruction *err_val = ir_build_unwrap_err_code(irb, err_scope, node, err_union_ptr);
- ir_build_var_decl(irb, err_scope, var_node, var, var_type, nullptr, err_val);
+ ir_build_var_decl(irb, err_scope, var_node, var, nullptr, nullptr, err_val);
} else {
err_scope = parent_scope;
}
@@ -5348,6 +5380,70 @@ static IrInstruction *ir_gen_container_decl(IrBuilder *irb, Scope *parent_scope,
return ir_build_const_type(irb, parent_scope, node, container_type);
}
+static TypeTableEntry *make_err_set_with_one_item(CodeGen *g, Scope *parent_scope, AstNode *node,
+ ErrorTableEntry *err_entry)
+{
+ TypeTableEntry *err_set_type = new_type_table_entry(TypeTableEntryIdErrorSet);
+ buf_resize(&err_set_type->name, 0);
+ buf_appendf(&err_set_type->name, "@typeOf(error.%s)", buf_ptr(&err_entry->name));
+ err_set_type->is_copyable = true;
+ err_set_type->type_ref = g->builtin_types.entry_global_error_set->type_ref;
+ err_set_type->di_type = g->builtin_types.entry_global_error_set->di_type;
+ err_set_type->data.error_set.err_count = 1;
+ err_set_type->data.error_set.errors = allocate<ErrorTableEntry *>(1);
+
+ g->error_di_types.append(&err_set_type->di_type);
+
+ err_set_type->data.error_set.errors[0] = err_entry;
+
+ return err_set_type;
+}
+
+static IrInstruction *ir_gen_err_set_decl(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
+ assert(node->type == NodeTypeErrorSetDecl);
+
+ uint32_t err_count = node->data.err_set_decl.decls.length;
+
+ if (err_count == 0) {
+ add_node_error(irb->codegen, node, buf_sprintf("empty error set"));
+ return irb->codegen->invalid_instruction;
+ }
+
+ Buf *type_name = get_anon_type_name(irb->codegen, irb->exec, "error set", node);
+ TypeTableEntry *err_set_type = new_type_table_entry(TypeTableEntryIdErrorSet);
+ buf_init_from_buf(&err_set_type->name, type_name);
+ err_set_type->is_copyable = true;
+ err_set_type->type_ref = irb->codegen->builtin_types.entry_global_error_set->type_ref;
+ err_set_type->di_type = irb->codegen->builtin_types.entry_global_error_set->di_type;
+ err_set_type->data.error_set.err_count = err_count;
+    err_set_type->data.error_set.errors = allocate<ErrorTableEntry *>(err_count);
+
+ irb->codegen->error_di_types.append(&err_set_type->di_type);
+
+ for (uint32_t i = 0; i < err_count; i += 1) {
+ AstNode *symbol_node = node->data.err_set_decl.decls.at(i);
+ assert(symbol_node->type == NodeTypeSymbol);
+ Buf *err_name = symbol_node->data.symbol_expr.symbol;
+        ErrorTableEntry *err = allocate<ErrorTableEntry>(1);
+ err->decl_node = symbol_node;
+ buf_init_from_buf(&err->name, err_name);
+
+ auto existing_entry = irb->codegen->error_table.put_unique(err_name, err);
+ if (existing_entry) {
+ err->value = existing_entry->value->value;
+ } else {
+ size_t error_value_count = irb->codegen->errors_by_index.length;
+ assert((uint32_t)error_value_count < (((uint32_t)1) << (uint32_t)irb->codegen->err_tag_type->data.integral.bit_count));
+ err->value = error_value_count;
+ irb->codegen->errors_by_index.append(err);
+ irb->codegen->err_enumerators.append(ZigLLVMCreateDebugEnumerator(irb->codegen->dbuilder,
+ buf_ptr(err_name), error_value_count));
+ }
+ err_set_type->data.error_set.errors[i] = err;
+ }
+ return ir_build_const_type(irb, parent_scope, node, err_set_type);
+}
+
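For reference, the construct ir_gen_err_set_decl lowers is an explicit error set declaration. A minimal Zig sketch (illustrative, not part of the patch):

    const FileError = error {
        NotFound,
        AccessDenied,
        OutOfMemory,
    };
    // An empty declaration, `error {}`, is rejected with the "empty error set" error above.
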
static IrInstruction *ir_gen_fn_proto(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
assert(node->type == NodeTypeFnProto);
@@ -5401,7 +5497,6 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
case NodeTypeStructField:
case NodeTypeFnDef:
case NodeTypeFnDecl:
- case NodeTypeErrorValueDecl:
case NodeTypeTestDecl:
zig_unreachable();
case NodeTypeBlock:
@@ -5482,6 +5577,8 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
return ir_lval_wrap(irb, scope, ir_gen_container_decl(irb, scope, node), lval);
case NodeTypeFnProto:
return ir_lval_wrap(irb, scope, ir_gen_fn_proto(irb, scope, node), lval);
+ case NodeTypeErrorSetDecl:
+ return ir_lval_wrap(irb, scope, ir_gen_err_set_decl(irb, scope, node), lval);
}
zig_unreachable();
}
@@ -6301,25 +6398,32 @@ static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira,
return ImplicitCastMatchResultYes;
}
- // implicit T to %T
+ // implicit T to U!T
if (expected_type->id == TypeTableEntryIdErrorUnion &&
- ir_types_match_with_implicit_cast(ira, expected_type->data.error.child_type, actual_type, value))
+ ir_types_match_with_implicit_cast(ira, expected_type->data.error_union.payload_type, actual_type, value))
{
return ImplicitCastMatchResultYes;
}
- // implicit conversion from pure error to error union type
+ // implicit conversion from error set to error union type
if (expected_type->id == TypeTableEntryIdErrorUnion &&
- actual_type->id == TypeTableEntryIdPureError)
+ actual_type->id == TypeTableEntryIdErrorSet)
{
return ImplicitCastMatchResultYes;
}
- // implicit conversion from T to %?T
+ // implicit conversion from error set to another error set
+ if (expected_type->id == TypeTableEntryIdErrorSet &&
+ actual_type->id == TypeTableEntryIdErrorSet)
+ {
+ return ImplicitCastMatchResultYes;
+ }
+
+ // implicit conversion from T to U!?T
if (expected_type->id == TypeTableEntryIdErrorUnion &&
- expected_type->data.error.child_type->id == TypeTableEntryIdMaybe &&
+ expected_type->data.error_union.payload_type->id == TypeTableEntryIdMaybe &&
ir_types_match_with_implicit_cast(ira,
- expected_type->data.error.child_type->data.maybe.child_type,
+ expected_type->data.error_union.payload_type->data.maybe.child_type,
actual_type, value))
{
return ImplicitCastMatchResultYes;
@@ -6503,7 +6607,19 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
if (type_is_invalid(prev_inst->value.type)) {
return ira->codegen->builtin_types.entry_invalid;
}
- bool any_are_pure_error = (prev_inst->value.type->id == TypeTableEntryIdPureError);
+ ErrorTableEntry **errors = nullptr;
+ TypeTableEntry *err_set_type = nullptr;
+ if (prev_inst->value.type == ira->codegen->builtin_types.entry_global_error_set) {
+ err_set_type = ira->codegen->builtin_types.entry_global_error_set;
+ } else if (prev_inst->value.type->id == TypeTableEntryIdErrorSet) {
+ err_set_type = prev_inst->value.type;
+        errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
+ for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
+ errors[error_entry->value] = error_entry;
+ }
+ }
+
bool any_are_null = (prev_inst->value.type->id == TypeTableEntryIdNullLit);
bool convert_to_const_slice = false;
for (size_t i = 1; i < instruction_count; i += 1) {
@@ -6524,29 +6640,135 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
continue;
}
- if (prev_type->id == TypeTableEntryIdPureError) {
- prev_inst = cur_inst;
- continue;
- }
-
if (prev_type->id == TypeTableEntryIdNullLit) {
prev_inst = cur_inst;
continue;
}
- if (cur_type->id == TypeTableEntryIdPureError) {
- if (prev_type->id == TypeTableEntryIdArray) {
- convert_to_const_slice = true;
- }
- any_are_pure_error = true;
- continue;
- }
-
if (cur_type->id == TypeTableEntryIdNullLit) {
any_are_null = true;
continue;
}
+ if (prev_type->id == TypeTableEntryIdErrorSet) {
+ assert(err_set_type != nullptr);
+ if (cur_type->id == TypeTableEntryIdErrorSet) {
+ if (err_set_type == ira->codegen->builtin_types.entry_global_error_set) {
+ continue;
+ }
+ if (cur_type == ira->codegen->builtin_types.entry_global_error_set) {
+ err_set_type = ira->codegen->builtin_types.entry_global_error_set;
+ prev_inst = cur_inst;
+ continue;
+ }
+ // if err_set_type is a superset of cur_type, keep err_set_type.
+ // if cur_type is a superset of err_set_type, switch err_set_type to cur_type
+ // otherwise emit a compile error
+ bool prev_is_superset = true;
+ for (uint32_t i = 0; i < cur_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *contained_error_entry = cur_type->data.error_set.errors[i];
+ ErrorTableEntry *error_entry = errors[contained_error_entry->value];
+ if (error_entry == nullptr) {
+ prev_is_superset = false;
+ break;
+ }
+ }
+ if (prev_is_superset) {
+ continue;
+ }
+
+ // unset everything in errors
+ for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
+ errors[error_entry->value] = nullptr;
+ }
+ for (uint32_t i = 0; i < cur_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *error_entry = cur_type->data.error_set.errors[i];
+ errors[error_entry->value] = error_entry;
+ }
+ bool cur_is_superset = true;
+ for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *contained_error_entry = err_set_type->data.error_set.errors[i];
+ ErrorTableEntry *error_entry = errors[contained_error_entry->value];
+ if (error_entry == nullptr) {
+ cur_is_superset = false;
+ break;
+ }
+ }
+ if (cur_is_superset) {
+ err_set_type = cur_inst->value.type;
+ prev_inst = cur_inst;
+ continue;
+ }
+ } else if (cur_type->id == TypeTableEntryIdErrorUnion) {
+ // err_set_type must be a subset of cur_type's error set
+ // unset everything in errors
+ for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
+ errors[error_entry->value] = nullptr;
+ }
+ TypeTableEntry *cur_err_set_type = cur_type->data.error_union.err_set_type;
+ for (uint32_t i = 0; i < cur_err_set_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *error_entry = cur_err_set_type->data.error_set.errors[i];
+ errors[error_entry->value] = error_entry;
+ }
+ bool cur_is_superset = true;
+ for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *contained_error_entry = err_set_type->data.error_set.errors[i];
+ ErrorTableEntry *error_entry = errors[contained_error_entry->value];
+ if (error_entry == nullptr) {
+ cur_is_superset = false;
+ break;
+ }
+ }
+ if (cur_is_superset) {
+ err_set_type = cur_err_set_type;
+ prev_inst = cur_inst;
+ continue;
+ }
+ } else {
+ prev_inst = cur_inst;
+ continue;
+ }
+ }
+
+ if (cur_type->id == TypeTableEntryIdErrorSet) {
+ if (prev_type->id == TypeTableEntryIdArray) {
+ convert_to_const_slice = true;
+ }
+ if (cur_type == ira->codegen->builtin_types.entry_global_error_set) {
+ err_set_type = ira->codegen->builtin_types.entry_global_error_set;
+ continue;
+ }
+ if (err_set_type == ira->codegen->builtin_types.entry_global_error_set) {
+ continue;
+ }
+ if (err_set_type == nullptr) {
+ err_set_type = cur_type;
+                errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
+ for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
+ errors[error_entry->value] = error_entry;
+ }
+ continue;
+ }
+ if (prev_type->id == TypeTableEntryIdErrorUnion) {
+ // the cur type error set must be a subset
+ bool prev_is_superset = true;
+ for (uint32_t i = 0; i < cur_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *contained_error_entry = cur_type->data.error_set.errors[i];
+ ErrorTableEntry *error_entry = errors[contained_error_entry->value];
+ if (error_entry == nullptr) {
+ prev_is_superset = false;
+ break;
+ }
+ }
+ if (prev_is_superset) {
+ continue;
+ }
+ }
+ }
+
if (types_match_const_cast_only(prev_type, cur_type)) {
continue;
}
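A hedged Zig sketch of the superset rule implemented above, with illustrative names: when both peers are error sets, the resolver keeps whichever set contains the other, and falls through to the incompatible-types path if neither does.

    const A = error { NotFound, AccessDenied };
    const B = error { NotFound };
    fn pick(cond: bool, a: A, b: B) A {
        // B is a subset of A, so the peer type of the two branches resolves to A.
        return if (cond) a else b;
    }
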
@@ -6574,20 +6796,20 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
}
if (prev_type->id == TypeTableEntryIdErrorUnion &&
- types_match_const_cast_only(prev_type->data.error.child_type, cur_type))
+ types_match_const_cast_only(prev_type->data.error_union.payload_type, cur_type))
{
continue;
}
if (cur_type->id == TypeTableEntryIdErrorUnion &&
- types_match_const_cast_only(cur_type->data.error.child_type, prev_type))
+ types_match_const_cast_only(cur_type->data.error_union.payload_type, prev_type))
{
prev_inst = cur_inst;
continue;
}
if (prev_type->id == TypeTableEntryIdMaybe &&
- types_match_const_cast_only(prev_type->data.maybe.child_type, cur_type))
+ types_match_const_cast_only(prev_type->data.maybe.child_type, cur_type))
{
continue;
}
@@ -6700,16 +6922,19 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
return ira->codegen->builtin_types.entry_invalid;
}
+
+ free(errors);
+
if (convert_to_const_slice) {
assert(prev_inst->value.type->id == TypeTableEntryIdArray);
TypeTableEntry *ptr_type = get_pointer_to_type(ira->codegen, prev_inst->value.type->data.array.child_type, true);
TypeTableEntry *slice_type = get_slice_type(ira->codegen, ptr_type);
- if (any_are_pure_error) {
- return get_error_type(ira->codegen, slice_type);
+ if (err_set_type != nullptr) {
+ return get_error_union_type(ira->codegen, err_set_type, slice_type);
} else {
return slice_type;
}
- } else if (any_are_pure_error && prev_inst->value.type->id != TypeTableEntryIdPureError) {
+ } else if (err_set_type != nullptr && prev_inst->value.type->id != TypeTableEntryIdErrorSet) {
if (prev_inst->value.type->id == TypeTableEntryIdNumLitInt ||
prev_inst->value.type->id == TypeTableEntryIdNumLitFloat)
{
@@ -6723,7 +6948,7 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
} else if (prev_inst->value.type->id == TypeTableEntryIdErrorUnion) {
return prev_inst->value.type;
} else {
- return get_error_type(ira->codegen, prev_inst->value.type);
+ return get_error_union_type(ira->codegen, err_set_type, prev_inst->value.type);
}
} else if (any_are_null && prev_inst->value.type->id != TypeTableEntryIdNullLit) {
if (prev_inst->value.type->id == TypeTableEntryIdNumLitInt ||
@@ -7199,7 +7424,7 @@ static IrInstruction *ir_analyze_err_wrap_payload(IrAnalyze *ira, IrInstruction
assert(wanted_type->id == TypeTableEntryIdErrorUnion);
if (instr_is_comptime(value)) {
- TypeTableEntry *payload_type = wanted_type->data.error.child_type;
+ TypeTableEntry *payload_type = wanted_type->data.error_union.payload_type;
IrInstruction *casted_payload = ir_implicit_cast(ira, value, payload_type);
if (type_is_invalid(casted_payload->value.type))
return ira->codegen->invalid_instruction;
@@ -7224,8 +7449,43 @@ static IrInstruction *ir_analyze_err_wrap_payload(IrAnalyze *ira, IrInstruction
return result;
}
-static IrInstruction *ir_analyze_err_wrap_code(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *value, TypeTableEntry *wanted_type) {
- assert(wanted_type->id == TypeTableEntryIdErrorUnion);
+static IrInstruction *ir_analyze_err_set_cast(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *value,
+ TypeTableEntry *wanted_type)
+{
+ TypeTableEntry *contained_set = value->value.type;
+ TypeTableEntry *container_set = wanted_type;
+
+ assert(contained_set->id == TypeTableEntryIdErrorSet);
+ assert(container_set->id == TypeTableEntryIdErrorSet);
+
+ if (container_set->data.error_set.infer_fn == nullptr &&
+ container_set != ira->codegen->builtin_types.entry_global_error_set)
+ {
+        ErrorTableEntry **errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
+ for (uint32_t i = 0; i < container_set->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *error_entry = container_set->data.error_set.errors[i];
+ errors[error_entry->value] = error_entry;
+ }
+ ErrorMsg *err_msg = nullptr;
+ for (uint32_t i = 0; i < contained_set->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *contained_error_entry = contained_set->data.error_set.errors[i];
+ ErrorTableEntry *error_entry = errors[contained_error_entry->value];
+ if (error_entry == nullptr) {
+ if (err_msg == nullptr) {
+ err_msg = ir_add_error(ira, source_instr,
+ buf_sprintf("invalid cast of error set '%s' to error set '%s'",
+ buf_ptr(&contained_set->name), buf_ptr(&container_set->name)));
+ }
+ add_error_note(ira->codegen, err_msg, contained_error_entry->decl_node,
+ buf_sprintf("'%s.%s' not present in '%s'", buf_ptr(&contained_set->name),
+ buf_ptr(&contained_error_entry->name), buf_ptr(&container_set->name)));
+ }
+ }
+ free(errors);
+ if (err_msg != nullptr) {
+ return ira->codegen->invalid_instruction;
+ }
+ }
if (instr_is_comptime(value)) {
ConstExprValue *val = ir_resolve_const(ira, value, UndefBad);
@@ -7236,7 +7496,30 @@ static IrInstruction *ir_analyze_err_wrap_code(IrAnalyze *ira, IrInstruction *so
source_instr->scope, source_instr->source_node);
const_instruction->base.value.type = wanted_type;
const_instruction->base.value.special = ConstValSpecialStatic;
- const_instruction->base.value.data.x_err_union.err = val->data.x_pure_err;
+ const_instruction->base.value.data.x_err_set = val->data.x_err_set;
+ return &const_instruction->base;
+ }
+
+ IrInstruction *result = ir_build_cast(&ira->new_irb, source_instr->scope, source_instr->source_node, wanted_type, value, CastOpNoop);
+ result->value.type = wanted_type;
+ return result;
+}
+
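A rough sketch of the explicit error-set cast this function validates, assuming the `T(x)` cast syntax of this era (names are illustrative):

    const Wide = error { NotFound, AccessDenied };
    const Narrow = error { NotFound };
    const ok = Wide(error.NotFound);
    // Casting in the other direction is checked member by member; a member of the source
    // set that is missing from the destination set produces
    // "invalid cast of error set ..." with one note per missing error.
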
+static IrInstruction *ir_analyze_err_wrap_code(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *value, TypeTableEntry *wanted_type) {
+ assert(wanted_type->id == TypeTableEntryIdErrorUnion);
+
+ IrInstruction *casted_value = ir_implicit_cast(ira, value, wanted_type->data.error_union.err_set_type);
+
+ if (instr_is_comptime(casted_value)) {
+ ConstExprValue *val = ir_resolve_const(ira, casted_value, UndefBad);
+ if (!val)
+ return ira->codegen->invalid_instruction;
+
+        IrInstructionConst *const_instruction = ir_create_instruction<IrInstructionConst>(&ira->new_irb,
+ source_instr->scope, source_instr->source_node);
+ const_instruction->base.value.type = wanted_type;
+ const_instruction->base.value.special = ConstValSpecialStatic;
+ const_instruction->base.value.data.x_err_union.err = val->data.x_err_set;
const_instruction->base.value.data.x_err_union.payload = nullptr;
return &const_instruction->base;
}
@@ -7616,9 +7899,12 @@ static IrInstruction *ir_analyze_number_to_literal(IrAnalyze *ira, IrInstruction
return result;
}
-static IrInstruction *ir_analyze_int_to_err(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *target) {
+static IrInstruction *ir_analyze_int_to_err(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *target,
+ TypeTableEntry *wanted_type)
+{
assert(target->value.type->id == TypeTableEntryIdInt);
assert(!target->value.type->data.integral.is_signed);
+ assert(wanted_type->id == TypeTableEntryIdErrorSet);
if (instr_is_comptime(target)) {
ConstExprValue *val = ir_resolve_const(ira, target, UndefBad);
@@ -7626,10 +7912,10 @@ static IrInstruction *ir_analyze_int_to_err(IrAnalyze *ira, IrInstruction *sourc
return ira->codegen->invalid_instruction;
IrInstruction *result = ir_create_const(&ira->new_irb, source_instr->scope,
- source_instr->source_node, ira->codegen->builtin_types.entry_pure_error);
+ source_instr->source_node, wanted_type);
BigInt err_count;
- bigint_init_unsigned(&err_count, ira->codegen->error_decls.length);
+ bigint_init_unsigned(&err_count, ira->codegen->errors_by_index.length);
if (bigint_cmp_zero(&val->data.x_bigint) == CmpEQ || bigint_cmp(&val->data.x_bigint, &err_count) != CmpLT) {
Buf *val_buf = buf_alloc();
bigint_append_buf(val_buf, &val->data.x_bigint, 10);
@@ -7639,13 +7925,12 @@ static IrInstruction *ir_analyze_int_to_err(IrAnalyze *ira, IrInstruction *sourc
}
size_t index = bigint_as_unsigned(&val->data.x_bigint);
- AstNode *error_decl_node = ira->codegen->error_decls.at(index);
- result->value.data.x_pure_err = error_decl_node->data.error_value_decl.err;
+ result->value.data.x_err_set = ira->codegen->errors_by_index.at(index);
return result;
}
IrInstruction *result = ir_build_int_to_err(&ira->new_irb, source_instr->scope, source_instr->source_node, target);
- result->value.type = ira->codegen->builtin_types.entry_pure_error;
+ result->value.type = wanted_type;
return result;
}
@@ -7667,8 +7952,8 @@ static IrInstruction *ir_analyze_err_to_int(IrAnalyze *ira, IrInstruction *sourc
ErrorTableEntry *err;
if (err_type->id == TypeTableEntryIdErrorUnion) {
err = val->data.x_err_union.err;
- } else if (err_type->id == TypeTableEntryIdPureError) {
- err = val->data.x_pure_err;
+ } else if (err_type->id == TypeTableEntryIdErrorSet) {
+ err = val->data.x_err_set;
} else {
zig_unreachable();
}
@@ -7689,7 +7974,7 @@ static IrInstruction *ir_analyze_err_to_int(IrAnalyze *ira, IrInstruction *sourc
}
BigInt bn;
- bigint_init_unsigned(&bn, ira->codegen->error_decls.length);
+ bigint_init_unsigned(&bn, ira->codegen->errors_by_index.length);
if (!bigint_fits_in_bits(&bn, wanted_type->data.integral.bit_count, wanted_type->data.integral.is_signed)) {
ir_add_error_node(ira, source_instr->source_node,
buf_sprintf("too many error values to fit in '%s'", buf_ptr(&wanted_type->name)));
@@ -7734,6 +8019,13 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
}
+ // explicit error set cast
+ if (wanted_type->id == TypeTableEntryIdErrorSet &&
+ actual_type->id == TypeTableEntryIdErrorSet)
+ {
+ return ir_analyze_err_set_cast(ira, source_instr, value, wanted_type);
+ }
+
// explicit cast from int to float
if (wanted_type->id == TypeTableEntryIdFloat &&
actual_type->id == TypeTableEntryIdInt)
@@ -7894,12 +8186,12 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
// explicit cast from child type of error type to error type
if (wanted_type->id == TypeTableEntryIdErrorUnion) {
- if (types_match_const_cast_only(wanted_type->data.error.child_type, actual_type)) {
+ if (types_match_const_cast_only(wanted_type->data.error_union.payload_type, actual_type)) {
return ir_analyze_err_wrap_payload(ira, source_instr, value, wanted_type);
} else if (actual_type->id == TypeTableEntryIdNumLitInt ||
actual_type->id == TypeTableEntryIdNumLitFloat)
{
- if (ir_num_lit_fits_in_other_type(ira, value, wanted_type->data.error.child_type, true)) {
+ if (ir_num_lit_fits_in_other_type(ira, value, wanted_type->data.error_union.payload_type, true)) {
return ir_analyze_err_wrap_payload(ira, source_instr, value, wanted_type);
} else {
return ira->codegen->invalid_instruction;
@@ -7909,16 +8201,16 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
// explicit cast from [N]T to %[]const T
if (wanted_type->id == TypeTableEntryIdErrorUnion &&
- is_slice(wanted_type->data.error.child_type) &&
+ is_slice(wanted_type->data.error_union.payload_type) &&
actual_type->id == TypeTableEntryIdArray)
{
TypeTableEntry *ptr_type =
- wanted_type->data.error.child_type->data.structure.fields[slice_ptr_index].type_entry;
+ wanted_type->data.error_union.payload_type->data.structure.fields[slice_ptr_index].type_entry;
assert(ptr_type->id == TypeTableEntryIdPointer);
if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
types_match_const_cast_only(ptr_type->data.pointer.child_type, actual_type->data.array.child_type))
{
- IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error.child_type, value);
+ IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error_union.payload_type, value);
if (type_is_invalid(cast1->value.type))
return ira->codegen->invalid_instruction;
@@ -7930,25 +8222,25 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from pure error to error union type
+ // explicit cast from error set to error union type
if (wanted_type->id == TypeTableEntryIdErrorUnion &&
- actual_type->id == TypeTableEntryIdPureError)
+ actual_type->id == TypeTableEntryIdErrorSet)
{
return ir_analyze_err_wrap_code(ira, source_instr, value, wanted_type);
}
// explicit cast from T to %?T
if (wanted_type->id == TypeTableEntryIdErrorUnion &&
- wanted_type->data.error.child_type->id == TypeTableEntryIdMaybe &&
+ wanted_type->data.error_union.payload_type->id == TypeTableEntryIdMaybe &&
actual_type->id != TypeTableEntryIdMaybe)
{
- TypeTableEntry *wanted_child_type = wanted_type->data.error.child_type->data.maybe.child_type;
+ TypeTableEntry *wanted_child_type = wanted_type->data.error_union.payload_type->data.maybe.child_type;
if (types_match_const_cast_only(wanted_child_type, actual_type) ||
actual_type->id == TypeTableEntryIdNullLit ||
actual_type->id == TypeTableEntryIdNumLitInt ||
actual_type->id == TypeTableEntryIdNumLitFloat)
{
- IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error.child_type, value);
+ IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error_union.payload_type, value);
if (type_is_invalid(cast1->value.type))
return ira->codegen->invalid_instruction;
@@ -8017,21 +8309,19 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
return ir_analyze_number_to_literal(ira, source_instr, value, wanted_type);
}
- // explicit cast from %void to integer type which can fit it
+ // explicit cast from T!void to integer type which can fit it
bool actual_type_is_void_err = actual_type->id == TypeTableEntryIdErrorUnion &&
- !type_has_bits(actual_type->data.error.child_type);
- bool actual_type_is_pure_err = actual_type->id == TypeTableEntryIdPureError;
- if ((actual_type_is_void_err || actual_type_is_pure_err) &&
- wanted_type->id == TypeTableEntryIdInt)
- {
+ !type_has_bits(actual_type->data.error_union.payload_type);
+ bool actual_type_is_err_set = actual_type->id == TypeTableEntryIdErrorSet;
+ if ((actual_type_is_void_err || actual_type_is_err_set) && wanted_type->id == TypeTableEntryIdInt) {
return ir_analyze_err_to_int(ira, source_instr, value, wanted_type);
}
- // explicit cast from integer to pure error
- if (wanted_type->id == TypeTableEntryIdPureError && actual_type->id == TypeTableEntryIdInt &&
+ // explicit cast from integer to error set
+ if (wanted_type->id == TypeTableEntryIdErrorSet && actual_type->id == TypeTableEntryIdInt &&
!actual_type->data.integral.is_signed)
{
- return ir_analyze_int_to_err(ira, source_instr, value);
+ return ir_analyze_int_to_err(ira, source_instr, value, wanted_type);
}
// explicit cast from integer to enum type with no payload
@@ -8524,7 +8814,7 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
case TypeTableEntryIdMetaType:
case TypeTableEntryIdVoid:
case TypeTableEntryIdPointer:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
case TypeTableEntryIdOpaque:
case TypeTableEntryIdNamespace:
@@ -9312,7 +9602,7 @@ static VarClassRequired get_var_class_required(TypeTableEntry *type_entry) {
case TypeTableEntryIdInt:
case TypeTableEntryIdFloat:
case TypeTableEntryIdVoid:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdFn:
return VarClassRequiredAny;
case TypeTableEntryIdNumLitFloat:
@@ -9338,7 +9628,7 @@ static VarClassRequired get_var_class_required(TypeTableEntry *type_entry) {
case TypeTableEntryIdMaybe:
return get_var_class_required(type_entry->data.maybe.child_type);
case TypeTableEntryIdErrorUnion:
- return get_var_class_required(type_entry->data.error.child_type);
+ return get_var_class_required(type_entry->data.error_union.payload_type);
case TypeTableEntryIdStruct:
case TypeTableEntryIdEnum:
@@ -9573,7 +9863,7 @@ static TypeTableEntry *ir_analyze_instruction_export(IrAnalyze *ira, IrInstructi
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
@@ -9596,7 +9886,7 @@ static TypeTableEntry *ir_analyze_instruction_export(IrAnalyze *ira, IrInstructi
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
zig_panic("TODO export const value of type %s", buf_ptr(&target->value.type->name));
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
@@ -9630,6 +9920,24 @@ static TypeTableEntry *ir_analyze_instruction_error_return_trace(IrAnalyze *ira,
return nullable_type;
}
+static TypeTableEntry *ir_analyze_instruction_error_union(IrAnalyze *ira,
+ IrInstructionErrorUnion *instruction)
+{
+ TypeTableEntry *err_set_type = ir_resolve_type(ira, instruction->err_set->other);
+ if (type_is_invalid(err_set_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ TypeTableEntry *payload_type = ir_resolve_type(ira, instruction->payload->other);
+ if (type_is_invalid(payload_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ TypeTableEntry *result_type = get_error_union_type(ira->codegen, err_set_type, payload_type);
+
+ ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+ out_val->data.x_type = result_type;
+ return ira->codegen->builtin_types.entry_type;
+}
+
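The `!` operator that produces IrInstructionErrorUnion combines an error set with a payload type into an error union; a minimal Zig sketch (illustrative):

    const FileError = error { NotFound };
    fn open() FileError!usize {
        return error.NotFound;
    }
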
static bool ir_analyze_fn_call_inline_arg(IrAnalyze *ira, AstNode *fn_proto_node,
IrInstruction *arg, Scope **exec_scope, size_t *next_proto_i)
{
@@ -10114,7 +10422,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
TypeTableEntry *return_type = impl_fn->type_entry->data.fn.fn_type_id.return_type;
ir_add_alloca(ira, new_call_instruction, return_type);
- if (return_type->id == TypeTableEntryIdPureError || return_type->id == TypeTableEntryIdErrorUnion) {
+ if (return_type->id == TypeTableEntryIdErrorSet || return_type->id == TypeTableEntryIdErrorUnion) {
parent_fn_entry->calls_errorable_function = true;
}
@@ -10124,7 +10432,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
FnTableEntry *parent_fn_entry = exec_fn_entry(ira->new_irb.exec);
assert(fn_type_id->return_type != nullptr);
assert(parent_fn_entry != nullptr);
- if (fn_type_id->return_type->id == TypeTableEntryIdPureError || fn_type_id->return_type->id == TypeTableEntryIdErrorUnion) {
+ if (fn_type_id->return_type->id == TypeTableEntryIdErrorSet || fn_type_id->return_type->id == TypeTableEntryIdErrorUnion) {
parent_fn_entry->calls_errorable_function = true;
}
@@ -10243,58 +10551,6 @@ static TypeTableEntry *ir_analyze_instruction_call(IrAnalyze *ira, IrInstruction
}
}
-static TypeTableEntry *ir_analyze_unary_prefix_op_err(IrAnalyze *ira, IrInstructionUnOp *un_op_instruction) {
- assert(un_op_instruction->op_id == IrUnOpError);
- IrInstruction *value = un_op_instruction->value->other;
-
- TypeTableEntry *meta_type = ir_resolve_type(ira, value);
- if (type_is_invalid(meta_type))
- return ira->codegen->builtin_types.entry_invalid;
-
-
- switch (meta_type->id) {
- case TypeTableEntryIdInvalid: // handled above
- zig_unreachable();
-
- case TypeTableEntryIdVoid:
- case TypeTableEntryIdBool:
- case TypeTableEntryIdInt:
- case TypeTableEntryIdFloat:
- case TypeTableEntryIdPointer:
- case TypeTableEntryIdArray:
- case TypeTableEntryIdStruct:
- case TypeTableEntryIdMaybe:
- case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
- case TypeTableEntryIdEnum:
- case TypeTableEntryIdUnion:
- case TypeTableEntryIdFn:
- case TypeTableEntryIdBoundFn:
- {
- ConstExprValue *out_val = ir_build_const_from(ira, &un_op_instruction->base);
- TypeTableEntry *result_type = get_error_type(ira->codegen, meta_type);
- out_val->data.x_type = result_type;
- return ira->codegen->builtin_types.entry_type;
- }
- case TypeTableEntryIdMetaType:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
- case TypeTableEntryIdNamespace:
- case TypeTableEntryIdBlock:
- case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdVar:
- case TypeTableEntryIdArgTuple:
- case TypeTableEntryIdOpaque:
- ir_add_error_node(ira, un_op_instruction->base.source_node,
- buf_sprintf("unable to wrap type '%s' in error type", buf_ptr(&meta_type->name)));
- return ira->codegen->builtin_types.entry_invalid;
- }
- zig_unreachable();
-}
-
-
static TypeTableEntry *ir_analyze_dereference(IrAnalyze *ira, IrInstructionUnOp *un_op_instruction) {
IrInstruction *value = un_op_instruction->value->other;
@@ -10350,7 +10606,7 @@ static TypeTableEntry *ir_analyze_maybe(IrAnalyze *ira, IrInstructionUnOp *un_op
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -10460,8 +10716,6 @@ static TypeTableEntry *ir_analyze_instruction_un_op(IrAnalyze *ira, IrInstructio
return ir_analyze_dereference(ira, un_op_instruction);
case IrUnOpMaybe:
return ir_analyze_maybe(ira, un_op_instruction);
- case IrUnOpError:
- return ir_analyze_unary_prefix_op_err(ira, un_op_instruction);
}
zig_unreachable();
}
@@ -11083,6 +11337,17 @@ static TypeTableEntry *ir_analyze_decl_ref(IrAnalyze *ira, IrInstruction *source
zig_unreachable();
}
+static ErrorTableEntry *find_err_table_entry(TypeTableEntry *err_set_type, Buf *field_name) {
+ assert(err_set_type->id == TypeTableEntryIdErrorSet);
+ for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *err_table_entry = err_set_type->data.error_set.errors[i];
+ if (buf_eql_buf(&err_table_entry->name, field_name)) {
+ return err_table_entry;
+ }
+ }
+ return nullptr;
+}
+
static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstructionFieldPtr *field_ptr_instruction) {
IrInstruction *container_ptr = field_ptr_instruction->container_ptr->other;
if (type_is_invalid(container_ptr->value.type))
@@ -11224,23 +11489,49 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
buf_sprintf("container '%s' has no member called '%s'",
buf_ptr(&child_type->name), buf_ptr(field_name)));
return ira->codegen->builtin_types.entry_invalid;
- } else if (child_type->id == TypeTableEntryIdPureError) {
- auto err_table_entry = ira->codegen->error_table.maybe_get(field_name);
- if (err_table_entry) {
- ConstExprValue *const_val = create_const_vals(1);
- const_val->special = ConstValSpecialStatic;
- const_val->type = child_type;
- const_val->data.x_pure_err = err_table_entry->value;
-
- bool ptr_is_const = true;
- bool ptr_is_volatile = false;
- return ir_analyze_const_ptr(ira, &field_ptr_instruction->base, const_val,
- child_type, ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
+ } else if (child_type->id == TypeTableEntryIdErrorSet) {
+ ErrorTableEntry *err_entry;
+ TypeTableEntry *err_set_type;
+ if (child_type == ira->codegen->builtin_types.entry_global_error_set) {
+ auto existing_entry = ira->codegen->error_table.maybe_get(field_name);
+ if (existing_entry) {
+ err_entry = existing_entry->value;
+ } else {
+                err_entry = allocate<ErrorTableEntry>(1);
+ err_entry->decl_node = field_ptr_instruction->base.source_node;
+ buf_init_from_buf(&err_entry->name, field_name);
+ size_t error_value_count = ira->codegen->errors_by_index.length;
+ assert((uint32_t)error_value_count < (((uint32_t)1) << (uint32_t)ira->codegen->err_tag_type->data.integral.bit_count));
+ err_entry->value = error_value_count;
+ ira->codegen->errors_by_index.append(err_entry);
+ ira->codegen->err_enumerators.append(ZigLLVMCreateDebugEnumerator(ira->codegen->dbuilder,
+ buf_ptr(field_name), error_value_count));
+ ira->codegen->error_table.put(field_name, err_entry);
+ }
+ if (err_entry->set_with_only_this_in_it == nullptr) {
+ err_entry->set_with_only_this_in_it = make_err_set_with_one_item(ira->codegen,
+ field_ptr_instruction->base.scope, field_ptr_instruction->base.source_node,
+ err_entry);
+ }
+ err_set_type = err_entry->set_with_only_this_in_it;
+ } else {
+            err_entry = find_err_table_entry(child_type, field_name);
+ if (err_entry == nullptr) {
+ ir_add_error(ira, &field_ptr_instruction->base,
+ buf_sprintf("no error named '%s' in '%s'", buf_ptr(field_name), buf_ptr(&child_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ err_set_type = child_type;
}
+ ConstExprValue *const_val = create_const_vals(1);
+ const_val->special = ConstValSpecialStatic;
+ const_val->type = err_set_type;
+ const_val->data.x_err_set = err_entry;
- ir_add_error(ira, &field_ptr_instruction->base,
- buf_sprintf("use of undeclared error value '%s'", buf_ptr(field_name)));
- return ira->codegen->builtin_types.entry_invalid;
+ bool ptr_is_const = true;
+ bool ptr_is_volatile = false;
+ return ir_analyze_const_ptr(ira, &field_ptr_instruction->base, const_val,
+ err_set_type, ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
} else if (child_type->id == TypeTableEntryIdInt) {
if (buf_eql_str(field_name, "bit_count")) {
bool ptr_is_const = true;
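Sketched at the Zig level, the member access handled above behaves roughly as follows (illustrative names):

    const e1 = error.OutOfMemory;    // field access on the global error set: the value is
                                     // looked up, or registered on first use
    const S = error { NotFound };
    const e2 = S.NotFound;           // member of a declared set: ok
    // S.OutOfMemory would fail with "no error named 'OutOfMemory' in 'S'".
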
@@ -11323,11 +11614,18 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
return ira->codegen->builtin_types.entry_invalid;
}
} else if (child_type->id == TypeTableEntryIdErrorUnion) {
- if (buf_eql_str(field_name, "Child")) {
+ if (buf_eql_str(field_name, "Payload")) {
bool ptr_is_const = true;
bool ptr_is_volatile = false;
return ir_analyze_const_ptr(ira, &field_ptr_instruction->base,
- create_const_type(ira->codegen, child_type->data.error.child_type),
+ create_const_type(ira->codegen, child_type->data.error_union.payload_type),
+ ira->codegen->builtin_types.entry_type,
+ ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
+ } else if (buf_eql_str(field_name, "ErrorSet")) {
+ bool ptr_is_const = true;
+ bool ptr_is_volatile = false;
+ return ir_analyze_const_ptr(ira, &field_ptr_instruction->base,
+ create_const_type(ira->codegen, child_type->data.error_union.err_set_type),
ira->codegen->builtin_types.entry_type,
ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
} else {
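The `Payload` and `ErrorSet` members added above can be read off an error union type at comptime; a hedged sketch:

    const E = error { Bad };
    const EU = E!u32;
    const P = EU.Payload;    // u32
    const S = EU.ErrorSet;   // E
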
@@ -11514,7 +11812,7 @@ static TypeTableEntry *ir_analyze_instruction_typeof(IrAnalyze *ira, IrInstructi
case TypeTableEntryIdStruct:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -11781,7 +12079,7 @@ static TypeTableEntry *ir_analyze_instruction_slice_type(IrAnalyze *ira,
case TypeTableEntryIdNumLitInt:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -11889,7 +12187,7 @@ static TypeTableEntry *ir_analyze_instruction_array_type(IrAnalyze *ira,
case TypeTableEntryIdNumLitInt:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -11942,7 +12240,7 @@ static TypeTableEntry *ir_analyze_instruction_size_of(IrAnalyze *ira,
case TypeTableEntryIdStruct:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -12277,7 +12575,7 @@ static TypeTableEntry *ir_analyze_instruction_switch_target(IrAnalyze *ira,
case TypeTableEntryIdPointer:
case TypeTableEntryIdFn:
case TypeTableEntryIdNamespace:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
if (pointee_val) {
ConstExprValue *out_val = ir_build_const_from(ira, &switch_target_instruction->base);
copy_const_val(out_val, pointee_val, true);
@@ -12347,8 +12645,6 @@ static TypeTableEntry *ir_analyze_instruction_switch_target(IrAnalyze *ira,
return target_type;
}
case TypeTableEntryIdErrorUnion:
- // see https://github.com/andrewrk/zig/issues/632
- zig_panic("TODO switch on error union");
case TypeTableEntryIdUnreachable:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
@@ -12873,7 +13169,7 @@ static TypeTableEntry *ir_analyze_min_max(IrAnalyze *ira, IrInstruction *source_
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
case TypeTableEntryIdNamespace:
@@ -12961,7 +13257,7 @@ static TypeTableEntry *ir_analyze_instruction_err_name(IrAnalyze *ira, IrInstruc
TypeTableEntry *u8_ptr_type = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, true);
TypeTableEntry *str_type = get_slice_type(ira->codegen, u8_ptr_type);
if (casted_value->value.special == ConstValSpecialStatic) {
- ErrorTableEntry *err = casted_value->value.data.x_pure_err;
+ ErrorTableEntry *err = casted_value->value.data.x_err_set;
if (!err->cached_error_name_val) {
ConstExprValue *array_val = create_const_str_lit(ira->codegen, &err->name);
err->cached_error_name_val = create_const_slice(ira->codegen, array_val, 0, buf_len(&err->name), true);
@@ -14106,7 +14402,7 @@ static TypeTableEntry *ir_analyze_instruction_align_of(IrAnalyze *ira, IrInstruc
case TypeTableEntryIdStruct:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
case TypeTableEntryIdUnion:
case TypeTableEntryIdFn:
@@ -14239,7 +14535,7 @@ static TypeTableEntry *ir_analyze_instruction_test_err(IrAnalyze *ira, IrInstruc
ir_build_test_err_from(&ira->new_irb, &instruction->base, value);
return ira->codegen->builtin_types.entry_bool;
- } else if (type_entry->id == TypeTableEntryIdPureError) {
+ } else if (type_entry->id == TypeTableEntryIdErrorSet) {
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
out_val->data.x_bool = true;
return ira->codegen->builtin_types.entry_bool;
@@ -14275,13 +14571,13 @@ static TypeTableEntry *ir_analyze_instruction_unwrap_err_code(IrAnalyze *ira,
assert(err);
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
- out_val->data.x_pure_err = err;
- return ira->codegen->builtin_types.entry_pure_error;
+ out_val->data.x_err_set = err;
+ return type_entry->data.error_union.err_set_type;
}
}
ir_build_unwrap_err_code_from(&ira->new_irb, &instruction->base, value);
- return ira->codegen->builtin_types.entry_pure_error;
+ return type_entry->data.error_union.err_set_type;
} else {
ir_add_error(ira, value,
buf_sprintf("expected error union type, found '%s'", buf_ptr(&type_entry->name)));
@@ -14305,10 +14601,10 @@ static TypeTableEntry *ir_analyze_instruction_unwrap_err_payload(IrAnalyze *ira,
if (type_is_invalid(type_entry)) {
return ira->codegen->builtin_types.entry_invalid;
} else if (type_entry->id == TypeTableEntryIdErrorUnion) {
- TypeTableEntry *child_type = type_entry->data.error.child_type;
- TypeTableEntry *result_type = get_pointer_to_type_extra(ira->codegen, child_type,
+ TypeTableEntry *payload_type = type_entry->data.error_union.payload_type;
+ TypeTableEntry *result_type = get_pointer_to_type_extra(ira->codegen, payload_type,
ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
- get_abi_alignment(ira->codegen, child_type), 0, 0);
+ get_abi_alignment(ira->codegen, payload_type), 0, 0);
if (instr_is_comptime(value)) {
ConstExprValue *ptr_val = ir_resolve_const(ira, value, UndefBad);
if (!ptr_val)
@@ -14343,6 +14639,12 @@ static TypeTableEntry *ir_analyze_instruction_fn_proto(IrAnalyze *ira, IrInstruc
AstNode *proto_node = instruction->base.source_node;
assert(proto_node->type == NodeTypeFnProto);
+ if (proto_node->data.fn_proto.auto_err_set) {
+ ir_add_error(ira, &instruction->base,
+ buf_sprintf("inferring error set of return type valid only for function definitions"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
FnTypeId fn_type_id = {0};
init_fn_type_id(&fn_type_id, proto_node, proto_node->data.fn_proto.params.length);
@@ -14468,6 +14770,71 @@ static TypeTableEntry *ir_analyze_instruction_check_switch_prongs(IrAnalyze *ira
}
}
}
+ } else if (switch_type->id == TypeTableEntryIdErrorSet) {
+ FnTableEntry *infer_fn = switch_type->data.error_set.infer_fn;
+ if (infer_fn != nullptr) {
+ if (infer_fn->anal_state == FnAnalStateInvalid) {
+ return ira->codegen->builtin_types.entry_invalid;
+ } else if (infer_fn->anal_state == FnAnalStateReady) {
+ analyze_fn_body(ira->codegen, infer_fn);
+ if (switch_type->data.error_set.infer_fn != nullptr) {
+ assert(ira->codegen->errors.length != 0);
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ } else {
+ ir_add_error(ira, &instruction->base,
+ buf_sprintf("cannot switch on inferred error set '%s': function '%s' not fully analyzed yet",
+ buf_ptr(&switch_type->name), buf_ptr(&switch_type->data.error_set.infer_fn->symbol_name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ }
+
+        AstNode **field_prev_uses = allocate<AstNode *>(ira->codegen->errors_by_index.length);
+
+ for (size_t range_i = 0; range_i < instruction->range_count; range_i += 1) {
+ IrInstructionCheckSwitchProngsRange *range = &instruction->ranges[range_i];
+
+ IrInstruction *start_value = range->start->other;
+ if (type_is_invalid(start_value->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *end_value = range->end->other;
+ if (type_is_invalid(end_value->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ assert(start_value->value.type->id == TypeTableEntryIdErrorSet);
+ uint32_t start_index = start_value->value.data.x_err_set->value;
+
+ assert(end_value->value.type->id == TypeTableEntryIdErrorSet);
+ uint32_t end_index = end_value->value.data.x_err_set->value;
+
+ if (start_index != end_index) {
+ ir_add_error(ira, end_value, buf_sprintf("ranges not allowed when switching on errors"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ AstNode *prev_node = field_prev_uses[start_index];
+ if (prev_node != nullptr) {
+ Buf *err_name = &ira->codegen->errors_by_index.at(start_index)->name;
+ ErrorMsg *msg = ir_add_error(ira, start_value,
+ buf_sprintf("duplicate switch value: '%s.%s'", buf_ptr(&switch_type->name), buf_ptr(err_name)));
+ add_error_note(ira->codegen, msg, prev_node, buf_sprintf("other value is here"));
+ }
+ field_prev_uses[start_index] = start_value->source_node;
+ }
+ if (!instruction->have_else_prong) {
+ for (uint32_t i = 0; i < switch_type->data.error_set.err_count; i += 1) {
+ ErrorTableEntry *err_entry = switch_type->data.error_set.errors[i];
+
+ AstNode *prev_node = field_prev_uses[err_entry->value];
+ if (prev_node == nullptr) {
+ ir_add_error(ira, &instruction->base,
+ buf_sprintf("error.%s not handled in switch", buf_ptr(&err_entry->name)));
+ }
+ }
+ }
+
+ free(field_prev_uses);
} else if (switch_type->id == TypeTableEntryIdInt) {
RangeSet rs = {0};
for (size_t range_i = 0; range_i < instruction->range_count; range_i += 1) {
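A sketch of the exhaustiveness rules the error-set branch above enforces (illustrative names):

    const E = error { A, B };
    fn code(e: E) u8 {
        return switch (e) {
            error.A => 1,
            error.B => 2,
            // Omitting the error.B prong triggers "error.B not handled in switch";
            // listing a value twice triggers "duplicate switch value: 'E.B'";
            // ranges such as error.A ... error.B are rejected outright.
        };
    }
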
@@ -14760,7 +15127,7 @@ static void buf_write_value_bytes(CodeGen *codegen, uint8_t *buf, ConstExprValue
zig_panic("TODO buf_write_value_bytes maybe type");
case TypeTableEntryIdErrorUnion:
zig_panic("TODO buf_write_value_bytes error union");
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
zig_panic("TODO buf_write_value_bytes pure error type");
case TypeTableEntryIdEnum:
zig_panic("TODO buf_write_value_bytes enum type");
@@ -14818,7 +15185,7 @@ static void buf_read_value_bytes(CodeGen *codegen, uint8_t *buf, ConstExprValue
zig_panic("TODO buf_read_value_bytes maybe type");
case TypeTableEntryIdErrorUnion:
zig_panic("TODO buf_read_value_bytes error union");
- case TypeTableEntryIdPureError:
+ case TypeTableEntryIdErrorSet:
zig_panic("TODO buf_read_value_bytes pure error type");
case TypeTableEntryIdEnum:
zig_panic("TODO buf_read_value_bytes enum type");
@@ -15429,6 +15796,8 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_export(ira, (IrInstructionExport *)instruction);
case IrInstructionIdErrorReturnTrace:
return ir_analyze_instruction_error_return_trace(ira, (IrInstructionErrorReturnTrace *)instruction);
+ case IrInstructionIdErrorUnion:
+ return ir_analyze_instruction_error_union(ira, (IrInstructionErrorUnion *)instruction);
}
zig_unreachable();
}
@@ -15614,6 +15983,7 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdArgType:
case IrInstructionIdTagType:
case IrInstructionIdErrorReturnTrace:
+ case IrInstructionIdErrorUnion:
return false;
case IrInstructionIdAsm:
{
diff --git a/src/ir_print.cpp b/src/ir_print.cpp
index 8332212d34..781cd4a1e3 100644
--- a/src/ir_print.cpp
+++ b/src/ir_print.cpp
@@ -148,8 +148,6 @@ static const char *ir_un_op_id_str(IrUnOp op_id) {
return "*";
case IrUnOpMaybe:
return "?";
- case IrUnOpError:
- return "%";
}
zig_unreachable();
}
@@ -1004,6 +1002,11 @@ static void ir_print_error_return_trace(IrPrint *irp, IrInstructionErrorReturnTr
fprintf(irp->f, "@errorReturnTrace()");
}
+static void ir_print_error_union(IrPrint *irp, IrInstructionErrorUnion *instruction) {
+ ir_print_other_instruction(irp, instruction->err_set);
+ fprintf(irp->f, "!");
+ ir_print_other_instruction(irp, instruction->payload);
+}
static void ir_print_instruction(IrPrint *irp, IrInstruction *instruction) {
ir_print_prefix(irp, instruction);
@@ -1322,6 +1325,9 @@ static void ir_print_instruction(IrPrint *irp, IrInstruction *instruction) {
case IrInstructionIdErrorReturnTrace:
ir_print_error_return_trace(irp, (IrInstructionErrorReturnTrace *)instruction);
break;
+ case IrInstructionIdErrorUnion:
+ ir_print_error_union(irp, (IrInstructionErrorUnion *)instruction);
+ break;
}
fprintf(irp->f, "\n");
}
diff --git a/src/parser.cpp b/src/parser.cpp
index 12293bc61b..b5b35a9f1a 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -221,6 +221,7 @@ static AstNode *ast_parse_grouped_expr(ParseContext *pc, size_t *token_index, bo
static AstNode *ast_parse_container_decl(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_primary_expr(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_try_expr(ParseContext *pc, size_t *token_index);
+static AstNode *ast_parse_symbol(ParseContext *pc, size_t *token_index);
static void ast_expect_token(ParseContext *pc, Token *token, TokenId token_id) {
if (token->id == token_id) {
@@ -651,8 +652,9 @@ static AstNode *ast_parse_comptime_expr(ParseContext *pc, size_t *token_index, b
}
/*
-PrimaryExpression = Integer | Float | String | CharLiteral | KeywordLiteral | GroupedExpression | BlockExpression(BlockOrExpression) | Symbol | ("@" Symbol FnCallExpression) | ArrayType | FnProto | AsmExpression | ("error" "." Symbol) | ContainerDecl | ("continue" option(":" Symbol))
+PrimaryExpression = Integer | Float | String | CharLiteral | KeywordLiteral | GroupedExpression | BlockExpression(BlockOrExpression) | Symbol | ("@" Symbol FnCallExpression) | ArrayType | FnProto | AsmExpression | ContainerDecl | ("continue" option(":" Symbol)) | ErrorSetDecl
KeywordLiteral = "true" | "false" | "null" | "undefined" | "error" | "this" | "unreachable"
+ErrorSetDecl = "error" "{" list(Symbol, ",") "}"
*/
static AstNode *ast_parse_primary_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
Token *token = &pc->tokens->at(*token_index);
@@ -716,9 +718,31 @@ static AstNode *ast_parse_primary_expr(ParseContext *pc, size_t *token_index, bo
*token_index += 1;
return node;
} else if (token->id == TokenIdKeywordError) {
- AstNode *node = ast_create_node(pc, NodeTypeErrorType, token);
- *token_index += 1;
- return node;
+ Token *next_token = &pc->tokens->at(*token_index + 1);
+ if (next_token->id == TokenIdLBrace) {
+ AstNode *node = ast_create_node(pc, NodeTypeErrorSetDecl, token);
+ *token_index += 2;
+ for (;;) {
+ Token *item_tok = &pc->tokens->at(*token_index);
+ if (item_tok->id == TokenIdRBrace) {
+ *token_index += 1;
+ return node;
+ } else if (item_tok->id == TokenIdSymbol) {
+ AstNode *symbol_node = ast_parse_symbol(pc, token_index);
+ node->data.err_set_decl.decls.append(symbol_node);
+ Token *opt_comma_tok = &pc->tokens->at(*token_index);
+ if (opt_comma_tok->id == TokenIdComma) {
+ *token_index += 1;
+ }
+ } else {
+ ast_invalid_token_error(pc, item_tok);
+ }
+ }
+ } else {
+ AstNode *node = ast_create_node(pc, NodeTypeErrorType, token);
+ *token_index += 1;
+ return node;
+ }
} else if (token->id == TokenIdAtSign) {
*token_index += 1;
Token *name_tok = &pc->tokens->at(*token_index);
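With this change the parser distinguishes three uses of the `error` keyword in a primary expression; a brief Zig sketch (illustrative):

    const G = error;                 // NodeTypeErrorType: the global error set type
    const S = error { NotFound };    // NodeTypeErrorSetDecl
    const v = error.NotFound;        // error value, via field access on the global set
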
@@ -950,7 +974,6 @@ static PrefixOp tok_to_prefix_op(Token *token) {
case TokenIdTilde: return PrefixOpBinNot;
case TokenIdStar: return PrefixOpDereference;
case TokenIdMaybe: return PrefixOpMaybe;
- case TokenIdPercent: return PrefixOpError;
case TokenIdDoubleQuestion: return PrefixOpUnwrapMaybe;
case TokenIdStarStar: return PrefixOpDereference;
default: return PrefixOpInvalid;
@@ -998,7 +1021,7 @@ static AstNode *ast_parse_addr_of(ParseContext *pc, size_t *token_index) {
/*
PrefixOpExpression : PrefixOp PrefixOpExpression | SuffixOpExpression
-PrefixOp = "!" | "-" | "~" | "*" | ("&" option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "%" | "%%" | "??" | "-%" | "try"
+PrefixOp = "!" | "-" | "~" | "*" | ("&" option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "??" | "-%" | "try"
*/
static AstNode *ast_parse_prefix_op_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
Token *token = &pc->tokens->at(*token_index);
@@ -1043,12 +1066,13 @@ static BinOpType tok_to_mult_op(Token *token) {
case TokenIdStarStar: return BinOpTypeArrayMult;
case TokenIdSlash: return BinOpTypeDiv;
case TokenIdPercent: return BinOpTypeMod;
+ case TokenIdBang: return BinOpTypeErrorUnion;
default: return BinOpTypeInvalid;
}
}
/*
-MultiplyOperator = "*" | "/" | "%" | "**" | "*%"
+MultiplyOperator = "!" | "*" | "/" | "%" | "**" | "*%"
*/
static BinOpType ast_parse_mult_op(ParseContext *pc, size_t *token_index, bool mandatory) {
Token *token = &pc->tokens->at(*token_index);
@@ -2240,7 +2264,7 @@ static AstNode *ast_parse_block(ParseContext *pc, size_t *token_index, bool mand
}
/*
-FnProto = option("nakedcc" | "stdcallcc" | "extern") "fn" option(Symbol) ParamDeclList option("align" "(" Expression ")") option("section" "(" Expression ")") TypeExpr
+FnProto = option("nakedcc" | "stdcallcc" | "extern") "fn" option(Symbol) ParamDeclList option("align" "(" Expression ")") option("section" "(" Expression ")") option("!") TypeExpr
*/
static AstNode *ast_parse_fn_proto(ParseContext *pc, size_t *token_index, bool mandatory, VisibMod visib_mod) {
Token *first_token = &pc->tokens->at(*token_index);
@@ -2315,6 +2339,21 @@ static AstNode *ast_parse_fn_proto(ParseContext *pc, size_t *token_index, bool m
ast_eat_token(pc, token_index, TokenIdRParen);
next_token = &pc->tokens->at(*token_index);
}
+ if (next_token->id == TokenIdKeywordError) {
+ Token *maybe_lbrace_tok = &pc->tokens->at(*token_index + 1);
+ if (maybe_lbrace_tok->id == TokenIdLBrace) {
+ *token_index += 1;
+ node->data.fn_proto.return_type = ast_create_node(pc, NodeTypeErrorType, next_token);
+ return node;
+ }
+
+ return node;
+ }
+ if (next_token->id == TokenIdBang) {
+ *token_index += 1;
+ node->data.fn_proto.auto_err_set = true;
+ next_token = &pc->tokens->at(*token_index);
+ }
node->data.fn_proto.return_type = ast_parse_type_expr(pc, token_index, true);
return node;
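At the function-prototype level, the new lookahead keeps `fn f() error { ... }` parsing as a bare `error` return type followed by the body, while a leading `!` marks an inferred error set. A hedged sketch of the accepted forms (illustrative):

    const E = error { Bad };

    fn a() !void {}                  // auto_err_set: the error set is inferred from the body

    fn b() E!void {                  // explicit error set combined with a payload type
        return error.Bad;
    }
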
@@ -2559,26 +2598,6 @@ static AstNode *ast_parse_container_decl(ParseContext *pc, size_t *token_index,
return node;
}
-/*
-ErrorValueDecl : "error" "Symbol" ";"
-*/
-static AstNode *ast_parse_error_value_decl(ParseContext *pc, size_t *token_index) {
- Token *first_token = &pc->tokens->at(*token_index);
-
- if (first_token->id != TokenIdKeywordError) {
- return nullptr;
- }
- *token_index += 1;
-
- Token *name_tok = ast_eat_token(pc, token_index, TokenIdSymbol);
- ast_eat_token(pc, token_index, TokenIdSemicolon);
-
- AstNode *node = ast_create_node(pc, NodeTypeErrorValueDecl, first_token);
- node->data.error_value_decl.name = token_buf(name_tok);
-
- return node;
-}
-
/*
TestDecl = "test" String Block
*/
@@ -2611,12 +2630,6 @@ static void ast_parse_top_level_decls(ParseContext *pc, size_t *token_index, Zig
continue;
}
- AstNode *error_value_node = ast_parse_error_value_decl(pc, token_index);
- if (error_value_node) {
- top_level_decls->append(error_value_node);
- continue;
- }
-
AstNode *test_decl_node = ast_parse_test_decl_node(pc, token_index);
if (test_decl_node) {
top_level_decls->append(test_decl_node);
@@ -2744,9 +2757,6 @@ void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *cont
visit_field(&node->data.variable_declaration.align_expr, visit, context);
visit_field(&node->data.variable_declaration.section_expr, visit, context);
break;
- case NodeTypeErrorValueDecl:
- // none
- break;
case NodeTypeTestDecl:
visit_field(&node->data.test_decl.body, visit, context);
break;
@@ -2899,5 +2909,8 @@ void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *cont
visit_field(&node->data.addr_of_expr.align_expr, visit, context);
visit_field(&node->data.addr_of_expr.op_expr, visit, context);
break;
+ case NodeTypeErrorSetDecl:
+ visit_node_list(&node->data.err_set_decl.decls, visit, context);
+ break;
}
}
diff --git a/src/zig_llvm.cpp b/src/zig_llvm.cpp
index 81e22187ed..3e92752d9f 100644
--- a/src/zig_llvm.cpp
+++ b/src/zig_llvm.cpp
@@ -437,6 +437,10 @@ unsigned ZigLLVMTag_DW_structure_type(void) {
return dwarf::DW_TAG_structure_type;
}
+unsigned ZigLLVMTag_DW_enumeration_type(void) {
+ return dwarf::DW_TAG_enumeration_type;
+}
+
unsigned ZigLLVMTag_DW_union_type(void) {
return dwarf::DW_TAG_union_type;
}
diff --git a/src/zig_llvm.h b/src/zig_llvm.h
index 9a67bf7135..4ae25ef6fd 100644
--- a/src/zig_llvm.h
+++ b/src/zig_llvm.h
@@ -133,6 +133,7 @@ ZIG_EXTERN_C unsigned ZigLLVMEncoding_DW_ATE_signed_char(void);
ZIG_EXTERN_C unsigned ZigLLVMLang_DW_LANG_C99(void);
ZIG_EXTERN_C unsigned ZigLLVMTag_DW_variable(void);
ZIG_EXTERN_C unsigned ZigLLVMTag_DW_structure_type(void);
+ZIG_EXTERN_C unsigned ZigLLVMTag_DW_enumeration_type(void);
ZIG_EXTERN_C unsigned ZigLLVMTag_DW_union_type(void);
ZIG_EXTERN_C struct ZigLLVMDIBuilder *ZigLLVMCreateDIBuilder(LLVMModuleRef module, bool allow_unresolved);
diff --git a/std/debug/index.zig b/std/debug/index.zig
index ccf5f6d413..6bb578e5bf 100644
--- a/std/debug/index.zig
+++ b/std/debug/index.zig
@@ -10,15 +10,6 @@ const builtin = @import("builtin");
pub const FailingAllocator = @import("failing_allocator.zig").FailingAllocator;
-error MissingDebugInfo;
-error InvalidDebugInfo;
-error UnsupportedDebugInfo;
-error UnknownObjectFormat;
-error TodoSupportCoffDebugInfo;
-error TodoSupportMachoDebugInfo;
-error TodoSupportCOFFDebugInfo;
-
-
/// Tries to write to stderr, unbuffered, and ignores any error returned.
/// Does not append a newline.
/// TODO atomic/multithread support
@@ -29,7 +20,7 @@ pub fn warn(comptime fmt: []const u8, args: ...) void {
const stderr = getStderrStream() catch return;
stderr.print(fmt, args) catch return;
}
-fn getStderrStream() %&io.OutStream {
+fn getStderrStream() !&io.OutStream {
if (stderr_stream) |st| {
return st;
} else {
@@ -42,7 +33,7 @@ fn getStderrStream() %&io.OutStream {
}
var self_debug_info: ?&ElfStackTrace = null;
-pub fn getSelfDebugInfo() %&ElfStackTrace {
+pub fn getSelfDebugInfo() !&ElfStackTrace {
if (self_debug_info) |info| {
return info;
} else {
@@ -149,11 +140,8 @@ const WHITE = "\x1b[37;1m";
const DIM = "\x1b[2m";
const RESET = "\x1b[0m";
-error PathNotFound;
-error InvalidDebugInfo;
-
pub fn writeStackTrace(stack_trace: &const builtin.StackTrace, out_stream: &io.OutStream, allocator: &mem.Allocator,
- debug_info: &ElfStackTrace, tty_color: bool) %void
+ debug_info: &ElfStackTrace, tty_color: bool) !void
{
var frame_index: usize = undefined;
var frames_left: usize = undefined;
@@ -175,7 +163,7 @@ pub fn writeStackTrace(stack_trace: &const builtin.StackTrace, out_stream: &io.O
}
pub fn writeCurrentStackTrace(out_stream: &io.OutStream, allocator: &mem.Allocator,
- debug_info: &ElfStackTrace, tty_color: bool, ignore_frame_count: usize) %void
+ debug_info: &ElfStackTrace, tty_color: bool, ignore_frame_count: usize) !void
{
var ignored_count: usize = 0;
@@ -191,7 +179,7 @@ pub fn writeCurrentStackTrace(out_stream: &io.OutStream, allocator: &mem.Allocat
}
}
-fn printSourceAtAddress(debug_info: &ElfStackTrace, out_stream: &io.OutStream, address: usize) %void {
+fn printSourceAtAddress(debug_info: &ElfStackTrace, out_stream: &io.OutStream, address: usize) !void {
if (builtin.os == builtin.Os.windows) {
return error.UnsupportedDebugInfo;
}
@@ -232,7 +220,7 @@ fn printSourceAtAddress(debug_info: &ElfStackTrace, out_stream: &io.OutStream, a
}
}
-pub fn openSelfDebugInfo(allocator: &mem.Allocator) %&ElfStackTrace {
+pub fn openSelfDebugInfo(allocator: &mem.Allocator) !&ElfStackTrace {
switch (builtin.object_format) {
builtin.ObjectFormat.elf => {
const st = try allocator.create(ElfStackTrace);
@@ -276,7 +264,7 @@ pub fn openSelfDebugInfo(allocator: &mem.Allocator) %&ElfStackTrace {
}
}
-fn printLineFromFile(allocator: &mem.Allocator, out_stream: &io.OutStream, line_info: &const LineInfo) %void {
+fn printLineFromFile(allocator: &mem.Allocator, out_stream: &io.OutStream, line_info: &const LineInfo) !void {
var f = try io.File.openRead(line_info.file_name, allocator);
defer f.close();
// TODO fstat and make sure that the file has the correct size
@@ -324,7 +312,7 @@ pub const ElfStackTrace = struct {
return self.abbrev_table_list.allocator;
}
- pub fn readString(self: &ElfStackTrace) %[]u8 {
+ pub fn readString(self: &ElfStackTrace) ![]u8 {
var in_file_stream = io.FileInStream.init(&self.self_exe_file);
const in_stream = &in_file_stream.stream;
return readStringRaw(self.allocator(), in_stream);
@@ -387,7 +375,7 @@ const Constant = struct {
payload: []u8,
signed: bool,
- fn asUnsignedLe(self: &const Constant) %u64 {
+ fn asUnsignedLe(self: &const Constant) !u64 {
if (self.payload.len > @sizeOf(u64))
return error.InvalidDebugInfo;
if (self.signed)
@@ -414,7 +402,7 @@ const Die = struct {
return null;
}
- fn getAttrAddr(self: &const Die, id: u64) %u64 {
+ fn getAttrAddr(self: &const Die, id: u64) !u64 {
const form_value = self.getAttr(id) ?? return error.MissingDebugInfo;
return switch (*form_value) {
FormValue.Address => |value| value,
@@ -422,7 +410,7 @@ const Die = struct {
};
}
- fn getAttrSecOffset(self: &const Die, id: u64) %u64 {
+ fn getAttrSecOffset(self: &const Die, id: u64) !u64 {
const form_value = self.getAttr(id) ?? return error.MissingDebugInfo;
return switch (*form_value) {
FormValue.Const => |value| value.asUnsignedLe(),
@@ -431,7 +419,7 @@ const Die = struct {
};
}
- fn getAttrUnsignedLe(self: &const Die, id: u64) %u64 {
+ fn getAttrUnsignedLe(self: &const Die, id: u64) !u64 {
const form_value = self.getAttr(id) ?? return error.MissingDebugInfo;
return switch (*form_value) {
FormValue.Const => |value| value.asUnsignedLe(),
@@ -439,7 +427,7 @@ const Die = struct {
};
}
- fn getAttrString(self: &const Die, st: &ElfStackTrace, id: u64) %[]u8 {
+ fn getAttrString(self: &const Die, st: &ElfStackTrace, id: u64) ![]u8 {
const form_value = self.getAttr(id) ?? return error.MissingDebugInfo;
return switch (*form_value) {
FormValue.String => |value| value,
@@ -512,7 +500,7 @@ const LineNumberProgram = struct {
};
}
- pub fn checkLineMatch(self: &LineNumberProgram) %?LineInfo {
+ pub fn checkLineMatch(self: &LineNumberProgram) !?LineInfo {
if (self.target_address >= self.prev_address and self.target_address < self.address) {
const file_entry = if (self.prev_file == 0) {
return error.MissingDebugInfo;
@@ -544,7 +532,7 @@ const LineNumberProgram = struct {
}
};
-fn readStringRaw(allocator: &mem.Allocator, in_stream: &io.InStream) %[]u8 {
+fn readStringRaw(allocator: &mem.Allocator, in_stream: &io.InStream) ![]u8 {
var buf = ArrayList(u8).init(allocator);
while (true) {
const byte = try in_stream.readByte();
@@ -555,58 +543,58 @@ fn readStringRaw(allocator: &mem.Allocator, in_stream: &io.InStream) %[]u8 {
return buf.toSlice();
}
-fn getString(st: &ElfStackTrace, offset: u64) %[]u8 {
+fn getString(st: &ElfStackTrace, offset: u64) ![]u8 {
const pos = st.debug_str.offset + offset;
try st.self_exe_file.seekTo(pos);
return st.readString();
}
-fn readAllocBytes(allocator: &mem.Allocator, in_stream: &io.InStream, size: usize) %[]u8 {
+fn readAllocBytes(allocator: &mem.Allocator, in_stream: &io.InStream, size: usize) ![]u8 {
const buf = try global_allocator.alloc(u8, size);
errdefer global_allocator.free(buf);
if ((try in_stream.read(buf)) < size) return error.EndOfFile;
return buf;
}
-fn parseFormValueBlockLen(allocator: &mem.Allocator, in_stream: &io.InStream, size: usize) %FormValue {
+fn parseFormValueBlockLen(allocator: &mem.Allocator, in_stream: &io.InStream, size: usize) !FormValue {
const buf = try readAllocBytes(allocator, in_stream, size);
return FormValue { .Block = buf };
}
-fn parseFormValueBlock(allocator: &mem.Allocator, in_stream: &io.InStream, size: usize) %FormValue {
+fn parseFormValueBlock(allocator: &mem.Allocator, in_stream: &io.InStream, size: usize) !FormValue {
const block_len = try in_stream.readVarInt(builtin.Endian.Little, usize, size);
return parseFormValueBlockLen(allocator, in_stream, block_len);
}
-fn parseFormValueConstant(allocator: &mem.Allocator, in_stream: &io.InStream, signed: bool, size: usize) %FormValue {
+fn parseFormValueConstant(allocator: &mem.Allocator, in_stream: &io.InStream, signed: bool, size: usize) !FormValue {
return FormValue { .Const = Constant {
.signed = signed,
.payload = try readAllocBytes(allocator, in_stream, size),
}};
}
-fn parseFormValueDwarfOffsetSize(in_stream: &io.InStream, is_64: bool) %u64 {
+fn parseFormValueDwarfOffsetSize(in_stream: &io.InStream, is_64: bool) !u64 {
return if (is_64) try in_stream.readIntLe(u64)
else u64(try in_stream.readIntLe(u32)) ;
}
-fn parseFormValueTargetAddrSize(in_stream: &io.InStream) %u64 {
+fn parseFormValueTargetAddrSize(in_stream: &io.InStream) !u64 {
return if (@sizeOf(usize) == 4) u64(try in_stream.readIntLe(u32))
else if (@sizeOf(usize) == 8) try in_stream.readIntLe(u64)
else unreachable;
}
-fn parseFormValueRefLen(allocator: &mem.Allocator, in_stream: &io.InStream, size: usize) %FormValue {
+fn parseFormValueRefLen(allocator: &mem.Allocator, in_stream: &io.InStream, size: usize) !FormValue {
const buf = try readAllocBytes(allocator, in_stream, size);
return FormValue { .Ref = buf };
}
-fn parseFormValueRef(allocator: &mem.Allocator, in_stream: &io.InStream, comptime T: type) %FormValue {
+fn parseFormValueRef(allocator: &mem.Allocator, in_stream: &io.InStream, comptime T: type) !FormValue {
const block_len = try in_stream.readIntLe(T);
return parseFormValueRefLen(allocator, in_stream, block_len);
}
-fn parseFormValue(allocator: &mem.Allocator, in_stream: &io.InStream, form_id: u64, is_64: bool) %FormValue {
+fn parseFormValue(allocator: &mem.Allocator, in_stream: &io.InStream, form_id: u64, is_64: bool) !FormValue {
return switch (form_id) {
DW.FORM_addr => FormValue { .Address = try parseFormValueTargetAddrSize(in_stream) },
DW.FORM_block1 => parseFormValueBlock(allocator, in_stream, 1),
@@ -656,7 +644,7 @@ fn parseFormValue(allocator: &mem.Allocator, in_stream: &io.InStream, form_id: u
};
}
-fn parseAbbrevTable(st: &ElfStackTrace) %AbbrevTable {
+fn parseAbbrevTable(st: &ElfStackTrace) !AbbrevTable {
const in_file = &st.self_exe_file;
var in_file_stream = io.FileInStream.init(in_file);
const in_stream = &in_file_stream.stream;
@@ -688,7 +676,7 @@ fn parseAbbrevTable(st: &ElfStackTrace) %AbbrevTable {
/// Gets an already existing AbbrevTable given the abbrev_offset, or if not found,
/// seeks in the stream and parses it.
-fn getAbbrevTable(st: &ElfStackTrace, abbrev_offset: u64) %&const AbbrevTable {
+fn getAbbrevTable(st: &ElfStackTrace, abbrev_offset: u64) !&const AbbrevTable {
for (st.abbrev_table_list.toSlice()) |*header| {
if (header.offset == abbrev_offset) {
return &header.table;
@@ -710,7 +698,7 @@ fn getAbbrevTableEntry(abbrev_table: &const AbbrevTable, abbrev_code: u64) ?&con
return null;
}
-fn parseDie(st: &ElfStackTrace, abbrev_table: &const AbbrevTable, is_64: bool) %Die {
+fn parseDie(st: &ElfStackTrace, abbrev_table: &const AbbrevTable, is_64: bool) !Die {
const in_file = &st.self_exe_file;
var in_file_stream = io.FileInStream.init(in_file);
const in_stream = &in_file_stream.stream;
@@ -732,7 +720,7 @@ fn parseDie(st: &ElfStackTrace, abbrev_table: &const AbbrevTable, is_64: bool) %
return result;
}
-fn getLineNumberInfo(st: &ElfStackTrace, compile_unit: &const CompileUnit, target_address: usize) %LineInfo {
+fn getLineNumberInfo(st: &ElfStackTrace, compile_unit: &const CompileUnit, target_address: usize) !LineInfo {
const compile_unit_cwd = try compile_unit.die.getAttrString(st, DW.AT_comp_dir);
const in_file = &st.self_exe_file;
@@ -910,7 +898,7 @@ fn getLineNumberInfo(st: &ElfStackTrace, compile_unit: &const CompileUnit, targe
return error.MissingDebugInfo;
}
-fn scanAllCompileUnits(st: &ElfStackTrace) %void {
+fn scanAllCompileUnits(st: &ElfStackTrace) !void {
const debug_info_end = st.debug_info.offset + st.debug_info.size;
var this_unit_offset = st.debug_info.offset;
var cu_index: usize = 0;
@@ -986,7 +974,7 @@ fn scanAllCompileUnits(st: &ElfStackTrace) %void {
}
}
-fn findCompileUnit(st: &ElfStackTrace, target_address: u64) %&const CompileUnit {
+fn findCompileUnit(st: &ElfStackTrace, target_address: u64) !&const CompileUnit {
var in_file_stream = io.FileInStream.init(&st.self_exe_file);
const in_stream = &in_file_stream.stream;
for (st.compile_unit_list.toSlice()) |*compile_unit| {
@@ -1022,7 +1010,7 @@ fn findCompileUnit(st: &ElfStackTrace, target_address: u64) %&const CompileUnit
return error.MissingDebugInfo;
}
-fn readInitialLength(in_stream: &io.InStream, is_64: &bool) %u64 {
+fn readInitialLength(in_stream: &io.InStream, is_64: &bool) !u64 {
const first_32_bits = try in_stream.readIntLe(u32);
*is_64 = (first_32_bits == 0xffffffff);
if (*is_64) {
@@ -1033,7 +1021,7 @@ fn readInitialLength(in_stream: &io.InStream, is_64: &bool) %u64 {
}
}
-fn readULeb128(in_stream: &io.InStream) %u64 {
+fn readULeb128(in_stream: &io.InStream) !u64 {
var result: u64 = 0;
var shift: usize = 0;
@@ -1054,7 +1042,7 @@ fn readULeb128(in_stream: &io.InStream) %u64 {
}
}
-fn readILeb128(in_stream: &io.InStream) %i64 {
+fn readILeb128(in_stream: &io.InStream) !i64 {
var result: i64 = 0;
var shift: usize = 0;
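
(Annotation, not part of the patch: every `%T` return type in std/debug becomes `!T`,
while call sites stay untouched because `try` and `catch` operate on the error-union
value regardless of how the return type is spelled. A minimal sketch of that pattern;
`readVersion` and `readVersionOrZero` are hypothetical names, and `readIntLe` is the
stream method already used in the hunks above.)

    const std = @import("std");
    const io = std.io;

    // `!u32` declares an error union with an inferred error set (new syntax);
    // the old spelling was `%u32`.
    fn readVersion(in_stream: &io.InStream) !u32 {
        return try in_stream.readIntLe(u32);
    }

    // The call site is identical under either spelling of the return type.
    fn readVersionOrZero(in_stream: &io.InStream) u32 {
        return readVersion(in_stream) catch 0;
    }
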
diff --git a/std/os/index.zig b/std/os/index.zig
index 4451faf103..6592facf51 100644
--- a/std/os/index.zig
+++ b/std/os/index.zig
@@ -57,25 +57,10 @@ const ArrayList = std.ArrayList;
const Buffer = std.Buffer;
const math = std.math;
-error SystemResources;
-error AccessDenied;
-error InvalidExe;
-error FileSystem;
-error IsDir;
-error FileNotFound;
-error FileBusy;
-error PathAlreadyExists;
-error SymLinkLoop;
-error ReadOnlyFileSystem;
-error LinkQuotaExceeded;
-error RenameAcrossMountPoints;
-error DirNotEmpty;
-error WouldBlock;
-
/// Fills `buf` with random bytes. If linking against libc, this calls the
/// appropriate OS-specific library call. Otherwise it uses the zig standard
/// library implementation.
-pub fn getRandomBytes(buf: []u8) %void {
+pub fn getRandomBytes(buf: []u8) !void {
switch (builtin.os) {
Os.linux => while (true) {
// TODO check libc version and potentially call c.getrandom.
@@ -182,7 +167,7 @@ pub fn close(handle: FileHandle) void {
}
/// Calls POSIX read, and keeps trying if it gets interrupted.
-pub fn posixRead(fd: i32, buf: []u8) %void {
+pub fn posixRead(fd: i32, buf: []u8) !void {
var index: usize = 0;
while (index < buf.len) {
const amt_written = posix.read(fd, &buf[index], buf.len - index);
@@ -203,17 +188,8 @@ pub fn posixRead(fd: i32, buf: []u8) %void {
}
}
-error WouldBlock;
-error FileClosed;
-error DestinationAddressRequired;
-error DiskQuota;
-error FileTooBig;
-error InputOutput;
-error NoSpaceLeft;
-error BrokenPipe;
-
/// Calls POSIX write, and keeps trying if it gets interrupted.
-pub fn posixWrite(fd: i32, bytes: []const u8) %void {
+pub fn posixWrite(fd: i32, bytes: []const u8) !void {
while (true) {
const write_ret = posix.write(fd, bytes.ptr, bytes.len);
const write_err = posix.getErrno(write_ret);
@@ -243,7 +219,7 @@ pub fn posixWrite(fd: i32, bytes: []const u8) %void {
/// otherwise if the fixed size buffer is too small, allocator is used to obtain the needed memory.
/// Calls POSIX open, keeps trying if it gets interrupted, and translates
/// the return value into zig errors.
-pub fn posixOpen(file_path: []const u8, flags: u32, perm: usize, allocator: ?&Allocator) %i32 {
+pub fn posixOpen(file_path: []const u8, flags: u32, perm: usize, allocator: ?&Allocator) !i32 {
var stack_buf: [max_noalloc_path_len]u8 = undefined;
var path0: []u8 = undefined;
var need_free = false;
@@ -292,7 +268,7 @@ pub fn posixOpen(file_path: []const u8, flags: u32, perm: usize, allocator: ?&Al
}
}
-pub fn posixDup2(old_fd: i32, new_fd: i32) %void {
+pub fn posixDup2(old_fd: i32, new_fd: i32) !void {
while (true) {
const err = posix.getErrno(posix.dup2(old_fd, new_fd));
if (err > 0) {
@@ -307,7 +283,7 @@ pub fn posixDup2(old_fd: i32, new_fd: i32) %void {
}
}
-pub fn createNullDelimitedEnvMap(allocator: &Allocator, env_map: &const BufMap) %[]?&u8 {
+pub fn createNullDelimitedEnvMap(allocator: &Allocator, env_map: &const BufMap) ![]?&u8 {
const envp_count = env_map.count();
const envp_buf = try allocator.alloc(?&u8, envp_count + 1);
mem.set(?&u8, envp_buf, null);
@@ -344,7 +320,7 @@ pub fn freeNullDelimitedEnvMap(allocator: &Allocator, envp_buf: []?&u8) void {
/// `argv[0]` is the executable path.
/// This function also uses the PATH environment variable to get the full path to the executable.
pub fn posixExecve(argv: []const []const u8, env_map: &const BufMap,
- allocator: &Allocator) %void
+ allocator: &Allocator) !void
{
const argv_buf = try allocator.alloc(?&u8, argv.len + 1);
mem.set(?&u8, argv_buf, null);
@@ -419,7 +395,7 @@ fn posixExecveErrnoToErr(err: usize) error {
pub var posix_environ_raw: []&u8 = undefined;
/// Caller must free result when done.
-pub fn getEnvMap(allocator: &Allocator) %BufMap {
+pub fn getEnvMap(allocator: &Allocator) !BufMap {
var result = BufMap.init(allocator);
errdefer result.deinit();
@@ -480,10 +456,8 @@ pub fn getEnvPosix(key: []const u8) ?[]const u8 {
return null;
}
-error EnvironmentVariableNotFound;
-
/// Caller must free returned memory.
-pub fn getEnvVarOwned(allocator: &mem.Allocator, key: []const u8) %[]u8 {
+pub fn getEnvVarOwned(allocator: &mem.Allocator, key: []const u8) ![]u8 {
if (is_windows) {
const key_with_null = try cstr.addNullByte(allocator, key);
defer allocator.free(key_with_null);
@@ -517,7 +491,7 @@ pub fn getEnvVarOwned(allocator: &mem.Allocator, key: []const u8) %[]u8 {
}
/// Caller must free the returned memory.
-pub fn getCwd(allocator: &Allocator) %[]u8 {
+pub fn getCwd(allocator: &Allocator) ![]u8 {
switch (builtin.os) {
Os.windows => {
var buf = try allocator.alloc(u8, 256);
@@ -564,7 +538,7 @@ test "os.getCwd" {
_ = getCwd(debug.global_allocator);
}
-pub fn symLink(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) %void {
+pub fn symLink(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) !void {
if (is_windows) {
return symLinkWindows(allocator, existing_path, new_path);
} else {
@@ -572,7 +546,7 @@ pub fn symLink(allocator: &Allocator, existing_path: []const u8, new_path: []con
}
}
-pub fn symLinkWindows(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) %void {
+pub fn symLinkWindows(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) !void {
const existing_with_null = try cstr.addNullByte(allocator, existing_path);
defer allocator.free(existing_with_null);
const new_with_null = try cstr.addNullByte(allocator, new_path);
@@ -586,7 +560,7 @@ pub fn symLinkWindows(allocator: &Allocator, existing_path: []const u8, new_path
}
}
-pub fn symLinkPosix(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) %void {
+pub fn symLinkPosix(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) !void {
const full_buf = try allocator.alloc(u8, existing_path.len + new_path.len + 2);
defer allocator.free(full_buf);
@@ -623,7 +597,7 @@ const b64_fs_encoder = base64.Base64Encoder.init(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_",
base64.standard_pad_char);
-pub fn atomicSymLink(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) %void {
+pub fn atomicSymLink(allocator: &Allocator, existing_path: []const u8, new_path: []const u8) !void {
if (symLink(allocator, existing_path, new_path)) {
return;
} else |err| {
@@ -652,7 +626,7 @@ pub fn atomicSymLink(allocator: &Allocator, existing_path: []const u8, new_path:
}
-pub fn deleteFile(allocator: &Allocator, file_path: []const u8) %void {
+pub fn deleteFile(allocator: &Allocator, file_path: []const u8) !void {
if (builtin.os == Os.windows) {
return deleteFileWindows(allocator, file_path);
} else {
@@ -660,10 +634,7 @@ pub fn deleteFile(allocator: &Allocator, file_path: []const u8) %void {
}
}
-error FileNotFound;
-error AccessDenied;
-
-pub fn deleteFileWindows(allocator: &Allocator, file_path: []const u8) %void {
+pub fn deleteFileWindows(allocator: &Allocator, file_path: []const u8) !void {
const buf = try allocator.alloc(u8, file_path.len + 1);
defer allocator.free(buf);
@@ -681,7 +652,7 @@ pub fn deleteFileWindows(allocator: &Allocator, file_path: []const u8) %void {
}
}
-pub fn deleteFilePosix(allocator: &Allocator, file_path: []const u8) %void {
+pub fn deleteFilePosix(allocator: &Allocator, file_path: []const u8) !void {
const buf = try allocator.alloc(u8, file_path.len + 1);
defer allocator.free(buf);
@@ -708,13 +679,13 @@ pub fn deleteFilePosix(allocator: &Allocator, file_path: []const u8) %void {
}
/// Calls ::copyFileMode with 0o666 for the mode.
-pub fn copyFile(allocator: &Allocator, source_path: []const u8, dest_path: []const u8) %void {
+pub fn copyFile(allocator: &Allocator, source_path: []const u8, dest_path: []const u8) !void {
return copyFileMode(allocator, source_path, dest_path, 0o666);
}
// TODO instead of accepting a mode argument, use the mode from fstat'ing the source path once open
/// Guaranteed to be atomic.
-pub fn copyFileMode(allocator: &Allocator, source_path: []const u8, dest_path: []const u8, mode: usize) %void {
+pub fn copyFileMode(allocator: &Allocator, source_path: []const u8, dest_path: []const u8, mode: usize) !void {
var rand_buf: [12]u8 = undefined;
const tmp_path = try allocator.alloc(u8, dest_path.len + base64.Base64Encoder.calcSize(rand_buf.len));
defer allocator.free(tmp_path);
@@ -738,7 +709,7 @@ pub fn copyFileMode(allocator: &Allocator, source_path: []const u8, dest_path: [
}
}
-pub fn rename(allocator: &Allocator, old_path: []const u8, new_path: []const u8) %void {
+pub fn rename(allocator: &Allocator, old_path: []const u8, new_path: []const u8) !void {
const full_buf = try allocator.alloc(u8, old_path.len + new_path.len + 2);
defer allocator.free(full_buf);
@@ -783,7 +754,7 @@ pub fn rename(allocator: &Allocator, old_path: []const u8, new_path: []const u8)
}
}
-pub fn makeDir(allocator: &Allocator, dir_path: []const u8) %void {
+pub fn makeDir(allocator: &Allocator, dir_path: []const u8) !void {
if (is_windows) {
return makeDirWindows(allocator, dir_path);
} else {
@@ -791,7 +762,7 @@ pub fn makeDir(allocator: &Allocator, dir_path: []const u8) %void {
}
}
-pub fn makeDirWindows(allocator: &Allocator, dir_path: []const u8) %void {
+pub fn makeDirWindows(allocator: &Allocator, dir_path: []const u8) !void {
const path_buf = try cstr.addNullByte(allocator, dir_path);
defer allocator.free(path_buf);
@@ -805,7 +776,7 @@ pub fn makeDirWindows(allocator: &Allocator, dir_path: []const u8) %void {
}
}
-pub fn makeDirPosix(allocator: &Allocator, dir_path: []const u8) %void {
+pub fn makeDirPosix(allocator: &Allocator, dir_path: []const u8) !void {
const path_buf = try cstr.addNullByte(allocator, dir_path);
defer allocator.free(path_buf);
@@ -831,7 +802,7 @@ pub fn makeDirPosix(allocator: &Allocator, dir_path: []const u8) %void {
/// Calls makeDir recursively to make an entire path. Returns success if the path
/// already exists and is a directory.
-pub fn makePath(allocator: &Allocator, full_path: []const u8) %void {
+pub fn makePath(allocator: &Allocator, full_path: []const u8) !void {
const resolved_path = try path.resolve(allocator, full_path);
defer allocator.free(resolved_path);
@@ -869,7 +840,7 @@ pub fn makePath(allocator: &Allocator, full_path: []const u8) %void {
/// Returns ::error.DirNotEmpty if the directory is not empty.
/// To delete a directory recursively, see ::deleteTree
-pub fn deleteDir(allocator: &Allocator, dir_path: []const u8) %void {
+pub fn deleteDir(allocator: &Allocator, dir_path: []const u8) !void {
const path_buf = try allocator.alloc(u8, dir_path.len + 1);
defer allocator.free(path_buf);
@@ -898,7 +869,7 @@ pub fn deleteDir(allocator: &Allocator, dir_path: []const u8) %void {
/// removes it. If it cannot be removed because it is a non-empty directory,
/// this function recursively removes its entries and then tries again.
// TODO non-recursive implementation
-pub fn deleteTree(allocator: &Allocator, full_path: []const u8) %void {
+pub fn deleteTree(allocator: &Allocator, full_path: []const u8) !void {
start_over: while (true) {
// First, try deleting the item as a file. This way we don't follow sym links.
if (deleteFile(allocator, full_path)) {
@@ -967,7 +938,7 @@ pub const Dir = struct {
};
};
- pub fn open(allocator: &Allocator, dir_path: []const u8) %Dir {
+ pub fn open(allocator: &Allocator, dir_path: []const u8) !Dir {
const fd = try posixOpen(dir_path, posix.O_RDONLY|posix.O_DIRECTORY|posix.O_CLOEXEC, 0, allocator);
return Dir {
.allocator = allocator,
@@ -985,7 +956,7 @@ pub const Dir = struct {
/// Memory such as file names referenced in this returned entry becomes invalid
/// with subsequent calls to next, as well as when this ::Dir is deinitialized.
- pub fn next(self: &Dir) %?Entry {
+ pub fn next(self: &Dir) !?Entry {
start_over: while (true) {
if (self.index >= self.end_index) {
if (self.buf.len == 0) {
@@ -1042,7 +1013,7 @@ pub const Dir = struct {
}
};
-pub fn changeCurDir(allocator: &Allocator, dir_path: []const u8) %void {
+pub fn changeCurDir(allocator: &Allocator, dir_path: []const u8) !void {
const path_buf = try allocator.alloc(u8, dir_path.len + 1);
defer allocator.free(path_buf);
@@ -1066,7 +1037,7 @@ pub fn changeCurDir(allocator: &Allocator, dir_path: []const u8) %void {
}
/// Read value of a symbolic link.
-pub fn readLink(allocator: &Allocator, pathname: []const u8) %[]u8 {
+pub fn readLink(allocator: &Allocator, pathname: []const u8) ![]u8 {
const path_buf = try allocator.alloc(u8, pathname.len + 1);
defer allocator.free(path_buf);
@@ -1143,11 +1114,7 @@ test "os.sleep" {
sleep(0, 1);
}
-error ResourceLimitReached;
-error InvalidUserId;
-error PermissionDenied;
-
-pub fn posix_setuid(uid: u32) %void {
+pub fn posix_setuid(uid: u32) !void {
const err = posix.getErrno(posix.setuid(uid));
if (err == 0) return;
return switch (err) {
@@ -1158,7 +1125,7 @@ pub fn posix_setuid(uid: u32) %void {
};
}
-pub fn posix_setreuid(ruid: u32, euid: u32) %void {
+pub fn posix_setreuid(ruid: u32, euid: u32) !void {
const err = posix.getErrno(posix.setreuid(ruid, euid));
if (err == 0) return;
return switch (err) {
@@ -1169,7 +1136,7 @@ pub fn posix_setreuid(ruid: u32, euid: u32) %void {
};
}
-pub fn posix_setgid(gid: u32) %void {
+pub fn posix_setgid(gid: u32) !void {
const err = posix.getErrno(posix.setgid(gid));
if (err == 0) return;
return switch (err) {
@@ -1180,7 +1147,7 @@ pub fn posix_setgid(gid: u32) %void {
};
}
-pub fn posix_setregid(rgid: u32, egid: u32) %void {
+pub fn posix_setregid(rgid: u32, egid: u32) !void {
const err = posix.getErrno(posix.setregid(rgid, egid));
if (err == 0) return;
return switch (err) {
@@ -1191,8 +1158,7 @@ pub fn posix_setregid(rgid: u32, egid: u32) %void {
};
}
-error NoStdHandles;
-pub fn windowsGetStdHandle(handle_id: windows.DWORD) %windows.HANDLE {
+pub fn windowsGetStdHandle(handle_id: windows.DWORD) !windows.HANDLE {
if (windows.GetStdHandle(handle_id)) |handle| {
if (handle == windows.INVALID_HANDLE_VALUE) {
const err = windows.GetLastError();
@@ -1261,7 +1227,7 @@ pub const ArgIteratorWindows = struct {
}
/// You must free the returned memory when done.
- pub fn next(self: &ArgIteratorWindows, allocator: &Allocator) ?%[]u8 {
+ pub fn next(self: &ArgIteratorWindows, allocator: &Allocator) ?internalNext.errors![]u8 {
// march forward over whitespace
while (true) : (self.index += 1) {
const byte = self.cmd_line[self.index];
@@ -1314,7 +1280,7 @@ pub const ArgIteratorWindows = struct {
}
}
- fn internalNext(self: &ArgIteratorWindows, allocator: &Allocator) %[]u8 {
+ fn internalNext(self: &ArgIteratorWindows, allocator: &Allocator) ![]u8 {
var buf = try Buffer.initSize(allocator, 0);
defer buf.deinit();
@@ -1358,7 +1324,7 @@ pub const ArgIteratorWindows = struct {
}
}
- fn emitBackslashes(self: &ArgIteratorWindows, buf: &Buffer, emit_count: usize) %void {
+ fn emitBackslashes(self: &ArgIteratorWindows, buf: &Buffer, emit_count: usize) !void {
var i: usize = 0;
while (i < emit_count) : (i += 1) {
try buf.appendByte('\\');
@@ -1397,7 +1363,7 @@ pub const ArgIterator = struct {
}
/// You must free the returned memory when done.
- pub fn next(self: &ArgIterator, allocator: &Allocator) ?%[]u8 {
+ pub fn next(self: &ArgIterator, allocator: &Allocator) ?![]u8 {
if (builtin.os == Os.windows) {
return self.inner.next(allocator);
} else {
@@ -1422,7 +1388,7 @@ pub fn args() ArgIterator {
}
/// Caller must call freeArgs on result.
-pub fn argsAlloc(allocator: &mem.Allocator) %[]const []u8 {
+pub fn argsAlloc(allocator: &mem.Allocator) ![]const []u8 {
// TODO refactor to only make 1 allocation.
var it = args();
var contents = try Buffer.initSize(allocator, 0);
@@ -1529,7 +1495,7 @@ pub fn unexpectedErrorWindows(err: windows.DWORD) error {
return error.Unexpected;
}
-pub fn openSelfExe() %io.File {
+pub fn openSelfExe() !io.File {
switch (builtin.os) {
Os.linux => {
return io.File.openRead("/proc/self/exe", null);
@@ -1547,7 +1513,7 @@ pub fn openSelfExe() %io.File {
/// This function may return an error if the current executable
/// was deleted after spawning.
/// Caller owns returned memory.
-pub fn selfExePath(allocator: &mem.Allocator) %[]u8 {
+pub fn selfExePath(allocator: &mem.Allocator) ![]u8 {
switch (builtin.os) {
Os.linux => {
// If the currently executing binary has been deleted,
@@ -1590,7 +1556,7 @@ pub fn selfExePath(allocator: &mem.Allocator) %[]u8 {
/// Get the directory path that contains the current executable.
/// Caller owns returned memory.
-pub fn selfExeDirPath(allocator: &mem.Allocator) %[]u8 {
+pub fn selfExeDirPath(allocator: &mem.Allocator) ![]u8 {
switch (builtin.os) {
Os.linux => {
// If the currently executing binary has been deleted,
diff --git a/std/special/test_runner.zig b/std/special/test_runner.zig
index 3284f740b0..76a54a5018 100644
--- a/std/special/test_runner.zig
+++ b/std/special/test_runner.zig
@@ -4,7 +4,7 @@ const builtin = @import("builtin");
const test_fn_list = builtin.__zig_test_fn_slice;
const warn = std.debug.warn;
-pub fn main() %void {
+pub fn main() !void {
for (test_fn_list) |test_fn, i| {
warn("Test {}/{} {}...", i + 1, test_fn_list.len, test_fn.name);