New Zig formal grammar (#1685)

Reverted #1628 and changed the grammar and parser of the language to disallow certain expressions where types are expected.
Jimmi Holst Christensen 2018-11-13 05:08:37 -08:00 committed by GitHub
parent 67fbb0434f
commit 8139c5a516
216 changed files with 93348 additions and 93327 deletions
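
The diff below is mechanical: the `Type.{ ... }` initializer syntax introduced by #1628 (and the matching `struct.{`, `enum.{`, `union(enum).{`, and `error.{` forms) loses its dot, and the bare `error` global error set is renamed `anyerror`. A minimal sketch of the resulting syntax follows; the names `Point`, `ParseError`, `origin`, and `unwrap` are illustrative and do not appear in the commit.

const std = @import("std");

// Container declarations now use `struct {` instead of the old `struct.{` form.
const Point = struct {
    x: i32,
    y: i32,
};

// Error sets likewise drop the dot: `error{...}` instead of `error.{...}`.
const ParseError = error{Overflow};

fn origin() Point {
    // Struct literals: `Point{ ... }` instead of `Point.{ ... }`.
    return Point{ .x = 0, .y = 0 };
}

// The global error set is now spelled `anyerror` rather than a bare `error`.
fn unwrap(arg: anyerror![]u8) anyerror![]u8 {
    return arg;
}

test "reverted container literal syntax" {
    const p = origin();
    std.debug.assert(p.x == 0 and p.y == 0);
    _ = ParseError.Overflow;
}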


@ -17,7 +17,7 @@ pub fn build(b: *Builder) !void {
const rel_zig_exe = try os.path.relative(b.allocator, b.build_root, b.zig_exe);
const langref_out_path = os.path.join(b.allocator, b.cache_root, "langref.html") catch unreachable;
var docgen_cmd = b.addCommand(null, b.env_map, [][]const u8.{
var docgen_cmd = b.addCommand(null, b.env_map, [][]const u8{
docgen_exe.getOutputPath(),
rel_zig_exe,
"doc" ++ os.path.sep_str ++ "langref.html.in",
@ -31,12 +31,12 @@ pub fn build(b: *Builder) !void {
const test_step = b.step("test", "Run all the tests");
// find the stage0 build artifacts because we're going to re-use config.h and zig_cpp library
const build_info = try b.exec([][]const u8.{
const build_info = try b.exec([][]const u8{
b.zig_exe,
"BUILD_INFO",
});
var index: usize = 0;
var ctx = Context.{
var ctx = Context{
.cmake_binary_dir = nextValue(&index, build_info),
.cxx_compiler = nextValue(&index, build_info),
.llvm_config_exe = nextValue(&index, build_info),
@ -162,7 +162,7 @@ fn addCppLib(b: *Builder, lib_exe_obj: var, cmake_binary_dir: []const u8, lib_na
lib_exe_obj.addObjectFile(os.path.join(b.allocator, cmake_binary_dir, "zig_cpp", b.fmt("{}{}{}", lib_prefix, lib_name, lib_exe_obj.target.libFileExt())) catch unreachable);
}
const LibraryDep = struct.{
const LibraryDep = struct {
prefix: []const u8,
libdirs: ArrayList([]const u8),
libs: ArrayList([]const u8),
@ -171,24 +171,24 @@ const LibraryDep = struct.{
};
fn findLLVM(b: *Builder, llvm_config_exe: []const u8) !LibraryDep {
const shared_mode = try b.exec([][]const u8.{ llvm_config_exe, "--shared-mode" });
const shared_mode = try b.exec([][]const u8{ llvm_config_exe, "--shared-mode" });
const is_static = mem.startsWith(u8, shared_mode, "static");
const libs_output = if (is_static)
try b.exec([][]const u8.{
try b.exec([][]const u8{
llvm_config_exe,
"--libfiles",
"--system-libs",
})
else
try b.exec([][]const u8.{
try b.exec([][]const u8{
llvm_config_exe,
"--libs",
});
const includes_output = try b.exec([][]const u8.{ llvm_config_exe, "--includedir" });
const libdir_output = try b.exec([][]const u8.{ llvm_config_exe, "--libdir" });
const prefix_output = try b.exec([][]const u8.{ llvm_config_exe, "--prefix" });
const includes_output = try b.exec([][]const u8{ llvm_config_exe, "--includedir" });
const libdir_output = try b.exec([][]const u8{ llvm_config_exe, "--libdir" });
const prefix_output = try b.exec([][]const u8{ llvm_config_exe, "--prefix" });
var result = LibraryDep.{
var result = LibraryDep{
.prefix = mem.split(prefix_output, " \r\n").next().?,
.libs = ArrayList([]const u8).init(b.allocator),
.system_libs = ArrayList([]const u8).init(b.allocator),
@ -328,7 +328,7 @@ fn addCxxKnownPath(
objname: []const u8,
errtxt: ?[]const u8,
) !void {
const path_padded = try b.exec([][]const u8.{
const path_padded = try b.exec([][]const u8{
ctx.cxx_compiler,
b.fmt("-print-file-name={}", objname),
});
@ -344,7 +344,7 @@ fn addCxxKnownPath(
exe.addObjectFile(path_unpadded);
}
const Context = struct.{
const Context = struct {
cmake_binary_dir: []const u8,
cxx_compiler: []const u8,
llvm_config_exe: []const u8,


@ -58,12 +58,12 @@ pub fn main() !void {
try buffered_out_stream.flush();
}
const Token = struct.{
const Token = struct {
id: Id,
start: usize,
end: usize,
const Id = enum.{
const Id = enum {
Invalid,
Content,
BracketOpen,
@ -74,14 +74,14 @@ const Token = struct.{
};
};
const Tokenizer = struct.{
const Tokenizer = struct {
buffer: []const u8,
index: usize,
state: State,
source_file_name: []const u8,
code_node_count: usize,
const State = enum.{
const State = enum {
Start,
LBracket,
Hash,
@ -90,7 +90,7 @@ const Tokenizer = struct.{
};
fn init(source_file_name: []const u8, buffer: []const u8) Tokenizer {
return Tokenizer.{
return Tokenizer{
.buffer = buffer,
.index = 0,
.state = State.Start,
@ -100,7 +100,7 @@ const Tokenizer = struct.{
}
fn next(self: *Tokenizer) Token {
var result = Token.{
var result = Token{
.id = Token.Id.Eof,
.start = self.index,
.end = undefined,
@ -184,7 +184,7 @@ const Tokenizer = struct.{
return result;
}
const Location = struct.{
const Location = struct {
line: usize,
column: usize,
line_start: usize,
@ -192,7 +192,7 @@ const Tokenizer = struct.{
};
fn getTokenLocation(self: *Tokenizer, token: Token) Location {
var loc = Location.{
var loc = Location{
.line = 0,
.column = 0,
.line_start = 0,
@ -216,7 +216,7 @@ const Tokenizer = struct.{
}
};
fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: ...) error {
fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: ...) anyerror {
const loc = tokenizer.getTokenLocation(token);
warn("{}:{}:{}: error: " ++ fmt ++ "\n", tokenizer.source_file_name, loc.line + 1, loc.column + 1, args);
if (loc.line_start <= loc.line_end) {
@ -251,23 +251,23 @@ fn eatToken(tokenizer: *Tokenizer, id: Token.Id) !Token {
return token;
}
const HeaderOpen = struct.{
const HeaderOpen = struct {
name: []const u8,
url: []const u8,
n: usize,
};
const SeeAlsoItem = struct.{
const SeeAlsoItem = struct {
name: []const u8,
token: Token,
};
const ExpectedOutcome = enum.{
const ExpectedOutcome = enum {
Succeed,
Fail,
};
const Code = struct.{
const Code = struct {
id: Id,
name: []const u8,
source_token: Token,
@ -277,7 +277,7 @@ const Code = struct.{
target_windows: bool,
link_libc: bool,
const Id = union(enum).{
const Id = union(enum) {
Test,
TestError: []const u8,
TestSafety: []const u8,
@ -286,13 +286,13 @@ const Code = struct.{
};
};
const Link = struct.{
const Link = struct {
url: []const u8,
name: []const u8,
token: Token,
};
const Node = union(enum).{
const Node = union(enum) {
Content: []const u8,
Nav,
Builtin: Token,
@ -303,13 +303,13 @@ const Node = union(enum).{
Syntax: Token,
};
const Toc = struct.{
const Toc = struct {
nodes: []Node,
toc: []u8,
urls: std.HashMap([]const u8, Token, mem.hash_slice_u8, mem.eql_slice_u8),
};
const Action = enum.{
const Action = enum {
Open,
Close,
};
@ -343,7 +343,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
break;
},
Token.Id.Content => {
try nodes.append(Node.{ .Content = tokenizer.buffer[token.start..token.end] });
try nodes.append(Node{ .Content = tokenizer.buffer[token.start..token.end] });
},
Token.Id.BracketOpen => {
const tag_token = try eatToken(tokenizer, Token.Id.TagContent);
@ -355,7 +355,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
try nodes.append(Node.Nav);
} else if (mem.eql(u8, tag_name, "builtin")) {
_ = try eatToken(tokenizer, Token.Id.BracketClose);
try nodes.append(Node.{ .Builtin = tag_token });
try nodes.append(Node{ .Builtin = tag_token });
} else if (mem.eql(u8, tag_name, "header_open")) {
_ = try eatToken(tokenizer, Token.Id.Separator);
const content_token = try eatToken(tokenizer, Token.Id.TagContent);
@ -365,8 +365,8 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
header_stack_size += 1;
const urlized = try urlize(allocator, content);
try nodes.append(Node.{
.HeaderOpen = HeaderOpen.{
try nodes.append(Node{
.HeaderOpen = HeaderOpen{
.name = content,
.url = urlized,
.n = header_stack_size,
@ -409,14 +409,14 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
switch (see_also_tok.id) {
Token.Id.TagContent => {
const content = tokenizer.buffer[see_also_tok.start..see_also_tok.end];
try list.append(SeeAlsoItem.{
try list.append(SeeAlsoItem{
.name = content,
.token = see_also_tok,
});
},
Token.Id.Separator => {},
Token.Id.BracketClose => {
try nodes.append(Node.{ .SeeAlso = list.toOwnedSlice() });
try nodes.append(Node{ .SeeAlso = list.toOwnedSlice() });
break;
},
else => return parseError(tokenizer, see_also_tok, "invalid see_also token"),
@ -440,8 +440,8 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
}
};
try nodes.append(Node.{
.Link = Link.{
try nodes.append(Node{
.Link = Link{
.url = try urlize(allocator, url_name),
.name = name,
.token = name_tok,
@ -465,24 +465,24 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
var code_kind_id: Code.Id = undefined;
var is_inline = false;
if (mem.eql(u8, code_kind_str, "exe")) {
code_kind_id = Code.Id.{ .Exe = ExpectedOutcome.Succeed };
code_kind_id = Code.Id{ .Exe = ExpectedOutcome.Succeed };
} else if (mem.eql(u8, code_kind_str, "exe_err")) {
code_kind_id = Code.Id.{ .Exe = ExpectedOutcome.Fail };
code_kind_id = Code.Id{ .Exe = ExpectedOutcome.Fail };
} else if (mem.eql(u8, code_kind_str, "test")) {
code_kind_id = Code.Id.Test;
} else if (mem.eql(u8, code_kind_str, "test_err")) {
code_kind_id = Code.Id.{ .TestError = name };
code_kind_id = Code.Id{ .TestError = name };
name = "test";
} else if (mem.eql(u8, code_kind_str, "test_safety")) {
code_kind_id = Code.Id.{ .TestSafety = name };
code_kind_id = Code.Id{ .TestSafety = name };
name = "test";
} else if (mem.eql(u8, code_kind_str, "obj")) {
code_kind_id = Code.Id.{ .Obj = null };
code_kind_id = Code.Id{ .Obj = null };
} else if (mem.eql(u8, code_kind_str, "obj_err")) {
code_kind_id = Code.Id.{ .Obj = name };
code_kind_id = Code.Id{ .Obj = name };
name = "test";
} else if (mem.eql(u8, code_kind_str, "syntax")) {
code_kind_id = Code.Id.{ .Obj = null };
code_kind_id = Code.Id{ .Obj = null };
is_inline = true;
} else {
return parseError(tokenizer, code_kind_tok, "unrecognized code kind: {}", code_kind_str);
@ -518,8 +518,8 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
_ = try eatToken(tokenizer, Token.Id.BracketClose);
} else
unreachable; // TODO issue #707
try nodes.append(Node.{
.Code = Code.{
try nodes.append(Node{
.Code = Code{
.id = code_kind_id,
.name = name,
.source_token = source_token,
@ -541,7 +541,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
return parseError(tokenizer, end_syntax_tag, "invalid token inside syntax: {}", end_tag_name);
}
_ = try eatToken(tokenizer, Token.Id.BracketClose);
try nodes.append(Node.{ .Syntax = content_tok });
try nodes.append(Node{ .Syntax = content_tok });
} else {
return parseError(tokenizer, tag_token, "unrecognized tag name: {}", tag_name);
}
@ -550,7 +550,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
}
}
return Toc.{
return Toc{
.nodes = nodes.toOwnedSlice(),
.toc = toc_buf.toOwnedSlice(),
.urls = urls,
@ -606,7 +606,7 @@ fn writeEscaped(out: var, input: []const u8) !void {
//#define VT_BOLD "\x1b[0;1m"
//#define VT_RESET "\x1b[0m"
const TermState = enum.{
const TermState = enum {
Start,
Escape,
LBracket,
@ -703,7 +703,7 @@ fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
return buf.toOwnedSlice();
}
const builtin_types = [][]const u8.{
const builtin_types = [][]const u8{
"f16", "f32", "f64", "f128", "c_longdouble", "c_short",
"c_ushort", "c_int", "c_uint", "c_long", "c_ulong", "c_longlong",
"c_ulonglong", "c_char", "c_void", "void", "bool", "isize",
@ -735,6 +735,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
std.zig.Token.Id.Keyword_align,
std.zig.Token.Id.Keyword_and,
std.zig.Token.Id.Keyword_anyerror,
std.zig.Token.Id.Keyword_asm,
std.zig.Token.Id.Keyword_async,
std.zig.Token.Id.Keyword_await,
@ -998,7 +999,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
const tmp_bin_file_name = try os.path.join(allocator, tmp_dir_name, name_plus_bin_ext);
var build_args = std.ArrayList([]const u8).init(allocator);
defer build_args.deinit();
try build_args.appendSlice([][]const u8.{
try build_args.appendSlice([][]const u8{
zig_exe,
"build-exe",
tmp_source_file_name,
@ -1035,7 +1036,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
}
_ = exec(allocator, &env_map, build_args.toSliceConst()) catch return parseError(tokenizer, code.source_token, "example failed to compile");
const run_args = [][]const u8.{tmp_bin_file_name};
const run_args = [][]const u8{tmp_bin_file_name};
const result = if (expected_outcome == ExpectedOutcome.Fail) blk: {
const result = try os.ChildProcess.exec(allocator, run_args, null, &env_map, max_doc_file_size);
@ -1069,7 +1070,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
var test_args = std.ArrayList([]const u8).init(allocator);
defer test_args.deinit();
try test_args.appendSlice([][]const u8.{
try test_args.appendSlice([][]const u8{
zig_exe,
"test",
tmp_source_file_name,
@ -1093,7 +1094,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
},
}
if (code.target_windows) {
try test_args.appendSlice([][]const u8.{
try test_args.appendSlice([][]const u8{
"--target-os",
"windows",
"--target-arch",
@ -1111,7 +1112,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
var test_args = std.ArrayList([]const u8).init(allocator);
defer test_args.deinit();
try test_args.appendSlice([][]const u8.{
try test_args.appendSlice([][]const u8{
zig_exe,
"test",
"--color",
@ -1170,7 +1171,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
var test_args = std.ArrayList([]const u8).init(allocator);
defer test_args.deinit();
try test_args.appendSlice([][]const u8.{
try test_args.appendSlice([][]const u8{
zig_exe,
"test",
tmp_source_file_name,
@ -1222,7 +1223,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
const name_plus_h_ext = try std.fmt.allocPrint(allocator, "{}.h", code.name);
const output_h_file_name = try os.path.join(allocator, tmp_dir_name, name_plus_h_ext);
try build_args.appendSlice([][]const u8.{
try build_args.appendSlice([][]const u8{
zig_exe,
"build-obj",
tmp_source_file_name,
@ -1332,7 +1333,7 @@ fn exec(allocator: *mem.Allocator, env_map: *std.BufMap, args: []const []const u
}
fn getBuiltinCode(allocator: *mem.Allocator, env_map: *std.BufMap, zig_exe: []const u8) ![]const u8 {
const result = try exec(allocator, env_map, []const []const u8.{
const result = try exec(allocator, env_map, []const []const u8{
zig_exe,
"builtin",
});

File diff suppressed because it is too large.


@ -61,7 +61,7 @@ fn cat_file(stdout: *os.File, file: *os.File) !void {
}
}
fn unwrapArg(arg: error![]u8) ![]u8 {
fn unwrapArg(arg: anyerror![]u8) ![]u8 {
return arg catch |err| {
warn("Unable to parse command line: {}\n", err);
return err;


@ -4,13 +4,13 @@ pub fn build(b: *Builder) void {
const obj = b.addObject("base64", "base64.zig");
const exe = b.addCExecutable("test");
exe.addCompileFlags([][]const u8.{"-std=c99"});
exe.addCompileFlags([][]const u8{"-std=c99"});
exe.addSourceFile("test.c");
exe.addObject(obj);
b.default_step.dependOn(&exe.step);
const run_cmd = b.addCommand(".", b.env_map, [][]const u8.{exe.getOutputPath()});
const run_cmd = b.addCommand(".", b.env_map, [][]const u8{exe.getOutputPath()});
run_cmd.step.dependOn(&exe.step);
const test_step = b.step("test", "Test the program");


@ -4,13 +4,13 @@ pub fn build(b: *Builder) void {
const lib = b.addSharedLibrary("mathtest", "mathtest.zig", b.version(1, 0, 0));
const exe = b.addCExecutable("test");
exe.addCompileFlags([][]const u8.{"-std=c99"});
exe.addCompileFlags([][]const u8{"-std=c99"});
exe.addSourceFile("test.c");
exe.linkLibrary(lib);
b.default_step.dependOn(&exe.step);
const run_cmd = b.addCommand(".", b.env_map, [][]const u8.{exe.getOutputPath()});
const run_cmd = b.addCommand(".", b.env_map, [][]const u8{exe.getOutputPath()});
run_cmd.step.dependOn(&exe.step);
const test_step = b.step("test", "Test the program");


@ -5,9 +5,7 @@ comptime {
@export("__mh_execute_header", _mh_execute_header, builtin.GlobalLinkage.Weak);
}
}
var _mh_execute_header = extern struct.{
x: usize,
}.{ .x = 0 };
var _mh_execute_header = extern struct {x: usize}{.x = 0};
export fn add(a: i32, b: i32) i32 {
return a + b;


@ -32,7 +32,7 @@ fn argInAllowedSet(maybe_set: ?[]const []const u8, arg: []const u8) bool {
// Modifies the current argument index during iteration
fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required: usize, allowed_set: ?[]const []const u8, index: *usize) !FlagArg {
switch (required) {
0 => return FlagArg.{ .None = undefined }, // TODO: Required to force non-tag but value?
0 => return FlagArg{ .None = undefined }, // TODO: Required to force non-tag but value?
1 => {
if (index.* + 1 >= args.len) {
return error.MissingFlagArguments;
@ -45,7 +45,7 @@ fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required:
return error.ArgumentNotInAllowedSet;
}
return FlagArg.{ .Single = arg };
return FlagArg{ .Single = arg };
},
else => |needed| {
var extra = ArrayList([]const u8).init(allocator);
@ -67,7 +67,7 @@ fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required:
try extra.append(arg);
}
return FlagArg.{ .Many = extra };
return FlagArg{ .Many = extra };
},
}
}
@ -75,12 +75,12 @@ fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required:
const HashMapFlags = HashMap([]const u8, FlagArg, std.hash.Fnv1a_32.hash, mem.eql_slice_u8);
// A store for querying found flags and positional arguments.
pub const Args = struct.{
pub const Args = struct {
flags: HashMapFlags,
positionals: ArrayList([]const u8),
pub fn parse(allocator: *Allocator, comptime spec: []const Flag, args: []const []const u8) !Args {
var parsed = Args.{
var parsed = Args{
.flags = HashMapFlags.init(allocator),
.positionals = ArrayList([]const u8).init(allocator),
};
@ -123,7 +123,7 @@ pub const Args = struct.{
FlagArg.Many => |inner| try prev.appendSlice(inner.toSliceConst()),
}
_ = try parsed.flags.put(flag_name_trimmed, FlagArg.{ .Many = prev });
_ = try parsed.flags.put(flag_name_trimmed, FlagArg{ .Many = prev });
} else {
_ = try parsed.flags.put(flag_name_trimmed, flag_args);
}
@ -177,20 +177,20 @@ pub const Args = struct.{
else => @panic("attempted to retrieve flag with wrong type"),
}
} else {
return []const []const u8.{};
return []const []const u8{};
}
}
};
// Arguments for a flag. e.g. arg1, arg2 in `--command arg1 arg2`.
const FlagArg = union(enum).{
const FlagArg = union(enum) {
None,
Single: []const u8,
Many: ArrayList([]const u8),
};
// Specification for how a flag should be parsed.
pub const Flag = struct.{
pub const Flag = struct {
name: []const u8,
required: usize,
mergable: bool,
@ -205,7 +205,7 @@ pub const Flag = struct.{
}
pub fn ArgN(comptime name: []const u8, comptime n: usize) Flag {
return Flag.{
return Flag{
.name = name,
.required = n,
.mergable = false,
@ -218,7 +218,7 @@ pub const Flag = struct.{
@compileError("n must be greater than 0");
}
return Flag.{
return Flag{
.name = name,
.required = n,
.mergable = true,
@ -227,7 +227,7 @@ pub const Flag = struct.{
}
pub fn Option(comptime name: []const u8, comptime set: []const []const u8) Flag {
return Flag.{
return Flag{
.name = name,
.required = 1,
.mergable = false,
@ -237,11 +237,11 @@ pub const Flag = struct.{
};
test "parse arguments" {
const spec1 = comptime []const Flag.{
const spec1 = comptime []const Flag{
Flag.Bool("--help"),
Flag.Bool("--init"),
Flag.Arg1("--build-file"),
Flag.Option("--color", []const []const u8.{
Flag.Option("--color", []const []const u8{
"on",
"off",
"auto",
@ -251,7 +251,7 @@ test "parse arguments" {
Flag.ArgN("--library", 1),
};
const cliargs = []const []const u8.{
const cliargs = []const []const u8{
"build",
"--help",
"pos1",


@ -1,10 +1,10 @@
pub const CInt = struct.{
pub const CInt = struct {
id: Id,
zig_name: []const u8,
c_name: []const u8,
is_signed: bool,
pub const Id = enum.{
pub const Id = enum {
Short,
UShort,
Int,
@ -15,50 +15,50 @@ pub const CInt = struct.{
ULongLong,
};
pub const list = []CInt.{
CInt.{
pub const list = []CInt{
CInt{
.id = Id.Short,
.zig_name = "c_short",
.c_name = "short",
.is_signed = true,
},
CInt.{
CInt{
.id = Id.UShort,
.zig_name = "c_ushort",
.c_name = "unsigned short",
.is_signed = false,
},
CInt.{
CInt{
.id = Id.Int,
.zig_name = "c_int",
.c_name = "int",
.is_signed = true,
},
CInt.{
CInt{
.id = Id.UInt,
.zig_name = "c_uint",
.c_name = "unsigned int",
.is_signed = false,
},
CInt.{
CInt{
.id = Id.Long,
.zig_name = "c_long",
.c_name = "long",
.is_signed = true,
},
CInt.{
CInt{
.id = Id.ULong,
.zig_name = "c_ulong",
.c_name = "unsigned long",
.is_signed = false,
},
CInt.{
CInt{
.id = Id.LongLong,
.zig_name = "c_longlong",
.c_name = "long long",
.is_signed = true,
},
CInt.{
CInt{
.id = Id.ULongLong,
.zig_name = "c_ulonglong",
.c_name = "unsigned long long",


@ -73,7 +73,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
!comp.strip,
) orelse return error.OutOfMemory;
var ofile = ObjectFile.{
var ofile = ObjectFile{
.comp = comp,
.module = module,
.builder = builder,
@ -135,7 +135,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
}
}
pub const ObjectFile = struct.{
pub const ObjectFile = struct {
comp: *Compilation,
module: llvm.ModuleRef,
builder: llvm.BuilderRef,


@ -35,7 +35,7 @@ const fs = event.fs;
const max_src_size = 2 * 1024 * 1024 * 1024; // 2 GiB
/// Data that is local to the event loop.
pub const ZigCompiler = struct.{
pub const ZigCompiler = struct {
loop: *event.Loop,
llvm_handle_pool: std.atomic.Stack(llvm.ContextRef),
lld_lock: event.Lock,
@ -57,7 +57,7 @@ pub const ZigCompiler = struct.{
try std.os.getRandomBytes(seed_bytes[0..]);
const seed = std.mem.readInt(seed_bytes, u64, builtin.Endian.Big);
return ZigCompiler.{
return ZigCompiler{
.loop = loop,
.lld_lock = event.Lock.init(loop),
.llvm_handle_pool = std.atomic.Stack(llvm.ContextRef).init(),
@ -78,18 +78,18 @@ pub const ZigCompiler = struct.{
/// Gets an exclusive handle on any LlvmContext.
/// Caller must release the handle when done.
pub fn getAnyLlvmContext(self: *ZigCompiler) !LlvmHandle {
if (self.llvm_handle_pool.pop()) |node| return LlvmHandle.{ .node = node };
if (self.llvm_handle_pool.pop()) |node| return LlvmHandle{ .node = node };
const context_ref = c.LLVMContextCreate() orelse return error.OutOfMemory;
errdefer c.LLVMContextDispose(context_ref);
const node = try self.loop.allocator.create(std.atomic.Stack(llvm.ContextRef).Node.{
const node = try self.loop.allocator.create(std.atomic.Stack(llvm.ContextRef).Node{
.next = undefined,
.data = context_ref,
});
errdefer self.loop.allocator.destroy(node);
return LlvmHandle.{ .node = node };
return LlvmHandle{ .node = node };
}
pub async fn getNativeLibC(self: *ZigCompiler) !*LibCInstallation {
@ -102,8 +102,8 @@ pub const ZigCompiler = struct.{
/// Must be called only once, ever. Sets global state.
pub fn setLlvmArgv(allocator: *Allocator, llvm_argv: []const []const u8) !void {
if (llvm_argv.len != 0) {
var c_compatible_args = try std.cstr.NullTerminated2DArray.fromSlices(allocator, [][]const []const u8.{
[][]const u8.{"zig (LLVM option parsing)"},
var c_compatible_args = try std.cstr.NullTerminated2DArray.fromSlices(allocator, [][]const []const u8{
[][]const u8{"zig (LLVM option parsing)"},
llvm_argv,
});
defer c_compatible_args.deinit();
@ -112,7 +112,7 @@ pub const ZigCompiler = struct.{
}
};
pub const LlvmHandle = struct.{
pub const LlvmHandle = struct {
node: *std.atomic.Stack(llvm.ContextRef).Node,
pub fn release(self: LlvmHandle, zig_compiler: *ZigCompiler) void {
@ -120,7 +120,7 @@ pub const LlvmHandle = struct.{
}
};
pub const Compilation = struct.{
pub const Compilation = struct {
zig_compiler: *ZigCompiler,
loop: *event.Loop,
name: Buffer,
@ -254,7 +254,7 @@ pub const Compilation = struct.{
const CompileErrList = std.ArrayList(*Msg);
// TODO handle some of these earlier and report them in a way other than error codes
pub const BuildError = error.{
pub const BuildError = error{
OutOfMemory,
EndOfStream,
IsDir,
@ -302,25 +302,25 @@ pub const Compilation = struct.{
BadPathName,
};
pub const Event = union(enum).{
pub const Event = union(enum) {
Ok,
Error: BuildError,
Fail: []*Msg,
};
pub const DarwinVersionMin = union(enum).{
pub const DarwinVersionMin = union(enum) {
None,
MacOS: []const u8,
Ios: []const u8,
};
pub const Kind = enum.{
pub const Kind = enum {
Exe,
Lib,
Obj,
};
pub const LinkLib = struct.{
pub const LinkLib = struct {
name: []const u8,
path: ?[]const u8,
@ -329,7 +329,7 @@ pub const Compilation = struct.{
provided_explicitly: bool,
};
pub const Emit = enum.{
pub const Emit = enum {
Binary,
Assembly,
LlvmIr,
@ -380,7 +380,7 @@ pub const Compilation = struct.{
}
const loop = zig_compiler.loop;
var comp = Compilation.{
var comp = Compilation{
.loop = loop,
.arena_allocator = std.heap.ArenaAllocator.init(loop.allocator),
.zig_compiler = zig_compiler,
@ -419,20 +419,20 @@ pub const Compilation = struct.{
.strip = false,
.is_static = is_static,
.linker_rdynamic = false,
.clang_argv = [][]const u8.{},
.lib_dirs = [][]const u8.{},
.rpath_list = [][]const u8.{},
.assembly_files = [][]const u8.{},
.link_objects = [][]const u8.{},
.clang_argv = [][]const u8{},
.lib_dirs = [][]const u8{},
.rpath_list = [][]const u8{},
.assembly_files = [][]const u8{},
.link_objects = [][]const u8{},
.fn_link_set = event.Locked(FnLinkSet).init(loop, FnLinkSet.init()),
.windows_subsystem_windows = false,
.windows_subsystem_console = false,
.link_libs_list = undefined,
.libc_link_lib = null,
.err_color = errmsg.Color.Auto,
.darwin_frameworks = [][]const u8.{},
.darwin_frameworks = [][]const u8{},
.darwin_version_min = DarwinVersionMin.None,
.test_filters = [][]const u8.{},
.test_filters = [][]const u8{},
.test_name_prefix = null,
.emit_file_type = Emit.Binary,
.link_out_file = null,
@ -575,7 +575,7 @@ pub const Compilation = struct.{
error.Overflow => return error.Overflow,
error.InvalidCharacter => unreachable, // we just checked the characters above
};
const int_type = try await (async Type.Int.get(comp, Type.Int.Key.{
const int_type = try await (async Type.Int.get(comp, Type.Int.Key{
.bit_count = bit_count,
.is_signed = is_signed,
}) catch unreachable);
@ -595,10 +595,10 @@ pub const Compilation = struct.{
}
fn initTypes(comp: *Compilation) !void {
comp.meta_type = try comp.arena().create(Type.MetaType.{
.base = Type.{
comp.meta_type = try comp.arena().create(Type.MetaType{
.base = Type{
.name = "type",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = undefined,
.ref_count = std.atomic.Int(usize).init(3), // 3 because it references itself twice
@ -612,10 +612,10 @@ pub const Compilation = struct.{
comp.meta_type.base.base.typ = &comp.meta_type.base;
assert((try comp.primitive_type_table.put(comp.meta_type.base.name, &comp.meta_type.base)) == null);
comp.void_type = try comp.arena().create(Type.Void.{
.base = Type.{
comp.void_type = try comp.arena().create(Type.Void{
.base = Type{
.name = "void",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -626,10 +626,10 @@ pub const Compilation = struct.{
});
assert((try comp.primitive_type_table.put(comp.void_type.base.name, &comp.void_type.base)) == null);
comp.noreturn_type = try comp.arena().create(Type.NoReturn.{
.base = Type.{
comp.noreturn_type = try comp.arena().create(Type.NoReturn{
.base = Type{
.name = "noreturn",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -640,10 +640,10 @@ pub const Compilation = struct.{
});
assert((try comp.primitive_type_table.put(comp.noreturn_type.base.name, &comp.noreturn_type.base)) == null);
comp.comptime_int_type = try comp.arena().create(Type.ComptimeInt.{
.base = Type.{
comp.comptime_int_type = try comp.arena().create(Type.ComptimeInt{
.base = Type{
.name = "comptime_int",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -654,10 +654,10 @@ pub const Compilation = struct.{
});
assert((try comp.primitive_type_table.put(comp.comptime_int_type.base.name, &comp.comptime_int_type.base)) == null);
comp.bool_type = try comp.arena().create(Type.Bool.{
.base = Type.{
comp.bool_type = try comp.arena().create(Type.Bool{
.base = Type{
.name = "bool",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -668,16 +668,16 @@ pub const Compilation = struct.{
});
assert((try comp.primitive_type_table.put(comp.bool_type.base.name, &comp.bool_type.base)) == null);
comp.void_value = try comp.arena().create(Value.Void.{
.base = Value.{
comp.void_value = try comp.arena().create(Value.Void{
.base = Value{
.id = Value.Id.Void,
.typ = &Type.Void.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
});
comp.true_value = try comp.arena().create(Value.Bool.{
.base = Value.{
comp.true_value = try comp.arena().create(Value.Bool{
.base = Value{
.id = Value.Id.Bool,
.typ = &Type.Bool.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -685,8 +685,8 @@ pub const Compilation = struct.{
.x = true,
});
comp.false_value = try comp.arena().create(Value.Bool.{
.base = Value.{
comp.false_value = try comp.arena().create(Value.Bool{
.base = Value{
.id = Value.Id.Bool,
.typ = &Type.Bool.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -694,8 +694,8 @@ pub const Compilation = struct.{
.x = false,
});
comp.noreturn_value = try comp.arena().create(Value.NoReturn.{
.base = Value.{
comp.noreturn_value = try comp.arena().create(Value.NoReturn{
.base = Value{
.id = Value.Id.NoReturn,
.typ = &Type.NoReturn.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -703,10 +703,10 @@ pub const Compilation = struct.{
});
for (CInt.list) |cint, i| {
const c_int_type = try comp.arena().create(Type.Int.{
.base = Type.{
const c_int_type = try comp.arena().create(Type.Int{
.base = Type{
.name = cint.zig_name,
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -714,7 +714,7 @@ pub const Compilation = struct.{
.id = builtin.TypeId.Int,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
},
.key = Type.Int.Key.{
.key = Type.Int.Key{
.is_signed = cint.is_signed,
.bit_count = comp.target.cIntTypeSizeInBits(cint.id),
},
@ -723,10 +723,10 @@ pub const Compilation = struct.{
comp.c_int_types[i] = c_int_type;
assert((try comp.primitive_type_table.put(cint.zig_name, &c_int_type.base)) == null);
}
comp.u8_type = try comp.arena().create(Type.Int.{
.base = Type.{
comp.u8_type = try comp.arena().create(Type.Int{
.base = Type{
.name = "u8",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -734,7 +734,7 @@ pub const Compilation = struct.{
.id = builtin.TypeId.Int,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
},
.key = Type.Int.Key.{
.key = Type.Int.Key{
.is_signed = false,
.bit_count = 8,
},
@ -777,13 +777,13 @@ pub const Compilation = struct.{
if (compile_errors.len == 0) {
await (async self.events.put(Event.Ok) catch unreachable);
} else {
await (async self.events.put(Event.{ .Fail = compile_errors }) catch unreachable);
await (async self.events.put(Event{ .Fail = compile_errors }) catch unreachable);
}
} else |err| {
// if there's an error then the compile errors have dangling references
self.gpa().free(compile_errors);
await (async self.events.put(Event.{ .Error = err }) catch unreachable);
await (async self.events.put(Event{ .Error = err }) catch unreachable);
}
// First, get an item from the watch channel, waiting on the channel.
@ -894,7 +894,7 @@ pub const Compilation = struct.{
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
const name = if (fn_proto.name_token) |name_token| tree_scope.tree.tokenSlice(name_token) else {
try self.addCompileError(tree_scope, Span.{
try self.addCompileError(tree_scope, Span{
.first = fn_proto.fn_token,
.last = fn_proto.fn_token + 1,
}, "missing function name");
@ -924,8 +924,8 @@ pub const Compilation = struct.{
}
} else {
// add new decl
const fn_decl = try self.gpa().create(Decl.Fn.{
.base = Decl.{
const fn_decl = try self.gpa().create(Decl.Fn{
.base = Decl{
.id = Decl.Id.Fn,
.name = name,
.visib = parseVisibToken(tree_scope.tree, fn_proto.visib_token),
@ -933,7 +933,7 @@ pub const Compilation = struct.{
.parent_scope = &decl_scope.base,
.tree_scope = tree_scope,
},
.value = Decl.Fn.Val.{ .Unresolved = {} },
.value = Decl.Fn.Val{ .Unresolved = {} },
.fn_proto = fn_proto,
});
tree_scope.base.ref();
@ -1139,7 +1139,7 @@ pub const Compilation = struct.{
}
}
const link_lib = try self.gpa().create(LinkLib.{
const link_lib = try self.gpa().create(LinkLib{
.name = name,
.path = null,
.provided_explicitly = provided_explicitly,
@ -1307,7 +1307,7 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
// The Decl.Fn owns the initial 1 reference count
const fn_val = try Value.Fn.create(comp, fn_type, fndef_scope, symbol_name);
fn_decl.value = Decl.Fn.Val.{ .Fn = fn_val };
fn_decl.value = Decl.Fn.Val{ .Fn = fn_val };
symbol_name_consumed = true;
// Define local parameter variables
@ -1315,7 +1315,7 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
//AstNode *param_decl_node = get_param_decl_node(fn_table_entry, i);
const param_decl = @fieldParentPtr(ast.Node.ParamDecl, "base", fn_decl.fn_proto.params.at(i).*);
const name_token = param_decl.name_token orelse {
try comp.addCompileError(tree_scope, Span.{
try comp.addCompileError(tree_scope, Span{
.first = param_decl.firstToken(),
.last = param_decl.type_node.firstToken(),
}, "missing parameter name");
@ -1402,17 +1402,17 @@ async fn analyzeFnType(
const param_node = param_node_ptr.*.cast(ast.Node.ParamDecl).?;
const param_type = try await (async comp.analyzeTypeExpr(tree_scope, scope, param_node.type_node) catch unreachable);
errdefer param_type.base.deref(comp);
try params.append(Type.Fn.Param.{
try params.append(Type.Fn.Param{
.typ = param_type,
.is_noalias = param_node.noalias_token != null,
});
}
}
const key = Type.Fn.Key.{
const key = Type.Fn.Key{
.alignment = null,
.data = Type.Fn.Key.Data.{
.Normal = Type.Fn.Key.Normal.{
.data = Type.Fn.Key.Data{
.Normal = Type.Fn.Key.Normal{
.return_type = return_type,
.params = params.toOwnedSlice(),
.is_var_args = false, // TODO
@ -1451,7 +1451,7 @@ async fn generateDeclFnProto(comp: *Compilation, fn_decl: *Decl.Fn) !void {
// The Decl.Fn owns the initial 1 reference count
const fn_proto_val = try Value.FnProto.create(comp, fn_type, symbol_name);
fn_decl.value = Decl.Fn.Val.{ .FnProto = fn_proto_val };
fn_decl.value = Decl.Fn.Val{ .FnProto = fn_proto_val };
symbol_name_consumed = true;
}


@ -10,7 +10,7 @@ const errmsg = @import("errmsg.zig");
const Scope = @import("scope.zig").Scope;
const Compilation = @import("compilation.zig").Compilation;
pub const Decl = struct.{
pub const Decl = struct {
id: Id,
name: []const u8,
visib: Visib,
@ -44,7 +44,7 @@ pub const Decl = struct.{
const fn_proto = fn_decl.fn_proto;
const start = fn_proto.fn_token;
const end = fn_proto.name_token orelse start;
return errmsg.Span.{
return errmsg.Span{
.first = start,
.last = end + 1,
};
@ -57,23 +57,23 @@ pub const Decl = struct.{
return base.parent_scope.findRoot();
}
pub const Id = enum.{
pub const Id = enum {
Var,
Fn,
CompTime,
};
pub const Var = struct.{
pub const Var = struct {
base: Decl,
};
pub const Fn = struct.{
pub const Fn = struct {
base: Decl,
value: Val,
fn_proto: *ast.Node.FnProto,
// TODO https://github.com/ziglang/zig/issues/683 and then make this anonymous
pub const Val = union(enum).{
pub const Val = union(enum) {
Unresolved: void,
Fn: *Value.Fn,
FnProto: *Value.FnProto,
@ -99,7 +99,7 @@ pub const Decl = struct.{
}
};
pub const CompTime = struct.{
pub const CompTime = struct {
base: Decl,
};
};


@ -7,55 +7,55 @@ const TokenIndex = std.zig.ast.TokenIndex;
const Compilation = @import("compilation.zig").Compilation;
const Scope = @import("scope.zig").Scope;
pub const Color = enum.{
pub const Color = enum {
Auto,
Off,
On,
};
pub const Span = struct.{
pub const Span = struct {
first: ast.TokenIndex,
last: ast.TokenIndex,
pub fn token(i: TokenIndex) Span {
return Span.{
return Span{
.first = i,
.last = i,
};
}
pub fn node(n: *ast.Node) Span {
return Span.{
return Span{
.first = n.firstToken(),
.last = n.lastToken(),
};
}
};
pub const Msg = struct.{
pub const Msg = struct {
text: []u8,
realpath: []u8,
data: Data,
const Data = union(enum).{
const Data = union(enum) {
Cli: Cli,
PathAndTree: PathAndTree,
ScopeAndComp: ScopeAndComp,
};
const PathAndTree = struct.{
const PathAndTree = struct {
span: Span,
tree: *ast.Tree,
allocator: *mem.Allocator,
};
const ScopeAndComp = struct.{
const ScopeAndComp = struct {
span: Span,
tree_scope: *Scope.AstTree,
compilation: *Compilation,
};
const Cli = struct.{
const Cli = struct {
allocator: *mem.Allocator,
};
@ -118,11 +118,11 @@ pub const Msg = struct.{
const realpath = try mem.dupe(comp.gpa(), u8, tree_scope.root().realpath);
errdefer comp.gpa().free(realpath);
const msg = try comp.gpa().create(Msg.{
const msg = try comp.gpa().create(Msg{
.text = text,
.realpath = realpath,
.data = Data.{
.ScopeAndComp = ScopeAndComp.{
.data = Data{
.ScopeAndComp = ScopeAndComp{
.tree_scope = tree_scope,
.compilation = comp,
.span = span,
@ -139,11 +139,11 @@ pub const Msg = struct.{
const realpath_copy = try mem.dupe(comp.gpa(), u8, realpath);
errdefer comp.gpa().free(realpath_copy);
const msg = try comp.gpa().create(Msg.{
const msg = try comp.gpa().create(Msg{
.text = text,
.realpath = realpath_copy,
.data = Data.{
.Cli = Cli.{ .allocator = comp.gpa() },
.data = Data{
.Cli = Cli{ .allocator = comp.gpa() },
},
});
return msg;
@ -164,14 +164,14 @@ pub const Msg = struct.{
var out_stream = &std.io.BufferOutStream.init(&text_buf).stream;
try parse_error.render(&tree_scope.tree.tokens, out_stream);
const msg = try comp.gpa().create(Msg.{
const msg = try comp.gpa().create(Msg{
.text = undefined,
.realpath = realpath_copy,
.data = Data.{
.ScopeAndComp = ScopeAndComp.{
.data = Data{
.ScopeAndComp = ScopeAndComp{
.tree_scope = tree_scope,
.compilation = comp,
.span = Span.{
.span = Span{
.first = loc_token,
.last = loc_token,
},
@ -203,14 +203,14 @@ pub const Msg = struct.{
var out_stream = &std.io.BufferOutStream.init(&text_buf).stream;
try parse_error.render(&tree.tokens, out_stream);
const msg = try allocator.create(Msg.{
const msg = try allocator.create(Msg{
.text = undefined,
.realpath = realpath_copy,
.data = Data.{
.PathAndTree = PathAndTree.{
.data = Data{
.PathAndTree = PathAndTree{
.allocator = allocator,
.tree = tree,
.span = Span.{
.span = Span{
.first = loc_token,
.last = loc_token,
},


@ -15,17 +15,17 @@ const ObjectFile = codegen.ObjectFile;
const Decl = @import("decl.zig").Decl;
const mem = std.mem;
pub const LVal = enum.{
pub const LVal = enum {
None,
Ptr,
};
pub const IrVal = union(enum).{
pub const IrVal = union(enum) {
Unknown,
KnownType: *Type,
KnownValue: *Value,
const Init = enum.{
const Init = enum {
Unknown,
NoReturn,
Void,
@ -48,7 +48,7 @@ pub const IrVal = union(enum).{
}
};
pub const Inst = struct.{
pub const Inst = struct {
id: Id,
scope: *Scope,
debug_id: usize,
@ -129,7 +129,7 @@ pub const Inst = struct.{
}
}
pub fn render(base: *Inst, ofile: *ObjectFile, fn_val: *Value.Fn) (error.{OutOfMemory}!?llvm.ValueRef) {
pub fn render(base: *Inst, ofile: *ObjectFile, fn_val: *Value.Fn) (error{OutOfMemory}!?llvm.ValueRef) {
switch (base.id) {
Id.Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val),
Id.Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val),
@ -242,7 +242,7 @@ pub const Inst = struct.{
parent.child = self;
}
pub const Id = enum.{
pub const Id = enum {
Return,
Const,
Ref,
@ -258,11 +258,11 @@ pub const Inst = struct.{
LoadPtr,
};
pub const Call = struct.{
pub const Call = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
fn_ref: *Inst,
args: []*Inst,
};
@ -305,11 +305,11 @@ pub const Inst = struct.{
for (self.params.args) |arg, i| {
args[i] = try arg.getAsParam();
}
const new_inst = try ira.irb.build(Call, self.base.scope, self.base.span, Params.{
const new_inst = try ira.irb.build(Call, self.base.scope, self.base.span, Params{
.fn_ref = fn_ref,
.args = args,
});
new_inst.val = IrVal.{ .KnownType = fn_type.key.data.Normal.return_type };
new_inst.val = IrVal{ .KnownType = fn_type.key.data.Normal.return_type };
return new_inst;
}
@ -336,11 +336,11 @@ pub const Inst = struct.{
}
};
pub const Const = struct.{
pub const Const = struct {
base: Inst,
params: Params,
const Params = struct.{};
const Params = struct {};
// Use Builder.buildConst* methods, or, after building a Const instruction,
// manually set the ir_val field.
@ -355,8 +355,8 @@ pub const Inst = struct.{
}
pub fn analyze(self: *const Const, ira: *Analyze) !*Inst {
const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params.{});
new_inst.val = IrVal.{ .KnownValue = self.base.val.KnownValue.getRef() };
const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params{});
new_inst.val = IrVal{ .KnownValue = self.base.val.KnownValue.getRef() };
return new_inst;
}
@ -365,11 +365,11 @@ pub const Inst = struct.{
}
};
pub const Return = struct.{
pub const Return = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
return_value: *Inst,
};
@ -389,7 +389,7 @@ pub const Inst = struct.{
// TODO detect returning local variable address
return ira.irb.build(Return, self.base.scope, self.base.span, Params.{ .return_value = casted_value });
return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
}
pub fn render(self: *Return, ofile: *ObjectFile, fn_val: *Value.Fn) !?llvm.ValueRef {
@ -405,11 +405,11 @@ pub const Inst = struct.{
}
};
pub const Ref = struct.{
pub const Ref = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
target: *Inst,
mut: Type.Pointer.Mut,
volatility: Type.Pointer.Vol,
@ -435,13 +435,13 @@ pub const Inst = struct.{
);
}
const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params.{
const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params{
.target = target,
.mut = self.params.mut,
.volatility = self.params.volatility,
});
const elem_type = target.getKnownType();
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key.{
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = elem_type,
.mut = self.params.mut,
.vol = self.params.volatility,
@ -450,17 +450,17 @@ pub const Inst = struct.{
}) catch unreachable);
// TODO: potentially set the hint that this is a stack pointer. But it might not be - this
// could be a ref of a global, for example
new_inst.val = IrVal.{ .KnownType = &ptr_type.base };
new_inst.val = IrVal{ .KnownType = &ptr_type.base };
// TODO potentially add an alloca entry here
return new_inst;
}
};
pub const DeclRef = struct.{
pub const DeclRef = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
decl: *Decl,
lval: LVal,
};
@ -499,11 +499,11 @@ pub const Inst = struct.{
}
};
pub const VarPtr = struct.{
pub const VarPtr = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
var_scope: *Scope.Var,
};
@ -525,16 +525,16 @@ pub const Inst = struct.{
Inst.VarPtr,
self.base.scope,
self.base.span,
Inst.VarPtr.Params.{ .var_scope = self.params.var_scope },
Inst.VarPtr.Params{ .var_scope = self.params.var_scope },
);
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key.{
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = param.typ,
.mut = Type.Pointer.Mut.Const,
.vol = Type.Pointer.Vol.Non,
.size = Type.Pointer.Size.One,
.alignment = Type.Pointer.Align.Abi,
}) catch unreachable);
new_inst.val = IrVal.{ .KnownType = &ptr_type.base };
new_inst.val = IrVal{ .KnownType = &ptr_type.base };
return new_inst;
},
}
@ -548,11 +548,11 @@ pub const Inst = struct.{
}
};
pub const LoadPtr = struct.{
pub const LoadPtr = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
target: *Inst,
};
@ -590,9 +590,9 @@ pub const Inst = struct.{
Inst.LoadPtr,
self.base.scope,
self.base.span,
Inst.LoadPtr.Params.{ .target = target },
Inst.LoadPtr.Params{ .target = target },
);
new_inst.val = IrVal.{ .KnownType = ptr_type.key.child_type };
new_inst.val = IrVal{ .KnownType = ptr_type.key.child_type };
return new_inst;
}
@ -626,11 +626,11 @@ pub const Inst = struct.{
}
};
pub const PtrType = struct.{
pub const PtrType = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
child_type: *Inst,
mut: Type.Pointer.Mut,
vol: Type.Pointer.Vol,
@ -657,11 +657,11 @@ pub const Inst = struct.{
// }
const alignment = if (self.params.alignment) |align_inst| blk: {
const amt = try align_inst.getAsConstAlign(ira);
break :blk Type.Pointer.Align.{ .Override = amt };
break :blk Type.Pointer.Align{ .Override = amt };
} else blk: {
break :blk Type.Pointer.Align.{ .Abi = {} };
break :blk Type.Pointer.Align{ .Abi = {} };
};
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key.{
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = child_type,
.mut = self.params.mut,
.vol = self.params.vol,
@ -674,11 +674,11 @@ pub const Inst = struct.{
}
};
pub const DeclVar = struct.{
pub const DeclVar = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
variable: *Variable,
};
@ -695,11 +695,11 @@ pub const Inst = struct.{
}
};
pub const CheckVoidStmt = struct.{
pub const CheckVoidStmt = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
target: *Inst,
};
@ -723,11 +723,11 @@ pub const Inst = struct.{
}
};
pub const Phi = struct.{
pub const Phi = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
incoming_blocks: []*BasicBlock,
incoming_values: []*Inst,
};
@ -745,11 +745,11 @@ pub const Inst = struct.{
}
};
pub const Br = struct.{
pub const Br = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
dest_block: *BasicBlock,
is_comptime: *Inst,
};
@ -767,11 +767,11 @@ pub const Inst = struct.{
}
};
pub const CondBr = struct.{
pub const CondBr = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
condition: *Inst,
then_block: *BasicBlock,
else_block: *BasicBlock,
@ -791,11 +791,11 @@ pub const Inst = struct.{
}
};
pub const AddImplicitReturnType = struct.{
pub const AddImplicitReturnType = struct {
base: Inst,
params: Params,
pub const Params = struct.{
pub const Params = struct {
target: *Inst,
};
@ -816,11 +816,11 @@ pub const Inst = struct.{
}
};
pub const TestErr = struct.{
pub const TestErr = struct {
base: Inst,
params: Params,
pub const Params = struct.{
pub const Params = struct {
target: *Inst,
};
@ -878,11 +878,11 @@ pub const Inst = struct.{
}
};
pub const TestCompTime = struct.{
pub const TestCompTime = struct {
base: Inst,
params: Params,
pub const Params = struct.{
pub const Params = struct {
target: *Inst,
};
@ -902,11 +902,11 @@ pub const Inst = struct.{
}
};
pub const SaveErrRetAddr = struct.{
pub const SaveErrRetAddr = struct {
base: Inst,
params: Params,
const Params = struct.{};
const Params = struct {};
const ir_val_init = IrVal.Init.Unknown;
@ -917,16 +917,16 @@ pub const Inst = struct.{
}
pub fn analyze(self: *const SaveErrRetAddr, ira: *Analyze) !*Inst {
return ira.irb.build(Inst.SaveErrRetAddr, self.base.scope, self.base.span, Params.{});
return ira.irb.build(Inst.SaveErrRetAddr, self.base.scope, self.base.span, Params{});
}
};
};
pub const Variable = struct.{
pub const Variable = struct {
child_scope: *Scope,
};
pub const BasicBlock = struct.{
pub const BasicBlock = struct {
ref_count: usize,
name_hint: [*]const u8, // must be a C string literal
debug_id: usize,
@ -957,7 +957,7 @@ pub const BasicBlock = struct.{
};
/// Stuff that survives longer than Builder
pub const Code = struct.{
pub const Code = struct {
basic_block_list: std.ArrayList(*BasicBlock),
arena: std.heap.ArenaAllocator,
return_type: ?*Type,
@ -1009,7 +1009,7 @@ pub const Code = struct.{
}
};
pub const Builder = struct.{
pub const Builder = struct {
comp: *Compilation,
code: *Code,
current_basic_block: *BasicBlock,
@ -1021,7 +1021,7 @@ pub const Builder = struct.{
pub const Error = Analyze.Error;
pub fn init(comp: *Compilation, tree_scope: *Scope.AstTree, begin_scope: ?*Scope) !Builder {
const code = try comp.gpa().create(Code.{
const code = try comp.gpa().create(Code{
.basic_block_list = undefined,
.arena = std.heap.ArenaAllocator.init(comp.gpa()),
.return_type = null,
@ -1030,7 +1030,7 @@ pub const Builder = struct.{
code.basic_block_list = std.ArrayList(*BasicBlock).init(&code.arena.allocator);
errdefer code.destroy(comp.gpa());
return Builder.{
return Builder{
.comp = comp,
.current_basic_block = undefined,
.code = code,
@ -1052,7 +1052,7 @@ pub const Builder = struct.{
/// No need to clean up resources thanks to the arena allocator.
pub fn createBasicBlock(self: *Builder, scope: *Scope, name_hint: [*]const u8) !*BasicBlock {
const basic_block = try self.arena().create(BasicBlock.{
const basic_block = try self.arena().create(BasicBlock{
.ref_count = 0,
.name_hint = name_hint,
.debug_id = self.next_debug_id,
@ -1208,7 +1208,7 @@ pub const Builder = struct.{
// }
//}
return irb.build(Inst.Call, scope, Span.token(suffix_op.rtoken), Inst.Call.Params.{
return irb.build(Inst.Call, scope, Span.token(suffix_op.rtoken), Inst.Call.Params{
.fn_ref = fn_ref,
.args = args,
});
@ -1272,7 +1272,7 @@ pub const Builder = struct.{
// return irb->codegen->invalid_instruction;
//}
return irb.build(Inst.PtrType, scope, Span.node(&prefix_op.base), Inst.PtrType.Params.{
return irb.build(Inst.PtrType, scope, Span.node(&prefix_op.base), Inst.PtrType.Params{
.child_type = child_type,
.mut = Type.Pointer.Mut.Mut,
.vol = Type.Pointer.Vol.Non,
@ -1336,8 +1336,8 @@ pub const Builder = struct.{
};
errdefer int_val.base.deref(irb.comp);
const inst = try irb.build(Inst.Const, scope, Span.token(int_lit.token), Inst.Const.Params.{});
inst.val = IrVal.{ .KnownValue = &int_val.base };
const inst = try irb.build(Inst.Const, scope, Span.token(int_lit.token), Inst.Const.Params{});
inst.val = IrVal{ .KnownValue = &int_val.base };
return inst;
}
@ -1455,11 +1455,11 @@ pub const Builder = struct.{
_ = irb.build(
Inst.CheckVoidStmt,
child_scope,
Span.{
Span{
.first = statement_node.firstToken(),
.last = statement_node.lastToken(),
},
Inst.CheckVoidStmt.Params.{ .target = statement_value },
Inst.CheckVoidStmt.Params{ .target = statement_value },
);
}
}
@ -1471,7 +1471,7 @@ pub const Builder = struct.{
}
try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params.{
return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params{
.incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
.incoming_values = block_scope.incoming_values.toOwnedSlice(),
});
@ -1484,14 +1484,14 @@ pub const Builder = struct.{
);
_ = try await (async irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
_ = try irb.buildGen(Inst.Br, parent_scope, Span.token(block.rbrace), Inst.Br.Params.{
_ = try irb.buildGen(Inst.Br, parent_scope, Span.token(block.rbrace), Inst.Br.Params{
.dest_block = block_scope.end_block,
.is_comptime = block_scope.is_comptime,
});
try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params.{
return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params{
.incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
.incoming_values = block_scope.incoming_values.toOwnedSlice(),
});
@ -1553,12 +1553,12 @@ pub const Builder = struct.{
Inst.TestErr,
scope,
src_span,
Inst.TestErr.Params.{ .target = return_value },
Inst.TestErr.Params{ .target = return_value },
);
const err_is_comptime = try irb.buildTestCompTime(scope, src_span, is_err);
_ = try irb.buildGen(Inst.CondBr, scope, src_span, Inst.CondBr.Params.{
_ = try irb.buildGen(Inst.CondBr, scope, src_span, Inst.CondBr.Params{
.condition = is_err,
.then_block = err_block,
.else_block = ok_block,
@ -1572,9 +1572,9 @@ pub const Builder = struct.{
_ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ErrorExit) catch unreachable);
}
if (irb.comp.have_err_ret_tracing and !irb.isCompTime(scope)) {
_ = try irb.build(Inst.SaveErrRetAddr, scope, src_span, Inst.SaveErrRetAddr.Params.{});
_ = try irb.build(Inst.SaveErrRetAddr, scope, src_span, Inst.SaveErrRetAddr.Params{});
}
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params.{
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
.dest_block = ret_stmt_block,
.is_comptime = err_is_comptime,
});
@ -1583,7 +1583,7 @@ pub const Builder = struct.{
if (have_err_defers) {
_ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
}
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params.{
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
.dest_block = ret_stmt_block,
.is_comptime = err_is_comptime,
});
@ -1631,17 +1631,17 @@ pub const Builder = struct.{
switch (await (async irb.findIdent(scope, name) catch unreachable)) {
Ident.Decl => |decl| {
return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params.{
return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
.decl = decl,
.lval = lval,
});
},
Ident.VarScope => |var_scope| {
const var_ptr = try irb.build(Inst.VarPtr, scope, src_span, Inst.VarPtr.Params.{ .var_scope = var_scope });
const var_ptr = try irb.build(Inst.VarPtr, scope, src_span, Inst.VarPtr.Params{ .var_scope = var_scope });
switch (lval) {
LVal.Ptr => return var_ptr,
LVal.None => {
return irb.build(Inst.LoadPtr, scope, src_span, Inst.LoadPtr.Params.{ .target = var_ptr });
return irb.build(Inst.LoadPtr, scope, src_span, Inst.LoadPtr.Params{ .target = var_ptr });
},
}
},
@ -1661,13 +1661,13 @@ pub const Builder = struct.{
return error.SemanticAnalysisFailed;
}
const DeferCounts = struct.{
const DeferCounts = struct {
scope_exit: usize,
error_exit: usize,
};
fn countDefers(irb: *Builder, inner_scope: *Scope, outer_scope: *Scope) DeferCounts {
var result = DeferCounts.{ .scope_exit = 0, .error_exit = 0 };
var result = DeferCounts{ .scope_exit = 0, .error_exit = 0 };
var scope = inner_scope;
while (scope != outer_scope) {
@ -1726,7 +1726,7 @@ pub const Builder = struct.{
Inst.CheckVoidStmt,
&defer_expr_scope.base,
Span.token(defer_expr_scope.expr_node.lastToken()),
Inst.CheckVoidStmt.Params.{ .target = instruction },
Inst.CheckVoidStmt.Params{ .target = instruction },
);
}
}
@ -1753,7 +1753,7 @@ pub const Builder = struct.{
LVal.Ptr => {
// We needed a pointer to a value, but we got a value. So we create
// an instruction which just makes a const pointer of it.
return irb.build(Inst.Ref, scope, instruction.span, Inst.Ref.Params.{
return irb.build(Inst.Ref, scope, instruction.span, Inst.Ref.Params{
.target = instruction,
.mut = Type.Pointer.Mut.Const,
.volatility = Type.Pointer.Vol.Non,
@ -1774,16 +1774,16 @@ pub const Builder = struct.{
params: I.Params,
is_generated: bool,
) !*Inst {
const inst = try self.arena().create(I.{
.base = Inst.{
const inst = try self.arena().create(I{
.base = Inst{
.id = Inst.typeToId(I),
.is_generated = is_generated,
.scope = scope,
.debug_id = self.next_debug_id,
.val = switch (I.ir_val_init) {
IrVal.Init.Unknown => IrVal.Unknown,
IrVal.Init.NoReturn => IrVal.{ .KnownValue = &Value.NoReturn.get(self.comp).base },
IrVal.Init.Void => IrVal.{ .KnownValue = &Value.Void.get(self.comp).base },
IrVal.Init.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.comp).base },
IrVal.Init.Void => IrVal{ .KnownValue = &Value.Void.get(self.comp).base },
},
.ref_count = 0,
.span = span,
@ -1852,20 +1852,20 @@ pub const Builder = struct.{
}
fn buildConstBool(self: *Builder, scope: *Scope, span: Span, x: bool) !*Inst {
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params.{});
inst.val = IrVal.{ .KnownValue = &Value.Bool.get(self.comp, x).base };
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
inst.val = IrVal{ .KnownValue = &Value.Bool.get(self.comp, x).base };
return inst;
}
fn buildConstVoid(self: *Builder, scope: *Scope, span: Span, is_generated: bool) !*Inst {
const inst = try self.buildExtra(Inst.Const, scope, span, Inst.Const.Params.{}, is_generated);
inst.val = IrVal.{ .KnownValue = &Value.Void.get(self.comp).base };
const inst = try self.buildExtra(Inst.Const, scope, span, Inst.Const.Params{}, is_generated);
inst.val = IrVal{ .KnownValue = &Value.Void.get(self.comp).base };
return inst;
}
fn buildConstValue(self: *Builder, scope: *Scope, span: Span, v: *Value) !*Inst {
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params.{});
inst.val = IrVal.{ .KnownValue = v.getRef() };
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
inst.val = IrVal{ .KnownValue = v.getRef() };
return inst;
}
@ -1879,7 +1879,7 @@ pub const Builder = struct.{
Inst.TestCompTime,
scope,
span,
Inst.TestCompTime.Params.{ .target = target },
Inst.TestCompTime.Params{ .target = target },
);
}
}
@ -1889,7 +1889,7 @@ pub const Builder = struct.{
Inst.AddImplicitReturnType,
scope,
span,
Inst.AddImplicitReturnType.Params.{ .target = result },
Inst.AddImplicitReturnType.Params{ .target = result },
);
if (!irb.is_async) {
@ -1897,7 +1897,7 @@ pub const Builder = struct.{
Inst.Return,
scope,
span,
Inst.Return.Params.{ .return_value = result },
Inst.Return.Params{ .return_value = result },
is_gen,
);
}
@ -1919,7 +1919,7 @@ pub const Builder = struct.{
//// the above blocks are rendered by ir_gen after the rest of codegen
}
const Ident = union(enum).{
const Ident = union(enum) {
NotFound,
Decl: *Decl,
VarScope: *Scope.Var,
@ -1935,13 +1935,13 @@ pub const Builder = struct.{
const locked_table = await (async decls.table.acquireRead() catch unreachable);
defer locked_table.release();
if (locked_table.value.get(name)) |entry| {
return Ident.{ .Decl = entry.value };
return Ident{ .Decl = entry.value };
}
},
Scope.Id.Var => {
const var_scope = @fieldParentPtr(Scope.Var, "base", s);
if (mem.eql(u8, var_scope.name, name)) {
return Ident.{ .VarScope = var_scope };
return Ident{ .VarScope = var_scope };
}
},
else => {},
@ -1951,7 +1951,7 @@ pub const Builder = struct.{
}
};
const Analyze = struct.{
const Analyze = struct {
irb: Builder,
old_bb_index: usize,
const_predecessor_bb: ?*BasicBlock,
@ -1960,7 +1960,7 @@ const Analyze = struct.{
src_implicit_return_type_list: std.ArrayList(*Inst),
explicit_return_type: ?*Type,
pub const Error = error.{
pub const Error = error{
/// This is only for when we have already reported a compile error. It is the poison value.
SemanticAnalysisFailed,
@ -1975,7 +1975,7 @@ const Analyze = struct.{
var irb = try Builder.init(comp, tree_scope, null);
errdefer irb.abort();
return Analyze.{
return Analyze{
.irb = irb,
.old_bb_index = 0,
.const_predecessor_bb = null,
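For illustration, a minimal sketch of the initializer change applied throughout this file, under the grammar introduced by this commit; the Point type below is hypothetical and not taken from ir.zig:

    const Point = struct {
        x: i32,
        y: i32,
    };

    fn origin() Point {
        // previously written: return Point.{ .x = 0, .y = 0 };
        return Point{ .x = 0, .y = 0 };
    }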

View File

@ -5,7 +5,7 @@ const Target = @import("target.zig").Target;
const c = @import("c.zig");
/// See the render function implementation for documentation of the fields.
pub const LibCInstallation = struct.{
pub const LibCInstallation = struct {
include_dir: []const u8,
lib_dir: ?[]const u8,
static_lib_dir: ?[]const u8,
@ -13,7 +13,7 @@ pub const LibCInstallation = struct.{
kernel32_lib_dir: ?[]const u8,
dynamic_linker_path: ?[]const u8,
pub const FindError = error.{
pub const FindError = error{
OutOfMemory,
FileSystem,
UnableToSpawnCCompiler,
@ -34,7 +34,7 @@ pub const LibCInstallation = struct.{
) !void {
self.initEmpty();
const keys = []const []const u8.{
const keys = []const []const u8{
"include_dir",
"lib_dir",
"static_lib_dir",
@ -42,11 +42,11 @@ pub const LibCInstallation = struct.{
"kernel32_lib_dir",
"dynamic_linker_path",
};
const FoundKey = struct.{
const FoundKey = struct {
found: bool,
allocated: ?[]u8,
};
var found_keys = [1]FoundKey.{FoundKey.{ .found = false, .allocated = null }} ** keys.len;
var found_keys = [1]FoundKey{FoundKey{ .found = false, .allocated = null }} ** keys.len;
errdefer {
self.initEmpty();
for (found_keys) |found_key| {
@ -182,7 +182,7 @@ pub const LibCInstallation = struct.{
async fn findNativeIncludeDirLinux(self: *LibCInstallation, loop: *event.Loop) !void {
const cc_exe = std.os.getEnvPosix("CC") orelse "cc";
const argv = []const []const u8.{
const argv = []const []const u8{
cc_exe,
"-E",
"-Wp,-v",
@ -302,12 +302,12 @@ pub const LibCInstallation = struct.{
}
async fn findNativeDynamicLinker(self: *LibCInstallation, loop: *event.Loop) FindError!void {
var dyn_tests = []DynTest.{
DynTest.{
var dyn_tests = []DynTest{
DynTest{
.name = "ld-linux-x86-64.so.2",
.result = null,
},
DynTest.{
DynTest{
.name = "ld-musl-x86_64.so.1",
.result = null,
},
@ -326,7 +326,7 @@ pub const LibCInstallation = struct.{
}
}
const DynTest = struct.{
const DynTest = struct {
name: []const u8,
result: ?[]const u8,
};
@ -369,7 +369,7 @@ pub const LibCInstallation = struct.{
}
fn initEmpty(self: *LibCInstallation) void {
self.* = LibCInstallation.{
self.* = LibCInstallation{
.include_dir = ([*]const u8)(undefined)[0..0],
.lib_dir = null,
.static_lib_dir = null,
@ -385,7 +385,7 @@ async fn ccPrintFileName(loop: *event.Loop, o_file: []const u8, want_dirname: bo
const cc_exe = std.os.getEnvPosix("CC") orelse "cc";
const arg1 = try std.fmt.allocPrint(loop.allocator, "-print-file-name={}", o_file);
defer loop.allocator.free(arg1);
const argv = []const []const u8.{ cc_exe, arg1 };
const argv = []const []const u8{ cc_exe, arg1 };
// TODO This simulates evented I/O for the child process exec
await (async loop.yield() catch unreachable);
@ -421,7 +421,7 @@ async fn ccPrintFileName(loop: *event.Loop, o_file: []const u8, want_dirname: bo
}
}
const Search = struct.{
const Search = struct {
path: []const u8,
version: []const u8,
};
@ -430,7 +430,7 @@ fn fillSearch(search_buf: *[2]Search, sdk: *c.ZigWindowsSDK) []Search {
var search_end: usize = 0;
if (sdk.path10_ptr) |path10_ptr| {
if (sdk.version10_ptr) |ver10_ptr| {
search_buf[search_end] = Search.{
search_buf[search_end] = Search{
.path = path10_ptr[0..sdk.path10_len],
.version = ver10_ptr[0..sdk.version10_len],
};
@ -439,7 +439,7 @@ fn fillSearch(search_buf: *[2]Search, sdk: *c.ZigWindowsSDK) []Search {
}
if (sdk.path81_ptr) |path81_ptr| {
if (sdk.version81_ptr) |ver81_ptr| {
search_buf[search_end] = Search.{
search_buf[search_end] = Search{
.path = path81_ptr[0..sdk.path81_len],
.version = ver81_ptr[0..sdk.version81_len],
};
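A hedged sketch of the array-literal form used in this file after the change; the argv contents below are illustrative only. The element type still precedes the brace; only the dot between them is gone.

    // previously written: []const []const u8.{ cc_exe, "-E", "-Wp,-v" }
    const argv = []const []const u8{ "cc", "-E", "-Wp,-v" };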

View File

@ -8,13 +8,13 @@ const Target = @import("target.zig").Target;
const LibCInstallation = @import("libc_installation.zig").LibCInstallation;
const assert = std.debug.assert;
const Context = struct.{
const Context = struct {
comp: *Compilation,
arena: std.heap.ArenaAllocator,
args: std.ArrayList([*]const u8),
link_in_crt: bool,
link_err: error.{OutOfMemory}!void,
link_err: error{OutOfMemory}!void,
link_msg: std.Buffer,
libc: *LibCInstallation,
@ -22,7 +22,7 @@ const Context = struct.{
};
pub async fn link(comp: *Compilation) !void {
var ctx = Context.{
var ctx = Context{
.comp = comp,
.arena = std.heap.ArenaAllocator.init(comp.gpa()),
.args = undefined,
@ -648,13 +648,13 @@ fn addFnObjects(ctx: *Context) !void {
}
}
const DarwinPlatform = struct.{
const DarwinPlatform = struct {
kind: Kind,
major: u32,
minor: u32,
micro: u32,
const Kind = enum.{
const Kind = enum {
MacOS,
IPhoneOS,
IPhoneOSSimulator,
@ -726,7 +726,7 @@ fn darwinGetReleaseVersion(str: []const u8, major: *u32, minor: *u32, micro: *u3
return error.InvalidDarwinVersionString;
var start_pos: usize = 0;
for ([]*u32.{ major, minor, micro }) |v| {
for ([]*u32{ major, minor, micro }) |v| {
const dot_pos = mem.indexOfScalarPos(u8, str, start_pos, '.');
const end_pos = dot_pos orelse str.len;
v.* = std.fmt.parseUnsigned(u32, str[start_pos..end_pos], 10) catch return error.InvalidDarwinVersionString;
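A minimal sketch of the for-over-array-literal pattern from darwinGetReleaseVersion above, with hypothetical parameters standing in for the real ones:

    fn zeroAll(major: *u32, minor: *u32, micro: *u32) void {
        // previously written: for ([]*u32.{ major, minor, micro }) |v| { ... }
        for ([]*u32{ major, minor, micro }) |v| {
            v.* = 0;
        }
    }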

View File

@ -183,7 +183,7 @@ pub const X86StdcallCallConv = c.LLVMX86StdcallCallConv;
pub const X86FastcallCallConv = c.LLVMX86FastcallCallConv;
pub const CallConv = c.LLVMCallConv;
pub const FnInline = extern enum.{
pub const FnInline = extern enum {
Auto,
Always,
Never,

View File

@ -43,9 +43,9 @@ const usage =
\\
;
const Command = struct.{
const Command = struct {
name: []const u8,
exec: fn (*Allocator, []const []const u8) error!void,
exec: fn (*Allocator, []const []const u8) anyerror!void,
};
pub fn main() !void {
@ -72,46 +72,46 @@ pub fn main() !void {
os.exit(1);
}
const commands = []Command.{
Command.{
const commands = []Command{
Command{
.name = "build-exe",
.exec = cmdBuildExe,
},
Command.{
Command{
.name = "build-lib",
.exec = cmdBuildLib,
},
Command.{
Command{
.name = "build-obj",
.exec = cmdBuildObj,
},
Command.{
Command{
.name = "fmt",
.exec = cmdFmt,
},
Command.{
Command{
.name = "libc",
.exec = cmdLibC,
},
Command.{
Command{
.name = "targets",
.exec = cmdTargets,
},
Command.{
Command{
.name = "version",
.exec = cmdVersion,
},
Command.{
Command{
.name = "zen",
.exec = cmdZen,
},
// undocumented commands
Command.{
Command{
.name = "help",
.exec = cmdHelp,
},
Command.{
Command{
.name = "internal",
.exec = cmdInternal,
},
@ -190,14 +190,14 @@ const usage_build_generic =
\\
;
const args_build_generic = []Flag.{
const args_build_generic = []Flag{
Flag.Bool("--help"),
Flag.Option("--color", []const []const u8.{
Flag.Option("--color", []const []const u8{
"auto",
"off",
"on",
}),
Flag.Option("--mode", []const []const u8.{
Flag.Option("--mode", []const []const u8{
"debug",
"release-fast",
"release-safe",
@ -205,7 +205,7 @@ const args_build_generic = []Flag.{
}),
Flag.ArgMergeN("--assembly", 1),
Flag.Option("--emit", []const []const u8.{
Flag.Option("--emit", []const []const u8{
"asm",
"bin",
"llvm-ir",
@ -456,10 +456,10 @@ fn buildOutputType(allocator: *Allocator, args: []const []const u8, out_type: Co
}
if (flags.single("mmacosx-version-min")) |ver| {
comp.darwin_version_min = Compilation.DarwinVersionMin.{ .MacOS = ver };
comp.darwin_version_min = Compilation.DarwinVersionMin{ .MacOS = ver };
}
if (flags.single("mios-version-min")) |ver| {
comp.darwin_version_min = Compilation.DarwinVersionMin.{ .Ios = ver };
comp.darwin_version_min = Compilation.DarwinVersionMin{ .Ios = ver };
}
comp.emit_file_type = emit_type;
@ -523,9 +523,9 @@ const usage_fmt =
\\
;
const args_fmt_spec = []Flag.{
const args_fmt_spec = []Flag{
Flag.Bool("--help"),
Flag.Option("--color", []const []const u8.{
Flag.Option("--color", []const []const u8{
"auto",
"off",
"on",
@ -533,7 +533,7 @@ const args_fmt_spec = []Flag.{
Flag.Bool("--stdin"),
};
const Fmt = struct.{
const Fmt = struct {
seen: event.Locked(SeenMap),
any_error: bool,
color: errmsg.Color,
@ -675,7 +675,7 @@ async fn asyncFmtMainChecked(
result.* = await (async asyncFmtMain(loop, flags, color) catch unreachable);
}
const FmtError = error.{
const FmtError = error{
SystemResources,
OperationAborted,
IoPending,
@ -704,7 +704,7 @@ async fn asyncFmtMain(
suspend {
resume @handle();
}
var fmt = Fmt.{
var fmt = Fmt{
.seen = event.Locked(Fmt.SeenMap).init(loop, Fmt.SeenMap.init(loop.allocator)),
.any_error = false,
.color = color,
@ -836,7 +836,7 @@ fn cmdVersion(allocator: *Allocator, args: []const []const u8) !void {
try stdout.print("{}\n", std.cstr.toSliceConst(c.ZIG_VERSION_STRING));
}
const args_test_spec = []Flag.{Flag.Bool("--help")};
const args_test_spec = []Flag{Flag.Bool("--help")};
fn cmdHelp(allocator: *Allocator, args: []const []const u8) !void {
try stdout.write(usage);
@ -878,7 +878,7 @@ fn cmdInternal(allocator: *Allocator, args: []const []const u8) !void {
os.exit(1);
}
const sub_commands = []Command.{Command.{
const sub_commands = []Command{Command{
.name = "build-info",
.exec = cmdInternalBuildInfo,
}};
@ -917,14 +917,14 @@ fn cmdInternalBuildInfo(allocator: *Allocator, args: []const []const u8) !void {
);
}
const CliPkg = struct.{
const CliPkg = struct {
name: []const u8,
path: []const u8,
children: ArrayList(*CliPkg),
parent: ?*CliPkg,
pub fn init(allocator: *mem.Allocator, name: []const u8, path: []const u8, parent: ?*CliPkg) !*CliPkg {
var pkg = try allocator.create(CliPkg.{
var pkg = try allocator.create(CliPkg{
var pkg = try allocator.create(CliPkg{
.name = name,
.path = path,
.children = ArrayList(*CliPkg).init(allocator),

View File

@ -3,7 +3,7 @@ const mem = std.mem;
const assert = std.debug.assert;
const Buffer = std.Buffer;
pub const Package = struct.{
pub const Package = struct {
root_src_dir: Buffer,
root_src_path: Buffer,
@ -15,7 +15,7 @@ pub const Package = struct.{
/// makes internal copies of root_src_dir and root_src_path
/// allocator should be an arena allocator because Package never frees anything
pub fn create(allocator: *mem.Allocator, root_src_dir: []const u8, root_src_path: []const u8) !*Package {
return allocator.create(Package.{
return allocator.create(Package{
.root_src_dir = try Buffer.init(allocator, root_src_dir),
.root_src_path = try Buffer.init(allocator, root_src_path),
.table = Table.init(allocator),

View File

@ -13,7 +13,7 @@ const assert = std.debug.assert;
const event = std.event;
const llvm = @import("llvm.zig");
pub const Scope = struct.{
pub const Scope = struct {
id: Id,
parent: ?*Scope,
ref_count: std.atomic.Int(usize),
@ -92,7 +92,7 @@ pub const Scope = struct.{
}
fn init(base: *Scope, id: Id, parent: *Scope) void {
base.* = Scope.{
base.* = Scope{
.id = id,
.parent = parent,
.ref_count = std.atomic.Int(usize).init(1),
@ -100,7 +100,7 @@ pub const Scope = struct.{
parent.ref();
}
pub const Id = enum.{
pub const Id = enum {
Root,
AstTree,
Decls,
@ -112,7 +112,7 @@ pub const Scope = struct.{
Var,
};
pub const Root = struct.{
pub const Root = struct {
base: Scope,
realpath: []const u8,
decls: *Decls,
@ -121,8 +121,8 @@ pub const Scope = struct.{
/// Takes ownership of realpath
pub fn create(comp: *Compilation, realpath: []u8) !*Root {
const self = try comp.gpa().createOne(Root);
self.* = Root.{
.base = Scope.{
self.* = Root{
.base = Scope{
.id = Id.Root,
.parent = null,
.ref_count = std.atomic.Int(usize).init(1),
@ -143,7 +143,7 @@ pub const Scope = struct.{
}
};
pub const AstTree = struct.{
pub const AstTree = struct {
base: Scope,
tree: *ast.Tree,
@ -151,7 +151,7 @@ pub const Scope = struct.{
/// Takes ownership of tree, will deinit and destroy when done.
pub fn create(comp: *Compilation, tree: *ast.Tree, root_scope: *Root) !*AstTree {
const self = try comp.gpa().createOne(AstTree);
self.* = AstTree.{
self.* = AstTree{
.base = undefined,
.tree = tree,
};
@ -172,7 +172,7 @@ pub const Scope = struct.{
}
};
pub const Decls = struct.{
pub const Decls = struct {
base: Scope,
/// This table remains Write Locked when the names are incomplete or possibly outdated.
@ -183,7 +183,7 @@ pub const Scope = struct.{
/// Creates a Decls scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope) !*Decls {
const self = try comp.gpa().createOne(Decls);
self.* = Decls.{
self.* = Decls{
.base = undefined,
.table = event.RwLocked(Decl.Table).init(comp.loop, Decl.Table.init(comp.gpa())),
};
@ -197,7 +197,7 @@ pub const Scope = struct.{
}
};
pub const Block = struct.{
pub const Block = struct {
base: Scope,
incoming_values: std.ArrayList(*ir.Inst),
incoming_blocks: std.ArrayList(*ir.BasicBlock),
@ -206,11 +206,11 @@ pub const Scope = struct.{
safety: Safety,
const Safety = union(enum).{
const Safety = union(enum) {
Auto,
Manual: Manual,
const Manual = struct.{
const Manual = struct {
/// the source span that disabled the safety value
span: Span,
@ -236,7 +236,7 @@ pub const Scope = struct.{
/// Creates a Block scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope) !*Block {
const self = try comp.gpa().createOne(Block);
self.* = Block.{
self.* = Block{
.base = undefined,
.incoming_values = undefined,
.incoming_blocks = undefined,
@ -253,7 +253,7 @@ pub const Scope = struct.{
}
};
pub const FnDef = struct.{
pub const FnDef = struct {
base: Scope,
/// This reference is not counted so that the scope can get destroyed with the function
@ -263,7 +263,7 @@ pub const Scope = struct.{
/// Must set the fn_val later
pub fn create(comp: *Compilation, parent: *Scope) !*FnDef {
const self = try comp.gpa().createOne(FnDef);
self.* = FnDef.{
self.* = FnDef{
.base = undefined,
.fn_val = null,
};
@ -276,13 +276,13 @@ pub const Scope = struct.{
}
};
pub const CompTime = struct.{
pub const CompTime = struct {
base: Scope,
/// Creates a CompTime scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope) !*CompTime {
const self = try comp.gpa().createOne(CompTime);
self.* = CompTime.{ .base = undefined };
self.* = CompTime{ .base = undefined };
self.base.init(Id.CompTime, parent);
return self;
}
@ -292,12 +292,12 @@ pub const Scope = struct.{
}
};
pub const Defer = struct.{
pub const Defer = struct {
base: Scope,
defer_expr_scope: *DeferExpr,
kind: Kind,
pub const Kind = enum.{
pub const Kind = enum {
ScopeExit,
ErrorExit,
};
@ -310,7 +310,7 @@ pub const Scope = struct.{
defer_expr_scope: *DeferExpr,
) !*Defer {
const self = try comp.gpa().createOne(Defer);
self.* = Defer.{
self.* = Defer{
.base = undefined,
.defer_expr_scope = defer_expr_scope,
.kind = kind,
@ -326,7 +326,7 @@ pub const Scope = struct.{
}
};
pub const DeferExpr = struct.{
pub const DeferExpr = struct {
base: Scope,
expr_node: *ast.Node,
reported_err: bool,
@ -334,7 +334,7 @@ pub const Scope = struct.{
/// Creates a DeferExpr scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope, expr_node: *ast.Node) !*DeferExpr {
const self = try comp.gpa().createOne(DeferExpr);
self.* = DeferExpr.{
self.* = DeferExpr{
.base = undefined,
.expr_node = expr_node,
.reported_err = false,
@ -348,18 +348,18 @@ pub const Scope = struct.{
}
};
pub const Var = struct.{
pub const Var = struct {
base: Scope,
name: []const u8,
src_node: *ast.Node,
data: Data,
pub const Data = union(enum).{
pub const Data = union(enum) {
Param: Param,
Const: *Value,
};
pub const Param = struct.{
pub const Param = struct {
index: usize,
typ: *Type,
llvm_value: llvm.ValueRef,
@ -374,8 +374,8 @@ pub const Scope = struct.{
param_type: *Type,
) !*Var {
const self = try create(comp, parent, name, src_node);
self.data = Data.{
.Param = Param.{
self.data = Data{
.Param = Param{
.index = param_index,
.typ = param_type,
.llvm_value = undefined,
@ -392,14 +392,14 @@ pub const Scope = struct.{
value: *Value,
) !*Var {
const self = try create(comp, parent, name, src_node);
self.data = Data.{ .Const = value };
self.data = Data{ .Const = value };
value.ref();
return self;
}
fn create(comp: *Compilation, parent: *Scope, name: []const u8, src_node: *ast.Node) !*Var {
const self = try comp.gpa().createOne(Var);
self.* = Var.{
self.* = Var{
.base = undefined,
.name = name,
.src_node = src_node,
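A short sketch of the container-declaration change in this file; the declarations below are simplified stand-ins, not the real Scope types:

    const Id = enum { // previously: enum.{
        Root,
        Block,
    };

    const Safety = union(enum) { // previously: union(enum).{
        Auto,
        Manual: u32,
    };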

View File

@ -3,17 +3,17 @@ const builtin = @import("builtin");
const llvm = @import("llvm.zig");
const CInt = @import("c_int.zig").CInt;
pub const FloatAbi = enum.{
pub const FloatAbi = enum {
Hard,
Soft,
SoftFp,
};
pub const Target = union(enum).{
pub const Target = union(enum) {
Native,
Cross: Cross,
pub const Cross = struct.{
pub const Cross = struct {
arch: builtin.Arch,
os: builtin.Os,
environ: builtin.Environ,

View File

@ -23,18 +23,18 @@ test "stage2" {
const file1 = "1.zig";
const allocator = std.heap.c_allocator;
pub const TestContext = struct.{
pub const TestContext = struct {
loop: std.event.Loop,
zig_compiler: ZigCompiler,
zig_lib_dir: []u8,
file_index: std.atomic.Int(usize),
group: std.event.Group(error!void),
any_err: error!void,
group: std.event.Group(anyerror!void),
any_err: anyerror!void,
const tmp_dir_name = "stage2_test_tmp";
fn init(self: *TestContext) !void {
self.* = TestContext.{
self.* = TestContext{
.any_err = {},
.loop = undefined,
.zig_compiler = undefined,
@ -49,7 +49,7 @@ pub const TestContext = struct.{
self.zig_compiler = try ZigCompiler.init(&self.loop);
errdefer self.zig_compiler.deinit();
self.group = std.event.Group(error!void).init(&self.loop);
self.group = std.event.Group(anyerror!void).init(&self.loop);
errdefer self.group.deinit();
self.zig_lib_dir = try introspect.resolveZigLibDir(allocator);
@ -162,7 +162,7 @@ pub const TestContext = struct.{
switch (build_event) {
Compilation.Event.Ok => {
const argv = []const []const u8.{exe_file_2};
const argv = []const []const u8{exe_file_2};
// TODO use event loop
const child = try std.os.ChildProcess.exec(allocator, argv, null, null, 1024 * 1024);
switch (child.term) {

View File

@ -8,13 +8,13 @@ const event = std.event;
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
pub const Type = struct.{
pub const Type = struct {
base: Value,
id: Id,
name: []const u8,
abi_alignment: AbiAlignment,
pub const AbiAlignment = event.Future(error.{OutOfMemory}!u32);
pub const AbiAlignment = event.Future(error{OutOfMemory}!u32);
pub const Id = builtin.TypeId;
@ -51,7 +51,7 @@ pub const Type = struct.{
base: *Type,
allocator: *Allocator,
llvm_context: llvm.ContextRef,
) (error.{OutOfMemory}!llvm.TypeRef) {
) (error{OutOfMemory}!llvm.TypeRef) {
switch (base.id) {
Id.Struct => return @fieldParentPtr(Struct, "base", base).getLlvmType(allocator, llvm_context),
Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmType(allocator, llvm_context),
@ -162,8 +162,8 @@ pub const Type = struct.{
}
fn init(base: *Type, comp: *Compilation, id: Id, name: []const u8) void {
base.* = Type.{
.base = Value.{
base.* = Type{
.base = Value{
.id = Value.Id.Type,
.typ = &MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@ -206,7 +206,7 @@ pub const Type = struct.{
return @intCast(u32, llvm.ABIAlignmentOfType(comp.target_data_ref, llvm_type));
}
pub const Struct = struct.{
pub const Struct = struct {
base: Type,
decls: *Scope.Decls,
@ -219,47 +219,47 @@ pub const Type = struct.{
}
};
pub const Fn = struct.{
pub const Fn = struct {
base: Type,
key: Key,
non_key: NonKey,
garbage_node: std.atomic.Stack(*Fn).Node,
pub const Kind = enum.{
pub const Kind = enum {
Normal,
Generic,
};
pub const NonKey = union.{
pub const NonKey = union {
Normal: Normal,
Generic: void,
pub const Normal = struct.{
pub const Normal = struct {
variable_list: std.ArrayList(*Scope.Var),
};
};
pub const Key = struct.{
pub const Key = struct {
data: Data,
alignment: ?u32,
pub const Data = union(Kind).{
pub const Data = union(Kind) {
Generic: Generic,
Normal: Normal,
};
pub const Normal = struct.{
pub const Normal = struct {
params: []Param,
return_type: *Type,
is_var_args: bool,
cc: CallingConvention,
};
pub const Generic = struct.{
pub const Generic = struct {
param_count: usize,
cc: CC,
pub const CC = union(CallingConvention).{
pub const CC = union(CallingConvention) {
Auto,
C,
Cold,
@ -362,7 +362,7 @@ pub const Type = struct.{
}
};
pub const CallingConvention = enum.{
pub const CallingConvention = enum {
Auto,
C,
Cold,
@ -371,7 +371,7 @@ pub const Type = struct.{
Async,
};
pub const Param = struct.{
pub const Param = struct {
is_noalias: bool,
typ: *Type,
};
@ -410,7 +410,7 @@ pub const Type = struct.{
errdefer key.deref(comp);
const self = try comp.gpa().createOne(Fn);
self.* = Fn.{
self.* = Fn{
.base = undefined,
.key = key,
.non_key = undefined,
@ -425,7 +425,7 @@ pub const Type = struct.{
switch (key.data) {
Kind.Generic => |generic| {
self.non_key = NonKey.{ .Generic = {} };
self.non_key = NonKey{ .Generic = {} };
switch (generic.cc) {
CallingConvention.Async => |async_allocator_type| {
try name_stream.print("async<{}> ", async_allocator_type.name);
@ -448,8 +448,8 @@ pub const Type = struct.{
try name_stream.write(" var");
},
Kind.Normal => |normal| {
self.non_key = NonKey.{
.Normal = NonKey.Normal.{ .variable_list = std.ArrayList(*Scope.Var).init(comp.gpa()) },
self.non_key = NonKey{
.Normal = NonKey.Normal{ .variable_list = std.ArrayList(*Scope.Var).init(comp.gpa()) },
};
const cc_str = ccFnTypeStr(normal.cc);
try name_stream.print("{}fn(", cc_str);
@ -513,7 +513,7 @@ pub const Type = struct.{
}
};
pub const MetaType = struct.{
pub const MetaType = struct {
base: Type,
value: *Type,
@ -528,7 +528,7 @@ pub const Type = struct.{
}
};
pub const Void = struct.{
pub const Void = struct {
base: Type,
/// Adds 1 reference to the resulting type
@ -542,7 +542,7 @@ pub const Type = struct.{
}
};
pub const Bool = struct.{
pub const Bool = struct {
base: Type,
/// Adds 1 reference to the resulting type
@ -560,7 +560,7 @@ pub const Type = struct.{
}
};
pub const NoReturn = struct.{
pub const NoReturn = struct {
base: Type,
/// Adds 1 reference to the resulting type
@ -574,12 +574,12 @@ pub const Type = struct.{
}
};
pub const Int = struct.{
pub const Int = struct {
base: Type,
key: Key,
garbage_node: std.atomic.Stack(*Int).Node,
pub const Key = struct.{
pub const Key = struct {
bit_count: u32,
is_signed: bool,
@ -611,7 +611,7 @@ pub const Type = struct.{
}
}
const self = try comp.gpa().create(Int.{
const self = try comp.gpa().create(Int{
.base = undefined,
.key = key,
.garbage_node = undefined,
@ -634,7 +634,7 @@ pub const Type = struct.{
}
pub fn destroy(self: *Int, comp: *Compilation) void {
self.garbage_node = std.atomic.Stack(*Int).Node.{
self.garbage_node = std.atomic.Stack(*Int).Node{
.data = self,
.next = undefined,
};
@ -658,7 +658,7 @@ pub const Type = struct.{
}
};
pub const Float = struct.{
pub const Float = struct {
base: Type,
pub fn destroy(self: *Float, comp: *Compilation) void {
@ -669,12 +669,12 @@ pub const Type = struct.{
@panic("TODO");
}
};
pub const Pointer = struct.{
pub const Pointer = struct {
base: Type,
key: Key,
garbage_node: std.atomic.Stack(*Pointer).Node,
pub const Key = struct.{
pub const Key = struct {
child_type: *Type,
mut: Mut,
vol: Vol,
@ -710,17 +710,17 @@ pub const Type = struct.{
}
};
pub const Mut = enum.{
pub const Mut = enum {
Mut,
Const,
};
pub const Vol = enum.{
pub const Vol = enum {
Non,
Volatile,
};
pub const Align = union(enum).{
pub const Align = union(enum) {
Abi,
Override: u32,
};
@ -728,7 +728,7 @@ pub const Type = struct.{
pub const Size = builtin.TypeInfo.Pointer.Size;
pub fn destroy(self: *Pointer, comp: *Compilation) void {
self.garbage_node = std.atomic.Stack(*Pointer).Node.{
self.garbage_node = std.atomic.Stack(*Pointer).Node{
.data = self,
.next = undefined,
};
@ -777,7 +777,7 @@ pub const Type = struct.{
}
}
const self = try comp.gpa().create(Pointer.{
const self = try comp.gpa().create(Pointer{
.base = undefined,
.key = normal_key,
.garbage_node = undefined,
@ -835,12 +835,12 @@ pub const Type = struct.{
}
};
pub const Array = struct.{
pub const Array = struct {
base: Type,
key: Key,
garbage_node: std.atomic.Stack(*Array).Node,
pub const Key = struct.{
pub const Key = struct {
elem_type: *Type,
len: usize,
@ -875,7 +875,7 @@ pub const Type = struct.{
}
}
const self = try comp.gpa().create(Array.{
const self = try comp.gpa().create(Array{
.base = undefined,
.key = key,
.garbage_node = undefined,
@ -902,7 +902,7 @@ pub const Type = struct.{
}
};
pub const ComptimeFloat = struct.{
pub const ComptimeFloat = struct {
base: Type,
pub fn destroy(self: *ComptimeFloat, comp: *Compilation) void {
@ -910,7 +910,7 @@ pub const Type = struct.{
}
};
pub const ComptimeInt = struct.{
pub const ComptimeInt = struct {
base: Type,
/// Adds 1 reference to the resulting type
@ -924,7 +924,7 @@ pub const Type = struct.{
}
};
pub const Undefined = struct.{
pub const Undefined = struct {
base: Type,
pub fn destroy(self: *Undefined, comp: *Compilation) void {
@ -932,7 +932,7 @@ pub const Type = struct.{
}
};
pub const Null = struct.{
pub const Null = struct {
base: Type,
pub fn destroy(self: *Null, comp: *Compilation) void {
@ -940,7 +940,7 @@ pub const Type = struct.{
}
};
pub const Optional = struct.{
pub const Optional = struct {
base: Type,
pub fn destroy(self: *Optional, comp: *Compilation) void {
@ -952,7 +952,7 @@ pub const Type = struct.{
}
};
pub const ErrorUnion = struct.{
pub const ErrorUnion = struct {
base: Type,
pub fn destroy(self: *ErrorUnion, comp: *Compilation) void {
@ -964,7 +964,7 @@ pub const Type = struct.{
}
};
pub const ErrorSet = struct.{
pub const ErrorSet = struct {
base: Type,
pub fn destroy(self: *ErrorSet, comp: *Compilation) void {
@ -976,7 +976,7 @@ pub const Type = struct.{
}
};
pub const Enum = struct.{
pub const Enum = struct {
base: Type,
pub fn destroy(self: *Enum, comp: *Compilation) void {
@ -988,7 +988,7 @@ pub const Type = struct.{
}
};
pub const Union = struct.{
pub const Union = struct {
base: Type,
pub fn destroy(self: *Union, comp: *Compilation) void {
@ -1000,7 +1000,7 @@ pub const Type = struct.{
}
};
pub const Namespace = struct.{
pub const Namespace = struct {
base: Type,
pub fn destroy(self: *Namespace, comp: *Compilation) void {
@ -1008,7 +1008,7 @@ pub const Type = struct.{
}
};
pub const BoundFn = struct.{
pub const BoundFn = struct {
base: Type,
pub fn destroy(self: *BoundFn, comp: *Compilation) void {
@ -1020,7 +1020,7 @@ pub const Type = struct.{
}
};
pub const ArgTuple = struct.{
pub const ArgTuple = struct {
base: Type,
pub fn destroy(self: *ArgTuple, comp: *Compilation) void {
@ -1028,7 +1028,7 @@ pub const Type = struct.{
}
};
pub const Opaque = struct.{
pub const Opaque = struct {
base: Type,
pub fn destroy(self: *Opaque, comp: *Compilation) void {
@ -1040,7 +1040,7 @@ pub const Type = struct.{
}
};
pub const Promise = struct.{
pub const Promise = struct {
base: Type,
pub fn destroy(self: *Promise, comp: *Compilation) void {
@ -1074,7 +1074,7 @@ fn hashAny(x: var, comptime seed: u64) u32 {
builtin.TypeId.Enum => return hashAny(@enumToInt(x), seed),
builtin.TypeId.Bool => {
comptime var rng = comptime std.rand.DefaultPrng.init(seed);
const vals = comptime [2]u32.{ rng.random.scalar(u32), rng.random.scalar(u32) };
const vals = comptime [2]u32{ rng.random.scalar(u32), rng.random.scalar(u32) };
return vals[@boolToInt(x)];
},
builtin.TypeId.Optional => {
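A hedged sketch of the explicit error-set syntax used in this file's signatures; the function below is illustrative and not part of the compiler:

    const AllocError = error{OutOfMemory}; // previously: error.{OutOfMemory}

    fn reserve(bytes: usize) AllocError!void {
        if (bytes > 1 << 20) return error.OutOfMemory;
    }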

View File

@ -9,7 +9,7 @@ const assert = std.debug.assert;
/// Values are ref-counted, heap-allocated, and copy-on-write
/// If there is only 1 ref then write need not copy
pub const Value = struct.{
pub const Value = struct {
id: Id,
typ: *Type,
ref_count: std.atomic.Int(usize),
@ -57,7 +57,7 @@ pub const Value = struct.{
std.debug.warn("{}", @tagName(base.id));
}
pub fn getLlvmConst(base: *Value, ofile: *ObjectFile) (error.{OutOfMemory}!?llvm.ValueRef) {
pub fn getLlvmConst(base: *Value, ofile: *ObjectFile) (error{OutOfMemory}!?llvm.ValueRef) {
switch (base.id) {
Id.Type => unreachable,
Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmConst(ofile),
@ -71,7 +71,7 @@ pub const Value = struct.{
}
}
pub fn derefAndCopy(self: *Value, comp: *Compilation) (error.{OutOfMemory}!*Value) {
pub fn derefAndCopy(self: *Value, comp: *Compilation) (error{OutOfMemory}!*Value) {
if (self.ref_count.get() == 1) {
// ( ͡° ͜ʖ ͡°)
return self;
@ -81,7 +81,7 @@ pub const Value = struct.{
return self.copy(comp);
}
pub fn copy(base: *Value, comp: *Compilation) (error.{OutOfMemory}!*Value) {
pub fn copy(base: *Value, comp: *Compilation) (error{OutOfMemory}!*Value) {
switch (base.id) {
Id.Type => unreachable,
Id.Fn => unreachable,
@ -95,25 +95,25 @@ pub const Value = struct.{
}
}
pub const Parent = union(enum).{
pub const Parent = union(enum) {
None,
BaseStruct: BaseStruct,
BaseArray: BaseArray,
BaseUnion: *Value,
BaseScalar: *Value,
pub const BaseStruct = struct.{
pub const BaseStruct = struct {
val: *Value,
field_index: usize,
};
pub const BaseArray = struct.{
pub const BaseArray = struct {
val: *Value,
elem_index: usize,
};
};
pub const Id = enum.{
pub const Id = enum {
Type,
Fn,
Void,
@ -127,7 +127,7 @@ pub const Value = struct.{
pub const Type = @import("type.zig").Type;
pub const FnProto = struct.{
pub const FnProto = struct {
base: Value,
/// The main external name that is used in the .o file.
@ -135,8 +135,8 @@ pub const Value = struct.{
symbol_name: Buffer,
pub fn create(comp: *Compilation, fn_type: *Type.Fn, symbol_name: Buffer) !*FnProto {
const self = try comp.gpa().create(FnProto.{
.base = Value.{
const self = try comp.gpa().create(FnProto{
.base = Value{
.id = Value.Id.FnProto,
.typ = &fn_type.base,
.ref_count = std.atomic.Int(usize).init(1),
@ -166,7 +166,7 @@ pub const Value = struct.{
}
};
pub const Fn = struct.{
pub const Fn = struct {
base: Value,
/// The main external name that is used in the .o file.
@ -190,15 +190,15 @@ pub const Value = struct.{
/// Creates a Fn value with 1 ref
/// Takes ownership of symbol_name
pub fn create(comp: *Compilation, fn_type: *Type.Fn, fndef_scope: *Scope.FnDef, symbol_name: Buffer) !*Fn {
const link_set_node = try comp.gpa().create(Compilation.FnLinkSet.Node.{
const link_set_node = try comp.gpa().create(Compilation.FnLinkSet.Node{
.data = null,
.next = undefined,
.prev = undefined,
});
errdefer comp.gpa().destroy(link_set_node);
const self = try comp.gpa().create(Fn.{
.base = Value.{
const self = try comp.gpa().create(Fn{
.base = Value{
.id = Value.Id.Fn,
.typ = &fn_type.base,
.ref_count = std.atomic.Int(usize).init(1),
@ -249,7 +249,7 @@ pub const Value = struct.{
}
};
pub const Void = struct.{
pub const Void = struct {
base: Value,
pub fn get(comp: *Compilation) *Void {
@ -262,7 +262,7 @@ pub const Value = struct.{
}
};
pub const Bool = struct.{
pub const Bool = struct {
base: Value,
x: bool,
@ -290,7 +290,7 @@ pub const Value = struct.{
}
};
pub const NoReturn = struct.{
pub const NoReturn = struct {
base: Value,
pub fn get(comp: *Compilation) *NoReturn {
@ -303,18 +303,18 @@ pub const Value = struct.{
}
};
pub const Ptr = struct.{
pub const Ptr = struct {
base: Value,
special: Special,
mut: Mut,
pub const Mut = enum.{
pub const Mut = enum {
CompTimeConst,
CompTimeVar,
RunTime,
};
pub const Special = union(enum).{
pub const Special = union(enum) {
Scalar: *Value,
BaseArray: BaseArray,
BaseStruct: BaseStruct,
@ -322,12 +322,12 @@ pub const Value = struct.{
Discard,
};
pub const BaseArray = struct.{
pub const BaseArray = struct {
val: *Value,
elem_index: usize,
};
pub const BaseStruct = struct.{
pub const BaseStruct = struct {
val: *Value,
field_index: usize,
};
@ -343,7 +343,7 @@ pub const Value = struct.{
errdefer array_val.base.deref(comp);
const elem_type = array_val.base.typ.cast(Type.Array).?.key.elem_type;
const ptr_type = try await (async Type.Pointer.get(comp, Type.Pointer.Key.{
const ptr_type = try await (async Type.Pointer.get(comp, Type.Pointer.Key{
.child_type = elem_type,
.mut = mut,
.vol = Type.Pointer.Vol.Non,
@ -353,14 +353,14 @@ pub const Value = struct.{
var ptr_type_consumed = false;
errdefer if (!ptr_type_consumed) ptr_type.base.base.deref(comp);
const self = try comp.gpa().create(Value.Ptr.{
.base = Value.{
const self = try comp.gpa().create(Value.Ptr{
.base = Value{
.id = Value.Id.Ptr,
.typ = &ptr_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
.special = Special.{
.BaseArray = BaseArray.{
.special = Special{
.BaseArray = BaseArray{
.val = &array_val.base,
.elem_index = 0,
},
@ -387,7 +387,7 @@ pub const Value = struct.{
const array_llvm_value = (try base_array.val.getLlvmConst(ofile)).?;
const ptr_bit_count = ofile.comp.target_ptr_bits;
const usize_llvm_type = llvm.IntTypeInContext(ofile.context, ptr_bit_count) orelse return error.OutOfMemory;
const indices = []llvm.ValueRef.{
const indices = []llvm.ValueRef{
llvm.ConstNull(usize_llvm_type) orelse return error.OutOfMemory,
llvm.ConstInt(usize_llvm_type, base_array.elem_index, 0) orelse return error.OutOfMemory,
};
@ -404,17 +404,17 @@ pub const Value = struct.{
}
};
pub const Array = struct.{
pub const Array = struct {
base: Value,
special: Special,
pub const Special = union(enum).{
pub const Special = union(enum) {
Undefined,
OwnedBuffer: []u8,
Explicit: Data,
};
pub const Data = struct.{
pub const Data = struct {
parent: Parent,
elements: []*Value,
};
@ -424,19 +424,19 @@ pub const Value = struct.{
const u8_type = Type.Int.get_u8(comp);
defer u8_type.base.base.deref(comp);
const array_type = try await (async Type.Array.get(comp, Type.Array.Key.{
const array_type = try await (async Type.Array.get(comp, Type.Array.Key{
.elem_type = &u8_type.base,
.len = buffer.len,
}) catch unreachable);
errdefer array_type.base.base.deref(comp);
const self = try comp.gpa().create(Value.Array.{
.base = Value.{
const self = try comp.gpa().create(Value.Array{
.base = Value{
.id = Value.Id.Array,
.typ = &array_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
.special = Special.{ .OwnedBuffer = buffer },
.special = Special{ .OwnedBuffer = buffer },
});
errdefer comp.gpa().destroy(self);
@ -504,13 +504,13 @@ pub const Value = struct.{
}
};
pub const Int = struct.{
pub const Int = struct {
base: Value,
big_int: std.math.big.Int,
pub fn createFromString(comp: *Compilation, typ: *Type, base: u8, value: []const u8) !*Int {
const self = try comp.gpa().create(Value.Int.{
.base = Value.{
const self = try comp.gpa().create(Value.Int{
.base = Value{
.id = Value.Id.Int,
.typ = typ,
.ref_count = std.atomic.Int(usize).init(1),
@ -557,8 +557,8 @@ pub const Value = struct.{
old.base.typ.base.ref();
errdefer old.base.typ.base.deref(comp);
const new = try comp.gpa().create(Value.Int.{
.base = Value.{
const new = try comp.gpa().create(Value.Int{
.base = Value{
.id = Value.Id.Int,
.typ = old.base.typ,
.ref_count = std.atomic.Int(usize).init(1),
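A minimal sketch of the nested-literal pattern used when constructing values above; Base and Animal are hypothetical types, not the real Value hierarchy:

    const Base = struct {
        id: u32,
    };

    const Animal = struct {
        base: Base,
        legs: u32,
    };

    fn makeAnimal() Animal {
        // previously written: Animal.{ .base = Base.{ .id = 1 }, .legs = 4 }
        return Animal{
            .base = Base{ .id = 1 },
            .legs = 4,
        };
    }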

View File

@ -1,4 +1,4 @@
pub const Visib = enum.{
pub const Visib = enum {
Private,
Pub,
};

View File

@ -387,7 +387,6 @@ struct TypeUnionField {
};
enum NodeType {
NodeTypeRoot,
NodeTypeFnProto,
NodeTypeFnDef,
NodeTypeParamDecl,
@ -443,10 +442,6 @@ enum NodeType {
NodeTypePromiseType,
};
struct AstNodeRoot {
ZigList<AstNode *> top_level_decls;
};
enum CallingConvention {
CallingConventionUnspecified,
CallingConventionC,
@ -812,7 +807,7 @@ struct AstNodeContainerDecl {
ZigList<AstNode *> decls;
ContainerLayout layout;
AstNode *init_arg_expr; // enum(T), struct(endianness), or union(T), or union(enum(T))
bool auto_enum; // union(enum)
bool auto_enum, is_root; // union(enum)
};
struct AstNodeErrorSetDecl {
@ -922,7 +917,6 @@ struct AstNode {
size_t column;
ImportTableEntry *owner;
union {
AstNodeRoot root;
AstNodeFnDef fn_def;
AstNodeFnProto fn_proto;
AstNodeParamDecl param_decl;
@ -1863,7 +1857,7 @@ struct Scope {
// This scope comes from global declarations or from
// declarations in a container declaration
// NodeTypeRoot, NodeTypeContainerDecl
// NodeTypeContainerDecl
struct ScopeDecls {
Scope base;

View File

@ -100,7 +100,7 @@ void init_scope(CodeGen *g, Scope *dest, ScopeId id, AstNode *source_node, Scope
}
ScopeDecls *create_decls_scope(CodeGen *g, AstNode *node, Scope *parent, ZigType *container_type, ImportTableEntry *import) {
assert(node == nullptr || node->type == NodeTypeRoot || node->type == NodeTypeContainerDecl || node->type == NodeTypeFnCallExpr);
assert(node == nullptr || node->type == NodeTypeContainerDecl || node->type == NodeTypeFnCallExpr);
ScopeDecls *scope = allocate<ScopeDecls>(1);
init_scope(g, &scope->base, ScopeIdDecls, node, parent);
scope->decl_table.init(4);
@ -3399,9 +3399,9 @@ void update_compile_var(CodeGen *g, Buf *name, ConstExprValue *value) {
void scan_decls(CodeGen *g, ScopeDecls *decls_scope, AstNode *node) {
switch (node->type) {
case NodeTypeRoot:
for (size_t i = 0; i < node->data.root.top_level_decls.length; i += 1) {
AstNode *child = node->data.root.top_level_decls.at(i);
case NodeTypeContainerDecl:
for (size_t i = 0; i < node->data.container_decl.decls.length; i += 1) {
AstNode *child = node->data.container_decl.decls.at(i);
scan_decls(g, decls_scope, child);
}
break;
@ -3448,7 +3448,6 @@ void scan_decls(CodeGen *g, ScopeDecls *decls_scope, AstNode *node) {
case NodeTypeCompTime:
preview_comptime_decl(g, node, decls_scope);
break;
case NodeTypeContainerDecl:
case NodeTypeParamDecl:
case NodeTypeReturnExpr:
case NodeTypeDefer:
@ -4337,9 +4336,9 @@ ImportTableEntry *add_source_file(CodeGen *g, PackageTableEntry *package, Buf *r
import_entry->decls_scope = create_decls_scope(g, import_entry->root, nullptr, nullptr, import_entry);
assert(import_entry->root->type == NodeTypeRoot);
for (size_t decl_i = 0; decl_i < import_entry->root->data.root.top_level_decls.length; decl_i += 1) {
AstNode *top_level_decl = import_entry->root->data.root.top_level_decls.at(decl_i);
assert(import_entry->root->type == NodeTypeContainerDecl);
for (size_t decl_i = 0; decl_i < import_entry->root->data.container_decl.decls.length; decl_i += 1) {
AstNode *top_level_decl = import_entry->root->data.container_decl.decls.at(decl_i);
if (top_level_decl->type == NodeTypeFnDef) {
AstNode *proto_node = top_level_decl->data.fn_def.fn_proto;

View File

@ -143,8 +143,6 @@ const char *container_string(ContainerKind kind) {
static const char *node_type_str(NodeType node_type) {
switch (node_type) {
case NodeTypeRoot:
return "Root";
case NodeTypeFnDef:
return "FnDef";
case NodeTypeFnProto:
@ -379,6 +377,38 @@ static void print_symbol(AstRender *ar, Buf *symbol) {
fprintf(ar->f, "@\"%s\"", buf_ptr(&escaped));
}
static bool statement_terminates_without_semicolon(AstNode *node) {
switch (node->type) {
case NodeTypeIfBoolExpr:
if (node->data.if_bool_expr.else_node)
return statement_terminates_without_semicolon(node->data.if_bool_expr.else_node);
return node->data.if_bool_expr.then_block->type == NodeTypeBlock;
case NodeTypeIfErrorExpr:
if (node->data.if_err_expr.else_node)
return statement_terminates_without_semicolon(node->data.if_err_expr.else_node);
return node->data.if_err_expr.then_node->type == NodeTypeBlock;
case NodeTypeTestExpr:
if (node->data.test_expr.else_node)
return statement_terminates_without_semicolon(node->data.test_expr.else_node);
return node->data.test_expr.then_node->type == NodeTypeBlock;
case NodeTypeWhileExpr:
return node->data.while_expr.body->type == NodeTypeBlock;
case NodeTypeForExpr:
return node->data.for_expr.body->type == NodeTypeBlock;
case NodeTypeCompTime:
return node->data.comptime_expr.expr->type == NodeTypeBlock;
case NodeTypeDefer:
return node->data.defer.expr->type == NodeTypeBlock;
case NodeTypeSuspend:
return node->data.suspend.block != nullptr && node->data.suspend.block->type == NodeTypeBlock;
case NodeTypeSwitchExpr:
case NodeTypeBlock:
return true;
default:
return false;
}
}
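The helper above decides which re-rendered Zig statements end in a block and therefore take no trailing semicolon. A minimal Zig illustration of that rule, with hypothetical code kept separate from the C++:

    test "semicolon placement" {
        var x: i32 = 0; // ordinary statements keep their trailing semicolon
        if (x == 0) {
            x += 1;
        } // a statement that ends in a block needs no semicolon
        while (x < 3) : (x += 1) {}
    }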
static void render_node_extra(AstRender *ar, AstNode *node, bool grouped);
static void render_node_grouped(AstRender *ar, AstNode *node) {
@ -395,21 +425,6 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
case NodeTypeSwitchRange:
case NodeTypeStructValueField:
zig_unreachable();
case NodeTypeRoot:
for (size_t i = 0; i < node->data.root.top_level_decls.length; i += 1) {
AstNode *child = node->data.root.top_level_decls.at(i);
print_indent(ar);
render_node_grouped(ar, child);
if (child->type == NodeTypeUse ||
child->type == NodeTypeVariableDeclaration ||
child->type == NodeTypeFnProto)
{
fprintf(ar->f, ";");
}
fprintf(ar->f, "\n");
}
break;
case NodeTypeFnProto:
{
const char *pub_str = visib_mod_string(node->data.fn_proto.visib_mod);
@ -698,7 +713,11 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
{
AstNode *lhs = node->data.field_access_expr.struct_expr;
Buf *rhs = node->data.field_access_expr.field_name;
render_node_ungrouped(ar, lhs);
if (lhs->type == NodeTypeErrorType) {
fprintf(ar->f, "error");
} else {
render_node_ungrouped(ar, lhs);
}
fprintf(ar->f, ".");
print_symbol(ar, rhs);
break;
@ -722,23 +741,25 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
break;
case NodeTypeContainerDecl:
{
const char *layout_str = layout_string(node->data.container_decl.layout);
const char *container_str = container_string(node->data.container_decl.kind);
fprintf(ar->f, "%s%s", layout_str, container_str);
if (node->data.container_decl.auto_enum) {
fprintf(ar->f, "(enum");
}
if (node->data.container_decl.init_arg_expr != nullptr) {
fprintf(ar->f, "(");
render_node_grouped(ar, node->data.container_decl.init_arg_expr);
fprintf(ar->f, ")");
}
if (node->data.container_decl.auto_enum) {
fprintf(ar->f, ")");
}
if (!node->data.container_decl.is_root) {
const char *layout_str = layout_string(node->data.container_decl.layout);
const char *container_str = container_string(node->data.container_decl.kind);
fprintf(ar->f, "%s%s", layout_str, container_str);
if (node->data.container_decl.auto_enum) {
fprintf(ar->f, "(enum");
}
if (node->data.container_decl.init_arg_expr != nullptr) {
fprintf(ar->f, "(");
render_node_grouped(ar, node->data.container_decl.init_arg_expr);
fprintf(ar->f, ")");
}
if (node->data.container_decl.auto_enum) {
fprintf(ar->f, ")");
}
fprintf(ar->f, ".{\n");
ar->indent += ar->indent_size;
fprintf(ar->f, " {\n");
ar->indent += ar->indent_size;
}
for (size_t field_i = 0; field_i < node->data.container_decl.fields.length; field_i += 1) {
AstNode *field_node = node->data.container_decl.fields.at(field_i);
assert(field_node->type == NodeTypeStructField);
@ -755,18 +776,33 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
fprintf(ar->f, ",\n");
}
ar->indent -= ar->indent_size;
print_indent(ar);
fprintf(ar->f, "}");
for (size_t decl_i = 0; decl_i < node->data.container_decl.decls.length; decl_i += 1) {
AstNode *decls_node = node->data.container_decl.decls.at(decl_i);
render_node_grouped(ar, decls_node);
if (decls_node->type == NodeTypeUse ||
decls_node->type == NodeTypeVariableDeclaration ||
decls_node->type == NodeTypeFnProto)
{
fprintf(ar->f, ";");
}
fprintf(ar->f, "\n");
}
if (!node->data.container_decl.is_root) {
ar->indent -= ar->indent_size;
print_indent(ar);
fprintf(ar->f, "}");
}
break;
}
case NodeTypeContainerInitExpr:
render_node_ungrouped(ar, node->data.container_init_expr.type);
if (node->data.container_init_expr.kind == ContainerInitKindStruct) {
fprintf(ar->f, ".{\n");
fprintf(ar->f, "{\n");
ar->indent += ar->indent_size;
} else {
fprintf(ar->f, ".{");
fprintf(ar->f, "{");
}
for (size_t i = 0; i < node->data.container_init_expr.entries.length; i += 1) {
AstNode *entry = node->data.container_init_expr.entries.at(i);
@ -812,7 +848,7 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
break;
}
case NodeTypeErrorType:
fprintf(ar->f, "error");
fprintf(ar->f, "anyerror");
break;
case NodeTypeAsmExpr:
{

View File

@ -6603,7 +6603,7 @@ static void define_builtin_types(CodeGen *g) {
{
ZigType *entry = new_type_table_entry(ZigTypeIdErrorSet);
buf_init_from_str(&entry->name, "error");
buf_init_from_str(&entry->name, "anyerror");
entry->data.error_set.err_count = UINT32_MAX;
// TODO allow overriding this type and keep track of max value and emit an
@ -6614,7 +6614,7 @@ static void define_builtin_types(CodeGen *g) {
entry->type_ref = g->err_tag_type->type_ref;
entry->di_type = ZigLLVMCreateReplaceableCompositeType(g->dbuilder,
ZigLLVMTag_DW_enumeration_type(), "error",
ZigLLVMTag_DW_enumeration_type(), "anyerror",
ZigLLVMCompileUnitToScope(g->compile_unit), nullptr, 0);
// reserve index 0 to indicate no error
@ -6746,14 +6746,14 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
// Modifications to this struct must be coordinated with code that does anything with
// g->stack_trace_type. There are hard-coded references to the field indexes.
buf_append_str(contents,
"pub const StackTrace = struct.{\n"
"pub const StackTrace = struct {\n"
" index: usize,\n"
" instruction_addresses: []usize,\n"
"};\n\n");
const char *cur_os = nullptr;
{
buf_appendf(contents, "pub const Os = enum.{\n");
buf_appendf(contents, "pub const Os = enum {\n");
uint32_t field_count = (uint32_t)target_os_count();
for (uint32_t i = 0; i < field_count; i += 1) {
Os os_type = get_target_os(i);
@ -6771,7 +6771,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
const char *cur_arch = nullptr;
{
buf_appendf(contents, "pub const Arch = enum.{\n");
buf_appendf(contents, "pub const Arch = enum {\n");
uint32_t field_count = (uint32_t)target_arch_count();
for (uint32_t i = 0; i < field_count; i += 1) {
const ArchType *arch_type = get_target_arch(i);
@ -6795,7 +6795,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
const char *cur_environ = nullptr;
{
buf_appendf(contents, "pub const Environ = enum.{\n");
buf_appendf(contents, "pub const Environ = enum {\n");
uint32_t field_count = (uint32_t)target_environ_count();
for (uint32_t i = 0; i < field_count; i += 1) {
ZigLLVM_EnvironmentType environ_type = get_target_environ(i);
@ -6813,7 +6813,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
const char *cur_obj_fmt = nullptr;
{
buf_appendf(contents, "pub const ObjectFormat = enum.{\n");
buf_appendf(contents, "pub const ObjectFormat = enum {\n");
uint32_t field_count = (uint32_t)target_oformat_count();
for (uint32_t i = 0; i < field_count; i += 1) {
ZigLLVM_ObjectFormatType oformat = get_target_oformat(i);
@ -6831,7 +6831,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
assert(cur_obj_fmt != nullptr);
{
buf_appendf(contents, "pub const GlobalLinkage = enum.{\n");
buf_appendf(contents, "pub const GlobalLinkage = enum {\n");
uint32_t field_count = array_length(global_linkage_values);
for (uint32_t i = 0; i < field_count; i += 1) {
const GlobalLinkageValue *value = &global_linkage_values[i];
@ -6841,7 +6841,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const AtomicOrder = enum.{\n"
"pub const AtomicOrder = enum {\n"
" Unordered,\n"
" Monotonic,\n"
" Acquire,\n"
@ -6852,7 +6852,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const AtomicRmwOp = enum.{\n"
"pub const AtomicRmwOp = enum {\n"
" Xchg,\n"
" Add,\n"
" Sub,\n"
@ -6866,7 +6866,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const Mode = enum.{\n"
"pub const Mode = enum {\n"
" Debug,\n"
" ReleaseSafe,\n"
" ReleaseFast,\n"
@ -6874,7 +6874,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
"};\n\n");
}
{
buf_appendf(contents, "pub const TypeId = enum.{\n");
buf_appendf(contents, "pub const TypeId = enum {\n");
size_t field_count = type_id_len();
for (size_t i = 0; i < field_count; i += 1) {
const ZigTypeId id = type_id_at_index(i);
@ -6884,7 +6884,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const TypeInfo = union(TypeId).{\n"
"pub const TypeInfo = union(TypeId) {\n"
" Type: void,\n"
" Void: void,\n"
" Bool: void,\n"
@ -6910,96 +6910,96 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
" Opaque: void,\n"
" Promise: Promise,\n"
"\n\n"
" pub const Int = struct.{\n"
" pub const Int = struct {\n"
" is_signed: bool,\n"
" bits: u8,\n"
" };\n"
"\n"
" pub const Float = struct.{\n"
" pub const Float = struct {\n"
" bits: u8,\n"
" };\n"
"\n"
" pub const Pointer = struct.{\n"
" pub const Pointer = struct {\n"
" size: Size,\n"
" is_const: bool,\n"
" is_volatile: bool,\n"
" alignment: u32,\n"
" child: type,\n"
"\n"
" pub const Size = enum.{\n"
" pub const Size = enum {\n"
" One,\n"
" Many,\n"
" Slice,\n"
" };\n"
" };\n"
"\n"
" pub const Array = struct.{\n"
" pub const Array = struct {\n"
" len: usize,\n"
" child: type,\n"
" };\n"
"\n"
" pub const ContainerLayout = enum.{\n"
" pub const ContainerLayout = enum {\n"
" Auto,\n"
" Extern,\n"
" Packed,\n"
" };\n"
"\n"
" pub const StructField = struct.{\n"
" pub const StructField = struct {\n"
" name: []const u8,\n"
" offset: ?usize,\n"
" field_type: type,\n"
" };\n"
"\n"
" pub const Struct = struct.{\n"
" pub const Struct = struct {\n"
" layout: ContainerLayout,\n"
" fields: []StructField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const Optional = struct.{\n"
" pub const Optional = struct {\n"
" child: type,\n"
" };\n"
"\n"
" pub const ErrorUnion = struct.{\n"
" pub const ErrorUnion = struct {\n"
" error_set: type,\n"
" payload: type,\n"
" };\n"
"\n"
" pub const Error = struct.{\n"
" pub const Error = struct {\n"
" name: []const u8,\n"
" value: usize,\n"
" };\n"
"\n"
" pub const ErrorSet = struct.{\n"
" pub const ErrorSet = struct {\n"
" errors: []Error,\n"
" };\n"
"\n"
" pub const EnumField = struct.{\n"
" pub const EnumField = struct {\n"
" name: []const u8,\n"
" value: usize,\n"
" };\n"
"\n"
" pub const Enum = struct.{\n"
" pub const Enum = struct {\n"
" layout: ContainerLayout,\n"
" tag_type: type,\n"
" fields: []EnumField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const UnionField = struct.{\n"
" pub const UnionField = struct {\n"
" name: []const u8,\n"
" enum_field: ?EnumField,\n"
" field_type: type,\n"
" };\n"
"\n"
" pub const Union = struct.{\n"
" pub const Union = struct {\n"
" layout: ContainerLayout,\n"
" tag_type: ?type,\n"
" fields: []UnionField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const CallingConvention = enum.{\n"
" pub const CallingConvention = enum {\n"
" Unspecified,\n"
" C,\n"
" Cold,\n"
@ -7008,13 +7008,13 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
" Async,\n"
" };\n"
"\n"
" pub const FnArg = struct.{\n"
" pub const FnArg = struct {\n"
" is_generic: bool,\n"
" is_noalias: bool,\n"
" arg_type: ?type,\n"
" };\n"
"\n"
" pub const Fn = struct.{\n"
" pub const Fn = struct {\n"
" calling_convention: CallingConvention,\n"
" is_generic: bool,\n"
" is_var_args: bool,\n"
@ -7023,21 +7023,21 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
" args: []FnArg,\n"
" };\n"
"\n"
" pub const Promise = struct.{\n"
" pub const Promise = struct {\n"
" child: ?type,\n"
" };\n"
"\n"
" pub const Definition = struct.{\n"
" pub const Definition = struct {\n"
" name: []const u8,\n"
" is_pub: bool,\n"
" data: Data,\n"
"\n"
" pub const Data = union(enum).{\n"
" pub const Data = union(enum) {\n"
" Type: type,\n"
" Var: type,\n"
" Fn: FnDef,\n"
"\n"
" pub const FnDef = struct.{\n"
" pub const FnDef = struct {\n"
" fn_type: type,\n"
" inline_type: Inline,\n"
" calling_convention: CallingConvention,\n"
@ -7048,7 +7048,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
" return_type: type,\n"
" arg_names: [][] const u8,\n"
"\n"
" pub const Inline = enum.{\n"
" pub const Inline = enum {\n"
" Auto,\n"
" Always,\n"
" Never,\n"
@ -7074,7 +7074,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const FloatMode = enum.{\n"
"pub const FloatMode = enum {\n"
" Strict,\n"
" Optimized,\n"
"};\n\n");
@ -7083,7 +7083,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const Endian = enum.{\n"
"pub const Endian = enum {\n"
" Big,\n"
" Little,\n"
"};\n\n");

View File

@ -6297,7 +6297,7 @@ static ZigType *get_error_set_union(CodeGen *g, ErrorTableEntry **errors, ZigTyp
ZigType *err_set_type = new_type_table_entry(ZigTypeIdErrorSet);
buf_resize(&err_set_type->name, 0);
buf_appendf(&err_set_type->name, "error.{");
buf_appendf(&err_set_type->name, "error{");
for (uint32_t i = 0, count = set1->data.error_set.err_count; i < count; i += 1) {
assert(errors[set1->data.error_set.errors[i]->value] == set1->data.error_set.errors[i]);
@ -6348,7 +6348,7 @@ static ZigType *make_err_set_with_one_item(CodeGen *g, Scope *parent_scope, AstN
{
ZigType *err_set_type = new_type_table_entry(ZigTypeIdErrorSet);
buf_resize(&err_set_type->name, 0);
buf_appendf(&err_set_type->name, "error.{%s}", buf_ptr(&err_entry->name));
buf_appendf(&err_set_type->name, "error{%s}", buf_ptr(&err_entry->name));
err_set_type->type_ref = g->builtin_types.entry_global_error_set->type_ref;
err_set_type->di_type = g->builtin_types.entry_global_error_set->di_type;
err_set_type->data.error_set.err_count = 1;
@ -6912,7 +6912,6 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
assert(scope);
switch (node->type) {
case NodeTypeStructValueField:
case NodeTypeRoot:
case NodeTypeParamDecl:
case NodeTypeUse:
case NodeTypeSwitchProng:
@ -8202,7 +8201,7 @@ static ZigType *get_error_set_intersection(IrAnalyze *ira, ZigType *set1, ZigTyp
ZigType *err_set_type = new_type_table_entry(ZigTypeIdErrorSet);
buf_resize(&err_set_type->name, 0);
buf_appendf(&err_set_type->name, "error.{");
buf_appendf(&err_set_type->name, "error{");
for (uint32_t i = 0; i < set2->data.error_set.err_count; i += 1) {
ErrorTableEntry *error_entry = set2->data.error_set.errors[i];
@ -19467,7 +19466,7 @@ static IrInstruction *ir_analyze_instruction_check_switch_prongs(IrAnalyze *ira,
if (!instruction->have_else_prong) {
if (type_is_global_error_set(switch_type)) {
ir_add_error(ira, &instruction->base,
buf_sprintf("else prong required when switching on type 'error'"));
buf_sprintf("else prong required when switching on type 'anyerror'"));
return ira->codegen->invalid_instruction;
} else {
for (uint32_t i = 0; i < switch_type->data.error_set.err_count; i += 1) {
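A short sketch of the user-facing effect of the renames in this file: error-set type names now print as error{...}, and the global error set is spelled anyerror. The example names are hypothetical:

    const FileError = error{ NotFound, AccessDenied }; // name rendered as error{NotFound,AccessDenied}

    fn open(ok: bool) anyerror!void { // anyerror replaces the old bare `error` as the global set
        if (!ok) return error.NotFound;
    }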

File diff suppressed because it is too large

View File

@ -22,6 +22,4 @@ void ast_print(AstNode *node, int indent);
void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *context), void *context);
bool statement_terminates_without_semicolon(AstNode *node);
#endif

View File

@ -108,6 +108,7 @@ struct ZigKeyword {
static const struct ZigKeyword zig_keywords[] = {
{"align", TokenIdKeywordAlign},
{"and", TokenIdKeywordAnd},
{"anyerror", TokenIdKeywordAnyerror},
{"asm", TokenIdKeywordAsm},
{"async", TokenIdKeywordAsync},
{"await", TokenIdKeywordAwait},
@ -1548,6 +1549,7 @@ const char * token_name(TokenId id) {
case TokenIdFloatLiteral: return "FloatLiteral";
case TokenIdIntLiteral: return "IntLiteral";
case TokenIdKeywordAsync: return "async";
case TokenIdKeywordAnyerror: return "anyerror";
case TokenIdKeywordAwait: return "await";
case TokenIdKeywordResume: return "resume";
case TokenIdKeywordSuspend: return "suspend";
@ -1625,6 +1627,8 @@ const char * token_name(TokenId id) {
case TokenIdTimesPercent: return "*%";
case TokenIdTimesPercentEq: return "*%=";
case TokenIdBarBarEq: return "||=";
case TokenIdCount:
zig_unreachable();
}
return "(invalid token)";
}

View File

@ -50,6 +50,7 @@ enum TokenId {
TokenIdIntLiteral,
TokenIdKeywordAlign,
TokenIdKeywordAnd,
TokenIdKeywordAnyerror,
TokenIdKeywordAsm,
TokenIdKeywordAsync,
TokenIdKeywordAwait,
@ -125,6 +126,7 @@ enum TokenId {
TokenIdTimesEq,
TokenIdTimesPercent,
TokenIdTimesPercentEq,
TokenIdCount,
};
struct TokenFloatLit {

View File

@ -445,7 +445,7 @@ static AstNode *get_global(Context *c, Buf *name) {
static void add_top_level_decl(Context *c, Buf *name, AstNode *node) {
c->global_table.put(name, node);
c->root->data.root.top_level_decls.append(node);
c->root->data.container_decl.decls.append(node);
}
static AstNode *add_global_var(Context *c, Buf *var_name, AstNode *value_node) {
@ -4855,7 +4855,8 @@ Error parse_h_file(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, const
c->ctx = &ast_unit->getASTContext();
c->source_manager = &ast_unit->getSourceManager();
c->root = trans_create_node(c, NodeTypeRoot);
c->root = trans_create_node(c, NodeTypeContainerDecl);
c->root->data.container_decl.is_root = true;
ast_unit->visitLocalTopLevelDecls(c, decl_visitor);

View File

@ -194,6 +194,15 @@ struct Optional {
static inline Optional<T> some(T x) {
return {x, true};
}
static inline Optional<T> none() {
return {{}, false};
}
inline bool unwrap(T *res) {
*res = value;
return is_some;
}
};
template<typename T>
@ -201,6 +210,11 @@ struct Slice {
T *ptr;
size_t len;
inline T &at(size_t i) {
assert(i < len);
return &ptr[i];
}
inline Slice<T> slice(size_t start, size_t end) {
assert(end <= len);
assert(end >= start);
@ -223,6 +237,19 @@ struct Slice {
}
};
template<typename T, size_t n>
struct Array {
static const size_t len = n;
T items[n];
inline Slice<T> slice() {
return {
&items[0],
len,
};
}
};
static inline Slice<uint8_t> str(const char *literal) {
return {(uint8_t*)(literal), strlen(literal)};
}

View File

@ -10,7 +10,7 @@ pub fn ArrayList(comptime T: type) type {
}
pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
return struct.{
return struct {
const Self = @This();
/// Use toSlice instead of slicing this directly, because if you don't
@ -22,8 +22,8 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
/// Deinitialize with `deinit` or use `toOwnedSlice`.
pub fn init(allocator: *Allocator) Self {
return Self.{
.items = []align(A) T.{},
return Self{
.items = []align(A) T{},
.len = 0,
.allocator = allocator,
};
@ -70,7 +70,7 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
/// allocated with `allocator`.
/// Deinitialize with `deinit` or use `toOwnedSlice`.
pub fn fromOwnedSlice(allocator: *Allocator, slice: []align(A) T) Self {
return Self.{
return Self{
.items = slice,
.len = slice.len,
.allocator = allocator,
@ -179,7 +179,7 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
return self.pop();
}
pub const Iterator = struct.{
pub const Iterator = struct {
list: *const Self,
// how many items have we returned
count: usize,
@ -197,7 +197,7 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
};
pub fn iterator(self: *const Self) Iterator {
return Iterator.{
return Iterator{
.list = self,
.count = 0,
};
@ -251,7 +251,7 @@ test "std.ArrayList.basic" {
assert(list.pop() == 10);
assert(list.len == 9);
list.appendSlice([]const i32.{
list.appendSlice([]const i32{
1,
2,
3,
@ -262,7 +262,7 @@ test "std.ArrayList.basic" {
assert(list.pop() == 1);
assert(list.len == 9);
list.appendSlice([]const i32.{}) catch unreachable;
list.appendSlice([]const i32{}) catch unreachable;
assert(list.len == 9);
// can only set on indices < self.len
@ -382,7 +382,7 @@ test "std.ArrayList.insertSlice" {
try list.append(2);
try list.append(3);
try list.append(4);
try list.insertSlice(1, []const i32.{
try list.insertSlice(1, []const i32{
9,
8,
});
@ -393,7 +393,7 @@ test "std.ArrayList.insertSlice" {
assert(list.items[4] == 3);
assert(list.items[5] == 4);
const items = []const i32.{1};
const items = []const i32{1};
try list.insertSlice(0, items[0..0]);
assert(list.len == 6);
assert(list.items[0] == 1);

View File

@ -3,13 +3,13 @@ const AtomicOrder = builtin.AtomicOrder;
/// Thread-safe, lock-free integer
pub fn Int(comptime T: type) type {
return struct.{
return struct {
unprotected_value: T,
pub const Self = @This();
pub fn init(init_val: T) Self {
return Self.{ .unprotected_value = init_val };
return Self{ .unprotected_value = init_val };
}
/// Returns previous value

View File

@ -7,7 +7,7 @@ const assert = std.debug.assert;
/// Many producer, many consumer, non-allocating, thread-safe.
/// Uses a mutex to protect access.
pub fn Queue(comptime T: type) type {
return struct.{
return struct {
head: ?*Node,
tail: ?*Node,
mutex: std.Mutex,
@ -16,7 +16,7 @@ pub fn Queue(comptime T: type) type {
pub const Node = std.LinkedList(T).Node;
pub fn init() Self {
return Self.{
return Self{
.head = null,
.tail = null,
.mutex = std.Mutex.init(),
@ -111,7 +111,7 @@ pub fn Queue(comptime T: type) type {
}
pub fn dumpToStream(self: *Self, comptime Error: type, stream: *std.io.OutStream(Error)) Error!void {
const S = struct.{
const S = struct {
fn dumpRecursive(s: *std.io.OutStream(Error), optional_node: ?*Node, indent: usize) Error!void {
try s.writeByteNTimes(' ', indent);
if (optional_node) |node| {
@ -133,7 +133,7 @@ pub fn Queue(comptime T: type) type {
};
}
const Context = struct.{
const Context = struct {
allocator: *std.mem.Allocator,
queue: *Queue(i32),
put_sum: isize,
@ -161,7 +161,7 @@ test "std.atomic.Queue" {
var a = &fixed_buffer_allocator.allocator;
var queue = Queue(i32).init();
var context = Context.{
var context = Context{
.allocator = a,
.queue = &queue,
.put_sum = 0,
@ -205,7 +205,7 @@ fn startPuts(ctx: *Context) u8 {
while (put_count != 0) : (put_count -= 1) {
std.os.time.sleep(1); // let the os scheduler be our fuzz
const x = @bitCast(i32, r.random.scalar(u32));
const node = ctx.allocator.create(Queue(i32).Node.{
const node = ctx.allocator.create(Queue(i32).Node{
.prev = undefined,
.next = undefined,
.data = x,
@ -233,14 +233,14 @@ fn startGets(ctx: *Context) u8 {
test "std.atomic.Queue single-threaded" {
var queue = Queue(i32).init();
var node_0 = Queue(i32).Node.{
var node_0 = Queue(i32).Node{
.data = 0,
.next = undefined,
.prev = undefined,
};
queue.put(&node_0);
var node_1 = Queue(i32).Node.{
var node_1 = Queue(i32).Node{
.data = 1,
.next = undefined,
.prev = undefined,
@ -249,14 +249,14 @@ test "std.atomic.Queue single-threaded" {
assert(queue.get().?.data == 0);
var node_2 = Queue(i32).Node.{
var node_2 = Queue(i32).Node{
.data = 2,
.next = undefined,
.prev = undefined,
};
queue.put(&node_2);
var node_3 = Queue(i32).Node.{
var node_3 = Queue(i32).Node{
.data = 3,
.next = undefined,
.prev = undefined,
@ -267,7 +267,7 @@ test "std.atomic.Queue single-threaded" {
assert(queue.get().?.data == 2);
var node_4 = Queue(i32).Node.{
var node_4 = Queue(i32).Node{
.data = 4,
.next = undefined,
.prev = undefined,
@ -301,7 +301,7 @@ test "std.atomic.Queue dump" {
));
// Test a stream with one element
var node_0 = Queue(i32).Node.{
var node_0 = Queue(i32).Node{
.data = 1,
.next = undefined,
.prev = undefined,
@ -321,7 +321,7 @@ test "std.atomic.Queue dump" {
assert(mem.eql(u8, buffer[0..sos.pos], expected));
// Test a stream with two elements
var node_1 = Queue(i32).Node.{
var node_1 = Queue(i32).Node{
.data = 2,
.next = undefined,
.prev = undefined,

View File

@ -5,19 +5,19 @@ const AtomicOrder = builtin.AtomicOrder;
/// Many reader, many writer, non-allocating, thread-safe
/// Uses a spinlock to protect push() and pop()
pub fn Stack(comptime T: type) type {
return struct.{
return struct {
root: ?*Node,
lock: u8,
pub const Self = @This();
pub const Node = struct.{
pub const Node = struct {
next: ?*Node,
data: T,
};
pub fn init() Self {
return Self.{
return Self{
.root = null,
.lock = 0,
};
@ -54,7 +54,7 @@ pub fn Stack(comptime T: type) type {
}
const std = @import("../index.zig");
const Context = struct.{
const Context = struct {
allocator: *std.mem.Allocator,
stack: *Stack(i32),
put_sum: isize,
@ -81,7 +81,7 @@ test "std.atomic.stack" {
var a = &fixed_buffer_allocator.allocator;
var stack = Stack(i32).init();
var context = Context.{
var context = Context{
.allocator = a,
.stack = &stack,
.put_sum = 0,
@ -125,7 +125,7 @@ fn startPuts(ctx: *Context) u8 {
while (put_count != 0) : (put_count -= 1) {
std.os.time.sleep(1); // let the os scheduler be our fuzz
const x = @bitCast(i32, r.random.scalar(u32));
const node = ctx.allocator.create(Stack(i32).Node.{
const node = ctx.allocator.create(Stack(i32).Node{
.next = undefined,
.data = x,
}) catch unreachable;

View File

@ -6,21 +6,21 @@ pub const standard_alphabet_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopq
pub const standard_pad_char = '=';
pub const standard_encoder = Base64Encoder.init(standard_alphabet_chars, standard_pad_char);
pub const Base64Encoder = struct.{
pub const Base64Encoder = struct {
alphabet_chars: []const u8,
pad_char: u8,
/// a bunch of assertions, then simply pass the data right through.
pub fn init(alphabet_chars: []const u8, pad_char: u8) Base64Encoder {
assert(alphabet_chars.len == 64);
var char_in_alphabet = []bool.{false} ** 256;
var char_in_alphabet = []bool{false} ** 256;
for (alphabet_chars) |c| {
assert(!char_in_alphabet[c]);
assert(c != pad_char);
char_in_alphabet[c] = true;
}
return Base64Encoder.{
return Base64Encoder{
.alphabet_chars = alphabet_chars,
.pad_char = pad_char,
};
@ -77,7 +77,7 @@ pub const Base64Encoder = struct.{
pub const standard_decoder = Base64Decoder.init(standard_alphabet_chars, standard_pad_char);
pub const Base64Decoder = struct.{
pub const Base64Decoder = struct {
/// e.g. 'A' => 0.
/// undefined for any value not in the 64 alphabet chars.
char_to_index: [256]u8,
@ -89,9 +89,9 @@ pub const Base64Decoder = struct.{
pub fn init(alphabet_chars: []const u8, pad_char: u8) Base64Decoder {
assert(alphabet_chars.len == 64);
var result = Base64Decoder.{
var result = Base64Decoder{
.char_to_index = undefined,
.char_in_alphabet = []bool.{false} ** 256,
.char_in_alphabet = []bool{false} ** 256,
.pad_char = pad_char,
};
@ -153,13 +153,13 @@ pub const Base64Decoder = struct.{
}
};
pub const Base64DecoderWithIgnore = struct.{
pub const Base64DecoderWithIgnore = struct {
decoder: Base64Decoder,
char_is_ignored: [256]bool,
pub fn init(alphabet_chars: []const u8, pad_char: u8, ignore_chars: []const u8) Base64DecoderWithIgnore {
var result = Base64DecoderWithIgnore.{
var result = Base64DecoderWithIgnore{
.decoder = Base64Decoder.init(alphabet_chars, pad_char),
.char_is_ignored = []bool.{false} ** 256,
.char_is_ignored = []bool{false} ** 256,
};
for (ignore_chars) |c| {
@ -270,7 +270,7 @@ pub const Base64DecoderWithIgnore = struct.{
pub const standard_decoder_unsafe = Base64DecoderUnsafe.init(standard_alphabet_chars, standard_pad_char);
pub const Base64DecoderUnsafe = struct.{
pub const Base64DecoderUnsafe = struct {
/// e.g. 'A' => 0.
/// undefined for any value not in the 64 alphabet chars.
char_to_index: [256]u8,
@ -278,7 +278,7 @@ pub const Base64DecoderUnsafe = struct.{
pub fn init(alphabet_chars: []const u8, pad_char: u8) Base64DecoderUnsafe {
assert(alphabet_chars.len == 64);
var result = Base64DecoderUnsafe.{
var result = Base64DecoderUnsafe{
.char_to_index = undefined,
.pad_char = pad_char,
};
@ -432,7 +432,7 @@ fn testDecodeIgnoreSpace(expected_decoded: []const u8, encoded: []const u8) !voi
assert(mem.eql(u8, decoded[0..written], expected_decoded));
}
fn testError(encoded: []const u8, expected_err: error) !void {
fn testError(encoded: []const u8, expected_err: anyerror) !void {
const standard_decoder_ignore_space = Base64DecoderWithIgnore.init(standard_alphabet_chars, standard_pad_char, " ");
var buffer: [0x100]u8 = undefined;
if (standard_decoder.calcSize(encoded)) |decoded_size| {

View File

@ -6,13 +6,13 @@ const assert = std.debug.assert;
/// BufMap copies keys and values before they go into the map, and
/// frees them when they get removed.
pub const BufMap = struct.{
pub const BufMap = struct {
hash_map: BufMapHashMap,
const BufMapHashMap = HashMap([]const u8, []const u8, mem.hash_slice_u8, mem.eql_slice_u8);
pub fn init(allocator: *Allocator) BufMap {
var self = BufMap.{ .hash_map = BufMapHashMap.init(allocator) };
var self = BufMap{ .hash_map = BufMapHashMap.init(allocator) };
return self;
}

View File

@ -4,13 +4,13 @@ const mem = @import("mem.zig");
const Allocator = mem.Allocator;
const assert = std.debug.assert;
pub const BufSet = struct.{
pub const BufSet = struct {
hash_map: BufSetHashMap,
const BufSetHashMap = HashMap([]const u8, void, mem.hash_slice_u8, mem.eql_slice_u8);
pub fn init(a: *Allocator) BufSet {
var self = BufSet.{ .hash_map = BufSetHashMap.init(a) };
var self = BufSet{ .hash_map = BufSetHashMap.init(a) };
return self;
}

View File

@ -6,7 +6,7 @@ const assert = debug.assert;
const ArrayList = std.ArrayList;
/// A buffer that allocates memory and maintains a null byte at the end.
pub const Buffer = struct.{
pub const Buffer = struct {
list: ArrayList(u8),
/// Must deinitialize with deinit.
@ -28,7 +28,7 @@ pub const Buffer = struct.{
/// * ::replaceContents
/// * ::resize
pub fn initNull(allocator: *Allocator) Buffer {
return Buffer.{ .list = ArrayList(u8).init(allocator) };
return Buffer{ .list = ArrayList(u8).init(allocator) };
}
/// Must deinitialize with deinit.
@ -40,7 +40,7 @@ pub const Buffer = struct.{
/// allocated with `allocator`.
/// Must deinitialize with deinit.
pub fn fromOwnedSlice(allocator: *Allocator, slice: []u8) !Buffer {
var self = Buffer.{ .list = ArrayList(u8).fromOwnedSlice(allocator, slice) };
var self = Buffer{ .list = ArrayList(u8).fromOwnedSlice(allocator, slice) };
try self.list.append(0);
return self;
}
@ -55,13 +55,13 @@ pub const Buffer = struct.{
}
pub fn allocPrint(allocator: *Allocator, comptime format: []const u8, args: ...) !Buffer {
const countSize = struct.{
fn countSize(size: *usize, bytes: []const u8) (error.{}!void) {
const countSize = struct {
fn countSize(size: *usize, bytes: []const u8) (error{}!void) {
size.* += bytes.len;
}
}.countSize;
var size: usize = 0;
std.fmt.format(&size, error.{}, countSize, format, args) catch |err| switch (err) {};
std.fmt.format(&size, error{}, countSize, format, args) catch |err| switch (err) {};
var self = try Buffer.initSize(allocator, size);
assert((std.fmt.bufPrint(self.list.items, format, args) catch unreachable).len == size);
return self;

View File

@ -15,7 +15,7 @@ const BufSet = std.BufSet;
const BufMap = std.BufMap;
const fmt_lib = std.fmt;
pub const Builder = struct.{
pub const Builder = struct {
uninstall_tls: TopLevelStep,
install_tls: TopLevelStep,
have_uninstall_step: bool,
@ -48,7 +48,7 @@ pub const Builder = struct.{
cache_root: []const u8,
release_mode: ?builtin.Mode,
pub const CStd = enum.{
pub const CStd = enum {
C89,
C99,
C11,
@ -57,25 +57,25 @@ pub const Builder = struct.{
const UserInputOptionsMap = HashMap([]const u8, UserInputOption, mem.hash_slice_u8, mem.eql_slice_u8);
const AvailableOptionsMap = HashMap([]const u8, AvailableOption, mem.hash_slice_u8, mem.eql_slice_u8);
const AvailableOption = struct.{
const AvailableOption = struct {
name: []const u8,
type_id: TypeId,
description: []const u8,
};
const UserInputOption = struct.{
const UserInputOption = struct {
name: []const u8,
value: UserValue,
used: bool,
};
const UserValue = union(enum).{
const UserValue = union(enum) {
Flag: void,
Scalar: []const u8,
List: ArrayList([]const u8),
};
const TypeId = enum.{
const TypeId = enum {
Bool,
Int,
Float,
@ -83,7 +83,7 @@ pub const Builder = struct.{
List,
};
const TopLevelStep = struct.{
const TopLevelStep = struct {
step: Step,
description: []const u8,
};
@ -91,7 +91,7 @@ pub const Builder = struct.{
pub fn init(allocator: *Allocator, zig_exe: []const u8, build_root: []const u8, cache_root: []const u8) Builder {
const env_map = allocator.createOne(BufMap) catch unreachable;
env_map.* = os.getEnvMap(allocator) catch unreachable;
var self = Builder.{
var self = Builder{
.zig_exe = zig_exe,
.build_root = build_root,
.cache_root = os.path.relative(allocator, build_root, cache_root) catch unreachable,
@ -118,12 +118,12 @@ pub const Builder = struct.{
.lib_dir = undefined,
.exe_dir = undefined,
.installed_files = ArrayList([]const u8).init(allocator),
.uninstall_tls = TopLevelStep.{
.uninstall_tls = TopLevelStep{
.step = Step.init("uninstall", allocator, makeUninstall),
.description = "Remove build artifacts from prefix path",
},
.have_uninstall_step = false,
.install_tls = TopLevelStep.{
.install_tls = TopLevelStep{
.step = Step.initNoOp("install", allocator),
.description = "Copy build artifacts to prefix path",
},
@ -214,7 +214,7 @@ pub const Builder = struct.{
}
pub fn version(self: *const Builder, major: u32, minor: u32, patch: u32) Version {
return Version.{
return Version{
.major = major,
.minor = minor,
.patch = patch,
@ -269,7 +269,7 @@ pub const Builder = struct.{
return &self.uninstall_tls.step;
}
fn makeUninstall(uninstall_step: *Step) error!void {
fn makeUninstall(uninstall_step: *Step) anyerror!void {
const uninstall_tls = @fieldParentPtr(TopLevelStep, "step", uninstall_step);
const self = @fieldParentPtr(Builder, "uninstall_tls", uninstall_tls);
@ -283,7 +283,7 @@ pub const Builder = struct.{
// TODO remove empty directories
}
fn makeOneStep(self: *Builder, s: *Step) error!void {
fn makeOneStep(self: *Builder, s: *Step) anyerror!void {
if (s.loop_flag) {
warn("Dependency loop detected:\n {}\n", s.name);
return error.DependencyLoopDetected;
@ -358,7 +358,7 @@ pub const Builder = struct.{
pub fn option(self: *Builder, comptime T: type, name: []const u8, description: []const u8) ?T {
const type_id = comptime typeToEnum(T);
const available_option = AvailableOption.{
const available_option = AvailableOption{
.name = name,
.type_id = type_id,
.description = description,
@ -410,7 +410,7 @@ pub const Builder = struct.{
}
pub fn step(self: *Builder, name: []const u8, description: []const u8) *Step {
const step_info = self.allocator.create(TopLevelStep.{
const step_info = self.allocator.create(TopLevelStep{
.step = Step.initNoOp(name, self.allocator),
.description = description,
}) catch unreachable;
@ -437,9 +437,9 @@ pub const Builder = struct.{
pub fn addUserInputOption(self: *Builder, name: []const u8, value: []const u8) !bool {
const gop = try self.user_input_options.getOrPut(name);
if (!gop.found_existing) {
gop.kv.value = UserInputOption.{
gop.kv.value = UserInputOption{
.name = name,
.value = UserValue.{ .Scalar = value },
.value = UserValue{ .Scalar = value },
.used = false,
};
return false;
@ -452,18 +452,18 @@ pub const Builder = struct.{
var list = ArrayList([]const u8).init(self.allocator);
list.append(s) catch unreachable;
list.append(value) catch unreachable;
_ = self.user_input_options.put(name, UserInputOption.{
_ = self.user_input_options.put(name, UserInputOption{
.name = name,
.value = UserValue.{ .List = list },
.value = UserValue{ .List = list },
.used = false,
}) catch unreachable;
},
UserValue.List => |*list| {
// append to the list
list.append(value) catch unreachable;
_ = self.user_input_options.put(name, UserInputOption.{
_ = self.user_input_options.put(name, UserInputOption{
.name = name,
.value = UserValue.{ .List = list.* },
.value = UserValue{ .List = list.* },
.used = false,
}) catch unreachable;
},
@ -478,9 +478,9 @@ pub const Builder = struct.{
pub fn addUserInputFlag(self: *Builder, name: []const u8) !bool {
const gop = try self.user_input_options.getOrPut(name);
if (!gop.found_existing) {
gop.kv.value = UserInputOption.{
gop.kv.value = UserInputOption{
.name = name,
.value = UserValue.{ .Flag = {} },
.value = UserValue{ .Flag = {} },
.used = false,
};
return false;
@ -660,7 +660,7 @@ pub const Builder = struct.{
pub fn findProgram(self: *Builder, names: []const []const u8, paths: []const []const u8) ![]const u8 {
// TODO report error for ambiguous situations
const exe_extension = (Target.{ .Native = {} }).exeFileExt();
const exe_extension = (Target{ .Native = {} }).exeFileExt();
for (self.search_prefixes.toSliceConst()) |search_prefix| {
for (names) |name| {
if (os.path.isAbsolute(name)) {
@ -679,7 +679,7 @@ pub const Builder = struct.{
if (os.path.isAbsolute(name)) {
return name;
}
var it = mem.split(PATH, []u8.{os.path.delimiter});
var it = mem.split(PATH, []u8{os.path.delimiter});
while (it.next()) |path| {
const full_path = try os.path.join(self.allocator, path, self.fmt("{}{}", name, exe_extension));
if (os.path.real(self.allocator, full_path)) |real_path| {
@ -733,19 +733,19 @@ pub const Builder = struct.{
}
};
const Version = struct.{
const Version = struct {
major: u32,
minor: u32,
patch: u32,
};
const CrossTarget = struct.{
const CrossTarget = struct {
arch: builtin.Arch,
os: builtin.Os,
environ: builtin.Environ,
};
pub const Target = union(enum).{
pub const Target = union(enum) {
Native: void,
Cross: CrossTarget,
@ -800,7 +800,7 @@ pub const Target = union(enum).{
}
};
pub const LibExeObjStep = struct.{
pub const LibExeObjStep = struct {
step: Step,
builder: *Builder,
name: []const u8,
@ -842,12 +842,12 @@ pub const LibExeObjStep = struct.{
source_files: ArrayList([]const u8),
object_src: []const u8,
const Pkg = struct.{
const Pkg = struct {
name: []const u8,
path: []const u8,
};
const Kind = enum.{
const Kind = enum {
Exe,
Lib,
Obj,
@ -895,7 +895,7 @@ pub const LibExeObjStep = struct.{
}
fn initExtraArgs(builder: *Builder, name: []const u8, root_src: ?[]const u8, kind: Kind, static: bool, ver: Version) LibExeObjStep {
var self = LibExeObjStep.{
var self = LibExeObjStep{
.no_rosegment = false,
.strip = false,
.builder = builder,
@ -938,7 +938,7 @@ pub const LibExeObjStep = struct.{
}
fn initC(builder: *Builder, name: []const u8, kind: Kind, version: Version, static: bool) LibExeObjStep {
var self = LibExeObjStep.{
var self = LibExeObjStep{
.no_rosegment = false,
.builder = builder,
.name = name,
@ -1018,8 +1018,8 @@ pub const LibExeObjStep = struct.{
}
pub fn setTarget(self: *LibExeObjStep, target_arch: builtin.Arch, target_os: builtin.Os, target_environ: builtin.Environ) void {
self.target = Target.{
.Cross = CrossTarget.{
self.target = Target{
.Cross = CrossTarget{
.arch = target_arch,
.os = target_os,
.environ = target_environ,
@ -1148,7 +1148,7 @@ pub const LibExeObjStep = struct.{
pub fn addPackagePath(self: *LibExeObjStep, name: []const u8, pkg_index_path: []const u8) void {
assert(self.is_zig);
self.packages.append(Pkg.{
self.packages.append(Pkg{
.name = name,
.path = pkg_index_path,
}) catch unreachable;
@ -1640,7 +1640,7 @@ pub const LibExeObjStep = struct.{
}
};
pub const TestStep = struct.{
pub const TestStep = struct {
step: Step,
builder: *Builder,
root_src: []const u8,
@ -1660,7 +1660,7 @@ pub const TestStep = struct.{
pub fn init(builder: *Builder, root_src: []const u8) TestStep {
const step_name = builder.fmt("test {}", root_src);
return TestStep.{
return TestStep{
.step = Step.init(step_name, builder.allocator, make),
.builder = builder,
.root_src = root_src,
@ -1669,7 +1669,7 @@ pub const TestStep = struct.{
.name_prefix = "",
.filter = null,
.link_libs = BufSet.init(builder.allocator),
.target = Target.{ .Native = {} },
.target = Target{ .Native = {} },
.exec_cmd_args = null,
.include_dirs = ArrayList([]const u8).init(builder.allocator),
.lib_paths = ArrayList([]const u8).init(builder.allocator),
@ -1746,8 +1746,8 @@ pub const TestStep = struct.{
}
pub fn setTarget(self: *TestStep, target_arch: builtin.Arch, target_os: builtin.Os, target_environ: builtin.Environ) void {
self.target = Target.{
.Cross = CrossTarget.{
self.target = Target{
.Cross = CrossTarget{
.arch = target_arch,
.os = target_os,
.environ = target_environ,
@ -1875,7 +1875,7 @@ pub const TestStep = struct.{
}
};
pub const CommandStep = struct.{
pub const CommandStep = struct {
step: Step,
builder: *Builder,
argv: [][]const u8,
@ -1884,7 +1884,7 @@ pub const CommandStep = struct.{
/// ::argv is copied.
pub fn create(builder: *Builder, cwd: ?[]const u8, env_map: *const BufMap, argv: []const []const u8) *CommandStep {
const self = builder.allocator.create(CommandStep.{
const self = builder.allocator.create(CommandStep{
.builder = builder,
.step = Step.init(argv[0], builder.allocator, make),
.argv = builder.allocator.alloc([]u8, argv.len) catch unreachable,
@ -1905,7 +1905,7 @@ pub const CommandStep = struct.{
}
};
const InstallArtifactStep = struct.{
const InstallArtifactStep = struct {
step: Step,
builder: *Builder,
artifact: *LibExeObjStep,
@ -1919,7 +1919,7 @@ const InstallArtifactStep = struct.{
LibExeObjStep.Kind.Exe => builder.exe_dir,
LibExeObjStep.Kind.Lib => builder.lib_dir,
};
const self = builder.allocator.create(Self.{
const self = builder.allocator.create(Self{
.builder = builder,
.step = Step.init(builder.fmt("install {}", artifact.step.name), builder.allocator, make),
.artifact = artifact,
@ -1953,14 +1953,14 @@ const InstallArtifactStep = struct.{
}
};
pub const InstallFileStep = struct.{
pub const InstallFileStep = struct {
step: Step,
builder: *Builder,
src_path: []const u8,
dest_path: []const u8,
pub fn init(builder: *Builder, src_path: []const u8, dest_path: []const u8) InstallFileStep {
return InstallFileStep.{
return InstallFileStep{
.builder = builder,
.step = Step.init(builder.fmt("install {}", src_path), builder.allocator, make),
.src_path = src_path,
@ -1974,14 +1974,14 @@ pub const InstallFileStep = struct.{
}
};
pub const WriteFileStep = struct.{
pub const WriteFileStep = struct {
step: Step,
builder: *Builder,
file_path: []const u8,
data: []const u8,
pub fn init(builder: *Builder, file_path: []const u8, data: []const u8) WriteFileStep {
return WriteFileStep.{
return WriteFileStep{
.builder = builder,
.step = Step.init(builder.fmt("writefile {}", file_path), builder.allocator, make),
.file_path = file_path,
@ -2004,32 +2004,32 @@ pub const WriteFileStep = struct.{
}
};
pub const LogStep = struct.{
pub const LogStep = struct {
step: Step,
builder: *Builder,
data: []const u8,
pub fn init(builder: *Builder, data: []const u8) LogStep {
return LogStep.{
return LogStep{
.builder = builder,
.step = Step.init(builder.fmt("log {}", data), builder.allocator, make),
.data = data,
};
}
fn make(step: *Step) error!void {
fn make(step: *Step) anyerror!void {
const self = @fieldParentPtr(LogStep, "step", step);
warn("{}", self.data);
}
};
pub const RemoveDirStep = struct.{
pub const RemoveDirStep = struct {
step: Step,
builder: *Builder,
dir_path: []const u8,
pub fn init(builder: *Builder, dir_path: []const u8) RemoveDirStep {
return RemoveDirStep.{
return RemoveDirStep{
.builder = builder,
.step = Step.init(builder.fmt("RemoveDir {}", dir_path), builder.allocator, make),
.dir_path = dir_path,
@ -2047,15 +2047,15 @@ pub const RemoveDirStep = struct.{
}
};
pub const Step = struct.{
pub const Step = struct {
name: []const u8,
makeFn: fn (self: *Step) error!void,
makeFn: fn (self: *Step) anyerror!void,
dependencies: ArrayList(*Step),
loop_flag: bool,
done_flag: bool,
pub fn init(name: []const u8, allocator: *Allocator, makeFn: fn (*Step) error!void) Step {
return Step.{
pub fn init(name: []const u8, allocator: *Allocator, makeFn: fn (*Step) anyerror!void) Step {
return Step{
.name = name,
.makeFn = makeFn,
.dependencies = ArrayList(*Step).init(allocator),
@ -2078,7 +2078,7 @@ pub const Step = struct.{
self.dependencies.append(other) catch unreachable;
}
fn makeNoOp(self: *Step) error!void {}
fn makeNoOp(self: *Step) anyerror!void {}
};
fn doAtomicSymLinks(allocator: *Allocator, output_path: []const u8, filename_major_only: []const u8, filename_name_only: []const u8) !void {

View File

@ -52,18 +52,18 @@ pub const _errno = __error;
pub const in_port_t = u16;
pub const sa_family_t = u8;
pub const socklen_t = u32;
pub const sockaddr = extern union.{
pub const sockaddr = extern union {
in: sockaddr_in,
in6: sockaddr_in6,
};
pub const sockaddr_in = extern struct.{
pub const sockaddr_in = extern struct {
len: u8,
family: sa_family_t,
port: in_port_t,
addr: u32,
zero: [8]u8,
};
pub const sockaddr_in6 = extern struct.{
pub const sockaddr_in6 = extern struct {
len: u8,
family: sa_family_t,
port: in_port_t,
@ -72,23 +72,23 @@ pub const sockaddr_in6 = extern struct.{
scope_id: u32,
};
pub const timeval = extern struct.{
pub const timeval = extern struct {
tv_sec: isize,
tv_usec: isize,
};
pub const timezone = extern struct.{
pub const timezone = extern struct {
tz_minuteswest: i32,
tz_dsttime: i32,
};
pub const mach_timebase_info_data = extern struct.{
pub const mach_timebase_info_data = extern struct {
numer: u32,
denom: u32,
};
/// Renamed to Stat to not conflict with the stat function.
pub const Stat = extern struct.{
pub const Stat = extern struct {
dev: i32,
mode: u16,
nlink: u16,
@ -113,7 +113,7 @@ pub const Stat = extern struct.{
qspare: [2]i64,
};
pub const timespec = extern struct.{
pub const timespec = extern struct {
tv_sec: isize,
tv_nsec: isize,
};
@ -121,13 +121,13 @@ pub const timespec = extern struct.{
pub const sigset_t = u32;
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with function name.
pub const Sigaction = extern struct.{
pub const Sigaction = extern struct {
handler: extern fn (c_int) void,
sa_mask: sigset_t,
sa_flags: c_int,
};
pub const dirent = extern struct.{
pub const dirent = extern struct {
d_ino: usize,
d_seekoff: usize,
d_reclen: u16,
@ -136,13 +136,13 @@ pub const dirent = extern struct.{
d_name: u8, // field address is address of first byte of name
};
pub const pthread_attr_t = extern struct.{
pub const pthread_attr_t = extern struct {
__sig: c_long,
__opaque: [56]u8,
};
/// Renamed from `kevent` to `Kevent` to avoid conflict with function name.
pub const Kevent = extern struct.{
pub const Kevent = extern struct {
ident: usize,
filter: i16,
flags: u16,
@ -166,7 +166,7 @@ comptime {
assert(@byteOffsetOf(Kevent, "udata") == 24);
}
pub const kevent64_s = extern struct.{
pub const kevent64_s = extern struct {
ident: u64,
filter: i16,
flags: u16,

View File

@ -4,7 +4,7 @@ pub extern "c" fn getrandom(buf_ptr: [*]u8, buf_len: usize, flags: c_uint) c_int
extern "c" fn __errno_location() *c_int;
pub const _errno = __errno_location;
pub const pthread_attr_t = extern struct.{
pub const pthread_attr_t = extern struct {
__size: [56]u8,
__align: c_long,
};

View File

@ -20,14 +20,14 @@ const IMAGE_NT_OPTIONAL_HDR64_MAGIC = 0x20b;
const IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16;
const DEBUG_DIRECTORY = 6;
pub const CoffError = error.{
pub const CoffError = error{
InvalidPEMagic,
InvalidPEHeader,
InvalidMachine,
MissingCoffSection,
};
pub const Coff = struct.{
pub const Coff = struct {
in_file: os.File,
allocator: *mem.Allocator,
@ -56,10 +56,10 @@ pub const Coff = struct.{
var pe_header_magic: [4]u8 = undefined;
try in.readNoEof(pe_header_magic[0..]);
if (!mem.eql(u8, pe_header_magic, []u8.{ 'P', 'E', 0, 0 }))
if (!mem.eql(u8, pe_header_magic, []u8{ 'P', 'E', 0, 0 }))
return error.InvalidPEHeader;
self.coff_header = CoffHeader.{
self.coff_header = CoffHeader{
.machine = try in.readIntLe(u16),
.number_of_sections = try in.readIntLe(u16),
.timedate_stamp = try in.readIntLe(u32),
@ -98,7 +98,7 @@ pub const Coff = struct.{
return error.InvalidPEHeader;
for (self.pe_header.data_directory) |*data_dir| {
data_dir.* = OptionalHeader.DataDirectory.{
data_dir.* = OptionalHeader.DataDirectory{
.virtual_address = try in.readIntLe(u32),
.size = try in.readIntLe(u32),
};
@ -154,10 +154,10 @@ pub const Coff = struct.{
var i: u16 = 0;
while (i < self.coff_header.number_of_sections) : (i += 1) {
try in.readNoEof(name[0..]);
try self.sections.append(Section.{
.header = SectionHeader.{
try self.sections.append(Section{
.header = SectionHeader{
.name = name,
.misc = SectionHeader.Misc.{ .physical_address = try in.readIntLe(u32) },
.misc = SectionHeader.Misc{ .physical_address = try in.readIntLe(u32) },
.virtual_address = try in.readIntLe(u32),
.size_of_raw_data = try in.readIntLe(u32),
.pointer_to_raw_data = try in.readIntLe(u32),
@ -181,7 +181,7 @@ pub const Coff = struct.{
}
};
const CoffHeader = struct.{
const CoffHeader = struct {
machine: u16,
number_of_sections: u16,
timedate_stamp: u32,
@ -191,8 +191,8 @@ const CoffHeader = struct.{
characteristics: u16,
};
const OptionalHeader = struct.{
const DataDirectory = struct.{
const OptionalHeader = struct {
const DataDirectory = struct {
virtual_address: u32,
size: u32,
};
@ -201,12 +201,12 @@ const OptionalHeader = struct.{
data_directory: [IMAGE_NUMBEROF_DIRECTORY_ENTRIES]DataDirectory,
};
pub const Section = struct.{
pub const Section = struct {
header: SectionHeader,
};
const SectionHeader = struct.{
const Misc = union.{
const SectionHeader = struct {
const Misc = union {
physical_address: u32,
virtual_size: u32,
};

View File

@ -5,7 +5,7 @@ const debug = @import("../debug/index.zig");
const builtin = @import("builtin");
const htest = @import("test.zig");
const RoundParam = struct.{
const RoundParam = struct {
a: usize,
b: usize,
c: usize,
@ -15,7 +15,7 @@ const RoundParam = struct.{
};
fn Rp(a: usize, b: usize, c: usize, d: usize, x: usize, y: usize) RoundParam {
return RoundParam.{
return RoundParam{
.a = a,
.b = b,
.c = c,
@ -32,12 +32,12 @@ pub const Blake2s224 = Blake2s(224);
pub const Blake2s256 = Blake2s(256);
fn Blake2s(comptime out_len: usize) type {
return struct.{
return struct {
const Self = @This();
const block_length = 64;
const digest_length = out_len / 8;
const iv = [8]u32.{
const iv = [8]u32{
0x6A09E667,
0xBB67AE85,
0x3C6EF372,
@ -48,17 +48,17 @@ fn Blake2s(comptime out_len: usize) type {
0x5BE0CD19,
};
const sigma = [10][16]u8.{
[]const u8.{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8.{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
[]const u8.{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
[]const u8.{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
[]const u8.{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
[]const u8.{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
[]const u8.{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
[]const u8.{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
[]const u8.{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
[]const u8.{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
const sigma = [10][16]u8{
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
[]const u8{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
[]const u8{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
[]const u8{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
[]const u8{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
[]const u8{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
[]const u8{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
[]const u8{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
[]const u8{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
};
h: [8]u32,
@ -147,7 +147,7 @@ fn Blake2s(comptime out_len: usize) type {
v[13] ^= @intCast(u32, d.t >> 32);
if (last) v[14] = ~v[14];
const rounds = comptime []RoundParam.{
const rounds = comptime []RoundParam{
Rp(0, 4, 8, 12, 0, 1),
Rp(1, 5, 9, 13, 2, 3),
Rp(2, 6, 10, 14, 4, 5),
@ -250,7 +250,7 @@ test "blake2s256 streaming" {
}
test "blake2s256 aligned final" {
var block = []u8.{0} ** Blake2s256.block_length;
var block = []u8{0} ** Blake2s256.block_length;
var out: [Blake2s256.digest_length]u8 = undefined;
var h = Blake2s256.init();
@ -265,12 +265,12 @@ pub const Blake2b384 = Blake2b(384);
pub const Blake2b512 = Blake2b(512);
fn Blake2b(comptime out_len: usize) type {
return struct.{
return struct {
const Self = @This();
const block_length = 128;
const digest_length = out_len / 8;
const iv = [8]u64.{
const iv = [8]u64{
0x6a09e667f3bcc908,
0xbb67ae8584caa73b,
0x3c6ef372fe94f82b,
@ -281,19 +281,19 @@ fn Blake2b(comptime out_len: usize) type {
0x5be0cd19137e2179,
};
const sigma = [12][16]u8.{
[]const u8.{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8.{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
[]const u8.{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
[]const u8.{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
[]const u8.{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
[]const u8.{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
[]const u8.{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
[]const u8.{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
[]const u8.{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
[]const u8.{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
[]const u8.{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8.{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
const sigma = [12][16]u8{
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
[]const u8{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
[]const u8{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
[]const u8{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
[]const u8{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
[]const u8{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
[]const u8{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
[]const u8{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
[]const u8{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
};
h: [8]u64,
@ -380,7 +380,7 @@ fn Blake2b(comptime out_len: usize) type {
v[13] ^= @intCast(u64, d.t >> 64);
if (last) v[14] = ~v[14];
const rounds = comptime []RoundParam.{
const rounds = comptime []RoundParam{
Rp(0, 4, 8, 12, 0, 1),
Rp(1, 5, 9, 13, 2, 3),
Rp(2, 6, 10, 14, 4, 5),
@ -483,7 +483,7 @@ test "blake2b512 streaming" {
}
test "blake2b512 aligned final" {
var block = []u8.{0} ** Blake2b512.block_length;
var block = []u8{0} ** Blake2b512.block_length;
var out: [Blake2b512.digest_length]u8 = undefined;
var h = Blake2b512.init();

View File

@ -7,7 +7,7 @@ const assert = std.debug.assert;
const builtin = @import("builtin");
const maxInt = std.math.maxInt;
const QuarterRound = struct.{
const QuarterRound = struct {
a: usize,
b: usize,
c: usize,
@ -15,7 +15,7 @@ const QuarterRound = struct.{
};
fn Rp(a: usize, b: usize, c: usize, d: usize) QuarterRound {
return QuarterRound.{
return QuarterRound{
.a = a,
.b = b,
.c = c,
@ -32,7 +32,7 @@ fn salsa20_wordtobyte(out: []u8, input: [16]u32) void {
for (x) |_, i|
x[i] = input[i];
const rounds = comptime []QuarterRound.{
const rounds = comptime []QuarterRound{
Rp(0, 4, 8, 12),
Rp(1, 5, 9, 13),
Rp(2, 6, 10, 14),
@ -69,7 +69,7 @@ fn chaCha20_internal(out: []u8, in: []const u8, key: [8]u32, counter: [4]u32) vo
var cursor: usize = 0;
const c = "expand 32-byte k";
const constant_le = []u32.{
const constant_le = []u32{
mem.readIntLE(u32, c[0..4]),
mem.readIntLE(u32, c[4..8]),
mem.readIntLE(u32, c[8..12]),
@ -183,7 +183,7 @@ pub fn chaCha20With64BitNonce(out: []u8, in: []const u8, counter: u64, key: [32]
// https://tools.ietf.org/html/rfc7539#section-2.4.2
test "crypto.chacha20 test vector sunscreen" {
const expected_result = []u8.{
const expected_result = []u8{
0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80,
0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81,
0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2,
@ -202,13 +202,13 @@ test "crypto.chacha20 test vector sunscreen" {
};
const input = "Ladies and Gentlemen of the class of '99: If I could offer you only one tip for the future, sunscreen would be it.";
var result: [114]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 1, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 30, 31,
};
const nonce = []u8.{
const nonce = []u8{
0, 0, 0, 0,
0, 0, 0, 0x4a,
0, 0, 0, 0,
@ -225,7 +225,7 @@ test "crypto.chacha20 test vector sunscreen" {
// https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04#section-7
test "crypto.chacha20 test vector 1" {
const expected_result = []u8.{
const expected_result = []u8{
0x76, 0xb8, 0xe0, 0xad, 0xa0, 0xf1, 0x3d, 0x90,
0x40, 0x5d, 0x6a, 0xe5, 0x53, 0x86, 0xbd, 0x28,
0xbd, 0xd2, 0x19, 0xb8, 0xa0, 0x8d, 0xed, 0x1a,
@ -235,7 +235,7 @@ test "crypto.chacha20 test vector 1" {
0x6a, 0x43, 0xb8, 0xf4, 0x15, 0x18, 0xa1, 0x1c,
0xc3, 0x87, 0xb6, 0x69, 0xb2, 0xee, 0x65, 0x86,
};
const input = []u8.{
const input = []u8{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@ -246,20 +246,20 @@ test "crypto.chacha20 test vector 1" {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
};
var result: [64]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
};
const nonce = []u8.{ 0, 0, 0, 0, 0, 0, 0, 0 };
const nonce = []u8{ 0, 0, 0, 0, 0, 0, 0, 0 };
chaCha20With64BitNonce(result[0..], input[0..], 0, key, nonce);
assert(mem.eql(u8, expected_result, result));
}
test "crypto.chacha20 test vector 2" {
const expected_result = []u8.{
const expected_result = []u8{
0x45, 0x40, 0xf0, 0x5a, 0x9f, 0x1f, 0xb2, 0x96,
0xd7, 0x73, 0x6e, 0x7b, 0x20, 0x8e, 0x3c, 0x96,
0xeb, 0x4f, 0xe1, 0x83, 0x46, 0x88, 0xd2, 0x60,
@ -269,7 +269,7 @@ test "crypto.chacha20 test vector 2" {
0x53, 0xd7, 0x92, 0xb1, 0xc4, 0x3f, 0xea, 0x81,
0x7e, 0x9a, 0xd2, 0x75, 0xae, 0x54, 0x69, 0x63,
};
const input = []u8.{
const input = []u8{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@ -280,20 +280,20 @@ test "crypto.chacha20 test vector 2" {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
};
var result: [64]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1,
};
const nonce = []u8.{ 0, 0, 0, 0, 0, 0, 0, 0 };
const nonce = []u8{ 0, 0, 0, 0, 0, 0, 0, 0 };
chaCha20With64BitNonce(result[0..], input[0..], 0, key, nonce);
assert(mem.eql(u8, expected_result, result));
}
test "crypto.chacha20 test vector 3" {
const expected_result = []u8.{
const expected_result = []u8{
0xde, 0x9c, 0xba, 0x7b, 0xf3, 0xd6, 0x9e, 0xf5,
0xe7, 0x86, 0xdc, 0x63, 0x97, 0x3f, 0x65, 0x3a,
0x0b, 0x49, 0xe0, 0x15, 0xad, 0xbf, 0xf7, 0x13,
@ -303,7 +303,7 @@ test "crypto.chacha20 test vector 3" {
0x52, 0x77, 0x06, 0x2e, 0xb7, 0xa0, 0x43, 0x3e,
0x44, 0x5f, 0x41, 0xe3,
};
const input = []u8.{
const input = []u8{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@ -314,20 +314,20 @@ test "crypto.chacha20 test vector 3" {
0x00, 0x00, 0x00, 0x00,
};
var result: [60]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
};
const nonce = []u8.{ 0, 0, 0, 0, 0, 0, 0, 1 };
const nonce = []u8{ 0, 0, 0, 0, 0, 0, 0, 1 };
chaCha20With64BitNonce(result[0..], input[0..], 0, key, nonce);
assert(mem.eql(u8, expected_result, result));
}
test "crypto.chacha20 test vector 4" {
const expected_result = []u8.{
const expected_result = []u8{
0xef, 0x3f, 0xdf, 0xd6, 0xc6, 0x15, 0x78, 0xfb,
0xf5, 0xcf, 0x35, 0xbd, 0x3d, 0xd3, 0x3b, 0x80,
0x09, 0x63, 0x16, 0x34, 0xd2, 0x1e, 0x42, 0xac,
@ -337,7 +337,7 @@ test "crypto.chacha20 test vector 4" {
0x5d, 0xdc, 0x49, 0x7a, 0x0b, 0x46, 0x6e, 0x7d,
0x6b, 0xbd, 0xb0, 0x04, 0x1b, 0x2f, 0x58, 0x6b,
};
const input = []u8.{
const input = []u8{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@ -348,20 +348,20 @@ test "crypto.chacha20 test vector 4" {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
};
var result: [64]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
};
const nonce = []u8.{ 1, 0, 0, 0, 0, 0, 0, 0 };
const nonce = []u8{ 1, 0, 0, 0, 0, 0, 0, 0 };
chaCha20With64BitNonce(result[0..], input[0..], 0, key, nonce);
assert(mem.eql(u8, expected_result, result));
}
test "crypto.chacha20 test vector 5" {
const expected_result = []u8.{
const expected_result = []u8{
0xf7, 0x98, 0xa1, 0x89, 0xf1, 0x95, 0xe6, 0x69,
0x82, 0x10, 0x5f, 0xfb, 0x64, 0x0b, 0xb7, 0x75,
0x7f, 0x57, 0x9d, 0xa3, 0x16, 0x02, 0xfc, 0x93,
@ -398,7 +398,7 @@ test "crypto.chacha20 test vector 5" {
0x87, 0x46, 0xd4, 0x52, 0x4d, 0x38, 0x40, 0x7a,
0x6d, 0xeb, 0x3a, 0xb7, 0x8f, 0xab, 0x78, 0xc9,
};
const input = []u8.{
const input = []u8{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
@ -418,13 +418,13 @@ test "crypto.chacha20 test vector 5" {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
};
var result: [256]u8 = undefined;
const key = []u8.{
const key = []u8{
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
};
const nonce = []u8.{
const nonce = []u8{
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
};

View File

@ -9,7 +9,7 @@ pub const HmacSha256 = Hmac(crypto.Sha256);
pub const HmacBlake2s256 = Hmac(crypto.Blake2s256);
pub fn Hmac(comptime Hash: type) type {
return struct.{
return struct {
const Self = @This();
pub const mac_length = Hash.digest_length;
pub const minimum_key_length = 0;

View File

@ -5,7 +5,7 @@ const builtin = @import("builtin");
const debug = @import("../debug/index.zig");
const fmt = @import("../fmt/index.zig");
const RoundParam = struct.{
const RoundParam = struct {
a: usize,
b: usize,
c: usize,
@ -16,7 +16,7 @@ const RoundParam = struct.{
};
fn Rp(a: usize, b: usize, c: usize, d: usize, k: usize, s: u32, t: u32) RoundParam {
return RoundParam.{
return RoundParam{
.a = a,
.b = b,
.c = c,
@ -27,7 +27,7 @@ fn Rp(a: usize, b: usize, c: usize, d: usize, k: usize, s: u32, t: u32) RoundPar
};
}
pub const Md5 = struct.{
pub const Md5 = struct {
const Self = @This();
const block_length = 64;
const digest_length = 16;
@ -131,14 +131,14 @@ pub const Md5 = struct.{
s[i] |= u32(b[i * 4 + 3]) << 24;
}
var v: [4]u32 = []u32.{
var v: [4]u32 = []u32{
d.s[0],
d.s[1],
d.s[2],
d.s[3],
};
const round0 = comptime []RoundParam.{
const round0 = comptime []RoundParam{
Rp(0, 1, 2, 3, 0, 7, 0xD76AA478),
Rp(3, 0, 1, 2, 1, 12, 0xE8C7B756),
Rp(2, 3, 0, 1, 2, 17, 0x242070DB),
@ -161,7 +161,7 @@ pub const Md5 = struct.{
v[r.a] = v[r.b] +% math.rotl(u32, v[r.a], r.s);
}
const round1 = comptime []RoundParam.{
const round1 = comptime []RoundParam{
Rp(0, 1, 2, 3, 1, 5, 0xF61E2562),
Rp(3, 0, 1, 2, 6, 9, 0xC040B340),
Rp(2, 3, 0, 1, 11, 14, 0x265E5A51),
@ -184,7 +184,7 @@ pub const Md5 = struct.{
v[r.a] = v[r.b] +% math.rotl(u32, v[r.a], r.s);
}
const round2 = comptime []RoundParam.{
const round2 = comptime []RoundParam{
Rp(0, 1, 2, 3, 5, 4, 0xFFFA3942),
Rp(3, 0, 1, 2, 8, 11, 0x8771F681),
Rp(2, 3, 0, 1, 11, 16, 0x6D9D6122),
@ -207,7 +207,7 @@ pub const Md5 = struct.{
v[r.a] = v[r.b] +% math.rotl(u32, v[r.a], r.s);
}
const round3 = comptime []RoundParam.{
const round3 = comptime []RoundParam{
Rp(0, 1, 2, 3, 0, 6, 0xF4292244),
Rp(3, 0, 1, 2, 7, 10, 0x432AFF97),
Rp(2, 3, 0, 1, 14, 15, 0xAB9423A7),
@ -271,7 +271,7 @@ test "md5 streaming" {
}
test "md5 aligned final" {
var block = []u8.{0} ** Md5.block_length;
var block = []u8{0} ** Md5.block_length;
var out: [Md5.digest_length]u8 = undefined;
var h = Md5.init();

View File

@ -9,7 +9,7 @@ const Endian = builtin.Endian;
const readInt = std.mem.readInt;
const writeInt = std.mem.writeInt;
pub const Poly1305 = struct.{
pub const Poly1305 = struct {
const Self = @This();
pub const mac_length = 16;

View File

@ -4,7 +4,7 @@ const endian = @import("../endian.zig");
const debug = @import("../debug/index.zig");
const builtin = @import("builtin");
const RoundParam = struct.{
const RoundParam = struct {
a: usize,
b: usize,
c: usize,
@ -14,7 +14,7 @@ const RoundParam = struct.{
};
fn Rp(a: usize, b: usize, c: usize, d: usize, e: usize, i: u32) RoundParam {
return RoundParam.{
return RoundParam{
.a = a,
.b = b,
.c = c,
@ -24,7 +24,7 @@ fn Rp(a: usize, b: usize, c: usize, d: usize, e: usize, i: u32) RoundParam {
};
}
pub const Sha1 = struct.{
pub const Sha1 = struct {
const Self = @This();
const block_length = 64;
const digest_length = 20;
@ -118,7 +118,7 @@ pub const Sha1 = struct.{
var s: [16]u32 = undefined;
var v: [5]u32 = []u32.{
var v: [5]u32 = []u32{
d.s[0],
d.s[1],
d.s[2],
@ -126,7 +126,7 @@ pub const Sha1 = struct.{
d.s[4],
};
const round0a = comptime []RoundParam.{
const round0a = comptime []RoundParam{
Rp(0, 1, 2, 3, 4, 0),
Rp(4, 0, 1, 2, 3, 1),
Rp(3, 4, 0, 1, 2, 2),
@ -151,7 +151,7 @@ pub const Sha1 = struct.{
v[r.b] = math.rotl(u32, v[r.b], u32(30));
}
const round0b = comptime []RoundParam.{
const round0b = comptime []RoundParam{
Rp(4, 0, 1, 2, 3, 16),
Rp(3, 4, 0, 1, 2, 17),
Rp(2, 3, 4, 0, 1, 18),
@ -165,7 +165,7 @@ pub const Sha1 = struct.{
v[r.b] = math.rotl(u32, v[r.b], u32(30));
}
const round1 = comptime []RoundParam.{
const round1 = comptime []RoundParam{
Rp(0, 1, 2, 3, 4, 20),
Rp(4, 0, 1, 2, 3, 21),
Rp(3, 4, 0, 1, 2, 22),
@ -195,7 +195,7 @@ pub const Sha1 = struct.{
v[r.b] = math.rotl(u32, v[r.b], u32(30));
}
const round2 = comptime []RoundParam.{
const round2 = comptime []RoundParam{
Rp(0, 1, 2, 3, 4, 40),
Rp(4, 0, 1, 2, 3, 41),
Rp(3, 4, 0, 1, 2, 42),
@ -225,7 +225,7 @@ pub const Sha1 = struct.{
v[r.b] = math.rotl(u32, v[r.b], u32(30));
}
const round3 = comptime []RoundParam.{
const round3 = comptime []RoundParam{
Rp(0, 1, 2, 3, 4, 60),
Rp(4, 0, 1, 2, 3, 61),
Rp(3, 4, 0, 1, 2, 62),
@ -292,7 +292,7 @@ test "sha1 streaming" {
}
test "sha1 aligned final" {
var block = []u8.{0} ** Sha1.block_length;
var block = []u8{0} ** Sha1.block_length;
var out: [Sha1.digest_length]u8 = undefined;
var h = Sha1.init();

View File

@ -8,7 +8,7 @@ const htest = @import("test.zig");
/////////////////////
// Sha224 + Sha256
const RoundParam256 = struct.{
const RoundParam256 = struct {
a: usize,
b: usize,
c: usize,
@ -22,7 +22,7 @@ const RoundParam256 = struct.{
};
fn Rp256(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize, g: usize, h: usize, i: usize, k: u32) RoundParam256 {
return RoundParam256.{
return RoundParam256{
.a = a,
.b = b,
.c = c,
@ -36,7 +36,7 @@ fn Rp256(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize, g: usize, h
};
}
const Sha2Params32 = struct.{
const Sha2Params32 = struct {
iv0: u32,
iv1: u32,
iv2: u32,
@ -48,7 +48,7 @@ const Sha2Params32 = struct.{
out_len: usize,
};
const Sha224Params = Sha2Params32.{
const Sha224Params = Sha2Params32{
.iv0 = 0xC1059ED8,
.iv1 = 0x367CD507,
.iv2 = 0x3070DD17,
@ -60,7 +60,7 @@ const Sha224Params = Sha2Params32.{
.out_len = 224,
};
const Sha256Params = Sha2Params32.{
const Sha256Params = Sha2Params32{
.iv0 = 0x6A09E667,
.iv1 = 0xBB67AE85,
.iv2 = 0x3C6EF372,
@ -76,7 +76,7 @@ pub const Sha224 = Sha2_32(Sha224Params);
pub const Sha256 = Sha2_32(Sha256Params);
fn Sha2_32(comptime params: Sha2Params32) type {
return struct.{
return struct {
const Self = @This();
const block_length = 64;
const digest_length = params.out_len / 8;
@ -188,7 +188,7 @@ fn Sha2_32(comptime params: Sha2Params32) type {
s[i] = s[i - 16] +% s[i - 7] +% (math.rotr(u32, s[i - 15], u32(7)) ^ math.rotr(u32, s[i - 15], u32(18)) ^ (s[i - 15] >> 3)) +% (math.rotr(u32, s[i - 2], u32(17)) ^ math.rotr(u32, s[i - 2], u32(19)) ^ (s[i - 2] >> 10));
}
var v: [8]u32 = []u32.{
var v: [8]u32 = []u32{
d.s[0],
d.s[1],
d.s[2],
@ -199,7 +199,7 @@ fn Sha2_32(comptime params: Sha2Params32) type {
d.s[7],
};
const round0 = comptime []RoundParam256.{
const round0 = comptime []RoundParam256{
Rp256(0, 1, 2, 3, 4, 5, 6, 7, 0, 0x428A2F98),
Rp256(7, 0, 1, 2, 3, 4, 5, 6, 1, 0x71374491),
Rp256(6, 7, 0, 1, 2, 3, 4, 5, 2, 0xB5C0FBCF),
@ -338,7 +338,7 @@ test "sha256 streaming" {
}
test "sha256 aligned final" {
var block = []u8.{0} ** Sha256.block_length;
var block = []u8{0} ** Sha256.block_length;
var out: [Sha256.digest_length]u8 = undefined;
var h = Sha256.init();
@ -349,7 +349,7 @@ test "sha256 aligned final" {
/////////////////////
// Sha384 + Sha512
const RoundParam512 = struct.{
const RoundParam512 = struct {
a: usize,
b: usize,
c: usize,
@ -363,7 +363,7 @@ const RoundParam512 = struct.{
};
fn Rp512(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize, g: usize, h: usize, i: usize, k: u64) RoundParam512 {
return RoundParam512.{
return RoundParam512{
.a = a,
.b = b,
.c = c,
@ -377,7 +377,7 @@ fn Rp512(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize, g: usize, h
};
}
const Sha2Params64 = struct.{
const Sha2Params64 = struct {
iv0: u64,
iv1: u64,
iv2: u64,
@ -389,7 +389,7 @@ const Sha2Params64 = struct.{
out_len: usize,
};
const Sha384Params = Sha2Params64.{
const Sha384Params = Sha2Params64{
.iv0 = 0xCBBB9D5DC1059ED8,
.iv1 = 0x629A292A367CD507,
.iv2 = 0x9159015A3070DD17,
@ -401,7 +401,7 @@ const Sha384Params = Sha2Params64.{
.out_len = 384,
};
const Sha512Params = Sha2Params64.{
const Sha512Params = Sha2Params64{
.iv0 = 0x6A09E667F3BCC908,
.iv1 = 0xBB67AE8584CAA73B,
.iv2 = 0x3C6EF372FE94F82B,
@ -417,7 +417,7 @@ pub const Sha384 = Sha2_64(Sha384Params);
pub const Sha512 = Sha2_64(Sha512Params);
fn Sha2_64(comptime params: Sha2Params64) type {
return struct.{
return struct {
const Self = @This();
const block_length = 128;
const digest_length = params.out_len / 8;
@ -533,7 +533,7 @@ fn Sha2_64(comptime params: Sha2Params64) type {
s[i] = s[i - 16] +% s[i - 7] +% (math.rotr(u64, s[i - 15], u64(1)) ^ math.rotr(u64, s[i - 15], u64(8)) ^ (s[i - 15] >> 7)) +% (math.rotr(u64, s[i - 2], u64(19)) ^ math.rotr(u64, s[i - 2], u64(61)) ^ (s[i - 2] >> 6));
}
var v: [8]u64 = []u64.{
var v: [8]u64 = []u64{
d.s[0],
d.s[1],
d.s[2],
@ -544,7 +544,7 @@ fn Sha2_64(comptime params: Sha2Params64) type {
d.s[7],
};
const round0 = comptime []RoundParam512.{
const round0 = comptime []RoundParam512{
Rp512(0, 1, 2, 3, 4, 5, 6, 7, 0, 0x428A2F98D728AE22),
Rp512(7, 0, 1, 2, 3, 4, 5, 6, 1, 0x7137449123EF65CD),
Rp512(6, 7, 0, 1, 2, 3, 4, 5, 2, 0xB5C0FBCFEC4D3B2F),
@ -715,7 +715,7 @@ test "sha512 streaming" {
}
test "sha512 aligned final" {
var block = []u8.{0} ** Sha512.block_length;
var block = []u8{0} ** Sha512.block_length;
var out: [Sha512.digest_length]u8 = undefined;
var h = Sha512.init();

View File

@ -11,7 +11,7 @@ pub const Sha3_384 = Keccak(384, 0x06);
pub const Sha3_512 = Keccak(512, 0x06);
fn Keccak(comptime bits: usize, comptime delim: u8) type {
return struct.{
return struct {
const Self = @This();
const block_length = 200;
const digest_length = bits / 8;
@ -86,7 +86,7 @@ fn Keccak(comptime bits: usize, comptime delim: u8) type {
};
}
const RC = []const u64.{
const RC = []const u64{
0x0000000000000001, 0x0000000000008082, 0x800000000000808a, 0x8000000080008000,
0x000000000000808b, 0x0000000080000001, 0x8000000080008081, 0x8000000000008009,
0x000000000000008a, 0x0000000000000088, 0x0000000080008009, 0x000000008000000a,
@ -95,15 +95,15 @@ const RC = []const u64.{
0x8000000080008081, 0x8000000000008080, 0x0000000080000001, 0x8000000080008008,
};
const ROTC = []const usize.{
const ROTC = []const usize{
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
};
const PIL = []const usize.{
const PIL = []const usize{
10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
};
const M5 = []const usize.{
const M5 = []const usize{
0, 1, 2, 3, 4, 0, 1, 2, 3, 4,
};
@ -115,9 +115,9 @@ fn keccak_f(comptime F: usize, d: []u8) void {
break :x 12 + 2 * math.log2(B);
};
var s = []const u64.{0} ** 25;
var t = []const u64.{0} ** 1;
var c = []const u64.{0} ** 5;
var s = []const u64{0} ** 25;
var t = []const u64{0} ** 1;
var c = []const u64{0} ** 5;
for (s) |*r, i| {
r.* = mem.readIntLE(u64, d[8 * i .. 8 * i + 8]);
@ -224,7 +224,7 @@ test "sha3-256 streaming" {
}
test "sha3-256 aligned final" {
var block = []u8.{0} ** Sha3_256.block_length;
var block = []u8{0} ** Sha3_256.block_length;
var out: [Sha3_256.digest_length]u8 = undefined;
var h = Sha3_256.init();
@ -295,7 +295,7 @@ test "sha3-512 streaming" {
}
test "sha3-512 aligned final" {
var block = []u8.{0} ** Sha3_512.block_length;
var block = []u8{0} ** Sha3_512.block_length;
var out: [Sha3_512.digest_length]u8 = undefined;
var h = Sha3_512.init();

View File

@ -9,20 +9,20 @@ const MiB = 1024 * KiB;
var prng = std.rand.DefaultPrng.init(0);
const Crypto = struct.{
const Crypto = struct {
ty: type,
name: []const u8,
};
const hashes = []Crypto.{
Crypto.{ .ty = crypto.Md5, .name = "md5" },
Crypto.{ .ty = crypto.Sha1, .name = "sha1" },
Crypto.{ .ty = crypto.Sha256, .name = "sha256" },
Crypto.{ .ty = crypto.Sha512, .name = "sha512" },
Crypto.{ .ty = crypto.Sha3_256, .name = "sha3-256" },
Crypto.{ .ty = crypto.Sha3_512, .name = "sha3-512" },
Crypto.{ .ty = crypto.Blake2s256, .name = "blake2s" },
Crypto.{ .ty = crypto.Blake2b512, .name = "blake2b" },
const hashes = []Crypto{
Crypto{ .ty = crypto.Md5, .name = "md5" },
Crypto{ .ty = crypto.Sha1, .name = "sha1" },
Crypto{ .ty = crypto.Sha256, .name = "sha256" },
Crypto{ .ty = crypto.Sha512, .name = "sha512" },
Crypto{ .ty = crypto.Sha3_256, .name = "sha3-256" },
Crypto{ .ty = crypto.Sha3_512, .name = "sha3-512" },
Crypto{ .ty = crypto.Blake2s256, .name = "blake2s" },
Crypto{ .ty = crypto.Blake2b512, .name = "blake2b" },
};
pub fn benchmarkHash(comptime Hash: var, comptime bytes: comptime_int) !u64 {
@ -45,11 +45,11 @@ pub fn benchmarkHash(comptime Hash: var, comptime bytes: comptime_int) !u64 {
return throughput;
}
const macs = []Crypto.{
Crypto.{ .ty = crypto.Poly1305, .name = "poly1305" },
Crypto.{ .ty = crypto.HmacMd5, .name = "hmac-md5" },
Crypto.{ .ty = crypto.HmacSha1, .name = "hmac-sha1" },
Crypto.{ .ty = crypto.HmacSha256, .name = "hmac-sha256" },
const macs = []Crypto{
Crypto{ .ty = crypto.Poly1305, .name = "poly1305" },
Crypto{ .ty = crypto.HmacMd5, .name = "hmac-md5" },
Crypto{ .ty = crypto.HmacSha1, .name = "hmac-sha1" },
Crypto{ .ty = crypto.HmacSha256, .name = "hmac-sha256" },
};
pub fn benchmarkMac(comptime Mac: var, comptime bytes: comptime_int) !u64 {
@ -75,7 +75,7 @@ pub fn benchmarkMac(comptime Mac: var, comptime bytes: comptime_int) !u64 {
return throughput;
}
const exchanges = []Crypto.{Crypto.{ .ty = crypto.X25519, .name = "x25519" }};
const exchanges = []Crypto{Crypto{ .ty = crypto.X25519, .name = "x25519" }};
pub fn benchmarkKeyExchange(comptime DhKeyExchange: var, comptime exchange_count: comptime_int) !u64 {
std.debug.assert(DhKeyExchange.minimum_key_length >= DhKeyExchange.secret_length);


@ -11,7 +11,7 @@ const readInt = std.mem.readInt;
const writeInt = std.mem.writeInt;
// Based on Supercop's ref10 implementation.
pub const X25519 = struct.{
pub const X25519 = struct {
pub const secret_length = 32;
pub const minimum_key_length = 32;
@ -116,7 +116,7 @@ pub const X25519 = struct.{
}
pub fn createPublicKey(public_key: []u8, private_key: []const u8) bool {
var base_point = []u8.{9} ++ []u8.{0} ** 31;
var base_point = []u8{9} ++ []u8{0} ** 31;
return create(public_key, private_key, base_point);
}
};
@ -137,7 +137,7 @@ fn zerocmp(comptime T: type, a: []const T) bool {
// A bit bigger than TweetNaCl, over 4 times faster.
// field element
const Fe = struct.{
const Fe = struct {
b: [10]i32,
fn secureZero(self: *Fe) void {


@ -55,7 +55,7 @@ pub fn addNullByte(allocator: *mem.Allocator, slice: []const u8) ![]u8 {
return result;
}
pub const NullTerminated2DArray = struct.{
pub const NullTerminated2DArray = struct {
allocator: *mem.Allocator,
byte_count: usize,
ptr: ?[*]?[*]u8,
@ -95,7 +95,7 @@ pub const NullTerminated2DArray = struct.{
}
index_buf[i] = null;
return NullTerminated2DArray.{
return NullTerminated2DArray{
.allocator = allocator,
.byte_count = byte_count,
.ptr = @ptrCast(?[*]?[*]u8, buf.ptr),


@ -3,7 +3,7 @@ const mem = std.mem;
/// Allocator that fails after N allocations, useful for making sure out of
/// memory conditions are handled correctly.
pub const FailingAllocator = struct.{
pub const FailingAllocator = struct {
allocator: mem.Allocator,
index: usize,
fail_index: usize,
@ -13,14 +13,14 @@ pub const FailingAllocator = struct.{
deallocations: usize,
pub fn init(allocator: *mem.Allocator, fail_index: usize) FailingAllocator {
return FailingAllocator.{
return FailingAllocator{
.internal_allocator = allocator,
.fail_index = fail_index,
.index = 0,
.allocated_bytes = 0,
.freed_bytes = 0,
.deallocations = 0,
.allocator = mem.Allocator.{
.allocator = mem.Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,


@ -21,7 +21,7 @@ pub const runtime_safety = switch (builtin.mode) {
builtin.Mode.ReleaseFast, builtin.Mode.ReleaseSmall => false,
};
const Module = struct.{
const Module = struct {
mod_info: pdb.ModInfo,
module_name: []u8,
obj_file_name: []u8,
@ -125,7 +125,7 @@ pub fn assert(ok: bool) void {
/// TODO: add `==` operator for `error_union == error_set`, and then
/// remove this function
pub fn assertError(value: var, expected_error: error) void {
pub fn assertError(value: var, expected_error: anyerror) void {
if (value) {
@panic("expected error");
} else |actual_error| {
@ -213,7 +213,7 @@ pub fn writeCurrentStackTrace(out_stream: var, debug_info: *DebugInfo, tty_color
builtin.Os.windows => return writeCurrentStackTraceWindows(out_stream, debug_info, tty_color, start_addr),
else => {},
}
const AddressState = union(enum).{
const AddressState = union(enum) {
NotLookingForStartAddress,
LookingForStartAddress: usize,
};
@ -222,7 +222,7 @@ pub fn writeCurrentStackTrace(out_stream: var, debug_info: *DebugInfo, tty_color
// else AddressState.NotLookingForStartAddress;
var addr_state: AddressState = undefined;
if (start_addr) |addr| {
addr_state = AddressState.{ .LookingForStartAddress = addr };
addr_state = AddressState{ .LookingForStartAddress = addr };
} else {
addr_state = AddressState.NotLookingForStartAddress;
}
@ -377,7 +377,7 @@ fn printSourceAtAddressWindows(di: *DebugInfo, out_stream: var, relocated_addres
const col_num_entry = @ptrCast(*pdb.ColumnNumberEntry, &subsect_info[line_index]);
break :blk col_num_entry.StartColumn;
} else 0;
break :subsections LineInfo.{
break :subsections LineInfo{
.allocator = allocator,
.file_name = source_file_name,
.line = line,
@ -444,7 +444,7 @@ fn printSourceAtAddressWindows(di: *DebugInfo, out_stream: var, relocated_addres
}
}
const TtyColor = enum.{
const TtyColor = enum {
Red,
Green,
Cyan,
@ -478,7 +478,7 @@ fn setTtyColor(tty_color: TtyColor) void {
},
}
} else {
const S = struct.{
const S = struct {
var attrs: windows.WORD = undefined;
var init_attrs = false;
};
@ -685,7 +685,7 @@ fn printLineInfo(
}
// TODO use this
pub const OpenSelfDebugInfoError = error.{
pub const OpenSelfDebugInfoError = error{
MissingDebugInfo,
OutOfMemory,
UnsupportedOperatingSystem,
@ -705,7 +705,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
defer self_file.close();
const coff_obj = try allocator.createOne(coff.Coff);
coff_obj.* = coff.Coff.{
coff_obj.* = coff.Coff{
.in_file = self_file,
.allocator = allocator,
.coff_header = undefined,
@ -715,7 +715,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
.age = undefined,
};
var di = DebugInfo.{
var di = DebugInfo{
.coff = coff_obj,
.pdb = undefined,
.sect_contribs = undefined,
@ -747,7 +747,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
const name_bytes = try allocator.alloc(u8, name_bytes_len);
try pdb_stream.stream.readNoEof(name_bytes);
const HashTableHeader = packed struct.{
const HashTableHeader = packed struct {
Size: u32,
Capacity: u32,
@ -768,7 +768,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
return error.InvalidDebugInfo;
const deleted = try readSparseBitVector(&pdb_stream.stream, allocator);
const Bucket = struct.{
const Bucket = struct {
first: u32,
second: u32,
};
@ -816,7 +816,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
this_record_len += march_forward_bytes;
}
try modules.append(Module.{
try modules.append(Module{
.mod_info = mod_info,
.module_name = module_name,
.obj_file_name = obj_file_name,
@ -875,7 +875,7 @@ fn readSparseBitVector(stream: var, allocator: *mem.Allocator) ![]usize {
}
fn openSelfDebugInfoLinux(allocator: *mem.Allocator) !DebugInfo {
var di = DebugInfo.{
var di = DebugInfo{
.self_exe_file = undefined,
.elf = undefined,
.debug_info = undefined,
@ -962,7 +962,7 @@ fn openSelfDebugInfoMacOs(allocator: *mem.Allocator) !DebugInfo {
if (sym.n_sect == 0) {
last_len = sym.n_value;
} else {
symbols_buf[symbol_index] = MachoSymbol.{
symbols_buf[symbol_index] = MachoSymbol{
.nlist = sym,
.ofile = ofile,
.reloc = reloc,
@ -980,7 +980,7 @@ fn openSelfDebugInfoMacOs(allocator: *mem.Allocator) !DebugInfo {
}
}
const sentinel = try allocator.createOne(macho.nlist_64);
sentinel.* = macho.nlist_64.{
sentinel.* = macho.nlist_64{
.n_strx = 0,
.n_type = 36,
.n_sect = 0,
@ -995,7 +995,7 @@ fn openSelfDebugInfoMacOs(allocator: *mem.Allocator) !DebugInfo {
// This sort is so that we can binary search later.
std.sort.sort(MachoSymbol, symbols, MachoSymbol.addressLessThan);
return DebugInfo.{
return DebugInfo{
.ofiles = DebugInfo.OFileTable.init(allocator),
.symbols = symbols,
.strings = strings,
@ -1034,7 +1034,7 @@ fn printLineFromFile(out_stream: var, line_info: LineInfo) !void {
}
}
const MachoSymbol = struct.{
const MachoSymbol = struct {
nlist: *macho.nlist_64,
ofile: ?*macho.nlist_64,
reloc: u64,
@ -1049,14 +1049,14 @@ const MachoSymbol = struct.{
}
};
const MachOFile = struct.{
const MachOFile = struct {
bytes: []align(@alignOf(macho.mach_header_64)) const u8,
sect_debug_info: ?*const macho.section_64,
sect_debug_line: ?*const macho.section_64,
};
pub const DebugInfo = switch (builtin.os) {
builtin.Os.macosx => struct.{
builtin.Os.macosx => struct {
symbols: []const MachoSymbol,
strings: []const u8,
ofiles: OFileTable,
@ -1072,13 +1072,13 @@ pub const DebugInfo = switch (builtin.os) {
return self.ofiles.allocator;
}
},
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
pdb: pdb.Pdb,
coff: *coff.Coff,
sect_contribs: []pdb.SectionContribEntry,
modules: []Module,
},
builtin.Os.linux => struct.{
builtin.Os.linux => struct {
self_exe_file: os.File,
elf: elf.Elf,
debug_info: *elf.SectionHeader,
@ -1107,12 +1107,12 @@ pub const DebugInfo = switch (builtin.os) {
else => @compileError("Unsupported OS"),
};
const PcRange = struct.{
const PcRange = struct {
start: u64,
end: u64,
};
const CompileUnit = struct.{
const CompileUnit = struct {
version: u16,
is_64: bool,
die: *Die,
@ -1122,25 +1122,25 @@ const CompileUnit = struct.{
const AbbrevTable = ArrayList(AbbrevTableEntry);
const AbbrevTableHeader = struct.{
const AbbrevTableHeader = struct {
// offset from .debug_abbrev
offset: u64,
table: AbbrevTable,
};
const AbbrevTableEntry = struct.{
const AbbrevTableEntry = struct {
has_children: bool,
abbrev_code: u64,
tag_id: u64,
attrs: ArrayList(AbbrevAttr),
};
const AbbrevAttr = struct.{
const AbbrevAttr = struct {
attr_id: u64,
form_id: u64,
};
const FormValue = union(enum).{
const FormValue = union(enum) {
Address: u64,
Block: []u8,
Const: Constant,
@ -1154,7 +1154,7 @@ const FormValue = union(enum).{
StrPtr: u64,
};
const Constant = struct.{
const Constant = struct {
payload: []u8,
signed: bool,
@ -1165,12 +1165,12 @@ const Constant = struct.{
}
};
const Die = struct.{
const Die = struct {
tag_id: u64,
has_children: bool,
attrs: ArrayList(Attr),
const Attr = struct.{
const Attr = struct {
id: u64,
value: FormValue,
};
@ -1217,14 +1217,14 @@ const Die = struct.{
}
};
const FileEntry = struct.{
const FileEntry = struct {
file_name: []const u8,
dir_index: usize,
mtime: usize,
len_bytes: usize,
};
const LineInfo = struct.{
const LineInfo = struct {
line: usize,
column: usize,
file_name: []u8,
@ -1235,7 +1235,7 @@ const LineInfo = struct.{
}
};
const LineNumberProgram = struct.{
const LineNumberProgram = struct {
address: usize,
file: usize,
line: isize,
@ -1257,7 +1257,7 @@ const LineNumberProgram = struct.{
prev_end_sequence: bool,
pub fn init(is_stmt: bool, include_dirs: []const []const u8, file_entries: *ArrayList(FileEntry), target_address: usize) LineNumberProgram {
return LineNumberProgram.{
return LineNumberProgram{
.address = 0,
.file = 1,
.line = 1,
@ -1293,7 +1293,7 @@ const LineNumberProgram = struct.{
self.include_dirs[file_entry.dir_index];
const file_name = try os.path.join(self.file_entries.allocator, dir_name, file_entry.file_name);
errdefer self.file_entries.allocator.free(file_name);
return LineInfo.{
return LineInfo{
.line = if (self.prev_line >= 0) @intCast(usize, self.prev_line) else 0,
.column = self.prev_column,
.file_name = file_name,
@ -1337,7 +1337,7 @@ fn readAllocBytes(allocator: *mem.Allocator, in_stream: var, size: usize) ![]u8
fn parseFormValueBlockLen(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
const buf = try readAllocBytes(allocator, in_stream, size);
return FormValue.{ .Block = buf };
return FormValue{ .Block = buf };
}
fn parseFormValueBlock(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
@ -1346,8 +1346,8 @@ fn parseFormValueBlock(allocator: *mem.Allocator, in_stream: var, size: usize) !
}
fn parseFormValueConstant(allocator: *mem.Allocator, in_stream: var, signed: bool, size: usize) !FormValue {
return FormValue.{
.Const = Constant.{
return FormValue{
.Const = Constant{
.signed = signed,
.payload = try readAllocBytes(allocator, in_stream, size),
},
@ -1364,7 +1364,7 @@ fn parseFormValueTargetAddrSize(in_stream: var) !u64 {
fn parseFormValueRefLen(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
const buf = try readAllocBytes(allocator, in_stream, size);
return FormValue.{ .Ref = buf };
return FormValue{ .Ref = buf };
}
fn parseFormValueRef(allocator: *mem.Allocator, in_stream: var, comptime T: type) !FormValue {
@ -1372,7 +1372,7 @@ fn parseFormValueRef(allocator: *mem.Allocator, in_stream: var, comptime T: type
return parseFormValueRefLen(allocator, in_stream, block_len);
}
const ParseFormValueError = error.{
const ParseFormValueError = error{
EndOfStream,
InvalidDebugInfo,
EndOfFile,
@ -1381,7 +1381,7 @@ const ParseFormValueError = error.{
fn parseFormValue(allocator: *mem.Allocator, in_stream: var, form_id: u64, is_64: bool) ParseFormValueError!FormValue {
return switch (form_id) {
DW.FORM_addr => FormValue.{ .Address = try parseFormValueTargetAddrSize(in_stream) },
DW.FORM_addr => FormValue{ .Address = try parseFormValueTargetAddrSize(in_stream) },
DW.FORM_block1 => parseFormValueBlock(allocator, in_stream, 1),
DW.FORM_block2 => parseFormValueBlock(allocator, in_stream, 2),
DW.FORM_block4 => parseFormValueBlock(allocator, in_stream, 4),
@ -1401,11 +1401,11 @@ fn parseFormValue(allocator: *mem.Allocator, in_stream: var, form_id: u64, is_64
DW.FORM_exprloc => {
const size = try readULeb128(in_stream);
const buf = try readAllocBytes(allocator, in_stream, size);
return FormValue.{ .ExprLoc = buf };
return FormValue{ .ExprLoc = buf };
},
DW.FORM_flag => FormValue.{ .Flag = (try in_stream.readByte()) != 0 },
DW.FORM_flag_present => FormValue.{ .Flag = true },
DW.FORM_sec_offset => FormValue.{ .SecOffset = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_flag => FormValue{ .Flag = (try in_stream.readByte()) != 0 },
DW.FORM_flag_present => FormValue{ .Flag = true },
DW.FORM_sec_offset => FormValue{ .SecOffset = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_ref1 => parseFormValueRef(allocator, in_stream, u8),
DW.FORM_ref2 => parseFormValueRef(allocator, in_stream, u16),
@ -1416,11 +1416,11 @@ fn parseFormValue(allocator: *mem.Allocator, in_stream: var, form_id: u64, is_64
return parseFormValueRefLen(allocator, in_stream, ref_len);
},
DW.FORM_ref_addr => FormValue.{ .RefAddr = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_ref_sig8 => FormValue.{ .RefSig8 = try in_stream.readIntLe(u64) },
DW.FORM_ref_addr => FormValue{ .RefAddr = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_ref_sig8 => FormValue{ .RefSig8 = try in_stream.readIntLe(u64) },
DW.FORM_string => FormValue.{ .String = try readStringRaw(allocator, in_stream) },
DW.FORM_strp => FormValue.{ .StrPtr = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_string => FormValue{ .String = try readStringRaw(allocator, in_stream) },
DW.FORM_strp => FormValue{ .StrPtr = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_indirect => {
const child_form_id = try readULeb128(in_stream);
return parseFormValue(allocator, in_stream, child_form_id, is_64);
@ -1437,7 +1437,7 @@ fn parseAbbrevTable(st: *DebugInfo) !AbbrevTable {
while (true) {
const abbrev_code = try readULeb128(in_stream);
if (abbrev_code == 0) return result;
try result.append(AbbrevTableEntry.{
try result.append(AbbrevTableEntry{
.abbrev_code = abbrev_code,
.tag_id = try readULeb128(in_stream),
.has_children = (try in_stream.readByte()) == DW.CHILDREN_yes,
@ -1449,7 +1449,7 @@ fn parseAbbrevTable(st: *DebugInfo) !AbbrevTable {
const attr_id = try readULeb128(in_stream);
const form_id = try readULeb128(in_stream);
if (attr_id == 0 and form_id == 0) break;
try attrs.append(AbbrevAttr.{
try attrs.append(AbbrevAttr{
.attr_id = attr_id,
.form_id = form_id,
});
@ -1466,7 +1466,7 @@ fn getAbbrevTable(st: *DebugInfo, abbrev_offset: u64) !*const AbbrevTable {
}
}
try st.self_exe_file.seekTo(st.debug_abbrev.offset + abbrev_offset);
try st.abbrev_table_list.append(AbbrevTableHeader.{
try st.abbrev_table_list.append(AbbrevTableHeader{
.offset = abbrev_offset,
.table = try parseAbbrevTable(st),
});
@ -1487,14 +1487,14 @@ fn parseDie(st: *DebugInfo, abbrev_table: *const AbbrevTable, is_64: bool) !Die
const abbrev_code = try readULeb128(in_stream);
const table_entry = getAbbrevTableEntry(abbrev_table, abbrev_code) orelse return error.InvalidDebugInfo;
var result = Die.{
var result = Die{
.tag_id = table_entry.tag_id,
.has_children = table_entry.has_children,
.attrs = ArrayList(Die.Attr).init(st.allocator()),
};
try result.attrs.resize(table_entry.attrs.len);
for (table_entry.attrs.toSliceConst()) |attr, i| {
result.attrs.items[i] = Die.Attr.{
result.attrs.items[i] = Die.Attr{
.id = attr.attr_id,
.value = try parseFormValue(st.allocator(), in_stream, attr.form_id, is_64),
};
@ -1509,7 +1509,7 @@ fn getLineNumberInfoMacOs(di: *DebugInfo, symbol: MachoSymbol, target_address: u
errdefer _ = di.ofiles.remove(ofile);
const ofile_path = mem.toSliceConst(u8, di.strings.ptr + ofile.n_strx);
gop.kv.value = MachOFile.{
gop.kv.value = MachOFile{
.bytes = try std.io.readFileAllocAligned(di.ofiles.allocator, ofile_path, @alignOf(macho.mach_header_64)),
.sect_debug_info = null,
.sect_debug_line = null,
@ -1600,7 +1600,7 @@ fn getLineNumberInfoMacOs(di: *DebugInfo, symbol: MachoSymbol, target_address: u
const dir_index = try readULeb128Mem(&ptr);
const mtime = try readULeb128Mem(&ptr);
const len_bytes = try readULeb128Mem(&ptr);
try file_entries.append(FileEntry.{
try file_entries.append(FileEntry{
.file_name = file_name,
.dir_index = dir_index,
.mtime = mtime,
@ -1631,7 +1631,7 @@ fn getLineNumberInfoMacOs(di: *DebugInfo, symbol: MachoSymbol, target_address: u
const dir_index = try readULeb128Mem(&ptr);
const mtime = try readULeb128Mem(&ptr);
const len_bytes = try readULeb128Mem(&ptr);
try file_entries.append(FileEntry.{
try file_entries.append(FileEntry{
.file_name = file_name,
.dir_index = dir_index,
.mtime = mtime,
@ -1773,7 +1773,7 @@ fn getLineNumberInfoLinux(di: *DebugInfo, compile_unit: *const CompileUnit, targ
const dir_index = try readULeb128(in_stream);
const mtime = try readULeb128(in_stream);
const len_bytes = try readULeb128(in_stream);
try file_entries.append(FileEntry.{
try file_entries.append(FileEntry{
.file_name = file_name,
.dir_index = dir_index,
.mtime = mtime,
@ -1805,7 +1805,7 @@ fn getLineNumberInfoLinux(di: *DebugInfo, compile_unit: *const CompileUnit, targ
const dir_index = try readULeb128(in_stream);
const mtime = try readULeb128(in_stream);
const len_bytes = try readULeb128(in_stream);
try file_entries.append(FileEntry.{
try file_entries.append(FileEntry{
.file_name = file_name,
.dir_index = dir_index,
.mtime = mtime,
@ -1922,7 +1922,7 @@ fn scanAllCompileUnits(st: *DebugInfo) !void {
},
else => return error.InvalidDebugInfo,
};
break :x PcRange.{
break :x PcRange{
.start = low_pc,
.end = pc_end,
};
@ -1935,7 +1935,7 @@ fn scanAllCompileUnits(st: *DebugInfo) !void {
}
};
try st.compile_unit_list.append(CompileUnit.{
try st.compile_unit_list.append(CompileUnit{
.version = version,
.is_64 = is_64,
.pc_range = pc_range,
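The debug.zig hunks above also cover the two error-related changes: error-set declarations drop the `.` (now `error{ ... }`), and the bare `error` global set is spelled `anyerror`. A small sketch under the new grammar, with hypothetical names:

const std = @import("std");

// Error-set declaration under the new grammar (old: error.{ ... }).
const ExampleError = error{
    InvalidDebugInfo,
    EndOfStream,
};

// `anyerror` replaces the old bare `error` as the catch-all error set.
fn mayFail(ok: bool) anyerror!void {
    if (!ok) return error.InvalidDebugInfo;
}

test "error set and anyerror" {
    mayFail(true) catch unreachable;
}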


@ -18,7 +18,7 @@ pub const DynLib = switch (builtin.os) {
else => void,
};
pub const LinuxDynLib = struct.{
pub const LinuxDynLib = struct {
allocator: *mem.Allocator,
elf_lib: ElfLib,
fd: i32,
@ -44,7 +44,7 @@ pub const LinuxDynLib = struct.{
const bytes = @intToPtr([*]align(std.os.page_size) u8, addr)[0..size];
return DynLib.{
return DynLib{
.allocator = allocator,
.elf_lib = try ElfLib.init(bytes),
.fd = fd,
@ -64,7 +64,7 @@ pub const LinuxDynLib = struct.{
}
};
pub const ElfLib = struct.{
pub const ElfLib = struct {
strings: [*]u8,
syms: [*]elf.Sym,
hashtab: [*]linux.Elf_Symndx,
@ -121,7 +121,7 @@ pub const ElfLib = struct.{
}
}
return ElfLib.{
return ElfLib{
.base = base,
.strings = maybe_strings orelse return error.ElfStringSectionNotFound,
.syms = maybe_syms orelse return error.ElfSymSectionNotFound,
@ -169,14 +169,14 @@ fn checkver(def_arg: *elf.Verdef, vsym_arg: i32, vername: []const u8, strings: [
return mem.eql(u8, vername, cstr.toSliceConst(strings + aux.vda_name));
}
pub const WindowsDynLib = struct.{
pub const WindowsDynLib = struct {
allocator: *mem.Allocator,
dll: windows.HMODULE,
pub fn open(allocator: *mem.Allocator, path: []const u8) !WindowsDynLib {
const wpath = try win_util.sliceToPrefixedFileW(path);
return WindowsDynLib.{
return WindowsDynLib{
.allocator = allocator,
.dll = windows.LoadLibraryW(&wpath) orelse {
const err = windows.GetLastError();


@ -320,14 +320,14 @@ pub const ET_DYN = 3;
/// A core file.
pub const ET_CORE = 4;
pub const FileType = enum.{
pub const FileType = enum {
Relocatable,
Executable,
Shared,
Core,
};
pub const Arch = enum.{
pub const Arch = enum {
Sparc,
x86,
Mips,
@ -339,7 +339,7 @@ pub const Arch = enum.{
AArch64,
};
pub const SectionHeader = struct.{
pub const SectionHeader = struct {
name: u32,
sh_type: u32,
flags: u64,
@ -352,7 +352,7 @@ pub const SectionHeader = struct.{
ent_size: u64,
};
pub const Elf = struct.{
pub const Elf = struct {
in_file: os.File,
auto_close_stream: bool,
is_64: bool,
@ -572,7 +572,7 @@ pub const Elf32_Section = u16;
pub const Elf64_Section = u16;
pub const Elf32_Versym = Elf32_Half;
pub const Elf64_Versym = Elf64_Half;
pub const Elf32_Ehdr = extern struct.{
pub const Elf32_Ehdr = extern struct {
e_ident: [EI_NIDENT]u8,
e_type: Elf32_Half,
e_machine: Elf32_Half,
@ -588,7 +588,7 @@ pub const Elf32_Ehdr = extern struct.{
e_shnum: Elf32_Half,
e_shstrndx: Elf32_Half,
};
pub const Elf64_Ehdr = extern struct.{
pub const Elf64_Ehdr = extern struct {
e_ident: [EI_NIDENT]u8,
e_type: Elf64_Half,
e_machine: Elf64_Half,
@ -604,7 +604,7 @@ pub const Elf64_Ehdr = extern struct.{
e_shnum: Elf64_Half,
e_shstrndx: Elf64_Half,
};
pub const Elf32_Shdr = extern struct.{
pub const Elf32_Shdr = extern struct {
sh_name: Elf32_Word,
sh_type: Elf32_Word,
sh_flags: Elf32_Word,
@ -616,7 +616,7 @@ pub const Elf32_Shdr = extern struct.{
sh_addralign: Elf32_Word,
sh_entsize: Elf32_Word,
};
pub const Elf64_Shdr = extern struct.{
pub const Elf64_Shdr = extern struct {
sh_name: Elf64_Word,
sh_type: Elf64_Word,
sh_flags: Elf64_Xword,
@ -628,18 +628,18 @@ pub const Elf64_Shdr = extern struct.{
sh_addralign: Elf64_Xword,
sh_entsize: Elf64_Xword,
};
pub const Elf32_Chdr = extern struct.{
pub const Elf32_Chdr = extern struct {
ch_type: Elf32_Word,
ch_size: Elf32_Word,
ch_addralign: Elf32_Word,
};
pub const Elf64_Chdr = extern struct.{
pub const Elf64_Chdr = extern struct {
ch_type: Elf64_Word,
ch_reserved: Elf64_Word,
ch_size: Elf64_Xword,
ch_addralign: Elf64_Xword,
};
pub const Elf32_Sym = extern struct.{
pub const Elf32_Sym = extern struct {
st_name: Elf32_Word,
st_value: Elf32_Addr,
st_size: Elf32_Word,
@ -647,7 +647,7 @@ pub const Elf32_Sym = extern struct.{
st_other: u8,
st_shndx: Elf32_Section,
};
pub const Elf64_Sym = extern struct.{
pub const Elf64_Sym = extern struct {
st_name: Elf64_Word,
st_info: u8,
st_other: u8,
@ -655,33 +655,33 @@ pub const Elf64_Sym = extern struct.{
st_value: Elf64_Addr,
st_size: Elf64_Xword,
};
pub const Elf32_Syminfo = extern struct.{
pub const Elf32_Syminfo = extern struct {
si_boundto: Elf32_Half,
si_flags: Elf32_Half,
};
pub const Elf64_Syminfo = extern struct.{
pub const Elf64_Syminfo = extern struct {
si_boundto: Elf64_Half,
si_flags: Elf64_Half,
};
pub const Elf32_Rel = extern struct.{
pub const Elf32_Rel = extern struct {
r_offset: Elf32_Addr,
r_info: Elf32_Word,
};
pub const Elf64_Rel = extern struct.{
pub const Elf64_Rel = extern struct {
r_offset: Elf64_Addr,
r_info: Elf64_Xword,
};
pub const Elf32_Rela = extern struct.{
pub const Elf32_Rela = extern struct {
r_offset: Elf32_Addr,
r_info: Elf32_Word,
r_addend: Elf32_Sword,
};
pub const Elf64_Rela = extern struct.{
pub const Elf64_Rela = extern struct {
r_offset: Elf64_Addr,
r_info: Elf64_Xword,
r_addend: Elf64_Sxword,
};
pub const Elf32_Phdr = extern struct.{
pub const Elf32_Phdr = extern struct {
p_type: Elf32_Word,
p_offset: Elf32_Off,
p_vaddr: Elf32_Addr,
@ -691,7 +691,7 @@ pub const Elf32_Phdr = extern struct.{
p_flags: Elf32_Word,
p_align: Elf32_Word,
};
pub const Elf64_Phdr = extern struct.{
pub const Elf64_Phdr = extern struct {
p_type: Elf64_Word,
p_flags: Elf64_Word,
p_offset: Elf64_Off,
@ -701,21 +701,21 @@ pub const Elf64_Phdr = extern struct.{
p_memsz: Elf64_Xword,
p_align: Elf64_Xword,
};
pub const Elf32_Dyn = extern struct.{
pub const Elf32_Dyn = extern struct {
d_tag: Elf32_Sword,
d_un: extern union.{
d_un: extern union {
d_val: Elf32_Word,
d_ptr: Elf32_Addr,
},
};
pub const Elf64_Dyn = extern struct.{
pub const Elf64_Dyn = extern struct {
d_tag: Elf64_Sxword,
d_un: extern union.{
d_un: extern union {
d_val: Elf64_Xword,
d_ptr: Elf64_Addr,
},
};
pub const Elf32_Verdef = extern struct.{
pub const Elf32_Verdef = extern struct {
vd_version: Elf32_Half,
vd_flags: Elf32_Half,
vd_ndx: Elf32_Half,
@ -724,7 +724,7 @@ pub const Elf32_Verdef = extern struct.{
vd_aux: Elf32_Word,
vd_next: Elf32_Word,
};
pub const Elf64_Verdef = extern struct.{
pub const Elf64_Verdef = extern struct {
vd_version: Elf64_Half,
vd_flags: Elf64_Half,
vd_ndx: Elf64_Half,
@ -733,111 +733,111 @@ pub const Elf64_Verdef = extern struct.{
vd_aux: Elf64_Word,
vd_next: Elf64_Word,
};
pub const Elf32_Verdaux = extern struct.{
pub const Elf32_Verdaux = extern struct {
vda_name: Elf32_Word,
vda_next: Elf32_Word,
};
pub const Elf64_Verdaux = extern struct.{
pub const Elf64_Verdaux = extern struct {
vda_name: Elf64_Word,
vda_next: Elf64_Word,
};
pub const Elf32_Verneed = extern struct.{
pub const Elf32_Verneed = extern struct {
vn_version: Elf32_Half,
vn_cnt: Elf32_Half,
vn_file: Elf32_Word,
vn_aux: Elf32_Word,
vn_next: Elf32_Word,
};
pub const Elf64_Verneed = extern struct.{
pub const Elf64_Verneed = extern struct {
vn_version: Elf64_Half,
vn_cnt: Elf64_Half,
vn_file: Elf64_Word,
vn_aux: Elf64_Word,
vn_next: Elf64_Word,
};
pub const Elf32_Vernaux = extern struct.{
pub const Elf32_Vernaux = extern struct {
vna_hash: Elf32_Word,
vna_flags: Elf32_Half,
vna_other: Elf32_Half,
vna_name: Elf32_Word,
vna_next: Elf32_Word,
};
pub const Elf64_Vernaux = extern struct.{
pub const Elf64_Vernaux = extern struct {
vna_hash: Elf64_Word,
vna_flags: Elf64_Half,
vna_other: Elf64_Half,
vna_name: Elf64_Word,
vna_next: Elf64_Word,
};
pub const Elf32_auxv_t = extern struct.{
pub const Elf32_auxv_t = extern struct {
a_type: u32,
a_un: extern union.{
a_un: extern union {
a_val: u32,
},
};
pub const Elf64_auxv_t = extern struct.{
pub const Elf64_auxv_t = extern struct {
a_type: u64,
a_un: extern union.{
a_un: extern union {
a_val: u64,
},
};
pub const Elf32_Nhdr = extern struct.{
pub const Elf32_Nhdr = extern struct {
n_namesz: Elf32_Word,
n_descsz: Elf32_Word,
n_type: Elf32_Word,
};
pub const Elf64_Nhdr = extern struct.{
pub const Elf64_Nhdr = extern struct {
n_namesz: Elf64_Word,
n_descsz: Elf64_Word,
n_type: Elf64_Word,
};
pub const Elf32_Move = extern struct.{
pub const Elf32_Move = extern struct {
m_value: Elf32_Xword,
m_info: Elf32_Word,
m_poffset: Elf32_Word,
m_repeat: Elf32_Half,
m_stride: Elf32_Half,
};
pub const Elf64_Move = extern struct.{
pub const Elf64_Move = extern struct {
m_value: Elf64_Xword,
m_info: Elf64_Xword,
m_poffset: Elf64_Xword,
m_repeat: Elf64_Half,
m_stride: Elf64_Half,
};
pub const Elf32_gptab = extern union.{
gt_header: extern struct.{
pub const Elf32_gptab = extern union {
gt_header: extern struct {
gt_current_g_value: Elf32_Word,
gt_unused: Elf32_Word,
},
gt_entry: extern struct.{
gt_entry: extern struct {
gt_g_value: Elf32_Word,
gt_bytes: Elf32_Word,
},
};
pub const Elf32_RegInfo = extern struct.{
pub const Elf32_RegInfo = extern struct {
ri_gprmask: Elf32_Word,
ri_cprmask: [4]Elf32_Word,
ri_gp_value: Elf32_Sword,
};
pub const Elf_Options = extern struct.{
pub const Elf_Options = extern struct {
kind: u8,
size: u8,
@"section": Elf32_Section,
info: Elf32_Word,
};
pub const Elf_Options_Hw = extern struct.{
pub const Elf_Options_Hw = extern struct {
hwp_flags1: Elf32_Word,
hwp_flags2: Elf32_Word,
};
pub const Elf32_Lib = extern struct.{
pub const Elf32_Lib = extern struct {
l_name: Elf32_Word,
l_time_stamp: Elf32_Word,
l_checksum: Elf32_Word,
l_version: Elf32_Word,
l_flags: Elf32_Word,
};
pub const Elf64_Lib = extern struct.{
pub const Elf64_Lib = extern struct {
l_name: Elf64_Word,
l_time_stamp: Elf64_Word,
l_checksum: Elf64_Word,
@ -845,7 +845,7 @@ pub const Elf64_Lib = extern struct.{
l_flags: Elf64_Word,
};
pub const Elf32_Conflict = Elf32_Addr;
pub const Elf_MIPS_ABIFlags_v0 = extern struct.{
pub const Elf_MIPS_ABIFlags_v0 = extern struct {
version: Elf32_Half,
isa_level: u8,
isa_rev: u8,
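The same rule applies to the extern containers in the ELF declarations above: `extern struct` and `extern union` are followed directly by `{`. A minimal sketch with hypothetical ELF-like layouts (not the real declarations):

// Old grammar: extern struct.{ ... } / extern union.{ ... }
pub const ExampleRel = extern struct {
    r_offset: u32,
    r_info: u32,
};

pub const ExampleDyn = extern struct {
    d_tag: i32,
    d_un: extern union {
        d_val: u32,
        d_ptr: u32,
    },
};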


@ -9,7 +9,7 @@ const Loop = std.event.Loop;
/// when buffer is empty, consumers suspend and are resumed by producers
/// when buffer is full, producers suspend and are resumed by consumers
pub fn Channel(comptime T: type) type {
return struct.{
return struct {
loop: *Loop,
getters: std.atomic.Queue(GetNode),
@ -26,25 +26,25 @@ pub fn Channel(comptime T: type) type {
buffer_len: usize,
const SelfChannel = @This();
const GetNode = struct.{
const GetNode = struct {
tick_node: *Loop.NextTickNode,
data: Data,
const Data = union(enum).{
const Data = union(enum) {
Normal: Normal,
OrNull: OrNull,
};
const Normal = struct.{
const Normal = struct {
ptr: *T,
};
const OrNull = struct.{
const OrNull = struct {
ptr: *?T,
or_null: *std.atomic.Queue(*std.atomic.Queue(GetNode).Node).Node,
};
};
const PutNode = struct.{
const PutNode = struct {
data: T,
tick_node: *Loop.NextTickNode,
};
@ -54,7 +54,7 @@ pub fn Channel(comptime T: type) type {
const buffer_nodes = try loop.allocator.alloc(T, capacity);
errdefer loop.allocator.free(buffer_nodes);
const self = try loop.allocator.create(SelfChannel.{
const self = try loop.allocator.create(SelfChannel{
.loop = loop,
.buffer_len = 0,
.buffer_nodes = buffer_nodes,
@ -93,7 +93,7 @@ pub fn Channel(comptime T: type) type {
}
var my_tick_node = Loop.NextTickNode.init(@handle());
var queue_node = std.atomic.Queue(PutNode).Node.init(PutNode.{
var queue_node = std.atomic.Queue(PutNode).Node.init(PutNode{
.tick_node = &my_tick_node,
.data = data,
});
@ -129,10 +129,10 @@ pub fn Channel(comptime T: type) type {
// so we can get rid of this extra result copy
var result: T = undefined;
var my_tick_node = Loop.NextTickNode.init(@handle());
var queue_node = std.atomic.Queue(GetNode).Node.init(GetNode.{
var queue_node = std.atomic.Queue(GetNode).Node.init(GetNode{
.tick_node = &my_tick_node,
.data = GetNode.Data.{
.Normal = GetNode.Normal.{ .ptr = &result },
.data = GetNode.Data{
.Normal = GetNode.Normal{ .ptr = &result },
},
});
@ -181,10 +181,10 @@ pub fn Channel(comptime T: type) type {
var result: ?T = null;
var my_tick_node = Loop.NextTickNode.init(@handle());
var or_null_node = std.atomic.Queue(*std.atomic.Queue(GetNode).Node).Node.init(undefined);
var queue_node = std.atomic.Queue(GetNode).Node.init(GetNode.{
var queue_node = std.atomic.Queue(GetNode).Node.init(GetNode{
.tick_node = &my_tick_node,
.data = GetNode.Data.{
.OrNull = GetNode.OrNull.{
.data = GetNode.Data{
.OrNull = GetNode.OrNull{
.ptr = &result,
.or_null = &or_null_node,
},


@ -10,17 +10,17 @@ const Loop = event.Loop;
pub const RequestNode = std.atomic.Queue(Request).Node;
pub const Request = struct.{
pub const Request = struct {
msg: Msg,
finish: Finish,
pub const Finish = union(enum).{
pub const Finish = union(enum) {
TickNode: Loop.NextTickNode,
DeallocCloseOperation: *CloseOperation,
NoAction,
};
pub const Msg = union(enum).{
pub const Msg = union(enum) {
PWriteV: PWriteV,
PReadV: PReadV,
Open: Open,
@ -28,7 +28,7 @@ pub const Request = struct.{
WriteFile: WriteFile,
End, // special - means the fs thread should exit
pub const PWriteV = struct.{
pub const PWriteV = struct {
fd: os.FileHandle,
iov: []const os.posix.iovec_const,
offset: usize,
@ -37,7 +37,7 @@ pub const Request = struct.{
pub const Error = os.PosixWriteError;
};
pub const PReadV = struct.{
pub const PReadV = struct {
fd: os.FileHandle,
iov: []const os.posix.iovec,
offset: usize,
@ -46,7 +46,7 @@ pub const Request = struct.{
pub const Error = os.PosixReadError;
};
pub const Open = struct.{
pub const Open = struct {
/// must be null terminated. TODO https://github.com/ziglang/zig/issues/265
path: []const u8,
flags: u32,
@ -56,7 +56,7 @@ pub const Request = struct.{
pub const Error = os.File.OpenError;
};
pub const WriteFile = struct.{
pub const WriteFile = struct {
/// must be null terminated. TODO https://github.com/ziglang/zig/issues/265
path: []const u8,
contents: []const u8,
@ -66,13 +66,13 @@ pub const Request = struct.{
pub const Error = os.File.OpenError || os.File.WriteError;
};
pub const Close = struct.{
pub const Close = struct {
fd: os.FileHandle,
};
};
};
pub const PWriteVError = error.{OutOfMemory} || os.File.WriteError;
pub const PWriteVError = error{OutOfMemory} || os.File.WriteError;
/// data - just the inner references - must live until pwritev promise completes.
pub async fn pwritev(loop: *Loop, fd: os.FileHandle, data: []const []const u8, offset: usize) PWriteVError!void {
@ -88,7 +88,7 @@ pub async fn pwritev(loop: *Loop, fd: os.FileHandle, data: []const []const u8, o
defer loop.allocator.free(iovecs);
for (data) |buf, i| {
iovecs[i] = os.posix.iovec_const.{
iovecs[i] = os.posix.iovec_const{
.iov_base = buf.ptr,
.iov_len = buf.len,
};
@ -124,11 +124,11 @@ pub async fn pwriteWindows(loop: *Loop, fd: os.FileHandle, data: []const u8, off
resume @handle();
}
var resume_node = Loop.ResumeNode.Basic.{
.base = Loop.ResumeNode.{
var resume_node = Loop.ResumeNode.Basic{
.base = Loop.ResumeNode{
.id = Loop.ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = windows.OVERLAPPED.{
.overlapped = windows.OVERLAPPED{
.Internal = 0,
.InternalHigh = 0,
.Offset = @truncate(u32, offset),
@ -175,20 +175,20 @@ pub async fn pwritevPosix(
resume @handle();
}
var req_node = RequestNode.{
var req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.PWriteV = Request.Msg.PWriteV.{
.data = Request{
.msg = Request.Msg{
.PWriteV = Request.Msg.PWriteV{
.fd = fd,
.iov = iovecs,
.offset = offset,
.result = undefined,
},
},
.finish = Request.Finish.{
.TickNode = Loop.NextTickNode.{
.finish = Request.Finish{
.TickNode = Loop.NextTickNode{
.prev = null,
.next = null,
.data = @handle(),
@ -206,7 +206,7 @@ pub async fn pwritevPosix(
return req_node.data.msg.PWriteV.result;
}
pub const PReadVError = error.{OutOfMemory} || os.File.ReadError;
pub const PReadVError = error{OutOfMemory} || os.File.ReadError;
/// data - just the inner references - must live until preadv promise completes.
pub async fn preadv(loop: *Loop, fd: os.FileHandle, data: []const []u8, offset: usize) PReadVError!usize {
@ -224,7 +224,7 @@ pub async fn preadv(loop: *Loop, fd: os.FileHandle, data: []const []u8, offset:
defer loop.allocator.free(iovecs);
for (data) |buf, i| {
iovecs[i] = os.posix.iovec.{
iovecs[i] = os.posix.iovec{
.iov_base = buf.ptr,
.iov_len = buf.len,
};
@ -272,11 +272,11 @@ pub async fn preadWindows(loop: *Loop, fd: os.FileHandle, data: []u8, offset: u6
resume @handle();
}
var resume_node = Loop.ResumeNode.Basic.{
.base = Loop.ResumeNode.{
var resume_node = Loop.ResumeNode.Basic{
.base = Loop.ResumeNode{
.id = Loop.ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = windows.OVERLAPPED.{
.overlapped = windows.OVERLAPPED{
.Internal = 0,
.InternalHigh = 0,
.Offset = @truncate(u32, offset),
@ -322,20 +322,20 @@ pub async fn preadvPosix(
resume @handle();
}
var req_node = RequestNode.{
var req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.PReadV = Request.Msg.PReadV.{
.data = Request{
.msg = Request.Msg{
.PReadV = Request.Msg.PReadV{
.fd = fd,
.iov = iovecs,
.offset = offset,
.result = undefined,
},
},
.finish = Request.Finish.{
.TickNode = Loop.NextTickNode.{
.finish = Request.Finish{
.TickNode = Loop.NextTickNode{
.prev = null,
.next = null,
.data = @handle(),
@ -366,20 +366,20 @@ pub async fn openPosix(
const path_c = try std.os.toPosixPath(path);
var req_node = RequestNode.{
var req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.Open = Request.Msg.Open.{
.data = Request{
.msg = Request.Msg{
.Open = Request.Msg.Open{
.path = path_c[0..path.len],
.flags = flags,
.mode = mode,
.result = undefined,
},
},
.finish = Request.Finish.{
.TickNode = Loop.NextTickNode.{
.finish = Request.Finish{
.TickNode = Loop.NextTickNode{
.prev = null,
.next = null,
.data = @handle(),
@ -472,32 +472,32 @@ pub async fn openReadWrite(
/// `CloseOperation.finish`.
/// If you call `setHandle` then finishing will close the fd; otherwise finishing
/// will deallocate the `CloseOperation`.
pub const CloseOperation = struct.{
pub const CloseOperation = struct {
loop: *Loop,
os_data: OsData,
const OsData = switch (builtin.os) {
builtin.Os.linux, builtin.Os.macosx => OsDataPosix,
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
handle: ?os.FileHandle,
},
else => @compileError("Unsupported OS"),
};
const OsDataPosix = struct.{
const OsDataPosix = struct {
have_fd: bool,
close_req_node: RequestNode,
};
pub fn start(loop: *Loop) (error.{OutOfMemory}!*CloseOperation) {
pub fn start(loop: *Loop) (error{OutOfMemory}!*CloseOperation) {
const self = try loop.allocator.createOne(CloseOperation);
self.* = CloseOperation.{
self.* = CloseOperation{
.loop = loop,
.os_data = switch (builtin.os) {
builtin.Os.linux, builtin.Os.macosx => initOsDataPosix(self),
builtin.Os.windows => OsData.{ .handle = null },
builtin.Os.windows => OsData{ .handle = null },
else => @compileError("Unsupported OS"),
},
};
@ -505,16 +505,16 @@ pub const CloseOperation = struct.{
}
fn initOsDataPosix(self: *CloseOperation) OsData {
return OsData.{
return OsData{
.have_fd = false,
.close_req_node = RequestNode.{
.close_req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.Close = Request.Msg.Close.{ .fd = undefined },
.data = Request{
.msg = Request.Msg{
.Close = Request.Msg.Close{ .fd = undefined },
},
.finish = Request.Finish.{ .DeallocCloseOperation = self },
.finish = Request.Finish{ .DeallocCloseOperation = self },
},
},
};
@ -627,20 +627,20 @@ async fn writeFileModeThread(loop: *Loop, path: []const u8, contents: []const u8
const path_with_null = try std.cstr.addNullByte(loop.allocator, path);
defer loop.allocator.free(path_with_null);
var req_node = RequestNode.{
var req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.WriteFile = Request.Msg.WriteFile.{
.data = Request{
.msg = Request.Msg{
.WriteFile = Request.Msg.WriteFile{
.path = path_with_null[0..path.len],
.contents = contents,
.mode = mode,
.result = undefined,
},
},
.finish = Request.Finish.{
.TickNode = Loop.NextTickNode.{
.finish = Request.Finish{
.TickNode = Loop.NextTickNode{
.prev = null,
.next = null,
.data = @handle(),
@ -674,7 +674,7 @@ pub async fn readFile(loop: *Loop, file_path: []const u8, max_size: usize) ![]u8
while (true) {
try list.ensureCapacity(list.len + os.page_size);
const buf = list.items[list.len..];
const buf_array = [][]u8.{buf};
const buf_array = [][]u8{buf};
const amt = try await (async preadv(loop, fd, buf_array, list.len) catch unreachable);
list.len += amt;
if (list.len > max_size) {
@ -686,12 +686,12 @@ pub async fn readFile(loop: *Loop, file_path: []const u8, max_size: usize) ![]u8
}
}
pub const WatchEventId = enum.{
pub const WatchEventId = enum {
CloseWrite,
Delete,
};
pub const WatchEventError = error.{
pub const WatchEventError = error{
UserResourceLimitReached,
SystemResources,
AccessDenied,
@ -699,17 +699,17 @@ pub const WatchEventError = error.{
};
pub fn Watch(comptime V: type) type {
return struct.{
return struct {
channel: *event.Channel(Event.Error!Event),
os_data: OsData,
const OsData = switch (builtin.os) {
builtin.Os.macosx => struct.{
builtin.Os.macosx => struct {
file_table: FileTable,
table_lock: event.Lock,
const FileTable = std.AutoHashMap([]const u8, *Put);
const Put = struct.{
const Put = struct {
putter: promise,
value_ptr: *V,
};
@ -721,7 +721,7 @@ pub fn Watch(comptime V: type) type {
else => @compileError("Unsupported OS"),
};
const WindowsOsData = struct.{
const WindowsOsData = struct {
table_lock: event.Lock,
dir_table: DirTable,
all_putters: std.atomic.Queue(promise),
@ -730,14 +730,14 @@ pub fn Watch(comptime V: type) type {
const DirTable = std.AutoHashMap([]const u8, *Dir);
const FileTable = std.AutoHashMap([]const u16, V);
const Dir = struct.{
const Dir = struct {
putter: promise,
file_table: FileTable,
table_lock: event.Lock,
};
};
const LinuxOsData = struct.{
const LinuxOsData = struct {
putter: promise,
inotify_fd: i32,
wd_table: WdTable,
@ -746,7 +746,7 @@ pub fn Watch(comptime V: type) type {
const WdTable = std.AutoHashMap(i32, Dir);
const FileTable = std.AutoHashMap([]const u8, V);
const Dir = struct.{
const Dir = struct {
dirname: []const u8,
file_table: FileTable,
};
@ -756,7 +756,7 @@ pub fn Watch(comptime V: type) type {
const Self = @This();
pub const Event = struct.{
pub const Event = struct {
id: Id,
data: V,
@ -781,9 +781,9 @@ pub fn Watch(comptime V: type) type {
builtin.Os.windows => {
const self = try loop.allocator.createOne(Self);
errdefer loop.allocator.destroy(self);
self.* = Self.{
self.* = Self{
.channel = channel,
.os_data = OsData.{
.os_data = OsData{
.table_lock = event.Lock.init(loop),
.dir_table = OsData.DirTable.init(loop.allocator),
.ref_count = std.atomic.Int(usize).init(1),
@ -797,9 +797,9 @@ pub fn Watch(comptime V: type) type {
const self = try loop.allocator.createOne(Self);
errdefer loop.allocator.destroy(self);
self.* = Self.{
self.* = Self{
.channel = channel,
.os_data = OsData.{
.os_data = OsData{
.table_lock = event.Lock.init(loop),
.file_table = OsData.FileTable.init(loop.allocator),
},
@ -908,7 +908,7 @@ pub fn Watch(comptime V: type) type {
}
var value_copy = value;
var put = OsData.Put.{
var put = OsData.Put{
.putter = @handle(),
.value_ptr = &value_copy,
};
@ -928,12 +928,12 @@ pub fn Watch(comptime V: type) type {
) catch unreachable)) |kev| {
// TODO handle EV_ERROR
if (kev.fflags & posix.NOTE_DELETE != 0) {
await (async self.channel.put(Self.Event.{
await (async self.channel.put(Self.Event{
.id = Event.Id.Delete,
.data = value_copy,
}) catch unreachable);
} else if (kev.fflags & posix.NOTE_WRITE != 0) {
await (async self.channel.put(Self.Event.{
await (async self.channel.put(Self.Event{
.id = Event.Id.CloseWrite,
.data = value_copy,
}) catch unreachable);
@ -943,7 +943,7 @@ pub fn Watch(comptime V: type) type {
error.ProcessNotFound => unreachable,
error.AccessDenied, error.SystemResources => {
// TODO https://github.com/ziglang/zig/issues/769
const casted_err = @errSetCast(error.{
const casted_err = @errSetCast(error{
AccessDenied,
SystemResources,
}, err);
@ -978,7 +978,7 @@ pub fn Watch(comptime V: type) type {
const gop = try self.os_data.wd_table.getOrPut(wd);
if (!gop.found_existing) {
gop.kv.value = OsData.Dir.{
gop.kv.value = OsData.Dir{
.dirname = dirname_with_null,
.file_table = OsData.FileTable.init(self.channel.loop.allocator),
};
@ -1060,7 +1060,7 @@ pub fn Watch(comptime V: type) type {
const dir = try self.channel.loop.allocator.createOne(OsData.Dir);
errdefer self.channel.loop.allocator.destroy(dir);
dir.* = OsData.Dir.{
dir.* = OsData.Dir{
.file_table = OsData.FileTable.init(self.channel.loop.allocator),
.table_lock = event.Lock.init(self.channel.loop),
.putter = undefined,
@ -1089,7 +1089,7 @@ pub fn Watch(comptime V: type) type {
defer os.close(dir_handle);
var putter_node = std.atomic.Queue(promise).Node.{
var putter_node = std.atomic.Queue(promise).Node{
.data = @handle(),
.prev = null,
.next = null,
@ -1097,11 +1097,11 @@ pub fn Watch(comptime V: type) type {
self.os_data.all_putters.put(&putter_node);
defer _ = self.os_data.all_putters.remove(&putter_node);
var resume_node = Loop.ResumeNode.Basic.{
.base = Loop.ResumeNode.{
var resume_node = Loop.ResumeNode.Basic{
.base = Loop.ResumeNode{
.id = Loop.ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = windows.OVERLAPPED.{
.overlapped = windows.OVERLAPPED{
.Internal = 0,
.InternalHigh = 0,
.Offset = 0,
@ -1179,7 +1179,7 @@ pub fn Watch(comptime V: type) type {
}
};
if (user_value) |v| {
await (async self.channel.put(Event.{
await (async self.channel.put(Event{
.id = id,
.data = v,
}) catch unreachable);
@ -1203,9 +1203,9 @@ pub fn Watch(comptime V: type) type {
const loop = channel.loop;
var watch = Self.{
var watch = Self{
.channel = channel,
.os_data = OsData.{
.os_data = OsData{
.putter = @handle(),
.inotify_fd = inotify_fd,
.wd_table = OsData.WdTable.init(loop.allocator),
@ -1259,7 +1259,7 @@ pub fn Watch(comptime V: type) type {
}
};
if (user_value) |v| {
await (async channel.put(Event.{
await (async channel.put(Event{
.id = WatchEventId.CloseWrite,
.data = v,
}) catch unreachable);
@ -1310,7 +1310,7 @@ test "write a file, watch it, write it again" {
try loop.initMultiThreaded(allocator);
defer loop.deinit();
var result: error!void = error.ResultNeverWritten;
var result: anyerror!void = error.ResultNeverWritten;
const handle = try async<allocator> testFsWatchCantFail(&loop, &result);
defer cancel handle;
@ -1318,8 +1318,8 @@ test "write a file, watch it, write it again" {
return result;
}
async fn testFsWatchCantFail(loop: *Loop, result: *(error!void)) void {
result.* = await async testFsWatch(loop) catch unreachable;
async fn testFsWatchCantFail(loop: *Loop, result: *(anyerror!void)) void {
result.* = await (async testFsWatch(loop) catch unreachable);
}
async fn testFsWatch(loop: *Loop) !void {
@ -1353,7 +1353,7 @@ async fn testFsWatch(loop: *Loop) !void {
{
defer os.close(fd);
try await try async pwritev(loop, fd, []const []const u8.{"lorem ipsum"}, line2_offset);
try await try async pwritev(loop, fd, []const []const u8{"lorem ipsum"}, line2_offset);
}
ev_consumed = true;
@ -1370,7 +1370,7 @@ async fn testFsWatch(loop: *Loop) !void {
// TODO test deleting the file and then re-adding it. we should get events for both
}
pub const OutStream = struct.{
pub const OutStream = struct {
fd: os.FileHandle,
stream: Stream,
loop: *Loop,
@ -1380,11 +1380,11 @@ pub const OutStream = struct.{
pub const Stream = event.io.OutStream(Error);
pub fn init(loop: *Loop, fd: os.FileHandle, offset: usize) OutStream {
return OutStream.{
return OutStream{
.fd = fd,
.loop = loop,
.offset = offset,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@ -1392,11 +1392,11 @@ pub const OutStream = struct.{
const self = @fieldParentPtr(OutStream, "stream", out_stream);
const offset = self.offset;
self.offset += bytes.len;
return await (async pwritev(self.loop, self.fd, [][]const u8.{bytes}, offset) catch unreachable);
return await (async pwritev(self.loop, self.fd, [][]const u8{bytes}, offset) catch unreachable);
}
};
pub const InStream = struct.{
pub const InStream = struct {
fd: os.FileHandle,
stream: Stream,
loop: *Loop,
@ -1406,17 +1406,17 @@ pub const InStream = struct.{
pub const Stream = event.io.InStream(Error);
pub fn init(loop: *Loop, fd: os.FileHandle, offset: usize) InStream {
return InStream.{
return InStream{
.fd = fd,
.loop = loop,
.offset = offset,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
async<*mem.Allocator> fn readFn(in_stream: *Stream, bytes: []u8) Error!usize {
const self = @fieldParentPtr(InStream, "stream", in_stream);
const amt = try await (async preadv(self.loop, self.fd, [][]u8.{bytes}, self.offset) catch unreachable);
const amt = try await (async preadv(self.loop, self.fd, [][]u8{bytes}, self.offset) catch unreachable);
self.offset += amt;
return amt;
}
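The event/fs hunks above show the same change for tagged unions: both the `union(enum)` declaration and its literal drop the `.`. A hedged sketch with a hypothetical `Data` union, loosely modeled on the Request/GetNode pattern rather than the actual types:

const std = @import("std");
const assert = std.debug.assert;

// Old grammar: union(enum).{ ... } and Data.{ .Normal = ... }
const Data = union(enum) {
    Normal: i32,
    OrNull: ?i32,
};

test "tagged union literal without the dot" {
    const d = Data{ .Normal = 42 };
    switch (d) {
        Data.Normal => |x| assert(x == 42),
        Data.OrNull => unreachable,
    }
}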


@ -11,7 +11,7 @@ const Loop = std.event.Loop;
/// and then are resumed when resolve() is called.
/// At this point the value remains forever available, and another resolve() is not allowed.
pub fn Future(comptime T: type) type {
return struct.{
return struct {
lock: Lock,
data: T,
@ -25,7 +25,7 @@ pub fn Future(comptime T: type) type {
const Queue = std.atomic.Queue(promise);
pub fn init(loop: *Loop) Self {
return Self.{
return Self{
.lock = Lock.initLocked(loop),
.available = 0,
.data = undefined,
@ -78,7 +78,7 @@ pub fn Future(comptime T: type) type {
pub fn resolve(self: *Self) void {
const prev = @atomicRmw(u8, &self.available, AtomicRmwOp.Xchg, 2, AtomicOrder.SeqCst);
assert(prev == 0 or prev == 1); // resolve() called twice
Lock.Held.release(Lock.Held.{ .lock = &self.lock });
Lock.Held.release(Lock.Held{ .lock = &self.lock });
}
};
}


@ -8,7 +8,7 @@ const assert = std.debug.assert;
/// ReturnType must be `void` or `E!void`
pub fn Group(comptime ReturnType: type) type {
return struct.{
return struct {
coro_stack: Stack,
alloc_stack: Stack,
lock: Lock,
@ -22,7 +22,7 @@ pub fn Group(comptime ReturnType: type) type {
const Stack = std.atomic.Stack(promise->ReturnType);
pub fn init(loop: *Loop) Self {
return Self.{
return Self{
.coro_stack = Stack.init(),
.alloc_stack = Stack.init(),
.lock = Lock.init(loop),
@ -41,8 +41,8 @@ pub fn Group(comptime ReturnType: type) type {
}
/// Add a promise to the group. Thread-safe.
pub fn add(self: *Self, handle: promise->ReturnType) (error.{OutOfMemory}!void) {
const node = try self.lock.loop.allocator.create(Stack.Node.{
pub fn add(self: *Self, handle: promise->ReturnType) (error{OutOfMemory}!void) {
const node = try self.lock.loop.allocator.create(Stack.Node{
.next = undefined,
.data = handle,
});
@ -61,8 +61,8 @@ pub fn Group(comptime ReturnType: type) type {
/// This is equivalent to an async call, but the async function is added to the group, instead
/// of returning a promise. func must be async and have return type ReturnType.
/// Thread-safe.
pub fn call(self: *Self, comptime func: var, args: ...) (error.{OutOfMemory}!void) {
const S = struct.{
pub fn call(self: *Self, comptime func: var, args: ...) (error{OutOfMemory}!void) {
const S = struct {
async fn asyncFunc(node: **Stack.Node, args2: ...) ReturnType {
// TODO this is a hack to make the memory following be inside the coro frame
suspend {
@ -78,7 +78,7 @@ pub fn Group(comptime ReturnType: type) type {
};
var node: *Stack.Node = undefined;
const handle = try async<self.lock.loop.allocator> S.asyncFunc(&node, args);
node.* = Stack.Node.{
node.* = Stack.Node{
.next = undefined,
.data = handle,
};
@ -144,7 +144,7 @@ async fn testGroup(loop: *Loop) void {
await (async group.wait() catch @panic("memory"));
assert(count == 11);
var another = Group(error!void).init(loop);
var another = Group(anyerror!void).init(loop);
another.add(async somethingElse() catch @panic("memory")) catch @panic("memory");
another.call(doSomethingThatFails) catch @panic("memory");
std.debug.assertError(await (async another.wait() catch @panic("memory")), error.ItBroke);
@ -162,7 +162,7 @@ async fn increaseByTen(count: *usize) void {
}
}
async fn doSomethingThatFails() error!void {}
async fn somethingElse() error!void {
async fn doSomethingThatFails() anyerror!void {}
async fn somethingElse() anyerror!void {
return error.ItBroke;
}


@ -5,7 +5,7 @@ const assert = std.debug.assert;
const mem = std.mem;
pub fn InStream(comptime ReadError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = ReadError;
@ -62,7 +62,7 @@ pub fn InStream(comptime ReadError: type) type {
}
pub fn OutStream(comptime WriteError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = WriteError;
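The event stream and channel files above all use the `fn (comptime T: type) type { return struct { ... }; }` pattern, which under the new grammar likewise loses the `.` after `struct`, and initializes `Self` without it. A minimal sketch, assuming a hypothetical `Box` container:

const std = @import("std");
const assert = std.debug.assert;

// Old grammar: return struct.{ ... } and Self.{ .value = value }
fn Box(comptime T: type) type {
    return struct {
        const Self = @This();

        value: T,

        pub fn init(value: T) Self {
            return Self{ .value = value };
        }
    };
}

test "generic container without the dot" {
    const b = Box(u32).init(7);
    assert(b.value == 7);
}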


@ -10,7 +10,7 @@ const Loop = std.event.Loop;
/// coroutines which are waiting for the lock are suspended, and
/// are resumed when the lock is released, in order.
/// Allows only one actor to hold the lock.
pub const Lock = struct.{
pub const Lock = struct {
loop: *Loop,
shared_bit: u8, // TODO make this a bool
queue: Queue,
@ -18,7 +18,7 @@ pub const Lock = struct.{
const Queue = std.atomic.Queue(promise);
pub const Held = struct.{
pub const Held = struct {
lock: *Lock,
pub fn release(self: Held) void {
@ -66,7 +66,7 @@ pub const Lock = struct.{
};
pub fn init(loop: *Loop) Lock {
return Lock.{
return Lock{
.loop = loop,
.shared_bit = 0,
.queue = Queue.init(),
@ -75,7 +75,7 @@ pub const Lock = struct.{
}
pub fn initLocked(loop: *Loop) Lock {
return Lock.{
return Lock{
.loop = loop,
.shared_bit = 1,
.queue = Queue.init(),
@ -117,7 +117,7 @@ pub const Lock = struct.{
}
}
return Held.{ .lock = self };
return Held{ .lock = self };
}
};
@ -138,7 +138,7 @@ test "std.event.Lock" {
defer cancel handle;
loop.run();
assert(mem.eql(i32, shared_test_data, [1]i32.{3 * @intCast(i32, shared_test_data.len)} ** shared_test_data.len));
assert(mem.eql(i32, shared_test_data, [1]i32{3 * @intCast(i32, shared_test_data.len)} ** shared_test_data.len));
}
async fn testLock(loop: *Loop, lock: *Lock) void {
@ -147,7 +147,7 @@ async fn testLock(loop: *Loop, lock: *Lock) void {
resume @handle();
}
const handle1 = async lockRunner(lock) catch @panic("out of memory");
var tick_node1 = Loop.NextTickNode.{
var tick_node1 = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = handle1,
@ -155,7 +155,7 @@ async fn testLock(loop: *Loop, lock: *Lock) void {
loop.onNextTick(&tick_node1);
const handle2 = async lockRunner(lock) catch @panic("out of memory");
var tick_node2 = Loop.NextTickNode.{
var tick_node2 = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = handle2,
@ -163,7 +163,7 @@ async fn testLock(loop: *Loop, lock: *Lock) void {
loop.onNextTick(&tick_node2);
const handle3 = async lockRunner(lock) catch @panic("out of memory");
var tick_node3 = Loop.NextTickNode.{
var tick_node3 = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = handle3,
@ -175,7 +175,7 @@ async fn testLock(loop: *Loop, lock: *Lock) void {
await handle3;
}
var shared_test_data = [1]i32.{0} ** 10;
var shared_test_data = [1]i32{0} ** 10;
var shared_test_index: usize = 0;
async fn lockRunner(lock: *Lock) void {


@ -6,13 +6,13 @@ const Loop = std.event.Loop;
/// coroutines which are waiting for the lock are suspended, and
/// are resumed when the lock is released, in order.
pub fn Locked(comptime T: type) type {
return struct.{
return struct {
lock: Lock,
private_data: T,
const Self = @This();
pub const HeldLock = struct.{
pub const HeldLock = struct {
value: *T,
held: Lock.Held,
@ -22,7 +22,7 @@ pub fn Locked(comptime T: type) type {
};
pub fn init(loop: *Loop, data: T) Self {
return Self.{
return Self{
.lock = Lock.init(loop),
.private_data = data,
};
@ -33,7 +33,7 @@ pub fn Locked(comptime T: type) type {
}
pub async fn acquire(self: *Self) HeldLock {
return HeldLock.{
return HeldLock{
// TODO guaranteed allocation elision
.held = await (async self.lock.acquire() catch unreachable),
.value = &self.private_data,


@ -10,7 +10,7 @@ const posix = os.posix;
const windows = os.windows;
const maxInt = std.math.maxInt;
pub const Loop = struct.{
pub const Loop = struct {
allocator: *mem.Allocator,
next_tick_queue: std.atomic.Queue(promise),
os_data: OsData,
@ -25,13 +25,13 @@ pub const Loop = struct.{
pub const NextTickNode = std.atomic.Queue(promise).Node;
pub const ResumeNode = struct.{
pub const ResumeNode = struct {
id: Id,
handle: promise,
overlapped: Overlapped,
pub const overlapped_init = switch (builtin.os) {
builtin.Os.windows => windows.OVERLAPPED.{
builtin.Os.windows => windows.OVERLAPPED{
.Internal = 0,
.InternalHigh = 0,
.Offset = 0,
@ -42,7 +42,7 @@ pub const Loop = struct.{
};
pub const Overlapped = @typeOf(overlapped_init);
pub const Id = enum.{
pub const Id = enum {
Basic,
Stop,
EventFd,
@ -50,35 +50,35 @@ pub const Loop = struct.{
pub const EventFd = switch (builtin.os) {
builtin.Os.macosx => MacOsEventFd,
builtin.Os.linux => struct.{
builtin.Os.linux => struct {
base: ResumeNode,
epoll_op: u32,
eventfd: i32,
},
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
base: ResumeNode,
completion_key: usize,
},
else => @compileError("unsupported OS"),
};
const MacOsEventFd = struct.{
const MacOsEventFd = struct {
base: ResumeNode,
kevent: posix.Kevent,
};
pub const Basic = switch (builtin.os) {
builtin.Os.macosx => MacOsBasic,
builtin.Os.linux => struct.{
builtin.Os.linux => struct {
base: ResumeNode,
},
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
base: ResumeNode,
},
else => @compileError("unsupported OS"),
};
const MacOsBasic = struct.{
const MacOsBasic = struct {
base: ResumeNode,
kev: posix.Kevent,
};
@ -104,7 +104,7 @@ pub const Loop = struct.{
/// Thread count is the total thread count. The thread pool size will be
/// max(thread_count - 1, 0)
fn initInternal(self: *Loop, allocator: *mem.Allocator, thread_count: usize) !void {
self.* = Loop.{
self.* = Loop{
.pending_event_count = 1,
.allocator = allocator,
.os_data = undefined,
@ -112,7 +112,7 @@ pub const Loop = struct.{
.extra_threads = undefined,
.available_eventfd_resume_nodes = std.atomic.Stack(ResumeNode.EventFd).init(),
.eventfd_resume_nodes = undefined,
.final_resume_node = ResumeNode.{
.final_resume_node = ResumeNode{
.id = ResumeNode.Id.Stop,
.handle = undefined,
.overlapped = ResumeNode.overlapped_init,
@ -141,7 +141,7 @@ pub const Loop = struct.{
os.SpawnThreadError || os.LinuxEpollCtlError || os.BsdKEventError ||
os.WindowsCreateIoCompletionPortError;
const wakeup_bytes = []u8.{0x1} ** 8;
const wakeup_bytes = []u8{0x1} ** 8;
fn initOsData(self: *Loop, extra_thread_count: usize) InitOsDataError!void {
switch (builtin.os) {
@ -150,10 +150,10 @@ pub const Loop = struct.{
self.os_data.fs_queue_item = 0;
// we need another thread for the file system because Linux does not have an async
// file system I/O API.
self.os_data.fs_end_request = fs.RequestNode.{
self.os_data.fs_end_request = fs.RequestNode{
.prev = undefined,
.next = undefined,
.data = fs.Request.{
.data = fs.Request{
.msg = fs.Request.Msg.End,
.finish = fs.Request.Finish.NoAction,
},
@ -163,9 +163,9 @@ pub const Loop = struct.{
while (self.available_eventfd_resume_nodes.pop()) |node| os.close(node.data.eventfd);
}
for (self.eventfd_resume_nodes) |*eventfd_node| {
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node.{
.data = ResumeNode.EventFd.{
.base = ResumeNode.{
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node{
.data = ResumeNode.EventFd{
.base = ResumeNode{
.id = ResumeNode.Id.EventFd,
.handle = undefined,
.overlapped = ResumeNode.overlapped_init,
@ -184,9 +184,9 @@ pub const Loop = struct.{
self.os_data.final_eventfd = try os.linuxEventFd(0, posix.EFD_CLOEXEC | posix.EFD_NONBLOCK);
errdefer os.close(self.os_data.final_eventfd);
self.os_data.final_eventfd_event = posix.epoll_event.{
self.os_data.final_eventfd_event = posix.epoll_event{
.events = posix.EPOLLIN,
.data = posix.epoll_data.{ .ptr = @ptrToInt(&self.final_resume_node) },
.data = posix.epoll_data{ .ptr = @ptrToInt(&self.final_resume_node) },
};
try os.linuxEpollCtl(
self.os_data.epollfd,
@ -224,10 +224,10 @@ pub const Loop = struct.{
self.os_data.fs_queue = std.atomic.Queue(fs.Request).init();
// we need another thread for the file system because Darwin does not have an async
// file system I/O API.
self.os_data.fs_end_request = fs.RequestNode.{
self.os_data.fs_end_request = fs.RequestNode{
.prev = undefined,
.next = undefined,
.data = fs.Request.{
.data = fs.Request{
.msg = fs.Request.Msg.End,
.finish = fs.Request.Finish.NoAction,
},
@ -236,15 +236,15 @@ pub const Loop = struct.{
const empty_kevs = ([*]posix.Kevent)(undefined)[0..0];
for (self.eventfd_resume_nodes) |*eventfd_node, i| {
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node.{
.data = ResumeNode.EventFd.{
.base = ResumeNode.{
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node{
.data = ResumeNode.EventFd{
.base = ResumeNode{
.id = ResumeNode.Id.EventFd,
.handle = undefined,
.overlapped = ResumeNode.overlapped_init,
},
// this one is for sending events
.kevent = posix.Kevent.{
.kevent = posix.Kevent{
.ident = i,
.filter = posix.EVFILT_USER,
.flags = posix.EV_CLEAR | posix.EV_ADD | posix.EV_DISABLE,
@ -264,7 +264,7 @@ pub const Loop = struct.{
// Pre-add so that we cannot get error.SystemResources
// later when we try to activate it.
self.os_data.final_kevent = posix.Kevent.{
self.os_data.final_kevent = posix.Kevent{
.ident = extra_thread_count,
.filter = posix.EVFILT_USER,
.flags = posix.EV_ADD | posix.EV_DISABLE,
@ -277,7 +277,7 @@ pub const Loop = struct.{
self.os_data.final_kevent.flags = posix.EV_ENABLE;
self.os_data.final_kevent.fflags = posix.NOTE_TRIGGER;
self.os_data.fs_kevent_wake = posix.Kevent.{
self.os_data.fs_kevent_wake = posix.Kevent{
.ident = 0,
.filter = posix.EVFILT_USER,
.flags = posix.EV_ADD | posix.EV_ENABLE,
@ -286,7 +286,7 @@ pub const Loop = struct.{
.udata = undefined,
};
self.os_data.fs_kevent_wait = posix.Kevent.{
self.os_data.fs_kevent_wait = posix.Kevent{
.ident = 0,
.filter = posix.EVFILT_USER,
.flags = posix.EV_ADD | posix.EV_CLEAR,
@ -323,9 +323,9 @@ pub const Loop = struct.{
errdefer os.close(self.os_data.io_port);
for (self.eventfd_resume_nodes) |*eventfd_node, i| {
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node.{
.data = ResumeNode.EventFd.{
.base = ResumeNode.{
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node{
.data = ResumeNode.EventFd{
.base = ResumeNode{
.id = ResumeNode.Id.EventFd,
.handle = undefined,
.overlapped = ResumeNode.overlapped_init,
@ -396,9 +396,9 @@ pub const Loop = struct.{
pub fn linuxModFd(self: *Loop, fd: i32, op: u32, flags: u32, resume_node: *ResumeNode) !void {
assert(flags & posix.EPOLLET == posix.EPOLLET);
var ev = os.linux.epoll_event.{
var ev = os.linux.epoll_event{
.events = flags,
.data = os.linux.epoll_data.{ .ptr = @ptrToInt(resume_node) },
.data = os.linux.epoll_data{ .ptr = @ptrToInt(resume_node) },
};
try os.linuxEpollCtl(self.os_data.epollfd, op, fd, &ev);
}
@ -412,8 +412,8 @@ pub const Loop = struct.{
defer self.linuxRemoveFd(fd);
suspend {
// TODO explicitly put this memory in the coroutine frame #1194
var resume_node = ResumeNode.Basic.{
.base = ResumeNode.{
var resume_node = ResumeNode.Basic{
.base = ResumeNode{
.id = ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = ResumeNode.overlapped_init,
@ -428,8 +428,8 @@ pub const Loop = struct.{
suspend {
resume @handle();
}
var resume_node = ResumeNode.Basic.{
.base = ResumeNode.{
var resume_node = ResumeNode.Basic{
.base = ResumeNode{
.id = ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = ResumeNode.overlapped_init,
@ -447,7 +447,7 @@ pub const Loop = struct.{
pub fn bsdAddKev(self: *Loop, resume_node: *ResumeNode.Basic, ident: usize, filter: i16, fflags: u32) !void {
self.beginOneEvent();
errdefer self.finishOneEvent();
var kev = posix.Kevent.{
var kev = posix.Kevent{
.ident = ident,
.filter = filter,
.flags = posix.EV_ADD | posix.EV_ENABLE | posix.EV_CLEAR,
@ -461,7 +461,7 @@ pub const Loop = struct.{
}
pub fn bsdRemoveKev(self: *Loop, ident: usize, filter: i16) void {
var kev = posix.Kevent.{
var kev = posix.Kevent{
.ident = ident,
.filter = filter,
.flags = posix.EV_DELETE,
@ -559,11 +559,11 @@ pub const Loop = struct.{
/// it immediately returns to the caller, and the async function is queued in the event loop. It still
/// returns a promise to be awaited.
pub fn call(self: *Loop, comptime func: var, args: ...) !(promise->@typeOf(func).ReturnType) {
const S = struct.{
const S = struct {
async fn asyncFunc(loop: *Loop, handle: *promise->@typeOf(func).ReturnType, args2: ...) @typeOf(func).ReturnType {
suspend {
handle.* = @handle();
var my_tick_node = Loop.NextTickNode.{
var my_tick_node = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = @handle(),
@ -585,7 +585,7 @@ pub const Loop = struct.{
/// is performed.
pub async fn yield(self: *Loop) void {
suspend {
var my_tick_node = Loop.NextTickNode.{
var my_tick_node = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = @handle(),
@ -814,14 +814,14 @@ pub const Loop = struct.{
const OsData = switch (builtin.os) {
builtin.Os.linux => LinuxOsData,
builtin.Os.macosx => MacOsData,
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
io_port: windows.HANDLE,
extra_thread_count: usize,
},
else => struct.{},
else => struct {},
};
const MacOsData = struct.{
const MacOsData = struct {
kqfd: i32,
final_kevent: posix.Kevent,
fs_kevent_wake: posix.Kevent,
@ -832,7 +832,7 @@ pub const Loop = struct.{
fs_end_request: fs.RequestNode,
};
const LinuxOsData = struct.{
const LinuxOsData = struct {
epollfd: i32,
final_eventfd: i32,
final_eventfd_event: os.linux.epoll_event,

View File

@ -7,7 +7,7 @@ const os = std.os;
const posix = os.posix;
const Loop = std.event.Loop;
pub const Server = struct.{
pub const Server = struct {
handleRequestFn: async<*mem.Allocator> fn (*Server, *const std.net.Address, os.File) void,
loop: *Loop,
@ -22,14 +22,14 @@ pub const Server = struct.{
pub fn init(loop: *Loop) Server {
// TODO can't initialize handler coroutine here because we need well defined copy elision
return Server.{
return Server{
.loop = loop,
.sockfd = null,
.accept_coro = null,
.handleRequestFn = undefined,
.waiting_for_emfile_node = undefined,
.listen_address = undefined,
.listen_resume_node = event.Loop.ResumeNode.{
.listen_resume_node = event.Loop.ResumeNode{
.id = event.Loop.ResumeNode.Id.Basic,
.handle = undefined,
.overlapped = event.Loop.ResumeNode.overlapped_init,
@ -118,7 +118,7 @@ pub async fn connectUnixSocket(loop: *Loop, path: []const u8) !i32 {
);
errdefer os.close(sockfd);
var sock_addr = posix.sockaddr_un.{
var sock_addr = posix.sockaddr_un{
.family = posix.AF_UNIX,
.path = undefined,
};
@ -133,7 +133,7 @@ pub async fn connectUnixSocket(loop: *Loop, path: []const u8) !i32 {
return sockfd;
}
pub const ReadError = error.{
pub const ReadError = error{
SystemResources,
Unexpected,
UserResourceLimitReached,
@ -147,7 +147,7 @@ pub const ReadError = error.{
/// returns number of bytes read. 0 means EOF.
pub async fn read(loop: *std.event.Loop, fd: os.FileHandle, buffer: []u8) ReadError!usize {
const iov = posix.iovec.{
const iov = posix.iovec{
.iov_base = buffer.ptr,
.iov_len = buffer.len,
};
@ -155,10 +155,10 @@ pub async fn read(loop: *std.event.Loop, fd: os.FileHandle, buffer: []u8) ReadEr
return await (async readvPosix(loop, fd, iovs, 1) catch unreachable);
}
pub const WriteError = error.{};
pub const WriteError = error{};
pub async fn write(loop: *std.event.Loop, fd: os.FileHandle, buffer: []const u8) WriteError!void {
const iov = posix.iovec_const.{
const iov = posix.iovec_const{
.iov_base = buffer.ptr,
.iov_len = buffer.len,
};
@ -232,7 +232,7 @@ pub async fn writev(loop: *Loop, fd: os.FileHandle, data: []const []const u8) !v
defer loop.allocator.free(iovecs);
for (data) |buf, i| {
iovecs[i] = os.posix.iovec_const.{
iovecs[i] = os.posix.iovec_const{
.iov_base = buf.ptr,
.iov_len = buf.len,
};
@ -246,7 +246,7 @@ pub async fn readv(loop: *Loop, fd: os.FileHandle, data: []const []u8) !usize {
defer loop.allocator.free(iovecs);
for (data) |buf, i| {
iovecs[i] = os.posix.iovec.{
iovecs[i] = os.posix.iovec{
.iov_base = buf.ptr,
.iov_len = buf.len,
};
@ -274,7 +274,7 @@ test "listen on a port, send bytes, receive bytes" {
return error.SkipZigTest;
}
const MyServer = struct.{
const MyServer = struct {
tcp_server: Server,
const Self = @This();
@ -305,7 +305,7 @@ test "listen on a port, send bytes, receive bytes" {
var loop: Loop = undefined;
try loop.initSingleThreaded(std.debug.global_allocator);
var server = MyServer.{ .tcp_server = Server.init(&loop) };
var server = MyServer{ .tcp_server = Server.init(&loop) };
defer server.tcp_server.deinit();
try server.tcp_server.listen(&addr, MyServer.handler);
@ -327,7 +327,7 @@ async fn doAsyncTest(loop: *Loop, address: *const std.net.Address, server: *Serv
server.close();
}
pub const OutStream = struct.{
pub const OutStream = struct {
fd: os.FileHandle,
stream: Stream,
loop: *Loop,
@ -336,10 +336,10 @@ pub const OutStream = struct.{
pub const Stream = event.io.OutStream(Error);
pub fn init(loop: *Loop, fd: os.FileHandle) OutStream {
return OutStream.{
return OutStream{
.fd = fd,
.loop = loop,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@ -349,7 +349,7 @@ pub const OutStream = struct.{
}
};
pub const InStream = struct.{
pub const InStream = struct {
fd: os.FileHandle,
stream: Stream,
loop: *Loop,
@ -358,10 +358,10 @@ pub const InStream = struct.{
pub const Stream = event.io.InStream(Error);
pub fn init(loop: *Loop, fd: os.FileHandle) InStream {
return InStream.{
return InStream{
.fd = fd,
.loop = loop,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
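The hunks above all apply the same two mechanical edits from the new grammar: container declarations drop the `.` in `struct.{` / `enum.{` / `union.{`, and initializer expressions drop the `.` in `Type.{ ... }`. A minimal standalone sketch of the new syntax (the `Point` type and the test name are hypothetical, used only for illustration):

const std = @import("std");
const assert = std.debug.assert;

// New declaration grammar: no `.` between the keyword and `{`.
const Point = struct {
    x: i32,
    y: i32,
};

test "struct literal without the dot" {
    // Old grammar: const p = Point.{ .x = 1, .y = 2 };
    const p = Point{ .x = 1, .y = 2 };
    assert(p.x + p.y == 3);
}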

View File

@ -12,7 +12,7 @@ const Loop = std.event.Loop;
/// Many readers can hold the lock at the same time; however locking for writing is exclusive.
/// When a read lock is held, it will not be released until the reader queue is empty.
/// When a write lock is held, it will not be released until the writer queue is empty.
pub const RwLock = struct.{
pub const RwLock = struct {
loop: *Loop,
shared_state: u8, // TODO make this an enum
writer_queue: Queue,
@ -21,7 +21,7 @@ pub const RwLock = struct.{
reader_queue_empty_bit: u8, // TODO make this a bool
reader_lock_count: usize,
const State = struct.{
const State = struct {
const Unlocked = 0;
const WriteLock = 1;
const ReadLock = 2;
@ -29,7 +29,7 @@ pub const RwLock = struct.{
const Queue = std.atomic.Queue(promise);
pub const HeldRead = struct.{
pub const HeldRead = struct {
lock: *RwLock,
pub fn release(self: HeldRead) void {
@ -48,7 +48,7 @@ pub const RwLock = struct.{
}
};
pub const HeldWrite = struct.{
pub const HeldWrite = struct {
lock: *RwLock,
pub fn release(self: HeldWrite) void {
@ -77,7 +77,7 @@ pub const RwLock = struct.{
};
pub fn init(loop: *Loop) RwLock {
return RwLock.{
return RwLock{
.loop = loop,
.shared_state = State.Unlocked,
.writer_queue = Queue.init(),
@ -101,7 +101,7 @@ pub const RwLock = struct.{
suspend {
// TODO explicitly put this memory in the coroutine frame #1194
var my_tick_node = Loop.NextTickNode.{
var my_tick_node = Loop.NextTickNode{
.data = @handle(),
.prev = undefined,
.next = undefined,
@ -128,13 +128,13 @@ pub const RwLock = struct.{
}
}
}
return HeldRead.{ .lock = self };
return HeldRead{ .lock = self };
}
pub async fn acquireWrite(self: *RwLock) HeldWrite {
suspend {
// TODO explicitly put this memory in the coroutine frame #1194
var my_tick_node = Loop.NextTickNode.{
var my_tick_node = Loop.NextTickNode{
.data = @handle(),
.prev = undefined,
.next = undefined,
@ -158,7 +158,7 @@ pub const RwLock = struct.{
}
}
}
return HeldWrite.{ .lock = self };
return HeldWrite{ .lock = self };
}
fn commonPostUnlock(self: *RwLock) void {
@ -227,7 +227,7 @@ test "std.event.RwLock" {
defer cancel handle;
loop.run();
const expected_result = [1]i32.{shared_it_count * @intCast(i32, shared_test_data.len)} ** shared_test_data.len;
const expected_result = [1]i32{shared_it_count * @intCast(i32, shared_test_data.len)} ** shared_test_data.len;
assert(mem.eql(i32, shared_test_data, expected_result));
}
@ -258,7 +258,7 @@ async fn testLock(loop: *Loop, lock: *RwLock) void {
}
const shared_it_count = 10;
var shared_test_data = [1]i32.{0} ** 10;
var shared_test_data = [1]i32{0} ** 10;
var shared_test_index: usize = 0;
var shared_count: usize = 0;

View File

@ -6,13 +6,13 @@ const Loop = std.event.Loop;
/// coroutines which are waiting for the lock are suspended, and
/// are resumed when the lock is released, in order.
pub fn RwLocked(comptime T: type) type {
return struct.{
return struct {
lock: RwLock,
locked_data: T,
const Self = @This();
pub const HeldReadLock = struct.{
pub const HeldReadLock = struct {
value: *const T,
held: RwLock.HeldRead,
@ -21,7 +21,7 @@ pub fn RwLocked(comptime T: type) type {
}
};
pub const HeldWriteLock = struct.{
pub const HeldWriteLock = struct {
value: *T,
held: RwLock.HeldWrite,
@ -31,7 +31,7 @@ pub fn RwLocked(comptime T: type) type {
};
pub fn init(loop: *Loop, data: T) Self {
return Self.{
return Self{
.lock = RwLock.init(loop),
.locked_data = data,
};
@ -42,14 +42,14 @@ pub fn RwLocked(comptime T: type) type {
}
pub async fn acquireRead(self: *Self) HeldReadLock {
return HeldReadLock.{
return HeldReadLock{
.held = await (async self.lock.acquireRead() catch unreachable),
.value = &self.locked_data,
};
}
pub async fn acquireWrite(self: *Self) HeldWriteLock {
return HeldWriteLock.{
return HeldWriteLock{
.held = await (async self.lock.acquireWrite() catch unreachable),
.value = &self.locked_data,
};

View File

@ -1,4 +1,4 @@
pub const enum3 = []u64.{
pub const enum3 = []u64{
0x4e2e2785c3a2a20b,
0x240a28877a09a4e1,
0x728fca36c06cf106,
@ -433,19 +433,19 @@ pub const enum3 = []u64.{
0x6d4b9445072f4374,
};
const Slab = struct.{
const Slab = struct {
str: []const u8,
exp: i32,
};
fn slab(str: []const u8, exp: i32) Slab {
return Slab.{
return Slab{
.str = str,
.exp = exp,
};
}
pub const enum3_data = []Slab.{
pub const enum3_data = []Slab{
slab("40648030339495312", 69),
slab("4498645355592131", -134),
slab("678321594594593", 244),

View File

@ -7,12 +7,12 @@ const math = std.math;
const mem = std.mem;
const assert = std.debug.assert;
pub const FloatDecimal = struct.{
pub const FloatDecimal = struct {
digits: []u8,
exp: i32,
};
pub const RoundMode = enum.{
pub const RoundMode = enum {
// Round only the fractional portion (e.g. 1234.23 has precision 2)
Decimal,
// Round the entire whole/fractional portion (e.g. 1.23423e3 has precision 5)
@ -86,7 +86,7 @@ pub fn errol3(value: f64, buffer: []u8) FloatDecimal {
const data = enum3_data[i];
const digits = buffer[1 .. data.str.len + 1];
mem.copy(u8, digits, data.str);
return FloatDecimal.{
return FloatDecimal{
.digits = digits,
.exp = data.exp,
};
@ -135,11 +135,11 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
}
// compute boundaries
var high = HP.{
var high = HP{
.val = mid.val,
.off = mid.off + (fpnext(val) - val) * lten * ten / 2.0,
};
var low = HP.{
var low = HP{
.val = mid.val,
.off = mid.off + (fpprev(val) - val) * lten * ten / 2.0,
};
@ -191,7 +191,7 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
buffer[buf_index] = mdig + '0';
buf_index += 1;
return FloatDecimal.{
return FloatDecimal{
.digits = buffer[1..buf_index],
.exp = exp,
};
@ -229,7 +229,7 @@ fn hpProd(in: HP, val: f64) HP {
const p = in.val * val;
const e = ((hi * hi2 - p) + lo * hi2 + hi * lo2) + lo * lo2;
return HP.{
return HP{
.val = p,
.off = in.off * val + e,
};
@ -342,7 +342,7 @@ fn errolInt(val: f64, buffer: []u8) FloatDecimal {
buf_index += 1;
}
return FloatDecimal.{
return FloatDecimal{
.digits = buffer[0..buf_index],
.exp = @intCast(i32, buf_index) + mi,
};
@ -401,7 +401,7 @@ fn errolFixed(val: f64, buffer: []u8) FloatDecimal {
buffer[j] = 0;
return FloatDecimal.{
return FloatDecimal{
.digits = buffer[0..j],
.exp = exp,
};
@ -415,7 +415,7 @@ fn fpprev(val: f64) f64 {
return @bitCast(f64, @bitCast(u64, val) -% 1);
}
pub const c_digits_lut = []u8.{
pub const c_digits_lut = []u8{
'0', '0', '0', '1', '0', '2', '0', '3', '0', '4', '0', '5', '0', '6',
'0', '7', '0', '8', '0', '9', '1', '0', '1', '1', '1', '2', '1', '3',
'1', '4', '1', '5', '1', '6', '1', '7', '1', '8', '1', '9', '2', '0',

File diff suppressed because it is too large

View File

@ -13,7 +13,7 @@ const max_int_digits = 65;
/// If `output` returns an error, the error is returned from `format` and
/// `output` is not called again.
pub fn format(context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void, comptime fmt: []const u8, args: ...) Errors!void {
const State = enum.{
const State = enum {
Start,
OpenBrace,
CloseBrace,
@ -111,7 +111,7 @@ pub fn formatType(
output: fn (@typeOf(context), []const u8) Errors!void,
) Errors!void {
const T = @typeOf(value);
if (T == error) {
if (T == anyerror) {
try output(context, "error.");
return output(context, @errorName(value));
}
@ -286,7 +286,7 @@ pub fn formatIntValue(
switch (fmt[0]) {
'c' => {
if (@typeOf(value) == u8) {
if (fmt.len > 1) @compileError("Unknown format character: " ++ []u8.{fmt[1]});
if (fmt.len > 1) @compileError("Unknown format character: " ++ []u8{fmt[1]});
return formatAsciiChar(value, context, Errors, output);
}
},
@ -310,7 +310,7 @@ pub fn formatIntValue(
uppercase = true;
width = 0;
},
else => @compileError("Unknown format character: " ++ []u8.{fmt[0]}),
else => @compileError("Unknown format character: " ++ []u8{fmt[0]}),
}
if (fmt.len > 1) width = comptime (parseUnsigned(usize, fmt[1..], 10) catch unreachable);
}
@ -334,7 +334,7 @@ fn formatFloatValue(
switch (float_fmt) {
'e' => try formatFloatScientific(value, width, context, Errors, output),
'.' => try formatFloatDecimal(value, width, context, Errors, output),
else => @compileError("Unknown format character: " ++ []u8.{float_fmt}),
else => @compileError("Unknown format character: " ++ []u8{float_fmt}),
}
}
@ -355,7 +355,7 @@ pub fn formatText(
try formatInt(c, 16, fmt[0] == 'X', 2, context, Errors, output);
}
return;
} else @compileError("Unknown format character: " ++ []u8.{fmt[0]});
} else @compileError("Unknown format character: " ++ []u8{fmt[0]});
}
return output(context, bytes);
}
@ -661,8 +661,8 @@ pub fn formatBytes(
}
const buf = switch (radix) {
1000 => []u8.{ suffix, 'B' },
1024 => []u8.{ suffix, 'i', 'B' },
1000 => []u8{ suffix, 'B' },
1024 => []u8{ suffix, 'i', 'B' },
else => unreachable,
};
return output(context, buf);
@ -757,18 +757,18 @@ fn formatIntUnsigned(
}
pub fn formatIntBuf(out_buf: []u8, value: var, base: u8, uppercase: bool, width: usize) usize {
var context = FormatIntBuf.{
var context = FormatIntBuf{
.out_buf = out_buf,
.index = 0,
};
formatInt(value, base, uppercase, width, &context, error.{}, formatIntCallback) catch unreachable;
formatInt(value, base, uppercase, width, &context, error{}, formatIntCallback) catch unreachable;
return context.index;
}
const FormatIntBuf = struct.{
const FormatIntBuf = struct {
out_buf: []u8,
index: usize,
};
fn formatIntCallback(context: *FormatIntBuf, bytes: []const u8) (error.{}!void) {
fn formatIntCallback(context: *FormatIntBuf, bytes: []const u8) (error{}!void) {
mem.copy(u8, context.out_buf[context.index..], bytes);
context.index += bytes.len;
}
@ -795,7 +795,7 @@ test "fmt.parseInt" {
assert(if (parseInt(u8, "256", 10)) |_| false else |err| err == error.Overflow);
}
const ParseUnsignedError = error.{
const ParseUnsignedError = error{
/// The result cannot fit in the type specified
Overflow,
@ -815,7 +815,7 @@ pub fn parseUnsigned(comptime T: type, buf: []const u8, radix: u8) ParseUnsigned
return x;
}
pub fn charToDigit(c: u8, radix: u8) (error.{InvalidCharacter}!u8) {
pub fn charToDigit(c: u8, radix: u8) (error{InvalidCharacter}!u8) {
const value = switch (c) {
'0'...'9' => c - '0',
'A'...'Z' => c - 'A' + 10,
@ -836,7 +836,7 @@ fn digitToChar(digit: u8, uppercase: bool) u8 {
};
}
const BufPrintContext = struct.{
const BufPrintContext = struct {
remaining: []u8,
};
@ -847,23 +847,23 @@ fn bufPrintWrite(context: *BufPrintContext, bytes: []const u8) !void {
}
pub fn bufPrint(buf: []u8, comptime fmt: []const u8, args: ...) ![]u8 {
var context = BufPrintContext.{ .remaining = buf };
try format(&context, error.{BufferTooSmall}, bufPrintWrite, fmt, args);
var context = BufPrintContext{ .remaining = buf };
try format(&context, error{BufferTooSmall}, bufPrintWrite, fmt, args);
return buf[0 .. buf.len - context.remaining.len];
}
pub const AllocPrintError = error.{OutOfMemory};
pub const AllocPrintError = error{OutOfMemory};
pub fn allocPrint(allocator: *mem.Allocator, comptime fmt: []const u8, args: ...) AllocPrintError![]u8 {
var size: usize = 0;
format(&size, error.{}, countSize, fmt, args) catch |err| switch (err) {};
format(&size, error{}, countSize, fmt, args) catch |err| switch (err) {};
const buf = try allocator.alloc(u8, size);
return bufPrint(buf, fmt, args) catch |err| switch (err) {
error.BufferTooSmall => unreachable, // we just counted the size above
};
}
fn countSize(size: *usize, bytes: []const u8) (error.{}!void) {
fn countSize(size: *usize, bytes: []const u8) (error{}!void) {
size.* += bytes.len;
}
@ -913,11 +913,11 @@ test "fmt.format" {
try testFmt("optional: null\n", "optional: {}\n", value);
}
{
const value: error!i32 = 1234;
const value: anyerror!i32 = 1234;
try testFmt("error union: 1234\n", "error union: {}\n", value);
}
{
const value: error!i32 = error.InvalidChar;
const value: anyerror!i32 = error.InvalidChar;
try testFmt("error union: error.InvalidChar\n", "error union: {}\n", value);
}
{
@ -960,23 +960,23 @@ test "fmt.format" {
try testFmt("file size: 63MiB\n", "file size: {Bi}\n", usize(63 * 1024 * 1024));
try testFmt("file size: 66.06MB\n", "file size: {B2}\n", usize(63 * 1024 * 1024));
{
const Struct = struct.{
const Struct = struct {
field: u8,
};
const value = Struct.{ .field = 42 };
const value = Struct{ .field = 42 };
try testFmt("struct: Struct{ .field = 42 }\n", "struct: {}\n", value);
try testFmt("struct: Struct{ .field = 42 }\n", "struct: {}\n", &value);
}
{
const Struct = struct.{
const Struct = struct {
a: u0,
b: u1,
};
const value = Struct.{ .a = 0, .b = 1 };
const value = Struct{ .a = 0, .b = 1 };
try testFmt("struct: Struct{ .a = 0, .b = 1 }\n", "struct: {}\n", value);
}
{
const Enum = enum.{
const Enum = enum {
One,
Two,
};
@ -1194,7 +1194,7 @@ test "fmt.format" {
}
//custom type format
{
const Vec2 = struct.{
const Vec2 = struct {
const SelfType = @This();
x: f32,
y: f32,
@ -1221,7 +1221,7 @@ test "fmt.format" {
};
var buf1: [32]u8 = undefined;
var value = Vec2.{
var value = Vec2{
.x = 10.2,
.y = 2.22,
};
@ -1234,12 +1234,12 @@ test "fmt.format" {
}
//struct format
{
const S = struct.{
const S = struct {
a: u32,
b: error,
b: anyerror,
};
const inst = S.{
const inst = S{
.a = 456,
.b = error.Unused,
};
@ -1248,24 +1248,24 @@ test "fmt.format" {
}
//union format
{
const TU = union(enum).{
const TU = union(enum) {
float: f32,
int: u32,
};
const UU = union.{
const UU = union {
float: f32,
int: u32,
};
const EU = extern union.{
const EU = extern union {
float: f32,
int: u32,
};
const tu_inst = TU.{ .int = 123 };
const uu_inst = UU.{ .int = 456 };
const eu_inst = EU.{ .float = 321.123 };
const tu_inst = TU{ .int = 123 };
const uu_inst = UU{ .int = 456 };
const eu_inst = EU{ .float = 321.123 };
try testFmt("TU{ .int = 123 }", "{}", tu_inst);
@ -1278,7 +1278,7 @@ test "fmt.format" {
}
//enum format
{
const E = enum.{
const E = enum {
One,
Two,
Three,
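Besides the literal-syntax change, the std/fmt.zig hunks above also replace the bare `error` type with `anyerror` for the global error set, both in formatType and in the error-union test values. A small sketch of the new spelling, with a hypothetical error value chosen only for illustration:

const std = @import("std");
const assert = std.debug.assert;

test "anyerror is the new spelling of the global error set" {
    // Old spelling: const ok: error!i32 = 1234;
    const ok: anyerror!i32 = 1234;
    const bad: anyerror!i32 = error.InvalidChar;

    assert((ok catch unreachable) == 1234);
    if (bad) |_| {
        unreachable;
    } else |err| {
        assert(err == error.InvalidChar);
    }
}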

View File

@ -6,14 +6,14 @@
const std = @import("../index.zig");
const debug = std.debug;
pub const Adler32 = struct.{
pub const Adler32 = struct {
const base = 65521;
const nmax = 5552;
adler: u32,
pub fn init() Adler32 {
return Adler32.{ .adler = 1 };
return Adler32{ .adler = 1 };
}
// This fast variant is taken from zlib. It reduces the required modulos and unrolls longer
@ -94,14 +94,14 @@ test "adler32 sanity" {
}
test "adler32 long" {
const long1 = []u8.{1} ** 1024;
const long1 = []u8{1} ** 1024;
debug.assert(Adler32.hash(long1[0..]) == 0x06780401);
const long2 = []u8.{1} ** 1025;
const long2 = []u8{1} ** 1025;
debug.assert(Adler32.hash(long2[0..]) == 0x0a7a0402);
}
test "adler32 very long" {
const long = []u8.{1} ** 5553;
const long = []u8{1} ** 5553;
debug.assert(Adler32.hash(long[0..]) == 0x707f15b2);
}
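Array literals follow the same rule as the `long1`/`long2` test data above: `[]u8.{...}` and `[1]i32.{...}` become `[]u8{...}` and `[1]i32{...}`, and the `**` repetition operator still applies to the result. A tiny sketch with arbitrary values:

const std = @import("std");
const assert = std.debug.assert;

test "array literals without the dot" {
    // Old grammar: []u8.{1} ** 4 and [1]i32.{0} ** 10
    const ones = []u8{1} ** 4; // a [4]u8, the one-element literal repeated four times
    const zeros = [1]i32{0} ** 10; // a [10]i32 of zeros
    assert(ones.len == 4 and ones[3] == 1);
    assert(zeros.len == 10 and zeros[9] == 0);
}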

View File

@ -8,7 +8,7 @@
const std = @import("../index.zig");
const debug = std.debug;
pub const Polynomial = struct.{
pub const Polynomial = struct {
const IEEE = 0xedb88320;
const Castagnoli = 0x82f63b78;
const Koopman = 0xeb31d82e;
@ -19,7 +19,7 @@ pub const Crc32 = Crc32WithPoly(Polynomial.IEEE);
// slicing-by-8 crc32 implementation.
pub fn Crc32WithPoly(comptime poly: u32) type {
return struct.{
return struct {
const Self = @This();
const lookup_tables = comptime block: {
@setEvalBranchQuota(20000);
@ -55,7 +55,7 @@ pub fn Crc32WithPoly(comptime poly: u32) type {
crc: u32,
pub fn init() Self {
return Self.{ .crc = 0xffffffff };
return Self{ .crc = 0xffffffff };
}
pub fn update(self: *Self, input: []const u8) void {
@ -116,7 +116,7 @@ test "crc32 castagnoli" {
// half-byte lookup table implementation.
pub fn Crc32SmallWithPoly(comptime poly: u32) type {
return struct.{
return struct {
const Self = @This();
const lookup_table = comptime block: {
var table: [16]u32 = undefined;
@ -140,7 +140,7 @@ pub fn Crc32SmallWithPoly(comptime poly: u32) type {
crc: u32,
pub fn init() Self {
return Self.{ .crc = 0xffffffff };
return Self{ .crc = 0xffffffff };
}
pub fn update(self: *Self, input: []const u8) void {

View File

@ -12,13 +12,13 @@ pub const Fnv1a_64 = Fnv1a(u64, 0x100000001b3, 0xcbf29ce484222325);
pub const Fnv1a_128 = Fnv1a(u128, 0x1000000000000000000013b, 0x6c62272e07bb014262b821756295c58d);
fn Fnv1a(comptime T: type, comptime prime: T, comptime offset: T) type {
return struct.{
return struct {
const Self = @This();
value: T,
pub fn init() Self {
return Self.{ .value = offset };
return Self{ .value = offset };
}
pub fn update(self: *Self, input: []const u8) void {

View File

@ -24,7 +24,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
debug.assert(T == u64 or T == u128);
debug.assert(c_rounds > 0 and d_rounds > 0);
return struct.{
return struct {
const Self = @This();
const digest_size = 64;
const block_size = 64;
@ -45,7 +45,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
const k0 = mem.readInt(key[0..8], u64, Endian.Little);
const k1 = mem.readInt(key[8..16], u64, Endian.Little);
var d = Self.{
var d = Self{
.v0 = k0 ^ 0x736f6d6570736575,
.v1 = k1 ^ 0x646f72616e646f6d,
.v2 = k0 ^ 0x6c7967656e657261,
@ -162,7 +162,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
const test_key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f";
test "siphash64-2-4 sanity" {
const vectors = [][]const u8.{
const vectors = [][]const u8{
"\x31\x0e\x0e\xdd\x47\xdb\x6f\x72", // ""
"\xfd\x67\xdc\x93\xc5\x39\xf8\x74", // "\x00"
"\x5a\x4f\xa9\xd9\x09\x80\x6c\x0d", // "\x00\x01" ... etc
@ -241,7 +241,7 @@ test "siphash64-2-4 sanity" {
}
test "siphash128-2-4 sanity" {
const vectors = [][]const u8.{
const vectors = [][]const u8{
"\xa3\x81\x7f\x04\xba\x25\xa8\xe6\x6d\xf6\x72\x14\xc7\x55\x02\x93",
"\xda\x87\xc1\xd8\x6b\x99\xaf\x44\x34\x76\x59\x11\x9b\x22\xfc\x45",
"\x81\x77\x22\x8d\xa4\xa4\x5d\xc7\xfc\xa3\x8b\xde\xf6\x0a\xff\xe4",

View File

@ -14,7 +14,7 @@ pub fn AutoHashMap(comptime K: type, comptime V: type) type {
}
pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u32, comptime eql: fn (a: K, b: K) bool) type {
return struct.{
return struct {
entries: []Entry,
size: usize,
max_distance_from_start_index: usize,
@ -24,23 +24,23 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
const Self = @This();
pub const KV = struct.{
pub const KV = struct {
key: K,
value: V,
};
const Entry = struct.{
const Entry = struct {
used: bool,
distance_from_start_index: usize,
kv: KV,
};
pub const GetOrPutResult = struct.{
pub const GetOrPutResult = struct {
kv: *KV,
found_existing: bool,
};
pub const Iterator = struct.{
pub const Iterator = struct {
hm: *const Self,
// how many items have we returned
count: usize,
@ -75,8 +75,8 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
};
pub fn init(allocator: *Allocator) Self {
return Self.{
.entries = []Entry.{},
return Self{
.entries = []Entry{},
.allocator = allocator,
.size = 0,
.max_distance_from_start_index = 0,
@ -111,7 +111,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
// TODO this implementation can be improved - we should only
// have to hash once and find the entry once.
if (self.get(key)) |kv| {
return GetOrPutResult.{
return GetOrPutResult{
.kv = kv,
.found_existing = true,
};
@ -120,7 +120,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
try self.ensureCapacity();
const put_result = self.internalPut(key);
assert(put_result.old_kv == null);
return GetOrPutResult.{
return GetOrPutResult{
.kv = &put_result.new_entry.kv,
.found_existing = false,
};
@ -199,7 +199,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
}
pub fn iterator(hm: *const Self) Iterator {
return Iterator.{
return Iterator{
.hm = hm,
.count = 0,
.index = 0,
@ -232,7 +232,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
}
}
const InternalPutResult = struct.{
const InternalPutResult = struct {
new_entry: *Entry,
old_kv: ?KV,
};
@ -246,7 +246,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
var roll_over: usize = 0;
var distance_from_start_index: usize = 0;
var got_result_entry = false;
var result = InternalPutResult.{
var result = InternalPutResult{
.new_entry = undefined,
.old_kv = null,
};
@ -266,10 +266,10 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
got_result_entry = true;
result.new_entry = entry;
}
entry.* = Entry.{
entry.* = Entry{
.used = true,
.distance_from_start_index = distance_from_start_index,
.kv = KV.{
.kv = KV{
.key = key,
.value = value,
},
@ -293,10 +293,10 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
if (!got_result_entry) {
result.new_entry = entry;
}
entry.* = Entry.{
entry.* = Entry{
.used = true,
.distance_from_start_index = distance_from_start_index,
.kv = KV.{
.kv = KV{
.key = key,
.value = value,
},
@ -372,12 +372,12 @@ test "iterator hash map" {
assert((try reset_map.put(2, 22)) == null);
assert((try reset_map.put(3, 33)) == null);
var keys = []i32.{
var keys = []i32{
3,
2,
1,
};
var values = []i32.{
var values = []i32{
33,
22,
11,
@ -409,7 +409,7 @@ test "iterator hash map" {
}
pub fn getHashPtrAddrFn(comptime K: type) (fn (K) u32) {
return struct.{
return struct {
fn hash(key: K) u32 {
return getAutoHashFn(usize)(@ptrToInt(key));
}
@ -417,7 +417,7 @@ pub fn getHashPtrAddrFn(comptime K: type) (fn (K) u32) {
}
pub fn getTrivialEqlFn(comptime K: type) (fn (K, K) bool) {
return struct.{
return struct {
fn eql(a: K, b: K) bool {
return a == b;
}
@ -425,7 +425,7 @@ pub fn getTrivialEqlFn(comptime K: type) (fn (K, K) bool) {
}
pub fn getAutoHashFn(comptime K: type) (fn (K) u32) {
return struct.{
return struct {
fn hash(key: K) u32 {
comptime var rng = comptime std.rand.DefaultPrng.init(0);
return autoHash(key, &rng.random, u32);
@ -434,7 +434,7 @@ pub fn getAutoHashFn(comptime K: type) (fn (K) u32) {
}
pub fn getAutoEqlFn(comptime K: type) (fn (K, K) bool) {
return struct.{
return struct {
fn eql(a: K, b: K) bool {
return autoEql(a, b);
}

View File

@ -11,7 +11,7 @@ const maxInt = std.math.maxInt;
const Allocator = mem.Allocator;
pub const c_allocator = &c_allocator_state;
var c_allocator_state = Allocator.{
var c_allocator_state = Allocator{
.allocFn = cAlloc,
.reallocFn = cRealloc,
.freeFn = cFree,
@ -40,15 +40,15 @@ fn cFree(self: *Allocator, old_mem: []u8) void {
/// This allocator makes a syscall directly for every allocation and free.
/// Thread-safe and lock-free.
pub const DirectAllocator = struct.{
pub const DirectAllocator = struct {
allocator: Allocator,
heap_handle: ?HeapHandle,
const HeapHandle = if (builtin.os == Os.windows) os.windows.HANDLE else void;
pub fn init() DirectAllocator {
return DirectAllocator.{
.allocator = Allocator.{
return DirectAllocator{
.allocator = Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
@ -182,7 +182,7 @@ pub const DirectAllocator = struct.{
/// This allocator takes an existing allocator, wraps it, and provides an interface
/// where you can allocate without freeing, and then free it all together.
pub const ArenaAllocator = struct.{
pub const ArenaAllocator = struct {
pub allocator: Allocator,
child_allocator: *Allocator,
@ -192,8 +192,8 @@ pub const ArenaAllocator = struct.{
const BufNode = std.LinkedList([]u8).Node;
pub fn init(child_allocator: *Allocator) ArenaAllocator {
return ArenaAllocator.{
.allocator = Allocator.{
return ArenaAllocator{
.allocator = Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
@ -225,7 +225,7 @@ pub const ArenaAllocator = struct.{
const buf = try self.child_allocator.alignedAlloc(u8, @alignOf(BufNode), len);
const buf_node_slice = @bytesToSlice(BufNode, buf[0..@sizeOf(BufNode)]);
const buf_node = &buf_node_slice[0];
buf_node.* = BufNode.{
buf_node.* = BufNode{
.data = buf,
.prev = null,
.next = null,
@ -269,14 +269,14 @@ pub const ArenaAllocator = struct.{
fn free(allocator: *Allocator, bytes: []u8) void {}
};
pub const FixedBufferAllocator = struct.{
pub const FixedBufferAllocator = struct {
allocator: Allocator,
end_index: usize,
buffer: []u8,
pub fn init(buffer: []u8) FixedBufferAllocator {
return FixedBufferAllocator.{
.allocator = Allocator.{
return FixedBufferAllocator{
.allocator = Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
@ -325,14 +325,14 @@ pub const FixedBufferAllocator = struct.{
};
/// lock free
pub const ThreadSafeFixedBufferAllocator = struct.{
pub const ThreadSafeFixedBufferAllocator = struct {
allocator: Allocator,
end_index: usize,
buffer: []u8,
pub fn init(buffer: []u8) ThreadSafeFixedBufferAllocator {
return ThreadSafeFixedBufferAllocator.{
.allocator = Allocator.{
return ThreadSafeFixedBufferAllocator{
.allocator = Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
@ -372,11 +372,11 @@ pub const ThreadSafeFixedBufferAllocator = struct.{
};
pub fn stackFallback(comptime size: usize, fallback_allocator: *Allocator) StackFallbackAllocator(size) {
return StackFallbackAllocator(size).{
return StackFallbackAllocator(size){
.buffer = undefined,
.fallback_allocator = fallback_allocator,
.fixed_buffer_allocator = undefined,
.allocator = Allocator.{
.allocator = Allocator{
.allocFn = StackFallbackAllocator(size).alloc,
.reallocFn = StackFallbackAllocator(size).realloc,
.freeFn = StackFallbackAllocator(size).free,
@ -385,7 +385,7 @@ pub fn stackFallback(comptime size: usize, fallback_allocator: *Allocator) Stack
}
pub fn StackFallbackAllocator(comptime size: usize) type {
return struct.{
return struct {
const Self = @This();
buffer: [size]u8,

View File

@ -33,7 +33,7 @@ pub fn getStdIn() GetStdIoErrs!File {
}
pub fn InStream(comptime ReadError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = ReadError;
@ -193,7 +193,7 @@ pub fn InStream(comptime ReadError: type) type {
}
pub fn OutStream(comptime WriteError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = WriteError;
@ -271,7 +271,7 @@ pub fn BufferedInStream(comptime Error: type) type {
}
pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type) type {
return struct.{
return struct {
const Self = @This();
const Stream = InStream(Error);
@ -284,7 +284,7 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
end_index: usize,
pub fn init(unbuffered_in_stream: *Stream) Self {
return Self.{
return Self{
.unbuffered_in_stream = unbuffered_in_stream,
.buffer = undefined,
@ -295,7 +295,7 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
.start_index = buffer_size,
.end_index = buffer_size,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
@ -341,7 +341,7 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
/// Creates a stream which supports 'un-reading' data, so that it can be read again.
/// This makes look-ahead style parsing much easier.
pub fn PeekStream(comptime buffer_size: usize, comptime InStreamError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = InStreamError;
pub const Stream = InStream(Error);
@ -356,12 +356,12 @@ pub fn PeekStream(comptime buffer_size: usize, comptime InStreamError: type) typ
at_end: bool,
pub fn init(base: *Stream) Self {
return Self.{
return Self{
.base = base,
.buffer = undefined,
.index = 0,
.at_end = false,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
@ -404,9 +404,9 @@ pub fn PeekStream(comptime buffer_size: usize, comptime InStreamError: type) typ
};
}
pub const SliceInStream = struct.{
pub const SliceInStream = struct {
const Self = @This();
pub const Error = error.{};
pub const Error = error{};
pub const Stream = InStream(Error);
pub stream: Stream,
@ -415,10 +415,10 @@ pub const SliceInStream = struct.{
slice: []const u8,
pub fn init(slice: []const u8) Self {
return Self.{
return Self{
.slice = slice,
.pos = 0,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
@ -436,8 +436,8 @@ pub const SliceInStream = struct.{
/// This is a simple OutStream that writes to a slice, and returns an error
/// when it runs out of space.
pub const SliceOutStream = struct.{
pub const Error = error.{OutOfSpace};
pub const SliceOutStream = struct {
pub const Error = error{OutOfSpace};
pub const Stream = OutStream(Error);
pub stream: Stream,
@ -446,10 +446,10 @@ pub const SliceOutStream = struct.{
slice: []u8,
pub fn init(slice: []u8) SliceOutStream {
return SliceOutStream.{
return SliceOutStream{
.slice = slice,
.pos = 0,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@ -485,7 +485,7 @@ pub fn BufferedOutStream(comptime Error: type) type {
}
pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Stream = OutStream(Error);
pub const Error = OutStreamError;
@ -498,11 +498,11 @@ pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamEr
index: usize,
pub fn init(unbuffered_out_stream: *Stream) Self {
return Self.{
return Self{
.unbuffered_out_stream = unbuffered_out_stream,
.buffer = undefined,
.index = 0,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@ -536,17 +536,17 @@ pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamEr
}
/// Implementation of OutStream trait for Buffer
pub const BufferOutStream = struct.{
pub const BufferOutStream = struct {
buffer: *Buffer,
stream: Stream,
pub const Error = error.{OutOfMemory};
pub const Error = error{OutOfMemory};
pub const Stream = OutStream(Error);
pub fn init(buffer: *Buffer) BufferOutStream {
return BufferOutStream.{
return BufferOutStream{
.buffer = buffer,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@ -556,7 +556,7 @@ pub const BufferOutStream = struct.{
}
};
pub const BufferedAtomicFile = struct.{
pub const BufferedAtomicFile = struct {
atomic_file: os.AtomicFile,
file_stream: os.File.OutStream,
buffered_stream: BufferedOutStream(os.File.WriteError),
@ -564,7 +564,7 @@ pub const BufferedAtomicFile = struct.{
pub fn create(allocator: *mem.Allocator, dest_path: []const u8) !*BufferedAtomicFile {
// TODO with well defined copy elision we don't need this allocation
var self = try allocator.create(BufferedAtomicFile.{
var self = try allocator.create(BufferedAtomicFile{
.atomic_file = undefined,
.file_stream = undefined,
.buffered_stream = undefined,
@ -624,3 +624,5 @@ pub fn readLine(buf: []u8) !usize {
}
}
}

View File

@ -63,7 +63,7 @@ test "BufferOutStream" {
}
test "SliceInStream" {
const bytes = []const u8.{ 1, 2, 3, 4, 5, 6, 7 };
const bytes = []const u8{ 1, 2, 3, 4, 5, 6, 7 };
var ss = io.SliceInStream.init(bytes);
var dest: [4]u8 = undefined;
@ -81,7 +81,7 @@ test "SliceInStream" {
}
test "PeekStream" {
const bytes = []const u8.{ 1, 2, 3, 4, 5, 6, 7, 8 };
const bytes = []const u8{ 1, 2, 3, 4, 5, 6, 7, 8 };
var ss = io.SliceInStream.init(bytes);
var ps = io.PeekStream(2, io.SliceInStream.Error).init(&ss.stream);

View File

@ -10,7 +10,7 @@ const maxInt = std.math.maxInt;
// A single token slice into the parent string.
//
// Use `token.slice()` on the input at the current position to get the current slice.
pub const Token = struct.{
pub const Token = struct {
id: Id,
// How many bytes do we skip before counting
offset: u1,
@ -21,7 +21,7 @@ pub const Token = struct.{
// How many bytes from the current position behind the start of this token is.
count: usize,
pub const Id = enum.{
pub const Id = enum {
ObjectBegin,
ObjectEnd,
ArrayBegin,
@ -34,7 +34,7 @@ pub const Token = struct.{
};
pub fn init(id: Id, count: usize, offset: u1) Token {
return Token.{
return Token{
.id = id,
.offset = offset,
.string_has_escape = false,
@ -44,7 +44,7 @@ pub const Token = struct.{
}
pub fn initString(count: usize, has_unicode_escape: bool) Token {
return Token.{
return Token{
.id = Id.String,
.offset = 0,
.string_has_escape = has_unicode_escape,
@ -54,7 +54,7 @@ pub const Token = struct.{
}
pub fn initNumber(count: usize, number_is_integer: bool) Token {
return Token.{
return Token{
.id = Id.Number,
.offset = 0,
.string_has_escape = false,
@ -65,7 +65,7 @@ pub const Token = struct.{
// A marker token is a zero-length
pub fn initMarker(id: Id) Token {
return Token.{
return Token{
.id = id,
.offset = 0,
.string_has_escape = false,
@ -87,7 +87,7 @@ pub const Token = struct.{
// Conforms strictly to RFC8529.
//
// For a non-byte based wrapper, consider using TokenStream instead.
pub const StreamingParser = struct.{
pub const StreamingParser = struct {
// Current state
state: State,
// How many bytes we have counted for the current token
@ -129,7 +129,7 @@ pub const StreamingParser = struct.{
p.number_is_integer = true;
}
pub const State = enum.{
pub const State = enum {
// These must be first with these explicit values as we rely on them for indexing the
// bit-stack directly and avoiding a branch.
ObjectSeparator = 0,
@ -182,7 +182,7 @@ pub const StreamingParser = struct.{
}
};
pub const Error = error.{
pub const Error = error{
InvalidTopLevel,
TooManyNestedItems,
TooManyClosingItems,
@ -858,14 +858,14 @@ pub const StreamingParser = struct.{
};
// A small wrapper over a StreamingParser for full slices. Returns a stream of json Tokens.
pub const TokenStream = struct.{
pub const TokenStream = struct {
i: usize,
slice: []const u8,
parser: StreamingParser,
token: ?Token,
pub fn init(slice: []const u8) TokenStream {
return TokenStream.{
return TokenStream{
.i = 0,
.slice = slice,
.parser = StreamingParser.init(),
@ -989,7 +989,7 @@ const ArenaAllocator = std.heap.ArenaAllocator;
const ArrayList = std.ArrayList;
const HashMap = std.HashMap;
pub const ValueTree = struct.{
pub const ValueTree = struct {
arena: ArenaAllocator,
root: Value,
@ -1000,7 +1000,7 @@ pub const ValueTree = struct.{
pub const ObjectMap = HashMap([]const u8, Value, mem.hash_slice_u8, mem.eql_slice_u8);
pub const Value = union(enum).{
pub const Value = union(enum) {
Null,
Bool: bool,
Integer: i64,
@ -1127,14 +1127,14 @@ pub const Value = union(enum).{
};
// A non-stream JSON parser which constructs a tree of Value's.
pub const Parser = struct.{
pub const Parser = struct {
allocator: *Allocator,
state: State,
copy_strings: bool,
// Stores parent nodes and un-combined Values.
stack: ArrayList(Value),
const State = enum.{
const State = enum {
ObjectKey,
ObjectValue,
ArrayValue,
@ -1142,7 +1142,7 @@ pub const Parser = struct.{
};
pub fn init(allocator: *Allocator, copy_strings: bool) Parser {
return Parser.{
return Parser{
.allocator = allocator,
.state = State.Simple,
.copy_strings = copy_strings,
@ -1171,7 +1171,7 @@ pub const Parser = struct.{
debug.assert(p.stack.len == 1);
return ValueTree.{
return ValueTree{
.arena = arena,
.root = p.stack.at(0),
};
@ -1204,11 +1204,11 @@ pub const Parser = struct.{
switch (token.id) {
Token.Id.ObjectBegin => {
try p.stack.append(Value.{ .Object = ObjectMap.init(allocator) });
try p.stack.append(Value{ .Object = ObjectMap.init(allocator) });
p.state = State.ObjectKey;
},
Token.Id.ArrayBegin => {
try p.stack.append(Value.{ .Array = ArrayList(Value).init(allocator) });
try p.stack.append(Value{ .Array = ArrayList(Value).init(allocator) });
p.state = State.ArrayValue;
},
Token.Id.String => {
@ -1222,12 +1222,12 @@ pub const Parser = struct.{
p.state = State.ObjectKey;
},
Token.Id.True => {
_ = try object.put(key, Value.{ .Bool = true });
_ = try object.put(key, Value{ .Bool = true });
_ = p.stack.pop();
p.state = State.ObjectKey;
},
Token.Id.False => {
_ = try object.put(key, Value.{ .Bool = false });
_ = try object.put(key, Value{ .Bool = false });
_ = p.stack.pop();
p.state = State.ObjectKey;
},
@ -1254,11 +1254,11 @@ pub const Parser = struct.{
try p.pushToParent(value);
},
Token.Id.ObjectBegin => {
try p.stack.append(Value.{ .Object = ObjectMap.init(allocator) });
try p.stack.append(Value{ .Object = ObjectMap.init(allocator) });
p.state = State.ObjectKey;
},
Token.Id.ArrayBegin => {
try p.stack.append(Value.{ .Array = ArrayList(Value).init(allocator) });
try p.stack.append(Value{ .Array = ArrayList(Value).init(allocator) });
p.state = State.ArrayValue;
},
Token.Id.String => {
@ -1268,10 +1268,10 @@ pub const Parser = struct.{
try array.append(try p.parseNumber(token, input, i));
},
Token.Id.True => {
try array.append(Value.{ .Bool = true });
try array.append(Value{ .Bool = true });
},
Token.Id.False => {
try array.append(Value.{ .Bool = false });
try array.append(Value{ .Bool = false });
},
Token.Id.Null => {
try array.append(Value.Null);
@ -1283,11 +1283,11 @@ pub const Parser = struct.{
},
State.Simple => switch (token.id) {
Token.Id.ObjectBegin => {
try p.stack.append(Value.{ .Object = ObjectMap.init(allocator) });
try p.stack.append(Value{ .Object = ObjectMap.init(allocator) });
p.state = State.ObjectKey;
},
Token.Id.ArrayBegin => {
try p.stack.append(Value.{ .Array = ArrayList(Value).init(allocator) });
try p.stack.append(Value{ .Array = ArrayList(Value).init(allocator) });
p.state = State.ArrayValue;
},
Token.Id.String => {
@ -1297,10 +1297,10 @@ pub const Parser = struct.{
try p.stack.append(try p.parseNumber(token, input, i));
},
Token.Id.True => {
try p.stack.append(Value.{ .Bool = true });
try p.stack.append(Value{ .Bool = true });
},
Token.Id.False => {
try p.stack.append(Value.{ .Bool = false });
try p.stack.append(Value{ .Bool = false });
},
Token.Id.Null => {
try p.stack.append(Value.Null);
@ -1337,12 +1337,12 @@ pub const Parser = struct.{
// TODO: We don't strictly have to copy values which do not contain any escape
// characters if flagged with the option.
const slice = token.slice(input, i);
return Value.{ .String = try mem.dupe(p.allocator, u8, slice) };
return Value{ .String = try mem.dupe(p.allocator, u8, slice) };
}
fn parseNumber(p: *Parser, token: Token, input: []const u8, i: usize) !Value {
return if (token.number_is_integer)
Value.{ .Integer = try std.fmt.parseInt(i64, token.slice(input, i), 10) }
Value{ .Integer = try std.fmt.parseInt(i64, token.slice(input, i), 10) }
else
@panic("TODO: fmt.parseFloat not yet implemented");
}

View File

@ -7,14 +7,14 @@ const AtomicOrder = builtin.AtomicOrder;
/// Thread-safe initialization of global data.
/// TODO use a mutex instead of a spinlock
pub fn lazyInit(comptime T: type) LazyInit(T) {
return LazyInit(T).{
return LazyInit(T){
.data = undefined,
.state = 0,
};
}
fn LazyInit(comptime T: type) type {
return struct.{
return struct {
state: u8, // TODO make this an enum
data: Data,

View File

@ -6,17 +6,17 @@ const Allocator = mem.Allocator;
/// Generic doubly linked list.
pub fn LinkedList(comptime T: type) type {
return struct.{
return struct {
const Self = @This();
/// Node inside the linked list wrapping the actual data.
pub const Node = struct.{
pub const Node = struct {
prev: ?*Node,
next: ?*Node,
data: T,
pub fn init(data: T) Node {
return Node.{
return Node{
.prev = null,
.next = null,
.data = data,
@ -33,7 +33,7 @@ pub fn LinkedList(comptime T: type) type {
/// Returns:
/// An empty linked list.
pub fn init() Self {
return Self.{
return Self{
.first = null,
.last = null,
.len = 0,

View File

@ -1,4 +1,4 @@
pub const mach_header = extern struct.{
pub const mach_header = extern struct {
magic: u32,
cputype: cpu_type_t,
cpusubtype: cpu_subtype_t,
@ -8,7 +8,7 @@ pub const mach_header = extern struct.{
flags: u32,
};
pub const mach_header_64 = extern struct.{
pub const mach_header_64 = extern struct {
magic: u32,
cputype: cpu_type_t,
cpusubtype: cpu_subtype_t,
@ -19,7 +19,7 @@ pub const mach_header_64 = extern struct.{
reserved: u32,
};
pub const load_command = extern struct.{
pub const load_command = extern struct {
cmd: u32,
cmdsize: u32,
};
@ -27,7 +27,7 @@ pub const load_command = extern struct.{
/// The symtab_command contains the offsets and sizes of the link-edit 4.3BSD
/// "stab" style symbol table information as described in the header files
/// <nlist.h> and <stab.h>.
pub const symtab_command = extern struct.{
pub const symtab_command = extern struct {
/// LC_SYMTAB
cmd: u32,
@ -49,7 +49,7 @@ pub const symtab_command = extern struct.{
/// The linkedit_data_command contains the offsets and sizes of a blob
/// of data in the __LINKEDIT segment.
const linkedit_data_command = extern struct.{
const linkedit_data_command = extern struct {
/// LC_CODE_SIGNATURE, LC_SEGMENT_SPLIT_INFO, LC_FUNCTION_STARTS, LC_DATA_IN_CODE, LC_DYLIB_CODE_SIGN_DRS or LC_LINKER_OPTIMIZATION_HINT.
cmd: u32,
@ -73,7 +73,7 @@ const linkedit_data_command = extern struct.{
/// by the maxprot and initprot fields. If the segment has sections then the
/// section structures directly follow the segment command and their size is
/// reflected in cmdsize.
pub const segment_command = extern struct.{
pub const segment_command = extern struct {
/// LC_SEGMENT
cmd: u32,
@ -110,7 +110,7 @@ pub const segment_command = extern struct.{
/// mapped into a 64-bit task's address space. If the 64-bit segment has
/// sections then section_64 structures directly follow the 64-bit segment
/// command and their size is reflected in cmdsize.
pub const segment_command_64 = extern struct.{
pub const segment_command_64 = extern struct {
/// LC_SEGMENT_64
cmd: u32,
@ -168,7 +168,7 @@ pub const segment_command_64 = extern struct.{
/// The format of the relocation entries referenced by the reloff and nreloc
/// fields of the section structure for mach object files is described in the
/// header file <reloc.h>.
pub const @"section" = extern struct.{
pub const @"section" = extern struct {
/// name of this section
sectname: [16]u8,
@ -203,7 +203,7 @@ pub const @"section" = extern struct.{
reserved2: u32,
};
pub const section_64 = extern struct.{
pub const section_64 = extern struct {
/// name of this section
sectname: [16]u8,
@ -241,7 +241,7 @@ pub const section_64 = extern struct.{
reserved3: u32,
};
pub const nlist = extern struct.{
pub const nlist = extern struct {
n_strx: u32,
n_type: u8,
n_sect: u8,
@ -249,7 +249,7 @@ pub const nlist = extern struct.{
n_value: u32,
};
pub const nlist_64 = extern struct.{
pub const nlist_64 = extern struct {
n_strx: u32,
n_type: u8,
n_sect: u8,

View File

@ -17,21 +17,21 @@ pub fn atan(x: var) @typeOf(x) {
}
fn atan32(x_: f32) f32 {
const atanhi = []const f32.{
const atanhi = []const f32{
4.6364760399e-01, // atan(0.5)hi
7.8539812565e-01, // atan(1.0)hi
9.8279368877e-01, // atan(1.5)hi
1.5707962513e+00, // atan(inf)hi
};
const atanlo = []const f32.{
const atanlo = []const f32{
5.0121582440e-09, // atan(0.5)lo
3.7748947079e-08, // atan(1.0)lo
3.4473217170e-08, // atan(1.5)lo
7.5497894159e-08, // atan(inf)lo
};
const aT = []const f32.{
const aT = []const f32{
3.3333328366e-01,
-1.9999158382e-01,
1.4253635705e-01,
@ -108,21 +108,21 @@ fn atan32(x_: f32) f32 {
}
fn atan64(x_: f64) f64 {
const atanhi = []const f64.{
const atanhi = []const f64{
4.63647609000806093515e-01, // atan(0.5)hi
7.85398163397448278999e-01, // atan(1.0)hi
9.82793723247329054082e-01, // atan(1.5)hi
1.57079632679489655800e+00, // atan(inf)hi
};
const atanlo = []const f64.{
const atanlo = []const f64{
2.26987774529616870924e-17, // atan(0.5)lo
3.06161699786838301793e-17, // atan(1.0)lo
1.39033110312309984516e-17, // atan(1.5)lo
6.12323399573676603587e-17, // atan(inf)lo
};
const aT = []const f64.{
const aT = []const f64{
3.33333333333329318027e-01,
-1.99999999998764832476e-01,
1.42857142725034663711e-01,

View File

@ -20,7 +20,7 @@ comptime {
debug.assert(Limb.is_signed == false);
}
pub const Int = struct.{
pub const Int = struct {
allocator: *Allocator,
positive: bool,
// - little-endian ordered
@ -42,7 +42,7 @@ pub const Int = struct.{
}
pub fn initCapacity(allocator: *Allocator, capacity: usize) !Int {
return Int.{
return Int{
.allocator = allocator,
.positive = true,
.limbs = block: {
@ -68,7 +68,7 @@ pub const Int = struct.{
}
pub fn clone(other: Int) !Int {
return Int.{
return Int{
.allocator = other.allocator,
.positive = other.positive,
.limbs = block: {
@ -234,7 +234,7 @@ pub const Int = struct.{
}
}
pub const ConvertError = error.{
pub const ConvertError = error{
NegativeIntoUnsigned,
TargetTooSmall,
};
@ -532,7 +532,7 @@ pub const Int = struct.{
if (a.positive != b.positive) {
if (a.positive) {
// (a) + (-b) => a - b
const bp = Int.{
const bp = Int{
.allocator = undefined,
.positive = true,
.limbs = b.limbs,
@ -541,7 +541,7 @@ pub const Int = struct.{
try r.sub(a, bp);
} else {
// (-a) + (b) => b - a
const ap = Int.{
const ap = Int{
.allocator = undefined,
.positive = true,
.limbs = a.limbs,
@ -593,7 +593,7 @@ pub const Int = struct.{
if (a.positive != b.positive) {
if (a.positive) {
// (a) - (-b) => a + b
const bp = Int.{
const bp = Int{
.allocator = undefined,
.positive = true,
.limbs = b.limbs,
@ -602,7 +602,7 @@ pub const Int = struct.{
try r.add(a, bp);
} else {
// (-a) - (b) => -(a + b)
const ap = Int.{
const ap = Int{
.allocator = undefined,
.positive = true,
.limbs = a.limbs,
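
The Int struct and its error set above switch to the new Int{ ... } and error{ ... } forms. A minimal usage sketch, assuming this file is exposed as std.math.big.Int and using std.debug.global_allocator as the era's test files do; only initCapacity and the positive field, both visible in the hunks, are exercised:

const std = @import("std");
const assert = std.debug.assert;
const Int = std.math.big.Int; // assumed import path

test "Int.initCapacity returns an Int{ ... } literal" {
    var a = try Int.initCapacity(std.debug.global_allocator, 4);
    // initCapacity builds Int{ .allocator = ..., .positive = true, .limbs = ... }
    assert(a.positive);
}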

View File

@ -24,35 +24,35 @@ pub const tanh = @import("tanh.zig").tanh;
pub const tan = @import("tan.zig").tan;
pub fn Complex(comptime T: type) type {
return struct.{
return struct {
const Self = @This();
re: T,
im: T,
pub fn new(re: T, im: T) Self {
return Self.{
return Self{
.re = re,
.im = im,
};
}
pub fn add(self: Self, other: Self) Self {
return Self.{
return Self{
.re = self.re + other.re,
.im = self.im + other.im,
};
}
pub fn sub(self: Self, other: Self) Self {
return Self.{
return Self{
.re = self.re - other.re,
.im = self.im - other.im,
};
}
pub fn mul(self: Self, other: Self) Self {
return Self.{
return Self{
.re = self.re * other.re - self.im * other.im,
.im = self.im * other.re + self.re * other.im,
};
@ -63,14 +63,14 @@ pub fn Complex(comptime T: type) type {
const im_num = self.im * other.re - self.re * other.im;
const den = other.re * other.re + other.im * other.im;
return Self.{
return Self{
.re = re_num / den,
.im = im_num / den,
};
}
pub fn conjugate(self: Self) Self {
return Self.{
return Self{
.re = self.re,
.im = -self.im,
};
@ -78,7 +78,7 @@ pub fn Complex(comptime T: type) type {
pub fn reciprocal(self: Self) Self {
const m = self.re * self.re + self.im * self.im;
return Self.{
return Self{
.re = self.re / m,
.im = -self.im / m,
};
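
Complex(T) above keeps its API unchanged and only rewrites Self.{ ... } as Self{ ... }. A short usage sketch, assuming the type is exposed as std.math.complex.Complex; the expected re/im values follow directly from the add and mul definitions shown:

const std = @import("std");
const assert = std.debug.assert;
const Complex = std.math.complex.Complex; // assumed public path

test "Complex arithmetic" {
    const a = Complex(f32).new(1.0, 2.0);
    const b = Complex(f32).new(3.0, -1.0);

    const sum = a.add(b);
    assert(sum.re == 4.0 and sum.im == 1.0);

    // (1 + 2i)(3 - i) = 5 + 5i
    const prod = a.mul(b);
    assert(prod.re == 5.0 and prod.im == 5.0);
}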

View File

@ -18,7 +18,7 @@ pub fn exp(x: var) @typeOf(x) {
}
fn exp32(x_: f32) f32 {
const half = []f32.{ 0.5, -0.5 };
const half = []f32{ 0.5, -0.5 };
const ln2hi = 6.9314575195e-1;
const ln2lo = 1.4286067653e-6;
const invln2 = 1.4426950216e+0;
@ -93,7 +93,7 @@ fn exp32(x_: f32) f32 {
}
fn exp64(x_: f64) f64 {
const half = []const f64.{ 0.5, -0.5 };
const half = []const f64{ 0.5, -0.5 };
const ln2hi: f64 = 6.93147180369123816490e-01;
const ln2lo: f64 = 1.90821492927058770002e-10;
const invln2: f64 = 1.44269504088896338700e+00;
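
The half table feeding the exp kernels is now a plain []f32{ ... } / []const f64{ ... } literal. A tiny sanity sketch of the public exp wrapper from the first hunk (the tolerance is illustrative):

const std = @import("std");
const math = std.math;
const assert = std.debug.assert;

test "math.exp sanity" {
    assert(math.exp(f32(0.0)) == 1.0);
    assert(math.fabs(math.exp(f64(1.0)) - 2.718281828459045) < 1e-12);
}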

View File

@ -16,7 +16,7 @@ pub fn exp2(x: var) @typeOf(x) {
};
}
const exp2ft = []const f64.{
const exp2ft = []const f64{
0x1.6a09e667f3bcdp-1,
0x1.7a11473eb0187p-1,
0x1.8ace5422aa0dbp-1,
@ -90,7 +90,7 @@ fn exp2_32(x: f32) f32 {
return @floatCast(f32, r * uk);
}
const exp2dt = []f64.{
const exp2dt = []f64{
// exp2(z + eps) eps
0x1.6a09e667f3d5dp-1, 0x1.9880p-44,
0x1.6b052fa751744p-1, 0x1.8000p-50,
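
exp2ft and exp2dt are the lookup tables for the exp2 kernels, rewritten with the new array-literal form. A brief sketch of the public exp2 wrapper from the first hunk (tolerances illustrative):

const std = @import("std");
const math = std.math;
const assert = std.debug.assert;

test "math.exp2 sanity" {
    assert(math.fabs(math.exp2(f32(3.0)) - 8.0) < 1e-5);
    assert(math.fabs(math.exp2(f64(0.5)) - 1.4142135623730951) < 1e-12);
}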

View File

@ -71,7 +71,7 @@ fn fma64(x: f64, y: f64, z: f64) f64 {
}
}
const dd = struct.{
const dd = struct {
hi: f64,
lo: f64,
};
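
dd is the internal double-double helper used by the 64-bit kernel. A small sketch of the public wrapper, assuming std.math exposes fma(comptime T: type, x: T, y: T, z: T) in this era (that signature is an assumption, not shown in the hunk):

const std = @import("std");
const math = std.math;
const assert = std.debug.assert;

test "math.fma sanity" {
    // 2 * 3 + 1, all values exactly representable
    assert(math.fabs(math.fma(f64, 2.0, 3.0, 1.0) - 7.0) < 1e-12);
}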

View File

@ -9,7 +9,7 @@ const math = std.math;
const assert = std.debug.assert;
fn frexp_result(comptime T: type) type {
return struct.{
return struct {
significand: T,
exponent: i32,
};
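
frexp_result is the return type produced by this file, carrying the significand/exponent pair. A short sketch using those two fields (the public frexp(x) wrapper is assumed to return the result type shown):

const std = @import("std");
const math = std.math;
const assert = std.debug.assert;

test "math.frexp sanity" {
    // 48.0 == 0.75 * 2^6
    const r = math.frexp(f64(48.0));
    assert(r.significand == 0.75);
    assert(r.exponent == 6);
}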

View File

@ -244,17 +244,17 @@ test "math.max" {
assert(max(i32(-1), i32(2)) == 2);
}
pub fn mul(comptime T: type, a: T, b: T) (error.{Overflow}!T) {
pub fn mul(comptime T: type, a: T, b: T) (error{Overflow}!T) {
var answer: T = undefined;
return if (@mulWithOverflow(T, a, b, &answer)) error.Overflow else answer;
}
pub fn add(comptime T: type, a: T, b: T) (error.{Overflow}!T) {
pub fn add(comptime T: type, a: T, b: T) (error{Overflow}!T) {
var answer: T = undefined;
return if (@addWithOverflow(T, a, b, &answer)) error.Overflow else answer;
}
pub fn sub(comptime T: type, a: T, b: T) (error.{Overflow}!T) {
pub fn sub(comptime T: type, a: T, b: T) (error{Overflow}!T) {
var answer: T = undefined;
return if (@subWithOverflow(T, a, b, &answer)) error.Overflow else answer;
}
@ -559,7 +559,7 @@ test "math.negateCast" {
/// Cast an integer to a different integer type. If the value doesn't fit,
/// return an error.
pub fn cast(comptime T: type, x: var) (error.{Overflow}!T) {
pub fn cast(comptime T: type, x: var) (error{Overflow}!T) {
comptime assert(@typeId(T) == builtin.TypeId.Int); // must pass an integer
comptime assert(@typeId(@typeOf(x)) == builtin.TypeId.Int); // must pass an integer
if (maxInt(@typeOf(x)) > maxInt(T) and x > maxInt(T)) {
@ -581,7 +581,7 @@ test "math.cast" {
assert(@typeOf(try cast(u8, u32(255))) == u8);
}
pub const AlignCastError = error.{UnalignedMemory};
pub const AlignCastError = error{UnalignedMemory};
/// Align cast a pointer but return an error if it's the wrong alignment
pub fn alignCast(comptime alignment: u29, ptr: var) AlignCastError!@typeOf(@alignCast(alignment, ptr)) {

View File

@ -9,7 +9,7 @@ const assert = std.debug.assert;
const maxInt = std.math.maxInt;
fn modf_result(comptime T: type) type {
return struct.{
return struct {
fpart: T,
ipart: T,
};
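
modf_result carries the fractional and integral parts produced by this file. A small sketch using those fields (the public modf(x) wrapper is assumed to return the result type shown):

const std = @import("std");
const math = std.math;
const assert = std.debug.assert;

test "math.modf sanity" {
    const r = math.modf(f64(3.25));
    assert(r.ipart == 3.0);
    assert(r.fpart == 0.25);
}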

View File

@ -15,7 +15,7 @@ const assert = std.debug.assert;
const assertError = std.debug.assertError;
// This implementation is based on that from the rust stlib
pub fn powi(comptime T: type, x: T, y: T) (error.{
pub fn powi(comptime T: type, x: T, y: T) (error{
Overflow,
Underflow,
}!T) {
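
powi's error set is now written error{ Overflow, Underflow }. A brief sketch with the signature shown above (the u8 overflow case is illustrative):

const std = @import("std");
const math = std.math;
const assert = std.debug.assert;

test "math.powi sanity" {
    assert((try math.powi(i32, 2, 10)) == 1024);

    if (math.powi(u8, 2, 9)) |value| {
        unreachable; // 2^9 does not fit in u8
    } else |err| {
        assert(err == error.Overflow);
    }
}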

Some files were not shown because too many files have changed in this diff.