Merge pull request #7005 from jshholland/deprecate-span
Remove ArrayList.span
Commit: 6d5b76a75d
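The change is mechanical at every call site: the deprecated `span()` method is gone, and callers read the `items` field of `ArrayList` (and of the other `std` containers touched below) directly. A minimal before/after sketch, written against the `std` API of this era; the allocator choice and the `warn` call are illustrative only:

```zig
const std = @import("std");

pub fn main() !void {
    var list = std.ArrayList(u8).init(std.heap.page_allocator);
    defer list.deinit();
    try list.appendSlice("hello");

    // Before this change: const slice = list.span();
    // After this change: read the `items` field directly; it is the same []u8 slice.
    const slice = list.items;

    var sum: usize = 0;
    for (slice) |byte| {
        sum += byte;
    }
    std.debug.warn("len={} sum={}\n", .{ slice.len, sum });
}
```

The same substitution applies wherever an intermediate slice was taken: `list.span()[i]` becomes `list.items[i]`, and `f(list.span())` becomes `f(list.items)`.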
```diff
@@ -59,13 +59,7 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?u29) type {
 self.allocator.free(self.allocatedSlice());
 }

-/// Deprecated: use `items` field directly.
-/// Return contents as a slice. Only valid while the list
-/// doesn't change size.
-pub fn span(self: anytype) @TypeOf(self.items) {
-return self.items;
-}
-
+pub const span = @compileError("deprecated: use `items` field directly");
 pub const toSlice = @compileError("deprecated: use `items` field directly");
 pub const toSliceConst = @compileError("deprecated: use `items` field directly");
 pub const at = @compileError("deprecated: use `list.items[i]`");
```
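Note that `span` is not silently dropped from `ArrayList`: like `toSlice`, `toSliceConst`, and `at` before it, it is left behind as a `@compileError` declaration, so any call site that was missed fails at compile time with a message pointing at the `items` field. A small sketch of that deprecation pattern; the `Legacy` container below is hypothetical, not part of `std`:

```zig
const std = @import("std");

// Hypothetical container demonstrating the pattern used in the hunk above:
// the removed method becomes a declaration that errors only when referenced.
const Legacy = struct {
    items: []const u8,

    pub const span = @compileError("deprecated: use `items` field directly");
};

test "reading the items field still works" {
    const l = Legacy{ .items = "abc" };
    std.testing.expectEqual(@as(usize, 3), l.items.len);
    // Referencing `Legacy.span` (or `l.span`) would stop compilation
    // with the "deprecated" message above.
}
```

Because Zig analyzes declarations lazily, the `@compileError` fires only when `span` is actually referenced, which is what makes this usable as a deprecation mechanism.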
```diff
@@ -1061,7 +1055,7 @@ test "std.ArrayList(u8) implements outStream" {
 const y: i32 = 1234;
 try buffer.outStream().print("x: {}\ny: {}\n", .{ x, y });

-testing.expectEqualSlices(u8, "x: 42\ny: 1234\n", buffer.span());
+testing.expectEqualSlices(u8, "x: 42\ny: 1234\n", buffer.items);
 }

 test "std.ArrayList/ArrayListUnmanaged.shrink still sets length on error.OutOfMemory" {

@@ -147,7 +147,7 @@ pub fn ArrayListSentineled(comptime T: type, comptime sentinel: T) type {

 pub fn replaceContents(self: *Self, m: []const T) !void {
 try self.resize(m.len);
-mem.copy(T, self.list.span(), m);
+mem.copy(T, self.list.items, m);
 }

 /// Initializes an OutStream which will append to the list.

@@ -386,7 +386,7 @@ pub const Builder = struct {
 }
 }

-for (wanted_steps.span()) |s| {
+for (wanted_steps.items) |s| {
 try self.makeOneStep(s);
 }
 }

@@ -403,7 +403,7 @@ pub const Builder = struct {
 const uninstall_tls = @fieldParentPtr(TopLevelStep, "step", uninstall_step);
 const self = @fieldParentPtr(Builder, "uninstall_tls", uninstall_tls);

-for (self.installed_files.span()) |installed_file| {
+for (self.installed_files.items) |installed_file| {
 const full_path = self.getInstallPath(installed_file.dir, installed_file.path);
 if (self.verbose) {
 warn("rm {}\n", .{full_path});

@@ -421,7 +421,7 @@ pub const Builder = struct {
 }
 s.loop_flag = true;

-for (s.dependencies.span()) |dep| {
+for (s.dependencies.items) |dep| {
 self.makeOneStep(dep) catch |err| {
 if (err == error.DependencyLoopDetected) {
 warn(" {}\n", .{s.name});

@@ -436,7 +436,7 @@ pub const Builder = struct {
 }

 fn getTopLevelStepByName(self: *Builder, name: []const u8) !*Step {
-for (self.top_level_steps.span()) |top_level_step| {
+for (self.top_level_steps.items) |top_level_step| {
 if (mem.eql(u8, top_level_step.step.name, name)) {
 return &top_level_step.step;
 }

@@ -550,7 +550,7 @@ pub const Builder = struct {
 .Scalar => |s| {
 return self.allocator.dupe([]const u8, &[_][]const u8{s}) catch unreachable;
 },
-.List => |lst| return lst.span(),
+.List => |lst| return lst.items,
 },
 }
 }

@@ -951,7 +951,7 @@ pub const Builder = struct {
 pub fn findProgram(self: *Builder, names: []const []const u8, paths: []const []const u8) ![]const u8 {
 // TODO report error for ambiguous situations
 const exe_extension = @as(CrossTarget, .{}).exeFileExt();
-for (self.search_prefixes.span()) |search_prefix| {
+for (self.search_prefixes.items) |search_prefix| {
 for (names) |name| {
 if (fs.path.isAbsolute(name)) {
 return name;

@@ -1096,7 +1096,7 @@ pub const Builder = struct {
 .desc = tok_it.rest(),
 });
 }
-return list.span();
+return list.items;
 }

 fn getPkgConfigList(self: *Builder) ![]const PkgConfigPkg {

@@ -1504,7 +1504,7 @@ pub const LibExeObjStep = struct {
 if (isLibCLibrary(name)) {
 return self.is_linking_libc;
 }
-for (self.link_objects.span()) |link_object| {
+for (self.link_objects.items) |link_object| {
 switch (link_object) {
 LinkObject.SystemLib => |n| if (mem.eql(u8, n, name)) return true,
 else => continue,

@@ -1903,7 +1903,7 @@ pub const LibExeObjStep = struct {
 self.include_dirs.append(IncludeDir{ .OtherStep = other }) catch unreachable;

 // Inherit dependency on system libraries
-for (other.link_objects.span()) |link_object| {
+for (other.link_objects.items) |link_object| {
 switch (link_object) {
 .SystemLib => |name| self.linkSystemLibrary(name),
 else => continue,

@@ -1965,7 +1965,7 @@ pub const LibExeObjStep = struct {
 if (self.root_src) |root_src| try zig_args.append(root_src.getPath(builder));

 var prev_has_extra_flags = false;
-for (self.link_objects.span()) |link_object| {
+for (self.link_objects.items) |link_object| {
 switch (link_object) {
 .StaticPath => |static_path| {
 try zig_args.append(builder.pathFromRoot(static_path));

@@ -2035,7 +2035,7 @@ pub const LibExeObjStep = struct {
 &[_][]const u8{ builder.cache_root, builder.fmt("{}_build_options.zig", .{self.name}) },
 );
 const path_from_root = builder.pathFromRoot(build_options_file);
-try fs.cwd().writeFile(path_from_root, self.build_options_contents.span());
+try fs.cwd().writeFile(path_from_root, self.build_options_contents.items);
 try zig_args.append("--pkg-begin");
 try zig_args.append("build_options");
 try zig_args.append(path_from_root);

@@ -2233,11 +2233,11 @@ pub const LibExeObjStep = struct {
 },
 }

-for (self.packages.span()) |pkg| {
+for (self.packages.items) |pkg| {
 try self.makePackageCmd(pkg, &zig_args);
 }

-for (self.include_dirs.span()) |include_dir| {
+for (self.include_dirs.items) |include_dir| {
 switch (include_dir) {
 .RawPath => |include_path| {
 try zig_args.append("-I");

@@ -2255,18 +2255,18 @@ pub const LibExeObjStep = struct {
 }
 }

-for (self.lib_paths.span()) |lib_path| {
+for (self.lib_paths.items) |lib_path| {
 try zig_args.append("-L");
 try zig_args.append(lib_path);
 }

-for (self.c_macros.span()) |c_macro| {
+for (self.c_macros.items) |c_macro| {
 try zig_args.append("-D");
 try zig_args.append(c_macro);
 }

 if (self.target.isDarwin()) {
-for (self.framework_dirs.span()) |dir| {
+for (self.framework_dirs.items) |dir| {
 try zig_args.append("-F");
 try zig_args.append(dir);
 }

@@ -2322,11 +2322,11 @@ pub const LibExeObjStep = struct {
 }

 if (self.kind == Kind.Test) {
-try builder.spawnChild(zig_args.span());
+try builder.spawnChild(zig_args.items);
 } else {
 try zig_args.append("--enable-cache");

-const output_dir_nl = try builder.execFromStep(zig_args.span(), &self.step);
+const output_dir_nl = try builder.execFromStep(zig_args.items, &self.step);
 const build_output_dir = mem.trimRight(u8, output_dir_nl, "\r\n");

 if (self.output_dir) |output_dir| {

@@ -79,7 +79,7 @@ const BinaryElfOutput = struct {
 newSegment.binaryOffset = 0;
 newSegment.firstSection = null;

-for (self.sections.span()) |section| {
+for (self.sections.items) |section| {
 if (sectionWithinSegment(section, phdr)) {
 if (section.segment) |sectionSegment| {
 if (sectionSegment.elfOffset > newSegment.elfOffset) {

@@ -99,7 +99,7 @@ const BinaryElfOutput = struct {
 }
 }

-sort.sort(*BinaryElfSegment, self.segments.span(), {}, segmentSortCompare);
+sort.sort(*BinaryElfSegment, self.segments.items, {}, segmentSortCompare);

 if (self.segments.items.len > 0) {
 const firstSegment = self.segments.items[0];

@@ -112,19 +112,19 @@ const BinaryElfOutput = struct {

 const basePhysicalAddress = firstSegment.physicalAddress;

-for (self.segments.span()) |segment| {
+for (self.segments.items) |segment| {
 segment.binaryOffset = segment.physicalAddress - basePhysicalAddress;
 }
 }
 }

-for (self.sections.span()) |section| {
+for (self.sections.items) |section| {
 if (section.segment) |segment| {
 section.binaryOffset = segment.binaryOffset + (section.elfOffset - segment.elfOffset);
 }
 }

-sort.sort(*BinaryElfSection, self.sections.span(), {}, sectionSortCompare);
+sort.sort(*BinaryElfSection, self.sections.items, {}, sectionSortCompare);

 return self;
 }

@@ -172,7 +172,7 @@ fn emitRaw(allocator: *Allocator, elf_path: []const u8, raw_path: []const u8) !v
 var binary_elf_output = try BinaryElfOutput.parse(allocator, elf_file);
 defer binary_elf_output.deinit();

-for (binary_elf_output.sections.span()) |section| {
+for (binary_elf_output.sections.items) |section| {
 try writeBinaryElfSection(elf_file, out_file, section);
 }
 }

@@ -159,7 +159,7 @@ pub const RunStep = struct {
 const cwd = if (self.cwd) |cwd| self.builder.pathFromRoot(cwd) else self.builder.build_root;

 var argv_list = ArrayList([]const u8).init(self.builder.allocator);
-for (self.argv.span()) |arg| {
+for (self.argv.items) |arg| {
 switch (arg) {
 Arg.Bytes => |bytes| try argv_list.append(bytes),
 Arg.WriteFile => |file| {

@@ -176,7 +176,7 @@ pub const RunStep = struct {
 }
 }

-const argv = argv_list.span();
+const argv = argv_list.items;

 const child = std.ChildProcess.init(argv, self.builder.allocator) catch unreachable;
 defer child.deinit();

@@ -312,7 +312,7 @@ pub const RunStep = struct {
 }

 fn addPathForDynLibs(self: *RunStep, artifact: *LibExeObjStep) void {
-for (artifact.link_objects.span()) |link_object| {
+for (artifact.link_objects.items) |link_object| {
 switch (link_object) {
 .OtherStep => |other| {
 if (other.target.isWindows() and other.isDynamicLibrary()) {

@@ -86,7 +86,7 @@ pub const TranslateCStep = struct {

 try argv_list.append(self.source.getPath(self.builder));

-const output_path_nl = try self.builder.execFromStep(argv_list.span(), &self.step);
+const output_path_nl = try self.builder.execFromStep(argv_list.items, &self.step);
 const output_path = mem.trimRight(u8, output_path_nl, "\r\n");

 self.out_basename = fs.path.basename(output_path);

@@ -64,7 +64,7 @@ pub const WriteFileStep = struct {
 // new random bytes when WriteFileStep implementation is modified
 // in a non-backwards-compatible way.
 hash.update("eagVR1dYXoE7ARDP");
-for (self.files.span()) |file| {
+for (self.files.items) |file| {
 hash.update(file.basename);
 hash.update(file.bytes);
 hash.update("|");

@@ -85,7 +85,7 @@ pub const WriteFileStep = struct {
 };
 var dir = try fs.cwd().openDir(self.output_dir, .{});
 defer dir.close();
-for (self.files.span()) |file| {
+for (self.files.items) |file| {
 dir.writeFile(file.basename, file.bytes) catch |err| {
 warn("unable to write {} into {}: {}\n", .{
 file.basename,

@@ -216,7 +216,7 @@ pub const Coff = struct {
 blk: while (i < debug_dir_entry_count) : (i += 1) {
 const debug_dir_entry = try in.readStruct(DebugDirectoryEntry);
 if (debug_dir_entry.type == IMAGE_DEBUG_TYPE_CODEVIEW) {
-for (self.sections.span()) |*section| {
+for (self.sections.items) |*section| {
 const section_start = section.header.virtual_address;
 const section_size = section.header.misc.virtual_size;
 const rva = debug_dir_entry.address_of_raw_data;

@@ -282,7 +282,7 @@ pub const Coff = struct {
 }

 pub fn getSection(self: *Coff, comptime name: []const u8) ?*Section {
-for (self.sections.span()) |*sec| {
+for (self.sections.items) |*sec| {
 if (mem.eql(u8, sec.header.name[0..name.len], name)) {
 return sec;
 }

@@ -1507,7 +1507,7 @@ pub const ModuleDebugInfo = switch (builtin.os.tag) {
 const mod_index = for (self.sect_contribs) |sect_contrib| {
 if (sect_contrib.Section > self.coff.sections.items.len) continue;
 // Remember that SectionContribEntry.Section is 1-based.
-coff_section = &self.coff.sections.span()[sect_contrib.Section - 1];
+coff_section = &self.coff.sections.items[sect_contrib.Section - 1];

 const vaddr_start = coff_section.header.virtual_address + sect_contrib.Offset;
 const vaddr_end = vaddr_start + sect_contrib.Size;

@@ -87,7 +87,7 @@ const Die = struct {
 };

 fn getAttr(self: *const Die, id: u64) ?*const FormValue {
-for (self.attrs.span()) |*attr| {
+for (self.attrs.items) |*attr| {
 if (attr.id == id) return &attr.value;
 }
 return null;

@@ -371,7 +371,7 @@ fn parseFormValue(allocator: *mem.Allocator, in_stream: anytype, form_id: u64, e
 }

 fn getAbbrevTableEntry(abbrev_table: *const AbbrevTable, abbrev_code: u64) ?*const AbbrevTableEntry {
-for (abbrev_table.span()) |*table_entry| {
+for (abbrev_table.items) |*table_entry| {
 if (table_entry.abbrev_code == abbrev_code) return table_entry;
 }
 return null;

@@ -395,7 +395,7 @@ pub const DwarfInfo = struct {
 }

 pub fn getSymbolName(di: *DwarfInfo, address: u64) ?[]const u8 {
-for (di.func_list.span()) |*func| {
+for (di.func_list.items) |*func| {
 if (func.pc_range) |range| {
 if (address >= range.start and address < range.end) {
 return func.name;

@@ -584,7 +584,7 @@ pub const DwarfInfo = struct {
 }

 pub fn findCompileUnit(di: *DwarfInfo, target_address: u64) !*const CompileUnit {
-for (di.compile_unit_list.span()) |*compile_unit| {
+for (di.compile_unit_list.items) |*compile_unit| {
 if (compile_unit.pc_range) |range| {
 if (target_address >= range.start and target_address < range.end) return compile_unit;
 }

@@ -632,7 +632,7 @@ pub const DwarfInfo = struct {
 /// Gets an already existing AbbrevTable given the abbrev_offset, or if not found,
 /// seeks in the stream and parses it.
 fn getAbbrevTable(di: *DwarfInfo, abbrev_offset: u64) !*const AbbrevTable {
-for (di.abbrev_table_list.span()) |*header| {
+for (di.abbrev_table_list.items) |*header| {
 if (header.offset == abbrev_offset) {
 return &header.table;
 }

@@ -686,7 +686,7 @@ pub const DwarfInfo = struct {
 .attrs = ArrayList(Die.Attr).init(di.allocator()),
 };
 try result.attrs.resize(table_entry.attrs.items.len);
-for (table_entry.attrs.span()) |attr, i| {
+for (table_entry.attrs.items) |attr, i| {
 result.attrs.items[i] = Die.Attr{
 .id = attr.attr_id,
 .value = try parseFormValue(di.allocator(), in_stream, attr.form_id, di.endian, is_64),

@@ -753,7 +753,7 @@ pub const DwarfInfo = struct {
 }

 var file_entries = ArrayList(FileEntry).init(di.allocator());
-var prog = LineNumberProgram.init(default_is_stmt, include_directories.span(), &file_entries, target_address);
+var prog = LineNumberProgram.init(default_is_stmt, include_directories.items, &file_entries, target_address);

 while (true) {
 const file_name = try in.readUntilDelimiterAlloc(di.allocator(), 0, math.maxInt(usize));

@@ -2138,7 +2138,7 @@ pub const Walker = struct {
 while (true) {
 if (self.stack.items.len == 0) return null;
 // `top` becomes invalid after appending to `self.stack`.
-const top = &self.stack.span()[self.stack.items.len - 1];
+const top = &self.stack.items[self.stack.items.len - 1];
 const dirname_len = top.dirname_len;
 if (try top.dir_it.next()) |base| {
 self.name_buffer.shrink(dirname_len);

@@ -2159,8 +2159,8 @@ pub const Walker = struct {
 }
 return Entry{
 .dir = top.dir_it.dir,
-.basename = self.name_buffer.span()[dirname_len + 1 ..],
-.path = self.name_buffer.span(),
+.basename = self.name_buffer.items[dirname_len + 1 ..],
+.path = self.name_buffer.items,
 .kind = base.kind,
 };
 } else {

@@ -62,7 +62,7 @@ pub fn Reader(
 var start_index: usize = original_len;
 while (true) {
 array_list.expandToCapacity();
-const dest_slice = array_list.span()[start_index..];
+const dest_slice = array_list.items[start_index..];
 const bytes_read = try self.readAll(dest_slice);
 start_index += bytes_read;

@@ -1270,7 +1270,7 @@ pub const Value = union(enum) {
 .Integer => |inner| try stringify(inner, options, out_stream),
 .Float => |inner| try stringify(inner, options, out_stream),
 .String => |inner| try stringify(inner, options, out_stream),
-.Array => |inner| try stringify(inner.span(), options, out_stream),
+.Array => |inner| try stringify(inner.items, options, out_stream),
 .Object => |inner| {
 try out_stream.writeByte('{');
 var field_output = false;

@@ -2057,7 +2057,7 @@ pub const Parser = struct {
 }

 fn pushToParent(p: *Parser, value: *const Value) !void {
-switch (p.stack.span()[p.stack.items.len - 1]) {
+switch (p.stack.items[p.stack.items.len - 1]) {
 // Object Parent -> [ ..., object, <key>, value ]
 Value.String => |key| {
 _ = p.stack.pop();

@@ -796,7 +796,7 @@ pub fn getAddressList(allocator: *mem.Allocator, name: []const u8, port: u16) !*
 result.canon_name = canon.toOwnedSlice();
 }

-for (lookup_addrs.span()) |lookup_addr, i| {
+for (lookup_addrs.items) |lookup_addr, i| {
 result.addrs[i] = lookup_addr.addr;
 assert(result.addrs[i].getPort() == port);
 }

@@ -849,7 +849,7 @@ fn linuxLookupName(
 // No further processing is needed if there are fewer than 2
 // results or if there are only IPv4 results.
 if (addrs.items.len == 1 or family == os.AF_INET) return;
-const all_ip4 = for (addrs.span()) |addr| {
+const all_ip4 = for (addrs.items) |addr| {
 if (addr.addr.any.family != os.AF_INET) break false;
 } else true;
 if (all_ip4) return;

@@ -861,7 +861,7 @@ fn linuxLookupName(
 // So far the label/precedence table cannot be customized.
 // This implementation is ported from musl libc.
 // A more idiomatic "ziggy" implementation would be welcome.
-for (addrs.span()) |*addr, i| {
+for (addrs.items) |*addr, i| {
 var key: i32 = 0;
 var sa6: os.sockaddr_in6 = undefined;
 @memset(@ptrCast([*]u8, &sa6), 0, @sizeOf(os.sockaddr_in6));

@@ -926,7 +926,7 @@ fn linuxLookupName(
 key |= (MAXADDRS - @intCast(i32, i)) << DAS_ORDER_SHIFT;
 addr.sortkey = key;
 }
-std.sort.sort(LookupAddr, addrs.span(), {}, addrCmpLessThan);
+std.sort.sort(LookupAddr, addrs.items, {}, addrCmpLessThan);
 }

 const Policy = struct {

@@ -1361,9 +1361,9 @@ fn resMSendRc(
 defer ns_list.deinit();

 try ns_list.resize(rc.ns.items.len);
-const ns = ns_list.span();
+const ns = ns_list.items;

-for (rc.ns.span()) |iplit, i| {
+for (rc.ns.items) |iplit, i| {
 ns[i] = iplit.addr;
 assert(ns[i].getPort() == 53);
 if (iplit.addr.any.family != os.AF_INET) {

@@ -654,7 +654,7 @@ const MsfStream = struct {
 while (true) {
 const byte = try self.reader().readByte();
 if (byte == 0) {
-return list.span();
+return list.items;
 }
 try list.append(byte);
 }

@@ -519,8 +519,8 @@ pub fn argsAlloc(allocator: *mem.Allocator) ![][:0]u8 {
 try slice_list.append(arg.len);
 }

-const contents_slice = contents.span();
-const slice_sizes = slice_list.span();
+const contents_slice = contents.items;
+const slice_sizes = slice_list.items;
 const contents_size_bytes = try math.add(usize, contents_slice.len, slice_sizes.len);
 const slice_list_bytes = try math.mul(usize, @sizeOf([]u8), slice_sizes.len);
 const total_bytes = try math.add(usize, slice_list_bytes, contents_size_bytes);

@@ -130,7 +130,7 @@ pub fn main() !void {
 if (builder.validateUserInputDidItFail())
 return usageAndErr(builder, true, stderr_stream);

-builder.make(targets.span()) catch |err| {
+builder.make(targets.items) catch |err| {
 switch (err) {
 error.InvalidStepName => {
 return usageAndErr(builder, true, stderr_stream);

@@ -165,7 +165,7 @@ fn usage(builder: *Builder, already_ran_build: bool, out_stream: anytype) !void
 , .{builder.zig_exe});

 const allocator = builder.allocator;
-for (builder.top_level_steps.span()) |top_level_step| {
+for (builder.top_level_steps.items) |top_level_step| {
 const name = if (&top_level_step.step == builder.default_step)
 try fmt.allocPrint(allocator, "{} (default)", .{top_level_step.step.name})
 else

@@ -189,7 +189,7 @@ fn usage(builder: *Builder, already_ran_build: bool, out_stream: anytype) !void
 if (builder.available_options_list.items.len == 0) {
 try out_stream.print(" (none)\n", .{});
 } else {
-for (builder.available_options_list.span()) |option| {
+for (builder.available_options_list.items) |option| {
 const name = try fmt.allocPrint(allocator, " -D{}=[{}]", .{
 option.name,
 Builder.typeIdName(option.type_id),

@@ -128,7 +128,7 @@ pub const NativePaths = struct {
 }

 fn deinitArray(array: *ArrayList([:0]u8)) void {
-for (array.span()) |item| {
+for (array.items) |item| {
 array.allocator.free(item);
 }
 array.deinit();

@@ -340,7 +340,7 @@ pub const LibCInstallation = struct {
 result_buf.shrink(0);
 try result_buf.outStream().print("{}\\Include\\{}\\ucrt", .{ search.path, search.version });

-var dir = fs.cwd().openDir(result_buf.span(), .{}) catch |err| switch (err) {
+var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
 error.FileNotFound,
 error.NotDir,
 error.NoDevice,

@@ -386,7 +386,7 @@ pub const LibCInstallation = struct {
 result_buf.shrink(0);
 try result_buf.outStream().print("{}\\Lib\\{}\\ucrt\\{}", .{ search.path, search.version, arch_sub_dir });

-var dir = fs.cwd().openDir(result_buf.span(), .{}) catch |err| switch (err) {
+var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
 error.FileNotFound,
 error.NotDir,
 error.NoDevice,

@@ -441,7 +441,7 @@ pub const LibCInstallation = struct {
 const stream = result_buf.outStream();
 try stream.print("{}\\Lib\\{}\\um\\{}", .{ search.path, search.version, arch_sub_dir });

-var dir = fs.cwd().openDir(result_buf.span(), .{}) catch |err| switch (err) {
+var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
 error.FileNotFound,
 error.NotDir,
 error.NoDevice,

@@ -2505,7 +2505,7 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
 defer fmt.seen.deinit();
 defer fmt.out_buffer.deinit();

-for (input_files.span()) |file_path| {
+for (input_files.items) |file_path| {
 // Get the real path here to avoid Windows failing on relative file paths with . or .. in them.
 const real_path = fs.realpathAlloc(gpa, file_path) catch |err| {
 fatal("unable to open '{}': {}", .{ file_path, err });

@@ -2685,7 +2685,7 @@ fn printErrMsgToFile(
 defer text_buf.deinit();
 const out_stream = text_buf.outStream();
 try parse_error.render(tree.token_ids, out_stream);
-const text = text_buf.span();
+const text = text_buf.items;

 const stream = file.outStream();
 try stream.print("{}:{}:{}: error: {}\n", .{ path, start_loc.line + 1, start_loc.column + 1, text });

@@ -2834,7 +2834,7 @@ pub const ClangArgIterator = struct {
 defer resp_arg_list.deinit();
 {
 errdefer {
-for (resp_arg_list.span()) |item| {
+for (resp_arg_list.items) |item| {
 allocator.free(mem.span(item));
 }
 }

@@ -6564,7 +6564,7 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Nod

 fn tokenSlice(c: *Context, token: ast.TokenIndex) []u8 {
 const tok = c.token_locs.items[token];
-const slice = c.source_buffer.span()[tok.start..tok.end];
+const slice = c.source_buffer.items[tok.start..tok.end];
 return if (mem.startsWith(u8, slice, "@\""))
 slice[2 .. slice.len - 1]
 else

@@ -91,7 +91,7 @@ pub const CompareOutputContext = struct {
 const b = self.b;

 const write_src = b.addWriteFiles();
-for (case.sources.span()) |src_file| {
+for (case.sources.items) |src_file| {
 write_src.add(src_file.filename, src_file.source);
 }

@@ -105,7 +105,7 @@ pub const CompareOutputContext = struct {
 }

 const exe = b.addExecutable("test", null);
-exe.addAssemblyFileFromWriteFileStep(write_src, case.sources.span()[0].filename);
+exe.addAssemblyFileFromWriteFileStep(write_src, case.sources.items[0].filename);

 const run = exe.run();
 run.addArgs(case.cli_args);

@@ -125,7 +125,7 @@ pub const CompareOutputContext = struct {
 if (mem.indexOf(u8, annotated_case_name, filter) == null) continue;
 }

-const basename = case.sources.span()[0].filename;
+const basename = case.sources.items[0].filename;
 const exe = b.addExecutableFromWriteFileStep("test", write_src, basename);
 exe.setBuildMode(mode);
 if (case.link_libc) {

@@ -146,7 +146,7 @@ pub const CompareOutputContext = struct {
 if (mem.indexOf(u8, annotated_case_name, filter) == null) return;
 }

-const basename = case.sources.span()[0].filename;
+const basename = case.sources.items[0].filename;
 const exe = b.addExecutableFromWriteFileStep("test", write_src, basename);
 if (case.link_libc) {
 exe.linkSystemLibrary("c");

@@ -82,13 +82,13 @@ pub const RunTranslatedCContext = struct {
 }

 const write_src = b.addWriteFiles();
-for (case.sources.span()) |src_file| {
+for (case.sources.items) |src_file| {
 write_src.add(src_file.filename, src_file.source);
 }
 const translate_c = b.addTranslateC(.{
 .write_file = .{
 .step = write_src,
-.basename = case.sources.span()[0].filename,
+.basename = case.sources.items[0].filename,
 },
 });
 translate_c.step.name = b.fmt("{} translate-c", .{annotated_case_name});

@@ -105,20 +105,20 @@ pub const TranslateCContext = struct {
 }

 const write_src = b.addWriteFiles();
-for (case.sources.span()) |src_file| {
+for (case.sources.items) |src_file| {
 write_src.add(src_file.filename, src_file.source);
 }

 const translate_c = b.addTranslateC(.{
 .write_file = .{
 .step = write_src,
-.basename = case.sources.span()[0].filename,
+.basename = case.sources.items[0].filename,
 },
 });
 translate_c.step.name = annotated_case_name;
 translate_c.setTarget(case.target);

-const check_file = translate_c.addCheckFile(case.expected_lines.span());
+const check_file = translate_c.addCheckFile(case.expected_lines.items);

 self.step.dependOn(&check_file.step);
 }

@@ -131,7 +131,7 @@ fn expandString(input: []const u8, output: *ArrayListSentineled(u8, 0)) !void {
 try expandNode(root, &result_list);

 try output.resize(0);
-for (result_list.span()) |buf, i| {
+for (result_list.items) |buf, i| {
 if (i != 0) {
 try output.append(' ');
 }

@@ -157,8 +157,8 @@ fn expandNode(node: Node, output: *ArrayList(ArrayListSentineled(u8, 0))) Expand
 var child_list_b = ArrayList(ArrayListSentineled(u8, 0)).init(global_allocator);
 try expandNode(b_node, &child_list_b);

-for (child_list_a.span()) |buf_a| {
-for (child_list_b.span()) |buf_b| {
+for (child_list_a.items) |buf_a| {
+for (child_list_b.items) |buf_b| {
 var combined_buf = try ArrayListSentineled(u8, 0).initFromBuffer(buf_a);
 try combined_buf.appendSlice(buf_b.span());
 try output.append(combined_buf);

@@ -166,11 +166,11 @@ fn expandNode(node: Node, output: *ArrayList(ArrayListSentineled(u8, 0))) Expand
 }
 },
 Node.List => |list| {
-for (list.span()) |child_node| {
+for (list.items) |child_node| {
 var child_list = ArrayList(ArrayListSentineled(u8, 0)).init(global_allocator);
 try expandNode(child_node, &child_list);

-for (child_list.span()) |buf| {
+for (child_list.items) |buf| {
 try output.append(buf);
 }
 }

@@ -183,13 +183,13 @@ const Dump = struct {
 try mergeSameStrings(&self.zig_version, zig_version);
 try mergeSameStrings(&self.root_name, root_name);

-for (params.get("builds").?.value.Array.span()) |json_build| {
+for (params.get("builds").?.value.Array.items) |json_build| {
 const target = json_build.Object.get("target").?.value.String;
 try self.targets.append(target);
 }

 // Merge files. If the string matches, it's the same file.
-const other_files = root.Object.get("files").?.value.Array.span();
+const other_files = root.Object.get("files").?.value.Array.items;
 var other_file_to_mine = std.AutoHashMap(usize, usize).init(self.a());
 for (other_files) |other_file, i| {
 const gop = try self.file_map.getOrPut(other_file.String);

@@ -201,7 +201,7 @@ const Dump = struct {
 }

 // Merge AST nodes. If the file id, line, and column all match, it's the same AST node.
-const other_ast_nodes = root.Object.get("astNodes").?.value.Array.span();
+const other_ast_nodes = root.Object.get("astNodes").?.value.Array.items;
 var other_ast_node_to_mine = std.AutoHashMap(usize, usize).init(self.a());
 for (other_ast_nodes) |other_ast_node_json, i| {
 const other_file_id = jsonObjInt(other_ast_node_json, "file");

@@ -221,9 +221,9 @@ const Dump = struct {
 // convert fields lists
 for (other_ast_nodes) |other_ast_node_json, i| {
 const my_node_index = other_ast_node_to_mine.get(i).?.value;
-const my_node = &self.node_list.span()[my_node_index];
+const my_node = &self.node_list.items[my_node_index];
 if (other_ast_node_json.Object.get("fields")) |fields_json_kv| {
-const other_fields = fields_json_kv.value.Array.span();
+const other_fields = fields_json_kv.value.Array.items;
 my_node.fields = try self.a().alloc(usize, other_fields.len);
 for (other_fields) |other_field_index, field_i| {
 const other_index = @intCast(usize, other_field_index.Integer);

@@ -233,7 +233,7 @@ const Dump = struct {
 }

 // Merge errors. If the AST Node matches, it's the same error value.
-const other_errors = root.Object.get("errors").?.value.Array.span();
+const other_errors = root.Object.get("errors").?.value.Array.items;
 var other_error_to_mine = std.AutoHashMap(usize, usize).init(self.a());
 for (other_errors) |other_error_json, i| {
 const other_src_id = jsonObjInt(other_error_json, "src");

@@ -253,7 +253,7 @@ const Dump = struct {
 // First we identify all the simple types and merge those.
 // Example: void, type, noreturn
 // We can also do integers and floats.
-const other_types = root.Object.get("types").?.value.Array.span();
+const other_types = root.Object.get("types").?.value.Array.items;
 var other_types_to_mine = std.AutoHashMap(usize, usize).init(self.a());
 for (other_types) |other_type_json, i| {
 const type_kind = jsonObjInt(other_type_json, "kind");

@@ -336,7 +336,7 @@ const Dump = struct {

 try jw.objectField("builds");
 try jw.beginArray();
-for (self.targets.span()) |target| {
+for (self.targets.items) |target| {
 try jw.arrayElem();
 try jw.beginObject();
 try jw.objectField("target");

@@ -349,7 +349,7 @@ const Dump = struct {

 try jw.objectField("types");
 try jw.beginArray();
-for (self.type_list.span()) |t| {
+for (self.type_list.items) |t| {
 try jw.arrayElem();
 try jw.beginObject();

@@ -379,7 +379,7 @@ const Dump = struct {

 try jw.objectField("errors");
 try jw.beginArray();
-for (self.error_list.span()) |zig_error| {
+for (self.error_list.items) |zig_error| {
 try jw.arrayElem();
 try jw.beginObject();

@@ -395,7 +395,7 @@ const Dump = struct {

 try jw.objectField("astNodes");
 try jw.beginArray();
-for (self.node_list.span()) |node| {
+for (self.node_list.items) |node| {
 try jw.arrayElem();
 try jw.beginObject();

@@ -425,7 +425,7 @@ const Dump = struct {

 try jw.objectField("files");
 try jw.beginArray();
-for (self.file_list.span()) |file| {
+for (self.file_list.items) |file| {
 try jw.arrayElem();
 try jw.emitString(file);
 }

@@ -325,7 +325,7 @@ pub fn main() !void {
 },
 .os = .linux,
 };
-search: for (search_paths.span()) |search_path| {
+search: for (search_paths.items) |search_path| {
 var sub_path: []const []const u8 = undefined;
 switch (vendor) {
 .musl => {

@@ -416,7 +416,7 @@ pub fn main() !void {
 try contents_list.append(contents);
 }
 }
-std.sort.sort(*Contents, contents_list.span(), {}, Contents.hitCountLessThan);
+std.sort.sort(*Contents, contents_list.items, {}, Contents.hitCountLessThan);
 const best_contents = contents_list.popOrNull().?;
 if (best_contents.hit_count > 1) {
 // worth it to make it generic

@@ -374,7 +374,7 @@ pub fn main() anyerror!void {
 }
 // Some options have multiple matches. As an example, "-Wl,foo" matches both
 // "W" and "Wl,". So we sort this list in order of descending priority.
-std.sort.sort(*json.ObjectMap, all_objects.span(), {}, objectLessThan);
+std.sort.sort(*json.ObjectMap, all_objects.items, {}, objectLessThan);

 var stdout_bos = std.io.bufferedOutStream(std.io.getStdOut().outStream());
 const stdout = stdout_bos.outStream();

@@ -386,12 +386,12 @@ pub fn main() anyerror!void {
 \\
 );

-for (all_objects.span()) |obj| {
+for (all_objects.items) |obj| {
 const name = obj.get("Name").?.String;
 var pd1 = false;
 var pd2 = false;
 var pslash = false;
-for (obj.get("Prefixes").?.Array.span()) |prefix_json| {
+for (obj.get("Prefixes").?.Array.items) |prefix_json| {
 const prefix = prefix_json.String;
 if (std.mem.eql(u8, prefix, "-")) {
 pd1 = true;

@@ -502,7 +502,7 @@ const Syntax = union(enum) {

 fn objSyntax(obj: *json.ObjectMap) Syntax {
 const num_args = @intCast(u8, obj.get("NumArgs").?.Integer);
-for (obj.get("!superclasses").?.Array.span()) |superclass_json| {
+for (obj.get("!superclasses").?.Array.items) |superclass_json| {
 const superclass = superclass_json.String;
 if (std.mem.eql(u8, superclass, "Joined")) {
 return .joined;

@@ -548,7 +548,7 @@ fn objSyntax(obj: *json.ObjectMap) Syntax {
 }
 const key = obj.get("!name").?.String;
 std.debug.warn("{} (key {}) has unrecognized superclasses:\n", .{ name, key });
-for (obj.get("!superclasses").?.Array.span()) |superclass_json| {
+for (obj.get("!superclasses").?.Array.items) |superclass_json| {
 std.debug.warn(" {}\n", .{superclass_json.String});
 }
 std.process.exit(1);

@@ -225,15 +225,15 @@ pub fn main() !void {
 var list = std.ArrayList([]const u8).init(allocator);
 var it = global_fn_set.iterator();
 while (it.next()) |entry| try list.append(entry.key);
-std.sort.sort([]const u8, list.span(), {}, strCmpLessThan);
-break :blk list.span();
+std.sort.sort([]const u8, list.items, {}, strCmpLessThan);
+break :blk list.items;
 };
 const global_ver_list = blk: {
 var list = std.ArrayList([]const u8).init(allocator);
 var it = global_ver_set.iterator();
 while (it.next()) |entry| try list.append(entry.key);
-std.sort.sort([]const u8, list.span(), {}, versionLessThan);
-break :blk list.span();
+std.sort.sort([]const u8, list.items, {}, versionLessThan);
+break :blk list.items;
 };
 {
 const vers_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "vers.txt" });

@@ -266,13 +266,13 @@ pub fn main() !void {
 for (abi_lists) |*abi_list, abi_index| {
 const entry = target_functions.getEntry(@ptrToInt(abi_list)).?;
 const fn_vers_list = &entry.value.fn_vers_list;
-for (entry.value.list.span()) |*ver_fn| {
+for (entry.value.list.items) |*ver_fn| {
 const gop = try fn_vers_list.getOrPut(ver_fn.name);
 if (!gop.found_existing) {
 gop.entry.value = std.ArrayList(usize).init(allocator);
 }
 const ver_index = global_ver_set.getEntry(ver_fn.ver).?.value;
-if (std.mem.indexOfScalar(usize, gop.entry.value.span(), ver_index) == null) {
+if (std.mem.indexOfScalar(usize, gop.entry.value.items, ver_index) == null) {
 try gop.entry.value.append(ver_index);
 }
 }

@@ -299,7 +299,7 @@ pub fn main() !void {
 try abilist_txt.writeByte('\n');
 continue;
 };
-for (entry.value.span()) |ver_index, it_i| {
+for (entry.value.items) |ver_index, it_i| {
 if (it_i != 0) try abilist_txt.writeByte(' ');
 try abilist_txt.print("{d}", .{ver_index});
 }
```