Reduce use of deprecated IO types

Related: #4917
Jay Petacat 2021-01-05 20:57:18 -05:00 committed by Andrew Kelley
parent 8e9a1ac364
commit a9b505fa77
43 changed files with 159 additions and 159 deletions
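The change itself is a mechanical rename: the deprecated stream-style names (`inStream`, `outStream`, `bufferedOutStream`, `null_out_stream`, ...) are replaced with their reader/writer equivalents (`reader`, `writer`, `bufferedWriter`, `null_writer`, ...). A minimal sketch of the before/after pattern, assuming a Zig toolchain from around the time of this commit:

```zig
const std = @import("std");

pub fn main() !void {
    // Before (deprecated names):
    //   const out = std.io.getStdOut().outStream();
    //   var buffered = std.io.bufferedOutStream(out);
    // After (the names this commit switches to):
    const out = std.io.getStdOut().writer();
    var buffered = std.io.bufferedWriter(out);
    try buffered.writer().print("hello, {s}\n", .{"world"});
    try buffered.flush();
}
```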

View File

@@ -40,9 +40,9 @@ pub fn main() !void {
var out_file = try fs.cwd().createFile(out_file_name, .{});
defer out_file.close();
-const input_file_bytes = try in_file.inStream().readAllAlloc(allocator, max_doc_file_size);
-var buffered_out_stream = io.bufferedOutStream(out_file.writer());
+const input_file_bytes = try in_file.reader().readAllAlloc(allocator, max_doc_file_size);
+var buffered_writer = io.bufferedWriter(out_file.writer());
var tokenizer = Tokenizer.init(in_file_name, input_file_bytes);
var toc = try genToc(allocator, &tokenizer);
@@ -50,8 +50,8 @@ pub fn main() !void {
try fs.cwd().makePath(tmp_dir_name);
defer fs.cwd().deleteTree(tmp_dir_name) catch {};
-try genHtml(allocator, &tokenizer, &toc, buffered_out_stream.writer(), zig_exe);
-try buffered_out_stream.flush();
+try genHtml(allocator, &tokenizer, &toc, buffered_writer.writer(), zig_exe);
+try buffered_writer.flush();
}
const Token = struct {

View File

@@ -271,7 +271,7 @@ fn refreshWithHeldLock(self: *Progress) void {
pub fn log(self: *Progress, comptime format: []const u8, args: anytype) void {
const file = self.terminal orelse return;
self.refresh();
-file.outStream().print(format, args) catch {
+file.writer().print(format, args) catch {
self.terminal = null;
return;
};

View File

@@ -122,7 +122,7 @@ pub fn Queue(comptime T: type) type {
/// Dumps the contents of the queue to `stderr`.
pub fn dump(self: *Self) void {
-self.dumpToStream(std.io.getStdErr().outStream()) catch return;
+self.dumpToStream(std.io.getStdErr().writer()) catch return;
}
/// Dumps the contents of the queue to `stream`.
@@ -351,7 +351,7 @@ test "std.atomic.Queue dump" {
// Test empty stream
fbs.reset();
-try queue.dumpToStream(fbs.outStream());
+try queue.dumpToStream(fbs.writer());
expect(mem.eql(u8, buffer[0..fbs.pos],
\\head: (null)
\\tail: (null)
@@ -367,7 +367,7 @@ test "std.atomic.Queue dump" {
queue.put(&node_0);
fbs.reset();
-try queue.dumpToStream(fbs.outStream());
+try queue.dumpToStream(fbs.writer());
var expected = try std.fmt.bufPrint(expected_buffer[0..],
\\head: 0x{x}=1
@@ -387,7 +387,7 @@ test "std.atomic.Queue dump" {
queue.put(&node_1);
fbs.reset();
-try queue.dumpToStream(fbs.outStream());
+try queue.dumpToStream(fbs.writer());
expected = try std.fmt.bufPrint(expected_buffer[0..],
\\head: 0x{x}=1

View File

@@ -1042,7 +1042,7 @@ pub const Builder = struct {
try child.spawn();
-const stdout = try child.stdout.?.inStream().readAllAlloc(self.allocator, max_output_size);
+const stdout = try child.stdout.?.reader().readAllAlloc(self.allocator, max_output_size);
errdefer self.allocator.free(stdout);
const term = try child.wait();
@@ -1849,7 +1849,7 @@ pub const LibExeObjStep = struct {
}
pub fn addBuildOption(self: *LibExeObjStep, comptime T: type, name: []const u8, value: T) void {
-const out = self.build_options_contents.outStream();
+const out = self.build_options_contents.writer();
switch (T) {
[]const []const u8 => {
out.print("pub const {z}: []const []const u8 = &[_][]const u8{{\n", .{name}) catch unreachable;
@@ -2295,16 +2295,16 @@ pub const LibExeObjStep = struct {
} else {
var mcpu_buffer = std.ArrayList(u8).init(builder.allocator);
-try mcpu_buffer.outStream().print("-mcpu={s}", .{cross.cpu.model.name});
+try mcpu_buffer.writer().print("-mcpu={s}", .{cross.cpu.model.name});
for (all_features) |feature, i_usize| {
const i = @intCast(std.Target.Cpu.Feature.Set.Index, i_usize);
const in_cpu_set = populated_cpu_features.isEnabled(i);
const in_actual_set = cross.cpu.features.isEnabled(i);
if (in_cpu_set and !in_actual_set) {
-try mcpu_buffer.outStream().print("-{s}", .{feature.name});
+try mcpu_buffer.writer().print("-{s}", .{feature.name});
} else if (!in_cpu_set and in_actual_set) {
-try mcpu_buffer.outStream().print("+{s}", .{feature.name});
+try mcpu_buffer.writer().print("+{s}", .{feature.name});
}
}

View File

@@ -200,7 +200,7 @@ pub const RunStep = struct {
switch (self.stdout_action) {
.expect_exact, .expect_matches => {
-stdout = child.stdout.?.inStream().readAllAlloc(self.builder.allocator, max_stdout_size) catch unreachable;
+stdout = child.stdout.?.reader().readAllAlloc(self.builder.allocator, max_stdout_size) catch unreachable;
},
.inherit, .ignore => {},
}
@@ -210,7 +210,7 @@ pub const RunStep = struct {
switch (self.stderr_action) {
.expect_exact, .expect_matches => {
-stderr = child.stderr.?.inStream().readAllAlloc(self.builder.allocator, max_stdout_size) catch unreachable;
+stderr = child.stderr.?.reader().readAllAlloc(self.builder.allocator, max_stdout_size) catch unreachable;
},
.inherit, .ignore => {},
}

View File

@@ -922,7 +922,7 @@ fn writeIntFd(fd: i32, value: ErrInt) !void {
.capable_io_mode = .blocking,
.intended_io_mode = .blocking,
};
-file.outStream().writeIntNative(u64, @intCast(u64, value)) catch return error.SystemResources;
+file.writer().writeIntNative(u64, @intCast(u64, value)) catch return error.SystemResources;
}
fn readIntFd(fd: i32) !ErrInt {

View File

@@ -127,7 +127,7 @@ pub const Coff = struct {
pub fn loadHeader(self: *Coff) !void {
const pe_pointer_offset = 0x3C;
-const in = self.in_file.inStream();
+const in = self.in_file.reader();
var magic: [2]u8 = undefined;
try in.readNoEof(magic[0..]);
@@ -163,7 +163,7 @@ pub const Coff = struct {
}
fn loadOptionalHeader(self: *Coff) !void {
-const in = self.in_file.inStream();
+const in = self.in_file.reader();
self.pe_header.magic = try in.readIntLittle(u16);
// For now we're only interested in finding the reference to the .pdb,
// so we'll skip most of this header, which size is different in 32
@@ -206,7 +206,7 @@ pub const Coff = struct {
const debug_dir = &self.pe_header.data_directory[DEBUG_DIRECTORY];
const file_offset = debug_dir.virtual_address - header.virtual_address + header.pointer_to_raw_data;
-const in = self.in_file.inStream();
+const in = self.in_file.reader();
try self.in_file.seekTo(file_offset);
// Find the correct DebugDirectoryEntry, and where its data is stored.
@@ -257,7 +257,7 @@ pub const Coff = struct {
try self.sections.ensureCapacity(self.coff_header.number_of_sections);
-const in = self.in_file.inStream();
+const in = self.in_file.reader();
var name: [8]u8 = undefined;

View File

@@ -314,7 +314,7 @@ fn mode(comptime x: comptime_int) comptime_int {
}
pub fn main() !void {
-const stdout = std.io.getStdOut().outStream();
+const stdout = std.io.getStdOut().writer();
var buffer: [1024]u8 = undefined;
var fixed = std.heap.FixedBufferAllocator.init(buffer[0..]);

View File

@@ -517,15 +517,15 @@ fn populateModule(di: *ModuleDebugInfo, mod: *Module) !void {
const modi = di.pdb.getStreamById(mod.mod_info.ModuleSymStream) orelse return error.MissingDebugInfo;
-const signature = try modi.inStream().readIntLittle(u32);
+const signature = try modi.reader().readIntLittle(u32);
if (signature != 4)
return error.InvalidDebugInfo;
mod.symbols = try allocator.alloc(u8, mod.mod_info.SymByteSize - 4);
-try modi.inStream().readNoEof(mod.symbols);
+try modi.reader().readNoEof(mod.symbols);
mod.subsect_info = try allocator.alloc(u8, mod.mod_info.C13ByteSize);
-try modi.inStream().readNoEof(mod.subsect_info);
+try modi.reader().readNoEof(mod.subsect_info);
var sect_offset: usize = 0;
var skip_len: usize = undefined;
@@ -704,11 +704,11 @@ fn readCoffDebugInfo(allocator: *mem.Allocator, coff_file: File) !ModuleDebugInf
try di.pdb.openFile(di.coff, path);
var pdb_stream = di.pdb.getStream(pdb.StreamType.Pdb) orelse return error.InvalidDebugInfo;
-const version = try pdb_stream.inStream().readIntLittle(u32);
-const signature = try pdb_stream.inStream().readIntLittle(u32);
-const age = try pdb_stream.inStream().readIntLittle(u32);
+const version = try pdb_stream.reader().readIntLittle(u32);
+const signature = try pdb_stream.reader().readIntLittle(u32);
+const age = try pdb_stream.reader().readIntLittle(u32);
var guid: [16]u8 = undefined;
-try pdb_stream.inStream().readNoEof(&guid);
+try pdb_stream.reader().readNoEof(&guid);
if (version != 20000404) // VC70, only value observed by LLVM team
return error.UnknownPDBVersion;
if (!mem.eql(u8, &di.coff.guid, &guid) or di.coff.age != age)
@@ -716,9 +716,9 @@ fn readCoffDebugInfo(allocator: *mem.Allocator, coff_file: File) !ModuleDebugInf
// We validated the executable and pdb match.
const string_table_index = str_tab_index: {
-const name_bytes_len = try pdb_stream.inStream().readIntLittle(u32);
+const name_bytes_len = try pdb_stream.reader().readIntLittle(u32);
const name_bytes = try allocator.alloc(u8, name_bytes_len);
-try pdb_stream.inStream().readNoEof(name_bytes);
+try pdb_stream.reader().readNoEof(name_bytes);
const HashTableHeader = packed struct {
Size: u32,
@@ -728,17 +728,17 @@ fn readCoffDebugInfo(allocator: *mem.Allocator, coff_file: File) !ModuleDebugInf
return cap * 2 / 3 + 1;
}
};
-const hash_tbl_hdr = try pdb_stream.inStream().readStruct(HashTableHeader);
+const hash_tbl_hdr = try pdb_stream.reader().readStruct(HashTableHeader);
if (hash_tbl_hdr.Capacity == 0)
return error.InvalidDebugInfo;
if (hash_tbl_hdr.Size > HashTableHeader.maxLoad(hash_tbl_hdr.Capacity))
return error.InvalidDebugInfo;
-const present = try readSparseBitVector(&pdb_stream.inStream(), allocator);
+const present = try readSparseBitVector(&pdb_stream.reader(), allocator);
if (present.len != hash_tbl_hdr.Size)
return error.InvalidDebugInfo;
-const deleted = try readSparseBitVector(&pdb_stream.inStream(), allocator);
+const deleted = try readSparseBitVector(&pdb_stream.reader(), allocator);
const Bucket = struct {
first: u32,
@@ -746,8 +746,8 @@ fn readCoffDebugInfo(allocator: *mem.Allocator, coff_file: File) !ModuleDebugInf
};
const bucket_list = try allocator.alloc(Bucket, present.len);
for (present) |_| {
-const name_offset = try pdb_stream.inStream().readIntLittle(u32);
-const name_index = try pdb_stream.inStream().readIntLittle(u32);
+const name_offset = try pdb_stream.reader().readIntLittle(u32);
+const name_index = try pdb_stream.reader().readIntLittle(u32);
const name = mem.spanZ(std.meta.assumeSentinel(name_bytes.ptr + name_offset, 0));
if (mem.eql(u8, name, "/names")) {
break :str_tab_index name_index;
@@ -762,7 +762,7 @@ fn readCoffDebugInfo(allocator: *mem.Allocator, coff_file: File) !ModuleDebugInf
const dbi = di.pdb.dbi;
// Dbi Header
-const dbi_stream_header = try dbi.inStream().readStruct(pdb.DbiStreamHeader);
+const dbi_stream_header = try dbi.reader().readStruct(pdb.DbiStreamHeader);
if (dbi_stream_header.VersionHeader != 19990903) // V70, only value observed by LLVM team
return error.UnknownPDBVersion;
if (dbi_stream_header.Age != age)
@@ -776,7 +776,7 @@ fn readCoffDebugInfo(allocator: *mem.Allocator, coff_file: File) !ModuleDebugInf
// Module Info Substream
var mod_info_offset: usize = 0;
while (mod_info_offset != mod_info_size) {
-const mod_info = try dbi.inStream().readStruct(pdb.ModInfo);
+const mod_info = try dbi.reader().readStruct(pdb.ModInfo);
var this_record_len: usize = @sizeOf(pdb.ModInfo);
const module_name = try dbi.readNullTermString(allocator);
@@ -814,14 +814,14 @@ fn readCoffDebugInfo(allocator: *mem.Allocator, coff_file: File) !ModuleDebugInf
var sect_contribs = ArrayList(pdb.SectionContribEntry).init(allocator);
var sect_cont_offset: usize = 0;
if (section_contrib_size != 0) {
-const ver = @intToEnum(pdb.SectionContrSubstreamVersion, try dbi.inStream().readIntLittle(u32));
+const ver = @intToEnum(pdb.SectionContrSubstreamVersion, try dbi.reader().readIntLittle(u32));
if (ver != pdb.SectionContrSubstreamVersion.Ver60)
return error.InvalidDebugInfo;
sect_cont_offset += @sizeOf(u32);
}
while (sect_cont_offset != section_contrib_size) {
const entry = try sect_contribs.addOne();
-entry.* = try dbi.inStream().readStruct(pdb.SectionContribEntry);
+entry.* = try dbi.reader().readStruct(pdb.SectionContribEntry);
sect_cont_offset += @sizeOf(pdb.SectionContribEntry);
if (sect_cont_offset > section_contrib_size)

View File

@@ -408,7 +408,7 @@ pub const DwarfInfo = struct {
fn scanAllFunctions(di: *DwarfInfo) !void {
var stream = io.fixedBufferStream(di.debug_info);
-const in = &stream.inStream();
+const in = &stream.reader();
const seekable = &stream.seekableStream();
var this_unit_offset: u64 = 0;
@@ -512,7 +512,7 @@ pub const DwarfInfo = struct {
fn scanAllCompileUnits(di: *DwarfInfo) !void {
var stream = io.fixedBufferStream(di.debug_info);
-const in = &stream.inStream();
+const in = &stream.reader();
const seekable = &stream.seekableStream();
var this_unit_offset: u64 = 0;
@@ -585,7 +585,7 @@ pub const DwarfInfo = struct {
if (di.debug_ranges) |debug_ranges| {
if (compile_unit.die.getAttrSecOffset(AT_ranges)) |ranges_offset| {
var stream = io.fixedBufferStream(debug_ranges);
-const in = &stream.inStream();
+const in = &stream.reader();
const seekable = &stream.seekableStream();
// All the addresses in the list are relative to the value
@@ -640,7 +640,7 @@ pub const DwarfInfo = struct {
fn parseAbbrevTable(di: *DwarfInfo, offset: u64) !AbbrevTable {
var stream = io.fixedBufferStream(di.debug_abbrev);
-const in = &stream.inStream();
+const in = &stream.reader();
const seekable = &stream.seekableStream();
try seekable.seekTo(offset);
@@ -691,7 +691,7 @@ pub const DwarfInfo = struct {
pub fn getLineNumberInfo(di: *DwarfInfo, compile_unit: CompileUnit, target_address: usize) !debug.LineInfo {
var stream = io.fixedBufferStream(di.debug_line);
-const in = &stream.inStream();
+const in = &stream.reader();
const seekable = &stream.seekableStream();
const compile_unit_cwd = try compile_unit.die.getAttrString(di, AT_comp_dir);

View File

@@ -89,7 +89,7 @@ test "LoggingAllocator" {
var allocator_buf: [10]u8 = undefined;
var fixedBufferAllocator = std.mem.validationWrap(std.heap.FixedBufferAllocator.init(&allocator_buf));
-const allocator = &loggingAllocator(&fixedBufferAllocator.allocator, fbs.outStream()).allocator;
+const allocator = &loggingAllocator(&fixedBufferAllocator.allocator, fbs.writer()).allocator;
var a = try allocator.alloc(u8, 10);
a = allocator.shrink(a, 5);

View File

@@ -38,7 +38,7 @@ pub const BufferedAtomicFile = struct {
self.atomic_file = try dir.atomicFile(dest_path, atomic_file_options);
errdefer self.atomic_file.deinit();
-self.file_stream = self.atomic_file.file.outStream();
+self.file_stream = self.atomic_file.file.writer();
self.buffered_stream = .{ .unbuffered_writer = self.file_stream };
return self;
}

View File

@@ -45,7 +45,7 @@ pub fn FixedBufferStream(comptime Buffer: type) type {
return .{ .context = self };
}
-/// Deprecated: use `inStream`
+/// Deprecated: use `reader`
pub fn inStream(self: *Self) InStream {
return .{ .context = self };
}

View File

@@ -30,8 +30,8 @@ test "write a file, read it, then delete it" {
var file = try tmp.dir.createFile(tmp_file_name, .{});
defer file.close();
-var buf_stream = io.bufferedOutStream(file.outStream());
-const st = buf_stream.outStream();
+var buf_stream = io.bufferedWriter(file.writer());
+const st = buf_stream.writer();
try st.print("begin", .{});
try st.writeAll(data[0..]);
try st.print("end", .{});
@@ -72,7 +72,7 @@ test "BitStreams with File Stream" {
var file = try tmp.dir.createFile(tmp_file_name, .{});
defer file.close();
-var bit_stream = io.bitOutStream(builtin.endian, file.outStream());
+var bit_stream = io.bitWriter(builtin.endian, file.writer());
try bit_stream.writeBits(@as(u2, 1), 1);
try bit_stream.writeBits(@as(u5, 2), 2);

View File

@@ -1323,31 +1323,31 @@ test "Value.jsonStringify" {
{
var buffer: [10]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buffer);
-try @as(Value, .Null).jsonStringify(.{}, fbs.outStream());
+try @as(Value, .Null).jsonStringify(.{}, fbs.writer());
testing.expectEqualSlices(u8, fbs.getWritten(), "null");
}
{
var buffer: [10]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buffer);
-try (Value{ .Bool = true }).jsonStringify(.{}, fbs.outStream());
+try (Value{ .Bool = true }).jsonStringify(.{}, fbs.writer());
testing.expectEqualSlices(u8, fbs.getWritten(), "true");
}
{
var buffer: [10]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buffer);
-try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.outStream());
+try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.writer());
testing.expectEqualSlices(u8, fbs.getWritten(), "42");
}
{
var buffer: [10]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buffer);
-try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.outStream());
+try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.writer());
testing.expectEqualSlices(u8, fbs.getWritten(), "4.2e+01");
}
{
var buffer: [10]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buffer);
-try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.outStream());
+try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.writer());
testing.expectEqualSlices(u8, fbs.getWritten(), "\"weeee\"");
}
{
@@ -1360,7 +1360,7 @@ test "Value.jsonStringify" {
};
try (Value{
.Array = Array.fromOwnedSlice(undefined, &vals),
-}).jsonStringify(.{}, fbs.outStream());
+}).jsonStringify(.{}, fbs.writer());
testing.expectEqualSlices(u8, fbs.getWritten(), "[1,2,3]");
}
{
@@ -1369,7 +1369,7 @@ test "Value.jsonStringify" {
var obj = ObjectMap.init(testing.allocator);
defer obj.deinit();
try obj.putNoClobber("a", .{ .String = "b" });
-try (Value{ .Object = obj }).jsonStringify(.{}, fbs.outStream());
+try (Value{ .Object = obj }).jsonStringify(.{}, fbs.writer());
testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
}
}
@@ -2223,7 +2223,7 @@ test "write json then parse it" {
var out_buffer: [1000]u8 = undefined;
var fixed_buffer_stream = std.io.fixedBufferStream(&out_buffer);
-const out_stream = fixed_buffer_stream.outStream();
+const out_stream = fixed_buffer_stream.writer();
var jw = writeStream(out_stream, 4);
try jw.beginObject();

View File

@@ -238,7 +238,7 @@ pub fn writeStream(
test "json write stream" {
var out_buf: [1024]u8 = undefined;
var slice_stream = std.io.fixedBufferStream(&out_buf);
-const out = slice_stream.outStream();
+const out = slice_stream.writer();
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
defer arena_allocator.deinit();

View File

@@ -1106,7 +1106,7 @@ fn linuxLookupNameFromHosts(
};
defer file.close();
-const stream = std.io.bufferedInStream(file.inStream()).inStream();
+const stream = std.io.bufferedReader(file.reader()).reader();
var line_buf: [512]u8 = undefined;
while (stream.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) {
error.StreamTooLong => blk: {
@@ -1304,7 +1304,7 @@ fn getResolvConf(allocator: *mem.Allocator, rc: *ResolvConf) !void {
};
defer file.close();
-const stream = std.io.bufferedInStream(file.inStream()).inStream();
+const stream = std.io.bufferedReader(file.reader()).reader();
var line_buf: [512]u8 = undefined;
while (stream.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) {
error.StreamTooLong => blk: {

View File

@@ -249,6 +249,6 @@ fn testServer(server: *net.StreamServer) anyerror!void {
var client = try server.accept();
-const stream = client.file.outStream();
+const stream = client.file.writer();
try stream.print("hello from server\n", .{});
}

View File

@@ -191,7 +191,7 @@ fn getRandomBytesDevURandom(buf: []u8) !void {
.capable_io_mode = .blocking,
.intended_io_mode = .blocking,
};
-const stream = file.inStream();
+const stream = file.reader();
stream.readNoEof(buf) catch return error.Unexpected;
}

View File

@@ -475,7 +475,7 @@ test "mmap" {
const file = try tmp.dir.createFile(test_out_file, .{});
defer file.close();
-const stream = file.outStream();
+const stream = file.writer();
var i: u32 = 0;
while (i < alloc_size / @sizeOf(u32)) : (i += 1) {
@@ -499,7 +499,7 @@ test "mmap" {
defer os.munmap(data);
var mem_stream = io.fixedBufferStream(data);
-const stream = mem_stream.inStream();
+const stream = mem_stream.reader();
var i: u32 = 0;
while (i < alloc_size / @sizeOf(u32)) : (i += 1) {
@@ -523,7 +523,7 @@ test "mmap" {
defer os.munmap(data);
var mem_stream = io.fixedBufferStream(data);
-const stream = mem_stream.inStream();
+const stream = mem_stream.reader();
var i: u32 = alloc_size / 2 / @sizeOf(u32);
while (i < alloc_size / @sizeOf(u32)) : (i += 1) {

View File

@@ -57,8 +57,8 @@ pub fn main() !void {
var targets = ArrayList([]const u8).init(allocator);
-const stderr_stream = io.getStdErr().outStream();
-const stdout_stream = io.getStdOut().outStream();
+const stderr_stream = io.getStdErr().writer();
+const stdout_stream = io.getStdOut().writer();
while (nextArg(args, &arg_idx)) |arg| {
if (mem.startsWith(u8, arg, "-D")) {

View File

@@ -45,7 +45,7 @@ fn benchmarkCodepointCount(buf: []const u8) !ResultCount {
}
pub fn main() !void {
-const stdout = std.io.getStdOut().outStream();
+const stdout = std.io.getStdOut().writer();
const args = try std.process.argsAlloc(std.heap.page_allocator);

View File

@@ -519,29 +519,29 @@ pub const CrossTarget = struct {
var result = std.ArrayList(u8).init(allocator);
defer result.deinit();
-try result.outStream().print("{s}-{s}", .{ arch_name, os_name });
+try result.writer().print("{s}-{s}", .{ arch_name, os_name });
// The zig target syntax does not allow specifying a max os version with no min, so
// if either are present, we need the min.
if (self.os_version_min != null or self.os_version_max != null) {
switch (self.getOsVersionMin()) {
.none => {},
-.semver => |v| try result.outStream().print(".{}", .{v}),
-.windows => |v| try result.outStream().print("{s}", .{v}),
+.semver => |v| try result.writer().print(".{}", .{v}),
+.windows => |v| try result.writer().print("{s}", .{v}),
}
}
if (self.os_version_max) |max| {
switch (max) {
.none => {},
-.semver => |v| try result.outStream().print("...{}", .{v}),
-.windows => |v| try result.outStream().print("..{s}", .{v}),
+.semver => |v| try result.writer().print("...{}", .{v}),
+.windows => |v| try result.writer().print("..{s}", .{v}),
}
}
if (self.glibc_version) |v| {
-try result.outStream().print("-{s}.{}", .{ @tagName(self.getAbi()), v });
+try result.writer().print("-{s}.{}", .{ @tagName(self.getAbi()), v });
} else if (self.abi) |abi| {
-try result.outStream().print("-{s}", .{@tagName(abi)});
+try result.writer().print("-{s}", .{@tagName(abi)});
}
return result.toOwnedSlice();

View File

@@ -3734,7 +3734,7 @@ const maxInt = std.math.maxInt;
var fixed_buffer_mem: [100 * 1024]u8 = undefined;
fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *bool) ![]u8 {
-const stderr = io.getStdErr().outStream();
+const stderr = io.getStdErr().writer();
const tree = try std.zig.parse(allocator, source);
defer tree.deinit();
@@ -3767,8 +3767,8 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b
var buffer = std.ArrayList(u8).init(allocator);
errdefer buffer.deinit();
-const outStream = buffer.outStream();
-anything_changed.* = try std.zig.render(allocator, outStream, tree);
+const writer = buffer.writer();
+anything_changed.* = try std.zig.render(allocator, writer, tree);
return buffer.toOwnedSlice();
}
fn testTransform(source: []const u8, expected_source: []const u8) !void {

View File

@@ -29,7 +29,7 @@ pub fn main() !void {
const mb_per_sec = bytes_per_sec / (1024 * 1024);
var stdout_file = std.io.getStdOut();
-const stdout = stdout_file.outStream();
+const stdout = stdout_file.writer();
try stdout.print("{:.3} MiB/s, {} KiB used \n", .{ mb_per_sec, memory_used / 1024 });
}

View File

@@ -790,7 +790,7 @@ fn renderExpression(
const section_exprs = row_exprs[0..section_end];
// Null stream for counting the printed length of each expression
-var line_find_stream = std.io.findByteOutStream('\n', std.io.null_out_stream);
+var line_find_stream = std.io.findByteOutStream('\n', std.io.null_writer);
var counting_stream = std.io.countingOutStream(line_find_stream.writer());
var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, counting_stream.writer());
@@ -954,7 +954,7 @@ fn renderExpression(
const expr_outputs_one_line = blk: {
// render field expressions until a LF is found
for (field_inits) |field_init| {
-var find_stream = std.io.findByteOutStream('\n', std.io.null_out_stream);
+var find_stream = std.io.findByteOutStream('\n', std.io.null_writer);
var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, find_stream.writer());
try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None);

View File

@@ -285,7 +285,7 @@ pub const Manifest = struct {
};
}
-const file_contents = try self.manifest_file.?.inStream().readAllAlloc(self.cache.gpa, manifest_file_size_max);
+const file_contents = try self.manifest_file.?.reader().readAllAlloc(self.cache.gpa, manifest_file_size_max);
defer self.cache.gpa.free(file_contents);
const input_file_count = self.files.items.len;

View File

@@ -1820,7 +1820,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
var out_zig_file = try o_dir.createFile(cimport_zig_basename, .{});
defer out_zig_file.close();
-var bos = std.io.bufferedOutStream(out_zig_file.writer());
+var bos = std.io.bufferedWriter(out_zig_file.writer());
_ = try std.zig.render(comp.gpa, bos.writer(), tree);
try bos.flush();
@@ -2750,7 +2750,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
switch (target.os.getVersionRange()) {
.none => try buffer.appendSlice(" .none = {} }\n"),
-.semver => |semver| try buffer.outStream().print(
+.semver => |semver| try buffer.writer().print(
\\ .semver = .{{
\\ .min = .{{
\\ .major = {},
@@ -2773,7 +2773,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
semver.max.minor,
semver.max.patch,
}),
-.linux => |linux| try buffer.outStream().print(
+.linux => |linux| try buffer.writer().print(
\\ .linux = .{{
\\ .range = .{{
\\ .min = .{{
@@ -2807,7 +2807,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
linux.glibc.minor,
linux.glibc.patch,
}),
-.windows => |windows| try buffer.outStream().print(
+.windows => |windows| try buffer.writer().print(
\\ .windows = .{{
\\ .min = {s},
\\ .max = {s},

View File

@@ -910,7 +910,7 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void {
},
else => {
try buffer.appendSlice("ERROR: ");
-try token.printError(buffer.outStream());
+try token.printError(buffer.writer());
break;
},
}

View File

@@ -1649,7 +1649,7 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree {
var msg = std.ArrayList(u8).init(self.gpa);
defer msg.deinit();
-try parse_err.render(tree.token_ids, msg.outStream());
+try parse_err.render(tree.token_ids, msg.writer());
const err_msg = try self.gpa.create(Compilation.ErrorMsg);
err_msg.* = .{
.msg = msg.toOwnedSlice(),

View File

@@ -200,7 +200,7 @@ pub const LLVMIRModule = struct {
if (llvm.Target.getFromTriple(llvm_target_triple.ptr, &target, &error_message)) {
defer llvm.disposeMessage(error_message);
-const stderr = std.io.getStdErr().outStream();
+const stderr = std.io.getStdErr().writer();
try stderr.print(
\\Zig is expecting LLVM to understand this target: '{s}'
\\However LLVM responded with: "{s}"
@@ -268,7 +268,7 @@ pub const LLVMIRModule = struct {
const dump = self.llvm_module.printToString();
defer llvm.disposeMessage(dump);
-const stderr = std.io.getStdErr().outStream();
+const stderr = std.io.getStdErr().writer();
try stderr.writeAll(std.mem.spanZ(dump));
}
@@ -278,7 +278,7 @@ pub const LLVMIRModule = struct {
defer llvm.disposeMessage(error_message);
if (self.llvm_module.verify(.ReturnStatus, &error_message)) {
-const stderr = std.io.getStdErr().outStream();
+const stderr = std.io.getStdErr().writer();
try stderr.print("broken LLVM module found: {s}\nThis is a bug in the Zig compiler.", .{error_message});
return error.BrokenLLVMModule;
}
@@ -296,7 +296,7 @@ pub const LLVMIRModule = struct {
)) {
defer llvm.disposeMessage(error_message);
-const stderr = std.io.getStdErr().outStream();
+const stderr = std.io.getStdErr().writer();
try stderr.print("LLVM failed to emit file: {s}\n", .{error_message});
return error.FailedToEmit;
}

View File

@@ -338,7 +338,7 @@ pub const LibCInstallation = struct {
for (searches) |search| {
result_buf.shrinkAndFree(0);
-try result_buf.outStream().print("{s}\\Include\\{s}\\ucrt", .{ search.path, search.version });
+try result_buf.writer().print("{s}\\Include\\{s}\\ucrt", .{ search.path, search.version });
var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
error.FileNotFound,
@@ -384,7 +384,7 @@ pub const LibCInstallation = struct {
for (searches) |search| {
result_buf.shrinkAndFree(0);
-try result_buf.outStream().print("{s}\\Lib\\{s}\\ucrt\\{s}", .{ search.path, search.version, arch_sub_dir });
+try result_buf.writer().print("{s}\\Lib\\{s}\\ucrt\\{s}", .{ search.path, search.version, arch_sub_dir });
var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
error.FileNotFound,
@@ -438,7 +438,7 @@ pub const LibCInstallation = struct {
for (searches) |search| {
result_buf.shrinkAndFree(0);
-const stream = result_buf.outStream();
+const stream = result_buf.writer();
try stream.print("{s}\\Lib\\{s}\\um\\{s}", .{ search.path, search.version, arch_sub_dir });
var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {

View File

@@ -196,7 +196,7 @@ pub fn mainArgs(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
return cmdInit(gpa, arena, cmd_args, .Lib);
} else if (mem.eql(u8, cmd, "targets")) {
const info = try detectNativeTargetInfo(arena, .{});
-const stdout = io.getStdOut().outStream();
+const stdout = io.getStdOut().writer();
return @import("print_targets.zig").cmdTargets(arena, cmd_args, stdout, info.target);
} else if (mem.eql(u8, cmd, "version")) {
try std.io.getStdOut().writeAll(build_options.version ++ "\n");
@@ -1944,8 +1944,8 @@ fn buildOutputType(
}
}
-const stdin = std.io.getStdIn().inStream();
-const stderr = std.io.getStdErr().outStream();
+const stdin = std.io.getStdIn().reader();
+const stderr = std.io.getStdErr().writer();
var repl_buf: [1024]u8 = undefined;
while (watch) {
@@ -2114,9 +2114,9 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
var zig_file = try o_dir.createFile(translated_zig_basename, .{});
defer zig_file.close();
-var bos = io.bufferedOutStream(zig_file.writer());
-_ = try std.zig.render(comp.gpa, bos.writer(), tree);
-try bos.flush();
+var bw = io.bufferedWriter(zig_file.writer());
+_ = try std.zig.render(comp.gpa, bw.writer(), tree);
+try bw.flush();
man.writeManifest() catch |err| warn("failed to write cache manifest: {s}", .{@errorName(err)});
@@ -2187,9 +2187,9 @@ pub fn cmdLibC(gpa: *Allocator, args: []const []const u8) !void {
};
defer libc.deinit(gpa);
-var bos = io.bufferedOutStream(io.getStdOut().writer());
-try libc.render(bos.writer());
-try bos.flush();
+var bw = io.bufferedWriter(io.getStdOut().writer());
+try libc.render(bw.writer());
+try bw.flush();
}
}
@@ -2570,7 +2570,7 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
-const stdout = io.getStdOut().outStream();
+const stdout = io.getStdOut().writer();
try stdout.writeAll(usage_fmt);
return cleanExit();
} else if (mem.eql(u8, arg, "--color")) {
@@ -2600,7 +2600,7 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
fatal("cannot use --stdin with positional arguments", .{});
}
-const stdin = io.getStdIn().inStream();
+const stdin = io.getStdIn().reader();
const source_code = try stdin.readAllAlloc(gpa, max_src_size);
defer gpa.free(source_code);
@@ -2617,14 +2617,14 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
process.exit(1);
}
if (check_flag) {
-const anything_changed = try std.zig.render(gpa, io.null_out_stream, tree);
+const anything_changed = try std.zig.render(gpa, io.null_writer, tree);
const code = if (anything_changed) @as(u8, 1) else @as(u8, 0);
process.exit(code);
}
-var bos = io.bufferedOutStream(io.getStdOut().writer());
-_ = try std.zig.render(gpa, bos.writer(), tree);
-try bos.flush();
+var bw = io.bufferedWriter(io.getStdOut().writer());
+_ = try std.zig.render(gpa, bw.writer(), tree);
+try bw.flush();
return;
}
@@ -2774,7 +2774,7 @@ fn fmtPathFile(
}
if (check_mode) {
-const anything_changed = try std.zig.render(fmt.gpa, io.null_out_stream, tree);
+const anything_changed = try std.zig.render(fmt.gpa, io.null_writer, tree);
if (anything_changed) {
const stdout = io.getStdOut().writer();
try stdout.print("{s}\n", .{file_path});
@@ -2823,11 +2823,11 @@ fn printErrMsgToFile(
var text_buf = std.ArrayList(u8).init(gpa);
defer text_buf.deinit();
-const out_stream = text_buf.outStream();
-try parse_error.render(tree.token_ids, out_stream);
+const writer = text_buf.writer();
+try parse_error.render(tree.token_ids, writer);
const text = text_buf.items;
-const stream = file.outStream();
+const stream = file.writer();
try stream.print("{s}:{d}:{d}: error: {s}\n", .{ path, start_loc.line + 1, start_loc.column + 1, text });
if (!color_on) return;

View File

@@ -20,10 +20,10 @@ pub fn cmdEnv(gpa: *Allocator, args: []const []const u8, stdout: std.fs.File.Wri
const global_cache_dir = try introspect.resolveGlobalCacheDir(gpa);
defer gpa.free(global_cache_dir);
-var bos = std.io.bufferedOutStream(stdout);
-const bos_stream = bos.outStream();
-var jws = std.json.WriteStream(@TypeOf(bos_stream), 6).init(bos_stream);
+var bw = std.io.bufferedWriter(stdout);
+const w = bw.writer();
+var jws = std.json.WriteStream(@TypeOf(w), 6).init(w);
try jws.beginObject();
try jws.objectField("zig_exe");
@@ -42,6 +42,6 @@ pub fn cmdEnv(gpa: *Allocator, args: []const []const u8, stdout: std.fs.File.Wri
try jws.emitString(build_options.version);
try jws.endObject();
-try bos_stream.writeByte('\n');
-try bos.flush();
+try w.writeByte('\n');
+try bw.flush();
}

View File

@@ -26,9 +26,9 @@ pub fn cmdTargets(
const glibc_abi = try glibc.loadMetaData(allocator, zig_lib_directory.handle);
defer glibc_abi.destroy(allocator);
-var bos = io.bufferedOutStream(stdout);
-const bos_stream = bos.outStream();
-var jws = std.json.WriteStream(@TypeOf(bos_stream), 6).init(bos_stream);
+var bw = io.bufferedWriter(stdout);
+const w = bw.writer();
+var jws = std.json.WriteStream(@TypeOf(w), 6).init(w);
try jws.beginObject();
@@ -156,6 +156,6 @@ pub fn cmdTargets(
try jws.endObject();
-try bos_stream.writeByte('\n');
-return bos.flush();
+try w.writeByte('\n');
+return bw.flush();
}

View File

@@ -738,7 +738,7 @@ pub const TestContext = struct {
write_node.activate();
var out_zir = std.ArrayList(u8).init(allocator);
defer out_zir.deinit();
-try new_zir_module.writeToStream(allocator, out_zir.outStream());
+try new_zir_module.writeToStream(allocator, out_zir.writer());
write_node.end();
var test_node = update_node.start("assert", 0);

View File

@@ -5268,7 +5268,7 @@ fn appendTokenFmt(c: *Context, token_id: Token.Id, comptime format: []const u8,
try c.token_locs.ensureCapacity(c.gpa, c.token_locs.items.len + 1);
const start_index = c.source_buffer.items.len;
-try c.source_buffer.outStream().print(format ++ " ", args);
+try c.source_buffer.writer().print(format ++ " ", args);
c.token_ids.appendAssumeCapacity(token_id);
c.token_locs.appendAssumeCapacity(.{

View File

@@ -1116,7 +1116,7 @@ pub const Module = struct {
/// This is a debugging utility for rendering the tree to stderr.
pub fn dump(self: Module) void {
-self.writeToStream(std.heap.page_allocator, std.io.getStdErr().outStream()) catch {};
+self.writeToStream(std.heap.page_allocator, std.io.getStdErr().writer()) catch {};
}
const DeclAndIndex = struct {
@@ -3254,7 +3254,7 @@ pub fn dumpZir(allocator: *Allocator, kind: []const u8, decl_name: [*:0]const u8
try write.inst_table.ensureCapacity(@intCast(u32, instructions.len));
-const stderr = std.io.getStdErr().outStream();
+const stderr = std.io.getStdErr().writer();
try stderr.print("{s} {s} {{ // unanalyzed\n", .{ kind, decl_name });
for (instructions) |inst| {

View File

@@ -22,7 +22,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\
\\pub fn main() void {
\\ privateFunction();
-\\ const stdout = getStdOut().outStream();
+\\ const stdout = getStdOut().writer();
\\ stdout.print("OK 2\n", .{}) catch unreachable;
\\}
\\
@@ -37,7 +37,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\// purposefully conflicting function with main.zig
\\// but it's private so it should be OK
\\fn privateFunction() void {
-\\ const stdout = getStdOut().outStream();
+\\ const stdout = getStdOut().writer();
\\ stdout.print("OK 1\n", .{}) catch unreachable;
\\}
\\
@@ -63,7 +63,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
tc.addSourceFile("foo.zig",
\\usingnamespace @import("std").io;
\\pub fn foo_function() void {
-\\ const stdout = getStdOut().outStream();
+\\ const stdout = getStdOut().writer();
\\ stdout.print("OK\n", .{}) catch unreachable;
\\}
);
@@ -74,7 +74,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\
\\pub fn bar_function() void {
\\ if (foo_function()) {
-\\ const stdout = getStdOut().outStream();
+\\ const stdout = getStdOut().writer();
\\ stdout.print("OK\n", .{}) catch unreachable;
\\ }
\\}
@@ -106,7 +106,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\pub const a_text = "OK\n";
\\
\\pub fn ok() void {
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ stdout.print(b_text, .{}) catch unreachable;
\\}
);
@@ -124,7 +124,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\const io = @import("std").io;
\\
\\pub fn main() void {
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ stdout.print("Hello, world!\n{d:4} {x:3} {c}\n", .{@as(u32, 12), @as(u16, 0x12), @as(u8, 'a')}) catch unreachable;
\\}
, "Hello, world!\n 12 12 a\n");
@@ -267,7 +267,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\ var x_local : i32 = print_ok(x);
\\}
\\fn print_ok(val: @TypeOf(x)) @TypeOf(foo) {
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ stdout.print("OK\n", .{}) catch unreachable;
\\ return 0;
\\}
@@ -349,7 +349,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\pub fn main() void {
\\ const bar = Bar {.field2 = 13,};
\\ const foo = Foo {.field1 = bar,};
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ if (!foo.method()) {
\\ stdout.print("BAD\n", .{}) catch unreachable;
\\ }
@@ -363,7 +363,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
cases.add("defer with only fallthrough",
\\const io = @import("std").io;
\\pub fn main() void {
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ stdout.print("before\n", .{}) catch unreachable;
\\ defer stdout.print("defer1\n", .{}) catch unreachable;
\\ defer stdout.print("defer2\n", .{}) catch unreachable;
@@ -376,7 +376,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\const io = @import("std").io;
\\const os = @import("std").os;
\\pub fn main() void {
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ stdout.print("before\n", .{}) catch unreachable;
\\ defer stdout.print("defer1\n", .{}) catch unreachable;
\\ defer stdout.print("defer2\n", .{}) catch unreachable;
@@ -393,7 +393,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\ do_test() catch return;
\\}
\\fn do_test() !void {
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ stdout.print("before\n", .{}) catch unreachable;
\\ defer stdout.print("defer1\n", .{}) catch unreachable;
\\ errdefer stdout.print("deferErr\n", .{}) catch unreachable;
@@ -412,7 +412,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\ do_test() catch return;
\\}
\\fn do_test() !void {
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ stdout.print("before\n", .{}) catch unreachable;
\\ defer stdout.print("defer1\n", .{}) catch unreachable;
\\ errdefer stdout.print("deferErr\n", .{}) catch unreachable;
@@ -429,7 +429,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\const io = @import("std").io;
\\
\\pub fn main() void {
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ stdout.print(foo_txt, .{}) catch unreachable;
\\}
, "1234\nabcd\n");
@@ -448,7 +448,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\
\\pub fn main() !void {
\\ var args_it = std.process.args();
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ var index: usize = 0;
\\ _ = args_it.skip();
\\ while (args_it.next(allocator)) |arg_or_err| : (index += 1) {
@@ -487,7 +487,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\
\\pub fn main() !void {
\\ var args_it = std.process.args();
-\\ const stdout = io.getStdOut().outStream();
+\\ const stdout = io.getStdOut().writer();
\\ var index: usize = 0;
\\ _ = args_it.skip();
\\ while (args_it.next(allocator)) |arg_or_err| : (index += 1) {


@ -3,10 +3,10 @@ const io = @import("std").io;
pub fn write(_: void, bytes: []const u8) !usize { pub fn write(_: void, bytes: []const u8) !usize {
return 0; return 0;
} }
pub fn outStream() io.OutStream(void, @typeInfo(@typeInfo(@TypeOf(write)).Fn.return_type.?).ErrorUnion.error_set, write) { pub fn writer() io.Writer(void, @typeInfo(@typeInfo(@TypeOf(write)).Fn.return_type.?).ErrorUnion.error_set, write) {
return io.OutStream(void, @typeInfo(@typeInfo(@TypeOf(write)).Fn.return_type.?).ErrorUnion.error_set, write){ .context = {} }; return io.Writer(void, @typeInfo(@typeInfo(@TypeOf(write)).Fn.return_type.?).ErrorUnion.error_set, write){ .context = {} };
} }
test "crash" { test "crash" {
_ = io.multiOutStream(.{outStream()}); _ = io.multiWriter(.{writer()});
} }
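A hedged sketch of the generics this test now exercises: io.Writer(Context, Error, writeFn) replaces the deprecated io.OutStream, and io.multiWriter replaces io.multiOutStream. The discardingWriter helper and the writeAll call through multi.writer() are illustrative assumptions, not part of the original test:

const std = @import("std");
const io = std.io;

// Context-free write function that simply discards its input.
fn write(_: void, bytes: []const u8) !usize {
    return bytes.len;
}

const DiscardingWriter = io.Writer(void, @typeInfo(@typeInfo(@TypeOf(write)).Fn.return_type.?).ErrorUnion.error_set, write);

pub fn discardingWriter() DiscardingWriter {
    return DiscardingWriter{ .context = {} };
}

test "fan a single write out to multiple writers" {
    var multi = io.multiWriter(.{ discardingWriter(), discardingWriter() });
    try multi.writer().writeAll("hello");
}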


@ -652,9 +652,9 @@ pub const StackTracesContext = struct {
} }
child.spawn() catch |err| debug.panic("Unable to spawn {s}: {s}\n", .{ full_exe_path, @errorName(err) }); child.spawn() catch |err| debug.panic("Unable to spawn {s}: {s}\n", .{ full_exe_path, @errorName(err) });
const stdout = child.stdout.?.inStream().readAllAlloc(b.allocator, max_stdout_size) catch unreachable; const stdout = child.stdout.?.reader().readAllAlloc(b.allocator, max_stdout_size) catch unreachable;
defer b.allocator.free(stdout); defer b.allocator.free(stdout);
const stderrFull = child.stderr.?.inStream().readAllAlloc(b.allocator, max_stdout_size) catch unreachable; const stderrFull = child.stderr.?.reader().readAllAlloc(b.allocator, max_stdout_size) catch unreachable;
defer b.allocator.free(stderrFull); defer b.allocator.free(stderrFull);
var stderr = stderrFull; var stderr = stderrFull;
@ -875,8 +875,8 @@ pub const CompileErrorContext = struct {
var stdout_buf = ArrayList(u8).init(b.allocator); var stdout_buf = ArrayList(u8).init(b.allocator);
var stderr_buf = ArrayList(u8).init(b.allocator); var stderr_buf = ArrayList(u8).init(b.allocator);
child.stdout.?.inStream().readAllArrayList(&stdout_buf, max_stdout_size) catch unreachable; child.stdout.?.reader().readAllArrayList(&stdout_buf, max_stdout_size) catch unreachable;
child.stderr.?.inStream().readAllArrayList(&stderr_buf, max_stdout_size) catch unreachable; child.stderr.?.reader().readAllArrayList(&stderr_buf, max_stdout_size) catch unreachable;
const term = child.wait() catch |err| { const term = child.wait() catch |err| {
debug.panic("Unable to spawn {s}: {s}\n", .{ zig_args.items[0], @errorName(err) }); debug.panic("Unable to spawn {s}: {s}\n", .{ zig_args.items[0], @errorName(err) });


@ -388,8 +388,8 @@ pub fn main() anyerror!void {
// "W" and "Wl,". So we sort this list in order of descending priority. // "W" and "Wl,". So we sort this list in order of descending priority.
std.sort.sort(*json.ObjectMap, all_objects.items, {}, objectLessThan); std.sort.sort(*json.ObjectMap, all_objects.items, {}, objectLessThan);
var stdout_bos = std.io.bufferedOutStream(std.io.getStdOut().outStream()); var buffered_stdout = std.io.bufferedWriter(std.io.getStdOut().writer());
const stdout = stdout_bos.outStream(); const stdout = buffered_stdout.writer();
try stdout.writeAll( try stdout.writeAll(
\\// This file is generated by tools/update_clang_options.zig. \\// This file is generated by tools/update_clang_options.zig.
\\// zig fmt: off \\// zig fmt: off
@ -469,7 +469,7 @@ pub fn main() anyerror!void {
\\ \\
); );
try stdout_bos.flush(); try buffered_stdout.flush();
} }
// TODO we should be able to import clang_options.zig but currently this is problematic because it will // TODO we should be able to import clang_options.zig but currently this is problematic because it will
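A minimal sketch of the buffered-stdout pattern this tool now uses, with an illustrative loop: io.bufferedWriter replaces io.bufferedOutStream, and nothing reaches the terminal until flush() is called:

const std = @import("std");

pub fn main() !void {
    var buffered_stdout = std.io.bufferedWriter(std.io.getStdOut().writer());
    const stdout = buffered_stdout.writer();

    var i: usize = 0;
    while (i < 3) : (i += 1) {
        try stdout.print("line {d}\n", .{i});
    }

    // The prints above only filled the in-memory buffer; flush writes it out.
    try buffered_stdout.flush();
}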
@ -611,7 +611,7 @@ fn objectLessThan(context: void, a: *json.ObjectMap, b: *json.ObjectMap) bool {
} }
fn usageAndExit(file: fs.File, arg0: []const u8, code: u8) noreturn { fn usageAndExit(file: fs.File, arg0: []const u8, code: u8) noreturn {
file.outStream().print( file.writer().print(
\\Usage: {} /path/to/llvm-tblgen /path/to/git/llvm/llvm-project \\Usage: {} /path/to/llvm-tblgen /path/to/git/llvm/llvm-project
\\Alternative Usage: zig run /path/to/git/zig/tools/update_clang_options.zig -- /path/to/llvm-tblgen /path/to/git/llvm/llvm-project \\Alternative Usage: zig run /path/to/git/zig/tools/update_clang_options.zig -- /path/to/llvm-tblgen /path/to/git/llvm/llvm-project
\\ \\


@ -239,7 +239,7 @@ pub fn main() !void {
const vers_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "vers.txt" }); const vers_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "vers.txt" });
const vers_txt_file = try fs.cwd().createFile(vers_txt_path, .{}); const vers_txt_file = try fs.cwd().createFile(vers_txt_path, .{});
defer vers_txt_file.close(); defer vers_txt_file.close();
var buffered = std.io.bufferedOutStream(vers_txt_file.writer()); var buffered = std.io.bufferedWriter(vers_txt_file.writer());
const vers_txt = buffered.writer(); const vers_txt = buffered.writer();
for (global_ver_list) |name, i| { for (global_ver_list) |name, i| {
_ = global_ver_set.put(name, i) catch unreachable; _ = global_ver_set.put(name, i) catch unreachable;
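The same pattern against a freshly created file, sketched with illustrative names and data: the buffer is flushed before returning so the deferred close sees every byte:

const std = @import("std");
const fs = std.fs;

fn writeVersList(names: []const []const u8) !void {
    const file = try fs.cwd().createFile("vers.txt", .{});
    defer file.close();

    var buffered = std.io.bufferedWriter(file.writer());
    const out = buffered.writer();

    for (names) |name, i| {
        try out.print("{d} {s}\n", .{ i, name });
    }

    // The explicit flush runs before the deferred close above.
    try buffered.flush();
}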
@ -251,7 +251,7 @@ pub fn main() !void {
const fns_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "fns.txt" }); const fns_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "fns.txt" });
const fns_txt_file = try fs.cwd().createFile(fns_txt_path, .{}); const fns_txt_file = try fs.cwd().createFile(fns_txt_path, .{});
defer fns_txt_file.close(); defer fns_txt_file.close();
var buffered = std.io.bufferedOutStream(fns_txt_file.writer()); var buffered = std.io.bufferedWriter(fns_txt_file.writer());
const fns_txt = buffered.writer(); const fns_txt = buffered.writer();
for (global_fn_list) |name, i| { for (global_fn_list) |name, i| {
const entry = global_fn_set.getEntry(name).?; const entry = global_fn_set.getEntry(name).?;
@ -282,7 +282,7 @@ pub fn main() !void {
const abilist_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "abi.txt" }); const abilist_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "abi.txt" });
const abilist_txt_file = try fs.cwd().createFile(abilist_txt_path, .{}); const abilist_txt_file = try fs.cwd().createFile(abilist_txt_path, .{});
defer abilist_txt_file.close(); defer abilist_txt_file.close();
var buffered = std.io.bufferedOutStream(abilist_txt_file.writer()); var buffered = std.io.bufferedWriter(abilist_txt_file.writer());
const abilist_txt = buffered.writer(); const abilist_txt = buffered.writer();
// first iterate over the abi lists // first iterate over the abi lists