mirror of https://github.com/ziglang/zig.git (synced 2024-11-28 08:02:32 +00:00)
stage2: caching system integration & Module/Compilation splitting
* update to the new cache hash API
* std.Target defaultVersionRange moves to std.Target.Os.Tag
* std.Target.Os gains getVersionRange which returns a tagged union
* start the process of splitting Module into Compilation and "zig module".
  - The parts of Module having to do with only compiling zig code are extracted into ZigModule.zig.
  - Next step is to rename Module to Compilation.
  - After that rename ZigModule back to Module.
* implement proper cache hash usage when compiling C objects, and properly manage the file lock of the build artifacts.
* make versions optional to match recent changes to master branch.
* proper cache hash integration for compiling zig code
* proper cache hash integration for linking even when not compiling zig code.
* ELF LLD linking integrates with the caching system (a simplified sketch of this check follows below, after the commit metadata). A comment from the source code:

  Here we want to determine whether we can save time by not invoking LLD when the output is unchanged. None of the linker options or the object files that are being linked are in the hash that namespaces the directory we are outputting to. Therefore, we must hash those now, and the resulting digest will form the "id" of the linking job we are about to perform.

  After a successful link, we store the id in the metadata of a symlink named "id.txt" in the artifact directory. So, now, we check if this symlink exists, and if it matches our digest. If so, we can skip linking. Otherwise, we proceed with invoking LLD.

* implement disable_c_depfile option
* add tracy to a few more functions
This commit is contained in:
parent 1baa56a25f
commit 2a8fc1a18e
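The "id.txt" check quoted in the commit message is the core of the new LLD caching path. As a rough illustration only (this is not the commit's code; the function names are made up, and it assumes a digest already produced by the cache hash plus a std.fs.Dir handle for the artifact directory), the flow looks like this in Zig:

    const std = @import("std");

    /// Illustrative sketch: returns true when the stored link "id" matches `digest`,
    /// in which case invoking LLD can be skipped entirely.
    fn lldOutputIsUpToDate(artifact_dir: std.fs.Dir, digest: []const u8) bool {
        var prev_digest_buf: [64]u8 = undefined;
        // The previous digest is stored as the target of a symlink named "id.txt".
        const prev_digest = artifact_dir.readLink("id.txt", &prev_digest_buf) catch return false;
        return std.mem.eql(u8, prev_digest, digest);
    }

    /// Illustrative sketch: after a successful link, record the digest so the next
    /// invocation can skip LLD. Failure here only costs an unnecessary cache miss later.
    fn recordLinkId(artifact_dir: std.fs.Dir, digest: []const u8) void {
        artifact_dir.deleteFile("id.txt") catch {};
        artifact_dir.symLink(digest, "id.txt", .{}) catch |err| {
            std.log.warn("failed to save linking hash digest symlink: {}", .{@errorName(err)});
        };
    }

The real implementation is in the Elf.zig linkWithLLD hunks below, where the digest is computed by the cache hash after adding the linker options and input objects.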
@@ -82,7 +82,7 @@ pub const HashHelper = struct {
 }

 pub fn addListOfBytes(hh: *HashHelper, list_of_bytes: []const []const u8) void {
-hh.add(list_of_bytes.items.len);
+hh.add(list_of_bytes.len);
 for (list_of_bytes) |bytes| hh.addBytes(bytes);
 }

@@ -75,6 +75,13 @@ pub const Target = struct {
 else => return ".so",
 }
 }
+
+pub fn defaultVersionRange(tag: Tag) Os {
+return .{
+.tag = tag,
+.version_range = VersionRange.default(tag),
+};
+}
 };

 /// Based on NTDDI version constants from
@@ -290,11 +297,32 @@ pub const Target = struct {
 }
 };

-pub fn defaultVersionRange(tag: Tag) Os {
-return .{
-.tag = tag,
-.version_range = VersionRange.default(tag),
-};
+pub const TaggedVersionRange = union(enum) {
+none: void,
+semver: Version.Range,
+linux: LinuxVersionRange,
+windows: WindowsVersion.Range,
+};

+/// Provides a tagged union. `Target` does not store the tag because it is
+/// redundant with the OS tag; this function abstracts that part away.
+pub fn getVersionRange(self: Os) TaggedVersionRange {
+switch (self.tag) {
+.linux => return TaggedVersionRange{ .linux = self.version_range.linux },
+.windows => return TaggedVersionRange{ .windows = self.version_range.windows },
+
+.freebsd,
+.macosx,
+.ios,
+.tvos,
+.watchos,
+.netbsd,
+.openbsd,
+.dragonfly,
+=> return TaggedVersionRange{ .semver = self.version_range.semver },
+
+else => return .none,
+}
+}

 /// Checks if system is guaranteed to be at least `version` or older than `version`.
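As an illustration of the API added above (this sketch is not part of the commit's diff; the function name is made up, and it assumes the std.Target layout of the time, where LinuxVersionRange wraps a Version.Range in its `range` field):

    const std = @import("std");

    /// Illustrative sketch: build an Os from a tag and inspect its version range
    /// without caring which variant of the version_range union is active.
    fn linuxMinIsAtLeast3() bool {
        // defaultVersionRange now hangs off the OS tag (first hunk above).
        const os = std.Target.Os.Tag.linux.defaultVersionRange();
        // getVersionRange returns the TaggedVersionRange union (second hunk above).
        return switch (os.getVersionRange()) {
            .linux => |versions| versions.range.min.major >= 3,
            .none => false,
            else => true,
        };
    }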
@@ -375,7 +375,7 @@ pub const CrossTarget = struct {
 // `Target.current.os` works when doing `zig build` because Zig generates a build executable using
 // native OS version range. However this will not be accurate otherwise, and
 // will need to be integrated with `std.zig.system.NativeTargetInfo.detect`.
-var adjusted_os = if (self.os_tag) |os_tag| Target.Os.defaultVersionRange(os_tag) else Target.current.os;
+var adjusted_os = if (self.os_tag) |os_tag| os_tag.defaultVersionRange() else Target.current.os;

 if (self.os_version_min) |min| switch (min) {
 .none => {},
@@ -203,7 +203,7 @@ pub const NativeTargetInfo = struct {
 /// deinitialization method.
 /// TODO Remove the Allocator requirement from this function.
 pub fn detect(allocator: *Allocator, cross_target: CrossTarget) DetectError!NativeTargetInfo {
-var os = Target.Os.defaultVersionRange(cross_target.getOsTag());
+var os = cross_target.getOsTag().defaultVersionRange();
 if (cross_target.os_tag == null) {
 switch (Target.current.os.tag) {
 .linux => {
File diff suppressed because it is too large (Load Diff)

src-self-hosted/ZigModule.zig (new file, 3238 lines)
File diff suppressed because it is too large (Load Diff)
@@ -6,7 +6,7 @@ const Type = @import("type.zig").Type;
 const TypedValue = @import("TypedValue.zig");
 const assert = std.debug.assert;
 const zir = @import("zir.zig");
-const Module = @import("Module.zig");
+const Module = @import("ZigModule.zig");
 const ast = std.zig.ast;
 const trace = @import("tracy.zig").trace;
 const Scope = Module.Scope;
@@ -7,8 +7,9 @@ const Type = @import("type.zig").Type;
 const Value = @import("value.zig").Value;
 const TypedValue = @import("TypedValue.zig");
 const link = @import("link.zig");
-const Module = @import("Module.zig");
-const ErrorMsg = Module.ErrorMsg;
+const Module = @import("ZigModule.zig");
+const Compilation = @import("Module.zig");
+const ErrorMsg = Compilation.ErrorMsg;
 const Target = std.Target;
 const Allocator = mem.Allocator;
 const trace = @import("tracy.zig").trace;
@@ -50,7 +51,7 @@ pub const Result = union(enum) {
 appended: void,
 /// The value is available externally, `code` is unused.
 externally_managed: []const u8,
-fail: *Module.ErrorMsg,
+fail: *ErrorMsg,
 };

 pub const GenerateSymbolError = error{
@@ -1,7 +1,7 @@
 const std = @import("std");

 const link = @import("../link.zig");
-const Module = @import("../Module.zig");
+const Module = @import("../ZigModule.zig");

 const Inst = @import("../ir.zig").Inst;
 const Value = @import("../value.zig").Value;
@@ -5,7 +5,8 @@ const assert = std.debug.assert;
 const leb = std.debug.leb;
 const mem = std.mem;

-const Decl = @import("../Module.zig").Decl;
+const Module = @import("../ZigModule.zig");
+const Decl = Module.Decl;
 const Inst = @import("../ir.zig").Inst;
 const Type = @import("../type.zig").Type;
 const Value = @import("../value.zig").Value;
@@ -5,6 +5,7 @@ const mem = std.mem;
 const Module = @import("Module.zig");
 const path = std.fs.path;
 const build_options = @import("build_options");
+const trace = @import("tracy.zig").trace;

 pub const Lib = struct {
 name: []const u8,
@@ -54,6 +55,9 @@ pub const LoadMetaDataError = error{
 /// This function will emit a log error when there is a problem with the zig installation and then return
 /// `error.ZigInstallationCorrupt`.
 pub fn loadMetaData(gpa: *Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!*ABI {
+const tracy = trace(@src());
+defer tracy.end();
+
 var arena_allocator = std.heap.ArenaAllocator.init(gpa);
 errdefer arena_allocator.deinit();
 const arena = &arena_allocator.allocator;
@@ -584,6 +588,9 @@ fn lib_path(mod: *Module, arena: *Allocator, sub_path: []const u8) ![]const u8 {
 }

 fn build_libc_object(mod: *Module, basename: []const u8, c_source_file: Module.CSourceFile) !void {
+const tracy = trace(@src());
+defer tracy.end();
+
 // TODO: This is extracted into a local variable to work around a stage1 miscompilation.
 const emit_bin = Module.EmitLoc{
 .directory = null, // Put it in the cache directory.
@@ -618,8 +625,11 @@ fn build_libc_object(mod: *Module, basename: []const u8, c_source_file: Module.C
 try sub_module.update();

 try mod.crt_files.ensureCapacity(mod.gpa, mod.crt_files.count() + 1);
-const artifact_path = try std.fs.path.join(mod.gpa, &[_][]const u8{
-sub_module.zig_cache_artifact_directory.path.?, basename,
-});
+const artifact_path = if (sub_module.bin_file.options.directory.path) |p|
+try std.fs.path.join(mod.gpa, &[_][]const u8{ p, basename })
+else
+try mod.gpa.dupe(u8, basename);

+// TODO obtain a lock on the artifact and put that in crt_files as well.
 mod.crt_files.putAssumeCapacityNoClobber(basename, artifact_path);
 }
@@ -1,7 +1,7 @@
 const std = @import("std");
 const Value = @import("value.zig").Value;
 const Type = @import("type.zig").Type;
-const Module = @import("Module.zig");
+const Module = @import("ZigModule.zig");
 const assert = std.debug.assert;
 const codegen = @import("codegen.zig");
 const ast = std.zig.ast;
@@ -1,6 +1,7 @@
 const std = @import("std");
 const Allocator = std.mem.Allocator;
-const Module = @import("Module.zig");
+const Compilation = @import("Module.zig");
+const ZigModule = @import("ZigModule.zig");
 const fs = std.fs;
 const trace = @import("tracy.zig").trace;
 const Package = @import("Package.zig");
@@ -12,7 +13,7 @@ pub const producer_string = if (std.builtin.is_test) "zig test" else "zig " ++ b

 pub const Options = struct {
 /// Where the output will go.
-directory: Module.Directory,
+directory: Compilation.Directory,
 /// Path to the output file, relative to `directory`.
 sub_path: []const u8,
 target: std.Target,
@@ -21,7 +22,9 @@ pub const Options = struct {
 object_format: std.builtin.ObjectFormat,
 optimize_mode: std.builtin.Mode,
 root_name: []const u8,
-root_pkg: ?*const Package,
+/// Not every Compilation compiles .zig code! For example you could do `zig build-exe foo.o`.
+/// TODO rename Module to Compilation and then (as a separate commit) ZigModule to Module.
+zig_module: ?*ZigModule,
 dynamic_linker: ?[]const u8 = null,
 /// Used for calculating how much space to reserve for symbols in case the binary file
 /// does not already have a symbol table.
@@ -71,7 +74,7 @@ pub const Options = struct {
 lib_dirs: []const []const u8 = &[0][]const u8{},
 rpath_list: []const []const u8 = &[0][]const u8{},

-version: std.builtin.Version,
+version: ?std.builtin.Version,
 libc_installation: ?*const LibCInstallation,

 pub fn effectiveOutputMode(options: Options) std.builtin.OutputMode {
@@ -184,7 +187,7 @@ pub const File = struct {

 /// May be called before or after updateDeclExports but must be called
 /// after allocateDeclIndexes for any given Decl.
-pub fn updateDecl(base: *File, module: *Module, decl: *Module.Decl) !void {
+pub fn updateDecl(base: *File, module: *ZigModule, decl: *ZigModule.Decl) !void {
 switch (base.tag) {
 .coff => return @fieldParentPtr(Coff, "base", base).updateDecl(module, decl),
 .elf => return @fieldParentPtr(Elf, "base", base).updateDecl(module, decl),
@@ -194,7 +197,7 @@ pub const File = struct {
 }
 }

-pub fn updateDeclLineNumber(base: *File, module: *Module, decl: *Module.Decl) !void {
+pub fn updateDeclLineNumber(base: *File, module: *ZigModule, decl: *ZigModule.Decl) !void {
 switch (base.tag) {
 .coff => return @fieldParentPtr(Coff, "base", base).updateDeclLineNumber(module, decl),
 .elf => return @fieldParentPtr(Elf, "base", base).updateDeclLineNumber(module, decl),
@@ -205,7 +208,7 @@ pub const File = struct {

 /// Must be called before any call to updateDecl or updateDeclExports for
 /// any given Decl.
-pub fn allocateDeclIndexes(base: *File, decl: *Module.Decl) !void {
+pub fn allocateDeclIndexes(base: *File, decl: *ZigModule.Decl) !void {
 switch (base.tag) {
 .coff => return @fieldParentPtr(Coff, "base", base).allocateDeclIndexes(decl),
 .elf => return @fieldParentPtr(Elf, "base", base).allocateDeclIndexes(decl),
@@ -256,20 +259,20 @@ pub const File = struct {
 }
 }

-pub fn flush(base: *File, module: *Module) !void {
+pub fn flush(base: *File, comp: *Compilation) !void {
 const tracy = trace(@src());
 defer tracy.end();

 try switch (base.tag) {
-.coff => @fieldParentPtr(Coff, "base", base).flush(module),
-.elf => @fieldParentPtr(Elf, "base", base).flush(module),
-.macho => @fieldParentPtr(MachO, "base", base).flush(module),
-.c => @fieldParentPtr(C, "base", base).flush(module),
-.wasm => @fieldParentPtr(Wasm, "base", base).flush(module),
+.coff => @fieldParentPtr(Coff, "base", base).flush(comp),
+.elf => @fieldParentPtr(Elf, "base", base).flush(comp),
+.macho => @fieldParentPtr(MachO, "base", base).flush(comp),
+.c => @fieldParentPtr(C, "base", base).flush(comp),
+.wasm => @fieldParentPtr(Wasm, "base", base).flush(comp),
 };
 }

-pub fn freeDecl(base: *File, decl: *Module.Decl) void {
+pub fn freeDecl(base: *File, decl: *ZigModule.Decl) void {
 switch (base.tag) {
 .coff => @fieldParentPtr(Coff, "base", base).freeDecl(decl),
 .elf => @fieldParentPtr(Elf, "base", base).freeDecl(decl),
@@ -293,9 +296,9 @@ pub const File = struct {
 /// allocateDeclIndexes for any given Decl.
 pub fn updateDeclExports(
 base: *File,
-module: *Module,
-decl: *const Module.Decl,
-exports: []const *Module.Export,
+module: *ZigModule,
+decl: *const ZigModule.Decl,
+exports: []const *ZigModule.Export,
 ) !void {
 switch (base.tag) {
 .coff => return @fieldParentPtr(Coff, "base", base).updateDeclExports(module, decl, exports),
@@ -306,7 +309,7 @@ pub const File = struct {
 }
 }

-pub fn getDeclVAddr(base: *File, decl: *const Module.Decl) u64 {
+pub fn getDeclVAddr(base: *File, decl: *const ZigModule.Decl) u64 {
 switch (base.tag) {
 .coff => return @fieldParentPtr(Coff, "base", base).getDeclVAddr(decl),
 .elf => return @fieldParentPtr(Elf, "base", base).getDeclVAddr(decl),
@@ -2,7 +2,8 @@ const std = @import("std");
 const mem = std.mem;
 const assert = std.debug.assert;
 const Allocator = std.mem.Allocator;
-const Module = @import("../Module.zig");
+const Module = @import("../ZigModule.zig");
+const Compilation = @import("../Module.zig");
 const fs = std.fs;
 const codegen = @import("../codegen/c.zig");
 const link = @import("../link.zig");
@@ -20,7 +21,7 @@ main: std.ArrayList(u8),
 called: std.StringHashMap(void),
 need_stddef: bool = false,
 need_stdint: bool = false,
-error_msg: *Module.ErrorMsg = undefined,
+error_msg: *Compilation.ErrorMsg = undefined,

 pub fn openPath(allocator: *Allocator, sub_path: []const u8, options: link.Options) !*File {
 assert(options.object_format == .c);
@@ -51,7 +52,7 @@ pub fn openPath(allocator: *Allocator, sub_path: []const u8, options: link.Optio
 }

 pub fn fail(self: *C, src: usize, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } {
-self.error_msg = try Module.ErrorMsg.create(self.base.allocator, src, format, args);
+self.error_msg = try Compilation.ErrorMsg.create(self.base.allocator, src, format, args);
 return error.AnalysisFail;
 }

@@ -71,7 +72,7 @@ pub fn updateDecl(self: *C, module: *Module, decl: *Module.Decl) !void {
 };
 }

-pub fn flush(self: *C, module: *Module) !void {
+pub fn flush(self: *C, comp: *Compilation) !void {
 const writer = self.base.file.?.writer();
 try writer.writeAll(@embedFile("cbe.h"));
 var includes = false;
@@ -7,7 +7,8 @@ const assert = std.debug.assert;
 const fs = std.fs;

 const trace = @import("../tracy.zig").trace;
-const Module = @import("../Module.zig");
+const Module = @import("../ZigModule.zig");
+const Compilation = @import("../Module.zig");
 const codegen = @import("../codegen.zig");
 const link = @import("../link.zig");

@@ -732,7 +733,7 @@ pub fn updateDeclExports(self: *Coff, module: *Module, decl: *const Module.Decl,
 try module.failed_exports.ensureCapacity(module.gpa, module.failed_exports.items().len + 1);
 module.failed_exports.putAssumeCapacityNoClobber(
 exp,
-try Module.ErrorMsg.create(self.base.allocator, 0, "Unimplemented: ExportOptions.section", .{}),
+try Compilation.ErrorMsg.create(self.base.allocator, 0, "Unimplemented: ExportOptions.section", .{}),
 );
 continue;
 }
@@ -743,14 +744,14 @@ pub fn updateDeclExports(self: *Coff, module: *Module, decl: *const Module.Decl,
 try module.failed_exports.ensureCapacity(module.gpa, module.failed_exports.items().len + 1);
 module.failed_exports.putAssumeCapacityNoClobber(
 exp,
-try Module.ErrorMsg.create(self.base.allocator, 0, "Unimplemented: Exports other than '_start'", .{}),
+try Compilation.ErrorMsg.create(self.base.allocator, 0, "Unimplemented: Exports other than '_start'", .{}),
 );
 continue;
 }
 }
 }

-pub fn flush(self: *Coff, module: *Module) !void {
+pub fn flush(self: *Coff, comp: *Compilation) !void {
 if (self.text_section_size_dirty) {
 // Write the new raw size in the .text header
 var buf: [4]u8 = undefined;
@@ -3,7 +3,8 @@ const mem = std.mem;
 const assert = std.debug.assert;
 const Allocator = std.mem.Allocator;
 const ir = @import("../ir.zig");
-const Module = @import("../Module.zig");
+const Module = @import("../ZigModule.zig");
+const Compilation = @import("../Module.zig");
 const fs = std.fs;
 const elf = std.elf;
 const codegen = @import("../codegen.zig");
@@ -122,6 +123,9 @@ dbg_info_decl_free_list: std.AutoHashMapUnmanaged(*TextBlock, void) = .{},
 dbg_info_decl_first: ?*TextBlock = null,
 dbg_info_decl_last: ?*TextBlock = null,

+/// Prevents other processes from clobbering the output file this is linking.
+lock: ?std.cache_hash.Lock = null,
+
 /// `alloc_num / alloc_den` is the factor of padding when allocating.
 const alloc_num = 4;
 const alloc_den = 3;
@@ -285,7 +289,21 @@ fn createFile(allocator: *Allocator, file: fs.File, options: link.Options) !Elf
 return self;
 }

+pub fn releaseLock(self: *Elf) void {
+if (self.lock) |*lock| {
+lock.release();
+self.lock = null;
+}
+}
+
+pub fn toOwnedLock(self: *Elf) std.cache_hash.Lock {
+const lock = self.lock.?;
+self.lock = null;
+return lock;
+}
+
 pub fn deinit(self: *Elf) void {
+self.releaseLock();
 self.sections.deinit(self.base.allocator);
 self.program_headers.deinit(self.base.allocator);
 self.shstrtab.deinit(self.base.allocator);
@@ -709,20 +727,24 @@ pub const abbrev_base_type = 4;
 pub const abbrev_pad1 = 5;
 pub const abbrev_parameter = 6;

-pub fn flush(self: *Elf, module: *Module) !void {
+pub fn flush(self: *Elf, comp: *Compilation) !void {
 if (build_options.have_llvm and self.base.options.use_lld) {
-return self.linkWithLLD(module);
+return self.linkWithLLD(comp);
 } else {
 switch (self.base.options.effectiveOutputMode()) {
 .Exe, .Obj => {},
 .Lib => return error.TODOImplementWritingLibFiles,
 }
-return self.flushInner(module);
+return self.flushInner(comp);
 }
 }

 /// Commit pending changes and write headers.
-fn flushInner(self: *Elf, module: *Module) !void {
+fn flushInner(self: *Elf, comp: *Compilation) !void {
+// TODO This linker code currently assumes there is only 1 compilation unit and it corresponds to the
+// Zig source code.
+const zig_module = self.base.options.zig_module orelse return error.LinkingWithoutZigSourceUnimplemented;
+
 const target_endian = self.base.options.target.cpu.arch.endian();
 const foreign_endian = target_endian != std.Target.current.cpu.arch.endian();
 const ptr_width_bytes: u8 = self.ptrWidthBytes();
@@ -844,8 +866,8 @@ fn flushInner(self: *Elf, module: *Module) !void {
 },
 }
 // Write the form for the compile unit, which must match the abbrev table above.
-const name_strp = try self.makeDebugString(self.base.options.root_pkg.?.root_src_path);
-const comp_dir_strp = try self.makeDebugString(self.base.options.root_pkg.?.root_src_directory.path.?);
+const name_strp = try self.makeDebugString(zig_module.root_pkg.root_src_path);
+const comp_dir_strp = try self.makeDebugString(zig_module.root_pkg.root_src_directory.path.?);
 const producer_strp = try self.makeDebugString(link.producer_string);
 // Currently only one compilation unit is supported, so the address range is simply
 // identical to the main program header virtual address and memory size.
@@ -1014,7 +1036,7 @@ fn flushInner(self: *Elf, module: *Module) !void {
 0, // include_directories (none except the compilation unit cwd)
 });
 // file_names[0]
-di_buf.appendSliceAssumeCapacity(self.base.options.root_pkg.?.root_src_path); // relative path name
+di_buf.appendSliceAssumeCapacity(zig_module.root_pkg.root_src_path); // relative path name
 di_buf.appendSliceAssumeCapacity(&[_]u8{
 0, // null byte for the relative path name
 0, // directory_index
@@ -1199,11 +1221,105 @@ fn flushInner(self: *Elf, module: *Module) !void {
 assert(!self.debug_strtab_dirty);
 }

-fn linkWithLLD(self: *Elf, module: *Module) !void {
+fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
 var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
 defer arena_allocator.deinit();
 const arena = &arena_allocator.allocator;

+const directory = self.base.options.directory; // Just an alias to make it shorter to type.
+
+// If there is no Zig code to compile, then we should skip flushing the output file because it
+// will not be part of the linker line anyway.
+const zig_module_obj_path: ?[]const u8 = if (self.base.options.zig_module) |module| blk: {
+try self.flushInner(comp);
+
+const obj_basename = self.base.intermediary_basename.?;
+const full_obj_path = if (directory.path) |dir_path|
+try std.fs.path.join(arena, &[_][]const u8{dir_path, obj_basename})
+else
+obj_basename;
+break :blk full_obj_path;
+} else null;
+
+// Here we want to determine whether we can save time by not invoking LLD when the
+// output is unchanged. None of the linker options or the object files that are being
+// linked are in the hash that namespaces the directory we are outputting to. Therefore,
+// we must hash those now, and the resulting digest will form the "id" of the linking
+// job we are about to perform.
+// After a successful link, we store the id in the metadata of a symlink named "id.txt" in
+// the artifact directory. So, now, we check if this symlink exists, and if it matches
+// our digest. If so, we can skip linking. Otherwise, we proceed with invoking LLD.
+const id_symlink_basename = "id.txt";
+
+// We are about to obtain this lock, so here we give other processes a chance first.
+self.releaseLock();
+
+var ch = comp.cache_parent.obtain();
+defer ch.deinit();
+
+const is_lib = self.base.options.output_mode == .Lib;
+const is_dyn_lib = self.base.options.link_mode == .Dynamic and is_lib;
+const have_dynamic_linker = self.base.options.link_libc and
+self.base.options.link_mode == .Dynamic and (is_dyn_lib or self.base.options.output_mode == .Exe);
+
+try ch.addOptionalFile(self.base.options.linker_script);
+try ch.addOptionalFile(self.base.options.version_script);
+try ch.addListOfFiles(self.base.options.objects);
+for (comp.c_object_table.items()) |entry| switch (entry.key.status) {
+.new => unreachable,
+.failure => return error.NotAllCSourceFilesAvailableToLink,
+.success => |success| _ = try ch.addFile(success.object_path, null),
+};
+try ch.addOptionalFile(zig_module_obj_path);
+// We can skip hashing libc and libc++ components that we are in charge of building from Zig
+// installation sources because they are always a product of the compiler version + target information.
+ch.hash.addOptional(self.base.options.stack_size_override);
+ch.hash.addOptional(self.base.options.gc_sections);
+ch.hash.add(self.base.options.eh_frame_hdr);
+ch.hash.add(self.base.options.rdynamic);
+ch.hash.addListOfBytes(self.base.options.extra_lld_args);
+ch.hash.addListOfBytes(self.base.options.lib_dirs);
+ch.hash.add(self.base.options.z_nodelete);
+ch.hash.add(self.base.options.z_defs);
+if (self.base.options.link_libc) {
+ch.hash.add(self.base.options.libc_installation != null);
+if (self.base.options.libc_installation) |libc_installation| {
+ch.hash.addBytes(libc_installation.crt_dir.?);
+}
+if (have_dynamic_linker) {
+ch.hash.addOptionalBytes(self.base.options.dynamic_linker);
+}
+}
+if (is_dyn_lib) {
+ch.hash.addOptionalBytes(self.base.options.override_soname);
+ch.hash.addOptional(self.base.options.version);
+}
+ch.hash.addListOfBytes(self.base.options.system_libs);
+ch.hash.addOptional(self.base.options.allow_shlib_undefined);
+ch.hash.add(self.base.options.bind_global_refs_locally);
+
+// We don't actually care whether it's a cache hit or miss; we just need the digest and the lock.
+_ = try ch.hit();
+const digest = ch.final();
+
+var prev_digest_buf: [digest.len]u8 = undefined;
+const prev_digest: []u8 = directory.handle.readLink(id_symlink_basename, &prev_digest_buf) catch blk: {
+// Handle this as a cache miss.
+mem.set(u8, &prev_digest_buf, 0);
+break :blk &prev_digest_buf;
+};
+if (mem.eql(u8, prev_digest, &digest)) {
+// Hot diggity dog! The output binary is already there.
+self.lock = ch.toOwnedLock();
+return;
+}
+
+// We are about to change the output file to be different, so we invalidate the build hash now.
+directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {
+error.FileNotFound => {},
+else => |e| return e,
+};
+
 const target = self.base.options.target;
 const is_obj = self.base.options.output_mode == .Obj;

@@ -1272,8 +1388,6 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 try argv.append(arg);
 }

-const is_lib = self.base.options.output_mode == .Lib;
-const is_dyn_lib = self.base.options.link_mode == .Dynamic and is_lib;
 if (self.base.options.link_mode == .Static) {
 if (target.cpu.arch.isARM() or target.cpu.arch.isThumb()) {
 try argv.append("-Bstatic");
@@ -1288,7 +1402,7 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 try argv.append("-pie");
 }

-const full_out_path = if (self.base.options.directory.path) |dir_path|
+const full_out_path = if (directory.path) |dir_path|
 try std.fs.path.join(arena, &[_][]const u8{dir_path, self.base.options.sub_path})
 else
 self.base.options.sub_path;
@@ -1311,13 +1425,14 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 break :o "Scrt1.o";
 }
 };
-try argv.append(try module.get_libc_crt_file(arena, crt1o));
+try argv.append(try comp.get_libc_crt_file(arena, crt1o));
 if (target_util.libc_needs_crti_crtn(target)) {
-try argv.append(try module.get_libc_crt_file(arena, "crti.o"));
+try argv.append(try comp.get_libc_crt_file(arena, "crti.o"));
 }
 }

 // TODO rpaths
+// TODO add to cache hash above too
 //for (size_t i = 0; i < g->rpath_list.length; i += 1) {
 // Buf *rpath = g->rpath_list.at(i);
 // add_rpath(lj, rpath);
@@ -1354,7 +1469,7 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 try argv.append(libc_installation.crt_dir.?);
 }

-if (self.base.options.link_mode == .Dynamic and (is_dyn_lib or self.base.options.output_mode == .Exe)) {
+if (have_dynamic_linker) {
 if (self.base.options.dynamic_linker) |dynamic_linker| {
 try argv.append("-dynamic-linker");
 try argv.append(dynamic_linker);
@@ -1363,9 +1478,10 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 }

 if (is_dyn_lib) {
-const soname = self.base.options.override_soname orelse
-try std.fmt.allocPrint(arena, "lib{}.so.{}", .{self.base.options.root_name,
-self.base.options.version.major,});
+const soname = self.base.options.override_soname orelse if (self.base.options.version) |ver|
+try std.fmt.allocPrint(arena, "lib{}.so.{}", .{self.base.options.root_name, ver.major})
+else
+try std.fmt.allocPrint(arena, "lib{}.so", .{self.base.options.root_name});
 try argv.append("-soname");
 try argv.append(soname);

@@ -1378,28 +1494,14 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 // Positional arguments to the linker such as object files.
 try argv.appendSlice(self.base.options.objects);

-for (module.c_object_table.items()) |entry| {
-const c_object = entry.key;
-switch (c_object.status) {
-.new => unreachable,
-.failure => return error.NotAllCSourceFilesAvailableToLink,
-.success => |full_obj_path| {
-try argv.append(full_obj_path);
-},
-}
-}
+for (comp.c_object_table.items()) |entry| switch (entry.key.status) {
+.new => unreachable,
+.failure => unreachable, // Checked during cache hashing.
+.success => |success| try argv.append(success.object_path),
+};

-// If there is no Zig code to compile, then we should skip flushing the output file because it
-// will not be part of the linker line anyway.
-if (module.root_pkg != null) {
-try self.flushInner(module);
-
-const obj_basename = self.base.intermediary_basename.?;
-const full_obj_path = if (self.base.options.directory.path) |dir_path|
-try std.fs.path.join(arena, &[_][]const u8{dir_path, obj_basename})
-else
-obj_basename;
-try argv.append(full_obj_path);
+if (zig_module_obj_path) |p| {
+try argv.append(p);
 }

 // TODO compiler-rt and libc
@@ -1419,7 +1521,7 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 // By this time, we depend on these libs being dynamically linked libraries and not static libraries
 // (the check for that needs to be earlier), but they could be full paths to .so files, in which
 // case we want to avoid prepending "-l".
-const ext = Module.classifyFileExt(link_lib);
+const ext = Compilation.classifyFileExt(link_lib);
 const arg = if (ext == .so) link_lib else try std.fmt.allocPrint(arena, "-l{}", .{link_lib});
 argv.appendAssumeCapacity(arg);
 }
@@ -1427,8 +1529,8 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 if (!is_obj) {
 // libc++ dep
 if (self.base.options.link_libcpp) {
-try argv.append(module.libcxxabi_static_lib.?);
-try argv.append(module.libcxx_static_lib.?);
+try argv.append(comp.libcxxabi_static_lib.?);
+try argv.append(comp.libcxx_static_lib.?);
 }

 // libc dep
@@ -1448,15 +1550,15 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 try argv.append("-lpthread");
 }
 } else if (target.isGnuLibC()) {
-try argv.append(module.libunwind_static_lib.?);
+try argv.append(comp.libunwind_static_lib.?);
 // TODO here we need to iterate over the glibc libs and add the .so files to the linker line.
 std.log.warn("TODO port add_glibc_libs to stage2", .{});
-try argv.append(try module.get_libc_crt_file(arena, "libc_nonshared.a"));
+try argv.append(try comp.get_libc_crt_file(arena, "libc_nonshared.a"));
 } else if (target.isMusl()) {
-try argv.append(module.libunwind_static_lib.?);
-try argv.append(module.libc_static_lib.?);
+try argv.append(comp.libunwind_static_lib.?);
+try argv.append(comp.libc_static_lib.?);
 } else if (self.base.options.link_libcpp) {
-try argv.append(module.libunwind_static_lib.?);
+try argv.append(comp.libunwind_static_lib.?);
 } else {
 unreachable; // Compiler was supposed to emit an error for not being able to provide libc.
 }
@@ -1466,9 +1568,9 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 // crt end
 if (link_in_crt) {
 if (target.isAndroid()) {
-try argv.append(try module.get_libc_crt_file(arena, "crtend_android.o"));
+try argv.append(try comp.get_libc_crt_file(arena, "crtend_android.o"));
 } else if (target_util.libc_needs_crti_crtn(target)) {
-try argv.append(try module.get_libc_crt_file(arena, "crtn.o"));
+try argv.append(try comp.get_libc_crt_file(arena, "crtn.o"));
 }
 }

@@ -1500,6 +1602,19 @@ fn linkWithLLD(self: *Elf, module: *Module) !void {
 const ZigLLDLink = @import("../llvm.zig").ZigLLDLink;
 const ok = ZigLLDLink(.ELF, new_argv.ptr, new_argv.len, append_diagnostic, 0, 0);
 if (!ok) return error.LLDReportedFailure;
+
+// Update the dangling symlink "id.txt" with the digest. If it fails we can continue; it only
+// means that the next invocation will have an unnecessary cache miss.
+directory.handle.symLink(&digest, id_symlink_basename, .{}) catch |err| {
+std.log.warn("failed to save linking hash digest symlink: {}", .{@errorName(err)});
+};
+// Again failure here only means an unnecessary cache miss.
+ch.writeManifest() catch |err| {
+std.log.warn("failed to write cache manifest when linking: {}", .{ @errorName(err) });
+};
+// We hang on to this lock so that the output file path can be used without
+// other processes clobbering it.
+self.lock = ch.toOwnedLock();
 }

 fn append_diagnostic(context: usize, ptr: [*]const u8, len: usize) callconv(.C) void {
@@ -2396,7 +2511,7 @@ pub fn updateDeclExports(
 try module.failed_exports.ensureCapacity(module.gpa, module.failed_exports.items().len + 1);
 module.failed_exports.putAssumeCapacityNoClobber(
 exp,
-try Module.ErrorMsg.create(self.base.allocator, 0, "Unimplemented: ExportOptions.section", .{}),
+try Compilation.ErrorMsg.create(self.base.allocator, 0, "Unimplemented: ExportOptions.section", .{}),
 );
 continue;
 }
@@ -2414,7 +2529,7 @@ pub fn updateDeclExports(
 try module.failed_exports.ensureCapacity(module.gpa, module.failed_exports.items().len + 1);
 module.failed_exports.putAssumeCapacityNoClobber(
 exp,
-try Module.ErrorMsg.create(self.base.allocator, 0, "Unimplemented: GlobalLinkage.LinkOnce", .{}),
+try Compilation.ErrorMsg.create(self.base.allocator, 0, "Unimplemented: GlobalLinkage.LinkOnce", .{}),
 );
 continue;
 },
@@ -2722,8 +2837,8 @@ fn dbgLineNeededHeaderBytes(self: Elf) u32 {
 directory_count * 8 + file_name_count * 8 +
 // These are encoded as DW.FORM_string rather than DW.FORM_strp as we would like
 // because of a workaround for readelf and gdb failing to understand DWARFv5 correctly.
-self.base.options.root_pkg.?.root_src_directory.path.?.len +
-self.base.options.root_pkg.?.root_src_path.len);
+self.base.options.zig_module.?.root_pkg.root_src_directory.path.?.len +
+self.base.options.zig_module.?.root_pkg.root_src_path.len);
 }

 fn dbgInfoNeededHeaderBytes(self: Elf) u32 {
@@ -12,7 +12,8 @@ const mem = std.mem;
 const trace = @import("../tracy.zig").trace;
 const Type = @import("../type.zig").Type;

-const Module = @import("../Module.zig");
+const Module = @import("../ZigModule.zig");
+const Compilation = @import("../Module.zig");
 const link = @import("../link.zig");
 const File = link.File;

@@ -205,7 +206,7 @@ fn createFile(allocator: *Allocator, file: fs.File, options: link.Options) !Mach
 return self;
 }

-pub fn flush(self: *MachO, module: *Module) !void {
+pub fn flush(self: *MachO, comp: *Compilation) !void {
 switch (self.base.options.output_mode) {
 .Exe => {
 var last_cmd_offset: usize = @sizeOf(macho.mach_header_64);
@@ -6,7 +6,8 @@ const assert = std.debug.assert;
 const fs = std.fs;
 const leb = std.debug.leb;

-const Module = @import("../Module.zig");
+const Module = @import("../ZigModule.zig");
+const Compilation = @import("../Module.zig");
 const codegen = @import("../codegen/wasm.zig");
 const link = @import("../link.zig");

@@ -126,7 +127,7 @@ pub fn freeDecl(self: *Wasm, decl: *Module.Decl) void {
 decl.fn_link.wasm = null;
 }

-pub fn flush(self: *Wasm, module: *Module) !void {
+pub fn flush(self: *Wasm, comp: *Compilation) !void {
 const file = self.base.file.?;
 const header_size = 5 + 1;

@@ -164,7 +165,7 @@ pub fn flush(self: *Wasm, module: *Module) !void {
 }

 // Export section
-{
+if (self.base.options.zig_module) |module| {
 const header_offset = try reserveVecSectionHeader(file);
 const writer = file.writer();
 var count: u32 = 0;
@@ -268,6 +268,7 @@ pub fn buildOutputType(
 var link_mode: ?std.builtin.LinkMode = null;
 var root_src_file: ?[]const u8 = null;
 var version: std.builtin.Version = .{ .major = 0, .minor = 0, .patch = 0 };
+var have_version = false;
 var strip = false;
 var single_threaded = false;
 var watch = false;
@@ -445,6 +446,7 @@ pub fn buildOutputType(
 version = std.builtin.Version.parse(args[i]) catch |err| {
 fatal("unable to parse --version '{}': {}", .{ args[i], @errorName(err) });
 };
+have_version = true;
 } else if (mem.eql(u8, arg, "-target")) {
 if (i + 1 >= args.len) fatal("expected parameter after {}", .{arg});
 i += 1;
@@ -799,6 +801,7 @@ pub fn buildOutputType(
 version.major = std.fmt.parseInt(u32, linker_args.items[i], 10) catch |err| {
 fatal("unable to parse '{}': {}", .{ arg, @errorName(err) });
 };
+have_version = true;
 } else if (mem.eql(u8, arg, "--minor-image-version")) {
 i += 1;
 if (i >= linker_args.items.len) {
@@ -807,6 +810,7 @@ pub fn buildOutputType(
 version.minor = std.fmt.parseInt(u32, linker_args.items[i], 10) catch |err| {
 fatal("unable to parse '{}': {}", .{ arg, @errorName(err) });
 };
+have_version = true;
 } else if (mem.eql(u8, arg, "--stack")) {
 i += 1;
 if (i >= linker_args.items.len) {
@@ -1161,7 +1165,7 @@ pub fn buildOutputType(
 .self_exe_path = self_exe_path,
 .rand = &default_prng.random,
 .clang_passthrough_mode = arg_mode != .build,
-.version = version,
+.version = if (have_version) version else null,
 .libc_installation = if (libc_installation) |*lci| lci else null,
 .debug_cc = debug_cc,
 .debug_link = debug_link,
@@ -1228,7 +1232,9 @@ fn updateModule(gpa: *Allocator, module: *Module, zir_out_path: ?[]const u8) !vo
 }

 if (zir_out_path) |zop| {
-var new_zir_module = try zir.emit(gpa, module);
+const zig_module = module.bin_file.options.zig_module orelse
+fatal("-femit-zir with no zig source code", .{});
+var new_zir_module = try zir.emit(gpa, zig_module);
 defer new_zir_module.deinit(gpa);

 const baf = try io.BufferedAtomicFile.create(gpa, fs.cwd(), zop, .{});
@@ -549,7 +549,7 @@ pub const TestContext = struct {
 update_node.estimated_total_items = 5;
 var emit_node = update_node.start("emit", null);
 emit_node.activate();
-var new_zir_module = try zir.emit(allocator, module);
+var new_zir_module = try zir.emit(allocator, module.bin_file.options.zig_module.?);
 defer new_zir_module.deinit(allocator);
 emit_node.end();

@@ -3,7 +3,7 @@ const Value = @import("value.zig").Value;
 const assert = std.debug.assert;
 const Allocator = std.mem.Allocator;
 const Target = std.Target;
-const Module = @import("Module.zig");
+const Module = @import("ZigModule.zig");

 /// This is the raw data, with no bookkeeping, no memory awareness, no de-duplication.
 /// It's important for this type to be small.
@@ -6,7 +6,7 @@ const BigIntConst = std.math.big.int.Const;
 const BigIntMutable = std.math.big.int.Mutable;
 const Target = std.Target;
 const Allocator = std.mem.Allocator;
-const Module = @import("Module.zig");
+const Module = @import("ZigModule.zig");

 /// This is the raw data, with no bookkeeping, no memory awareness,
 /// no de-duplication, and no type system awareness.
@@ -10,7 +10,7 @@ const Type = @import("type.zig").Type;
 const Value = @import("value.zig").Value;
 const TypedValue = @import("TypedValue.zig");
 const ir = @import("ir.zig");
-const IrModule = @import("Module.zig");
+const IrModule = @import("ZigModule.zig");

 /// This struct is relevent only for the ZIR Module text format. It is not used for
 /// semantic analysis of Zig source code.
@@ -16,7 +16,7 @@ const TypedValue = @import("TypedValue.zig");
 const assert = std.debug.assert;
 const ir = @import("ir.zig");
 const zir = @import("zir.zig");
-const Module = @import("Module.zig");
+const Module = @import("ZigModule.zig");
 const Inst = ir.Inst;
 const Body = ir.Body;
 const trace = @import("tracy.zig").trace;
@@ -199,10 +199,10 @@ pub fn analyzeZirDecl(mod: *Module, decl: *Decl, src_decl: *zir.Decl) InnerError
 // We don't fully codegen the decl until later, but we do need to reserve a global
 // offset table index for it. This allows us to codegen decls out of dependency order,
 // increasing how many computations can be done in parallel.
-try mod.bin_file.allocateDeclIndexes(decl);
-try mod.work_queue.writeItem(.{ .codegen_decl = decl });
+try mod.comp.bin_file.allocateDeclIndexes(decl);
+try mod.comp.work_queue.writeItem(.{ .codegen_decl = decl });
 } else if (prev_type_has_bits) {
-mod.bin_file.freeDecl(decl);
+mod.comp.bin_file.freeDecl(decl);
 }

 return type_changed;
@@ -1,8 +1,11 @@
 const std = @import("std");
 const TestContext = @import("../../src-self-hosted/test.zig").TestContext;

-// self-hosted does not yet support PE executable files / COFF object files
-// or mach-o files. So we do these test cases cross compiling for x86_64-linux.
+// Self-hosted has differing levels of support for various architectures. For now we pass explicit
+// target parameters to each test case. At some point we will take this to the next level and have
+// a set of targets that all test cases run on unless specifically overridden. For now, each test
+// case applies to only the specified target.

 const linux_x64 = std.zig.CrossTarget{
 .cpu_arch = .x86_64,
 .os_tag = .linux,