move installation logic to the build script where it belongs

* build.zig: introduce a `-Dflat` option which makes the installation
  layout match what we ship in the download tarballs. This allows
  deleting a bunch of shell script logic from the CI.
  - for example, it puts the executable directly at prefix/zig rather
    than prefix/bin/zig, and it additionally installs prefix/LICENSE
    (see the sketch after this list).
* build.zig: by default, also install the std lib documentation to
  doc/std/
  - this can be disabled with `-Dno-autodocs`, mirroring the existing
    `-Dno-langref` option.
* build.zig: add `std-docs` and `langref` steps which build and install
  the std lib autodocs and langref to prefix/doc/std and
  prefix/doc/langref.html, respectively.
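
The flat layout boils down to redirecting install destinations in
build.zig. A minimal sketch, condensed from the hunks below (option
descriptions abbreviated):

    const flat = b.option(bool, "flat", "Install in a layout suited for release tarballs") orelse false;

    // With -Dflat the compiler lands at prefix/zig instead of
    // prefix/bin/zig, lib/ files land in prefix/lib instead of
    // prefix/lib/zig, and the license ships alongside them.
    const install_exe = b.addInstallArtifact(exe);
    if (flat) install_exe.dest_dir = .prefix;
    b.getInstallStep().dependOn(&install_exe.step);
    if (flat) b.installFile("LICENSE", "LICENSE");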

* std.Build: implement proper handling of `-femit-docs` using the
  LazyPath system. This is a breaking change; a usage sketch follows
  this list.
  - this is a partial implementation of #16351
* frontend: fix the handling of Autodocs with respect to caching, and
  put the artifacts in the proper location so that they integrate with
  the build system.
  - closes #15864
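
A sketch of how the new API is consumed in build.zig (it mirrors the
hunk below): the documentation output is requested lazily via
getOutputDocs() and installed with addInstallDirectory, so the build
system owns caching and placement instead of shell scripts.

    const autodoc_test = b.addTest(.{
        .root_source_file = .{ .path = "lib/std/std.zig" },
        .target = target,
    });
    autodoc_test.emit_bin = .no_emit; // only the docs output is wanted
    const install_std_docs = b.addInstallDirectory(.{
        .source_dir = autodoc_test.getOutputDocs(), // resolved when the step runs
        .install_dir = .prefix,
        .install_subdir = "doc/std",
    });
    b.getInstallStep().dependOn(&install_std_docs.step);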

* CI: delete the logic for autodocs since it is now handled by
  build.zig and enabled by default.
  - in the future we should strive to delete nearly all of the CI shell
    script logic in favor of `zig build` commands.
* CI: pass `-DZIG_NO_LIB=ON`/`-Dno-lib` everywhere except the one
  command where we actually want to generate the langref and autodocs.
  Generating the langref currently takes 14 minutes (why?!), so we
  don't want to do that more times than necessary. The corresponding
  build.zig gating is sketched after this list.
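
The `-Dno-lib` plumbing the CI relies on is plain option gating in
build.zig (condensed from the first hunk below); `-Dno-langref` and
`-Dno-autodocs` default to whatever `-Dno-lib` was set to:

    const skip_install_lib_files = b.option(bool, "no-lib", "skip copying of lib/ files and langref to installation prefix. Useful for development") orelse false;
    const skip_install_langref = b.option(bool, "no-langref", "skip copying of langref to the installation prefix") orelse skip_install_lib_files;
    const skip_install_autodocs = b.option(bool, "no-autodocs", "skip copying of standard library autodocs to the installation prefix") orelse skip_install_lib_files;

    if (!skip_install_langref) b.getInstallStep().dependOn(&install_langref.step);
    if (!skip_install_autodocs) b.getInstallStep().dependOn(&install_std_docs.step);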

* Autodoc: fix the use of a global variable; it works fine as a local
  variable instead.
  - note that in the future we will want to run Autodoc concurrently
    using the job system, but for now the principle of YAGNI dictates
    that we don't have an init()/deinit() API and instead simply call
    the one function that does the work (sketched after this list).
* Autodoc: only run it when there are no compile errors.
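
The resulting shape of the Autodoc entry point and its call site,
condensed from the hunks below (error handling and the static file
copies are elided):

    // Autodoc: the arena is now a local, freed when generation
    // finishes, and there is a single generate() entry point instead
    // of an init()/deinit() pair.
    pub fn generate(cm: *CompilationModule, output_dir: std.fs.Dir) !void {
        var arena_allocator = std.heap.ArenaAllocator.init(cm.gpa);
        defer arena_allocator.deinit();
        var autodoc: Autodoc = .{
            .comp_module = cm,
            .arena = arena_allocator.allocator(),
        };
        try autodoc.generateZirData(output_dir);
    }

    // Call site in Compilation.update(): run only after a successful
    // flush, i.e. when there are no compile errors, and only when
    // -femit-docs requested an output location.
    if (comp.totalErrorCount() != 0) return;
    if (comp.bin_file.options.docs_emit) |emit| {
        var dir = try emit.directory.handle.makeOpenPath(emit.sub_path, .{});
        defer dir.close();
        try Autodoc.generate(module, dir);
    }
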
Andrew Kelley 2023-07-21 20:29:42 -07:00
parent 1291f4aca4
commit 6e4fff6ba6
16 changed files with 216 additions and 138 deletions

View File

@ -24,12 +24,14 @@ pub fn build(b: *std.Build) !void {
const optimize = b.standardOptimizeOption(.{});
const flat = b.option(bool, "flat", "Put files into the installation prefix in a manner suited for upstream distribution rather than a posix file system hierarchy standard") orelse false;
const single_threaded = b.option(bool, "single-threaded", "Build artifacts that run in single threaded mode");
const use_zig_libcxx = b.option(bool, "use-zig-libcxx", "If libc++ is needed, use zig's bundled version, don't try to integrate with the system") orelse false;
const test_step = b.step("test", "Run all the tests");
const skip_install_lib_files = b.option(bool, "no-lib", "skip copying of lib/ files and langref to installation prefix. Useful for development") orelse false;
const skip_install_langref = b.option(bool, "no-langref", "skip copying of langref to the installation prefix") orelse skip_install_lib_files;
const skip_install_autodocs = b.option(bool, "no-autodocs", "skip copying of standard library autodocs to the installation prefix") orelse skip_install_lib_files;
const no_bin = b.option(bool, "no-bin", "skip emitting compiler binary") orelse false;
const docgen_exe = b.addExecutable(.{
@ -52,8 +54,34 @@ pub fn build(b: *std.Build) !void {
b.getInstallStep().dependOn(&install_langref.step);
}
const docs_step = b.step("docs", "Build documentation");
docs_step.dependOn(&docgen_cmd.step);
const autodoc_test = b.addTest(.{
.root_source_file = .{ .path = "lib/std/std.zig" },
.target = target,
});
autodoc_test.overrideZigLibDir("lib");
autodoc_test.emit_bin = .no_emit; // https://github.com/ziglang/zig/issues/16351
const install_std_docs = b.addInstallDirectory(.{
.source_dir = autodoc_test.getOutputDocs(),
.install_dir = .prefix,
.install_subdir = "doc/std",
});
if (!skip_install_autodocs) {
b.getInstallStep().dependOn(&install_std_docs.step);
}
if (flat) {
b.installFile("LICENSE", "LICENSE");
}
const langref_step = b.step("langref", "Build and install the language reference");
langref_step.dependOn(&install_langref.step);
const std_docs_step = b.step("std-docs", "Build and install the standard library documentation");
std_docs_step.dependOn(&install_std_docs.step);
const docs_step = b.step("docs", "Build and install documentation");
docs_step.dependOn(langref_step);
docs_step.dependOn(std_docs_step);
const check_case_exe = b.addExecutable(.{
.name = "check-case",
@ -104,10 +132,10 @@ pub fn build(b: *std.Build) !void {
const config_h_path_option = b.option([]const u8, "config_h", "Path to the generated config.h");
if (!skip_install_lib_files) {
b.installDirectory(InstallDirectoryOptions{
b.installDirectory(.{
.source_dir = .{ .path = "lib" },
.install_dir = .lib,
.install_subdir = "zig",
.install_dir = if (flat) .prefix else .lib,
.install_subdir = if (flat) "lib" else "zig",
.exclude_extensions = &[_][]const u8{
// exclude files from lib/std/compress/testdata
".gz",
@ -167,6 +195,9 @@ pub fn build(b: *std.Build) !void {
exe.pie = pie;
exe.sanitize_thread = sanitize_thread;
exe.entitlements = entitlements;
// TODO -femit-bin/-fno-emit-bin should be inferred by the build system
// based on whether or not the exe is run or installed.
// https://github.com/ziglang/zig/issues/16351
if (no_bin) exe.emit_bin = .no_emit;
exe.build_id = b.option(
@ -175,7 +206,13 @@ pub fn build(b: *std.Build) !void {
"Request creation of '.note.gnu.build-id' section",
);
b.installArtifact(exe);
if (!no_bin) {
const install_exe = b.addInstallArtifact(exe);
if (flat) {
install_exe.dest_dir = .prefix;
}
b.getInstallStep().dependOn(&install_exe.step);
}
test_step.dependOn(&exe.step);

View File

@ -40,6 +40,7 @@ cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
# Now cmake will use zig as the C/C++ compiler. We reset the environment variables
@ -49,16 +50,18 @@ unset CXX
ninja install
# TODO: move this to a build.zig step (check-fmt)
echo "Looking for non-conforming code formatting..."
stage3-debug/bin/zig fmt --check .. \
--exclude ../test/cases/ \
--exclude ../build-debug
# simultaneously test building self-hosted without LLVM and with 32-bit arm
stage3-debug/bin/zig build -Dtarget=arm-linux-musleabihf
stage3-debug/bin/zig build \
-Dtarget=arm-linux-musleabihf \
-Dno-lib
# TODO: add -fqemu back to this line
stage3-debug/bin/zig build test docs \
--maxrss 24696061952 \
-fwasmtime \
@ -68,10 +71,8 @@ stage3-debug/bin/zig build test docs \
--zig-lib-dir "$(pwd)/../lib"
# Look for HTML errors.
tidy --drop-empty-elements no -qe "stage3-debug/doc/langref.html"
# Produce the experimental std lib documentation.
stage3-debug/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib
# TODO: move this to a build.zig flag (-Denable-tidy)
tidy --drop-empty-elements no -qe "zig-out/doc/langref.html"
# Ensure that updating the wasm binary from this commit will result in a viable build.
stage3-debug/bin/zig build update-zig1
@ -91,6 +92,7 @@ cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
unset CC
@ -102,6 +104,7 @@ stage3/bin/zig test ../test/behavior.zig -I../test
stage3/bin/zig build -p stage4 \
-Dstatic-llvm \
-Dtarget=native-native-musl \
-Dno-lib \
--search-prefix "$PREFIX" \
--zig-lib-dir "$(pwd)/../lib"
stage4/bin/zig test ../test/behavior.zig -I../test

View File

@ -40,6 +40,7 @@ cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
# Now cmake will use zig as the C/C++ compiler. We reset the environment variables
@ -49,16 +50,18 @@ unset CXX
ninja install
# TODO: move this to a build.zig step (check-fmt)
echo "Looking for non-conforming code formatting..."
stage3-release/bin/zig fmt --check .. \
--exclude ../test/cases/ \
--exclude ../build-release
# simultaneously test building self-hosted without LLVM and with 32-bit arm
stage3-release/bin/zig build -Dtarget=arm-linux-musleabihf
stage3-release/bin/zig build \
-Dtarget=arm-linux-musleabihf \
-Dno-lib
# TODO: add -fqemu back to this line
stage3-release/bin/zig build test docs \
--maxrss 24696061952 \
-fwasmtime \
@ -68,10 +71,8 @@ stage3-release/bin/zig build test docs \
--zig-lib-dir "$(pwd)/../lib"
# Look for HTML errors.
tidy --drop-empty-elements no -qe "stage3-release/doc/langref.html"
# Produce the experimental std lib documentation.
stage3-release/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib
# TODO: move this to a build.zig flag (-Denable-tidy)
tidy --drop-empty-elements no -qe "zig-out/doc/langref.html"
# Ensure that updating the wasm binary from this commit will result in a viable build.
stage3-release/bin/zig build update-zig1
@ -91,6 +92,7 @@ cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
unset CC
@ -102,6 +104,7 @@ stage3/bin/zig test ../test/behavior.zig -I../test
stage3/bin/zig build -p stage4 \
-Dstatic-llvm \
-Dtarget=native-native-musl \
-Dno-lib \
--search-prefix "$PREFIX" \
--zig-lib-dir "$(pwd)/../lib"
stage4/bin/zig test ../test/behavior.zig -I../test

View File

@ -39,6 +39,7 @@ PATH="$HOME/local/bin:$PATH" cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
$HOME/local/bin/ninja install
@ -49,6 +50,3 @@ stage3-debug/bin/zig build test docs \
-Dstatic-llvm \
-Dskip-non-native \
--search-prefix "$PREFIX"
# Produce the experimental std lib documentation.
stage3-debug/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib

View File

@ -39,6 +39,7 @@ PATH="$HOME/local/bin:$PATH" cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
$HOME/local/bin/ninja install
@ -50,9 +51,6 @@ stage3-release/bin/zig build test docs \
-Dskip-non-native \
--search-prefix "$PREFIX"
# Produce the experimental std lib documentation.
stage3-release/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib
# Ensure that stage3 and stage4 are byte-for-byte identical.
stage3-release/bin/zig build \
--prefix stage4-release \

View File

@ -55,7 +55,8 @@ $Env:ZIG_LOCAL_CACHE_DIR="$(Get-Location)\zig-local-cache"
-DZIG_AR_WORKAROUND=ON `
-DZIG_TARGET_TRIPLE="$TARGET" `
-DZIG_TARGET_MCPU="$MCPU" `
-DZIG_STATIC=ON
-DZIG_STATIC=ON `
-DZIG_NO_LIB=ON
CheckLastExitCode
ninja install
@ -69,11 +70,3 @@ Write-Output "Main test suite..."
-Dskip-non-native `
-Denable-symlinks-windows
CheckLastExitCode
Write-Output "Testing Autodocs..."
& "stage3-release\bin\zig.exe" test "..\lib\std\std.zig" `
--zig-lib-dir "$ZIG_LIB_DIR" `
-femit-docs `
-fno-emit-bin
CheckLastExitCode

View File

@ -40,6 +40,7 @@ cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
# Now cmake will use zig as the C/C++ compiler. We reset the environment variables
@ -49,13 +50,16 @@ unset CXX
ninja install
# TODO: move this to a build.zig step (check-fmt)
echo "Looking for non-conforming code formatting..."
stage3-debug/bin/zig fmt --check .. \
--exclude ../test/cases/ \
--exclude ../build-debug
# simultaneously test building self-hosted without LLVM and with 32-bit arm
stage3-debug/bin/zig build -Dtarget=arm-linux-musleabihf
stage3-debug/bin/zig build \
-Dtarget=arm-linux-musleabihf \
-Dno-lib
stage3-debug/bin/zig build test docs \
--maxrss 21000000000 \
@ -67,10 +71,8 @@ stage3-debug/bin/zig build test docs \
--zig-lib-dir "$(pwd)/../lib"
# Look for HTML errors.
tidy --drop-empty-elements no -qe "stage3-debug/doc/langref.html"
# Produce the experimental std lib documentation.
stage3-debug/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib
# TODO: move this to a build.zig flag (-Denable-tidy)
tidy --drop-empty-elements no -qe "zig-out/doc/langref.html"
# Ensure that updating the wasm binary from this commit will result in a viable build.
stage3-debug/bin/zig build update-zig1
@ -90,6 +92,7 @@ cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
unset CC
@ -101,6 +104,7 @@ stage3/bin/zig test ../test/behavior.zig -I../test
stage3/bin/zig build -p stage4 \
-Dstatic-llvm \
-Dtarget=native-native-musl \
-Dno-lib \
--search-prefix "$PREFIX" \
--zig-lib-dir "$(pwd)/../lib"
stage4/bin/zig test ../test/behavior.zig -I../test

View File

@ -40,6 +40,7 @@ cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
# Now cmake will use zig as the C/C++ compiler. We reset the environment variables
@ -49,6 +50,7 @@ unset CXX
ninja install
# TODO: move this to a build.zig step (check-fmt)
echo "Looking for non-conforming code formatting..."
stage3-release/bin/zig fmt --check .. \
--exclude ../test/cases/ \
@ -56,7 +58,9 @@ stage3-release/bin/zig fmt --check .. \
--exclude ../build-release
# simultaneously test building self-hosted without LLVM and with 32-bit arm
stage3-release/bin/zig build -Dtarget=arm-linux-musleabihf
stage3-release/bin/zig build \
-Dtarget=arm-linux-musleabihf \
-Dno-lib
stage3-release/bin/zig build test docs \
--maxrss 21000000000 \
@ -68,10 +72,8 @@ stage3-release/bin/zig build test docs \
--zig-lib-dir "$(pwd)/../lib"
# Look for HTML errors.
tidy --drop-empty-elements no -qe "stage3-release/doc/langref.html"
# Produce the experimental std lib documentation.
stage3-release/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib
# TODO: move this to a build.zig flag (-Denable-tidy)
tidy --drop-empty-elements no -qe "zig-out/doc/langref.html"
# Ensure that stage3 and stage4 are byte-for-byte identical.
stage3-release/bin/zig build \
@ -107,6 +109,7 @@ cmake .. \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON \
-GNinja
unset CC
@ -118,6 +121,7 @@ stage3/bin/zig test ../test/behavior.zig -I../test
stage3/bin/zig build -p stage4 \
-Dstatic-llvm \
-Dtarget=native-native-musl \
-Dno-lib \
--search-prefix "$PREFIX" \
--zig-lib-dir "$(pwd)/../lib"
stage4/bin/zig test ../test/behavior.zig -I../test

View File

@ -43,7 +43,8 @@ cmake .. \
-DCMAKE_CXX_COMPILER="$ZIG;c++;-target;$TARGET;-mcpu=$MCPU" \
-DZIG_TARGET_TRIPLE="$TARGET" \
-DZIG_TARGET_MCPU="$MCPU" \
-DZIG_STATIC=ON
-DZIG_STATIC=ON \
-DZIG_NO_LIB=ON
make $JOBS install
@ -54,9 +55,6 @@ stage3/bin/zig build test docs \
-Dskip-non-native \
--search-prefix "$PREFIX"
# Produce the experimental std lib documentation.
stage3/bin/zig test ../lib/std/std.zig -femit-docs -fno-emit-bin --zig-lib-dir ../lib
# Ensure that stage3 and stage4 are byte-for-byte identical.
stage3/bin/zig build \
--prefix stage4 \

View File

@ -45,7 +45,8 @@ Set-Location -Path 'build-debug'
-DCMAKE_CXX_COMPILER="$($ZIG -Replace "\\", "/");c++;-target;$TARGET;-mcpu=$MCPU" `
-DZIG_TARGET_TRIPLE="$TARGET" `
-DZIG_TARGET_MCPU="$MCPU" `
-DZIG_STATIC=ON
-DZIG_STATIC=ON `
-DZIG_NO_LIB=ON
CheckLastExitCode
ninja install
@ -60,13 +61,6 @@ Write-Output "Main test suite..."
-Denable-symlinks-windows
CheckLastExitCode
Write-Output "Testing Autodocs..."
& "stage3-debug\bin\zig.exe" test "..\lib\std\std.zig" `
--zig-lib-dir "$ZIG_LIB_DIR" `
-femit-docs `
-fno-emit-bin
CheckLastExitCode
Write-Output "Build x86_64-windows-msvc behavior tests using the C backend..."
& "stage3-debug\bin\zig.exe" test `
..\test\behavior.zig `

View File

@ -45,7 +45,8 @@ Set-Location -Path 'build-release'
-DCMAKE_CXX_COMPILER="$($ZIG -Replace "\\", "/");c++;-target;$TARGET;-mcpu=$MCPU" `
-DZIG_TARGET_TRIPLE="$TARGET" `
-DZIG_TARGET_MCPU="$MCPU" `
-DZIG_STATIC=ON
-DZIG_STATIC=ON `
-DZIG_NO_LIB=ON
CheckLastExitCode
ninja install
@ -60,13 +61,6 @@ Write-Output "Main test suite..."
-Denable-symlinks-windows
CheckLastExitCode
Write-Output "Testing Autodocs..."
& "stage3-release\bin\zig.exe" test "..\lib\std\std.zig" `
--zig-lib-dir "$ZIG_LIB_DIR" `
-femit-docs `
-fno-emit-bin
CheckLastExitCode
Write-Output "Build x86_64-windows-msvc behavior tests using the C backend..."
& "stage3-release\bin\zig.exe" test `
..\test\behavior.zig `

View File

@ -49,7 +49,6 @@ verbose_cc: bool,
emit_analysis: EmitOption = .default,
emit_asm: EmitOption = .default,
emit_bin: EmitOption = .default,
emit_docs: EmitOption = .default,
emit_implib: EmitOption = .default,
emit_llvm_bc: EmitOption = .default,
emit_llvm_ir: EmitOption = .default,
@ -217,6 +216,7 @@ output_lib_path_source: GeneratedFile,
output_h_path_source: GeneratedFile,
output_pdb_path_source: GeneratedFile,
output_dirname_source: GeneratedFile,
generated_docs: ?*GeneratedFile,
pub const CSourceFiles = struct {
files: []const []const u8,
@ -433,7 +433,7 @@ pub fn create(owner: *std.Build, options: Options) *Compile {
}) catch @panic("OOM");
const self = owner.allocator.create(Compile) catch @panic("OOM");
self.* = Compile{
self.* = .{
.strip = null,
.unwind_tables = null,
.verbose_link = false,
@ -486,6 +486,7 @@ pub fn create(owner: *std.Build, options: Options) *Compile {
.output_h_path_source = GeneratedFile{ .step = &self.step },
.output_pdb_path_source = GeneratedFile{ .step = &self.step },
.output_dirname_source = GeneratedFile{ .step = &self.step },
.generated_docs = null,
.target_info = target_info,
@ -1004,6 +1005,15 @@ pub fn getOutputPdbSource(self: *Compile) FileSource {
return .{ .generated = &self.output_pdb_path_source };
}
pub fn getOutputDocs(self: *Compile) FileSource {
assert(self.generated_docs == null); // This function may only be called once.
const arena = self.step.owner.allocator;
const generated_file = arena.create(GeneratedFile) catch @panic("OOM");
generated_file.* = .{ .step = &self.step };
self.generated_docs = generated_file;
return .{ .generated = generated_file };
}
pub fn addAssemblyFile(self: *Compile, path: []const u8) void {
const b = self.step.owner;
self.link_objects.append(.{
@ -1509,7 +1519,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
if (self.emit_analysis.getArg(b, "emit-analysis")) |arg| try zig_args.append(arg);
if (self.emit_asm.getArg(b, "emit-asm")) |arg| try zig_args.append(arg);
if (self.emit_bin.getArg(b, "emit-bin")) |arg| try zig_args.append(arg);
if (self.emit_docs.getArg(b, "emit-docs")) |arg| try zig_args.append(arg);
if (self.generated_docs != null) try zig_args.append("-femit-docs");
if (self.emit_implib.getArg(b, "emit-implib")) |arg| try zig_args.append(arg);
if (self.emit_llvm_bc.getArg(b, "emit-llvm-bc")) |arg| try zig_args.append(arg);
if (self.emit_llvm_ir.getArg(b, "emit-llvm-ir")) |arg| try zig_args.append(arg);
@ -2022,6 +2032,10 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
&.{ output_dir, self.out_pdb_filename },
);
}
if (self.generated_docs) |generated_docs| {
generated_docs.path = b.pathJoin(&.{ output_dir, "docs" });
}
}
if (self.kind == .lib and self.linkage != null and self.linkage.? == .dynamic and

View File

@ -15,7 +15,6 @@ const log = std.log.scoped(.autodoc);
const renderer = @import("autodoc/render_source.zig");
comp_module: *CompilationModule,
doc_location: Compilation.EmitLoc,
arena: std.mem.Allocator,
// The goal of autodoc is to fill up these arrays
@ -74,28 +73,23 @@ const Section = struct {
};
};
var arena_allocator: std.heap.ArenaAllocator = undefined;
pub fn init(m: *CompilationModule, doc_location: Compilation.EmitLoc) Autodoc {
arena_allocator = std.heap.ArenaAllocator.init(m.gpa);
return .{
.comp_module = m,
.doc_location = doc_location,
pub fn generate(cm: *CompilationModule, output_dir: std.fs.Dir) !void {
var arena_allocator = std.heap.ArenaAllocator.init(cm.gpa);
defer arena_allocator.deinit();
var autodoc: Autodoc = .{
.comp_module = cm,
.arena = arena_allocator.allocator(),
};
try autodoc.generateZirData(output_dir);
const lib_dir = cm.comp.zig_lib_directory.handle;
try lib_dir.copyFile("docs/main.js", output_dir, "main.js", .{});
try lib_dir.copyFile("docs/ziglexer.js", output_dir, "ziglexer.js", .{});
try lib_dir.copyFile("docs/commonmark.js", output_dir, "commonmark.js", .{});
try lib_dir.copyFile("docs/index.html", output_dir, "index.html", .{});
}
pub fn deinit(_: *Autodoc) void {
arena_allocator.deinit();
}
/// The entry point of the Autodoc generation process.
pub fn generateZirData(self: *Autodoc) !void {
if (self.doc_location.directory) |dir| {
if (dir.path) |path| {
log.debug("path: {s}", .{path});
}
}
fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
const root_src_dir = self.comp_module.main_pkg.root_src_directory;
const root_src_path = self.comp_module.main_pkg.root_src_path;
const joined_src_path = try root_src_dir.join(self.arena, &.{root_src_path});
@ -362,19 +356,6 @@ pub fn generateZirData(self: *Autodoc) !void {
.guide_sections = self.guide_sections,
};
const base_dir = self.doc_location.directory orelse
self.comp_module.zig_cache_artifact_directory;
base_dir.handle.makeDir(self.doc_location.basename) catch |e| switch (e) {
error.PathAlreadyExists => {},
else => |err| return err,
};
const output_dir = if (self.doc_location.directory) |d|
try d.handle.openDir(self.doc_location.basename, .{})
else
try self.comp_module.zig_cache_artifact_directory.handle.openDir(self.doc_location.basename, .{});
{
const data_js_f = try output_dir.createFile("data.js", .{});
defer data_js_f.close();
@ -386,7 +367,7 @@ pub fn generateZirData(self: *Autodoc) !void {
\\ var zigAnalysis=
, .{});
try std.json.stringifyArbitraryDepth(
arena_allocator.allocator(),
self.arena,
data,
.{
.whitespace = .minified,
@ -439,14 +420,6 @@ pub fn generateZirData(self: *Autodoc) !void {
try buffer.flush();
}
}
// copy main.js, index.html
var docs_dir = try self.comp_module.comp.zig_lib_directory.handle.openDir("docs", .{});
defer docs_dir.close();
try docs_dir.copyFile("main.js", output_dir, "main.js", .{});
try docs_dir.copyFile("ziglexer.js", output_dir, "ziglexer.js", .{});
try docs_dir.copyFile("commonmark.js", output_dir, "commonmark.js", .{});
try docs_dir.copyFile("index.html", output_dir, "index.html", .{});
}
/// Represents a chain of scopes, used to resolve decl references to the

View File

@ -118,6 +118,7 @@ self_exe_path: ?[]const u8,
whole_bin_sub_path: ?[]u8,
/// Same as `whole_bin_sub_path` but for implibs.
whole_implib_sub_path: ?[]u8,
whole_docs_sub_path: ?[]u8,
zig_lib_directory: Directory,
local_cache_directory: Directory,
global_cache_directory: Directory,
@ -179,7 +180,6 @@ emit_asm: ?EmitLoc,
emit_llvm_ir: ?EmitLoc,
emit_llvm_bc: ?EmitLoc,
emit_analysis: ?EmitLoc,
emit_docs: ?EmitLoc,
work_queue_wait_group: WaitGroup = .{},
astgen_wait_group: WaitGroup = .{},
@ -1119,6 +1119,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
cache.hash.addOptional(options.dwarf_format);
cache_helpers.addOptionalEmitLoc(&cache.hash, options.emit_bin);
cache_helpers.addOptionalEmitLoc(&cache.hash, options.emit_implib);
cache_helpers.addOptionalEmitLoc(&cache.hash, options.emit_docs);
cache.hash.addBytes(options.root_name);
if (options.target.os.tag == .wasi) cache.hash.add(wasi_exec_model);
// TODO audit this and make sure everything is in it
@ -1171,8 +1172,8 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
// For whole cache mode, it is still used for builtin.zig so that the file
// path to builtin.zig can remain consistent during a debugging session at
// runtime. However, we don't know where to put outputs from the linker
// or stage1 backend object files until the final cache hash, which is available
// after the compilation is complete.
// until the final cache hash, which is available after the
// compilation is complete.
//
// Therefore, in whole cache mode, we additionally create a temporary cache
// directory for these two kinds of build artifacts, and then rename it
@ -1346,6 +1347,8 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
};
}
// In case of whole cache mode, `whole_bin_sub_path` is used to distinguish
// between -femit-bin and -fno-emit-bin.
switch (cache_mode) {
.whole => break :blk null,
.incremental => {},
@ -1408,6 +1411,34 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
};
};
const docs_emit: ?link.Emit = blk: {
const emit_docs = options.emit_docs orelse break :blk null;
if (emit_docs.directory) |directory| {
break :blk .{
.directory = directory,
.sub_path = emit_docs.basename,
};
}
// This is here for the same reason as in `bin_file_emit` above.
switch (cache_mode) {
.whole => break :blk null,
.incremental => {},
}
// Use the same directory as the bin, if possible.
if (bin_file_emit) |x| break :blk .{
.directory = x.directory,
.sub_path = emit_docs.basename,
};
break :blk .{
.directory = module.?.zig_cache_artifact_directory,
.sub_path = emit_docs.basename,
};
};
// This is so that when doing `CacheMode.whole`, the mechanism in update()
// can use it for communicating the result directory via `bin_file.emit`.
// This is used to distinguish between -fno-emit-bin and -femit-bin
@ -1417,6 +1448,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
const whole_bin_sub_path: ?[]u8 = try prepareWholeEmitSubPath(arena, options.emit_bin);
// Same thing but for implibs.
const whole_implib_sub_path: ?[]u8 = try prepareWholeEmitSubPath(arena, options.emit_implib);
const whole_docs_sub_path: ?[]u8 = try prepareWholeEmitSubPath(arena, options.emit_docs);
var system_libs: std.StringArrayHashMapUnmanaged(SystemLib) = .{};
errdefer system_libs.deinit(gpa);
@ -1428,6 +1460,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
const bin_file = try link.File.openPath(gpa, .{
.emit = bin_file_emit,
.implib_emit = implib_emit,
.docs_emit = docs_emit,
.root_name = root_name,
.module = module,
.target = options.target,
@ -1552,11 +1585,11 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.bin_file = bin_file,
.whole_bin_sub_path = whole_bin_sub_path,
.whole_implib_sub_path = whole_implib_sub_path,
.whole_docs_sub_path = whole_docs_sub_path,
.emit_asm = options.emit_asm,
.emit_llvm_ir = options.emit_llvm_ir,
.emit_llvm_bc = options.emit_llvm_bc,
.emit_analysis = options.emit_analysis,
.emit_docs = options.emit_docs,
.work_queue = std.fifo.LinearFifo(Job, .Dynamic).init(gpa),
.anon_work_queue = std.fifo.LinearFifo(Job, .Dynamic).init(gpa),
.c_object_work_queue = std.fifo.LinearFifo(*CObject, .Dynamic).init(gpa),
@ -1940,7 +1973,7 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
};
};
// This updates the output directory for stage1 backend and linker outputs.
// This updates the output directory for linker outputs.
if (comp.bin_file.options.module) |module| {
module.zig_cache_artifact_directory = tmp_artifact_directory.?;
}
@ -1960,6 +1993,12 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
.sub_path = std.fs.path.basename(sub_path),
};
}
if (comp.whole_docs_sub_path) |sub_path| {
options.docs_emit = .{
.directory = tmp_artifact_directory.?,
.sub_path = std.fs.path.basename(sub_path),
};
}
var old_bin_file = comp.bin_file;
comp.bin_file = try link.File.openPath(comp.gpa, options);
old_bin_file.destroy();
@ -2064,16 +2103,6 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
return;
}
if (!build_options.only_c and !build_options.only_core_functionality) {
if (comp.emit_docs) |doc_location| {
if (comp.bin_file.options.module) |module| {
var autodoc = Autodoc.init(module, doc_location);
defer autodoc.deinit();
try autodoc.generateZirData();
}
}
}
// Flush takes care of -femit-bin, but we still have -femit-llvm-ir, -femit-llvm-bc, and
// -femit-asm to handle, in the case of C objects.
comp.emitOthers();
@ -2122,12 +2151,21 @@ pub fn update(comp: *Compilation, main_progress_node: *std.Progress.Node) !void
};
try comp.flush(main_progress_node);
if (comp.totalErrorCount() != 0) return;
// TODO: do this in a separate job during performAllTheWork(). The
// file copies at the end of generate() can also be extracted to
// separate jobs
if (!build_options.only_c and !build_options.only_core_functionality) {
if (comp.bin_file.options.docs_emit) |emit| {
var dir = try emit.directory.handle.makeOpenPath(emit.sub_path, .{});
defer dir.close();
try Autodoc.generate(module, dir);
}
}
} else {
try comp.flush(main_progress_node);
}
if (comp.totalErrorCount() != 0) {
return;
if (comp.totalErrorCount() != 0) return;
}
// Failure here only means an unnecessary cache miss.
@ -2190,6 +2228,15 @@ fn wholeCacheModeSetBinFilePath(comp: *Compilation, digest: *const [Cache.hex_di
.sub_path = sub_path,
};
}
if (comp.whole_docs_sub_path) |sub_path| {
@memcpy(sub_path[digest_start..][0..digest.len], digest);
comp.bin_file.options.docs_emit = .{
.directory = comp.local_cache_directory,
.sub_path = sub_path,
};
}
}
fn prepareWholeEmitSubPath(arena: Allocator, opt_emit: ?EmitLoc) error{OutOfMemory}!?[]u8 {
@ -2265,7 +2312,6 @@ fn addNonIncrementalStuffToCacheManifest(comp: *Compilation, man: *Cache.Manifes
cache_helpers.addOptionalEmitLoc(&man.hash, comp.emit_llvm_ir);
cache_helpers.addOptionalEmitLoc(&man.hash, comp.emit_llvm_bc);
cache_helpers.addOptionalEmitLoc(&man.hash, comp.emit_analysis);
cache_helpers.addOptionalEmitLoc(&man.hash, comp.emit_docs);
man.hash.addListOfBytes(comp.clang_argv);

View File

@ -71,8 +71,10 @@ pub const Emit = struct {
pub const Options = struct {
/// This is `null` when `-fno-emit-bin` is used.
emit: ?Emit,
/// This is `null` not building a Windows DLL, or when `-fno-emit-implib` is used.
/// This is `null` when not building a Windows DLL, or when `-fno-emit-implib` is used.
implib_emit: ?Emit,
/// This is non-null when `-femit-docs` is provided.
docs_emit: ?Emit,
target: std.Target,
output_mode: std.builtin.OutputMode,
link_mode: std.builtin.LinkMode,

View File

@ -622,7 +622,7 @@ const Emit = union(enum) {
}
};
fn resolve(emit: Emit, default_basename: []const u8) !Resolved {
fn resolve(emit: Emit, default_basename: []const u8, output_to_cache: bool) !Resolved {
var resolved: Resolved = .{ .data = null, .dir = null };
errdefer resolved.deinit();
@ -630,7 +630,10 @@ const Emit = union(enum) {
.no => {},
.yes_default_path => {
resolved.data = Compilation.EmitLoc{
.directory = .{ .path = null, .handle = fs.cwd() },
.directory = if (output_to_cache) null else .{
.path = null,
.handle = fs.cwd(),
},
.basename = default_basename,
};
},
@ -2750,7 +2753,7 @@ fn buildOutputType(
};
const default_h_basename = try std.fmt.allocPrint(arena, "{s}.h", .{root_name});
var emit_h_resolved = emit_h.resolve(default_h_basename) catch |err| {
var emit_h_resolved = emit_h.resolve(default_h_basename, output_to_cache) catch |err| {
switch (emit_h) {
.yes => |p| {
fatal("unable to open directory from argument '-femit-h', '{s}': {s}", .{
@ -2768,7 +2771,7 @@ fn buildOutputType(
defer emit_h_resolved.deinit();
const default_asm_basename = try std.fmt.allocPrint(arena, "{s}.s", .{root_name});
var emit_asm_resolved = emit_asm.resolve(default_asm_basename) catch |err| {
var emit_asm_resolved = emit_asm.resolve(default_asm_basename, output_to_cache) catch |err| {
switch (emit_asm) {
.yes => |p| {
fatal("unable to open directory from argument '-femit-asm', '{s}': {s}", .{
@ -2786,7 +2789,7 @@ fn buildOutputType(
defer emit_asm_resolved.deinit();
const default_llvm_ir_basename = try std.fmt.allocPrint(arena, "{s}.ll", .{root_name});
var emit_llvm_ir_resolved = emit_llvm_ir.resolve(default_llvm_ir_basename) catch |err| {
var emit_llvm_ir_resolved = emit_llvm_ir.resolve(default_llvm_ir_basename, output_to_cache) catch |err| {
switch (emit_llvm_ir) {
.yes => |p| {
fatal("unable to open directory from argument '-femit-llvm-ir', '{s}': {s}", .{
@ -2804,7 +2807,7 @@ fn buildOutputType(
defer emit_llvm_ir_resolved.deinit();
const default_llvm_bc_basename = try std.fmt.allocPrint(arena, "{s}.bc", .{root_name});
var emit_llvm_bc_resolved = emit_llvm_bc.resolve(default_llvm_bc_basename) catch |err| {
var emit_llvm_bc_resolved = emit_llvm_bc.resolve(default_llvm_bc_basename, output_to_cache) catch |err| {
switch (emit_llvm_bc) {
.yes => |p| {
fatal("unable to open directory from argument '-femit-llvm-bc', '{s}': {s}", .{
@ -2822,7 +2825,7 @@ fn buildOutputType(
defer emit_llvm_bc_resolved.deinit();
const default_analysis_basename = try std.fmt.allocPrint(arena, "{s}-analysis.json", .{root_name});
var emit_analysis_resolved = emit_analysis.resolve(default_analysis_basename) catch |err| {
var emit_analysis_resolved = emit_analysis.resolve(default_analysis_basename, output_to_cache) catch |err| {
switch (emit_analysis) {
.yes => |p| {
fatal("unable to open directory from argument '-femit-analysis', '{s}': {s}", .{
@ -2839,7 +2842,7 @@ fn buildOutputType(
};
defer emit_analysis_resolved.deinit();
var emit_docs_resolved = emit_docs.resolve("docs") catch |err| {
var emit_docs_resolved = emit_docs.resolve("docs", output_to_cache) catch |err| {
switch (emit_docs) {
.yes => |p| {
fatal("unable to open directory from argument '-femit-docs', '{s}': {s}", .{
@ -2873,7 +2876,7 @@ fn buildOutputType(
const default_implib_basename = try std.fmt.allocPrint(arena, "{s}.lib", .{root_name});
var emit_implib_resolved = switch (emit_implib) {
.no => Emit.Resolved{ .data = null, .dir = null },
.yes => |p| emit_implib.resolve(default_implib_basename) catch |err| {
.yes => |p| emit_implib.resolve(default_implib_basename, output_to_cache) catch |err| {
fatal("unable to open directory from argument '-femit-implib', '{s}': {s}", .{
p, @errorName(err),
});
@ -3566,7 +3569,21 @@ fn serveUpdateResults(s: *Server, comp: *Compilation) !void {
defer error_bundle.deinit(gpa);
if (error_bundle.errorMessageCount() > 0) {
try s.serveErrorBundle(error_bundle);
} else if (comp.bin_file.options.emit) |emit| {
return;
}
// This logic is a bit counter-intuitive because the protocol implies that
// each emitted artifact could possibly be in a different location, when in
// reality, there is only one artifact output directory, and the build
// system depends on that fact. So, until the protocol is changed to
// reflect this, this logic only needs to ensure that emit_bin_path is
// emitted for at least one thing, if there are any artifacts.
if (comp.bin_file.options.emit) |emit| {
const full_path = try emit.directory.join(gpa, &.{emit.sub_path});
defer gpa.free(full_path);
try s.serveEmitBinPath(full_path, .{
.flags = .{ .cache_hit = comp.last_update_was_cache_hit },
});
} else if (comp.bin_file.options.docs_emit) |emit| {
const full_path = try emit.directory.join(gpa, &.{emit.sub_path});
defer gpa.free(full_path);
try s.serveEmitBinPath(full_path, .{