const std = @import("std");
const builtin = @import("builtin");
const io = std.io;
const fs = std.fs;
const process = std.process;
const ChildProcess = std.ChildProcess;
const Progress = std.Progress;
const print = std.debug.print;
const mem = std.mem;
const testing = std.testing;
const Allocator = std.mem.Allocator;
const getExternalExecutor = std.zig.system.getExternalExecutor;

const max_doc_file_size = 10 * 1024 * 1024;

const obj_ext = builtin.object_format.fileExt(builtin.cpu.arch);
const tmp_dir_name = "docgen_tmp";

const usage =
    \\Usage: docgen [--zig exe] [--zig-lib-dir dir] [--skip-code-tests] input output
    \\
    \\   Generates an HTML document from a docgen template.
    \\
    \\Options:
    \\   -h, --help             Print this help and exit
    \\   --zig exe              Path to the zig compiler used for doctests
    \\   --zig-lib-dir dir      Zig lib directory to pass to the compiler
    \\   --skip-code-tests      Skip the doctests
    \\
;

fn fatal(comptime format: []const u8, args: anytype) noreturn {
    const stderr = io.getStdErr().writer();
    stderr.print("error: " ++ format ++ "\n", args) catch {};
    process.exit(1);
}

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    var args_it = try process.argsWithAllocator(allocator);
    if (!args_it.skip()) @panic("expected self arg");

    var zig_exe: []const u8 = "zig";
    var opt_zig_lib_dir: ?[]const u8 = null;
    var do_code_tests = true;
    var files = [_][]const u8{ "", "" };

    var i: usize = 0;
    while (args_it.next()) |arg| {
        if (mem.startsWith(u8, arg, "-")) {
            if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
                const stdout = io.getStdOut().writer();
                try stdout.writeAll(usage);
                process.exit(0);
            } else if (mem.eql(u8, arg, "--zig")) {
                if (args_it.next()) |param| {
                    zig_exe = param;
                } else {
                    fatal("expected parameter after --zig", .{});
                }
            } else if (mem.eql(u8, arg, "--zig-lib-dir")) {
                if (args_it.next()) |param| {
                    // Convert relative to absolute because this will be passed
                    // to a child process with a different cwd.
                    opt_zig_lib_dir = try fs.realpathAlloc(allocator, param);
                } else {
                    fatal("expected parameter after --zig-lib-dir", .{});
                }
            } else if (mem.eql(u8, arg, "--skip-code-tests")) {
                do_code_tests = false;
            } else {
                fatal("unrecognized option: '{s}'", .{arg});
            }
        } else {
            if (i > 1) {
                fatal("too many arguments", .{});
            }
            files[i] = arg;
            i += 1;
        }
    }
    if (i < 2) {
        fatal("not enough arguments", .{});
    }

    var in_file = try fs.cwd().openFile(files[0], .{ .mode = .read_only });
    defer in_file.close();

    var out_file = try fs.cwd().createFile(files[1], .{});
    defer out_file.close();

    const input_file_bytes = try in_file.reader().readAllAlloc(allocator, max_doc_file_size);

    var buffered_writer = io.bufferedWriter(out_file.writer());

    var tokenizer = Tokenizer.init(files[0], input_file_bytes);
    var toc = try genToc(allocator, &tokenizer);

    try fs.cwd().makePath(tmp_dir_name);
    defer fs.cwd().deleteTree(tmp_dir_name) catch {};

    try genHtml(allocator, &tokenizer, &toc, buffered_writer.writer(), zig_exe, opt_zig_lib_dir, do_code_tests);
    try buffered_writer.flush();
}
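// The template language recognized below is ordinary HTML with embedded tags
// of the form {#tag_name|param1|param2#}, for example:
//
//   {#header_open|Introduction#} ... {#nav#} ... {#builtin#}
//
// (Illustrative sketch only; the exact tag set and parameters accepted are
// defined by genToc further down.)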
const Token = struct {
    id: Id,
    start: usize,
    end: usize,

    const Id = enum {
        invalid,
        content,
        bracket_open,
        tag_content,
        separator,
        bracket_close,
        eof,
    };
};

const Tokenizer = struct {
    buffer: []const u8,
    index: usize,
    state: State,
    source_file_name: []const u8,
    code_node_count: usize,

    const State = enum {
        start,
        l_bracket,
        hash,
        tag_name,
        eof,
    };

    fn init(source_file_name: []const u8, buffer: []const u8) Tokenizer {
        return Tokenizer{
            .buffer = buffer,
            .index = 0,
            .state = .start,
            .source_file_name = source_file_name,
            .code_node_count = 0,
        };
    }

    fn next(self: *Tokenizer) Token {
        var result = Token{
            .id = .eof,
            .start = self.index,
            .end = undefined,
        };
        while (self.index < self.buffer.len) : (self.index += 1) {
            const c = self.buffer[self.index];
            switch (self.state) {
                .start => switch (c) {
                    '{' => {
                        self.state = .l_bracket;
                    },
                    else => {
                        result.id = .content;
                    },
                },
                .l_bracket => switch (c) {
                    '#' => {
                        if (result.id != .eof) {
                            self.index -= 1;
                            self.state = .start;
                            break;
                        } else {
                            result.id = .bracket_open;
                            self.index += 1;
                            self.state = .tag_name;
                            break;
                        }
                    },
                    else => {
                        result.id = .content;
                        self.state = .start;
                    },
                },
                .tag_name => switch (c) {
                    '|' => {
                        if (result.id != .eof) {
                            break;
                        } else {
                            result.id = .separator;
                            self.index += 1;
                            break;
                        }
                    },
                    '#' => {
                        self.state = .hash;
                    },
                    else => {
                        result.id = .tag_content;
                    },
                },
                .hash => switch (c) {
                    '}' => {
                        if (result.id != .eof) {
                            self.index -= 1;
                            self.state = .tag_name;
                            break;
                        } else {
                            result.id = .bracket_close;
                            self.index += 1;
                            self.state = .start;
                            break;
                        }
                    },
                    else => {
                        result.id = .tag_content;
                        self.state = .tag_name;
                    },
                },
                .eof => unreachable,
            }
        } else {
            switch (self.state) {
                .start, .l_bracket, .eof => {},
                else => {
                    result.id = .invalid;
                },
            }
            self.state = .eof;
        }
        result.end = self.index;
        return result;
    }

    const Location = struct {
        line: usize,
        column: usize,
        line_start: usize,
        line_end: usize,
    };

    fn getTokenLocation(self: *Tokenizer, token: Token) Location {
        var loc = Location{
            .line = 0,
            .column = 0,
            .line_start = 0,
            .line_end = 0,
        };
        for (self.buffer, 0..) |c, i| {
            if (i == token.start) {
                loc.line_end = i;
                while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
                return loc;
            }
            if (c == '\n') {
                loc.line += 1;
                loc.column = 0;
                loc.line_start = i + 1;
            } else {
                loc.column += 1;
            }
        }
        return loc;
    }
};
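// Illustrative sanity check for the tokenizer state machine above (a sketch;
// the input template is made up): a bare tag yields bracket_open, tag_content,
// bracket_close, then eof.
test "Tokenizer splits a bare tag into its three tokens" {
    var t = Tokenizer.init("test", "{#nav#}");
    try testing.expectEqual(Token.Id.bracket_open, t.next().id);
    const tag = t.next();
    try testing.expectEqual(Token.Id.tag_content, tag.id);
    try testing.expectEqualStrings("nav", t.buffer[tag.start..tag.end]);
    try testing.expectEqual(Token.Id.bracket_close, t.next().id);
    try testing.expectEqual(Token.Id.eof, t.next().id);
}

// Likewise for getTokenLocation: line and column are 0-based here and are
// adjusted to 1-based when printed by parseError below.
test "getTokenLocation finds the line containing a token" {
    var t = Tokenizer.init("test", "line1\n{#nav#}");
    _ = t.next(); // skip the leading content token
    const tok = t.next(); // the bracket_open on the second line
    const loc = t.getTokenLocation(tok);
    try testing.expectEqual(@as(usize, 1), loc.line);
    try testing.expectEqual(@as(usize, 0), loc.column);
}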
fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: anytype) anyerror {
    const loc = tokenizer.getTokenLocation(token);
    const args_prefix = .{ tokenizer.source_file_name, loc.line + 1, loc.column + 1 };
    print("{s}:{d}:{d}: error: " ++ fmt ++ "\n", args_prefix ++ args);
    if (loc.line_start <= loc.line_end) {
        print("{s}\n", .{tokenizer.buffer[loc.line_start..loc.line_end]});
        {
            var i: usize = 0;
            while (i < loc.column) : (i += 1) {
                print(" ", .{});
            }
        }
        {
            const caret_count = @min(token.end, loc.line_end) - token.start;
            var i: usize = 0;
            while (i < caret_count) : (i += 1) {
                print("~", .{});
            }
        }
        print("\n", .{});
    }
    return error.ParseError;
}

fn assertToken(tokenizer: *Tokenizer, token: Token, id: Token.Id) !void {
    if (token.id != id) {
        return parseError(tokenizer, token, "expected {s}, found {s}", .{ @tagName(id), @tagName(token.id) });
    }
}

fn eatToken(tokenizer: *Tokenizer, id: Token.Id) !Token {
    const token = tokenizer.next();
    try assertToken(tokenizer, token, id);
    return token;
}

const HeaderOpen = struct {
    name: []const u8,
    url: []const u8,
    n: usize,
};

const SeeAlsoItem = struct {
    name: []const u8,
    token: Token,
};

const ExpectedOutcome = enum {
    succeed,
    fail,
    build_fail,
};

const Code = struct {
    id: Id,
    name: []const u8,
    source_token: Token,
    just_check_syntax: bool,
    mode: std.builtin.OptimizeMode,
    link_objects: []const []const u8,
    target_str: ?[]const u8,
    link_libc: bool,
    link_mode: ?std.builtin.LinkMode,
    disable_cache: bool,
    verbose_cimport: bool,
    additional_options: []const []const u8,

    const Id = union(enum) {
        @"test",
        test_error: []const u8,
        test_safety: []const u8,
        exe: ExpectedOutcome,
        obj: ?[]const u8,
        lib,
    };
};

const Link = struct {
    url: []const u8,
    name: []const u8,
    token: Token,
};

const SyntaxBlock = struct {
    source_type: SourceType,
    name: []const u8,
    source_token: Token,

    const SourceType = enum {
        zig,
        c,
        peg,
        javascript,
    };
};

const Node = union(enum) {
    Content: []const u8,
    Nav,
    Builtin: Token,
    HeaderOpen: HeaderOpen,
    SeeAlso: []const SeeAlsoItem,
    Code: Code,
    Link: Link,
    InlineSyntax: Token,
    Shell: Token,
    SyntaxBlock: SyntaxBlock,
};

const Toc = struct {
    nodes: []Node,
    toc: []u8,
    urls: std.StringHashMap(Token),
};

const Action = enum {
    open,
    close,
};
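// Illustrative check of the eatToken helper (a sketch; the input template is
// made up): when the id matches, the token comes back and callers can slice
// its source text out of the buffer.
test "eatToken returns the token when the id matches" {
    var t = Tokenizer.init("test", "{#nav#}");
    const tok = try eatToken(&t, .bracket_open);
    try testing.expectEqualStrings("{#", t.buffer[tok.start..tok.end]);
}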
fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
    var urls = std.StringHashMap(Token).init(allocator);
    errdefer urls.deinit();

    var header_stack_size: usize = 0;
    var last_action: Action = .open;
    var last_columns: ?u8 = null;

    var toc_buf = std.ArrayList(u8).init(allocator);
    defer toc_buf.deinit();

    var toc = toc_buf.writer();

    var nodes = std.ArrayList(Node).init(allocator);
    defer nodes.deinit();

    try toc.writeByte('\n');

    while (true) {
        const token = tokenizer.next();
        switch (token.id) {
            .eof => {
                if (header_stack_size != 0) {
                    return parseError(tokenizer, token, "unbalanced headers", .{});
                }
                try toc.writeAll("    </ul>\n");
                break;
            },
            .content => {
                try nodes.append(Node{ .Content = tokenizer.buffer[token.start..token.end] });
            },
            .bracket_open => {
                const tag_token = try eatToken(tokenizer, .tag_content);
                const tag_name = tokenizer.buffer[tag_token.start..tag_token.end];

                if (mem.eql(u8, tag_name, "nav")) {
                    _ = try eatToken(tokenizer, .bracket_close);

                    try nodes.append(Node.Nav);
                } else if (mem.eql(u8, tag_name, "builtin")) {
                    _ = try eatToken(tokenizer, .bracket_close);
                    try nodes.append(Node{ .Builtin = tag_token });
                } else if (mem.eql(u8, tag_name, "header_open")) {
                    _ = try eatToken(tokenizer, .separator);
                    const content_token = try eatToken(tokenizer, .tag_content);
                    const content = tokenizer.buffer[content_token.start..content_token.end];
                    var columns: ?u8 = null;
                    while (true) {
                        const bracket_tok = tokenizer.next();
                        switch (bracket_tok.id) {
                            .bracket_close => break,
                            .separator => continue,
                            .tag_content => {
                                const param = tokenizer.buffer[bracket_tok.start..bracket_tok.end];
                                if (mem.eql(u8, param, "2col")) {
                                    columns = 2;
                                } else {
                                    return parseError(
                                        tokenizer,
                                        bracket_tok,
                                        "unrecognized header_open param: {s}",
                                        .{param},
                                    );
                                }
                            },
                            else => return parseError(tokenizer, bracket_tok, "invalid header_open token", .{}),
                        }
                    }

                    header_stack_size += 1;

                    const urlized = try urlize(allocator, content);
                    try nodes.append(Node{
                        .HeaderOpen = HeaderOpen{
                            .name = content,
                            .url = urlized,
                            .n = header_stack_size + 1, // highest-level section headers start at h2
                        },
                    });
                    if (try urls.fetchPut(urlized, tag_token)) |kv| {
                        parseError(tokenizer, tag_token, "duplicate header url: #{s}", .{urlized}) catch {};
                        parseError(tokenizer, kv.value, "other tag here", .{}) catch {};
                        return error.ParseError;
                    }
                    if (last_action == .open) {
                        try toc.writeByte('\n');
                        try toc.writeByteNTimes(' ', header_stack_size * 4);
                        if (last_columns) |n| {
                            try toc.print("