2017-10-31 08:47:55 +00:00
|
|
|
const std = @import("std");
|
|
|
|
const io = std.io;
|
2017-05-01 18:12:38 +01:00
|
|
|
const builtin = @import("builtin");
|
2016-02-04 01:02:01 +00:00
|
|
|
|
2020-02-06 22:56:40 +00:00
|
|
|
/// I/O mode (blocking vs. evented) this test build was compiled with.
/// Exposed so the standard library picks up the test build's mode; `main`
/// below also switches on it to decide how to invoke async tests.
pub const io_mode: io.Mode = builtin.test_io_mode;

/// Count of error-severity messages routed through the `log` override in
/// this file. A nonzero count makes `main` exit with status 1 even when
/// every test passed.
var log_err_count: usize = 0;
|
|
|
|
|
2021-07-24 06:23:03 +01:00
|
|
|
/// Test-runner entry point: runs every function in `builtin.test_functions`,
/// reporting progress via `std.Progress`, counting passes/skips/failures and
/// per-test allocator leaks, then exits nonzero on any failure, leak, or
/// logged error.
pub fn main() void {
    // Self-hosted backends (except LLVM-on-non-wasm and the C backend) use
    // the simpler `main2` path, which exercises fewer language features.
    if (builtin.zig_backend != .stage1 and
        (builtin.zig_backend != .stage2_llvm or builtin.cpu.arch == .wasm32) and
        builtin.zig_backend != .stage2_c)
    {
        return main2() catch @panic("test failure");
    }
    const test_fn_list = builtin.test_functions;
    var ok_count: usize = 0;
    var skip_count: usize = 0;
    var fail_count: usize = 0;
    var progress = std.Progress{
        .dont_print_on_dumb = true,
    };
    const root_node = progress.start("Test", test_fn_list.len);
    // Fall back to plain line-per-test output when there is no capable tty.
    const have_tty = progress.terminal != null and
        (progress.supports_ansi_escape_codes or progress.is_windows_terminal);

    // Shared frame buffer for evented (async) tests; grown on demand below.
    var async_frame_buffer: []align(std.Target.stack_align) u8 = undefined;
    // TODO this is on the next line (using `undefined` above) because otherwise zig incorrectly
    // ignores the alignment of the slice.
    async_frame_buffer = &[_]u8{};

    var leaks: usize = 0;
    for (test_fn_list) |test_fn, i| {
        // Fresh testing allocator per test; deinit() reports whether this
        // test leaked, which we tally after the test body runs.
        std.testing.allocator_instance = .{};
        defer {
            if (std.testing.allocator_instance.deinit()) {
                leaks += 1;
            }
        }
        std.testing.log_level = .warn;

        var test_node = root_node.start(test_fn.name, 0);
        test_node.activate();
        progress.refresh();
        if (!have_tty) {
            std.debug.print("{d}/{d} {s}... ", .{ i + 1, test_fn_list.len, test_fn.name });
        }
        // Async tests report a frame size; run them via @asyncCall in evented
        // mode, and skip them entirely in blocking mode.
        const result = if (test_fn.async_frame_size) |size| switch (io_mode) {
            .evented => blk: {
                if (async_frame_buffer.len < size) {
                    std.heap.page_allocator.free(async_frame_buffer);
                    async_frame_buffer = std.heap.page_allocator.alignedAlloc(u8, std.Target.stack_align, size) catch @panic("out of memory");
                }
                const casted_fn = @ptrCast(fn () callconv(.Async) anyerror!void, test_fn.func);
                break :blk await @asyncCall(async_frame_buffer, {}, casted_fn, .{});
            },
            .blocking => {
                skip_count += 1;
                test_node.end();
                progress.log("SKIP (async test)\n", .{});
                continue;
            },
        } else test_fn.func();
        if (result) |_| {
            ok_count += 1;
            test_node.end();
            if (!have_tty) std.debug.print("OK\n", .{});
        } else |err| switch (err) {
            // error.SkipZigTest is the sanctioned way for a test to opt out.
            error.SkipZigTest => {
                skip_count += 1;
                progress.log("SKIP\n", .{});
                test_node.end();
            },
            else => {
                fail_count += 1;
                progress.log("FAIL ({s})\n", .{@errorName(err)});
                if (@errorReturnTrace()) |trace| {
                    std.debug.dumpStackTrace(trace.*);
                }
                test_node.end();
            },
        }
    }
    root_node.end();
    if (ok_count == test_fn_list.len) {
        std.debug.print("All {d} tests passed.\n", .{ok_count});
    } else {
        std.debug.print("{d} passed; {d} skipped; {d} failed.\n", .{ ok_count, skip_count, fail_count });
    }
    if (log_err_count != 0) {
        std.debug.print("{d} errors were logged.\n", .{log_err_count});
    }
    if (leaks != 0) {
        std.debug.print("{d} tests leaked memory.\n", .{leaks});
    }
    // Any failure, leak, or logged error makes the whole run fail.
    if (leaks != 0 or log_err_count != 0 or fail_count != 0) {
        std.process.exit(1);
    }
}
|
2020-07-09 05:01:13 +01:00
|
|
|
|
|
|
|
/// std.log handler for test builds: tallies error-severity messages into
/// `log_err_count` (checked by `main` after the run) and prints any message
/// at or above `std.testing.log_level` to stderr.
pub fn log(
    comptime message_level: std.log.Level,
    comptime scope: @Type(.EnumLiteral),
    comptime format: []const u8,
    args: anytype,
) void {
    const severity = @enumToInt(message_level);
    // Lower enum value == more severe; count everything at .err or worse.
    if (severity <= @enumToInt(std.log.Level.err)) {
        log_err_count += 1;
    }
    if (severity <= @enumToInt(std.testing.log_level)) {
        const prefix = "[" ++ @tagName(scope) ++ "] (" ++ @tagName(message_level) ++ "): ";
        std.debug.print(prefix ++ format ++ "\n", args);
    }
}
|
2021-05-17 23:28:22 +01:00
|
|
|
|
|
|
|
/// Minimal test-runner path for self-hosted backends: runs every test,
/// tallies skips and failures, prints a one-line summary on backends that
/// support it, and returns `error.TestsFailed` if anything failed.
pub fn main2() anyerror!void {
    var skip_total: usize = 0;
    var fail_total: usize = 0;
    // Simpler main(), exercising fewer language features, so that stage2 can handle it.
    for (builtin.test_functions) |test_fn| {
        test_fn.func() catch |err| {
            if (err == error.SkipZigTest) {
                skip_total += 1;
            } else {
                fail_total += 1;
            }
        };
    }
    // Only these backends are far enough along to print the summary.
    const print_summary = builtin.zig_backend == .stage2_wasm or
        builtin.zig_backend == .stage2_x86_64 or
        builtin.zig_backend == .stage2_aarch64 or
        builtin.zig_backend == .stage2_llvm or
        builtin.zig_backend == .stage2_c;
    if (print_summary) {
        const pass_total = builtin.test_functions.len - skip_total - fail_total;
        const stderr = std.io.getStdErr();
        // Best-effort reporting: ignore write errors so a broken stderr
        // cannot mask the test results themselves.
        writeInt(stderr, pass_total) catch {};
        stderr.writeAll(" passed; ") catch {};
        writeInt(stderr, skip_total) catch {};
        stderr.writeAll(" skipped; ") catch {};
        writeInt(stderr, fail_total) catch {};
        stderr.writeAll(" failed.\n") catch {};
    }
    if (fail_total != 0) {
        return error.TestsFailed;
    }
}
|
2022-01-18 05:17:02 +00:00
|
|
|
|
|
|
|
/// Writes `int` in decimal to `stderr` without going through std.fmt's
/// formatting machinery (kept deliberately simple for immature backends).
fn writeInt(stderr: std.fs.File, int: usize) anyerror!void {
    // Fill a scratch buffer from the back, least-significant digit first.
    var digits: [100]u8 = undefined;
    var pos: usize = digits.len;
    var remaining: usize = int;
    // Do-while shape: always emits at least one digit, so 0 prints as "0".
    while (true) {
        pos -= 1;
        digits[pos] = std.fmt.digitToChar(@intCast(u8, remaining % 10), .lower);
        remaining /= 10;
        if (remaining == 0) break;
    }
    try stderr.writeAll(digits[pos..]);
}
|