Liveness: add a liveness verification pass

This code only runs in a debug build of the Zig compiler, similar to LLVM module verification.

commit 02a8b66b00
parent 6f210b74ee
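The pass is gated on `std.debug.runtime_safety`, so it only runs in debug builds of the compiler (see the `ensureFuncBodyAnalyzed` hunk further down). A minimal sketch of that gating idiom, with a hypothetical `checkInvariants` standing in for the real `Liveness.Verify` call:

    const std = @import("std");

    // Hypothetical stand-in for an expensive internal consistency check;
    // the commit below runs Liveness.Verify in this position.
    fn checkInvariants() error{Invalid}!void {}

    pub fn main() !void {
        // `std.debug.runtime_safety` is a comptime-known constant, so the
        // whole block disappears from ReleaseFast/ReleaseSmall builds.
        if (std.debug.runtime_safety) {
            try checkInvariants();
        }
        std.debug.print("invariants ok (or not checked)\n", .{});
    }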
@@ -3092,6 +3092,7 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
     .file_failure,
     .sema_failure,
+    .liveness_failure,
     .codegen_failure,
     .dependency_failure,
     .sema_failure_retryable,
@@ -3142,7 +3143,7 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
     // emit-h only requires semantic analysis of the Decl to be complete,
     // it does not depend on machine code generation to succeed.
-    .codegen_failure, .codegen_failure_retryable, .complete => {
+    .liveness_failure, .codegen_failure, .codegen_failure_retryable, .complete => {
         const named_frame = tracy.namedFrame("emit_h_decl");
         defer named_frame.end();
@@ -14,6 +14,8 @@ const Allocator = std.mem.Allocator;
 const Air = @import("Air.zig");
 const Log2Int = std.math.Log2Int;

+pub const Verify = @import("Liveness/Verify.zig");
+
 /// This array is split into sets of 4 bits per AIR instruction.
 /// The MSB (0bX000) is whether the instruction is unreferenced.
 /// The LSB (0b000X) is the first operand, and so on, up to 3 operands. A set bit means the
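To make the 4-bit encoding described above concrete, here is a small standalone sketch (illustrative helpers, not the compiler's API) that extracts the "unreferenced" flag and the per-operand bits from one such 4-bit set:

    const std = @import("std");

    // Bit 3 (MSB) of a 4-bit set: the instruction is unreferenced.
    fn isUnreferenced(tomb: u4) bool {
        return (tomb & 0b1000) != 0;
    }

    // Bits 0..2: one bit per operand, starting from the LSB.
    fn operandBit(tomb: u4, operand_index: u2) bool {
        std.debug.assert(operand_index < 3); // only 3 operand bits per set
        return ((tomb >> operand_index) & 1) != 0;
    }

    test "decode a 4-bit liveness set" {
        const tomb: u4 = 0b1010; // unreferenced, second operand bit set
        try std.testing.expect(isUnreferenced(tomb));
        try std.testing.expect(!operandBit(tomb, 0));
        try std.testing.expect(operandBit(tomb, 1));
    }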
src/Liveness/Verify.zig (new file, 603 lines)
@@ -0,0 +1,603 @@
//! Verifies that liveness information is valid.

gpa: std.mem.Allocator,
air: Air,
liveness: Liveness,
live: LiveMap = .{},
blocks: std.AutoHashMapUnmanaged(Air.Inst.Index, LiveMap) = .{},

pub const Error = error{ LivenessInvalid, OutOfMemory };

pub fn deinit(self: *Verify) void {
    self.live.deinit(self.gpa);
    var block_it = self.blocks.valueIterator();
    while (block_it.next()) |block| block.deinit(self.gpa);
    self.blocks.deinit(self.gpa);
    self.* = undefined;
}

pub fn verify(self: *Verify) Error!void {
    self.live.clearRetainingCapacity();
    self.blocks.clearRetainingCapacity();
    try self.verifyBody(self.air.getMainBody());
    // We don't care about `self.live` now, because the main body is noreturn -
    // everything being dead was checked on `ret` etc.
    assert(self.blocks.count() == 0);
}

const LiveMap = std.AutoHashMapUnmanaged(Air.Inst.Index, void);
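Aside (not part of Verify.zig): the `live` map above is the core of the whole check. An instruction is inserted when it is defined and still referenced, and removed at the use that liveness marks as its death; `verifyOperand` further down reports an error when either set operation fails. A tiny sketch of that bookkeeping, using plain `u32` keys instead of `Air.Inst.Index` purely for illustration:

    const std = @import("std");

    test "live-set bookkeeping sketch" {
        const gpa = std.testing.allocator;
        var live: std.AutoHashMapUnmanaged(u32, void) = .{};
        defer live.deinit(gpa);

        // %1 is defined and has a later use: it becomes live.
        try live.putNoClobber(gpa, 1, {});

        // The use that liveness marks as %1's death must find it live and
        // removes it; a second "death" of %1 is the condition the verifier
        // reports as "dead operand reused and killed again".
        try std.testing.expect(live.remove(1));
        try std.testing.expect(!live.remove(1));
    }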

fn verifyBody(self: *Verify, body: []const Air.Inst.Index) Error!void {
    const tag = self.air.instructions.items(.tag);
    const data = self.air.instructions.items(.data);
    for (body) |inst| switch (tag[inst]) {
        // no operands
        .arg,
        .alloc,
        .ret_ptr,
        .constant,
        .const_ty,
        .breakpoint,
        .dbg_stmt,
        .dbg_inline_begin,
        .dbg_inline_end,
        .dbg_block_begin,
        .dbg_block_end,
        .fence,
        .ret_addr,
        .frame_addr,
        .wasm_memory_size,
        .err_return_trace,
        .save_err_return_trace_index,
        .c_va_start,
        .work_item_id,
        .work_group_size,
        .work_group_id,
        => try self.verifyInst(inst, .{ .none, .none, .none }),

        .trap, .unreach => {
            try self.verifyInst(inst, .{ .none, .none, .none });
            // This instruction terminates the function, so everything should be dead
            if (self.live.count() > 0) return invalid("%{}: instructions still alive", .{inst});
        },

        // unary
        .not,
        .bitcast,
        .load,
        .fpext,
        .fptrunc,
        .intcast,
        .trunc,
        .optional_payload,
        .optional_payload_ptr,
        .optional_payload_ptr_set,
        .errunion_payload_ptr_set,
        .wrap_optional,
        .unwrap_errunion_payload,
        .unwrap_errunion_err,
        .unwrap_errunion_payload_ptr,
        .unwrap_errunion_err_ptr,
        .wrap_errunion_payload,
        .wrap_errunion_err,
        .slice_ptr,
        .slice_len,
        .ptr_slice_len_ptr,
        .ptr_slice_ptr_ptr,
        .struct_field_ptr_index_0,
        .struct_field_ptr_index_1,
        .struct_field_ptr_index_2,
        .struct_field_ptr_index_3,
        .array_to_slice,
        .float_to_int,
        .float_to_int_optimized,
        .int_to_float,
        .get_union_tag,
        .clz,
        .ctz,
        .popcount,
        .byte_swap,
        .bit_reverse,
        .splat,
        .error_set_has_value,
        .addrspace_cast,
        .c_va_arg,
        .c_va_copy,
        => {
            const ty_op = data[inst].ty_op;
            try self.verifyInst(inst, .{ ty_op.operand, .none, .none });
        },
        .is_null,
        .is_non_null,
        .is_null_ptr,
        .is_non_null_ptr,
        .is_err,
        .is_non_err,
        .is_err_ptr,
        .is_non_err_ptr,
        .ptrtoint,
        .bool_to_int,
        .is_named_enum_value,
        .tag_name,
        .error_name,
        .sqrt,
        .sin,
        .cos,
        .tan,
        .exp,
        .exp2,
        .log,
        .log2,
        .log10,
        .fabs,
        .floor,
        .ceil,
        .round,
        .trunc_float,
        .neg,
        .neg_optimized,
        .cmp_lt_errors_len,
        .set_err_return_trace,
        .c_va_end,
        => {
            const un_op = data[inst].un_op;
            try self.verifyInst(inst, .{ un_op, .none, .none });
        },
        .ret,
        .ret_load,
        => {
            const un_op = data[inst].un_op;
            try self.verifyInst(inst, .{ un_op, .none, .none });
            // This instruction terminates the function, so everything should be dead
            if (self.live.count() > 0) return invalid("%{}: instructions still alive", .{inst});
        },
        .dbg_var_ptr,
        .dbg_var_val,
        .wasm_memory_grow,
        => {
            const pl_op = data[inst].pl_op;
            try self.verifyInst(inst, .{ pl_op.operand, .none, .none });
        },
        .prefetch => {
            const prefetch = data[inst].prefetch;
            try self.verifyInst(inst, .{ prefetch.ptr, .none, .none });
        },
        .reduce,
        .reduce_optimized,
        => {
            const reduce = data[inst].reduce;
            try self.verifyInst(inst, .{ reduce.operand, .none, .none });
        },
        .union_init => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.UnionInit, ty_pl.payload).data;
            try self.verifyInst(inst, .{ extra.init, .none, .none });
        },
        .struct_field_ptr, .struct_field_val => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.StructField, ty_pl.payload).data;
            try self.verifyInst(inst, .{ extra.struct_operand, .none, .none });
        },
        .field_parent_ptr => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;
            try self.verifyInst(inst, .{ extra.field_ptr, .none, .none });
        },
        .atomic_load => {
            const atomic_load = data[inst].atomic_load;
            try self.verifyInst(inst, .{ atomic_load.ptr, .none, .none });
        },

        // binary
        .add,
        .add_optimized,
        .addwrap,
        .addwrap_optimized,
        .add_sat,
        .sub,
        .sub_optimized,
        .subwrap,
        .subwrap_optimized,
        .sub_sat,
        .mul,
        .mul_optimized,
        .mulwrap,
        .mulwrap_optimized,
        .mul_sat,
        .div_float,
        .div_float_optimized,
        .div_trunc,
        .div_trunc_optimized,
        .div_floor,
        .div_floor_optimized,
        .div_exact,
        .div_exact_optimized,
        .rem,
        .rem_optimized,
        .mod,
        .mod_optimized,
        .bit_and,
        .bit_or,
        .xor,
        .cmp_lt,
        .cmp_lt_optimized,
        .cmp_lte,
        .cmp_lte_optimized,
        .cmp_eq,
        .cmp_eq_optimized,
        .cmp_gte,
        .cmp_gte_optimized,
        .cmp_gt,
        .cmp_gt_optimized,
        .cmp_neq,
        .cmp_neq_optimized,
        .bool_and,
        .bool_or,
        .store,
        .array_elem_val,
        .slice_elem_val,
        .ptr_elem_val,
        .shl,
        .shl_exact,
        .shl_sat,
        .shr,
        .shr_exact,
        .atomic_store_unordered,
        .atomic_store_monotonic,
        .atomic_store_release,
        .atomic_store_seq_cst,
        .set_union_tag,
        .min,
        .max,
        => {
            const bin_op = data[inst].bin_op;
            try self.verifyInst(inst, .{ bin_op.lhs, bin_op.rhs, .none });
        },
        .add_with_overflow,
        .sub_with_overflow,
        .mul_with_overflow,
        .shl_with_overflow,
        .ptr_add,
        .ptr_sub,
        .ptr_elem_ptr,
        .slice_elem_ptr,
        .slice,
        => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.Bin, ty_pl.payload).data;
            try self.verifyInst(inst, .{ extra.lhs, extra.rhs, .none });
        },
        .shuffle => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.Shuffle, ty_pl.payload).data;
            try self.verifyInst(inst, .{ extra.a, extra.b, .none });
        },
        .cmp_vector,
        .cmp_vector_optimized,
        => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.VectorCmp, ty_pl.payload).data;
            try self.verifyInst(inst, .{ extra.lhs, extra.rhs, .none });
        },
        .atomic_rmw => {
            const pl_op = data[inst].pl_op;
            const extra = self.air.extraData(Air.AtomicRmw, pl_op.payload).data;
            try self.verifyInst(inst, .{ pl_op.operand, extra.operand, .none });
        },

        // ternary
        .select => {
            const pl_op = data[inst].pl_op;
            const extra = self.air.extraData(Air.Bin, pl_op.payload).data;
            try self.verifyInst(inst, .{ pl_op.operand, extra.lhs, extra.rhs });
        },
        .mul_add => {
            const pl_op = data[inst].pl_op;
            const extra = self.air.extraData(Air.Bin, pl_op.payload).data;
            try self.verifyInst(inst, .{ extra.lhs, extra.rhs, pl_op.operand });
        },
        .vector_store_elem => {
            const vector_store_elem = data[inst].vector_store_elem;
            const extra = self.air.extraData(Air.Bin, vector_store_elem.payload).data;
            try self.verifyInst(inst, .{ vector_store_elem.vector_ptr, extra.lhs, extra.rhs });
        },
        .memset,
        .memcpy,
        => {
            const pl_op = data[inst].pl_op;
            const extra = self.air.extraData(Air.Bin, pl_op.payload).data;
            try self.verifyInst(inst, .{ pl_op.operand, extra.lhs, extra.rhs });
        },
        .cmpxchg_strong,
        .cmpxchg_weak,
        => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.Cmpxchg, ty_pl.payload).data;
            try self.verifyInst(inst, .{ extra.ptr, extra.expected_value, extra.new_value });
        },

        // big tombs
        .aggregate_init => {
            const ty_pl = data[inst].ty_pl;
            const aggregate_ty = self.air.getRefType(ty_pl.ty);
            const len = @intCast(usize, aggregate_ty.arrayLen());
            const elements = @ptrCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]);

            var bt = self.liveness.iterateBigTomb(inst);
            for (elements) |element| {
                try self.verifyOperand(inst, element, bt.feed());
            }
            try self.verifyInst(inst, .{ .none, .none, .none });
        },
        .call, .call_always_tail, .call_never_tail, .call_never_inline => {
            const pl_op = data[inst].pl_op;
            const extra = self.air.extraData(Air.Call, pl_op.payload);
            const args = @ptrCast(
                []const Air.Inst.Ref,
                self.air.extra[extra.end..][0..extra.data.args_len],
            );

            var bt = self.liveness.iterateBigTomb(inst);
            try self.verifyOperand(inst, pl_op.operand, bt.feed());
            for (args) |arg| {
                try self.verifyOperand(inst, arg, bt.feed());
            }
            try self.verifyInst(inst, .{ .none, .none, .none });
        },
        .assembly => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.Asm, ty_pl.payload);
            var extra_i = extra.end;
            const outputs = @ptrCast(
                []const Air.Inst.Ref,
                self.air.extra[extra_i..][0..extra.data.outputs_len],
            );
            extra_i += outputs.len;
            const inputs = @ptrCast(
                []const Air.Inst.Ref,
                self.air.extra[extra_i..][0..extra.data.inputs_len],
            );
            extra_i += inputs.len;

            var bt = self.liveness.iterateBigTomb(inst);
            for (outputs) |output| {
                if (output != .none) {
                    try self.verifyOperand(inst, output, bt.feed());
                }
            }
            for (inputs) |input| {
                try self.verifyOperand(inst, input, bt.feed());
            }
            try self.verifyInst(inst, .{ .none, .none, .none });
        },

        // control flow
        .@"try" => {
            const pl_op = data[inst].pl_op;
            const extra = self.air.extraData(Air.Try, pl_op.payload);
            const try_body = self.air.extra[extra.end..][0..extra.data.body_len];

            const cond_br_liveness = self.liveness.getCondBr(inst);

            try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0));

            var live = try self.live.clone(self.gpa);
            defer live.deinit(self.gpa);

            for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death);
            try self.verifyBody(try_body);

            self.live.deinit(self.gpa);
            self.live = live.move();

            for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death);

            try self.verifyInst(inst, .{ .none, .none, .none });
        },
        .try_ptr => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.TryPtr, ty_pl.payload);
            const try_body = self.air.extra[extra.end..][0..extra.data.body_len];

            const cond_br_liveness = self.liveness.getCondBr(inst);

            try self.verifyOperand(inst, extra.data.ptr, self.liveness.operandDies(inst, 0));

            var live = try self.live.clone(self.gpa);
            defer live.deinit(self.gpa);

            for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death);
            try self.verifyBody(try_body);

            self.live.deinit(self.gpa);
            self.live = live.move();

            for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death);

            try self.verifyInst(inst, .{ .none, .none, .none });
        },
        .br => {
            const br = data[inst].br;
            const gop = try self.blocks.getOrPut(self.gpa, br.block_inst);

            try self.verifyOperand(inst, br.operand, self.liveness.operandDies(inst, 0));
            if (gop.found_existing) {
                try self.verifyMatchingLiveness(br.block_inst, gop.value_ptr.*);
            } else {
                gop.value_ptr.* = try self.live.clone(self.gpa);
            }
            try self.verifyInst(inst, .{ .none, .none, .none });
        },
        .block => {
            const ty_pl = data[inst].ty_pl;
            const block_ty = self.air.getRefType(ty_pl.ty);
            const extra = self.air.extraData(Air.Block, ty_pl.payload);
            const block_body = self.air.extra[extra.end..][0..extra.data.body_len];
            const block_liveness = self.liveness.getBlock(inst);

            var orig_live = try self.live.clone(self.gpa);
            defer orig_live.deinit(self.gpa);

            assert(!self.blocks.contains(inst));
            try self.verifyBody(block_body);

            // Liveness data after the block body is garbage, but we want to
            // restore it to verify deaths
            self.live.deinit(self.gpa);
            self.live = orig_live.move();

            for (block_liveness.deaths) |death| try self.verifyDeath(inst, death);

            if (block_ty.isNoReturn()) {
                assert(!self.blocks.contains(inst));
            } else {
                var live = self.blocks.fetchRemove(inst).?.value;
                defer live.deinit(self.gpa);

                try self.verifyMatchingLiveness(inst, live);
            }

            try self.verifyInst(inst, .{ .none, .none, .none });
        },
        .loop => {
            const ty_pl = data[inst].ty_pl;
            const extra = self.air.extraData(Air.Block, ty_pl.payload);
            const loop_body = self.air.extra[extra.end..][0..extra.data.body_len];

            var live = try self.live.clone(self.gpa);
            defer live.deinit(self.gpa);

            try self.verifyBody(loop_body);

            // The same stuff should be alive after the loop as before it
            try self.verifyMatchingLiveness(inst, live);

            try self.verifyInst(inst, .{ .none, .none, .none });
        },
        .cond_br => {
            const pl_op = data[inst].pl_op;
            const extra = self.air.extraData(Air.CondBr, pl_op.payload);
            const then_body = self.air.extra[extra.end..][0..extra.data.then_body_len];
            const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len];
            const cond_br_liveness = self.liveness.getCondBr(inst);

            try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0));

            var live = try self.live.clone(self.gpa);
            defer live.deinit(self.gpa);

            for (cond_br_liveness.then_deaths) |death| try self.verifyDeath(inst, death);
            try self.verifyBody(then_body);

            self.live.deinit(self.gpa);
            self.live = live.move();

            for (cond_br_liveness.else_deaths) |death| try self.verifyDeath(inst, death);
            try self.verifyBody(else_body);

            try self.verifyInst(inst, .{ .none, .none, .none });
        },
        .switch_br => {
            const pl_op = data[inst].pl_op;
            const switch_br = self.air.extraData(Air.SwitchBr, pl_op.payload);
            var extra_index = switch_br.end;
            var case_i: u32 = 0;
            const switch_br_liveness = try self.liveness.getSwitchBr(
                self.gpa,
                inst,
                switch_br.data.cases_len + 1,
            );
            defer self.gpa.free(switch_br_liveness.deaths);

            try self.verifyOperand(inst, pl_op.operand, self.liveness.operandDies(inst, 0));

            var live = self.live.move();
            defer live.deinit(self.gpa);

            while (case_i < switch_br.data.cases_len) : (case_i += 1) {
                const case = self.air.extraData(Air.SwitchBr.Case, extra_index);
                const items = @ptrCast(
                    []const Air.Inst.Ref,
                    self.air.extra[case.end..][0..case.data.items_len],
                );
                const case_body = self.air.extra[case.end + items.len ..][0..case.data.body_len];
                extra_index = case.end + items.len + case_body.len;

                self.live.deinit(self.gpa);
                self.live = try live.clone(self.gpa);

                for (switch_br_liveness.deaths[case_i]) |death| try self.verifyDeath(inst, death);
                try self.verifyBody(case_body);
            }

            const else_body = self.air.extra[extra_index..][0..switch_br.data.else_body_len];
            if (else_body.len > 0) {
                self.live.deinit(self.gpa);
                self.live = try live.clone(self.gpa);

                for (switch_br_liveness.deaths[case_i]) |death| try self.verifyDeath(inst, death);
                try self.verifyBody(else_body);
            }

            try self.verifyInst(inst, .{ .none, .none, .none });
        },
    };
}
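Aside (not part of Verify.zig): the recurring pattern in `verifyBody` for two-way control flow (`cond_br`, `try`, `try_ptr`, `switch_br`) is to clone the live set, verify one branch against its recorded deaths, restore the saved set, then verify the other branch; `loop` and `block` additionally require the live set to match across iterations and `br` sites. Roughly, with a plain `u32` set standing in for the AIR live map and illustrative names only:

    const std = @import("std");

    // Sketch of the save/verify/restore pattern used above.
    fn verifyBothBranches(gpa: std.mem.Allocator, live: *std.AutoHashMapUnmanaged(u32, void)) !void {
        // Save the liveness state as it is on entry to the branch.
        var saved = try live.clone(gpa);
        defer saved.deinit(gpa);

        // ... verify the "then" branch here, mutating `live` freely ...

        // Discard whatever the first branch did and restore the saved
        // state before walking the second branch.
        live.deinit(gpa);
        live.* = saved.move();

        // ... verify the "else" branch here ...
    }

    test "save/verify/restore" {
        const gpa = std.testing.allocator;
        var live: std.AutoHashMapUnmanaged(u32, void) = .{};
        defer live.deinit(gpa);
        try live.put(gpa, 1, {});
        try verifyBothBranches(gpa, &live);
        try std.testing.expect(live.contains(1));
    }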

fn verifyDeath(self: *Verify, inst: Air.Inst.Index, operand: Air.Inst.Index) Error!void {
    try self.verifyOperand(inst, Air.indexToRef(operand), true);
}

fn verifyOperand(self: *Verify, inst: Air.Inst.Index, op_ref: Air.Inst.Ref, dies: bool) Error!void {
    const operand = Air.refToIndex(op_ref) orelse return;
    switch (self.air.instructions.items(.tag)[operand]) {
        .constant, .const_ty => {},
        else => {
            if (dies) {
                if (!self.live.remove(operand)) return invalid("%{}: dead operand %{} reused and killed again", .{ inst, operand });
            } else {
                if (!self.live.contains(operand)) return invalid("%{}: dead operand %{} reused", .{ inst, operand });
            }
        },
    }
}

fn verifyInst(
    self: *Verify,
    inst: Air.Inst.Index,
    operands: [Liveness.bpi - 1]Air.Inst.Ref,
) Error!void {
    for (operands, 0..) |operand, operand_index| {
        const dies = self.liveness.operandDies(inst, @intCast(Liveness.OperandInt, operand_index));
        try self.verifyOperand(inst, operand, dies);
    }
    const tag = self.air.instructions.items(.tag);
    switch (tag[inst]) {
        .constant, .const_ty => unreachable,
        else => {
            if (self.liveness.isUnused(inst)) {
                assert(!self.live.contains(inst));
            } else {
                try self.live.putNoClobber(self.gpa, inst, {});
            }
        },
    }
}

fn verifyMatchingLiveness(self: *Verify, block: Air.Inst.Index, live: LiveMap) Error!void {
    if (self.live.count() != live.count()) return invalid("%{}: different deaths across branches", .{block});
    var live_it = self.live.keyIterator();
    while (live_it.next()) |live_inst| if (!live.contains(live_inst.*)) return invalid("%{}: different deaths across branches", .{block});
}

fn invalid(comptime fmt: []const u8, args: anytype) error{LivenessInvalid} {
    log.err(fmt, args);
    return error.LivenessInvalid;
}

const std = @import("std");
const assert = std.debug.assert;
const log = std.log.scoped(.liveness_verify);

const Air = @import("../Air.zig");
const Liveness = @import("../Liveness.zig");
const Verify = @This();
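The hunks below wire the verifier into `ensureFuncBodyAnalyzed`. The caller-side shape is: run `verify()`, let `error.OutOfMemory` propagate, and turn `error.LivenessInvalid` into a decl-level failure. A self-contained sketch of just that error handling, where `Verifier` is a stand-in type and not `Liveness.Verify`:

    const std = @import("std");

    const Verifier = struct {
        fail: bool,
        fn verify(self: Verifier) error{ LivenessInvalid, OutOfMemory }!void {
            if (self.fail) return error.LivenessInvalid;
        }
    };

    fn runVerifier(v: Verifier) error{ OutOfMemory, AnalysisFail }!void {
        v.verify() catch |err| switch (err) {
            // Out-of-memory always propagates unchanged.
            error.OutOfMemory => return error.OutOfMemory,
            else => {
                // In the commit below: record an ErrorMsg on the Decl and
                // set `decl.analysis = .liveness_failure`.
                return error.AnalysisFail;
            },
        };
    }

    test "verification failure becomes AnalysisFail" {
        try runVerifier(.{ .fail = false });
        try std.testing.expectError(error.AnalysisFail, runVerifier(.{ .fail = true }));
    }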
@@ -483,6 +483,8 @@ pub const Decl = struct {
     /// and attempting semantic analysis again may succeed.
     sema_failure_retryable,
+    /// There will be a corresponding ErrorMsg in Module.failed_decls.
+    liveness_failure,
     /// There will be a corresponding ErrorMsg in Module.failed_decls.
     codegen_failure,
     /// There will be a corresponding ErrorMsg in Module.failed_decls.
     /// This indicates the failure was something like running out of disk space,
@@ -4129,6 +4131,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl_index: Decl.Index) SemaError!void {
     .file_failure,
     .sema_failure,
     .sema_failure_retryable,
+    .liveness_failure,
     .codegen_failure,
     .dependency_failure,
     .codegen_failure_retryable,
@@ -4222,6 +4225,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl_index: Decl.Index) SemaError!void {
     .dependency_failure,
     .sema_failure,
     .sema_failure_retryable,
+    .liveness_failure,
     .codegen_failure,
     .codegen_failure_retryable,
     .complete,
@@ -4247,6 +4251,7 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {

     .file_failure,
     .sema_failure,
+    .liveness_failure,
     .codegen_failure,
     .dependency_failure,
     .sema_failure_retryable,
@@ -4306,6 +4311,33 @@ pub fn ensureFuncBodyAnalyzed(mod: *Module, func: *Fn) SemaError!void {
         std.debug.print("# End Function AIR: {s}\n\n", .{fqn});
     }

+    if (std.debug.runtime_safety) {
+        var verify = Liveness.Verify{
+            .gpa = gpa,
+            .air = air,
+            .liveness = liveness,
+        };
+        defer verify.deinit();
+
+        verify.verify() catch |err| switch (err) {
+            error.OutOfMemory => return error.OutOfMemory,
+            else => {
+                try mod.failed_decls.ensureUnusedCapacity(gpa, 1);
+                mod.failed_decls.putAssumeCapacityNoClobber(
+                    decl_index,
+                    try Module.ErrorMsg.create(
+                        gpa,
+                        decl.srcLoc(),
+                        "invalid liveness: {s}",
+                        .{@errorName(err)},
+                    ),
+                );
+                decl.analysis = .liveness_failure;
+                return error.AnalysisFail;
+            },
+        };
+    }
+
     if (no_bin_file and !dump_llvm_ir) return;

     comp.bin_file.updateFunc(mod, func, air, liveness) catch |err| switch (err) {
@@ -4349,6 +4381,7 @@ pub fn updateEmbedFile(mod: *Module, embed_file: *EmbedFile) SemaError!void {
     .dependency_failure,
     .sema_failure,
     .sema_failure_retryable,
+    .liveness_failure,
     .codegen_failure,
     .codegen_failure_retryable,
     .complete,