wasm: Fix test cases and add pointer test cases

Ensure all previous test cases still pass, and add some basic tests
for pointers to the stack.
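
As a rough illustration of what these tests exercise: a pointer to the stack in wasm is
just an i32 address into linear memory, obtained by bumping a stack-pointer global. A
minimal sketch in WebAssembly text format follows; the `$__stack_pointer` global, the
`$foo` function, and the exact bump sequence are illustrative assumptions, not
necessarily the code this backend emits:

    (func $_start (result i32)
      (local $x_ptr i32)
      ;; reserve 4 bytes of stack space for `x` by bumping the stack pointer down
      global.get $__stack_pointer
      i32.const 4
      i32.sub
      local.tee $x_ptr
      global.set $__stack_pointer
      ;; x = 0
      local.get $x_ptr
      i32.const 0
      i32.store
      ;; foo(&x)
      local.get $x_ptr
      call $foo
      ;; return x (restoring the stack pointer is omitted in this sketch)
      local.get $x_ptr
      i32.load)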

This means we can start implementing wasm's C ABI, as described at: https://github.com/WebAssembly/tool-conventions/blob/main/BasicCABI.md

We also simplified the block logic by always using 'void' block types and instead writing the value to a local,
which can then be referenced by subsequent instructions, as is currently done by AIR.
Besides this, we also no longer need to insert blocks at an offset, as we simply write the saved temporary
after we create the block.
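
A minimal sketch of the difference in WebAssembly text format (the `$block_result`
local name is purely illustrative):

    ;; before: the block carried a result type and a break left the value on the stack
    (block (result i32)
      ;; ...
      i32.const 42
      br 0)

    ;; after: the block type is always 'empty' (void); a break first saves the value
    ;; into a local, which later instructions read back
    (block
      ;; ...
      i32.const 42
      local.set $block_result
      br 0)
    local.get $block_result
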
Author: Luuk de Gram, 2021-11-20 21:28:39 +01:00
parent ec5220405b
commit deb8d0765b
2 changed files with 150 additions and 50 deletions


@@ -187,7 +187,13 @@ fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode {
32 => switch (args.valtype1.?) {
.i64 => if (args.signedness.? == .signed) return .i64_load32_s else return .i64_load32_u,
.i32 => return .i32_load,
.f32, .f64 => unreachable,
.f32 => return .f32_load,
.f64 => unreachable,
},
64 => switch (args.valtype1.?) {
.i64 => return .i64_load,
.f64 => return .f64_load,
else => unreachable,
},
else => unreachable,
} else switch (args.valtype1.?) {
@@ -216,6 +222,7 @@ fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode {
},
64 => switch (args.valtype1.?) {
.i64 => return .i64_store,
.f64 => return .f64_store,
else => unreachable,
},
else => unreachable,
@@ -505,7 +512,10 @@ gpa: *mem.Allocator,
/// Table to save `WValue`'s generated by an `Air.Inst`
values: ValueTable,
/// Mapping from Air.Inst.Index to block ids
blocks: std.AutoArrayHashMapUnmanaged(Air.Inst.Index, u32) = .{},
blocks: std.AutoArrayHashMapUnmanaged(Air.Inst.Index, struct {
label: u32,
value: WValue,
}) = .{},
/// `bytes` contains the wasm bytecode belonging to the 'code' section.
code: ArrayList(u8),
/// Contains the generated function type bytecode for the current function
@@ -984,8 +994,8 @@ fn genInst(self: *Self, inst: Air.Inst.Index) !WValue {
.dbg_stmt => WValue.none,
.intcast => self.airIntcast(inst),
.is_err => self.airIsErr(inst, .i32_eq),
.is_non_err => self.airIsErr(inst, .i32_ne),
.is_err => self.airIsErr(inst, .i32_ne),
.is_non_err => self.airIsErr(inst, .i32_eq),
.is_null => self.airIsNull(inst, .i32_ne),
.is_non_null => self.airIsNull(inst, .i32_eq),
@@ -1065,12 +1075,12 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const ret_ty = target.ty.fnReturnType();
switch (ret_ty.zigTypeTag()) {
.ErrorUnion, .Optional => {
.Void, .NoReturn => return WValue.none,
else => {
const result_local = try self.allocLocal(ret_ty);
try self.addLabel(.local_set, result_local.local);
return result_local;
},
else => return WValue.none,
}
}
@@ -1086,7 +1096,7 @@ fn airAlloc(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
if (abi_size == 0) return WValue{ .none = {} };
// local, containing the offset to the stack position
const local = try self.allocLocal(child_type);
const local = try self.allocLocal(Type.initTag(.i32)); // always pointer therefore i32
try self.moveStack(@intCast(u32, abi_size), local.local);
return local;
@@ -1114,30 +1124,65 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro
var buf: Type.Payload.ElemType = undefined;
const payload_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionPayload() else ty.optionalChild(&buf);
const tag_ty = if (ty.zigTypeTag() == .ErrorUnion) ty.errorUnionSet() else Type.initTag(.u8);
const payload_offset = @intCast(u32, tag_ty.abiSize(self.target));
const payload_offset = if (ty.zigTypeTag() == .ErrorUnion)
@intCast(u32, tag_ty.abiSize(self.target))
else
@intCast(u32, ty.abiSize(self.target) - payload_ty.abiSize(self.target));
if (rhs == .constant) {
// constant will contain both tag and payload,
// so save those in 2 temporary locals before storing them
// in memory
try self.emitWValue(rhs);
const tag_local = try self.allocLocal(tag_ty);
const payload_local = try self.allocLocal(payload_ty);
switch (rhs) {
.constant => {
// constant will contain both tag and payload,
// so save those in 2 temporary locals before storing them
// in memory
try self.emitWValue(rhs);
const tag_local = try self.allocLocal(tag_ty);
const payload_local = try self.allocLocal(payload_ty);
try self.addLabel(.local_set, payload_local.local);
try self.addLabel(.local_set, tag_local.local);
try self.addLabel(.local_set, payload_local.local);
try self.addLabel(.local_set, tag_local.local);
try self.store(lhs, tag_local, tag_ty, 0);
return try self.store(lhs, payload_local, payload_ty, payload_offset);
} else {
// Load values from `rhs` stack position and store in `lhs` instead
const tag_local = try self.load(rhs, tag_ty, 0);
const payload_local = try self.load(rhs, payload_ty, payload_offset);
try self.store(lhs, tag_local, tag_ty, 0);
return try self.store(lhs, payload_local, payload_ty, payload_offset);
},
.local => {
// Load values from `rhs` stack position and store in `lhs` instead
const tag_local = try self.load(rhs, tag_ty, 0);
const payload_local = try self.load(rhs, payload_ty, payload_offset);
try self.store(lhs, tag_local, tag_ty, 0);
return try self.store(lhs, payload_local, payload_ty, payload_offset);
try self.store(lhs, tag_local, tag_ty, 0);
return try self.store(lhs, payload_local, payload_ty, payload_offset);
},
.local_with_offset => |with_offset| {
const tag_local = try self.allocLocal(tag_ty);
try self.addImm32(0);
try self.store(lhs, tag_local, tag_ty, 0);
return try self.store(
lhs,
.{ .local = with_offset.local },
payload_ty,
with_offset.offset,
);
},
else => unreachable,
}
},
.Struct => {
// we are copying a struct with its fields.
// Replace this with a wasm memcpy instruction once we support that feature.
const fields_len = ty.structFieldCount();
var index: usize = 0;
while (index < fields_len) : (index += 1) {
const field_ty = ty.structFieldType(index);
if (!field_ty.hasCodeGenBits()) continue;
const field_offset = std.math.cast(u32, ty.structFieldOffset(index, self.target)) catch {
return self.fail("Field type '{}' too big to fit into stack frame", .{field_ty});
};
const field_local = try self.load(rhs, field_ty, field_offset);
try self.store(lhs, field_local, field_ty, field_offset);
}
return;
},
else => {},
}
try self.emitWValue(lhs);
@@ -1429,21 +1474,29 @@ fn airBlock(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const extra = self.air.extraData(Air.Block, ty_pl.payload);
const body = self.air.extra[extra.end..][0..extra.data.body_len];
try self.startBlock(.block, block_ty, null);
// if block_ty is non-empty, we create a register to store the temporary value
const block_result: WValue = if (block_ty != wasm.block_empty)
try self.allocLocal(self.air.getRefType(ty_pl.ty))
else
WValue.none;
try self.startBlock(.block, wasm.block_empty);
// Here we set the current block idx, so breaks know the depth to jump
// to when breaking out.
try self.blocks.putNoClobber(self.gpa, inst, self.block_depth);
try self.blocks.putNoClobber(self.gpa, inst, .{
.label = self.block_depth,
.value = block_result,
});
try self.genBody(body);
try self.endBlock();
return .none;
return block_result;
}
/// appends a new wasm block to the code section and increases the `block_depth` by 1
fn startBlock(self: *Self, block_tag: wasm.Opcode, valtype: u8, with_offset: ?usize) !void {
fn startBlock(self: *Self, block_tag: wasm.Opcode, valtype: u8) !void {
self.block_depth += 1;
const offset = with_offset orelse self.mir_instructions.len;
try self.addInstAt(offset, .{
try self.addInst(.{
.tag = Mir.Inst.Tag.fromOpcode(block_tag),
.data = .{ .block_type = valtype },
});
@@ -1462,7 +1515,7 @@ fn airLoop(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
// result type of loop is always 'noreturn', meaning we can always
// emit the wasm type 'block_empty'.
try self.startBlock(.loop, wasm.block_empty, null);
try self.startBlock(.loop, wasm.block_empty);
try self.genBody(body);
// breaking to the index of a loop block will continue the loop instead
@@ -1480,13 +1533,10 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len];
// TODO: Handle death instructions for then and else body
// insert blocks at the position of `offset` so
// the condition can jump to it
const offset = self.mir_instructions.len;
try self.emitWValue(condition);
// result type is always noreturn, so use `block_empty` as type.
try self.startBlock(.block, wasm.block_empty, offset);
try self.startBlock(.block, wasm.block_empty);
// emit the conditional value
try self.emitWValue(condition);
// we inserted the block in front of the condition
// so now check if condition matches. If not, break outside this block
@@ -1539,15 +1589,20 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: std.math.CompareOperator) Inner
fn airBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const br = self.air.instructions.items(.data)[inst].br;
const block = self.blocks.get(br.block_inst).?;
// if operand has codegen bits we should break with a value
if (self.air.typeOf(br.operand).hasCodeGenBits()) {
try self.emitWValue(self.resolveInst(br.operand));
if (block.value != .none) {
try self.addLabel(.local_set, block.value.local);
}
}
// We map every block to its block index.
// We then determine how far we have to jump to it by subtracting it from current block depth
const idx: u32 = self.block_depth - self.blocks.get(br.block_inst).?;
const idx: u32 = self.block_depth - block.label;
try self.addLabel(.br, idx);
return .none;
@@ -1677,7 +1732,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
}
case_list.appendAssumeCapacity(.{ .values = values, .body = case_body });
try self.startBlock(.block, blocktype, null);
try self.startBlock(.block, blocktype);
}
// When the highest and lowest values are separated by '50',
@@ -1690,7 +1745,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const else_body = self.air.extra[extra_index..][0..switch_br.data.else_body_len];
const has_else_body = else_body.len != 0;
if (has_else_body) {
try self.startBlock(.block, blocktype, null);
try self.startBlock(.block, blocktype);
}
if (!is_sparse) {
@@ -1698,7 +1753,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
// The value 'target' represents the index into the table.
// Each index in the table represents a label to the branch
// to jump to.
try self.startBlock(.block, blocktype, null);
try self.startBlock(.block, blocktype);
try self.emitWValue(target);
if (lowest < 0) {
// since br_table works using indexes, starting from '0', we must ensure all values
@@ -1754,7 +1809,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
try self.addLabel(.br_if, 0);
} else {
// in multi-value prongs we must check if any prongs match the target value.
try self.startBlock(.block, blocktype, null);
try self.startBlock(.block, blocktype);
for (case.values) |value| {
try self.emitWValue(target);
try self.emitConstant(value.value, target_ty);
@@ -1794,7 +1849,7 @@ fn airIsErr(self: *Self, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError!W
.alignment = err_ty.abiAlignment(self.target),
});
try self.addInst(.{
.tag = .i32_load,
.tag = .i32_load16_u,
.data = .{ .payload = mem_arg_index },
});
@@ -1811,14 +1866,14 @@ fn airUnwrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const operand = self.resolveInst(ty_op.operand);
const err_ty = self.air.typeOf(ty_op.operand);
const offset = @intCast(u32, err_ty.errorUnionSet().abiSize(self.target) / 8);
return self.load(operand, self.air.getRefType(ty_op.ty), offset);
const offset = @intCast(u32, err_ty.errorUnionSet().abiSize(self.target));
return self.load(operand, err_ty.errorUnionPayload(), offset);
}
fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
return self.resolveInst(ty_op.operand);
_ = ty_op;
return self.fail("TODO: wasm airWrapErrUnionPayload", .{});
}
fn airIntcast(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
@@ -1880,8 +1935,9 @@ fn airOptionalPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
var buf: Type.Payload.ElemType = undefined;
const child_ty = opt_ty.optionalChild(&buf);
const offset = opt_ty.abiSize(self.target) - child_ty.abiSize(self.target);
return self.load(operand, child_ty, @as(u32, 1)); // null tag is 1 byte
return self.load(operand, child_ty, @intCast(u32, offset));
}
fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
@@ -1893,5 +1949,14 @@ fn airOptionalPayloadPtrSet(self: *Self, inst: Air.Inst.Index) InnerError!WValue
fn airWrapOptional(self: *Self, inst: Air.Inst.Index) InnerError!WValue {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
return self.resolveInst(ty_op.operand);
const operand = self.resolveInst(ty_op.operand);
const op_ty = self.air.typeOf(ty_op.operand);
const optional_ty = self.air.getRefType(ty_op.ty);
const offset = optional_ty.abiSize(self.target) - op_ty.abiSize(self.target);
return WValue{ .local_with_offset = .{
.local = operand.local,
.offset = @intCast(u32, offset),
} };
}


@@ -740,4 +740,39 @@ pub fn addCases(ctx: *TestContext) !void {
\\}
, "0\n");
}
{
var case = ctx.exe("wasm pointers", wasi);
case.addCompareOutput(
\\pub export fn _start() u32 {
\\ var x: u32 = 0;
\\
\\ foo(&x);
\\ return x;
\\}
\\
\\fn foo(x: *u32)void {
\\ x.* = 2;
\\}
, "2\n");
case.addCompareOutput(
\\pub export fn _start() u32 {
\\ var x: u32 = 0;
\\
\\ foo(&x);
\\ bar(&x);
\\ return x;
\\}
\\
\\fn foo(x: *u32)void {
\\ x.* = 2;
\\}
\\
\\fn bar(x: *u32) void {
\\ x.* += 2;
\\}
, "4\n");
}
}