Merge pull request #18857 from alichraghi/shader

spirv: make rusticl the primary testing implementation
Veikka Tuominen 2024-02-09 14:11:31 +02:00 committed by GitHub
commit ddcea2cad4
15 changed files with 51 additions and 139 deletions


@@ -721,75 +721,18 @@ const DeclGen = struct {
};
}
/// Construct a struct at runtime.
/// ty must be a struct type.
/// Constituents should be in `indirect` representation (as the elements of a struct should be).
/// Result is in `direct` representation.
fn constructStruct(self: *DeclGen, ty: Type, types: []const Type, constituents: []const IdRef) !IdRef {
assert(types.len == constituents.len);
// The Khronos LLVM-SPIRV translator crashes because it cannot construct structs which'
// operands are not constant.
// See https://github.com/KhronosGroup/SPIRV-LLVM-Translator/issues/1349
// For now, just initialize the struct by setting the fields manually...
// TODO: Make this OpCompositeConstruct when we can
const ptr_composite_id = try self.alloc(ty, .{ .storage_class = .Function });
for (constituents, types, 0..) |constitent_id, member_ty, index| {
const ptr_member_ty_ref = try self.ptrType(member_ty, .Function);
const ptr_id = try self.accessChain(ptr_member_ty_ref, ptr_composite_id, &.{@as(u32, @intCast(index))});
try self.func.body.emit(self.spv.gpa, .OpStore, .{
.pointer = ptr_id,
.object = constitent_id,
});
}
return try self.load(ty, ptr_composite_id, .{});
}
/// Construct a vector at runtime.
/// ty must be an vector type.
/// Constituents should be in `indirect` representation (as the elements of an vector should be).
/// Result is in `direct` representation.
fn constructVector(self: *DeclGen, ty: Type, constituents: []const IdRef) !IdRef {
// The Khronos LLVM-SPIRV translator crashes because it cannot construct structs which'
// operands are not constant.
// See https://github.com/KhronosGroup/SPIRV-LLVM-Translator/issues/1349
// For now, just initialize the struct by setting the fields manually...
// TODO: Make this OpCompositeConstruct when we can
const mod = self.module;
const ptr_composite_id = try self.alloc(ty, .{ .storage_class = .Function });
const ptr_elem_ty_ref = try self.ptrType(ty.elemType2(mod), .Function);
for (constituents, 0..) |constitent_id, index| {
const ptr_id = try self.accessChain(ptr_elem_ty_ref, ptr_composite_id, &.{@as(u32, @intCast(index))});
try self.func.body.emit(self.spv.gpa, .OpStore, .{
.pointer = ptr_id,
.object = constitent_id,
});
}
return try self.load(ty, ptr_composite_id, .{});
}
/// Construct an array at runtime.
/// ty must be an array type.
/// Constituents should be in `indirect` representation (as the elements of an array should be).
/// Result is in `direct` representation.
fn constructArray(self: *DeclGen, ty: Type, constituents: []const IdRef) !IdRef {
// The Khronos LLVM-SPIRV translator crashes because it cannot construct structs which'
// operands are not constant.
// See https://github.com/KhronosGroup/SPIRV-LLVM-Translator/issues/1349
// For now, just initialize the struct by setting the fields manually...
// TODO: Make this OpCompositeConstruct when we can
const mod = self.module;
const ptr_composite_id = try self.alloc(ty, .{ .storage_class = .Function });
const ptr_elem_ty_ref = try self.ptrType(ty.elemType2(mod), .Function);
for (constituents, 0..) |constitent_id, index| {
const ptr_id = try self.accessChain(ptr_elem_ty_ref, ptr_composite_id, &.{@as(u32, @intCast(index))});
try self.func.body.emit(self.spv.gpa, .OpStore, .{
.pointer = ptr_id,
.object = constitent_id,
});
}
return try self.load(ty, ptr_composite_id, .{});
/// Construct a composite value at runtime. If the parameters are in direct
/// representation, then the result is also in direct representation. Otherwise,
/// if the parameters are in indirect representation, then the result is too.
fn constructComposite(self: *DeclGen, ty: Type, constituents: []const IdRef) !IdRef {
const constituents_id = self.spv.allocId();
const type_id = try self.resolveTypeId(ty);
try self.func.body.emit(self.spv.gpa, .OpCompositeConstruct, .{
.id_result_type = type_id,
.id_result = constituents_id,
.constituents = constituents,
});
return constituents_id;
}
/// This function generates a load for a constant in direct (ie, non-memory) representation.
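For context, the three removed helpers required a parallel slice of field types and built the value through a Function-storage temporary with one OpStore per member before loading it back; constructComposite emits a single OpCompositeConstruct and preserves whichever representation (direct or indirect) the constituents are already in. A minimal sketch of the call-site migration, using the slice case from this commit as the example (illustrative only; slice_ty, ptr_ty, ptr_id and len_id stand for values a real call site already has in scope):

// Before (removed): the caller had to pass the field types alongside the ids.
// return try self.constructStruct(slice_ty, &.{ ptr_ty, Type.usize }, &.{ ptr_id, len_id });
// After: only the constituent ids are passed; the result type is resolved from slice_ty.
return try self.constructComposite(slice_ty, &.{ ptr_id, len_id });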
@@ -897,18 +840,15 @@ const DeclGen = struct {
});
var constituents: [2]IdRef = undefined;
var types: [2]Type = undefined;
if (eu_layout.error_first) {
constituents[0] = try self.constant(err_ty, err_val, .indirect);
constituents[1] = try self.constant(payload_ty, payload_val, .indirect);
types = .{ err_ty, payload_ty };
} else {
constituents[0] = try self.constant(payload_ty, payload_val, .indirect);
constituents[1] = try self.constant(err_ty, err_val, .indirect);
types = .{ payload_ty, err_ty };
}
return try self.constructStruct(ty, &types, &constituents);
return try self.constructComposite(ty, &constituents);
},
.enum_tag => {
const int_val = try val.intFromEnum(ty, mod);
@@ -920,11 +860,7 @@ const DeclGen = struct {
const ptr_ty = ty.slicePtrFieldType(mod);
const ptr_id = try self.constantPtr(ptr_ty, Value.fromInterned(slice.ptr));
const len_id = try self.constant(Type.usize, Value.fromInterned(slice.len), .indirect);
return self.constructStruct(
ty,
&.{ ptr_ty, Type.usize },
&.{ ptr_id, len_id },
);
return self.constructComposite(ty, &.{ ptr_id, len_id });
},
.opt => {
const payload_ty = ty.optionalChild(mod);
@@ -951,11 +887,7 @@ const DeclGen = struct {
else
try self.spv.constUndef(try self.resolveType(payload_ty, .indirect));
return try self.constructStruct(
ty,
&.{ payload_ty, Type.bool },
&.{ payload_id, has_pl_id },
);
return try self.constructComposite(ty, &.{ payload_id, has_pl_id });
},
.aggregate => |aggregate| switch (ip.indexToKey(ty.ip_index)) {
inline .array_type, .vector_type => |array_type, tag| {
@@ -992,9 +924,9 @@ const DeclGen = struct {
const sentinel = Value.fromInterned(array_type.sentinel);
constituents[constituents.len - 1] = try self.constant(elem_ty, sentinel, .indirect);
}
return self.constructArray(ty, constituents);
return self.constructComposite(ty, constituents);
},
inline .vector_type => return self.constructVector(ty, constituents),
inline .vector_type => return self.constructComposite(ty, constituents),
else => unreachable,
}
},
@@ -1004,9 +936,6 @@ const DeclGen = struct {
return self.todo("packed struct constants", .{});
}
var types = std.ArrayList(Type).init(self.gpa);
defer types.deinit();
var constituents = std.ArrayList(IdRef).init(self.gpa);
defer constituents.deinit();
@@ -1022,11 +951,10 @@ const DeclGen = struct {
const field_val = try val.fieldValue(mod, field_index);
const field_id = try self.constant(field_ty, field_val, .indirect);
try types.append(field_ty);
try constituents.append(field_id);
}
return try self.constructStruct(ty, types.items, constituents.items);
return try self.constructComposite(ty, constituents.items);
},
.anon_struct_type => unreachable, // TODO
else => unreachable,
@@ -1870,7 +1798,7 @@ const DeclGen = struct {
for (wip.results) |*result| {
result.* = try wip.dg.convertToIndirect(wip.scalar_ty, result.*);
}
return try wip.dg.constructArray(wip.result_ty, wip.results);
return try wip.dg.constructComposite(wip.result_ty, wip.results);
} else {
return wip.results[0];
}
@@ -2814,9 +2742,8 @@ const DeclGen = struct {
ov_id.* = try self.intFromBool(wip_ov.scalar_ty_ref, overflowed_id);
}
return try self.constructStruct(
return try self.constructComposite(
result_ty,
&.{ operand_ty, ov_ty },
&.{ try wip_result.finalize(), try wip_ov.finalize() },
);
}
@@ -2905,9 +2832,8 @@ const DeclGen = struct {
ov_id.* = try self.intFromBool(wip_ov.scalar_ty_ref, overflowed_id);
}
return try self.constructStruct(
return try self.constructComposite(
result_ty,
&.{ operand_ty, ov_ty },
&.{ try wip_result.finalize(), try wip_ov.finalize() },
);
}
@@ -3637,9 +3563,8 @@ const DeclGen = struct {
// Convert the pointer-to-array to a pointer to the first element.
const elem_ptr_id = try self.accessChain(elem_ptr_ty_ref, array_ptr_id, &.{0});
return try self.constructStruct(
return try self.constructComposite(
slice_ty,
&.{ elem_ptr_ty, Type.usize },
&.{ elem_ptr_id, len_id },
);
}
@@ -3651,14 +3576,12 @@ const DeclGen = struct {
const bin_op = self.air.extraData(Air.Bin, ty_pl.payload).data;
const ptr_id = try self.resolve(bin_op.lhs);
const len_id = try self.resolve(bin_op.rhs);
const ptr_ty = self.typeOf(bin_op.lhs);
const slice_ty = self.typeOfIndex(inst);
// Note: Types should not need to be converted to direct, these types
// dont need to be converted.
return try self.constructStruct(
return try self.constructComposite(
slice_ty,
&.{ ptr_ty, Type.usize },
&.{ ptr_id, len_id },
);
}
@@ -3680,8 +3603,6 @@ const DeclGen = struct {
unreachable; // TODO
}
const types = try self.gpa.alloc(Type, elements.len);
defer self.gpa.free(types);
const constituents = try self.gpa.alloc(IdRef, elements.len);
defer self.gpa.free(constituents);
var index: usize = 0;
@@ -3693,7 +3614,6 @@ const DeclGen = struct {
assert(Type.fromInterned(field_ty).hasRuntimeBits(mod));
const id = try self.resolve(element);
types[index] = Type.fromInterned(field_ty);
constituents[index] = try self.convertToIndirect(Type.fromInterned(field_ty), id);
index += 1;
}
@@ -3707,7 +3627,6 @@ const DeclGen = struct {
assert(field_ty.hasRuntimeBitsIgnoreComptime(mod));
const id = try self.resolve(element);
types[index] = field_ty;
constituents[index] = try self.convertToIndirect(field_ty, id);
index += 1;
}
@@ -3715,11 +3634,7 @@ const DeclGen = struct {
else => unreachable,
}
return try self.constructStruct(
result_ty,
types[0..index],
constituents[0..index],
);
return try self.constructComposite(result_ty, constituents[0..index]);
},
.Vector => {
const n_elems = result_ty.vectorLen(mod);
@@ -3731,7 +3646,7 @@ const DeclGen = struct {
elem_ids[i] = try self.convertToIndirect(result_ty.childType(mod), id);
}
return try self.constructVector(result_ty, elem_ids);
return try self.constructComposite(result_ty, elem_ids);
},
.Array => {
const array_info = result_ty.arrayInfo(mod);
@@ -3748,7 +3663,7 @@ const DeclGen = struct {
elem_ids[n_elems - 1] = try self.constant(array_info.elem_type, sentinel_val, .indirect);
}
return try self.constructArray(result_ty, elem_ids);
return try self.constructComposite(result_ty, elem_ids);
},
else => unreachable,
}
@@ -4885,11 +4800,7 @@ const DeclGen = struct {
members[eu_layout.errorFieldIndex()] = operand_id;
members[eu_layout.payloadFieldIndex()] = try self.spv.constUndef(payload_ty_ref);
var types: [2]Type = undefined;
types[eu_layout.errorFieldIndex()] = Type.anyerror;
types[eu_layout.payloadFieldIndex()] = payload_ty;
return try self.constructStruct(err_union_ty, &types, &members);
return try self.constructComposite(err_union_ty, &members);
}
fn airWrapErrUnionPayload(self: *DeclGen, inst: Air.Inst.Index) !?IdRef {
@@ -4910,11 +4821,7 @@ const DeclGen = struct {
members[eu_layout.errorFieldIndex()] = try self.constInt(err_ty_ref, 0);
members[eu_layout.payloadFieldIndex()] = try self.convertToIndirect(payload_ty, operand_id);
var types: [2]Type = undefined;
types[eu_layout.errorFieldIndex()] = Type.anyerror;
types[eu_layout.payloadFieldIndex()] = payload_ty;
return try self.constructStruct(err_union_ty, &types, &members);
return try self.constructComposite(err_union_ty, &members);
}
fn airIsNull(self: *DeclGen, inst: Air.Inst.Index, is_pointer: bool, pred: enum { is_null, is_non_null }) !?IdRef {
@@ -5091,8 +4998,7 @@ const DeclGen = struct {
const payload_id = try self.convertToIndirect(payload_ty, operand_id);
const members = [_]IdRef{ payload_id, try self.constBool(true, .indirect) };
const types = [_]Type{ payload_ty, Type.bool };
return try self.constructStruct(optional_ty, &types, &members);
return try self.constructComposite(optional_ty, &members);
}
fn airSwitchBr(self: *DeclGen, inst: Air.Inst.Index) !void {


@@ -407,12 +407,12 @@ pub fn flush(self: *Module, file: std.fs.File, target: std.Target) !void {
var types_constants = try self.cache.materialize(self);
defer types_constants.deinit(self.gpa);
// TODO: Vulkan doesn't support initializer kernel
var init_func = if (target.os.tag != .vulkan)
try self.initializer(&entry_points)
else
Section{};
defer init_func.deinit(self.gpa);
// // TODO: Pass global variables as function parameters
// var init_func = if (target.os.tag != .vulkan)
// try self.initializer(&entry_points)
// else
// Section{};
// defer init_func.deinit(self.gpa);
const header = [_]Word{
spec.magic_number,
@@ -458,7 +458,6 @@ pub fn flush(self: *Module, file: std.fs.File, target: std.Target) !void {
self.sections.types_globals_constants.toWords(),
globals.toWords(),
self.sections.functions.toWords(),
init_func.toWords(),
};
var iovc_buffers: [buffers.len]std.os.iovec_const = undefined;


@@ -163,7 +163,7 @@ pub fn updateExports(
.Vertex => spec.ExecutionModel.Vertex,
.Fragment => spec.ExecutionModel.Fragment,
.Kernel => spec.ExecutionModel.Kernel,
else => unreachable,
else => return,
};
const is_vulkan = target.os.tag == .vulkan;
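The effect of the one-line change above, sketched with the surrounding updateExports body elided (abridged for illustration, not copied from the file; the switch subject is written here as cc): an export whose calling convention is not Vertex, Fragment, or Kernel previously hit unreachable and is now simply skipped without declaring an entry point.

const execution_model = switch (cc) {
    .Vertex => spec.ExecutionModel.Vertex,
    .Fragment => spec.ExecutionModel.Fragment,
    .Kernel => spec.ExecutionModel.Kernel,
    // Any other calling convention is not a SPIR-V entry point; exporting it
    // is now a quiet no-op instead of reaching `unreachable`.
    else => return,
};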


@@ -756,7 +756,6 @@ test "extern variable with non-pointer opaque type" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
@export(var_to_export, .{ .name = "opaque_extern_var" });
@@ -1195,7 +1194,6 @@ test "integer compare" {
test "reference to inferred local variable works as expected" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const Crasher = struct {
lets_crash: u64 = 0,


@@ -5,6 +5,7 @@ var result: []const u8 = "wrong";
test "pass string literal byvalue to a generic var param" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
start();
blowUpStack(10);


@@ -1262,6 +1262,7 @@ test "implicit cast from *T to ?*anyopaque" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
var a: u8 = 1;
incrementVoidPtrValue(&a);


@@ -124,6 +124,7 @@ test "debug info for optional error set" {
test "implicit cast to optional to error union to return result loc" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
fn entry() !void {
@@ -950,6 +951,7 @@ test "returning an error union containing a type with no runtime bits" {
test "try used in recursive function with inferred error set" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
const Value = union(enum) {
values: []const @This(),


@@ -127,6 +127,7 @@ test "cmp f16" {
test "cmp f32/f64" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
try testCmp(f32);
try comptime testCmp(f32);
@@ -978,6 +979,7 @@ test "@abs f32/f64" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
try testFabs(f32);
try comptime testFabs(f32);


@@ -50,7 +50,6 @@ test "global loads can affect liveness" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
const ByRef = struct {


@@ -28,6 +28,7 @@ pub const EmptyStruct = struct {};
test "optional pointer to size zero struct" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
var e = EmptyStruct{};
const o: ?*EmptyStruct = &e;
@@ -35,6 +36,8 @@ test "optional pointer to size zero struct" {
}
test "equality compare optional pointers" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
try testNullPtrsEql();
try comptime testNullPtrsEql();
}


@@ -216,6 +216,7 @@ test "assign null directly to C pointer and test null equality" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
var x: [*c]i32 = null;
_ = &x;


@@ -41,7 +41,6 @@ test "anon field init" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const Foo = .{
.T1 = struct {},
@@ -90,7 +89,6 @@ test "top level decl" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
try expectEqualStrings(
"behavior.typename.A_Struct",
@@ -140,7 +138,6 @@ test "fn param" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
// https://github.com/ziglang/zig/issues/675
try expectEqualStrings(
@@ -211,7 +208,6 @@ test "local variable" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const Foo = struct { a: u32 };
const Bar = union { a: u32 };
@@ -239,7 +235,6 @@ test "anon name strategy used in sub expression" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
fn getTheName() []const u8 {


@@ -104,6 +104,7 @@ test "returned undef is 0xaa bytes when runtime safety is enabled" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const Rect = struct {
x: f32,


@@ -372,6 +372,7 @@ test "load vector elements via comptime index" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@@ -393,6 +394,7 @@ test "store vector elements via comptime index" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {


@@ -38,6 +38,8 @@ fn staticWhileLoop2() i32 {
}
test "while with continue expression" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // TODO
var sum: i32 = 0;
{
var i: i32 = 0;