Type/Value: garbage collect some methods

Jacob Young 2023-06-21 23:51:11 -04:00 committed by Andrew Kelley
parent 7d511d6428
commit 6aa88ecc54
3 changed files with 7 additions and 303 deletions


@@ -8273,7 +8273,7 @@ fn zirIntFromEnum(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
if (try sema.resolveMaybeUndefVal(enum_tag)) |enum_tag_val| {
const val = try enum_tag_val.intFromEnum(enum_tag_ty, mod);
- return sema.addConstant(int_tag_ty, try val.copy(sema.arena));
+ return sema.addConstant(int_tag_ty, val);
}
try sema.requireRuntimeBlock(block, src, operand_src);
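
With the InternPool migration, the constant produced here is already interned, so it can be returned as-is instead of being deep-copied into the Sema arena as the removed `val.copy(sema.arena)` call did. A minimal standalone sketch of why interned values are cheap to hand around, using a hypothetical `Handle` type rather than the compiler's real InternPool index:

    const std = @import("std");

    // Hypothetical 32-bit handle into an intern pool (not the compiler's type).
    const Handle = enum(u32) { none = std.math.maxInt(u32), _ };

    // "Copying" an interned value is just copying the 4-byte index: no
    // allocator, no error union, no arena lifetime to track.
    fn passAlong(h: Handle) Handle {
        return h;
    }

    test "handles are plain values" {
        const h: Handle = @enumFromInt(42);
        try std.testing.expectEqual(h, passAlong(h));
    }
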
@@ -28723,14 +28723,11 @@ fn beginComptimePtrMutation(
// without making a call to this function.
const arena = sema.arena;
- const repeated_val = try val_ptr.castTag(.repeated).?.data.copy(arena);
+ const repeated_val = try val_ptr.castTag(.repeated).?.data.intern(parent.ty.childType(mod), mod);
const array_len_including_sentinel =
try sema.usizeCast(block, src, parent.ty.arrayLenIncludingSentinel(mod));
const elems = try arena.alloc(Value, array_len_including_sentinel);
- if (elems.len > 0) elems[0] = repeated_val;
- for (elems[1..]) |*elem| {
-     elem.* = try repeated_val.copy(arena);
- }
+ @memset(elems, repeated_val.toValue());
val_ptr.* = try Value.Tag.aggregate.create(arena, elems);
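
The element-by-element copy loop is replaced by `@memset`, which fills a slice with a single value; since each element is now an interned index, every slot can hold the same `repeated_val`. A small self-contained test of that builtin, independent of Sema:

    const std = @import("std");

    test "@memset fills a slice with a repeated element" {
        const elems = try std.testing.allocator.alloc(u32, 8);
        defer std.testing.allocator.free(elems);

        // One value for every element; no explicit loop or per-element copy.
        @memset(elems, 0xaaaa_aaaa);
        for (elems) |elem| try std.testing.expectEqual(@as(u32, 0xaaaa_aaaa), elem);
    }
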
@@ -36421,7 +36418,7 @@ fn valuesEqual(
rhs: Value,
ty: Type,
) CompileError!bool {
- return Value.eqlAdvanced(lhs, ty, rhs, ty, sema.mod, sema);
+ return lhs.eql(rhs, ty, sema.mod);
}
/// Asserts the values are comparable vectors of type `ty`.


@@ -120,14 +120,6 @@ pub const Type = struct {
return a.toIntern() == b.toIntern();
}
pub fn hash(ty: Type, mod: *const Module) u32 {
_ = mod; // TODO: remove this parameter
// The InternPool data structure hashes based on Key to make interned objects
// unique. An Index can be treated simply as u32 value for the
// purpose of Type/Value hashing and equality.
return std.hash.uint32(@intFromEnum(ty.toIntern()));
}
pub fn format(ty: Type, comptime unused_fmt_string: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
_ = ty;
_ = unused_fmt_string;
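
The deleted `Type.hash` shows why these helpers are disposable: a Type is identified by its InternPool index, so hashing reduces to hashing one `u32` and equality (just above) to an integer compare. A sketch of a hash-map context built on that idea, with a hypothetical `TypeIndex` standing in for the real index type:

    const std = @import("std");

    // Hypothetical stand-in for an InternPool index.
    const TypeIndex = enum(u32) { _ };

    const IndexContext = struct {
        pub fn hash(_: IndexContext, ty: TypeIndex) u64 {
            // Same idea as the removed Type.hash: hash the 32-bit index itself.
            return std.hash.uint32(@intFromEnum(ty));
        }
        pub fn eql(_: IndexContext, a: TypeIndex, b: TypeIndex) bool {
            return a == b;
        }
    };

    test "interned types can key a hash map by index" {
        const Map = std.HashMap(TypeIndex, []const u8, IndexContext, std.hash_map.default_max_load_percentage);
        var map = Map.init(std.testing.allocator);
        defer map.deinit();

        const key: TypeIndex = @enumFromInt(7);
        try map.put(key, "i32");
        try std.testing.expectEqualStrings("i32", map.get(key).?);
    }
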


@@ -132,98 +132,6 @@ pub const Value = struct {
return null;
}
/// It's intentional that this function is not passed a corresponding Type, so that
/// a Value can be copied from a Sema to a Decl prior to resolving struct/union field types.
pub fn copy(self: Value, arena: Allocator) error{OutOfMemory}!Value {
if (self.ip_index != .none) {
return Value{ .ip_index = self.ip_index, .legacy = undefined };
}
switch (self.legacy.ptr_otherwise.tag) {
.bytes => {
const bytes = self.castTag(.bytes).?.data;
const new_payload = try arena.create(Payload.Bytes);
new_payload.* = .{
.base = .{ .tag = .bytes },
.data = try arena.dupe(u8, bytes),
};
return Value{
.ip_index = .none,
.legacy = .{ .ptr_otherwise = &new_payload.base },
};
},
.eu_payload,
.opt_payload,
.repeated,
=> {
const payload = self.cast(Payload.SubValue).?;
const new_payload = try arena.create(Payload.SubValue);
new_payload.* = .{
.base = payload.base,
.data = try payload.data.copy(arena),
};
return Value{
.ip_index = .none,
.legacy = .{ .ptr_otherwise = &new_payload.base },
};
},
.slice => {
const payload = self.castTag(.slice).?;
const new_payload = try arena.create(Payload.Slice);
new_payload.* = .{
.base = payload.base,
.data = .{
.ptr = try payload.data.ptr.copy(arena),
.len = try payload.data.len.copy(arena),
},
};
return Value{
.ip_index = .none,
.legacy = .{ .ptr_otherwise = &new_payload.base },
};
},
.aggregate => {
const payload = self.castTag(.aggregate).?;
const new_payload = try arena.create(Payload.Aggregate);
new_payload.* = .{
.base = payload.base,
.data = try arena.alloc(Value, payload.data.len),
};
for (new_payload.data, 0..) |*elem, i| {
elem.* = try payload.data[i].copy(arena);
}
return Value{
.ip_index = .none,
.legacy = .{ .ptr_otherwise = &new_payload.base },
};
},
.@"union" => {
const tag_and_val = self.castTag(.@"union").?.data;
const new_payload = try arena.create(Payload.Union);
new_payload.* = .{
.base = .{ .tag = .@"union" },
.data = .{
.tag = try tag_and_val.tag.copy(arena),
.val = try tag_and_val.val.copy(arena),
},
};
return Value{
.ip_index = .none,
.legacy = .{ .ptr_otherwise = &new_payload.base },
};
},
}
}
fn copyPayloadShallow(self: Value, arena: Allocator, comptime T: type) error{OutOfMemory}!Value {
const payload = self.cast(T).?;
const new_payload = try arena.create(T);
new_payload.* = payload.*;
return Value{
.ip_index = .none,
.legacy = .{ .ptr_otherwise = &new_payload.base },
};
}
pub fn format(val: Value, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
_ = val;
_ = fmt;
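
The recursive `copy` tree above (and `copyPayloadShallow`) existed because legacy values owned arena-allocated payloads pointing at further values. Once aggregates, slices, and unions are interned, a value's children are themselves pool indices, so duplicating a value never walks or reallocates anything. A rough sketch of that representation shift, with invented names rather than the compiler's actual data structures:

    const std = @import("std");

    // Invented mini-pool: aggregate elements are stored once, inside the pool,
    // as indices into the same pool.
    const Index = enum(u32) { _ };

    const Item = union(enum) {
        int: i64,
        aggregate: [2]Index, // children by index; the value owns no payload memory
    };

    fn add(items: *std.ArrayList(Item), item: Item) !Index {
        const idx: Index = @enumFromInt(items.items.len);
        try items.append(item);
        return idx;
    }

    test "duplicating an interned aggregate copies only an index" {
        var items = std.ArrayList(Item).init(std.testing.allocator);
        defer items.deinit();

        const a = try add(&items, .{ .int = 1 });
        const b = try add(&items, .{ .int = 2 });
        const agg = try add(&items, .{ .aggregate = .{ a, b } });

        // No recursive walk, no arena, no error set: copying a value is 4 bytes.
        const dup = agg;
        try std.testing.expect(dup == agg);
    }
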
@@ -1494,193 +1402,9 @@ pub const Value = struct {
}
pub fn eql(a: Value, b: Value, ty: Type, mod: *Module) bool {
return eqlAdvanced(a, ty, b, ty, mod, null) catch unreachable;
}
/// This function is used by hash maps and so treats floating-point NaNs as equal
/// to each other, and not equal to other floating-point values.
/// Similarly, it treats `undef` as a distinct value from all other values.
/// This function has to be able to support implicit coercion of `a` to `ty`. That is,
/// `ty` will be an exactly correct Type for `b` but it may be a post-coerced Type
/// for `a`. This function must act *as if* `a` has been coerced to `ty`. This complication
/// is required in order to make generic function instantiation efficient - specifically
/// the insertion into the monomorphized function table.
/// If `null` is provided for `opt_sema` then it is guaranteed no error will be returned.
pub fn eqlAdvanced(
a: Value,
a_ty: Type,
b: Value,
ty: Type,
mod: *Module,
opt_sema: ?*Sema,
) Module.CompileError!bool {
if (a.ip_index != .none or b.ip_index != .none) return a.ip_index == b.ip_index;
const target = mod.getTarget();
const a_tag = a.tag();
const b_tag = b.tag();
if (a_tag == b_tag) switch (a_tag) {
.aggregate => {
const a_field_vals = a.castTag(.aggregate).?.data;
const b_field_vals = b.castTag(.aggregate).?.data;
assert(a_field_vals.len == b_field_vals.len);
switch (mod.intern_pool.indexToKey(ty.toIntern())) {
.anon_struct_type => |anon_struct| {
assert(anon_struct.types.len == a_field_vals.len);
for (anon_struct.types, 0..) |field_ty, i| {
if (!(try eqlAdvanced(a_field_vals[i], field_ty.toType(), b_field_vals[i], field_ty.toType(), mod, opt_sema))) {
return false;
}
}
return true;
},
.struct_type => |struct_type| {
const struct_obj = mod.structPtrUnwrap(struct_type.index).?;
const fields = struct_obj.fields.values();
assert(fields.len == a_field_vals.len);
for (fields, 0..) |field, i| {
if (!(try eqlAdvanced(a_field_vals[i], field.ty, b_field_vals[i], field.ty, mod, opt_sema))) {
return false;
}
}
return true;
},
else => {},
}
const elem_ty = ty.childType(mod);
for (a_field_vals, 0..) |a_elem, i| {
const b_elem = b_field_vals[i];
if (!(try eqlAdvanced(a_elem, elem_ty, b_elem, elem_ty, mod, opt_sema))) {
return false;
}
}
return true;
},
.@"union" => {
const a_union = a.castTag(.@"union").?.data;
const b_union = b.castTag(.@"union").?.data;
switch (ty.containerLayout(mod)) {
.Packed, .Extern => {
const tag_ty = ty.unionTagTypeHypothetical(mod);
if (!(try eqlAdvanced(a_union.tag, tag_ty, b_union.tag, tag_ty, mod, opt_sema))) {
// In this case, we must disregard mismatching tags and compare
// based on the in-memory bytes of the payloads.
@panic("TODO comptime comparison of extern union values with mismatching tags");
}
},
.Auto => {
const tag_ty = ty.unionTagTypeHypothetical(mod);
if (!(try eqlAdvanced(a_union.tag, tag_ty, b_union.tag, tag_ty, mod, opt_sema))) {
return false;
}
},
}
const active_field_ty = ty.unionFieldType(a_union.tag, mod);
return eqlAdvanced(a_union.val, active_field_ty, b_union.val, active_field_ty, mod, opt_sema);
},
else => {},
};
if (a.pointerDecl(mod)) |a_decl| {
if (b.pointerDecl(mod)) |b_decl| {
return a_decl == b_decl;
} else {
return false;
}
} else if (b.pointerDecl(mod)) |_| {
return false;
}
switch (ty.zigTypeTag(mod)) {
.Type => {
const a_type = a.toType();
const b_type = b.toType();
return a_type.eql(b_type, mod);
},
.Enum => {
const a_val = try a.intFromEnum(ty, mod);
const b_val = try b.intFromEnum(ty, mod);
const int_ty = ty.intTagType(mod);
return eqlAdvanced(a_val, int_ty, b_val, int_ty, mod, opt_sema);
},
.Array, .Vector => {
const len = ty.arrayLen(mod);
const elem_ty = ty.childType(mod);
var i: usize = 0;
while (i < len) : (i += 1) {
const a_elem = try elemValue(a, mod, i);
const b_elem = try elemValue(b, mod, i);
if (!(try eqlAdvanced(a_elem, elem_ty, b_elem, elem_ty, mod, opt_sema))) {
return false;
}
}
return true;
},
.Pointer => switch (ty.ptrSize(mod)) {
.Slice => {
const a_len = switch (a_ty.ptrSize(mod)) {
.Slice => a.sliceLen(mod),
.One => a_ty.childType(mod).arrayLen(mod),
else => unreachable,
};
if (a_len != b.sliceLen(mod)) {
return false;
}
const ptr_ty = ty.slicePtrFieldType(mod);
const a_ptr = switch (a_ty.ptrSize(mod)) {
.Slice => a.slicePtr(mod),
.One => a,
else => unreachable,
};
return try eqlAdvanced(a_ptr, ptr_ty, b.slicePtr(mod), ptr_ty, mod, opt_sema);
},
.Many, .C, .One => {},
},
.Struct => {
// A struct can be represented with one of:
// .the_one_possible_value,
// .aggregate,
// Note that we already checked above for matching tags, e.g. both .aggregate.
return (try ty.onePossibleValue(mod)) != null;
},
.Union => {
// Here we have to check for value equality, as-if `a` has been coerced to `ty`.
if ((try ty.onePossibleValue(mod)) != null) {
return true;
}
return false;
},
.Float => {
switch (ty.floatBits(target)) {
16 => return @bitCast(u16, a.toFloat(f16, mod)) == @bitCast(u16, b.toFloat(f16, mod)),
32 => return @bitCast(u32, a.toFloat(f32, mod)) == @bitCast(u32, b.toFloat(f32, mod)),
64 => return @bitCast(u64, a.toFloat(f64, mod)) == @bitCast(u64, b.toFloat(f64, mod)),
80 => return @bitCast(u80, a.toFloat(f80, mod)) == @bitCast(u80, b.toFloat(f80, mod)),
128 => return @bitCast(u128, a.toFloat(f128, mod)) == @bitCast(u128, b.toFloat(f128, mod)),
else => unreachable,
}
},
.ComptimeFloat => {
const a_float = a.toFloat(f128, mod);
const b_float = b.toFloat(f128, mod);
const a_nan = std.math.isNan(a_float);
const b_nan = std.math.isNan(b_float);
if (a_nan != b_nan) return false;
if (std.math.signbit(a_float) != std.math.signbit(b_float)) return false;
if (a_nan) return true;
return a_float == b_float;
},
.Optional,
.ErrorUnion,
=> unreachable, // handled by InternPool
else => {},
}
- return (try orderAdvanced(a, b, mod, opt_sema)).compare(.eq);
+ assert(mod.intern_pool.typeOf(a.toIntern()) == ty.toIntern());
+ assert(mod.intern_pool.typeOf(b.toIntern()) == ty.toIntern());
+ return a.toIntern() == b.toIntern();
}
pub fn isComptimeMutablePtr(val: Value, mod: *Module) bool {
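
The removed `eqlAdvanced` had to compare aggregates, unions, slices, and floats structurally, and even act as if `a` had been coerced to `ty`. The new `eql` body instead asserts both operands already have type `ty` and compares intern indices, which is sound because the pool deduplicates on insertion: equal contents always yield the same index. A small sketch of that property using a plain string interner (not the compiler's InternPool):

    const std = @import("std");

    // Toy interner: equal contents map to the same index exactly once.
    fn intern(map: *std.StringHashMap(u32), bytes: []const u8) !u32 {
        const gop = try map.getOrPut(bytes);
        if (!gop.found_existing) gop.value_ptr.* = map.count() - 1;
        return gop.value_ptr.*;
    }

    test "structural equality becomes index equality after interning" {
        var map = std.StringHashMap(u32).init(std.testing.allocator);
        defer map.deinit();

        const a = try intern(&map, "hello");
        const b = try intern(&map, "hello");
        const c = try intern(&map, "world");

        try std.testing.expect(a == b); // same contents, same index
        try std.testing.expect(a != c);
    }
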
@@ -1736,15 +1460,6 @@ pub const Value = struct {
};
}
fn hashInt(int_val: Value, hasher: *std.hash.Wyhash, mod: *Module) void {
var buffer: BigIntSpace = undefined;
const big = int_val.toBigInt(&buffer, mod);
std.hash.autoHash(hasher, big.positive);
for (big.limbs) |limb| {
std.hash.autoHash(hasher, limb);
}
}
pub const slice_ptr_index = 0;
pub const slice_len_index = 1;
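
The removed `hashInt` fed an integer's sign and limbs into a Wyhash state; with integers interned, hashing the 32-bit index (as in the deleted `Type.hash` earlier) carries the same information. For reference, a self-contained version of the limb-hashing pattern the deleted helper used, applied to a hand-written limb array rather than a compiler Value:

    const std = @import("std");

    fn hashLimbs(positive: bool, limbs: []const usize) u64 {
        var hasher = std.hash.Wyhash.init(0);
        std.hash.autoHash(&hasher, positive);
        for (limbs) |limb| std.hash.autoHash(&hasher, limb);
        return hasher.final();
    }

    test "limb hashing is deterministic and sign-sensitive" {
        const limbs = [_]usize{ 0xdead_beef, 0x1 };
        try std.testing.expectEqual(hashLimbs(true, &limbs), hashLimbs(true, &limbs));
        try std.testing.expect(hashLimbs(true, &limbs) != hashLimbs(false, &limbs));
    }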