riscv: implement optional logic

David Rubin 2024-05-25 22:52:26 -07:00
parent d69c48370a
commit a270c6f8c8
No known key found for this signature in database
GPG Key ID: C326E694CED89F6D
23 changed files with 222 additions and 105 deletions

View File

@ -1545,6 +1545,58 @@ fn splitType(func: *Func, ty: Type) ![2]Type {
return func.fail("TODO implement splitType for {}", .{ty.fmt(zcu)});
}
/// Truncates the value in the register in place.
/// Clobbers any remaining bits.
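/// For example, a signed 37-bit value is shifted left and then arithmetically
/// right by 27 bits, while an unsigned one is masked so only the low 37 bits survive.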
fn truncateRegister(func: *Func, ty: Type, reg: Register) !void {
const mod = func.bin_file.comp.module.?;
const int_info = if (ty.isAbiInt(mod)) ty.intInfo(mod) else std.builtin.Type.Int{
.signedness = .unsigned,
.bits = @intCast(ty.bitSize(mod)),
};
const shift = math.cast(u6, 64 - int_info.bits % 64) orelse return;
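// a bit count that is a multiple of 64 would need a shift of 64, which does not
// fit in a u6; the cast fails and the register is left untouched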
switch (int_info.signedness) {
.signed => {
_ = try func.addInst(.{
.tag = .slli,
.ops = .rri,
.data = .{
.i_type = .{
.rd = reg,
.rs1 = reg,
.imm12 = Immediate.s(shift),
},
},
});
_ = try func.addInst(.{
.tag = .srai,
.ops = .rri,
.data = .{
.i_type = .{
.rd = reg,
.rs1 = reg,
.imm12 = Immediate.s(shift),
},
},
});
},
.unsigned => {
const mask = ~@as(u64, 0) >> shift;
const tmp_reg = try func.copyToTmpRegister(Type.usize, .{ .immediate = mask });
_ = try func.addInst(.{
.tag = .@"and",
.ops = .rrr,
.data = .{
.r_type = .{
.rd = reg,
.rs1 = reg,
.rs2 = tmp_reg,
},
},
});
},
}
}
fn symbolIndex(func: *Func) !u32 {
const zcu = func.bin_file.comp.module.?;
const decl_index = zcu.funcOwnerDeclIndex(func.func_index);
@ -2868,8 +2920,25 @@ fn airShr(func: *Func, inst: Air.Inst.Index) !void {
}
fn airOptionalPayload(func: *Func, inst: Air.Inst.Index) !void {
const zcu = func.bin_file.comp.module.?;
const ty_op = func.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
const result: MCValue = if (func.liveness.isUnused(inst)) .unreach else return func.fail("TODO implement .optional_payload for {}", .{func.target.cpu.arch});
const result: MCValue = result: {
const pl_ty = func.typeOfIndex(inst);
if (!pl_ty.hasRuntimeBitsIgnoreComptime(zcu)) break :result .none;
const opt_mcv = try func.resolveInst(ty_op.operand);
if (func.reuseOperand(inst, ty_op.operand, 0, opt_mcv)) {
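// the reused register may still hold flag bits above the payload,
// so truncate it down to the payload's size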
switch (opt_mcv) {
.register => |pl_reg| try func.truncateRegister(pl_ty, pl_reg),
else => {},
}
break :result opt_mcv;
}
const pl_mcv = try func.allocRegOrMem(inst, true);
try func.genCopy(pl_ty, pl_mcv, opt_mcv);
break :result pl_mcv;
};
return func.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
@ -3022,16 +3091,40 @@ fn airSaveErrReturnTraceIndex(func: *Func, inst: Air.Inst.Index) !void {
}
fn airWrapOptional(func: *Func, inst: Air.Inst.Index) !void {
const zcu = func.bin_file.comp.module.?;
const ty_op = func.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
const result: MCValue = if (func.liveness.isUnused(inst)) .unreach else result: {
const zcu = func.bin_file.comp.module.?;
const optional_ty = func.typeOfIndex(inst);
const result: MCValue = result: {
const pl_ty = func.typeOf(ty_op.operand);
if (!pl_ty.hasRuntimeBits(zcu)) break :result .{ .immediate = 1 };
// Optional with a zero-bit payload type is just a boolean true
if (optional_ty.abiSize(zcu) == 1)
break :result MCValue{ .immediate = 1 };
const opt_ty = func.typeOfIndex(inst);
const pl_mcv = try func.resolveInst(ty_op.operand);
const same_repr = opt_ty.optionalReprIsPayload(zcu);
if (same_repr and func.reuseOperand(inst, ty_op.operand, 0, pl_mcv)) break :result pl_mcv;
return func.fail("TODO implement wrap optional for {}", .{func.target.cpu.arch});
const pl_lock: ?RegisterLock = switch (pl_mcv) {
.register => |reg| func.register_manager.lockRegAssumeUnused(reg),
else => null,
};
defer if (pl_lock) |lock| func.register_manager.unlockReg(lock);
const opt_mcv = try func.allocRegOrMem(inst, true);
try func.genCopy(pl_ty, opt_mcv, pl_mcv);
if (!same_repr) {
const pl_abi_size: i32 = @intCast(pl_ty.abiSize(zcu));
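// when the optional has its own representation, the non-null flag is a
// single byte placed directly after the payload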
switch (opt_mcv) {
.load_frame => |frame_addr| try func.genSetMem(
.{ .frame = frame_addr.index },
frame_addr.off + pl_abi_size,
Type.u8,
.{ .immediate = 1 },
),
.register => return func.fail("TODO: airWrapOptional opt_mcv register", .{}),
else => unreachable,
}
}
break :result opt_mcv;
};
return func.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
@ -4435,72 +4528,141 @@ fn condBr(func: *Func, cond_ty: Type, condition: MCValue) !Mir.Inst.Index {
});
}
fn isNull(func: *Func, inst: Air.Inst.Index, opt_ty: Type, opt_mcv: MCValue) !MCValue {
const zcu = func.bin_file.comp.module.?;
const pl_ty = opt_ty.optionalChild(zcu);
const some_info: struct { off: i32, ty: Type } = if (opt_ty.optionalReprIsPayload(zcu))
.{ .off = 0, .ty = if (pl_ty.isSlice(zcu)) pl_ty.slicePtrFieldType(zcu) else pl_ty }
else
.{ .off = @intCast(pl_ty.abiSize(zcu)), .ty = Type.bool };
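// payload-representation optionals are null when the payload (or the slice pointer)
// is zero; otherwise the bool flag stored right after the payload is tested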
const return_mcv = try func.allocRegOrMem(inst, true);
assert(return_mcv == .register); // should not be larger than 8 bytes
const return_reg = return_mcv.register;
switch (opt_mcv) {
.none,
.unreach,
.dead,
.undef,
.immediate,
.register_pair,
.register_offset,
.lea_frame,
.lea_symbol,
.reserved_frame,
.air_ref,
=> return func.fail("TODO: hmm {}", .{opt_mcv}),
.register => |opt_reg| {
if (some_info.off == 0) {
_ = try func.addInst(.{
.tag = .pseudo,
.ops = .pseudo_compare,
.data = .{
.compare = .{
.op = .eq,
.rd = return_reg,
.rs1 = opt_reg,
.rs2 = try func.copyToTmpRegister(
some_info.ty,
.{ .immediate = 0 },
),
.size = .byte,
},
},
});
return return_mcv;
}
assert(some_info.ty.ip_index == .bool_type);
const opt_abi_size: u32 = @intCast(opt_ty.abiSize(zcu));
_ = opt_abi_size;
return func.fail("TODO: isNull some_info.off != 0 register", .{});
},
.load_frame => {
const opt_reg = try func.copyToTmpRegister(
some_info.ty,
opt_mcv.address().offset(some_info.off).deref(),
);
const opt_reg_lock = func.register_manager.lockRegAssumeUnused(opt_reg);
defer func.register_manager.unlockReg(opt_reg_lock);
_ = try func.addInst(.{
.tag = .pseudo,
.ops = .pseudo_compare,
.data = .{
.compare = .{
.op = .eq,
.rd = return_reg,
.rs1 = opt_reg,
.rs2 = try func.copyToTmpRegister(
some_info.ty,
.{ .immediate = 0 },
),
.size = .byte,
},
},
});
return return_mcv;
},
else => return func.fail("TODO: isNull {}", .{opt_mcv}),
}
}
fn airIsNull(func: *Func, inst: Air.Inst.Index) !void {
const un_op = func.air.instructions.items(.data)[@intFromEnum(inst)].un_op;
const result: MCValue = if (func.liveness.isUnused(inst)) .unreach else result: {
const operand = try func.resolveInst(un_op);
break :result try func.isNull(operand);
};
const operand = try func.resolveInst(un_op);
const ty = func.typeOf(un_op);
const result = try func.isNull(inst, ty, operand);
return func.finishAir(inst, result, .{ un_op, .none, .none });
}
fn airIsNullPtr(func: *Func, inst: Air.Inst.Index) !void {
const un_op = func.air.instructions.items(.data)[@intFromEnum(inst)].un_op;
const result: MCValue = if (func.liveness.isUnused(inst)) .unreach else result: {
const operand_ptr = try func.resolveInst(un_op);
const operand: MCValue = blk: {
if (func.reuseOperand(inst, un_op, 0, operand_ptr)) {
// The MCValue that holds the pointer can be re-used as the value.
break :blk operand_ptr;
} else {
break :blk try func.allocRegOrMem(inst, true);
}
};
try func.load(operand, operand_ptr, func.typeOf(un_op));
break :result try func.isNull(operand);
};
return func.finishAir(inst, result, .{ un_op, .none, .none });
}
const operand = try func.resolveInst(un_op);
_ = operand; // autofix
const ty = func.typeOf(un_op);
_ = ty; // autofix
fn isNull(func: *Func, operand: MCValue) !MCValue {
_ = operand;
// Here you can specialize this instruction if it makes sense to, otherwise the default
// will call isNonNull and invert the result.
return func.fail("TODO call isNonNull and invert the result", .{});
if (true) return func.fail("TODO: airIsNullPtr", .{});
return func.finishAir(inst, .unreach, .{ un_op, .none, .none });
}
fn airIsNonNull(func: *Func, inst: Air.Inst.Index) !void {
const un_op = func.air.instructions.items(.data)[@intFromEnum(inst)].un_op;
const result: MCValue = if (func.liveness.isUnused(inst)) .unreach else result: {
const operand = try func.resolveInst(un_op);
break :result try func.isNonNull(operand);
};
return func.finishAir(inst, result, .{ un_op, .none, .none });
}
const operand = try func.resolveInst(un_op);
const ty = func.typeOf(un_op);
const result = try func.isNull(inst, ty, operand);
assert(result == .register);
fn isNonNull(func: *Func, operand: MCValue) !MCValue {
_ = operand;
// Here you can specialize this instruction if it makes sense to, otherwise the default
// will call isNull and invert the result.
return func.fail("TODO call isNull and invert the result", .{});
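// isNull left a 0/1 in the result register; invert it in place to produce the is-non-null answer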
_ = try func.addInst(.{
.tag = .pseudo,
.ops = .pseudo_not,
.data = .{
.rr = .{
.rd = result.register,
.rs = result.register,
},
},
});
return func.finishAir(inst, result, .{ un_op, .none, .none });
}
fn airIsNonNullPtr(func: *Func, inst: Air.Inst.Index) !void {
const un_op = func.air.instructions.items(.data)[@intFromEnum(inst)].un_op;
const result: MCValue = if (func.liveness.isUnused(inst)) .unreach else result: {
const operand_ptr = try func.resolveInst(un_op);
const operand: MCValue = blk: {
if (func.reuseOperand(inst, un_op, 0, operand_ptr)) {
// The MCValue that holds the pointer can be re-used as the value.
break :blk operand_ptr;
} else {
break :blk try func.allocRegOrMem(inst, true);
}
};
try func.load(operand, operand_ptr, func.typeOf(un_op));
break :result try func.isNonNull(operand);
};
return func.finishAir(inst, result, .{ un_op, .none, .none });
const operand = try func.resolveInst(un_op);
_ = operand; // autofix
const ty = func.typeOf(un_op);
_ = ty; // autofix
if (true) return func.fail("TODO: airIsNonNullPtr", .{});
return func.finishAir(inst, .unreach, .{ un_op, .none, .none });
}
fn airIsErr(func: *Func, inst: Air.Inst.Index) !void {
@ -5110,7 +5272,7 @@ fn genCopy(func: *Func, ty: Type, dst_mcv: MCValue, src_mcv: MCValue) !void {
dst_mcv,
try func.resolveInst(src_ref),
),
else => unreachable,
else => return func.fail("genCopy register_pair src: {}", .{src_mcv}),
};
defer if (src_info) |info| {

View File

@ -125,6 +125,7 @@ pub fn classifySystem(ty: Type, zcu: *Module) [8]SystemClass {
return result;
}
result[0] = .integer;
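// a zero-sized payload means the optional is only its null flag, which already
// fits in the single integer class assigned above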
if (ty.optionalChild(zcu).abiSize(zcu) == 0) return result;
result[1] = .integer;
return result;
},

View File

@ -230,7 +230,7 @@ pub const Register = enum(u8) {
return @as(u8, reg.id());
}
pub fn bitSize(reg: Register, zcu: Module) u32 {
pub fn bitSize(reg: Register, zcu: *const Module) u32 {
const features = zcu.getTarget().cpu.features;
return switch (@intFromEnum(reg)) {

View File

@ -603,7 +603,6 @@ test "comptime alloc alignment" {
}
test "@alignCast null" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO

View File

@ -483,7 +483,6 @@ fn testStructInFn() !void {
test "fn call returning scalar optional in equality expression" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try expect(getNull() == null);
}
@ -494,7 +493,6 @@ fn getNull() ?*i32 {
test "global variable assignment with optional unwrapping with var initialized to undefined" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
var data: i32 = 1234;

View File

@ -186,7 +186,6 @@ fn expectIntFromFloat(comptime F: type, f: F, comptime I: type, i: I) !void {
test "implicitly cast indirect pointer to maybe-indirect pointer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const Self = @This();
@ -247,7 +246,6 @@ test "coerce undefined to optional" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try expect(MakeType(void).getNull() == null);
try expect(MakeType(void).getNonNull() != null);
@ -1184,7 +1182,6 @@ test "implicit ptr to *anyopaque" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a: u32 = 1;
const ptr: *align(@alignOf(u32)) anyopaque = &a;
@ -1198,7 +1195,6 @@ test "implicit ptr to *anyopaque" {
test "return null from fn () anyerror!?&T" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const a = returnNullFromOptionalTypeErrorRef();
const b = returnNullLitFromOptionalTypeErrorRef();
@ -1289,7 +1285,6 @@ test "implicit cast from *T to ?*anyopaque" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a: u8 = 1;
incrementVoidPtrValue(&a);
@ -1361,7 +1356,6 @@ test "assignment to optional pointer result loc" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var foo: struct { ptr: ?*anyopaque } = .{ .ptr = &global_struct };
_ = &foo;
@ -1437,7 +1431,6 @@ test "peer type resolution: unreachable, null, slice" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest(num: usize, word: []const u8) !void {
@ -1478,7 +1471,6 @@ test "cast compatible optional types" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var a: ?[:0]const u8 = null;
_ = &a;
@ -1591,7 +1583,6 @@ test "bitcast packed struct with u0" {
test "optional pointer coerced to optional allowzero pointer" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var p: ?*u32 = undefined;
var q: ?*allowzero u32 = undefined;
@ -1608,8 +1599,6 @@ test "optional slice coerced to allowzero many pointer" {
}
test "optional slice passed as parameter coerced to allowzero many pointer" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const ns = struct {
const Color = struct {
r: u8,
@ -1832,7 +1821,6 @@ test "peer type resolution: error union and optional of same type" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const E = error{Foo};
var a: E!*u8 = error.Foo;
@ -1878,7 +1866,6 @@ test "peer type resolution: three-way resolution combines error set and optional
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const E = error{Foo};
var a: E = error.Foo;
@ -2104,7 +2091,6 @@ test "peer type resolution: tuple pointer and optional slice" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
// Miscompilation on Intel's OpenCL CPU runtime.
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest; // flaky
@ -2389,7 +2375,6 @@ test "cast builtins can wrap result in error union and optional" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const MyEnum = enum(u32) { _ };

View File

@ -124,7 +124,6 @@ test "debug info for optional error set" {
test "implicit cast to optional to error union to return result loc" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn entry() !void {

View File

@ -1548,7 +1548,6 @@ test "non-optional and optional array elements concatenated" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const array = [1]u8{'A'} ++ [1]?u8{null};
var index: usize = 0;

View File

@ -104,7 +104,6 @@ test "inline function call that calls optional function pointer, return pointer
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
field: u32,
@ -259,7 +258,6 @@ test "implicit cast fn call result to optional in field result" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn entry() !void {
@ -473,7 +471,6 @@ test "method call with optional and error union first param" {
test "method call with optional pointer first param" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
x: i32 = 1234,

View File

@ -444,7 +444,6 @@ test "generic function passed as comptime argument" {
test "return type of generic function is function pointer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn b(comptime T: type) ?*const fn () error{}!T {

View File

@ -139,7 +139,6 @@ test "if-else expression with runtime condition result location is inferred opti
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const A = struct { b: u64, c: u64 };
var d: bool = true;

View File

@ -85,7 +85,6 @@ fn testTestNullRuntime(x: ?i32) !void {
test "optional void" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
try optionalVoidImpl();
try comptime optionalVoidImpl();
@ -109,7 +108,6 @@ const Empty = struct {};
test "optional struct{}" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
_ = try optionalEmptyStructImpl();
_ = try comptime optionalEmptyStructImpl();
@ -135,7 +133,6 @@ test "null with default unwrap" {
test "optional pointer to 0 bit type null value at runtime" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const EmptyStruct = struct {};
var x: ?*EmptyStruct = null;

View File

@ -29,7 +29,6 @@ pub const EmptyStruct = struct {};
test "optional pointer to size zero struct" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var e = EmptyStruct{};
const o: ?*EmptyStruct = &e;
@ -60,7 +59,6 @@ fn testNullPtrsEql() !void {
test "optional with zero-bit type" {
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {
@ -241,7 +239,6 @@ test "compare optionals with modified payloads" {
test "unwrap function call with optional pointer return value" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn entry() !void {
@ -373,7 +370,6 @@ test "0-bit child type coerced to optional return ptr result location" {
test "0-bit child type coerced to optional" {
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -638,7 +634,6 @@ test "result location initialization of optional with OPV payload" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
const S = struct {

View File

@ -174,7 +174,6 @@ test "implicit cast error unions with non-optional to optional pointer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
fn doTheTest() !void {
@ -222,7 +221,6 @@ test "assign null directly to C pointer and test null equality" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var x: [*c]i32 = null;
_ = &x;

View File

@ -34,7 +34,6 @@ test "@ptrFromInt creates null pointer" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const ptr = @as(?*u32, @ptrFromInt(0));
try expectEqual(@as(?*u32, null), ptr);

View File

@ -328,7 +328,6 @@ test "peer type resolution with @TypeOf doesn't trigger dependency loop check" {
if (builtin.zig_backend == .stage2_x86) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const T = struct {
next: @TypeOf(null, @as(*const @This(), undefined)),

View File

@ -246,7 +246,6 @@ fn sliceFromLenToLen(a_slice: []u8, start: usize, end: usize) []u8 {
test "C pointer" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var buf: [*c]const u8 = "kjdhfkjdhfdkjhfkfjhdfkjdhfkdjhfdkjhf";
var len: u32 = 10;

View File

@ -1873,8 +1873,6 @@ test "initializer takes a pointer to a variable inside its struct" {
}
test "circular dependency through pointer field of a struct" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const StructInner = extern struct {
outer: StructOuter = std.mem.zeroes(StructOuter),
@ -2151,7 +2149,6 @@ test "initiate global variable with runtime value" {
test "struct containing optional pointer to array of @This()" {
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
x: ?*const [1]@This(),

View File

@ -5,7 +5,6 @@ const builtin = @import("builtin");
test "struct contains null pointer which contains original struct" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var x: ?*NodeLineComment = null;
_ = &x;

View File

@ -50,7 +50,6 @@ test "this used as optional function parameter" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var global: State = undefined;
global.enter = prev;

View File

@ -260,7 +260,6 @@ test "Type.Struct" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const A = @Type(@typeInfo(struct { x: u8, y: u32 }));
const infoA = @typeInfo(A).Struct;

View File

@ -2145,7 +2145,6 @@ test "pass register-sized field as non-register-sized union" {
test "circular dependency through pointer field of a union" {
if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
const S = struct {
const UnionInner = extern struct {

View File

@ -37,7 +37,6 @@ test "void optional" {
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
var x: ?void = {};
_ = &x;