Mirror of https://github.com/ziglang/zig.git, synced 2025-02-01 14:55:08 +00:00
AstGen: use RLS to infer the first argument of @fieldParentPtr
commit 17673dcd6e
parent e409afb79b
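For context, a minimal sketch of the user-facing change this commit implies: `@fieldParentPtr` loses its explicit parent type argument (`param_count` drops from 3 to 2 in BuiltinFn.zig below), and the parent pointer type is instead inferred from the result location (RLS). The `Node` type and `parentOfData` function are illustrative names, not taken from the commit.

```zig
const Node = struct {
    data: u32,
    next: ?*Node = null,
};

fn parentOfData(data_ptr: *u32) *Node {
    // Old form (explicit parent type, three arguments):
    //     return @fieldParentPtr(Node, "data", data_ptr);
    // New form: the parent pointer type (*Node) is inferred from the
    // result location, here the function's return type.
    return @fieldParentPtr("data", data_ptr);
}
```

The same inference works through any location with a known result type, e.g. `const parent: *Node = @fieldParentPtr("data", data_ptr);`.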
@@ -316,8 +316,7 @@ const ResultInfo = struct {
};

/// Find the result type for a cast builtin given the result location.
/// If the location does not have a known result type, emits an error on
/// the given node.
/// If the location does not have a known result type, returns `null`.
fn resultType(rl: Loc, gz: *GenZir, node: Ast.Node.Index) !?Zir.Inst.Ref {
return switch (rl) {
.discard, .none, .ref, .inferred_ptr, .destructure => null,

@@ -330,6 +329,9 @@ const ResultInfo = struct {
};
}

/// Find the result type for a cast builtin given the result location.
/// If the location does not have a known result type, emits an error on
/// the given node.
fn resultTypeForCast(rl: Loc, gz: *GenZir, node: Ast.Node.Index, builtin_name: []const u8) !Zir.Inst.Ref {
const astgen = gz.astgen;
if (try rl.resultType(gz, node)) |ty| return ty;

@@ -2786,7 +2788,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.atomic_load,
.atomic_rmw,
.mul_add,
.field_parent_ptr,
.max,
.min,
.c_import,

@@ -8853,6 +8854,7 @@ fn ptrCast(
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);

const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
var flags: Zir.Inst.FullPtrCastFlags = .{};

// Note that all pointer cast builtins have one parameter, so we only need

@@ -8870,36 +8872,62 @@ fn ptrCast(
}

if (node_datas[node].lhs == 0) break; // 0 args
if (node_datas[node].rhs != 0) break; // 2 args

const builtin_token = main_tokens[node];
const builtin_name = tree.tokenSlice(builtin_token);
const info = BuiltinFn.list.get(builtin_name) orelse break;
if (info.param_count != 1) break;
if (node_datas[node].rhs == 0) {
// 1 arg
if (info.param_count != 1) break;

switch (info.tag) {
else => break,
inline .ptr_cast,
.align_cast,
.addrspace_cast,
.const_cast,
.volatile_cast,
=> |tag| {
if (@field(flags, @tagName(tag))) {
return astgen.failNode(node, "redundant {s}", .{builtin_name});
}
@field(flags, @tagName(tag)) = true;
},
switch (info.tag) {
else => break,
inline .ptr_cast,
.align_cast,
.addrspace_cast,
.const_cast,
.volatile_cast,
=> |tag| {
if (@field(flags, @tagName(tag))) {
return astgen.failNode(node, "redundant {s}", .{builtin_name});
}
@field(flags, @tagName(tag)) = true;
},
}

node = node_datas[node].lhs;
} else {
// 2 args
if (info.param_count != 2) break;

switch (info.tag) {
else => break,
.field_parent_ptr => {
if (flags.ptr_cast) break;

const flags_int: FlagsInt = @bitCast(flags);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, root_node);
const parent_ptr_type = try ri.rl.resultTypeForCast(gz, root_node, "@alignCast");
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, node_datas[node].lhs);
const field_ptr = try expr(gz, scope, .{ .rl = .none }, node_datas[node].rhs);
try emitDbgStmt(gz, cursor);
const result = try gz.addExtendedPayloadSmall(.field_parent_ptr, flags_int, Zir.Inst.FieldParentPtr{
.src_node = gz.nodeIndexToRelative(node),
.parent_ptr_type = parent_ptr_type,
.field_name = field_name,
.field_ptr = field_ptr,
});
return rvalue(gz, ri, result, root_node);
},
}
}

node = node_datas[node].lhs;
}

const flags_i: u5 = @bitCast(flags);
assert(flags_i != 0);
const flags_int: FlagsInt = @bitCast(flags);
assert(flags_int != 0);

const ptr_only: Zir.Inst.FullPtrCastFlags = .{ .ptr_cast = true };
if (flags_i == @as(u5, @bitCast(ptr_only))) {
if (flags_int == @as(FlagsInt, @bitCast(ptr_only))) {
// Special case: simpler representation
return typeCast(gz, scope, ri, root_node, node, .ptr_cast, "@ptrCast");
}

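The two-argument branch added in the hunk above also lets `@fieldParentPtr` participate in the existing pointer-cast folding: when it appears directly under `@alignCast`, `@addrspaceCast`, `@constCast`, or `@volatileCast`, AstGen emits a single `field_parent_ptr` extended instruction whose `small` field carries the accumulated `FullPtrCastFlags`, and the parent pointer type is taken from the result location of the outermost builtin. A hedged sketch of such a call site (the type `S` and the function name are illustrative, not from the commit):

```zig
const S = struct { x: u32 = 0 };

fn parentFromConstField(x_ptr: *const u32) *S {
    // The @constCast is not lowered as a separate ZIR cast; it becomes the
    // const_cast flag on the single field_parent_ptr extended instruction,
    // and *S (the return type) supplies parent_ptr_type.
    return @constCast(@fieldParentPtr("x", x_ptr));
}
```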
@@ -8908,12 +8936,12 @@ fn ptrCast(
.const_cast = true,
.volatile_cast = true,
};
if ((flags_i & ~@as(u5, @bitCast(no_result_ty_flags))) == 0) {
if ((flags_int & ~@as(FlagsInt, @bitCast(no_result_ty_flags))) == 0) {
// Result type not needed
const cursor = maybeAdvanceSourceCursorToMainToken(gz, root_node);
const operand = try expr(gz, scope, .{ .rl = .none }, node);
try emitDbgStmt(gz, cursor);
const result = try gz.addExtendedPayloadSmall(.ptr_cast_no_dest, flags_i, Zir.Inst.UnNode{
const result = try gz.addExtendedPayloadSmall(.ptr_cast_no_dest, flags_int, Zir.Inst.UnNode{
.node = gz.nodeIndexToRelative(root_node),
.operand = operand,
});

@@ -8926,7 +8954,7 @@ fn ptrCast(
const result_type = try ri.rl.resultTypeForCast(gz, root_node, flags.needResultTypeBuiltinName());
const operand = try expr(gz, scope, .{ .rl = .none }, node);
try emitDbgStmt(gz, cursor);
const result = try gz.addExtendedPayloadSmall(.ptr_cast_full, flags_i, Zir.Inst.BinNode{
const result = try gz.addExtendedPayloadSmall(.ptr_cast_full, flags_int, Zir.Inst.BinNode{
.node = gz.nodeIndexToRelative(root_node),
.lhs = result_type,
.rhs = operand,

@@ -9379,7 +9407,7 @@ fn builtinCall(
try emitDbgNode(gz, node);

const result = try gz.addExtendedPayload(.error_cast, Zir.Inst.BinNode{
.lhs = try ri.rl.resultTypeForCast(gz, node, "@errorCast"),
.lhs = try ri.rl.resultTypeForCast(gz, node, builtin_name),
.rhs = try expr(gz, scope, .{ .rl = .none }, params[0]),
.node = gz.nodeIndexToRelative(node),
});

@@ -9452,7 +9480,7 @@ fn builtinCall(
},

.splat => {
const result_type = try ri.rl.resultTypeForCast(gz, node, "@splat");
const result_type = try ri.rl.resultTypeForCast(gz, node, builtin_name);
const elem_type = try gz.addUnNode(.vector_elem_type, result_type, node);
const scalar = try expr(gz, scope, .{ .rl = .{ .ty = elem_type } }, params[0]);
const result = try gz.addPlNode(.splat, node, Zir.Inst.Bin{

@@ -9537,12 +9565,13 @@ fn builtinCall(
return rvalue(gz, ri, result, node);
},
.field_parent_ptr => {
const parent_type = try typeExpr(gz, scope, params[0]);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, params[1]);
const result = try gz.addPlNode(.field_parent_ptr, node, Zir.Inst.FieldParentPtr{
.parent_type = parent_type,
const parent_ptr_type = try ri.rl.resultTypeForCast(gz, node, builtin_name);
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, params[0]);
const result = try gz.addExtendedPayloadSmall(.field_parent_ptr, 0, Zir.Inst.FieldParentPtr{
.src_node = gz.nodeIndexToRelative(node),
.parent_ptr_type = parent_ptr_type,
.field_name = field_name,
.field_ptr = try expr(gz, scope, .{ .rl = .none }, params[2]),
.field_ptr = try expr(gz, scope, .{ .rl = .none }, params[1]),
});
return rvalue(gz, ri, result, node);
},

@@ -911,6 +911,7 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
.work_item_id,
.work_group_size,
.work_group_id,
.field_parent_ptr,
=> {
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
return false;

@@ -976,7 +977,6 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
},
.bit_offset_of,
.offset_of,
.field_parent_ptr,
.has_decl,
.has_field,
.field,

@@ -504,7 +504,7 @@ pub const list = list: {
"@fieldParentPtr",
.{
.tag = .field_parent_ptr,
.param_count = 3,
.param_count = 2,
},
},
.{

@@ -940,9 +940,6 @@ pub const Inst = struct {
/// The addend communicates the type of the builtin.
/// The mulends need to be coerced to the same type.
mul_add,
/// Implements the `@fieldParentPtr` builtin.
/// Uses the `pl_node` union field with payload `FieldParentPtr`.
field_parent_ptr,
/// Implements the `@memcpy` builtin.
/// Uses the `pl_node` union field with payload `Bin`.
memcpy,

@@ -1230,7 +1227,6 @@ pub const Inst = struct {
.atomic_store,
.mul_add,
.builtin_call,
.field_parent_ptr,
.max,
.memcpy,
.memset,

@@ -1522,7 +1518,6 @@ pub const Inst = struct {
.atomic_rmw,
.mul_add,
.builtin_call,
.field_parent_ptr,
.max,
.min,
.c_import,

@@ -1794,7 +1789,6 @@ pub const Inst = struct {
.atomic_store = .pl_node,
.mul_add = .pl_node,
.builtin_call = .pl_node,
.field_parent_ptr = .pl_node,
.max = .pl_node,
.memcpy = .pl_node,
.memset = .pl_node,

@@ -2064,6 +2058,12 @@ pub const Inst = struct {
/// with a specific value. For instance, this is used for the capture of an `errdefer`.
/// This should never appear in a body.
value_placeholder,
/// Implements the `@fieldParentPtr` builtin.
/// `operand` is payload index to `FieldParentPtr`.
/// `small` contains `FullPtrCastFlags`.
/// Guaranteed to not have the `ptr_cast` flag.
/// Uses the `pl_node` union field with payload `FieldParentPtr`.
field_parent_ptr,

pub const InstData = struct {
opcode: Extended,

@@ -3363,9 +3363,14 @@ pub const Inst = struct {
};

pub const FieldParentPtr = struct {
parent_type: Ref,
src_node: i32,
parent_ptr_type: Ref,
field_name: Ref,
field_ptr: Ref,

pub fn src(self: FieldParentPtr) LazySrcLoc {
return LazySrcLoc.nodeOffset(self.src_node);
}
};

pub const Shuffle = struct {

src/Sema.zig

@@ -1131,7 +1131,6 @@ fn analyzeBodyInner(
.atomic_rmw => try sema.zirAtomicRmw(block, inst),
.mul_add => try sema.zirMulAdd(block, inst),
.builtin_call => try sema.zirBuiltinCall(block, inst),
.field_parent_ptr => try sema.zirFieldParentPtr(block, inst),
.@"resume" => try sema.zirResume(block, inst),
.@"await" => try sema.zirAwait(block, inst),
.for_len => try sema.zirForLen(block, inst),

@@ -1296,6 +1295,7 @@ fn analyzeBodyInner(
continue;
},
.value_placeholder => unreachable, // never appears in a body
.field_parent_ptr => try sema.zirFieldParentPtr(block, extended),
};
},

@@ -22757,10 +22757,8 @@ fn zirErrorCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData
}

fn zirPtrCastFull(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(
@typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?,
@truncate(extended.small),
));
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
const extra = sema.code.extraData(Zir.Inst.BinNode, extended.operand).data;
const src = LazySrcLoc.nodeOffset(extra.node);
const operand_src: LazySrcLoc = .{ .node_offset_ptrcast_operand = extra.node };

@@ -22773,6 +22771,7 @@ fn zirPtrCastFull(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDa
operand,
operand_src,
dest_ty,
flags.needResultTypeBuiltinName(),
);
}

@@ -22791,6 +22790,7 @@ fn zirPtrCast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
operand,
operand_src,
dest_ty,
"@ptrCast",
);
}

@@ -22802,6 +22802,7 @@ fn ptrCastFull(
operand: Air.Inst.Ref,
operand_src: LazySrcLoc,
dest_ty: Type,
operation: []const u8,
) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const operand_ty = sema.typeOf(operand);

@@ -22834,7 +22835,7 @@ fn ptrCastFull(
};
const dest_elem_size = Type.fromInterned(dest_info.child).abiSize(mod);
if (src_elem_size != dest_elem_size) {
return sema.fail(block, src, "TODO: implement @ptrCast between slices changing the length", .{});
return sema.fail(block, src, "TODO: implement {s} between slices changing the length", .{operation});
}
}

@@ -22983,7 +22984,7 @@ fn ptrCastFull(
if (!flags.align_cast) {
if (dest_align.compare(.gt, src_align)) {
return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(block, src, "cast increases pointer alignment", .{});
const msg = try sema.errMsg(block, src, "{s} increases pointer alignment", .{operation});
errdefer msg.destroy(sema.gpa);
try sema.errNote(block, operand_src, msg, "'{}' has alignment '{d}'", .{
operand_ty.fmt(mod), src_align.toByteUnits() orelse 0,

@@ -23000,7 +23001,7 @@ fn ptrCastFull(
if (!flags.addrspace_cast) {
if (src_info.flags.address_space != dest_info.flags.address_space) {
return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(block, src, "cast changes pointer address space", .{});
const msg = try sema.errMsg(block, src, "{s} changes pointer address space", .{operation});
errdefer msg.destroy(sema.gpa);
try sema.errNote(block, operand_src, msg, "'{}' has address space '{s}'", .{
operand_ty.fmt(mod), @tagName(src_info.flags.address_space),

@@ -23030,7 +23031,7 @@ fn ptrCastFull(
if (!flags.const_cast) {
if (src_info.flags.is_const and !dest_info.flags.is_const) {
return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(block, src, "cast discards const qualifier", .{});
const msg = try sema.errMsg(block, src, "{s} discards const qualifier", .{operation});
errdefer msg.destroy(sema.gpa);
try sema.errNote(block, src, msg, "use @constCast to discard const qualifier", .{});
break :msg msg;

@@ -23041,7 +23042,7 @@ fn ptrCastFull(
if (!flags.volatile_cast) {
if (src_info.flags.is_volatile and !dest_info.flags.is_volatile) {
return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(block, src, "cast discards volatile qualifier", .{});
const msg = try sema.errMsg(block, src, "{s} discards volatile qualifier", .{operation});
errdefer msg.destroy(sema.gpa);
try sema.errNote(block, src, msg, "use @volatileCast to discard volatile qualifier", .{});
break :msg msg;

@@ -23187,10 +23188,8 @@ fn ptrCastFull(

fn zirPtrCastNoDest(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(
@typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?,
@truncate(extended.small),
));
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
const extra = sema.code.extraData(Zir.Inst.UnNode, extended.operand).data;
const src = LazySrcLoc.nodeOffset(extra.node);
const operand_src: LazySrcLoc = .{ .node_offset_ptrcast_operand = extra.node };

@@ -24859,25 +24858,28 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
);
}

fn zirFieldParentPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
fn zirFieldParentPtr(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const ip = &mod.intern_pool;

const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const extra = sema.code.extraData(Zir.Inst.FieldParentPtr, inst_data.payload_index).data;
const inst_src = inst_data.src();
const parent_ptr_ty_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const field_name_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
const field_ptr_src: LazySrcLoc = .{ .node_offset_builtin_call_arg2 = inst_data.src_node };
const extra = sema.code.extraData(Zir.Inst.FieldParentPtr, extended.operand).data;
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
assert(!flags.ptr_cast);
const inst_src = extra.src();
const field_name_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = extra.src_node };
const field_ptr_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = extra.src_node };

const parent_ptr_ty = try sema.resolveType(block, parent_ptr_ty_src, extra.parent_type);
try sema.checkPtrType(block, parent_ptr_ty_src, parent_ptr_ty, false);
if (!parent_ptr_ty.isSinglePointer(mod)) {
return sema.fail(block, parent_ptr_ty_src, "expected single pointer type, found '{}'", .{parent_ptr_ty.fmt(sema.mod)});
const parent_ptr_ty = try sema.resolveDestType(block, inst_src, extra.parent_ptr_type, .remove_eu, "@fieldParentPtr");
try sema.checkPtrType(block, inst_src, parent_ptr_ty, true);
const parent_ptr_info = parent_ptr_ty.ptrInfo(mod);
if (parent_ptr_info.flags.size != .One) {
return sema.fail(block, inst_src, "expected single pointer type, found '{}'", .{parent_ptr_ty.fmt(sema.mod)});
}
const parent_ty = parent_ptr_ty.childType(mod);
if (parent_ty.zigTypeTag(mod) != .Struct and parent_ty.zigTypeTag(mod) != .Union) {
return sema.fail(block, parent_ptr_ty_src, "expected pointer to struct or union type, found '{}'", .{parent_ptr_ty.fmt(sema.mod)});
const parent_ty = Type.fromInterned(parent_ptr_info.child);
switch (parent_ty.zigTypeTag(mod)) {
.Struct, .Union => {},
else => return sema.fail(block, inst_src, "expected pointer to struct or union type, found '{}'", .{parent_ptr_ty.fmt(sema.mod)}),
}
try sema.resolveTypeLayout(parent_ty);

@@ -24916,7 +24918,7 @@ fn zirFieldParentPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileEr
.is_allowzero = field_ptr_info.flags.is_allowzero,
.address_space = field_ptr_info.flags.address_space,
},
.packed_offset = parent_ptr_ty.ptrInfo(mod).packed_offset,
.packed_offset = parent_ptr_info.packed_offset,
};
const field_ty = parent_ty.structFieldType(field_index, mod);
var actual_field_ptr_info: InternPool.Key.PtrType = .{

@@ -25000,7 +25002,7 @@ fn zirFieldParentPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileEr
} },
});
};
return sema.coerce(block, parent_ptr_ty, result, inst_src);
return sema.ptrCastFull(block, flags, inst_src, result, inst_src, parent_ptr_ty, "@fieldParentPtr");
}

fn zirMinMax(

@@ -355,7 +355,6 @@ const Writer = struct {
.atomic_rmw => try self.writeAtomicRmw(stream, inst),
.shuffle => try self.writeShuffle(stream, inst),
.mul_add => try self.writeMulAdd(stream, inst),
.field_parent_ptr => try self.writeFieldParentPtr(stream, inst),
.builtin_call => try self.writeBuiltinCall(stream, inst),

.field_type_ref => try self.writeFieldTypeRef(stream, inst),

@@ -609,6 +608,7 @@ const Writer = struct {

.restore_err_ret_index => try self.writeRestoreErrRetIndex(stream, extended),
.closure_get => try self.writeClosureGet(stream, extended),
.field_parent_ptr => try self.writeFieldParentPtr(stream, extended),
}
}

@@ -901,16 +901,21 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}

fn writeFieldParentPtr(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const extra = self.code.extraData(Zir.Inst.FieldParentPtr, inst_data.payload_index).data;
try self.writeInstRef(stream, extra.parent_type);
fn writeFieldParentPtr(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const extra = self.code.extraData(Zir.Inst.FieldParentPtr, extended.operand).data;
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
if (flags.align_cast) try stream.writeAll("align_cast, ");
if (flags.addrspace_cast) try stream.writeAll("addrspace_cast, ");
if (flags.const_cast) try stream.writeAll("const_cast, ");
if (flags.volatile_cast) try stream.writeAll("volatile_cast, ");
try self.writeInstRef(stream, extra.parent_ptr_type);
try stream.writeAll(", ");
try self.writeInstRef(stream, extra.field_name);
try stream.writeAll(", ");
try self.writeInstRef(stream, extra.field_ptr);
try stream.writeAll(") ");
try self.writeSrc(stream, inst_data.src());
try self.writeSrc(stream, extra.src());
}

fn writeBuiltinAsyncCall(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {

@@ -1069,7 +1074,8 @@ const Writer = struct {
}

fn writePtrCastFull(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const flags = @as(Zir.Inst.FullPtrCastFlags, @bitCast(@as(u5, @truncate(extended.small))));
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
const extra = self.code.extraData(Zir.Inst.BinNode, extended.operand).data;
const src = LazySrcLoc.nodeOffset(extra.node);
if (flags.ptr_cast) try stream.writeAll("ptr_cast, ");

@@ -1085,7 +1091,8 @@ const Writer = struct {
}

fn writePtrCastNoDest(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const flags = @as(Zir.Inst.FullPtrCastFlags, @bitCast(@as(u5, @truncate(extended.small))));
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
const extra = self.code.extraData(Zir.Inst.UnNode, extended.operand).data;
const src = LazySrcLoc.nodeOffset(extra.node);
if (flags.const_cast) try stream.writeAll("const_cast, ");

@@ -8,7 +8,7 @@ export fn entry() u32 {
// backend=stage2
// target=native
//
// :3:23: error: cast increases pointer alignment
// :3:23: error: @ptrCast increases pointer alignment
// :3:32: note: '*u8' has alignment '1'
// :3:23: note: '*u32' has alignment '4'
// :3:23: note: use @alignCast to assert pointer alignment

@@ -16,7 +16,7 @@ export fn c() void {
//
// :3:45: error: null pointer casted to type '*const u32'
// :6:34: error: expected pointer type, found 'comptime_int'
// :9:22: error: cast increases pointer alignment
// :9:22: error: @ptrCast increases pointer alignment
// :9:71: note: '?*const u8' has alignment '1'
// :9:22: note: '?*f32' has alignment '4'
// :9:22: note: use @alignCast to assert pointer alignment

@@ -8,5 +8,5 @@ export fn entry() void {
// backend=stage2
// target=native
//
// :3:21: error: cast discards const qualifier
// :3:21: error: @ptrCast discards const qualifier
// :3:21: note: use @constCast to discard const qualifier

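The updated test expectations above reflect that the `ptrCastFull` diagnostics now name the concrete builtin via the new `operation` parameter. A sketch of code (not the actual test input) that now reports the reworded error:

```zig
export fn entry(ptr: *u8) u32 {
    // This intentionally fails to compile; the diagnostic now reads
    //     error: @ptrCast increases pointer alignment
    // rather than the old "cast increases pointer alignment".
    const wide: *u32 = @ptrCast(ptr);
    return wide.*;
}
```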