mirror of
https://github.com/ziglang/zig.git
synced 2025-01-21 01:14:32 +00:00
Update uses of @fieldParentPtr
to use RLS
This commit is contained in:
parent
17673dcd6e
commit
eb723a4070
@ -3107,7 +3107,7 @@ test "struct namespaced variable" {
|
||||
// struct field order is determined by the compiler for optimal performance.
|
||||
// however, you can still calculate a struct base pointer given a field pointer:
|
||||
fn setYBasedOnX(x: *f32, y: f32) void {
|
||||
const point = @fieldParentPtr(Point, "x", x);
|
||||
const point: *Point = @fieldParentPtr("x", x);
|
||||
point.y = y;
|
||||
}
|
||||
test "field parent pointer" {
|
||||
@ -8757,8 +8757,7 @@ test "decl access by string" {
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@fieldParentPtr#}
|
||||
<pre>{#syntax#}@fieldParentPtr(comptime ParentType: type, comptime field_name: []const u8,
|
||||
field_ptr: *T) *ParentType{#endsyntax#}</pre>
|
||||
<pre>{#syntax#}@fieldParentPtr(comptime field_name: []const u8, field_ptr: *T) anytype{#endsyntax#}</pre>
|
||||
<p>
|
||||
Given a pointer to a field, returns the base pointer of a struct.
|
||||
</p>
|
||||
|
12
lib/compiler/aro/aro/pragmas/gcc.zig
vendored
12
lib/compiler/aro/aro/pragmas/gcc.zig
vendored
@ -37,18 +37,18 @@ const Directive = enum {
|
||||
};
|
||||
|
||||
fn beforePreprocess(pragma: *Pragma, comp: *Compilation) void {
|
||||
var self = @fieldParentPtr(*GCC, "pragma", pragma);
|
||||
var self: *GCC = @fieldParentPtr("pragma", pragma);
|
||||
self.original_options = comp.diagnostics.options;
|
||||
}
|
||||
|
||||
fn beforeParse(pragma: *Pragma, comp: *Compilation) void {
|
||||
var self = @fieldParentPtr(*GCC, "pragma", pragma);
|
||||
var self: *GCC = @fieldParentPtr("pragma", pragma);
|
||||
comp.diagnostics.options = self.original_options;
|
||||
self.options_stack.items.len = 0;
|
||||
}
|
||||
|
||||
fn afterParse(pragma: *Pragma, comp: *Compilation) void {
|
||||
var self = @fieldParentPtr(*GCC, "pragma", pragma);
|
||||
var self: *GCC = @fieldParentPtr("pragma", pragma);
|
||||
comp.diagnostics.options = self.original_options;
|
||||
self.options_stack.items.len = 0;
|
||||
}
|
||||
@ -60,7 +60,7 @@ pub fn init(allocator: mem.Allocator) !*Pragma {
|
||||
}
|
||||
|
||||
fn deinit(pragma: *Pragma, comp: *Compilation) void {
|
||||
var self = @fieldParentPtr(*GCC, "pragma", pragma);
|
||||
var self: *GCC = @fieldParentPtr("pragma", pragma);
|
||||
self.options_stack.deinit(comp.gpa);
|
||||
comp.gpa.destroy(self);
|
||||
}
|
||||
@ -108,7 +108,7 @@ fn diagnosticHandler(self: *GCC, pp: *Preprocessor, start_idx: TokenIndex) Pragm
|
||||
}
|
||||
|
||||
fn preprocessorHandler(pragma: *Pragma, pp: *Preprocessor, start_idx: TokenIndex) Pragma.Error!void {
|
||||
var self = @fieldParentPtr(*GCC, "pragma", pragma);
|
||||
var self: *GCC = @fieldParentPtr("pragma", pragma);
|
||||
const directive_tok = pp.tokens.get(start_idx + 1);
|
||||
if (directive_tok.id == .nl) return;
|
||||
|
||||
@ -174,7 +174,7 @@ fn preprocessorHandler(pragma: *Pragma, pp: *Preprocessor, start_idx: TokenIndex
|
||||
}
|
||||
|
||||
fn parserHandler(pragma: *Pragma, p: *Parser, start_idx: TokenIndex) Compilation.Error!void {
|
||||
var self = @fieldParentPtr(*GCC, "pragma", pragma);
|
||||
var self: *GCC = @fieldParentPtr("pragma", pragma);
|
||||
const directive_tok = p.pp.tokens.get(start_idx + 1);
|
||||
if (directive_tok.id == .nl) return;
|
||||
const name = p.pp.expandedSlice(directive_tok);
|
||||
|
2
lib/compiler/aro/aro/pragmas/message.zig
vendored
2
lib/compiler/aro/aro/pragmas/message.zig
vendored
@ -22,7 +22,7 @@ pub fn init(allocator: mem.Allocator) !*Pragma {
|
||||
}
|
||||
|
||||
fn deinit(pragma: *Pragma, comp: *Compilation) void {
|
||||
const self = @fieldParentPtr(*Message, "pragma", pragma);
|
||||
const self: *Message = @fieldParentPtr("pragma", pragma);
|
||||
comp.gpa.destroy(self);
|
||||
}
|
||||
|
||||
|
6
lib/compiler/aro/aro/pragmas/once.zig
vendored
6
lib/compiler/aro/aro/pragmas/once.zig
vendored
@ -27,18 +27,18 @@ pub fn init(allocator: mem.Allocator) !*Pragma {
|
||||
}
|
||||
|
||||
fn afterParse(pragma: *Pragma, _: *Compilation) void {
|
||||
var self = @fieldParentPtr(*Once, "pragma", pragma);
|
||||
var self: *Once = @fieldParentPtr("pragma", pragma);
|
||||
self.pragma_once.clearRetainingCapacity();
|
||||
}
|
||||
|
||||
fn deinit(pragma: *Pragma, comp: *Compilation) void {
|
||||
var self = @fieldParentPtr(*Once, "pragma", pragma);
|
||||
var self: *Once = @fieldParentPtr("pragma", pragma);
|
||||
self.pragma_once.deinit();
|
||||
comp.gpa.destroy(self);
|
||||
}
|
||||
|
||||
fn preprocessorHandler(pragma: *Pragma, pp: *Preprocessor, start_idx: TokenIndex) Pragma.Error!void {
|
||||
var self = @fieldParentPtr(*Once, "pragma", pragma);
|
||||
var self: *Once = @fieldParentPtr("pragma", pragma);
|
||||
const name_tok = pp.tokens.get(start_idx);
|
||||
const next = pp.tokens.get(start_idx + 1);
|
||||
if (next.id != .nl) {
|
||||
|
4
lib/compiler/aro/aro/pragmas/pack.zig
vendored
4
lib/compiler/aro/aro/pragmas/pack.zig
vendored
@ -24,13 +24,13 @@ pub fn init(allocator: mem.Allocator) !*Pragma {
|
||||
}
|
||||
|
||||
fn deinit(pragma: *Pragma, comp: *Compilation) void {
|
||||
var self = @fieldParentPtr(*Pack, "pragma", pragma);
|
||||
var self: *Pack = @fieldParentPtr("pragma", pragma);
|
||||
self.stack.deinit(comp.gpa);
|
||||
comp.gpa.destroy(self);
|
||||
}
|
||||
|
||||
fn parserHandler(pragma: *Pragma, p: *Parser, start_idx: TokenIndex) Compilation.Error!void {
|
||||
var pack = @fieldParentPtr(*Pack, "pragma", pragma);
|
||||
var pack: *Pack = @fieldParentPtr("pragma", pragma);
|
||||
var idx = start_idx + 1;
|
||||
const l_paren = p.pp.tokens.get(idx);
|
||||
if (l_paren.id != .l_paren) {
|
||||
|
10
lib/compiler/aro/backend/Object.zig
vendored
10
lib/compiler/aro/backend/Object.zig
vendored
@ -16,7 +16,7 @@ pub fn create(gpa: Allocator, target: std.Target) !*Object {
|
||||
|
||||
pub fn deinit(obj: *Object) void {
|
||||
switch (obj.format) {
|
||||
.elf => @fieldParentPtr(Elf, "obj", obj).deinit(),
|
||||
.elf => @as(*Elf, @fieldParentPtr("obj", obj)).deinit(),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
@ -32,7 +32,7 @@ pub const Section = union(enum) {
|
||||
|
||||
pub fn getSection(obj: *Object, section: Section) !*std.ArrayList(u8) {
|
||||
switch (obj.format) {
|
||||
.elf => return @fieldParentPtr(Elf, "obj", obj).getSection(section),
|
||||
.elf => return @as(*Elf, @fieldParentPtr("obj", obj)).getSection(section),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
@ -53,21 +53,21 @@ pub fn declareSymbol(
|
||||
size: u64,
|
||||
) ![]const u8 {
|
||||
switch (obj.format) {
|
||||
.elf => return @fieldParentPtr(Elf, "obj", obj).declareSymbol(section, name, linkage, @"type", offset, size),
|
||||
.elf => return @as(*Elf, @fieldParentPtr("obj", obj)).declareSymbol(section, name, linkage, @"type", offset, size),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn addRelocation(obj: *Object, name: []const u8, section: Section, address: u64, addend: i64) !void {
|
||||
switch (obj.format) {
|
||||
.elf => return @fieldParentPtr(Elf, "obj", obj).addRelocation(name, section, address, addend),
|
||||
.elf => return @as(*Elf, @fieldParentPtr("obj", obj)).addRelocation(name, section, address, addend),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finish(obj: *Object, file: std.fs.File) !void {
|
||||
switch (obj.format) {
|
||||
.elf => return @fieldParentPtr(Elf, "obj", obj).finish(file),
|
||||
.elf => return @as(*Elf, @fieldParentPtr("obj", obj)).finish(file),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
|
@ -1098,13 +1098,13 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
|
||||
}
|
||||
};
|
||||
|
||||
pub fn findBlockScope(inner: *ScopeExtraScope, c: *ScopeExtraContext) !*ScopeExtraScope.Block {
|
||||
pub fn findBlockScope(inner: *ScopeExtraScope, c: *ScopeExtraContext) !*Block {
|
||||
var scope = inner;
|
||||
while (true) {
|
||||
switch (scope.id) {
|
||||
.root => unreachable,
|
||||
.block => return @fieldParentPtr(*Block, "base", scope),
|
||||
.condition => return @fieldParentPtr(*Condition, "base", scope).getBlockScope(c),
|
||||
.block => return @fieldParentPtr("base", scope),
|
||||
.condition => return @as(*Condition, @fieldParentPtr("base", scope)).getBlockScope(c),
|
||||
else => scope = scope.parent.?,
|
||||
}
|
||||
}
|
||||
@ -1116,7 +1116,7 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
|
||||
switch (scope.id) {
|
||||
.root => unreachable,
|
||||
.block => {
|
||||
const block = @fieldParentPtr(*Block, "base", scope);
|
||||
const block: *Block = @fieldParentPtr("base", scope);
|
||||
if (block.return_type) |ty| return ty;
|
||||
scope = scope.parent.?;
|
||||
},
|
||||
@ -1128,15 +1128,15 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
|
||||
pub fn getAlias(scope: *ScopeExtraScope, name: []const u8) []const u8 {
|
||||
return switch (scope.id) {
|
||||
.root => return name,
|
||||
.block => @fieldParentPtr(*Block, "base", scope).getAlias(name),
|
||||
.block => @as(*Block, @fieldParentPtr("base", scope)).getAlias(name),
|
||||
.loop, .do_loop, .condition => scope.parent.?.getAlias(name),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn contains(scope: *ScopeExtraScope, name: []const u8) bool {
|
||||
return switch (scope.id) {
|
||||
.root => @fieldParentPtr(*Root, "base", scope).contains(name),
|
||||
.block => @fieldParentPtr(*Block, "base", scope).contains(name),
|
||||
.root => @as(*Root, @fieldParentPtr("base", scope)).contains(name),
|
||||
.block => @as(*Block, @fieldParentPtr("base", scope)).contains(name),
|
||||
.loop, .do_loop, .condition => scope.parent.?.contains(name),
|
||||
};
|
||||
}
|
||||
@ -1158,11 +1158,11 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
|
||||
while (true) {
|
||||
switch (scope.id) {
|
||||
.root => {
|
||||
const root = @fieldParentPtr(*Root, "base", scope);
|
||||
const root: *Root = @fieldParentPtr("base", scope);
|
||||
return root.nodes.append(node);
|
||||
},
|
||||
.block => {
|
||||
const block = @fieldParentPtr(*Block, "base", scope);
|
||||
const block: *Block = @fieldParentPtr("base", scope);
|
||||
return block.statements.append(node);
|
||||
},
|
||||
else => scope = scope.parent.?,
|
||||
@ -1184,7 +1184,7 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
|
||||
switch (scope.id) {
|
||||
.root => return,
|
||||
.block => {
|
||||
const block = @fieldParentPtr(*Block, "base", scope);
|
||||
const block: *Block = @fieldParentPtr("base", scope);
|
||||
if (block.variable_discards.get(name)) |discard| {
|
||||
discard.data.should_skip = true;
|
||||
return;
|
||||
|
@ -409,7 +409,7 @@ pub const Node = extern union {
|
||||
return null;
|
||||
|
||||
if (self.ptr_otherwise.tag == t)
|
||||
return @alignCast(@fieldParentPtr(*align(1) t.Type(), "base", self.ptr_otherwise));
|
||||
return @alignCast(@fieldParentPtr("base", self.ptr_otherwise));
|
||||
|
||||
return null;
|
||||
}
|
||||
@ -1220,7 +1220,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
|
||||
});
|
||||
},
|
||||
.pub_var_simple, .var_simple => {
|
||||
const payload = @as(*Payload.SimpleVarDecl, @alignCast(@fieldParentPtr(*align(1) Payload.SimpleVarDecl, "base", node.ptr_otherwise))).data;
|
||||
const payload = @as(*Payload.SimpleVarDecl, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
|
||||
if (node.tag() == .pub_var_simple) _ = try c.addToken(.keyword_pub, "pub");
|
||||
const const_tok = try c.addToken(.keyword_const, "const");
|
||||
_ = try c.addIdentifier(payload.name);
|
||||
@ -1293,7 +1293,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
|
||||
},
|
||||
.var_decl => return renderVar(c, node),
|
||||
.arg_redecl, .alias => {
|
||||
const payload = @as(*Payload.ArgRedecl, @alignCast(@fieldParentPtr(*align(1) Payload.ArgRedecl, "base", node.ptr_otherwise))).data;
|
||||
const payload = @as(*Payload.ArgRedecl, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
|
||||
if (node.tag() == .alias) _ = try c.addToken(.keyword_pub, "pub");
|
||||
const mut_tok = if (node.tag() == .alias)
|
||||
try c.addToken(.keyword_const, "const")
|
||||
@ -1492,7 +1492,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
|
||||
});
|
||||
},
|
||||
.c_pointer, .single_pointer => {
|
||||
const payload = @as(*Payload.Pointer, @alignCast(@fieldParentPtr(*align(1) Payload.Pointer, "base", node.ptr_otherwise))).data;
|
||||
const payload = @as(*Payload.Pointer, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
|
||||
|
||||
const asterisk = if (node.tag() == .single_pointer)
|
||||
try c.addToken(.asterisk, "*")
|
||||
@ -2085,7 +2085,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
|
||||
}
|
||||
|
||||
fn renderRecord(c: *Context, node: Node) !NodeIndex {
|
||||
const payload = @as(*Payload.Record, @alignCast(@fieldParentPtr(*align(1) Payload.Record, "base", node.ptr_otherwise))).data;
|
||||
const payload = @as(*Payload.Record, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
|
||||
if (payload.layout == .@"packed")
|
||||
_ = try c.addToken(.keyword_packed, "packed")
|
||||
else if (payload.layout == .@"extern")
|
||||
@ -2487,7 +2487,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
|
||||
}
|
||||
|
||||
fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
|
||||
const payload = @as(*Payload.UnOp, @alignCast(@fieldParentPtr(*align(1) Payload.UnOp, "base", node.ptr_otherwise))).data;
|
||||
const payload = @as(*Payload.UnOp, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
|
||||
return c.addNode(.{
|
||||
.tag = tag,
|
||||
.main_token = try c.addToken(tok_tag, bytes),
|
||||
@ -2499,7 +2499,7 @@ fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: T
|
||||
}
|
||||
|
||||
fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
|
||||
const payload = @as(*Payload.BinOp, @alignCast(@fieldParentPtr(*align(1) Payload.BinOp, "base", node.ptr_otherwise))).data;
|
||||
const payload = @as(*Payload.BinOp, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
|
||||
const lhs = try renderNodeGrouped(c, payload.lhs);
|
||||
return c.addNode(.{
|
||||
.tag = tag,
|
||||
@ -2512,7 +2512,7 @@ fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_ta
|
||||
}
|
||||
|
||||
fn renderBinOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
|
||||
const payload = @as(*Payload.BinOp, @alignCast(@fieldParentPtr(*align(1) Payload.BinOp, "base", node.ptr_otherwise))).data;
|
||||
const payload = @as(*Payload.BinOp, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
|
||||
const lhs = try renderNode(c, payload.lhs);
|
||||
return c.addNode(.{
|
||||
.tag = tag,
|
||||
|
@ -19,7 +19,7 @@ pub const Tree = struct {
|
||||
}
|
||||
|
||||
pub fn root(self: *Tree) *Node.Root {
|
||||
return @fieldParentPtr(Node.Root, "base", self.node);
|
||||
return @alignCast(@fieldParentPtr("base", self.node));
|
||||
}
|
||||
|
||||
pub fn dump(self: *Tree, writer: anytype) @TypeOf(writer).Error!void {
|
||||
@ -174,7 +174,7 @@ pub const Node = struct {
|
||||
|
||||
pub fn cast(base: *Node, comptime id: Id) ?*id.Type() {
|
||||
if (base.id == id) {
|
||||
return @fieldParentPtr(id.Type(), "base", base);
|
||||
return @alignCast(@fieldParentPtr("base", base));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@ -461,7 +461,7 @@ pub const Node = struct {
|
||||
pub fn isNumberExpression(node: *const Node) bool {
|
||||
switch (node.id) {
|
||||
.literal => {
|
||||
const literal = @fieldParentPtr(Node.Literal, "base", node);
|
||||
const literal: *const Node.Literal = @alignCast(@fieldParentPtr("base", node));
|
||||
return switch (literal.token.id) {
|
||||
.number => true,
|
||||
else => false,
|
||||
@ -475,7 +475,7 @@ pub const Node = struct {
|
||||
pub fn isStringLiteral(node: *const Node) bool {
|
||||
switch (node.id) {
|
||||
.literal => {
|
||||
const literal = @fieldParentPtr(Node.Literal, "base", node);
|
||||
const literal: *const Node.Literal = @alignCast(@fieldParentPtr("base", node));
|
||||
return switch (literal.token.id) {
|
||||
.quoted_ascii_string, .quoted_wide_string => true,
|
||||
else => false,
|
||||
@ -489,105 +489,103 @@ pub const Node = struct {
|
||||
switch (node.id) {
|
||||
.root => unreachable,
|
||||
.resource_external => {
|
||||
const casted = @fieldParentPtr(Node.ResourceExternal, "base", node);
|
||||
const casted: *const Node.ResourceExternal = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.id;
|
||||
},
|
||||
.resource_raw_data => {
|
||||
const casted = @fieldParentPtr(Node.ResourceRawData, "base", node);
|
||||
const casted: *const Node.ResourceRawData = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.id;
|
||||
},
|
||||
.literal => {
|
||||
const casted = @fieldParentPtr(Node.Literal, "base", node);
|
||||
const casted: *const Node.Literal = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.token;
|
||||
},
|
||||
.binary_expression => {
|
||||
const casted = @fieldParentPtr(Node.BinaryExpression, "base", node);
|
||||
const casted: *const Node.BinaryExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.left.getFirstToken();
|
||||
},
|
||||
.grouped_expression => {
|
||||
const casted = @fieldParentPtr(Node.GroupedExpression, "base", node);
|
||||
const casted: *const Node.GroupedExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.open_token;
|
||||
},
|
||||
.not_expression => {
|
||||
const casted = @fieldParentPtr(Node.NotExpression, "base", node);
|
||||
const casted: *const Node.NotExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.not_token;
|
||||
},
|
||||
.accelerators => {
|
||||
const casted = @fieldParentPtr(Node.Accelerators, "base", node);
|
||||
const casted: *const Node.Accelerators = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.id;
|
||||
},
|
||||
.accelerator => {
|
||||
const casted = @fieldParentPtr(Node.Accelerator, "base", node);
|
||||
const casted: *const Node.Accelerator = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.event.getFirstToken();
|
||||
},
|
||||
.dialog => {
|
||||
const casted = @fieldParentPtr(Node.Dialog, "base", node);
|
||||
const casted: *const Node.Dialog = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.id;
|
||||
},
|
||||
.control_statement => {
|
||||
const casted = @fieldParentPtr(Node.ControlStatement, "base", node);
|
||||
const casted: *const Node.ControlStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.type;
|
||||
},
|
||||
.toolbar => {
|
||||
const casted = @fieldParentPtr(Node.Toolbar, "base", node);
|
||||
const casted: *const Node.Toolbar = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.id;
|
||||
},
|
||||
.menu => {
|
||||
const casted = @fieldParentPtr(Node.Menu, "base", node);
|
||||
const casted: *const Node.Menu = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.id;
|
||||
},
|
||||
inline .menu_item, .menu_item_separator, .menu_item_ex => |menu_item_type| {
|
||||
const node_type = menu_item_type.Type();
|
||||
const casted = @fieldParentPtr(node_type, "base", node);
|
||||
const casted: *const menu_item_type.Type() = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.menuitem;
|
||||
},
|
||||
inline .popup, .popup_ex => |popup_type| {
|
||||
const node_type = popup_type.Type();
|
||||
const casted = @fieldParentPtr(node_type, "base", node);
|
||||
const casted: *const popup_type.Type() = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.popup;
|
||||
},
|
||||
.version_info => {
|
||||
const casted = @fieldParentPtr(Node.VersionInfo, "base", node);
|
||||
const casted: *const Node.VersionInfo = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.id;
|
||||
},
|
||||
.version_statement => {
|
||||
const casted = @fieldParentPtr(Node.VersionStatement, "base", node);
|
||||
const casted: *const Node.VersionStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.type;
|
||||
},
|
||||
.block => {
|
||||
const casted = @fieldParentPtr(Node.Block, "base", node);
|
||||
const casted: *const Node.Block = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.identifier;
|
||||
},
|
||||
.block_value => {
|
||||
const casted = @fieldParentPtr(Node.BlockValue, "base", node);
|
||||
const casted: *const Node.BlockValue = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.identifier;
|
||||
},
|
||||
.block_value_value => {
|
||||
const casted = @fieldParentPtr(Node.BlockValueValue, "base", node);
|
||||
const casted: *const Node.BlockValueValue = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.expression.getFirstToken();
|
||||
},
|
||||
.string_table => {
|
||||
const casted = @fieldParentPtr(Node.StringTable, "base", node);
|
||||
const casted: *const Node.StringTable = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.type;
|
||||
},
|
||||
.string_table_string => {
|
||||
const casted = @fieldParentPtr(Node.StringTableString, "base", node);
|
||||
const casted: *const Node.StringTableString = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.id.getFirstToken();
|
||||
},
|
||||
.language_statement => {
|
||||
const casted = @fieldParentPtr(Node.LanguageStatement, "base", node);
|
||||
const casted: *const Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.language_token;
|
||||
},
|
||||
.font_statement => {
|
||||
const casted = @fieldParentPtr(Node.FontStatement, "base", node);
|
||||
const casted: *const Node.FontStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.identifier;
|
||||
},
|
||||
.simple_statement => {
|
||||
const casted = @fieldParentPtr(Node.SimpleStatement, "base", node);
|
||||
const casted: *const Node.SimpleStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.identifier;
|
||||
},
|
||||
.invalid => {
|
||||
const casted = @fieldParentPtr(Node.Invalid, "base", node);
|
||||
const casted: *const Node.Invalid = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.context[0];
|
||||
},
|
||||
}
|
||||
@ -597,44 +595,44 @@ pub const Node = struct {
|
||||
switch (node.id) {
|
||||
.root => unreachable,
|
||||
.resource_external => {
|
||||
const casted = @fieldParentPtr(Node.ResourceExternal, "base", node);
|
||||
const casted: *const Node.ResourceExternal = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.filename.getLastToken();
|
||||
},
|
||||
.resource_raw_data => {
|
||||
const casted = @fieldParentPtr(Node.ResourceRawData, "base", node);
|
||||
const casted: *const Node.ResourceRawData = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.literal => {
|
||||
const casted = @fieldParentPtr(Node.Literal, "base", node);
|
||||
const casted: *const Node.Literal = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.token;
|
||||
},
|
||||
.binary_expression => {
|
||||
const casted = @fieldParentPtr(Node.BinaryExpression, "base", node);
|
||||
const casted: *const Node.BinaryExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.right.getLastToken();
|
||||
},
|
||||
.grouped_expression => {
|
||||
const casted = @fieldParentPtr(Node.GroupedExpression, "base", node);
|
||||
const casted: *const Node.GroupedExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.close_token;
|
||||
},
|
||||
.not_expression => {
|
||||
const casted = @fieldParentPtr(Node.NotExpression, "base", node);
|
||||
const casted: *const Node.NotExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.number_token;
|
||||
},
|
||||
.accelerators => {
|
||||
const casted = @fieldParentPtr(Node.Accelerators, "base", node);
|
||||
const casted: *const Node.Accelerators = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.accelerator => {
|
||||
const casted = @fieldParentPtr(Node.Accelerator, "base", node);
|
||||
const casted: *const Node.Accelerator = @alignCast(@fieldParentPtr("base", node));
|
||||
if (casted.type_and_options.len > 0) return casted.type_and_options[casted.type_and_options.len - 1];
|
||||
return casted.idvalue.getLastToken();
|
||||
},
|
||||
.dialog => {
|
||||
const casted = @fieldParentPtr(Node.Dialog, "base", node);
|
||||
const casted: *const Node.Dialog = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.control_statement => {
|
||||
const casted = @fieldParentPtr(Node.ControlStatement, "base", node);
|
||||
const casted: *const Node.ControlStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
if (casted.extra_data_end) |token| return token;
|
||||
if (casted.help_id) |help_id_node| return help_id_node.getLastToken();
|
||||
if (casted.exstyle) |exstyle_node| return exstyle_node.getLastToken();
|
||||
@ -647,80 +645,79 @@ pub const Node = struct {
|
||||
return casted.height.getLastToken();
|
||||
},
|
||||
.toolbar => {
|
||||
const casted = @fieldParentPtr(Node.Toolbar, "base", node);
|
||||
const casted: *const Node.Toolbar = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.menu => {
|
||||
const casted = @fieldParentPtr(Node.Menu, "base", node);
|
||||
const casted: *const Node.Menu = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.menu_item => {
|
||||
const casted = @fieldParentPtr(Node.MenuItem, "base", node);
|
||||
const casted: *const Node.MenuItem = @alignCast(@fieldParentPtr("base", node));
|
||||
if (casted.option_list.len > 0) return casted.option_list[casted.option_list.len - 1];
|
||||
return casted.result.getLastToken();
|
||||
},
|
||||
.menu_item_separator => {
|
||||
const casted = @fieldParentPtr(Node.MenuItemSeparator, "base", node);
|
||||
const casted: *const Node.MenuItemSeparator = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.separator;
|
||||
},
|
||||
.menu_item_ex => {
|
||||
const casted = @fieldParentPtr(Node.MenuItemEx, "base", node);
|
||||
const casted: *const Node.MenuItemEx = @alignCast(@fieldParentPtr("base", node));
|
||||
if (casted.state) |state_node| return state_node.getLastToken();
|
||||
if (casted.type) |type_node| return type_node.getLastToken();
|
||||
if (casted.id) |id_node| return id_node.getLastToken();
|
||||
return casted.text;
|
||||
},
|
||||
inline .popup, .popup_ex => |popup_type| {
|
||||
const node_type = popup_type.Type();
|
||||
const casted = @fieldParentPtr(node_type, "base", node);
|
||||
const casted: *const popup_type.Type() = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.version_info => {
|
||||
const casted = @fieldParentPtr(Node.VersionInfo, "base", node);
|
||||
const casted: *const Node.VersionInfo = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.version_statement => {
|
||||
const casted = @fieldParentPtr(Node.VersionStatement, "base", node);
|
||||
const casted: *const Node.VersionStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.parts[casted.parts.len - 1].getLastToken();
|
||||
},
|
||||
.block => {
|
||||
const casted = @fieldParentPtr(Node.Block, "base", node);
|
||||
const casted: *const Node.Block = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.block_value => {
|
||||
const casted = @fieldParentPtr(Node.BlockValue, "base", node);
|
||||
const casted: *const Node.BlockValue = @alignCast(@fieldParentPtr("base", node));
|
||||
if (casted.values.len > 0) return casted.values[casted.values.len - 1].getLastToken();
|
||||
return casted.key;
|
||||
},
|
||||
.block_value_value => {
|
||||
const casted = @fieldParentPtr(Node.BlockValueValue, "base", node);
|
||||
const casted: *const Node.BlockValueValue = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.expression.getLastToken();
|
||||
},
|
||||
.string_table => {
|
||||
const casted = @fieldParentPtr(Node.StringTable, "base", node);
|
||||
const casted: *const Node.StringTable = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.end_token;
|
||||
},
|
||||
.string_table_string => {
|
||||
const casted = @fieldParentPtr(Node.StringTableString, "base", node);
|
||||
const casted: *const Node.StringTableString = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.string;
|
||||
},
|
||||
.language_statement => {
|
||||
const casted = @fieldParentPtr(Node.LanguageStatement, "base", node);
|
||||
const casted: *const Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.sublanguage_id.getLastToken();
|
||||
},
|
||||
.font_statement => {
|
||||
const casted = @fieldParentPtr(Node.FontStatement, "base", node);
|
||||
const casted: *const Node.FontStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
if (casted.char_set) |char_set_node| return char_set_node.getLastToken();
|
||||
if (casted.italic) |italic_node| return italic_node.getLastToken();
|
||||
if (casted.weight) |weight_node| return weight_node.getLastToken();
|
||||
return casted.typeface;
|
||||
},
|
||||
.simple_statement => {
|
||||
const casted = @fieldParentPtr(Node.SimpleStatement, "base", node);
|
||||
const casted: *const Node.SimpleStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.value.getLastToken();
|
||||
},
|
||||
.invalid => {
|
||||
const casted = @fieldParentPtr(Node.Invalid, "base", node);
|
||||
const casted: *const Node.Invalid = @alignCast(@fieldParentPtr("base", node));
|
||||
return casted.context[casted.context.len - 1];
|
||||
},
|
||||
}
|
||||
@ -737,31 +734,31 @@ pub const Node = struct {
|
||||
switch (node.id) {
|
||||
.root => {
|
||||
try writer.writeAll("\n");
|
||||
const root = @fieldParentPtr(Node.Root, "base", node);
|
||||
const root: *Node.Root = @alignCast(@fieldParentPtr("base", node));
|
||||
for (root.body) |body_node| {
|
||||
try body_node.dump(tree, writer, indent + 1);
|
||||
}
|
||||
},
|
||||
.resource_external => {
|
||||
const resource = @fieldParentPtr(Node.ResourceExternal, "base", node);
|
||||
const resource: *Node.ResourceExternal = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ resource.id.slice(tree.source), resource.type.slice(tree.source), resource.common_resource_attributes.len });
|
||||
try resource.filename.dump(tree, writer, indent + 1);
|
||||
},
|
||||
.resource_raw_data => {
|
||||
const resource = @fieldParentPtr(Node.ResourceRawData, "base", node);
|
||||
const resource: *Node.ResourceRawData = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} common_resource_attributes] raw data: {}\n", .{ resource.id.slice(tree.source), resource.type.slice(tree.source), resource.common_resource_attributes.len, resource.raw_data.len });
|
||||
for (resource.raw_data) |data_expression| {
|
||||
try data_expression.dump(tree, writer, indent + 1);
|
||||
}
|
||||
},
|
||||
.literal => {
|
||||
const literal = @fieldParentPtr(Node.Literal, "base", node);
|
||||
const literal: *Node.Literal = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.writeAll(" ");
|
||||
try writer.writeAll(literal.token.slice(tree.source));
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.binary_expression => {
|
||||
const binary = @fieldParentPtr(Node.BinaryExpression, "base", node);
|
||||
const binary: *Node.BinaryExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.writeAll(" ");
|
||||
try writer.writeAll(binary.operator.slice(tree.source));
|
||||
try writer.writeAll("\n");
|
||||
@ -769,7 +766,7 @@ pub const Node = struct {
|
||||
try binary.right.dump(tree, writer, indent + 1);
|
||||
},
|
||||
.grouped_expression => {
|
||||
const grouped = @fieldParentPtr(Node.GroupedExpression, "base", node);
|
||||
const grouped: *Node.GroupedExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.writeAll("\n");
|
||||
try writer.writeByteNTimes(' ', indent);
|
||||
try writer.writeAll(grouped.open_token.slice(tree.source));
|
||||
@ -780,7 +777,7 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.not_expression => {
|
||||
const not = @fieldParentPtr(Node.NotExpression, "base", node);
|
||||
const not: *Node.NotExpression = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.writeAll(" ");
|
||||
try writer.writeAll(not.not_token.slice(tree.source));
|
||||
try writer.writeAll(" ");
|
||||
@ -788,7 +785,7 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.accelerators => {
|
||||
const accelerators = @fieldParentPtr(Node.Accelerators, "base", node);
|
||||
const accelerators: *Node.Accelerators = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ accelerators.id.slice(tree.source), accelerators.type.slice(tree.source), accelerators.common_resource_attributes.len });
|
||||
for (accelerators.optional_statements) |statement| {
|
||||
try statement.dump(tree, writer, indent + 1);
|
||||
@ -804,7 +801,7 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.accelerator => {
|
||||
const accelerator = @fieldParentPtr(Node.Accelerator, "base", node);
|
||||
const accelerator: *Node.Accelerator = @alignCast(@fieldParentPtr("base", node));
|
||||
for (accelerator.type_and_options, 0..) |option, i| {
|
||||
if (i != 0) try writer.writeAll(",");
|
||||
try writer.writeByte(' ');
|
||||
@ -815,7 +812,7 @@ pub const Node = struct {
|
||||
try accelerator.idvalue.dump(tree, writer, indent + 1);
|
||||
},
|
||||
.dialog => {
|
||||
const dialog = @fieldParentPtr(Node.Dialog, "base", node);
|
||||
const dialog: *Node.Dialog = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ dialog.id.slice(tree.source), dialog.type.slice(tree.source), dialog.common_resource_attributes.len });
|
||||
inline for (.{ "x", "y", "width", "height" }) |arg| {
|
||||
try writer.writeByteNTimes(' ', indent + 1);
|
||||
@ -841,7 +838,7 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.control_statement => {
|
||||
const control = @fieldParentPtr(Node.ControlStatement, "base", node);
|
||||
const control: *Node.ControlStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s}", .{control.type.slice(tree.source)});
|
||||
if (control.text) |text| {
|
||||
try writer.print(" text: {s}", .{text.slice(tree.source)});
|
||||
@ -877,7 +874,7 @@ pub const Node = struct {
|
||||
}
|
||||
},
|
||||
.toolbar => {
|
||||
const toolbar = @fieldParentPtr(Node.Toolbar, "base", node);
|
||||
const toolbar: *Node.Toolbar = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ toolbar.id.slice(tree.source), toolbar.type.slice(tree.source), toolbar.common_resource_attributes.len });
|
||||
inline for (.{ "button_width", "button_height" }) |arg| {
|
||||
try writer.writeByteNTimes(' ', indent + 1);
|
||||
@ -895,7 +892,7 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.menu => {
|
||||
const menu = @fieldParentPtr(Node.Menu, "base", node);
|
||||
const menu: *Node.Menu = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ menu.id.slice(tree.source), menu.type.slice(tree.source), menu.common_resource_attributes.len });
|
||||
for (menu.optional_statements) |statement| {
|
||||
try statement.dump(tree, writer, indent + 1);
|
||||
@ -916,16 +913,16 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.menu_item => {
|
||||
const menu_item = @fieldParentPtr(Node.MenuItem, "base", node);
|
||||
const menu_item: *Node.MenuItem = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} options]\n", .{ menu_item.menuitem.slice(tree.source), menu_item.text.slice(tree.source), menu_item.option_list.len });
|
||||
try menu_item.result.dump(tree, writer, indent + 1);
|
||||
},
|
||||
.menu_item_separator => {
|
||||
const menu_item = @fieldParentPtr(Node.MenuItemSeparator, "base", node);
|
||||
const menu_item: *Node.MenuItemSeparator = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s}\n", .{ menu_item.menuitem.slice(tree.source), menu_item.separator.slice(tree.source) });
|
||||
},
|
||||
.menu_item_ex => {
|
||||
const menu_item = @fieldParentPtr(Node.MenuItemEx, "base", node);
|
||||
const menu_item: *Node.MenuItemEx = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s}\n", .{ menu_item.menuitem.slice(tree.source), menu_item.text.slice(tree.source) });
|
||||
inline for (.{ "id", "type", "state" }) |arg| {
|
||||
if (@field(menu_item, arg)) |val_node| {
|
||||
@ -936,7 +933,7 @@ pub const Node = struct {
|
||||
}
|
||||
},
|
||||
.popup => {
|
||||
const popup = @fieldParentPtr(Node.Popup, "base", node);
|
||||
const popup: *Node.Popup = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} options]\n", .{ popup.popup.slice(tree.source), popup.text.slice(tree.source), popup.option_list.len });
|
||||
try writer.writeByteNTimes(' ', indent);
|
||||
try writer.writeAll(popup.begin_token.slice(tree.source));
|
||||
@ -949,7 +946,7 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.popup_ex => {
|
||||
const popup = @fieldParentPtr(Node.PopupEx, "base", node);
|
||||
const popup: *Node.PopupEx = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s}\n", .{ popup.popup.slice(tree.source), popup.text.slice(tree.source) });
|
||||
inline for (.{ "id", "type", "state", "help_id" }) |arg| {
|
||||
if (@field(popup, arg)) |val_node| {
|
||||
@ -969,7 +966,7 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.version_info => {
|
||||
const version_info = @fieldParentPtr(Node.VersionInfo, "base", node);
|
||||
const version_info: *Node.VersionInfo = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ version_info.id.slice(tree.source), version_info.versioninfo.slice(tree.source), version_info.common_resource_attributes.len });
|
||||
for (version_info.fixed_info) |fixed_info| {
|
||||
try fixed_info.dump(tree, writer, indent + 1);
|
||||
@ -985,14 +982,14 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.version_statement => {
|
||||
const version_statement = @fieldParentPtr(Node.VersionStatement, "base", node);
|
||||
const version_statement: *Node.VersionStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s}\n", .{version_statement.type.slice(tree.source)});
|
||||
for (version_statement.parts) |part| {
|
||||
try part.dump(tree, writer, indent + 1);
|
||||
}
|
||||
},
|
||||
.block => {
|
||||
const block = @fieldParentPtr(Node.Block, "base", node);
|
||||
const block: *Node.Block = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s}\n", .{ block.identifier.slice(tree.source), block.key.slice(tree.source) });
|
||||
for (block.values) |value| {
|
||||
try value.dump(tree, writer, indent + 1);
|
||||
@ -1008,14 +1005,14 @@ pub const Node = struct {
|
||||
try writer.writeAll("\n");
|
||||
},
|
||||
.block_value => {
|
||||
const block_value = @fieldParentPtr(Node.BlockValue, "base", node);
|
||||
const block_value: *Node.BlockValue = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} {s}\n", .{ block_value.identifier.slice(tree.source), block_value.key.slice(tree.source) });
|
||||
for (block_value.values) |value| {
|
||||
try value.dump(tree, writer, indent + 1);
|
||||
}
|
||||
},
|
||||
.block_value_value => {
|
||||
const block_value = @fieldParentPtr(Node.BlockValueValue, "base", node);
|
||||
const block_value: *Node.BlockValueValue = @alignCast(@fieldParentPtr("base", node));
|
||||
if (block_value.trailing_comma) {
|
||||
try writer.writeAll(" ,");
|
||||
}
|
||||
@ -1023,7 +1020,7 @@ pub const Node = struct {
|
||||
try block_value.expression.dump(tree, writer, indent + 1);
|
||||
},
|
||||
.string_table => {
|
||||
const string_table = @fieldParentPtr(Node.StringTable, "base", node);
|
||||
const string_table: *Node.StringTable = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} [{d} common_resource_attributes]\n", .{ string_table.type.slice(tree.source), string_table.common_resource_attributes.len });
|
||||
for (string_table.optional_statements) |statement| {
|
||||
try statement.dump(tree, writer, indent + 1);
|
||||
@ -1040,19 +1037,19 @@ pub const Node = struct {
|
||||
},
|
||||
.string_table_string => {
|
||||
try writer.writeAll("\n");
|
||||
const string = @fieldParentPtr(Node.StringTableString, "base", node);
|
||||
const string: *Node.StringTableString = @alignCast(@fieldParentPtr("base", node));
|
||||
try string.id.dump(tree, writer, indent + 1);
|
||||
try writer.writeByteNTimes(' ', indent + 1);
|
||||
try writer.print("{s}\n", .{string.string.slice(tree.source)});
|
||||
},
|
||||
.language_statement => {
|
||||
const language = @fieldParentPtr(Node.LanguageStatement, "base", node);
|
||||
const language: *Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s}\n", .{language.language_token.slice(tree.source)});
|
||||
try language.primary_language_id.dump(tree, writer, indent + 1);
|
||||
try language.sublanguage_id.dump(tree, writer, indent + 1);
|
||||
},
|
||||
.font_statement => {
|
||||
const font = @fieldParentPtr(Node.FontStatement, "base", node);
|
||||
const font: *Node.FontStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s} typeface: {s}\n", .{ font.identifier.slice(tree.source), font.typeface.slice(tree.source) });
|
||||
try writer.writeByteNTimes(' ', indent + 1);
|
||||
try writer.writeAll("point_size:\n");
|
||||
@ -1066,12 +1063,12 @@ pub const Node = struct {
|
||||
}
|
||||
},
|
||||
.simple_statement => {
|
||||
const statement = @fieldParentPtr(Node.SimpleStatement, "base", node);
|
||||
const statement: *Node.SimpleStatement = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" {s}\n", .{statement.identifier.slice(tree.source)});
|
||||
try statement.value.dump(tree, writer, indent + 1);
|
||||
},
|
||||
.invalid => {
|
||||
const invalid = @fieldParentPtr(Node.Invalid, "base", node);
|
||||
const invalid: *Node.Invalid = @alignCast(@fieldParentPtr("base", node));
|
||||
try writer.print(" context.len: {}\n", .{invalid.context.len});
|
||||
for (invalid.context) |context_token| {
|
||||
try writer.writeByteNTimes(' ', indent + 1);
|
||||
|
@ -229,34 +229,34 @@ pub const Compiler = struct {
|
||||
pub fn writeNode(self: *Compiler, node: *Node, writer: anytype) !void {
|
||||
switch (node.id) {
|
||||
.root => unreachable, // writeRoot should be called directly instead
|
||||
.resource_external => try self.writeResourceExternal(@fieldParentPtr(Node.ResourceExternal, "base", node), writer),
|
||||
.resource_raw_data => try self.writeResourceRawData(@fieldParentPtr(Node.ResourceRawData, "base", node), writer),
|
||||
.resource_external => try self.writeResourceExternal(@alignCast(@fieldParentPtr("base", node)), writer),
|
||||
.resource_raw_data => try self.writeResourceRawData(@alignCast(@fieldParentPtr("base", node)), writer),
|
||||
.literal => unreachable, // this is context dependent and should be handled by its parent
|
||||
.binary_expression => unreachable,
|
||||
.grouped_expression => unreachable,
|
||||
.not_expression => unreachable,
|
||||
.invalid => {}, // no-op, currently only used for dangling literals at EOF
|
||||
.accelerators => try self.writeAccelerators(@fieldParentPtr(Node.Accelerators, "base", node), writer),
|
||||
.accelerators => try self.writeAccelerators(@alignCast(@fieldParentPtr("base", node)), writer),
|
||||
.accelerator => unreachable, // handled by writeAccelerators
|
||||
.dialog => try self.writeDialog(@fieldParentPtr(Node.Dialog, "base", node), writer),
|
||||
.dialog => try self.writeDialog(@alignCast(@fieldParentPtr("base", node)), writer),
|
||||
.control_statement => unreachable,
|
||||
.toolbar => try self.writeToolbar(@fieldParentPtr(Node.Toolbar, "base", node), writer),
|
||||
.menu => try self.writeMenu(@fieldParentPtr(Node.Menu, "base", node), writer),
|
||||
.toolbar => try self.writeToolbar(@alignCast(@fieldParentPtr("base", node)), writer),
|
||||
.menu => try self.writeMenu(@alignCast(@fieldParentPtr("base", node)), writer),
|
||||
.menu_item => unreachable,
|
||||
.menu_item_separator => unreachable,
|
||||
.menu_item_ex => unreachable,
|
||||
.popup => unreachable,
|
||||
.popup_ex => unreachable,
|
||||
.version_info => try self.writeVersionInfo(@fieldParentPtr(Node.VersionInfo, "base", node), writer),
|
||||
.version_info => try self.writeVersionInfo(@alignCast(@fieldParentPtr("base", node)), writer),
|
||||
.version_statement => unreachable,
|
||||
.block => unreachable,
|
||||
.block_value => unreachable,
|
||||
.block_value_value => unreachable,
|
||||
.string_table => try self.writeStringTable(@fieldParentPtr(Node.StringTable, "base", node)),
|
||||
.string_table => try self.writeStringTable(@alignCast(@fieldParentPtr("base", node))),
|
||||
.string_table_string => unreachable, // handled by writeStringTable
|
||||
.language_statement => self.writeLanguageStatement(@fieldParentPtr(Node.LanguageStatement, "base", node)),
|
||||
.language_statement => self.writeLanguageStatement(@alignCast(@fieldParentPtr("base", node))),
|
||||
.font_statement => unreachable,
|
||||
.simple_statement => self.writeTopLevelSimpleStatement(@fieldParentPtr(Node.SimpleStatement, "base", node)),
|
||||
.simple_statement => self.writeTopLevelSimpleStatement(@alignCast(@fieldParentPtr("base", node))),
|
||||
}
|
||||
}
|
||||
|
||||
@ -1289,7 +1289,7 @@ pub const Compiler = struct {
|
||||
return evaluateNumberExpression(node, self.source, self.input_code_pages).asWord();
|
||||
} else {
|
||||
std.debug.assert(node.isStringLiteral());
|
||||
const literal = @fieldParentPtr(Node.Literal, "base", node);
|
||||
const literal: *Node.Literal = @alignCast(@fieldParentPtr("base", node));
|
||||
const bytes = SourceBytes{
|
||||
.slice = literal.token.slice(self.source),
|
||||
.code_page = self.input_code_pages.getForToken(literal.token),
|
||||
@ -1342,7 +1342,7 @@ pub const Compiler = struct {
|
||||
/// the writer within this function could return error.NoSpaceLeft
|
||||
pub fn writeAcceleratorsData(self: *Compiler, node: *Node.Accelerators, data_writer: anytype) !void {
|
||||
for (node.accelerators, 0..) |accel_node, i| {
|
||||
const accelerator = @fieldParentPtr(Node.Accelerator, "base", accel_node);
|
||||
const accelerator: *Node.Accelerator = @alignCast(@fieldParentPtr("base", accel_node));
|
||||
var modifiers = res.AcceleratorModifiers{};
|
||||
for (accelerator.type_and_options) |type_or_option| {
|
||||
const modifier = rc.AcceleratorTypeAndOptions.map.get(type_or_option.slice(self.source)).?;
|
||||
@ -1426,7 +1426,7 @@ pub const Compiler = struct {
|
||||
for (node.optional_statements) |optional_statement| {
|
||||
switch (optional_statement.id) {
|
||||
.simple_statement => {
|
||||
const simple_statement = @fieldParentPtr(Node.SimpleStatement, "base", optional_statement);
|
||||
const simple_statement: *Node.SimpleStatement = @alignCast(@fieldParentPtr("base", optional_statement));
|
||||
const statement_identifier = simple_statement.identifier;
|
||||
const statement_type = rc.OptionalStatements.dialog_map.get(statement_identifier.slice(self.source)) orelse continue;
|
||||
switch (statement_type) {
|
||||
@ -1440,7 +1440,7 @@ pub const Compiler = struct {
|
||||
},
|
||||
.caption => {
|
||||
std.debug.assert(simple_statement.value.id == .literal);
|
||||
const literal_node = @fieldParentPtr(Node.Literal, "base", simple_statement.value);
|
||||
const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", simple_statement.value));
|
||||
optional_statement_values.caption = literal_node.token;
|
||||
},
|
||||
.class => {
|
||||
@ -1466,7 +1466,7 @@ pub const Compiler = struct {
|
||||
optional_statement_values.class = NameOrOrdinal{ .ordinal = class_ordinal.asWord() };
|
||||
} else {
|
||||
std.debug.assert(simple_statement.value.isStringLiteral());
|
||||
const literal_node = @fieldParentPtr(Node.Literal, "base", simple_statement.value);
|
||||
const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", simple_statement.value));
|
||||
const parsed = try self.parseQuotedStringAsWideString(literal_node.token);
|
||||
optional_statement_values.class = NameOrOrdinal{ .name = parsed };
|
||||
}
|
||||
@ -1492,7 +1492,7 @@ pub const Compiler = struct {
|
||||
}
|
||||
|
||||
std.debug.assert(simple_statement.value.id == .literal);
|
||||
const literal_node = @fieldParentPtr(Node.Literal, "base", simple_statement.value);
|
||||
const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", simple_statement.value));
|
||||
|
||||
const token_slice = literal_node.token.slice(self.source);
|
||||
const bytes = SourceBytes{
|
||||
@ -1542,7 +1542,7 @@ pub const Compiler = struct {
|
||||
}
|
||||
},
|
||||
.font_statement => {
|
||||
const font = @fieldParentPtr(Node.FontStatement, "base", optional_statement);
|
||||
const font: *Node.FontStatement = @alignCast(@fieldParentPtr("base", optional_statement));
|
||||
if (optional_statement_values.font != null) {
|
||||
optional_statement_values.font.?.node = font;
|
||||
} else {
|
||||
@ -1581,7 +1581,7 @@ pub const Compiler = struct {
|
||||
// Multiple CLASS parameters are specified and any of them are treated as a number, then
|
||||
// the last CLASS is always treated as a number no matter what
|
||||
if (last_class_would_be_forced_ordinal and optional_statement_values.class.? == .name) {
|
||||
const literal_node = @fieldParentPtr(Node.Literal, "base", last_class.value);
|
||||
const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", last_class.value));
|
||||
const ordinal_value = res.ForcedOrdinal.fromUtf16Le(optional_statement_values.class.?.name);
|
||||
|
||||
try self.addErrorDetails(.{
|
||||
@ -1611,7 +1611,7 @@ pub const Compiler = struct {
|
||||
// 2. Multiple MENU parameters are specified and any of them are treated as a number, then
|
||||
// the last MENU is always treated as a number no matter what
|
||||
if ((last_menu_would_be_forced_ordinal or last_menu_has_digit_as_first_char) and optional_statement_values.menu.? == .name) {
|
||||
const literal_node = @fieldParentPtr(Node.Literal, "base", last_menu.value);
|
||||
const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", last_menu.value));
|
||||
const token_slice = literal_node.token.slice(self.source);
|
||||
const bytes = SourceBytes{
|
||||
.slice = token_slice,
|
||||
@ -1658,7 +1658,7 @@ pub const Compiler = struct {
|
||||
// between resinator and the Win32 RC compiler, we only emit a hint instead of
|
||||
// a warning.
|
||||
if (last_menu_did_uppercase) {
|
||||
const literal_node = @fieldParentPtr(Node.Literal, "base", last_menu.value);
|
||||
const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", last_menu.value));
|
||||
try self.addErrorDetails(.{
|
||||
.err = .dialog_menu_id_was_uppercased,
|
||||
.type = .hint,
|
||||
@ -1704,7 +1704,7 @@ pub const Compiler = struct {
|
||||
defer controls_by_id.deinit();
|
||||
|
||||
for (node.controls) |control_node| {
|
||||
const control = @fieldParentPtr(Node.ControlStatement, "base", control_node);
|
||||
const control: *Node.ControlStatement = @alignCast(@fieldParentPtr("base", control_node));
|
||||
|
||||
self.writeDialogControl(
|
||||
control,
|
||||
@ -1940,7 +1940,7 @@ pub const Compiler = struct {
|
||||
// And then write out the ordinal using a proper a NameOrOrdinal encoding.
|
||||
try ordinal.write(data_writer);
|
||||
} else if (class_node.isStringLiteral()) {
|
||||
const literal_node = @fieldParentPtr(Node.Literal, "base", class_node);
|
||||
const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", class_node));
|
||||
const parsed = try self.parseQuotedStringAsWideString(literal_node.token);
|
||||
defer self.allocator.free(parsed);
|
||||
if (rc.ControlClass.fromWideString(parsed)) |control_class| {
|
||||
@ -1955,7 +1955,7 @@ pub const Compiler = struct {
|
||||
try name.write(data_writer);
|
||||
}
|
||||
} else {
|
||||
const literal_node = @fieldParentPtr(Node.Literal, "base", class_node);
|
||||
const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", class_node));
|
||||
const literal_slice = literal_node.token.slice(self.source);
|
||||
// This succeeding is guaranteed by the parser
|
||||
const control_class = rc.ControlClass.map.get(literal_slice) orelse unreachable;
|
||||
@ -2178,7 +2178,7 @@ pub const Compiler = struct {
|
||||
try writer.writeInt(u16, 0, .little); // null-terminated UTF-16 text
|
||||
},
|
||||
.menu_item => {
|
||||
const menu_item = @fieldParentPtr(Node.MenuItem, "base", node);
|
||||
const menu_item: *Node.MenuItem = @alignCast(@fieldParentPtr("base", node));
|
||||
var flags = res.MenuItemFlags{};
|
||||
for (menu_item.option_list) |option_token| {
|
||||
// This failing would be a bug in the parser
|
||||
@ -2196,7 +2196,7 @@ pub const Compiler = struct {
|
||||
try writer.writeAll(std.mem.sliceAsBytes(text[0 .. text.len + 1]));
|
||||
},
|
||||
.popup => {
|
||||
const popup = @fieldParentPtr(Node.Popup, "base", node);
|
||||
const popup: *Node.Popup = @alignCast(@fieldParentPtr("base", node));
|
||||
var flags = res.MenuItemFlags{ .value = res.MF.POPUP };
|
||||
for (popup.option_list) |option_token| {
|
||||
// This failing would be a bug in the parser
|
||||
@ -2216,7 +2216,7 @@ pub const Compiler = struct {
|
||||
}
|
||||
},
|
||||
inline .menu_item_ex, .popup_ex => |node_type| {
|
||||
const menu_item = @fieldParentPtr(node_type.Type(), "base", node);
|
||||
const menu_item: *node_type.Type() = @alignCast(@fieldParentPtr("base", node));
|
||||
|
||||
if (menu_item.type) |flags| {
|
||||
const value = evaluateNumberExpression(flags, self.source, self.input_code_pages);
|
||||
@ -2295,7 +2295,7 @@ pub const Compiler = struct {
for (node.fixed_info) |fixed_info| {
switch (fixed_info.id) {
.version_statement => {
const version_statement = @fieldParentPtr(Node.VersionStatement, "base", fixed_info);
const version_statement: *Node.VersionStatement = @alignCast(@fieldParentPtr("base", fixed_info));
const version_type = rc.VersionInfo.map.get(version_statement.type.slice(self.source)).?;

// Ensure that all parts are cleared for each version, to properly account for
@ -2345,7 +2345,7 @@ pub const Compiler = struct {
}
},
.simple_statement => {
const statement = @fieldParentPtr(Node.SimpleStatement, "base", fixed_info);
const statement: *Node.SimpleStatement = @alignCast(@fieldParentPtr("base", fixed_info));
const statement_type = rc.VersionInfo.map.get(statement.identifier.slice(self.source)).?;
const value = evaluateNumberExpression(statement.value, self.source, self.input_code_pages);
switch (statement_type) {
@ -2416,7 +2416,7 @@ pub const Compiler = struct {

switch (node.id) {
inline .block, .block_value => |node_type| {
const block_or_value = @fieldParentPtr(node_type.Type(), "base", node);
const block_or_value: *node_type.Type() = @alignCast(@fieldParentPtr("base", node));
const parsed_key = try self.parseQuotedStringAsWideString(block_or_value.key);
defer self.allocator.free(parsed_key);

@ -2506,7 +2506,7 @@ pub const Compiler = struct {
const language = getLanguageFromOptionalStatements(node.optional_statements, self.source, self.input_code_pages) orelse self.state.language;

for (node.strings) |string_node| {
const string = @fieldParentPtr(Node.StringTableString, "base", string_node);
const string: *Node.StringTableString = @alignCast(@fieldParentPtr("base", string_node));
const string_id_data = try self.evaluateDataExpression(string.id);
const string_id = string_id_data.number.asWord();

@ -2795,11 +2795,11 @@ pub const Compiler = struct {
fn applyToOptionalStatements(language: *res.Language, version: *u32, characteristics: *u32, statements: []*Node, source: []const u8, code_page_lookup: *const CodePageLookup) void {
for (statements) |node| switch (node.id) {
.language_statement => {
const language_statement = @fieldParentPtr(Node.LanguageStatement, "base", node);
const language_statement: *Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
language.* = languageFromLanguageStatement(language_statement, source, code_page_lookup);
},
.simple_statement => {
const simple_statement = @fieldParentPtr(Node.SimpleStatement, "base", node);
const simple_statement: *Node.SimpleStatement = @alignCast(@fieldParentPtr("base", node));
const statement_type = rc.OptionalStatements.map.get(simple_statement.identifier.slice(source)) orelse continue;
const result = Compiler.evaluateNumberExpression(simple_statement.value, source, code_page_lookup);
switch (statement_type) {
@ -2824,7 +2824,7 @@ pub const Compiler = struct {
pub fn getLanguageFromOptionalStatements(statements: []*Node, source: []const u8, code_page_lookup: *const CodePageLookup) ?res.Language {
for (statements) |node| switch (node.id) {
.language_statement => {
const language_statement = @fieldParentPtr(Node.LanguageStatement, "base", node);
const language_statement: *Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
return languageFromLanguageStatement(language_statement, source, code_page_lookup);
},
else => continue,
@ -889,7 +889,7 @@ pub const Parser = struct {
if (control == .control) {
class = try self.parseExpression(.{});
if (class.?.id == .literal) {
const class_literal = @fieldParentPtr(Node.Literal, "base", class.?);
const class_literal: *Node.Literal = @alignCast(@fieldParentPtr("base", class.?));
const is_invalid_control_class = class_literal.token.id == .literal and !rc.ControlClass.map.has(class_literal.token.slice(self.lexer.buffer));
if (is_invalid_control_class) {
return self.addErrorDetailsAndFail(.{
@ -48,7 +48,7 @@ pub const File = struct {
pub fn field_count(file: *const File, node: Ast.Node.Index) u32 {
const scope = file.scopes.get(node) orelse return 0;
if (scope.tag != .namespace) return 0;
const namespace = @fieldParentPtr(Scope.Namespace, "base", scope);
const namespace: *Scope.Namespace = @alignCast(@fieldParentPtr("base", scope));
return namespace.field_count;
}

@ -439,11 +439,11 @@ pub const Scope = struct {
while (true) switch (it.tag) {
.top => unreachable,
.local => {
const local = @fieldParentPtr(Local, "base", it);
const local: *Local = @alignCast(@fieldParentPtr("base", it));
it = local.parent;
},
.namespace => {
const namespace = @fieldParentPtr(Namespace, "base", it);
const namespace: *Namespace = @alignCast(@fieldParentPtr("base", it));
return namespace.decl_index;
},
};
@ -453,7 +453,7 @@ pub const Scope = struct {
switch (scope.tag) {
.top, .local => return null,
.namespace => {
const namespace = @fieldParentPtr(Namespace, "base", scope);
const namespace: *Namespace = @alignCast(@fieldParentPtr("base", scope));
return namespace.names.get(name);
},
}
@ -465,7 +465,7 @@ pub const Scope = struct {
while (true) switch (it.tag) {
.top => break,
.local => {
const local = @fieldParentPtr(Local, "base", it);
const local: *Local = @alignCast(@fieldParentPtr("base", it));
const name_token = main_tokens[local.var_node] + 1;
const ident_name = ast.tokenSlice(name_token);
if (std.mem.eql(u8, ident_name, name)) {
@ -474,7 +474,7 @@ pub const Scope = struct {
it = local.parent;
},
.namespace => {
const namespace = @fieldParentPtr(Namespace, "base", it);
const namespace: *Namespace = @alignCast(@fieldParentPtr("base", it));
if (namespace.names.get(name)) |node| {
return node;
}
@ -1062,8 +1062,8 @@ pub fn getUninstallStep(self: *Build) *Step {

fn makeUninstall(uninstall_step: *Step, prog_node: *std.Progress.Node) anyerror!void {
_ = prog_node;
const uninstall_tls = @fieldParentPtr(*TopLevelStep, "step", uninstall_step);
const self = @fieldParentPtr(*Build, "uninstall_tls", uninstall_tls);
const uninstall_tls: *TopLevelStep = @fieldParentPtr("step", uninstall_step);
const self: *Build = @fieldParentPtr("uninstall_tls", uninstall_tls);

for (self.installed_files.items) |installed_file| {
const full_path = self.getInstallPath(installed_file.dir, installed_file.path);
@ -231,7 +231,7 @@ fn makeNoOp(step: *Step, prog_node: *std.Progress.Node) anyerror!void {

pub fn cast(step: *Step, comptime T: type) ?*T {
if (step.id == T.base_id) {
return @fieldParentPtr(*T, "step", step);
return @fieldParentPtr("step", step);
}
return null;
}
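Aside (not part of the diff): a minimal sketch of the pattern this commit applies throughout, using a hypothetical Build/Step-like pair. The annotated return type is what supplies the result type that the single-argument @fieldParentPtr now relies on; names here are illustrative only.

const std = @import("std");

const Step = struct { id: u8 };
const Build = struct {
    step: Step,

    fn cast(step: *Step) *Build {
        // Before: @fieldParentPtr(Build, "step", step)
        // Now the result type flows in from the return type via result location semantics.
        return @fieldParentPtr("step", step);
    }
};

test "field parent pointer via result location" {
    var b: Build = .{ .step = .{ .id = 1 } };
    try std.testing.expect(Build.cast(&b.step) == &b);
}

In other words, wherever the surrounding code already names the parent type (a typed const, a return type, an @as), the explicit type parameter becomes redundant and is dropped.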
@ -49,7 +49,7 @@ pub fn setName(self: *CheckFile, name: []const u8) void {
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const b = step.owner;
const self = @fieldParentPtr(*CheckFile, "step", step);
const self: *CheckFile = @fieldParentPtr("step", step);

const src_path = self.source.getPath(b);
const contents = fs.cwd().readFileAlloc(b.allocator, src_path, self.max_bytes) catch |err| {
@ -530,7 +530,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const b = step.owner;
const gpa = b.allocator;
const self = @fieldParentPtr(*CheckObject, "step", step);
const self: *CheckObject = @fieldParentPtr("step", step);

const src_path = self.source.getPath(b);
const contents = fs.cwd().readFileAllocOptions(
@ -918,7 +918,7 @@ fn getGeneratedFilePath(self: *Compile, comptime tag_name: []const u8, asking_st
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
const b = step.owner;
const arena = b.allocator;
const self = @fieldParentPtr(*Compile, "step", step);
const self: *Compile = @fieldParentPtr("step", step);

var zig_args = ArrayList([]const u8).init(arena);
defer zig_args.deinit();
@ -167,7 +167,7 @@ fn putValue(self: *ConfigHeader, field_name: []const u8, comptime T: type, v: T)
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const b = step.owner;
const self = @fieldParentPtr(*ConfigHeader, "step", step);
const self: *ConfigHeader = @fieldParentPtr("step", step);
const gpa = b.allocator;
const arena = b.allocator;

@ -47,7 +47,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {

const b = step.owner;
const arena = b.allocator;
const self = @fieldParentPtr(*Fmt, "step", step);
const self: *Fmt = @fieldParentPtr("step", step);

var argv: std.ArrayListUnmanaged([]const u8) = .{};
try argv.ensureUnusedCapacity(arena, 2 + 1 + self.paths.len + 2 * self.exclude_paths.len);
@ -121,7 +121,7 @@ pub fn create(owner: *std.Build, artifact: *Step.Compile, options: Options) *Ins

fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const self = @fieldParentPtr(*InstallArtifact, "step", step);
const self: *InstallArtifact = @fieldParentPtr("step", step);
const dest_builder = step.owner;
const cwd = fs.cwd();

@ -63,7 +63,7 @@ pub fn create(owner: *std.Build, options: Options) *InstallDirStep {

fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const self = @fieldParentPtr(*InstallDirStep, "step", step);
const self: *InstallDirStep = @fieldParentPtr("step", step);
const dest_builder = self.dest_builder;
const arena = dest_builder.allocator;
const dest_prefix = dest_builder.getInstallPath(self.options.install_dir, self.options.install_subdir);
@ -43,7 +43,7 @@ pub fn create(
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const src_builder = step.owner;
const self = @fieldParentPtr(*InstallFile, "step", step);
const self: *InstallFile = @fieldParentPtr("step", step);
const dest_builder = self.dest_builder;
const full_src_path = self.source.getPath2(src_builder, step);
const full_dest_path = dest_builder.getInstallPath(self.dir, self.dest_rel_path);
@ -92,7 +92,7 @@ pub fn getOutputSeparatedDebug(self: *const ObjCopy) ?std.Build.LazyPath {

fn make(step: *Step, prog_node: *std.Progress.Node) !void {
const b = step.owner;
const self = @fieldParentPtr(*ObjCopy, "step", step);
const self: *ObjCopy = @fieldParentPtr("step", step);

var man = b.graph.cache.obtain();
defer man.deinit();
@ -415,7 +415,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;

const b = step.owner;
const self = @fieldParentPtr(*Options, "step", step);
const self: *Options = @fieldParentPtr("step", step);

for (self.args.items) |item| {
self.addOption(
@ -28,7 +28,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;

const b = step.owner;
const self = @fieldParentPtr(*RemoveDir, "step", step);
const self: *RemoveDir = @fieldParentPtr("step", step);

b.build_root.handle.deleteTree(self.dir_path) catch |err| {
if (b.build_root.path) |base| {
@ -497,7 +497,7 @@ const IndexedOutput = struct {
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
const b = step.owner;
const arena = b.allocator;
const self = @fieldParentPtr(*Run, "step", step);
const self: *Run = @fieldParentPtr("step", step);
const has_side_effects = self.hasSideEffects();

var argv_list = ArrayList([]const u8).init(arena);
@ -118,7 +118,7 @@ pub fn defineCMacroRaw(self: *TranslateC, name_and_value: []const u8) void {

fn make(step: *Step, prog_node: *std.Progress.Node) !void {
const b = step.owner;
const self = @fieldParentPtr(*TranslateC, "step", step);
const self: *TranslateC = @fieldParentPtr("step", step);

var argv_list = std.ArrayList([]const u8).init(b.allocator);
try argv_list.append(b.graph.zig_exe);
@ -141,7 +141,7 @@ fn maybeUpdateName(wf: *WriteFile) void {
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const b = step.owner;
const wf = @fieldParentPtr(*WriteFile, "step", step);
const wf: *WriteFile = @fieldParentPtr("step", step);

// Writing to source files is kind of an extra capability of this
// WriteFile - arguably it should be a different step. But anyway here
@ -644,7 +644,7 @@ const PosixImpl = struct {
};

// There's a wait queue on the address; get the queue head and tail.
const head = @fieldParentPtr(Waiter, "node", entry_node);
const head: *Waiter = @fieldParentPtr("node", entry_node);
const tail = head.tail orelse unreachable;

// Push the waiter to the tail by replacing it and linking to the previous tail.
@ -656,7 +656,7 @@ const PosixImpl = struct {
fn remove(treap: *Treap, address: usize, max_waiters: usize) WaitList {
// Find the wait queue associated with this address and get the head/tail if any.
var entry = treap.getEntryFor(address);
var queue_head = if (entry.node) |node| @fieldParentPtr(Waiter, "node", node) else null;
var queue_head: ?*Waiter = if (entry.node) |node| @fieldParentPtr("node", node) else null;
const queue_tail = if (queue_head) |head| head.tail else null;

// Once we're done updating the head, fix it's tail pointer and update the treap's queue head as well.
@ -699,7 +699,7 @@ const PosixImpl = struct {
};

// The queue head and tail must exist if we're removing a queued waiter.
const head = @fieldParentPtr(Waiter, "node", entry.node orelse unreachable);
const head: *Waiter = @fieldParentPtr("node", entry.node orelse unreachable);
const tail = head.tail orelse unreachable;

// A waiter with a previous link is never the head of the queue.
@ -88,8 +88,8 @@ pub fn spawn(pool: *Pool, comptime func: anytype, args: anytype) !void {
run_node: RunQueue.Node = .{ .data = .{ .runFn = runFn } },

fn runFn(runnable: *Runnable) void {
const run_node = @fieldParentPtr(*RunQueue.Node, "data", runnable);
const closure = @fieldParentPtr(*@This(), "run_node", run_node);
const run_node: *RunQueue.Node = @fieldParentPtr("data", runnable);
const closure: *@This() = @fieldParentPtr("run_node", run_node);
@call(.auto, func, closure.arguments);

// The thread pool's allocator is protected by the mutex.
@ -108,7 +108,7 @@ pub const ConnectionPool = struct {
pool.mutex.lock();
defer pool.mutex.unlock();

const node = @fieldParentPtr(*Node, "data", connection);
const node: *Node = @fieldParentPtr("data", connection);

pool.used.remove(node);

@ -1021,4 +1021,5 @@ test {
_ = string_literal;
_ = system;
_ = target;
_ = c_translation;
}
@ -11715,20 +11715,20 @@ const Scope = struct {
fn cast(base: *Scope, comptime T: type) ?*T {
if (T == Defer) {
switch (base.tag) {
.defer_normal, .defer_error => return @alignCast(@fieldParentPtr(*align(1) T, "base", base)),
.defer_normal, .defer_error => return @alignCast(@fieldParentPtr("base", base)),
else => return null,
}
}
if (T == Namespace) {
switch (base.tag) {
.namespace => return @alignCast(@fieldParentPtr(*align(1) T, "base", base)),
.namespace => return @alignCast(@fieldParentPtr("base", base)),
else => return null,
}
}
if (base.tag != T.base_tag)
return null;

return @alignCast(@fieldParentPtr(*align(1) T, "base", base));
return @alignCast(@fieldParentPtr("base", base));
}

fn parent(base: *Scope) ?*Scope {
@ -414,7 +414,7 @@ pub const Macros = struct {
}

pub fn WL_CONTAINER_OF(ptr: anytype, sample: anytype, comptime member: []const u8) @TypeOf(sample) {
return @fieldParentPtr(@TypeOf(sample), member, ptr);
return @fieldParentPtr(member, ptr);
}

/// A 2-argument function-like macro defined as #define FOO(A, B) (A)(B)
@ -299,7 +299,7 @@ const ValueArena = struct {
/// and must live until the matching call to release().
pub fn acquire(self: *ValueArena, child_allocator: Allocator, out_arena_allocator: *std.heap.ArenaAllocator) Allocator {
if (self.state_acquired) |state_acquired| {
return @fieldParentPtr(std.heap.ArenaAllocator, "state", state_acquired).allocator();
return @as(*std.heap.ArenaAllocator, @fieldParentPtr("state", state_acquired)).allocator();
}

out_arena_allocator.* = self.state.promote(child_allocator);
@ -309,7 +309,7 @@ const ValueArena = struct {

/// Releases the allocator acquired by `acquire. `arena_allocator` must match the one passed to `acquire`.
pub fn release(self: *ValueArena, arena_allocator: *std.heap.ArenaAllocator) void {
if (@fieldParentPtr(std.heap.ArenaAllocator, "state", self.state_acquired.?) == arena_allocator) {
if (@as(*std.heap.ArenaAllocator, @fieldParentPtr("state", self.state_acquired.?)) == arena_allocator) {
self.state = self.state_acquired.?.*;
self.state_acquired = null;
}
95
src/link.zig
@ -188,15 +188,10 @@ pub const File = struct {
emit: Compilation.Emit,
options: OpenOptions,
) !*File {
const tag = Tag.fromObjectFormat(comp.root_mod.resolved_target.result.ofmt);
switch (tag) {
.c => {
const ptr = try C.open(arena, comp, emit, options);
return &ptr.base;
},
inline else => |t| {
if (build_options.only_c) unreachable;
const ptr = try t.Type().open(arena, comp, emit, options);
switch (Tag.fromObjectFormat(comp.root_mod.resolved_target.result.ofmt)) {
inline else => |tag| {
if (tag != .c and build_options.only_c) unreachable;
const ptr = try tag.Type().open(arena, comp, emit, options);
return &ptr.base;
},
}
@ -208,25 +203,17 @@ pub const File = struct {
emit: Compilation.Emit,
options: OpenOptions,
) !*File {
const tag = Tag.fromObjectFormat(comp.root_mod.resolved_target.result.ofmt);
switch (tag) {
.c => {
const ptr = try C.createEmpty(arena, comp, emit, options);
return &ptr.base;
},
inline else => |t| {
if (build_options.only_c) unreachable;
const ptr = try t.Type().createEmpty(arena, comp, emit, options);
switch (Tag.fromObjectFormat(comp.root_mod.resolved_target.result.ofmt)) {
inline else => |tag| {
if (tag != .c and build_options.only_c) unreachable;
const ptr = try tag.Type().createEmpty(arena, comp, emit, options);
return &ptr.base;
},
}
}

pub fn cast(base: *File, comptime T: type) ?*T {
if (base.tag != T.base_tag)
return null;

return @fieldParentPtr(*T, "base", base);
return if (base.tag == T.base_tag) @fieldParentPtr("base", base) else null;
}

pub fn makeWritable(base: *File) !void {
@ -383,7 +370,7 @@ pub const File = struct {
.c => unreachable,
.nvptx => unreachable,
inline else => |t| {
return @fieldParentPtr(*t.Type(), "base", base).lowerUnnamedConst(val, decl_index);
return @as(*t.Type(), @fieldParentPtr("base", base)).lowerUnnamedConst(val, decl_index);
},
}
}
@ -402,7 +389,7 @@ pub const File = struct {
.c => unreachable,
.nvptx => unreachable,
inline else => |t| {
return @fieldParentPtr(*t.Type(), "base", base).getGlobalSymbol(name, lib_name);
return @as(*t.Type(), @fieldParentPtr("base", base)).getGlobalSymbol(name, lib_name);
},
}
}
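Aside (not part of the diff): when the parent pointer is immediately used as the callee of a method call, there is no annotated destination to infer the result type from, so these call sites wrap the builtin in @as. A small self-contained sketch of that situation, with illustrative names only:

const std = @import("std");

const Base = struct { tag: u8 };
const Impl = struct {
    base: Base,
    value: u32,

    fn get(impl: *Impl) u32 {
        return impl.value;
    }
};

test "method call on the parent pointer needs an explicit result type" {
    var impl: Impl = .{ .base = .{ .tag = 0 }, .value = 42 };
    const base: *Base = &impl.base;
    // The callee position provides no result location, so @as supplies *Impl.
    try std.testing.expectEqual(@as(u32, 42), @as(*Impl, @fieldParentPtr("base", base)).get());
}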
@ -412,12 +399,9 @@ pub const File = struct {
const decl = module.declPtr(decl_index);
assert(decl.has_tv);
switch (base.tag) {
.c => {
return @fieldParentPtr(*C, "base", base).updateDecl(module, decl_index);
},
inline else => |tag| {
if (build_options.only_c) unreachable;
return @fieldParentPtr(*tag.Type(), "base", base).updateDecl(module, decl_index);
if (tag != .c and build_options.only_c) unreachable;
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateDecl(module, decl_index);
},
}
}
@ -431,12 +415,9 @@ pub const File = struct {
liveness: Liveness,
) UpdateDeclError!void {
switch (base.tag) {
.c => {
return @fieldParentPtr(*C, "base", base).updateFunc(module, func_index, air, liveness);
},
inline else => |tag| {
if (build_options.only_c) unreachable;
return @fieldParentPtr(*tag.Type(), "base", base).updateFunc(module, func_index, air, liveness);
if (tag != .c and build_options.only_c) unreachable;
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateFunc(module, func_index, air, liveness);
},
}
}
@ -446,12 +427,9 @@ pub const File = struct {
assert(decl.has_tv);
switch (base.tag) {
.spirv, .nvptx => {},
.c => {
return @fieldParentPtr(*C, "base", base).updateDeclLineNumber(module, decl_index);
},
inline else => |tag| {
if (build_options.only_c) unreachable;
return @fieldParentPtr(*tag.Type(), "base", base).updateDeclLineNumber(module, decl_index);
if (tag != .c and build_options.only_c) unreachable;
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateDeclLineNumber(module, decl_index);
},
}
}
@ -473,11 +451,9 @@ pub const File = struct {
base.releaseLock();
if (base.file) |f| f.close();
switch (base.tag) {
.c => @fieldParentPtr(*C, "base", base).deinit(),

inline else => |tag| {
if (build_options.only_c) unreachable;
@fieldParentPtr(*tag.Type(), "base", base).deinit();
if (tag != .c and build_options.only_c) unreachable;
@as(*tag.Type(), @fieldParentPtr("base", base)).deinit();
},
}
}
@ -560,7 +536,7 @@ pub const File = struct {
pub fn flush(base: *File, arena: Allocator, prog_node: *std.Progress.Node) FlushError!void {
if (build_options.only_c) {
assert(base.tag == .c);
return @fieldParentPtr(*C, "base", base).flush(arena, prog_node);
return @as(*C, @fieldParentPtr("base", base)).flush(arena, prog_node);
}
const comp = base.comp;
if (comp.clang_preprocessor_mode == .yes or comp.clang_preprocessor_mode == .pch) {
@ -587,7 +563,7 @@ pub const File = struct {
}
switch (base.tag) {
inline else => |tag| {
return @fieldParentPtr(*tag.Type(), "base", base).flush(arena, prog_node);
return @as(*tag.Type(), @fieldParentPtr("base", base)).flush(arena, prog_node);
},
}
}
@ -596,12 +572,9 @@ pub const File = struct {
/// rather than final output mode.
pub fn flushModule(base: *File, arena: Allocator, prog_node: *std.Progress.Node) FlushError!void {
switch (base.tag) {
.c => {
return @fieldParentPtr(*C, "base", base).flushModule(arena, prog_node);
},
inline else => |tag| {
if (build_options.only_c) unreachable;
return @fieldParentPtr(*tag.Type(), "base", base).flushModule(arena, prog_node);
if (tag != .c and build_options.only_c) unreachable;
return @as(*tag.Type(), @fieldParentPtr("base", base)).flushModule(arena, prog_node);
},
}
}
@ -609,12 +582,9 @@ pub const File = struct {
/// Called when a Decl is deleted from the Module.
pub fn freeDecl(base: *File, decl_index: InternPool.DeclIndex) void {
switch (base.tag) {
.c => {
@fieldParentPtr(*C, "base", base).freeDecl(decl_index);
},
inline else => |tag| {
if (build_options.only_c) unreachable;
@fieldParentPtr(*tag.Type(), "base", base).freeDecl(decl_index);
if (tag != .c and build_options.only_c) unreachable;
@as(*tag.Type(), @fieldParentPtr("base", base)).freeDecl(decl_index);
},
}
}
@ -635,12 +605,9 @@ pub const File = struct {
exports: []const *Module.Export,
) UpdateExportsError!void {
switch (base.tag) {
.c => {
return @fieldParentPtr(*C, "base", base).updateExports(module, exported, exports);
},
inline else => |tag| {
if (build_options.only_c) unreachable;
return @fieldParentPtr(*tag.Type(), "base", base).updateExports(module, exported, exports);
if (tag != .c and build_options.only_c) unreachable;
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateExports(module, exported, exports);
},
}
}
@ -664,7 +631,7 @@ pub const File = struct {
.spirv => unreachable,
.nvptx => unreachable,
inline else => |tag| {
return @fieldParentPtr(*tag.Type(), "base", base).getDeclVAddr(decl_index, reloc_info);
return @as(*tag.Type(), @fieldParentPtr("base", base)).getDeclVAddr(decl_index, reloc_info);
},
}
}
@ -683,7 +650,7 @@ pub const File = struct {
.spirv => unreachable,
.nvptx => unreachable,
inline else => |tag| {
return @fieldParentPtr(*tag.Type(), "base", base).lowerAnonDecl(decl_val, decl_align, src_loc);
return @as(*tag.Type(), @fieldParentPtr("base", base)).lowerAnonDecl(decl_val, decl_align, src_loc);
},
}
}
@ -695,7 +662,7 @@ pub const File = struct {
.spirv => unreachable,
.nvptx => unreachable,
inline else => |tag| {
return @fieldParentPtr(*tag.Type(), "base", base).getAnonDeclVAddr(decl_val, reloc_info);
return @as(*tag.Type(), @fieldParentPtr("base", base)).getAnonDeclVAddr(decl_val, reloc_info);
},
}
}
@ -714,7 +681,7 @@ pub const File = struct {
=> {},

inline else => |tag| {
return @fieldParentPtr(*tag.Type(), "base", base).deleteDeclExport(decl_index, name);
return @as(*tag.Type(), @fieldParentPtr("base", base)).deleteDeclExport(decl_index, name);
},
}
}
@ -29,34 +29,28 @@ pub const Node = struct {
map,
list,
value,

pub fn Type(comptime tag: Tag) type {
return switch (tag) {
.doc => Doc,
.map => Map,
.list => List,
.value => Value,
};
}
};

pub fn cast(self: *const Node, comptime T: type) ?*const T {
if (self.tag != T.base_tag) {
return null;
}
return @fieldParentPtr(*const T, "base", self);
return @fieldParentPtr("base", self);
}

pub fn deinit(self: *Node, allocator: Allocator) void {
switch (self.tag) {
.doc => {
const parent = @fieldParentPtr(*Node.Doc, "base", self);
parent.deinit(allocator);
allocator.destroy(parent);
},
.map => {
const parent = @fieldParentPtr(*Node.Map, "base", self);
parent.deinit(allocator);
allocator.destroy(parent);
},
.list => {
const parent = @fieldParentPtr(*Node.List, "base", self);
parent.deinit(allocator);
allocator.destroy(parent);
},
.value => {
const parent = @fieldParentPtr(*Node.Value, "base", self);
inline else => |tag| {
const parent: *tag.Type() = @fieldParentPtr("base", self);
parent.deinit(allocator);
allocator.destroy(parent);
},
@ -69,12 +63,9 @@ pub const Node = struct {
options: std.fmt.FormatOptions,
writer: anytype,
) !void {
return switch (self.tag) {
.doc => @fieldParentPtr(*Node.Doc, "base", self).format(fmt, options, writer),
.map => @fieldParentPtr(*Node.Map, "base", self).format(fmt, options, writer),
.list => @fieldParentPtr(*Node.List, "base", self).format(fmt, options, writer),
.value => @fieldParentPtr(*Node.Value, "base", self).format(fmt, options, writer),
};
switch (self.tag) {
inline else => |tag| return @as(*tag.Type(), @fieldParentPtr("base", self)).format(fmt, options, writer),
}
}

pub const Doc = struct {
@ -59,7 +59,7 @@ pub fn RegisterManager(
pub const RegisterBitSet = StaticBitSet(tracked_registers.len);

fn getFunction(self: *Self) *Function {
return @alignCast(@fieldParentPtr(*align(1) Function, "register_manager", self));
return @alignCast(@fieldParentPtr("register_manager", self));
}

fn excludeRegister(reg: Register, register_class: RegisterBitSet) bool {
@ -693,5 +693,5 @@ test "zero-bit fields in extern struct pad fields appropriately" {
try expect(@intFromPtr(&s) % 2 == 0);
try expect(@intFromPtr(&s.y) - @intFromPtr(&s.x) == 2);
try expect(@intFromPtr(&s.y) == @intFromPtr(&s.a));
try expect(@fieldParentPtr(*S, "a", &s.a) == &s);
try expect(@as(*S, @fieldParentPtr("a", &s.a)) == &s);
}
File diff suppressed because it is too large
@ -1392,13 +1392,13 @@ test "fieldParentPtr of a zero-bit field" {
{
const a = A{ .u = 0 };
const b_ptr = &a.b;
const a_ptr = @fieldParentPtr(*const A, "b", b_ptr);
const a_ptr: *const A = @fieldParentPtr("b", b_ptr);
try std.testing.expectEqual(&a, a_ptr);
}
{
var a = A{ .u = 0 };
const b_ptr = &a.b;
const a_ptr = @fieldParentPtr(*const A, "b", b_ptr);
const a_ptr: *A = @fieldParentPtr("b", b_ptr);
try std.testing.expectEqual(&a, a_ptr);
}
}
@ -1406,17 +1406,17 @@ test "fieldParentPtr of a zero-bit field" {
{
const a = A{ .u = 0 };
const c_ptr = &a.b.c;
const b_ptr = @fieldParentPtr(*const @TypeOf(a.b), "c", c_ptr);
const b_ptr: @TypeOf(&a.b) = @fieldParentPtr("c", c_ptr);
try std.testing.expectEqual(&a.b, b_ptr);
const a_ptr = @fieldParentPtr(*const A, "b", b_ptr);
const a_ptr: *const A = @fieldParentPtr("b", b_ptr);
try std.testing.expectEqual(&a, a_ptr);
}
{
var a = A{ .u = 0 };
const c_ptr = &a.b.c;
const b_ptr = @fieldParentPtr(*const @TypeOf(a.b), "c", c_ptr);
const b_ptr: @TypeOf(&a.b) = @fieldParentPtr("c", c_ptr);
try std.testing.expectEqual(&a.b, b_ptr);
const a_ptr = @fieldParentPtr(*const A, "b", b_ptr);
const a_ptr: *const A = @fieldParentPtr("b", b_ptr);
try std.testing.expectEqual(&a, a_ptr);
}
}
@ -222,7 +222,7 @@ test "fieldParentPtr of tuple" {
var x: u32 = 0;
_ = &x;
const tuple = .{ x, x };
try testing.expect(&tuple == @fieldParentPtr(*const @TypeOf(tuple), "1", &tuple[1]));
try testing.expect(&tuple == @as(@TypeOf(&tuple), @fieldParentPtr("1", &tuple[1])));
}

test "fieldParentPtr of anon struct" {
@ -233,7 +233,7 @@ test "fieldParentPtr of anon struct" {
var x: u32 = 0;
_ = &x;
const anon_st = .{ .foo = x, .bar = x };
try testing.expect(&anon_st == @fieldParentPtr(*const @TypeOf(anon_st), "bar", &anon_st.bar));
try testing.expect(&anon_st == @as(@TypeOf(&anon_st), @fieldParentPtr("bar", &anon_st.bar)));
}

test "offsetOf tuple" {
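Aside (not part of the diff): the tuple and anon struct tests above use @as because an operand of == has no annotated destination, so nothing in the expression would otherwise name the parent type. A minimal standalone illustration of the same pattern, assuming a toy extern struct S:

const std = @import("std");

const S = extern struct { x: u32, y: u32 };

test "explicit result type via @as" {
    var s: S = .{ .x = 1, .y = 2 };
    // No destination type here, so @as provides the result type for @fieldParentPtr.
    try std.testing.expect(@as(*S, @fieldParentPtr("y", &s.y)) == &s);
}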
@ -2,12 +2,12 @@ const Foo = extern struct {
derp: i32,
};
export fn foo(a: *i32) *Foo {
return @fieldParentPtr(*Foo, "a", a);
return @fieldParentPtr("a", a);
}

// error
// backend=stage2
// target=native
//
// :5:34: error: no field named 'a' in struct 'tmp.Foo'
// :5:28: error: no field named 'a' in struct 'tmp.Foo'
// :1:20: note: struct declared here
@ -9,7 +9,7 @@ const foo = Foo{

comptime {
const field_ptr: *i32 = @ptrFromInt(0x1234);
const another_foo_ptr = @fieldParentPtr(*const Foo, "b", field_ptr);
const another_foo_ptr: *const Foo = @fieldParentPtr("b", field_ptr);
_ = another_foo_ptr;
}

@ -8,7 +8,7 @@ const foo = Foo{
};

comptime {
const another_foo_ptr = @fieldParentPtr(*const Foo, "b", &foo.a);
const another_foo_ptr: *const Foo = @fieldParentPtr("b", &foo.a);
_ = another_foo_ptr;
}

@ -16,5 +16,5 @@ comptime {
// backend=stage2
// target=native
//
// :11:29: error: field 'b' has index '1' but pointer value is index '0' of struct 'tmp.Foo'
// :11:41: error: field 'b' has index '1' but pointer value is index '0' of struct 'tmp.Foo'
// :1:13: note: struct declared here
@ -1,12 +1,12 @@
const Foo = extern struct {
a: i32,
};
export fn foo(a: i32) *Foo {
return @fieldParentPtr(*const Foo, "a", a);
export fn foo(a: i32) *const Foo {
return @fieldParentPtr("a", a);
}

// error
// backend=stage2
// target=native
//
// :5:45: error: expected pointer type, found 'i32'
// :5:33: error: expected pointer type, found 'i32'
@ -1,10 +1,10 @@
const Foo = i32;
export fn foo(a: *i32) *Foo {
return @fieldParentPtr(Foo, "a", a);
export fn foo(a: *i32) Foo {
return @fieldParentPtr("a", a);
}

// error
// backend=llvm
// target=native
//
// :3:28: error: expected pointer type, found 'i32'
// :3:12: error: expected pointer type, found 'i32'
@ -5,7 +5,7 @@ pub export fn entry1() void {
@offsetOf(T, "a");
}
pub export fn entry2() void {
@fieldParentPtr(*T, "a", undefined);
@as(*T, @fieldParentPtr("a", undefined));
}

// error
@ -13,4 +13,4 @@ pub export fn entry2() void {
// target=native
//
// :5:5: error: no offset available for comptime field
// :8:25: error: cannot get @fieldParentPtr of a comptime field
// :8:29: error: cannot get @fieldParentPtr of a comptime field
@ -86,7 +86,7 @@ fn compare_headers(step: *std.Build.Step, prog_node: *std.Progress.Node) !void {
const expected_fmt = "expected_{s}";

for (step.dependencies.items) |config_header_step| {
const config_header = @fieldParentPtr(*ConfigHeader, "step", config_header_step);
const config_header: *ConfigHeader = @fieldParentPtr("step", config_header_step);

const zig_header_path = config_header.output_file.path orelse @panic("Could not locate header file");