zig reduce: redesign

Now it works like this:
1. Walk the AST of the source file looking for independent
   reductions and collecting them all into an array list.
2. Randomize the list of transformations. A future enhancement will add
   priority weights to the sorting but for now they are completely
   shuffled.
3. Apply a subset consisting of 1/2 of the transformations and check for
   interestingness.
4. If not interesting, halve the subset size and check again.
5. Repeat until the subset size is 1, then march the transformation
   index forward by 1 with each non-interesting attempt.

At any point if a subset of transformations succeeds in producing an interesting
result, restart the whole process, reparsing the AST and re-generating the list
of all possible transformations and shuffling it again.

As for std.zig.render, the fixups operate based on AST Node Index rather
than Nth index of the function occurrence. This allows precise control
over how to mutate the input.
This commit is contained in:
Andrew Kelley 2023-11-03 19:50:49 -07:00
parent dff13e088b
commit 1396479656
3 changed files with 952 additions and 65 deletions

View File

@ -18,18 +18,24 @@ pub const Fixups = struct {
/// The key is the mut token (`var`/`const`) of the variable declaration
/// that should have a `_ = foo;` inserted afterwards.
unused_var_decls: std.AutoHashMapUnmanaged(Ast.TokenIndex, void) = .{},
/// The functions in this unordered set of indices will render with a
/// function body of `@trap()` instead, with all parameters discarded.
/// The indexes correspond to the order in which the functions appear in
/// the file.
gut_functions: std.AutoHashMapUnmanaged(u32, void) = .{},
/// The functions in this unordered set of AST fn decl nodes will render
/// with a function body of `@trap()` instead, with all parameters
/// discarded.
gut_functions: std.AutoHashMapUnmanaged(Ast.Node.Index, void) = .{},
pub fn count(f: Fixups) usize {
return f.unused_var_decls.count();
return f.unused_var_decls.count() +
f.gut_functions.count();
}
pub fn clearRetainingCapacity(f: *Fixups) void {
f.unused_var_decls.clearRetainingCapacity();
f.gut_functions.clearRetainingCapacity();
}
pub fn deinit(f: *Fixups, gpa: Allocator) void {
f.unused_var_decls.deinit(gpa);
f.gut_functions.deinit(gpa);
f.* = undefined;
}
};
@ -39,9 +45,6 @@ const Render = struct {
ais: *Ais,
tree: Ast,
fixups: Fixups,
/// Keeps track of how many function declarations we have seen so far. Used
/// by Fixups.
function_index: u32,
};
pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast, fixups: Fixups) Error!void {
@ -55,7 +58,6 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast, fixups: Fixups) Error!v
.ais = &auto_indenting_stream,
.tree = tree,
.fixups = fixups,
.function_index = 0,
};
// Render all the line comments at the beginning of the file.
@ -115,8 +117,6 @@ fn renderMember(
try renderDocComments(r, tree.firstToken(decl));
switch (tree.nodes.items(.tag)[decl]) {
.fn_decl => {
const this_index = r.function_index;
r.function_index += 1;
// Some examples:
// pub extern "foo" fn ...
// export fn ...
@ -162,7 +162,7 @@ fn renderMember(
assert(datas[decl].rhs != 0);
try renderExpression(r, fn_proto, .space);
const body_node = datas[decl].rhs;
if (r.fixups.gut_functions.contains(this_index)) {
if (r.fixups.gut_functions.contains(decl)) {
ais.pushIndent();
const lbrace = tree.nodes.items(.main_token)[body_node];
try renderToken(r, lbrace, .newline);
@ -2136,7 +2136,6 @@ fn renderArrayInit(
.ais = &auto_indenting_stream,
.tree = r.tree,
.fixups = r.fixups,
.function_index = r.function_index,
};
// Calculate size of columns in current section

View File

@ -3,6 +3,8 @@ const mem = std.mem;
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const fatal = @import("./main.zig").fatal;
const Ast = std.zig.Ast;
const Walk = @import("reduce/Walk.zig");
const usage =
\\zig reduce [options] ./checker root_source_file.zig [-- [argv]]
@ -16,6 +18,8 @@ const usage =
\\ exit(other): not interesting
\\
\\options:
\\ --seed [integer] Override the random seed. Defaults to 0
\\ --skip-smoke-test Skip interestingness check smoke test
\\ --mod [name]:[deps]:[src] Make a module available for dependency under the given name
\\ deps: [dep],[dep],...
\\ dep: [[import=]name]
@ -32,20 +36,23 @@ const Interestingness = enum { interesting, unknown, boring };
// Roadmap:
// - add thread pool
// - add support for `@import` detection and other files
// - add support for parsing the module flags
// - more fancy transformations
// - reduce flags sent to the compiler
// - @import inlining
// - @import inlining of modules
// - @import inlining of files
// - deleting unused functions and other globals
// - removing statements or blocks of code
// - replacing operands of `and` and `or` with `true` and `false`
// - replacing if conditions with `true` and `false`
// - integrate the build system?
// - reduce flags sent to the compiler
// - integrate with the build system?
pub fn main(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
var opt_checker_path: ?[]const u8 = null;
var opt_root_source_file_path: ?[]const u8 = null;
var argv: []const []const u8 = &.{};
var seed: u32 = 0;
var skip_smoke_test = false;
{
var i: usize = 2; // skip over "zig" and "reduce"
@ -59,6 +66,23 @@ pub fn main(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
} else if (mem.eql(u8, arg, "--")) {
argv = args[i + 1 ..];
break;
} else if (mem.eql(u8, arg, "--skip-smoke-test")) {
skip_smoke_test = true;
} else if (mem.eql(u8, arg, "--main-mod-path")) {
@panic("TODO: implement --main-mod-path");
} else if (mem.eql(u8, arg, "--mod")) {
@panic("TODO: implement --mod");
} else if (mem.eql(u8, arg, "--deps")) {
@panic("TODO: implement --deps");
} else if (mem.eql(u8, arg, "--seed")) {
i += 1;
if (i >= args.len) fatal("expected 32-bit integer after {s}", .{arg});
const next_arg = args[i];
seed = std.fmt.parseUnsigned(u32, next_arg, 0) catch |err| {
fatal("unable to parse seed '{s}' as 32-bit integer: {s}", .{
next_arg, @errorName(err),
});
};
} else {
fatal("unrecognized parameter: '{s}'", .{arg});
}
@ -85,30 +109,11 @@ pub fn main(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
var rendered = std.ArrayList(u8).init(gpa);
defer rendered.deinit();
var prev_rendered = std.ArrayList(u8).init(gpa);
defer prev_rendered.deinit();
const source_code = try std.fs.cwd().readFileAllocOptions(
arena,
root_source_file_path,
std.math.maxInt(u32),
null,
1,
0,
);
var tree = try std.zig.Ast.parse(gpa, source_code, .zig);
var tree = try parse(gpa, arena, root_source_file_path);
defer tree.deinit(gpa);
if (tree.errors.len != 0) {
@panic("syntax errors occurred");
}
var next_gut_fn_index: u32 = 0;
var fixups: std.zig.Ast.Fixups = .{};
{
// smoke test the interestingness check
if (!skip_smoke_test) {
std.debug.print("smoke testing the interestingness check...\n", .{});
switch (try runCheck(arena, interestingness_argv.items)) {
.interesting => {},
.boring, .unknown => |t| {
@ -119,40 +124,97 @@ pub fn main(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
}
}
while (true) {
try fixups.gut_functions.put(arena, next_gut_fn_index, {});
var fixups: Ast.Fixups = .{};
defer fixups.deinit(gpa);
var rng = std.rand.DefaultPrng.init(seed);
// 1. Walk the AST of the source file looking for independent
// reductions and collecting them all into an array list.
// 2. Randomize the list of transformations. A future enhancement will add
// priority weights to the sorting but for now they are completely
// shuffled.
// 3. Apply a subset consisting of 1/2 of the transformations and check for
// interestingness.
// 4. If not interesting, half the subset size again and check again.
// 5. Repeat until the subset size is 1, then march the transformation
// index forward by 1 with each non-interesting attempt.
//
// At any point if a subset of transformations succeeds in producing an interesting
// result, restart the whole process, reparsing the AST and re-generating the list
// of all possible transformations and shuffling it again.
var transformations = std.ArrayList(Walk.Transformation).init(gpa);
defer transformations.deinit();
try Walk.findTransformations(&tree, &transformations);
sortTransformations(transformations.items, rng.random());
fresh: while (transformations.items.len > 0) {
std.debug.print("found {d} possible transformations\n", .{
transformations.items.len,
});
var subset_size: usize = transformations.items.len;
var start_index: usize = 0;
while (start_index < transformations.items.len) {
subset_size = @max(1, subset_size / 2);
const this_set = transformations.items[start_index..][0..subset_size];
try transformationsToFixups(gpa, this_set, &fixups);
rendered.clearRetainingCapacity();
try tree.renderToArrayList(&rendered, fixups);
try std.fs.cwd().writeFile(root_source_file_path, rendered.items);
const interestingness = try runCheck(arena, interestingness_argv.items);
std.debug.print("{d} random transformations: {s}\n", .{
subset_size, @tagName(interestingness),
});
switch (interestingness) {
.interesting => {
const new_tree = try parse(gpa, arena, root_source_file_path);
tree.deinit(gpa);
tree = new_tree;
try Walk.findTransformations(&tree, &transformations);
// Resetting based on the seed again means we will get the same
// results if restarting the reduction process from this new point.
rng = std.rand.DefaultPrng.init(seed);
sortTransformations(transformations.items, rng.random());
continue :fresh;
},
.unknown, .boring => {
// Continue to try the next set of transformations.
// If we tested only one transformation, move on to the next one.
if (subset_size == 1) {
start_index += 1;
}
},
}
}
std.debug.print("all {d} remaining transformations are uninteresting\n", .{
transformations.items.len,
});
// Revert the source back to not be transformed.
fixups.clearRetainingCapacity();
rendered.clearRetainingCapacity();
try tree.renderToArrayList(&rendered, fixups);
if (mem.eql(u8, rendered.items, prev_rendered.items)) {
std.debug.print("no remaining transformations\n", .{});
break;
}
prev_rendered.clearRetainingCapacity();
try prev_rendered.appendSlice(rendered.items);
try std.fs.cwd().writeFile(root_source_file_path, rendered.items);
const interestingness = try runCheck(arena, interestingness_argv.items);
std.debug.print("{s}\n", .{@tagName(interestingness)});
switch (interestingness) {
.interesting => {
next_gut_fn_index += 1;
},
.unknown, .boring => {
// revert the change and try the next transformation
assert(fixups.gut_functions.remove(next_gut_fn_index));
next_gut_fn_index += 1;
rendered.clearRetainingCapacity();
try tree.renderToArrayList(&rendered, fixups);
},
}
return std.process.cleanExit();
}
std.debug.print("no more transformations found\n", .{});
return std.process.cleanExit();
}
fn sortTransformations(transformations: []Walk.Transformation, rng: std.rand.Random) void {
rng.shuffle(Walk.Transformation, transformations);
// Stable sort based on priority to keep randomness as the secondary sort.
// TODO: introduce transformation priorities
// std.mem.sort(transformations);
}
fn termToInteresting(term: std.process.Child.Term) Interestingness {
return switch (term) {
.Exited => |code| switch (code) {
@ -176,3 +238,37 @@ fn runCheck(arena: std.mem.Allocator, argv: []const []const u8) !Interestingness
std.debug.print("{s}", .{result.stderr});
return termToInteresting(result.term);
}
fn transformationsToFixups(
gpa: Allocator,
transforms: []const Walk.Transformation,
fixups: *Ast.Fixups,
) !void {
fixups.clearRetainingCapacity();
for (transforms) |t| switch (t) {
.gut_function => |fn_decl_node| {
try fixups.gut_functions.put(gpa, fn_decl_node, {});
},
};
}
fn parse(gpa: Allocator, arena: Allocator, root_source_file_path: []const u8) !Ast {
const source_code = try std.fs.cwd().readFileAllocOptions(
arena,
root_source_file_path,
std.math.maxInt(u32),
null,
1,
0,
);
var tree = try Ast.parse(gpa, source_code, .zig);
errdefer tree.deinit(gpa);
if (tree.errors.len != 0) {
@panic("syntax errors occurred");
}
return tree;
}

792
src/reduce/Walk.zig Normal file
View File

@ -0,0 +1,792 @@
const std = @import("std");
const Ast = std.zig.Ast;
const Walk = @This();
const assert = std.debug.assert;
ast: *const Ast,
transformations: *std.ArrayList(Transformation),
/// A single independent reduction that can be applied to the source file.
/// Each variant carries the AST node it operates on, so fixups can target
/// the exact node rather than an occurrence count.
pub const Transformation = union(enum) {
/// Replace the fn decl AST Node with one whose body is only `@trap()` with
/// discarded parameters.
gut_function: Ast.Node.Index,
};
/// Walking only appends to the transformations list, so allocation failure
/// is the only possible error.
pub const Error = error{OutOfMemory};
/// Re-populates `transformations` with every independent reduction found in
/// `ast`, discarding any previous contents. The caller is expected to shuffle
/// and prioritize the resulting list afterwards.
pub fn findTransformations(ast: *const Ast, transformations: *std.ArrayList(Transformation)) !void {
    transformations.clearRetainingCapacity();
    var w: Walk = .{
        .ast = ast,
        .transformations = transformations,
    };
    return walkMembers(&w, ast.rootDecls());
}
/// Visit every container-level declaration in `members`, in order.
fn walkMembers(w: *Walk, members: []const Ast.Node.Index) Error!void {
    for (members) |decl_node| try walkMember(w, decl_node);
}
/// Walk one container-level declaration. This is the only place where
/// transformations are discovered: a `fn_decl` whose body is not already
/// gutted gets a `gut_function` transformation appended.
fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
const ast = w.ast;
const datas = ast.nodes.items(.data);
switch (ast.nodes.items(.tag)[decl]) {
.fn_decl => {
// lhs is the prototype, rhs is the body block.
const fn_proto = datas[decl].lhs;
try walkExpression(w, fn_proto);
const body_node = datas[decl].rhs;
// Only propose gutting bodies that are not already `@trap()`;
// otherwise the reducer would re-propose no-op transformations.
if (!isFnBodyGutted(ast, body_node)) {
try w.transformations.append(.{ .gut_function = decl });
}
try walkExpression(w, body_node);
},
.fn_proto_simple,
.fn_proto_multi,
.fn_proto_one,
.fn_proto,
=> {
try walkExpression(w, decl);
},
.@"usingnamespace" => {
const expr = datas[decl].lhs;
try walkExpression(w, expr);
},
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> try walkVarDecl(w, ast.fullVarDecl(decl).?),
.test_decl => {
// rhs is the test body block.
try walkExpression(w, datas[decl].rhs);
},
.container_field_init,
.container_field_align,
.container_field,
=> try walkContainerField(w, ast.fullContainerField(decl).?),
.@"comptime" => try walkExpression(w, decl),
.root => unreachable,
else => unreachable,
}
}
/// Recursively walk an expression node, visiting every sub-expression.
/// No transformations are appended here (that happens in `walkMember`);
/// the traversal exists so that declarations nested inside expressions
/// (e.g. container decls) are reached. Decl-level tags are `unreachable`
/// because `walkMember`/`walkBlock` dispatch them before getting here.
fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
const ast = w.ast;
const token_tags = ast.tokens.items(.tag);
const main_tokens = ast.nodes.items(.main_token);
const node_tags = ast.nodes.items(.tag);
const datas = ast.nodes.items(.data);
switch (node_tags[node]) {
.identifier => {},
.number_literal,
.char_literal,
.unreachable_literal,
.anyframe_literal,
.string_literal,
=> {},
.multiline_string_literal => {},
.error_value => {},
.block_two,
.block_two_semicolon,
=> {
// lhs/rhs hold up to two statements; a 0 terminates the list early.
const statements = [2]Ast.Node.Index{ datas[node].lhs, datas[node].rhs };
if (datas[node].lhs == 0) {
return walkBlock(w, node, statements[0..0]);
} else if (datas[node].rhs == 0) {
return walkBlock(w, node, statements[0..1]);
} else {
return walkBlock(w, node, statements[0..2]);
}
},
.block,
.block_semicolon,
=> {
// Longer blocks store their statement list in extra_data.
const statements = ast.extra_data[datas[node].lhs..datas[node].rhs];
return walkBlock(w, node, statements);
},
.@"errdefer" => {
const expr = datas[node].rhs;
return walkExpression(w, expr);
},
.@"defer" => {
const expr = datas[node].rhs;
return walkExpression(w, expr);
},
.@"comptime", .@"nosuspend" => {
const block = datas[node].lhs;
return walkExpression(w, block);
},
.@"suspend" => {
const body = datas[node].lhs;
return walkExpression(w, body);
},
.@"catch" => {
try walkExpression(w, datas[node].lhs); // target
try walkExpression(w, datas[node].rhs); // fallback
},
.field_access => {
const field_access = datas[node];
try walkExpression(w, field_access.lhs);
},
.error_union,
.switch_range,
=> {
const infix = datas[node];
try walkExpression(w, infix.lhs);
return walkExpression(w, infix.rhs);
},
.for_range => {
// rhs (the upper bound) is optional: `0..` has none.
const infix = datas[node];
try walkExpression(w, infix.lhs);
if (infix.rhs != 0) {
return walkExpression(w, infix.rhs);
}
},
.add,
.add_wrap,
.add_sat,
.array_cat,
.array_mult,
.assign,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
.assign_shl_sat,
.assign_shr,
.assign_bit_xor,
.assign_div,
.assign_sub,
.assign_sub_wrap,
.assign_sub_sat,
.assign_mod,
.assign_add,
.assign_add_wrap,
.assign_add_sat,
.assign_mul,
.assign_mul_wrap,
.assign_mul_sat,
.bang_equal,
.bit_and,
.bit_or,
.shl,
.shl_sat,
.shr,
.bit_xor,
.bool_and,
.bool_or,
.div,
.equal_equal,
.greater_or_equal,
.greater_than,
.less_or_equal,
.less_than,
.merge_error_sets,
.mod,
.mul,
.mul_wrap,
.mul_sat,
.sub,
.sub_wrap,
.sub_sat,
.@"orelse",
=> {
// All binary operators: walk both operands.
const infix = datas[node];
try walkExpression(w, infix.lhs);
try walkExpression(w, infix.rhs);
},
.assign_destructure => {
// extra_data layout: [lhs_count, lhs_0, lhs_1, ...]; rhs is the value.
const lhs_count = ast.extra_data[datas[node].lhs];
assert(lhs_count > 1);
const lhs_exprs = ast.extra_data[datas[node].lhs + 1 ..][0..lhs_count];
const rhs = datas[node].rhs;
for (lhs_exprs) |lhs_node| {
switch (node_tags[lhs_node]) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> try walkVarDecl(w, ast.fullVarDecl(lhs_node).?),
else => try walkExpression(w, lhs_node),
}
}
return walkExpression(w, rhs);
},
.bit_not,
.bool_not,
.negation,
.negation_wrap,
.optional_type,
.address_of,
=> {
return walkExpression(w, datas[node].lhs);
},
.@"try",
.@"resume",
.@"await",
=> {
return walkExpression(w, datas[node].lhs);
},
// NOTE(review): array and pointer types are not descended into, so
// `walkArrayType` is currently unused — confirm this is intentional.
.array_type,
.array_type_sentinel,
=> {},
.ptr_type_aligned,
.ptr_type_sentinel,
.ptr_type,
.ptr_type_bit_range,
=> {},
.array_init_one,
.array_init_one_comma,
.array_init_dot_two,
.array_init_dot_two_comma,
.array_init_dot,
.array_init_dot_comma,
.array_init,
.array_init_comma,
=> {
var elements: [2]Ast.Node.Index = undefined;
return walkArrayInit(w, ast.fullArrayInit(&elements, node).?);
},
.struct_init_one,
.struct_init_one_comma,
.struct_init_dot_two,
.struct_init_dot_two_comma,
.struct_init_dot,
.struct_init_dot_comma,
.struct_init,
.struct_init_comma,
=> {
var buf: [2]Ast.Node.Index = undefined;
return walkStructInit(w, node, ast.fullStructInit(&buf, node).?);
},
.call_one,
.call_one_comma,
.async_call_one,
.async_call_one_comma,
.call,
.call_comma,
.async_call,
.async_call_comma,
=> {
var buf: [1]Ast.Node.Index = undefined;
return walkCall(w, ast.fullCall(&buf, node).?);
},
.array_access => {
const suffix = datas[node];
try walkExpression(w, suffix.lhs);
try walkExpression(w, suffix.rhs);
},
.slice_open, .slice, .slice_sentinel => return walkSlice(w, node, ast.fullSlice(node).?),
.deref => {
try walkExpression(w, datas[node].lhs);
},
.unwrap_optional => {
try walkExpression(w, datas[node].lhs);
},
.@"break" => {
// Both the label token and the target expression are optional.
const label_token = datas[node].lhs;
const target = datas[node].rhs;
if (label_token == 0 and target == 0) {
// no expressions
} else if (label_token == 0 and target != 0) {
try walkExpression(w, target);
} else if (label_token != 0 and target == 0) {
try walkIdentifier(w, label_token);
} else if (label_token != 0 and target != 0) {
try walkExpression(w, target);
}
},
.@"continue" => {
const label = datas[node].lhs;
if (label != 0) {
return walkIdentifier(w, label); // label
}
},
.@"return" => {
if (datas[node].lhs != 0) {
try walkExpression(w, datas[node].lhs);
}
},
.grouped_expression => {
try walkExpression(w, datas[node].lhs);
},
.container_decl,
.container_decl_trailing,
.container_decl_arg,
.container_decl_arg_trailing,
.container_decl_two,
.container_decl_two_trailing,
.tagged_union,
.tagged_union_trailing,
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
.tagged_union_two,
.tagged_union_two_trailing,
=> {
var buf: [2]Ast.Node.Index = undefined;
return walkContainerDecl(w, node, ast.fullContainerDecl(&buf, node).?);
},
.error_set_decl => {
// Error sets have no child nodes; iterate the tokens between braces.
const error_token = main_tokens[node];
const lbrace = error_token + 1;
const rbrace = datas[node].rhs;
var i = lbrace + 1;
while (i < rbrace) : (i += 1) {
switch (token_tags[i]) {
.doc_comment => unreachable, // TODO
.identifier => try walkIdentifier(w, i),
.comma => {},
else => unreachable,
}
}
},
.builtin_call_two, .builtin_call_two_comma => {
// Up to two arguments stored inline; 0 terminates the list early.
if (datas[node].lhs == 0) {
return walkBuiltinCall(w, main_tokens[node], &.{});
} else if (datas[node].rhs == 0) {
return walkBuiltinCall(w, main_tokens[node], &.{datas[node].lhs});
} else {
return walkBuiltinCall(w, main_tokens[node], &.{ datas[node].lhs, datas[node].rhs });
}
},
.builtin_call, .builtin_call_comma => {
const params = ast.extra_data[datas[node].lhs..datas[node].rhs];
return walkBuiltinCall(w, main_tokens[node], params);
},
.fn_proto_simple,
.fn_proto_multi,
.fn_proto_one,
.fn_proto,
=> {
var buf: [1]Ast.Node.Index = undefined;
return walkFnProto(w, ast.fullFnProto(&buf, node).?);
},
.anyframe_type => {
if (datas[node].rhs != 0) {
return walkExpression(w, datas[node].rhs);
}
},
.@"switch",
.switch_comma,
=> {
const condition = datas[node].lhs;
const extra = ast.extraData(datas[node].rhs, Ast.Node.SubRange);
const cases = ast.extra_data[extra.start..extra.end];
try walkExpression(w, condition); // condition expression
try walkExpressions(w, cases);
},
.switch_case_one,
.switch_case_inline_one,
.switch_case,
.switch_case_inline,
=> return walkSwitchCase(w, ast.fullSwitchCase(node).?),
.while_simple,
.while_cont,
.@"while",
=> return walkWhile(w, ast.fullWhile(node).?),
.for_simple,
.@"for",
=> return walkFor(w, ast.fullFor(node).?),
.if_simple,
.@"if",
=> return walkIf(w, ast.fullIf(node).?),
.asm_simple,
.@"asm",
=> return walkAsm(w, ast.fullAsm(node).?),
.enum_literal => {
return walkIdentifier(w, main_tokens[node]); // name
},
// Decl-level and helper tags are dispatched by walkMember/walkBlock
// and must never reach this function.
.fn_decl => unreachable,
.container_field => unreachable,
.container_field_init => unreachable,
.container_field_align => unreachable,
.root => unreachable,
.global_var_decl => unreachable,
.local_var_decl => unreachable,
.simple_var_decl => unreachable,
.aligned_var_decl => unreachable,
.@"usingnamespace" => unreachable,
.test_decl => unreachable,
.asm_output => unreachable,
.asm_input => unreachable,
}
}
/// Walk a variable declaration: name, then the optional type, align,
/// addrspace, and section expressions, then the (required) initializer.
fn walkVarDecl(w: *Walk, var_decl: Ast.full.VarDecl) Error!void {
    const a = var_decl.ast;
    try walkIdentifier(w, a.mut_token + 1); // name token follows `var`/`const`
    if (a.type_node != 0) try walkExpression(w, a.type_node);
    if (a.align_node != 0) try walkExpression(w, a.align_node);
    if (a.addrspace_node != 0) try walkExpression(w, a.addrspace_node);
    if (a.section_node != 0) try walkExpression(w, a.section_node);
    assert(a.init_node != 0);
    return walkExpression(w, a.init_node);
}
/// Walk a container field's optional type, alignment, and default-value
/// expressions. All three are optional in the AST (node index 0 means
/// absent). The previous version walked `value_expr` unconditionally, so a
/// field with no default value (`x: u32,`) descended into node 0 — the root
/// node — which hits `.root => unreachable` in `walkExpression`.
fn walkContainerField(w: *Walk, field: Ast.full.ContainerField) Error!void {
    if (field.ast.type_expr != 0) {
        try walkExpression(w, field.ast.type_expr); // type
    }
    if (field.ast.align_expr != 0) {
        try walkExpression(w, field.ast.align_expr); // alignment
    }
    if (field.ast.value_expr != 0) {
        try walkExpression(w, field.ast.value_expr); // default value
    }
}
/// Walk each statement of a block. Variable declarations need the
/// `fullVarDecl` accessor; every other statement is walked as an expression.
fn walkBlock(
    w: *Walk,
    block_node: Ast.Node.Index,
    statements: []const Ast.Node.Index,
) Error!void {
    _ = block_node; // unused; parameter kept for symmetry with render.zig
    const tags = w.ast.nodes.items(.tag);
    for (statements) |stmt_node| switch (tags[stmt_node]) {
        .global_var_decl,
        .local_var_decl,
        .simple_var_decl,
        .aligned_var_decl,
        => try walkVarDecl(w, w.ast.fullVarDecl(stmt_node).?),
        else => try walkExpression(w, stmt_node),
    };
}
/// Walk an array type: element count, optional sentinel, then element type.
/// NOTE(review): currently unreferenced — walkExpression skips array types.
fn walkArrayType(w: *Walk, array_type: Ast.full.ArrayType) Error!void {
    const a = array_type.ast;
    try walkExpression(w, a.elem_count);
    if (a.sentinel != 0) try walkExpression(w, a.sentinel);
    return walkExpression(w, a.elem_type);
}
/// Walk an array initializer: the optional explicit type, then each element.
fn walkArrayInit(w: *Walk, array_init: Ast.full.ArrayInit) Error!void {
    const a = array_init.ast;
    if (a.type_expr != 0) try walkExpression(w, a.type_expr); // T
    for (a.elements) |element_node| try walkExpression(w, element_node);
}
/// Walk a struct initializer: the optional explicit type, then each field
/// initialization expression.
fn walkStructInit(
    w: *Walk,
    struct_node: Ast.Node.Index,
    struct_init: Ast.full.StructInit,
) Error!void {
    _ = struct_node; // unused; parameter kept for symmetry with render.zig
    const a = struct_init.ast;
    if (a.type_expr != 0) try walkExpression(w, a.type_expr); // T
    for (a.fields) |field_node| try walkExpression(w, field_node);
}
/// Walk the callee expression followed by every argument.
fn walkCall(w: *Walk, call: Ast.full.Call) Error!void {
    try walkExpression(w, call.ast.fn_expr);
    return walkParamList(w, call.ast.params);
}
/// Walk a slice expression: the sliced operand, the start bound, and the
/// optional end bound and sentinel.
fn walkSlice(
    w: *Walk,
    slice_node: Ast.Node.Index,
    slice: Ast.full.Slice,
) Error!void {
    _ = slice_node; // unused; parameter kept for symmetry with render.zig
    const a = slice.ast;
    try walkExpression(w, a.sliced);
    try walkExpression(w, a.start);
    if (a.end != 0) try walkExpression(w, a.end);
    if (a.sentinel != 0) try walkExpression(w, a.sentinel);
}
/// Identifiers are leaves with no reductions of their own; this is a no-op
/// placeholder so call sites mirror the structure of render.zig.
fn walkIdentifier(w: *Walk, token_index: Ast.TokenIndex) Error!void {
_ = w;
_ = token_index;
}
/// Walk a container declaration: the optional backing-type argument (e.g.
/// `enum(u8)`), then every member declaration.
fn walkContainerDecl(
    w: *Walk,
    container_decl_node: Ast.Node.Index,
    container_decl: Ast.full.ContainerDecl,
) Error!void {
    _ = container_decl_node; // unused; parameter kept for symmetry with render.zig
    if (container_decl.ast.arg != 0) {
        try walkExpression(w, container_decl.ast.arg);
    }
    return walkMembers(w, container_decl.ast.members);
}
/// Walk every argument of a builtin call; the builtin name itself is just a
/// token and offers nothing to reduce.
fn walkBuiltinCall(
    w: *Walk,
    builtin_token: Ast.TokenIndex,
    params: []const Ast.Node.Index,
) Error!void {
    _ = builtin_token;
    return walkParamList(w, params);
}
/// Walk a function prototype: each parameter's type, then the optional
/// align/addrspace/section/callconv expressions, then the return type.
fn walkFnProto(w: *Walk, fn_proto: Ast.full.FnProto) Error!void {
    var param_it = fn_proto.iterate(w.ast);
    while (param_it.next()) |param| {
        // 0 means the parameter carries no type expression.
        if (param.type_expr != 0) try walkExpression(w, param.type_expr);
    }
    const a = fn_proto.ast;
    if (a.align_expr != 0) try walkExpression(w, a.align_expr);
    if (a.addrspace_expr != 0) try walkExpression(w, a.addrspace_expr);
    if (a.section_expr != 0) try walkExpression(w, a.section_expr);
    if (a.callconv_expr != 0) try walkExpression(w, a.callconv_expr);
    return walkExpression(w, a.return_type);
}
/// Walk each expression in a slice, in order.
fn walkExpressions(w: *Walk, expressions: []const Ast.Node.Index) Error!void {
    for (expressions) |expr_node| try walkExpression(w, expr_node);
}
/// Walk a switch case's value expressions, then its target expression.
fn walkSwitchCase(w: *Walk, switch_case: Ast.full.SwitchCase) Error!void {
    try walkExpressions(w, switch_case.ast.values);
    return walkExpression(w, switch_case.ast.target_expr);
}
/// Walk a while loop: condition, optional continue expression, then the
/// then/else branches. The previous version walked the condition a second
/// time after the continue expression; that redundant traversal is removed.
fn walkWhile(w: *Walk, while_node: Ast.full.While) Error!void {
    try walkExpression(w, while_node.ast.cond_expr); // condition
    if (while_node.ast.cont_expr != 0) {
        try walkExpression(w, while_node.ast.cont_expr);
    }
    if (while_node.ast.then_expr != 0) {
        try walkExpression(w, while_node.ast.then_expr);
    }
    if (while_node.ast.else_expr != 0) {
        try walkExpression(w, while_node.ast.else_expr);
    }
}
/// Walk a for loop: the iterated inputs, then the then/else branches.
fn walkFor(w: *Walk, for_node: Ast.full.For) Error!void {
    const a = for_node.ast;
    try walkParamList(w, a.inputs);
    if (a.then_expr != 0) try walkExpression(w, a.then_expr);
    if (a.else_expr != 0) try walkExpression(w, a.else_expr);
}
/// Walk an if expression: condition, then the then/else branches.
fn walkIf(w: *Walk, if_node: Ast.full.If) Error!void {
    const a = if_node.ast;
    try walkExpression(w, a.cond_expr); // condition
    if (a.then_expr != 0) try walkExpression(w, a.then_expr);
    if (a.else_expr != 0) try walkExpression(w, a.else_expr);
}
/// Walk an inline assembly expression: the template, then each item.
fn walkAsm(w: *Walk, asm_node: Ast.full.Asm) Error!void {
    try walkExpression(w, asm_node.ast.template);
    return walkParamList(w, asm_node.ast.items);
}
/// Walk every node in a parameter/argument list.
fn walkParamList(w: *Walk, params: []const Ast.Node.Index) Error!void {
    return walkExpressions(w, params);
}
/// Check if it is already gutted (i.e. its body replaced with `@trap()`).
/// A gutted body is a block consisting of zero or more parameter discards
/// (`_ = foo;`) followed by exactly one trailing `@trap();` call.
fn isFnBodyGutted(ast: *const Ast, body_node: Ast.Node.Index) bool {
// skip over discards
const node_tags = ast.nodes.items(.tag);
const datas = ast.nodes.items(.data);
var statements_buf: [2]Ast.Node.Index = undefined;
// Normalize the two block encodings into one statement slice: short blocks
// store up to two statements inline (0 terminates the list early); long
// blocks index into extra_data.
const statements = switch (node_tags[body_node]) {
.block_two,
.block_two_semicolon,
=> blk: {
statements_buf[0..2].* = .{ datas[body_node].lhs, datas[body_node].rhs };
break :blk if (datas[body_node].lhs == 0)
statements_buf[0..0]
else if (datas[body_node].rhs == 0)
statements_buf[0..1]
else
statements_buf[0..2];
},
.block,
.block_semicolon,
=> ast.extra_data[datas[body_node].lhs..datas[body_node].rhs],
// A non-block body can never be the gutted pattern.
else => return false,
};
var i: usize = 0;
while (i < statements.len) : (i += 1) {
switch (categorizeStmt(ast, statements[i])) {
.discard_identifier => continue,
// Gutted only if the @trap() is the final statement.
.trap_call => return i + 1 == statements.len,
else => return false,
}
}
// All statements were discards with no trailing @trap().
return false;
}
/// Classification of one statement inside a function body, used by
/// `isFnBodyGutted` to recognize already-gutted bodies.
const StmtCategory = enum {
/// `_ = foo;` where foo is an identifier (a parameter discard).
discard_identifier,
/// A `@trap()` call with no arguments.
trap_call,
/// Any other statement.
other,
};
/// Classify a single statement for `isFnBodyGutted`: a builtin call is
/// forwarded to `categorizeBuiltinCall`, `_ = <identifier>;` is a discard,
/// and everything else is `.other`.
fn categorizeStmt(ast: *const Ast, stmt: Ast.Node.Index) StmtCategory {
    const node_tags = ast.nodes.items(.tag);
    const datas = ast.nodes.items(.data);
    const main_tokens = ast.nodes.items(.main_token);
    switch (node_tags[stmt]) {
        .builtin_call_two, .builtin_call_two_comma => {
            // Up to two arguments stored inline; 0 terminates the list early.
            const pair = datas[stmt];
            if (pair.lhs == 0)
                return categorizeBuiltinCall(ast, main_tokens[stmt], &.{});
            if (pair.rhs == 0)
                return categorizeBuiltinCall(ast, main_tokens[stmt], &.{pair.lhs});
            return categorizeBuiltinCall(ast, main_tokens[stmt], &.{ pair.lhs, pair.rhs });
        },
        .builtin_call, .builtin_call_comma => return categorizeBuiltinCall(
            ast,
            main_tokens[stmt],
            ast.extra_data[datas[stmt].lhs..datas[stmt].rhs],
        ),
        .assign => {
            const pair = datas[stmt];
            const is_discard = isDiscardIdent(ast, pair.lhs) and
                node_tags[pair.rhs] == .identifier;
            return if (is_discard) .discard_identifier else .other;
        },
        else => return .other,
    }
}
/// A builtin call counts as `.trap_call` only when it is exactly `@trap`
/// with zero arguments; anything else is `.other`.
fn categorizeBuiltinCall(
    ast: *const Ast,
    builtin_token: Ast.TokenIndex,
    params: []const Ast.Node.Index,
) StmtCategory {
    const is_trap = params.len == 0 and
        std.mem.eql(u8, ast.tokenSlice(builtin_token), "@trap");
    return if (is_trap) .trap_call else .other;
}
/// Returns true when `node` is an identifier whose spelling is exactly `_`.
fn isDiscardIdent(ast: *const Ast, node: Ast.Node.Index) bool {
    if (ast.nodes.items(.tag)[node] != .identifier) return false;
    const name = ast.tokenSlice(ast.nodes.items(.main_token)[node]);
    return std.mem.eql(u8, name, "_");
}