mirror of https://github.com/ziglang/zig.git
introduce ZON: Zig Object Notation
* std.zig.parse is moved to std.zig.Ast.parse
* the new function has an additional parameter that requires passing Mode.zig or Mode.zon
* moved parser.zig code to Parse.zig
* added parseZon function next to parseRoot function
parent: 03cdb4fb58
commit: 873bb29c98
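A minimal usage sketch of the new API described above (not part of this commit's diff; the allocator setup and sample source are illustrative only). The old free function std.zig.parse(gpa, source) becomes std.zig.Ast.parse(gpa, source, mode), where mode selects the grammar:

const std = @import("std");

// Illustrative example; not from the commit itself.
pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    const gpa = gpa_state.allocator();

    const source: [:0]const u8 = "const x = 42;";

    // Before this commit: var tree = try std.zig.parse(gpa, source);
    // After: the Mode argument selects the grammar (.zig or .zon).
    var tree = try std.zig.Ast.parse(gpa, source, .zig);
    defer tree.deinit(gpa);

    std.debug.print("parse errors: {d}\n", .{tree.errors.len});
}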
@@ -513,7 +513,7 @@ set(ZIG_STAGE2_SOURCES
     "${CMAKE_SOURCE_DIR}/lib/std/zig/Ast.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig/CrossTarget.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig/c_builtins.zig"
-    "${CMAKE_SOURCE_DIR}/lib/std/zig/parse.zig"
+    "${CMAKE_SOURCE_DIR}/lib/std/zig/Parse.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig/render.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig/string_literal.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig/system.zig"
@@ -367,5 +367,5 @@ test "OptionsStep" {
         \\
     , options.contents.items);

-    _ = try std.zig.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(0));
+    _ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(0), .zig);
 }
@@ -8,7 +8,6 @@ pub const Tokenizer = tokenizer.Tokenizer;
 pub const fmtId = fmt.fmtId;
 pub const fmtEscapes = fmt.fmtEscapes;
 pub const isValidId = fmt.isValidId;
-pub const parse = @import("zig/parse.zig").parse;
 pub const string_literal = @import("zig/string_literal.zig");
 pub const number_literal = @import("zig/number_literal.zig");
 pub const primitives = @import("zig/primitives.zig");
@@ -11,13 +11,6 @@ extra_data: []Node.Index,

 errors: []const Error,

-const std = @import("../std.zig");
-const assert = std.debug.assert;
-const testing = std.testing;
-const mem = std.mem;
-const Token = std.zig.Token;
-const Ast = @This();
-
 pub const TokenIndex = u32;
 pub const ByteOffset = u32;

@@ -34,7 +27,7 @@ pub const Location = struct {
     line_end: usize,
 };

-pub fn deinit(tree: *Ast, gpa: mem.Allocator) void {
+pub fn deinit(tree: *Ast, gpa: Allocator) void {
     tree.tokens.deinit(gpa);
     tree.nodes.deinit(gpa);
     gpa.free(tree.extra_data);
@@ -48,11 +41,69 @@ pub const RenderError = error{
     OutOfMemory,
 };

+pub const Mode = enum { zig, zon };
+
+/// Result should be freed with tree.deinit() when there are
+/// no more references to any of the tokens or nodes.
+pub fn parse(gpa: Allocator, source: [:0]const u8, mode: Mode) Allocator.Error!Ast {
+    var tokens = Ast.TokenList{};
+    defer tokens.deinit(gpa);
+
+    // Empirically, the zig std lib has an 8:1 ratio of source bytes to token count.
+    const estimated_token_count = source.len / 8;
+    try tokens.ensureTotalCapacity(gpa, estimated_token_count);
+
+    var tokenizer = std.zig.Tokenizer.init(source);
+    while (true) {
+        const token = tokenizer.next();
+        try tokens.append(gpa, .{
+            .tag = token.tag,
+            .start = @intCast(u32, token.loc.start),
+        });
+        if (token.tag == .eof) break;
+    }
+
+    var parser: Parse = .{
+        .source = source,
+        .gpa = gpa,
+        .token_tags = tokens.items(.tag),
+        .token_starts = tokens.items(.start),
+        .errors = .{},
+        .nodes = .{},
+        .extra_data = .{},
+        .scratch = .{},
+        .tok_i = 0,
+    };
+    defer parser.errors.deinit(gpa);
+    defer parser.nodes.deinit(gpa);
+    defer parser.extra_data.deinit(gpa);
+    defer parser.scratch.deinit(gpa);
+
+    // Empirically, Zig source code has a 2:1 ratio of tokens to AST nodes.
+    // Make sure at least 1 so we can use appendAssumeCapacity on the root node below.
+    const estimated_node_count = (tokens.len + 2) / 2;
+    try parser.nodes.ensureTotalCapacity(gpa, estimated_node_count);
+
+    switch (mode) {
+        .zig => try parser.parseRoot(),
+        .zon => try parser.parseZon(),
+    }
+
+    // TODO experiment with compacting the MultiArrayList slices here
+    return Ast{
+        .source = source,
+        .tokens = tokens.toOwnedSlice(),
+        .nodes = parser.nodes.toOwnedSlice(),
+        .extra_data = try parser.extra_data.toOwnedSlice(gpa),
+        .errors = try parser.errors.toOwnedSlice(gpa),
+    };
+}
+
 /// `gpa` is used for allocating the resulting formatted source code, as well as
 /// for allocating extra stack memory if needed, because this function utilizes recursion.
 /// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006.
 /// Caller owns the returned slice of bytes, allocated with `gpa`.
-pub fn render(tree: Ast, gpa: mem.Allocator) RenderError![]u8 {
+pub fn render(tree: Ast, gpa: Allocator) RenderError![]u8 {
     var buffer = std.ArrayList(u8).init(gpa);
     defer buffer.deinit();

@@ -3347,3 +3398,12 @@ pub const Node = struct {
         rparen: TokenIndex,
     };
 };
+
+const std = @import("../std.zig");
+const assert = std.debug.assert;
+const testing = std.testing;
+const mem = std.mem;
+const Token = std.zig.Token;
+const Ast = @This();
+const Allocator = std.mem.Allocator;
+const Parse = @import("Parse.zig");
lib/std/zig/Parse.zig (new file, 3816 lines)
File diff suppressed because it is too large.
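The new Parse.zig contains the added parseZon function reached via the .zon mode. A hedged sketch of exercising it (not part of the diff; the sample ZON document is illustrative, and the error handling simply reports whatever the parser recorded):

const std = @import("std");

// Illustrative example; not from the commit itself.
pub fn reportZonErrors(gpa: std.mem.Allocator) !void {
    // A small ZON document: an anonymous struct literal.
    const source: [:0]const u8 =
        \\.{
        \\    .name = "example",
        \\    .tags = .{ "a", "b" },
        \\}
    ;
    var tree = try std.zig.Ast.parse(gpa, source, .zon);
    defer tree.deinit(gpa);

    // Report any recorded parse errors by tag name.
    for (tree.errors) |parse_error| {
        std.debug.print("zon parse error: {s}\n", .{@tagName(parse_error.tag)});
    }
}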
@@ -6073,7 +6073,7 @@ var fixed_buffer_mem: [100 * 1024]u8 = undefined;
 fn testParse(source: [:0]const u8, allocator: mem.Allocator, anything_changed: *bool) ![]u8 {
     const stderr = io.getStdErr().writer();

-    var tree = try std.zig.parse(allocator, source);
+    var tree = try std.zig.Ast.parse(allocator, source, .zig);
     defer tree.deinit(allocator);

     for (tree.errors) |parse_error| {
@@ -6124,7 +6124,7 @@ fn testCanonical(source: [:0]const u8) !void {
 const Error = std.zig.Ast.Error.Tag;

 fn testError(source: [:0]const u8, expected_errors: []const Error) !void {
-    var tree = try std.zig.parse(std.testing.allocator, source);
+    var tree = try std.zig.Ast.parse(std.testing.allocator, source, .zig);
     defer tree.deinit(std.testing.allocator);

     std.testing.expectEqual(expected_errors.len, tree.errors.len) catch |err| {
@@ -1,7 +1,6 @@
 const std = @import("std");
 const mem = std.mem;
 const Tokenizer = std.zig.Tokenizer;
-const Parser = std.zig.Parser;
 const io = std.io;
 const fmtIntSizeBin = std.fmt.fmtIntSizeBin;

@@ -34,6 +33,6 @@ pub fn main() !void {
 fn testOnce() usize {
     var fixed_buf_alloc = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
     var allocator = fixed_buf_alloc.allocator();
-    _ = std.zig.parse(allocator, source) catch @panic("parse failure");
+    _ = std.zig.Ast.parse(allocator, source, .zig) catch @panic("parse failure");
     return fixed_buf_alloc.end_index;
 }
@@ -2057,7 +2057,7 @@ pub const File = struct {
         if (file.tree_loaded) return &file.tree;

         const source = try file.getSource(gpa);
-        file.tree = try std.zig.parse(gpa, source.bytes);
+        file.tree = try Ast.parse(gpa, source.bytes, .zig);
         file.tree_loaded = true;
         return &file.tree;
     }
@@ -3662,7 +3662,7 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
     file.source = source;
     file.source_loaded = true;

-    file.tree = try std.zig.parse(gpa, source);
+    file.tree = try Ast.parse(gpa, source, .zig);
     defer if (!file.tree_loaded) file.tree.deinit(gpa);

     if (file.tree.errors.len != 0) {
@@ -3977,7 +3977,7 @@ pub fn populateBuiltinFile(mod: *Module) !void {
         else => |e| return e,
     }

-    file.tree = try std.zig.parse(gpa, file.source);
+    file.tree = try Ast.parse(gpa, file.source, .zig);
     file.tree_loaded = true;
     assert(file.tree.errors.len == 0); // builtin.zig must parse

src/main.zig (10 changed lines)
@@ -4361,7 +4361,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
         };
         defer gpa.free(source_code);

-        var tree = std.zig.parse(gpa, source_code) catch |err| {
+        var tree = Ast.parse(gpa, source_code, .zig) catch |err| {
             fatal("error parsing stdin: {}", .{err});
         };
         defer tree.deinit(gpa);
@@ -4566,7 +4566,7 @@ fn fmtPathFile(
     // Add to set after no longer possible to get error.IsDir.
     if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;

-    var tree = try std.zig.parse(fmt.gpa, source_code);
+    var tree = try Ast.parse(fmt.gpa, source_code, .zig);
     defer tree.deinit(fmt.gpa);

     try printErrsMsgToStdErr(fmt.gpa, fmt.arena, tree.errors, tree, file_path, fmt.color);
@@ -5312,7 +5312,7 @@ pub fn cmdAstCheck(
     file.pkg = try Package.create(gpa, "root", null, file.sub_file_path);
     defer file.pkg.destroy(gpa);

-    file.tree = try std.zig.parse(gpa, file.source);
+    file.tree = try Ast.parse(gpa, file.source, .zig);
     file.tree_loaded = true;
     defer file.tree.deinit(gpa);

@@ -5438,7 +5438,7 @@ pub fn cmdChangelist(
     file.source = source;
     file.source_loaded = true;

-    file.tree = try std.zig.parse(gpa, file.source);
+    file.tree = try Ast.parse(gpa, file.source, .zig);
     file.tree_loaded = true;
     defer file.tree.deinit(gpa);

@@ -5476,7 +5476,7 @@ pub fn cmdChangelist(
     if (new_amt != new_stat.size)
         return error.UnexpectedEndOfFile;

-    var new_tree = try std.zig.parse(gpa, new_source);
+    var new_tree = try Ast.parse(gpa, new_source, .zig);
     defer new_tree.deinit(gpa);

     try printErrsMsgToStdErr(gpa, arena, new_tree.errors, new_tree, new_source_file, .auto);