Mirror of https://github.com/ziglang/zig.git
std.json: move tests to json/test.zig file
This accomplishes two things:
* Works around #8442 by putting in stage1-specific logic that disables all the std.json tests.
* Slightly reduces the installation size of zig, since std lib files ending in "test.zig" are excluded from being installed.
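For context, the "stage1-specific logic" mentioned above amounts to gating the reference to the moved test file behind a comptime backend check, so the affected compiler stage never analyzes the std.json tests. Below is only a minimal sketch of that pattern, assuming a builtin.zig_backend comparison; the exact condition and its placement in the actual commit may differ:

const builtin = @import("builtin");

test {
    // Hypothetical guard, not the committed code: reference the moved tests
    // only when not building with the stage1 backend, so that backend never
    // analyzes them (the #8442 workaround described above). The condition
    // used by the real commit may be different or inverted.
    if (builtin.zig_backend != .stage1) {
        _ = @import("json/test.zig");
    }
}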
This commit is contained in:
parent 0101a5f75e
commit c9006d9479
1120 lib/std/json.zig
File diff suppressed because it is too large
@@ -6,6 +6,23 @@
const std = @import("../std.zig");
const json = std.json;
const testing = std.testing;
const TokenStream = std.json.TokenStream;
const parse = std.json.parse;
const ParseOptions = std.json.ParseOptions;
const parseFree = std.json.parseFree;
const Parser = std.json.Parser;
const mem = std.mem;
const writeStream = std.json.writeStream;
const Value = std.json.Value;
const StringifyOptions = std.json.StringifyOptions;
const stringify = std.json.stringify;
const stringifyAlloc = std.json.stringifyAlloc;
const StreamingParser = std.json.StreamingParser;
const Token = std.json.Token;
const validate = std.json.validate;
const Array = std.json.Array;
const ObjectMap = std.json.ObjectMap;
const assert = std.debug.assert;

fn testNonStreaming(s: []const u8) !void {
    var p = json.Parser.init(testing.allocator, false);
@@ -2004,3 +2021,809 @@ test "out of UTF-16 range" {
    try utf8Error("\"\xfe\x80\x80\x80\"");
    try utf8Error("\"\xff\x80\x80\x80\"");
}

test "parse" {
    var ts = TokenStream.init("false");
    try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{}));
    ts = TokenStream.init("true");
    try testing.expectEqual(true, try parse(bool, &ts, ParseOptions{}));
    ts = TokenStream.init("1");
    try testing.expectEqual(@as(u1, 1), try parse(u1, &ts, ParseOptions{}));
    ts = TokenStream.init("50");
    try testing.expectError(error.Overflow, parse(u1, &ts, ParseOptions{}));
    ts = TokenStream.init("42");
    try testing.expectEqual(@as(u64, 42), try parse(u64, &ts, ParseOptions{}));
    ts = TokenStream.init("42.0");
    try testing.expectEqual(@as(f64, 42), try parse(f64, &ts, ParseOptions{}));
    ts = TokenStream.init("null");
    try testing.expectEqual(@as(?bool, null), try parse(?bool, &ts, ParseOptions{}));
    ts = TokenStream.init("true");
    try testing.expectEqual(@as(?bool, true), try parse(?bool, &ts, ParseOptions{}));

    ts = TokenStream.init("\"foo\"");
    try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{}));
    ts = TokenStream.init("[102, 111, 111]");
    try testing.expectEqual(@as([3]u8, "foo".*), try parse([3]u8, &ts, ParseOptions{}));
    ts = TokenStream.init("[]");
    try testing.expectEqual(@as([0]u8, undefined), try parse([0]u8, &ts, ParseOptions{}));
}

test "parse into enum" {
    const T = enum(u32) {
        Foo = 42,
        Bar,
        @"with\\escape",
    };
    var ts = TokenStream.init("\"Foo\"");
    try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{}));
    ts = TokenStream.init("42");
    try testing.expectEqual(@as(T, .Foo), try parse(T, &ts, ParseOptions{}));
    ts = TokenStream.init("\"with\\\\escape\"");
    try testing.expectEqual(@as(T, .@"with\\escape"), try parse(T, &ts, ParseOptions{}));
    ts = TokenStream.init("5");
    try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{}));
    ts = TokenStream.init("\"Qux\"");
    try testing.expectError(error.InvalidEnumTag, parse(T, &ts, ParseOptions{}));
}

test "parse with trailing data" {
    var ts = TokenStream.init("falsed");
    try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = true }));
    ts = TokenStream.init("falsed");
    try testing.expectError(error.InvalidTopLevelTrailing, parse(bool, &ts, ParseOptions{ .allow_trailing_data = false }));
    // trailing whitespace is okay
    ts = TokenStream.init("false \n");
    try testing.expectEqual(false, try parse(bool, &ts, ParseOptions{ .allow_trailing_data = false }));
}

test "parse into that allocates a slice" {
    var ts = TokenStream.init("\"foo\"");
    try testing.expectError(error.AllocatorRequired, parse([]u8, &ts, ParseOptions{}));

    const options = ParseOptions{ .allocator = testing.allocator };
    {
        ts = TokenStream.init("\"foo\"");
        const r = try parse([]u8, &ts, options);
        defer parseFree([]u8, r, options);
        try testing.expectEqualSlices(u8, "foo", r);
    }
    {
        ts = TokenStream.init("[102, 111, 111]");
        const r = try parse([]u8, &ts, options);
        defer parseFree([]u8, r, options);
        try testing.expectEqualSlices(u8, "foo", r);
    }
    {
        ts = TokenStream.init("\"with\\\\escape\"");
        const r = try parse([]u8, &ts, options);
        defer parseFree([]u8, r, options);
        try testing.expectEqualSlices(u8, "with\\escape", r);
    }
}

test "parse into tagged union" {
    {
        const T = union(enum) {
            int: i32,
            float: f64,
            string: []const u8,
        };
        var ts = TokenStream.init("1.5");
        try testing.expectEqual(T{ .float = 1.5 }, try parse(T, &ts, ParseOptions{}));
    }

    { // failing allocations should be bubbled up instantly without trying next member
        var fail_alloc = testing.FailingAllocator.init(testing.allocator, 0);
        const options = ParseOptions{ .allocator = fail_alloc.allocator() };
        const T = union(enum) {
            // both fields here match the input
            string: []const u8,
            array: [3]u8,
        };
        var ts = TokenStream.init("[1,2,3]");
        try testing.expectError(error.OutOfMemory, parse(T, &ts, options));
    }

    {
        // if multiple matches possible, takes first option
        const T = union(enum) {
            x: u8,
            y: u8,
        };
        var ts = TokenStream.init("42");
        try testing.expectEqual(T{ .x = 42 }, try parse(T, &ts, ParseOptions{}));
    }

    { // needs to back out when first union member doesn't match
        const T = union(enum) {
            A: struct { x: u32 },
            B: struct { y: u32 },
        };
        var ts = TokenStream.init("{\"y\":42}");
        try testing.expectEqual(T{ .B = .{ .y = 42 } }, try parse(T, &ts, ParseOptions{}));
    }
}

test "parse union bubbles up AllocatorRequired" {
    { // string member first in union (and not matching)
        const T = union(enum) {
            string: []const u8,
            int: i32,
        };
        var ts = TokenStream.init("42");
        try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{}));
    }

    { // string member not first in union (and matching)
        const T = union(enum) {
            int: i32,
            float: f64,
            string: []const u8,
        };
        var ts = TokenStream.init("\"foo\"");
        try testing.expectError(error.AllocatorRequired, parse(T, &ts, ParseOptions{}));
    }
}

test "parseFree descends into tagged union" {
|
||||
var fail_alloc = testing.FailingAllocator.init(testing.allocator, 1);
|
||||
const options = ParseOptions{ .allocator = fail_alloc.allocator() };
|
||||
const T = union(enum) {
|
||||
int: i32,
|
||||
float: f64,
|
||||
string: []const u8,
|
||||
};
|
||||
// use a string with unicode escape so we know result can't be a reference to global constant
|
||||
var ts = TokenStream.init("\"with\\u0105unicode\"");
|
||||
const r = try parse(T, &ts, options);
|
||||
try testing.expectEqual(std.meta.Tag(T).string, @as(std.meta.Tag(T), r));
|
||||
try testing.expectEqualSlices(u8, "withąunicode", r.string);
|
||||
try testing.expectEqual(@as(usize, 0), fail_alloc.deallocations);
|
||||
parseFree(T, r, options);
|
||||
try testing.expectEqual(@as(usize, 1), fail_alloc.deallocations);
|
||||
}
|
||||
|
||||
test "parse with comptime field" {
|
||||
{
|
||||
const T = struct {
|
||||
comptime a: i32 = 0,
|
||||
b: bool,
|
||||
};
|
||||
var ts = TokenStream.init(
|
||||
\\{
|
||||
\\ "a": 0,
|
||||
\\ "b": true
|
||||
\\}
|
||||
);
|
||||
try testing.expectEqual(T{ .a = 0, .b = true }, try parse(T, &ts, ParseOptions{}));
|
||||
}
|
||||
|
||||
{ // string comptime values currently require an allocator
|
||||
const T = union(enum) {
|
||||
foo: struct {
|
||||
comptime kind: []const u8 = "boolean",
|
||||
b: bool,
|
||||
},
|
||||
bar: struct {
|
||||
comptime kind: []const u8 = "float",
|
||||
b: f64,
|
||||
},
|
||||
};
|
||||
|
||||
const options = ParseOptions{
|
||||
.allocator = std.testing.allocator,
|
||||
};
|
||||
|
||||
var ts = TokenStream.init(
|
||||
\\{
|
||||
\\ "kind": "float",
|
||||
\\ "b": 1.0
|
||||
\\}
|
||||
);
|
||||
const r = try parse(T, &ts, options);
|
||||
|
||||
// check that parseFree doesn't try to free comptime fields
|
||||
parseFree(T, r, options);
|
||||
}
|
||||
}
|
||||
|
||||
test "parse into struct with no fields" {
|
||||
const T = struct {};
|
||||
var ts = TokenStream.init("{}");
|
||||
try testing.expectEqual(T{}, try parse(T, &ts, ParseOptions{}));
|
||||
}
|
||||
|
||||
test "parse into struct with misc fields" {
|
||||
@setEvalBranchQuota(10000);
|
||||
const options = ParseOptions{ .allocator = testing.allocator };
|
||||
const T = struct {
|
||||
int: i64,
|
||||
float: f64,
|
||||
@"with\\escape": bool,
|
||||
@"withąunicode😂": bool,
|
||||
language: []const u8,
|
||||
optional: ?bool,
|
||||
default_field: i32 = 42,
|
||||
static_array: [3]f64,
|
||||
dynamic_array: []f64,
|
||||
|
||||
complex: struct {
|
||||
nested: []const u8,
|
||||
},
|
||||
|
||||
veryComplex: []struct {
|
||||
foo: []const u8,
|
||||
},
|
||||
|
||||
a_union: Union,
|
||||
const Union = union(enum) {
|
||||
x: u8,
|
||||
float: f64,
|
||||
string: []const u8,
|
||||
};
|
||||
};
|
||||
var ts = TokenStream.init(
|
||||
\\{
|
||||
\\ "int": 420,
|
||||
\\ "float": 3.14,
|
||||
\\ "with\\escape": true,
|
||||
\\ "with\u0105unicode\ud83d\ude02": false,
|
||||
\\ "language": "zig",
|
||||
\\ "optional": null,
|
||||
\\ "static_array": [66.6, 420.420, 69.69],
|
||||
\\ "dynamic_array": [66.6, 420.420, 69.69],
|
||||
\\ "complex": {
|
||||
\\ "nested": "zig"
|
||||
\\ },
|
||||
\\ "veryComplex": [
|
||||
\\ {
|
||||
\\ "foo": "zig"
|
||||
\\ }, {
|
||||
\\ "foo": "rocks"
|
||||
\\ }
|
||||
\\ ],
|
||||
\\ "a_union": 100000
|
||||
\\}
|
||||
);
|
||||
const r = try parse(T, &ts, options);
|
||||
defer parseFree(T, r, options);
|
||||
try testing.expectEqual(@as(i64, 420), r.int);
|
||||
try testing.expectEqual(@as(f64, 3.14), r.float);
|
||||
try testing.expectEqual(true, r.@"with\\escape");
|
||||
try testing.expectEqual(false, r.@"withąunicode😂");
|
||||
try testing.expectEqualSlices(u8, "zig", r.language);
|
||||
try testing.expectEqual(@as(?bool, null), r.optional);
|
||||
try testing.expectEqual(@as(i32, 42), r.default_field);
|
||||
try testing.expectEqual(@as(f64, 66.6), r.static_array[0]);
|
||||
try testing.expectEqual(@as(f64, 420.420), r.static_array[1]);
|
||||
try testing.expectEqual(@as(f64, 69.69), r.static_array[2]);
|
||||
try testing.expectEqual(@as(usize, 3), r.dynamic_array.len);
|
||||
try testing.expectEqual(@as(f64, 66.6), r.dynamic_array[0]);
|
||||
try testing.expectEqual(@as(f64, 420.420), r.dynamic_array[1]);
|
||||
try testing.expectEqual(@as(f64, 69.69), r.dynamic_array[2]);
|
||||
try testing.expectEqualSlices(u8, r.complex.nested, "zig");
|
||||
try testing.expectEqualSlices(u8, "zig", r.veryComplex[0].foo);
|
||||
try testing.expectEqualSlices(u8, "rocks", r.veryComplex[1].foo);
|
||||
try testing.expectEqual(T.Union{ .float = 100000 }, r.a_union);
|
||||
}
|
||||
|
||||
test "parse into struct with strings and arrays with sentinels" {
|
||||
@setEvalBranchQuota(10000);
|
||||
const options = ParseOptions{ .allocator = testing.allocator };
|
||||
const T = struct {
|
||||
language: [:0]const u8,
|
||||
language_without_sentinel: []const u8,
|
||||
data: [:99]const i32,
|
||||
simple_data: []const i32,
|
||||
};
|
||||
var ts = TokenStream.init(
|
||||
\\{
|
||||
\\ "language": "zig",
|
||||
\\ "language_without_sentinel": "zig again!",
|
||||
\\ "data": [1, 2, 3],
|
||||
\\ "simple_data": [4, 5, 6]
|
||||
\\}
|
||||
);
|
||||
const r = try parse(T, &ts, options);
|
||||
defer parseFree(T, r, options);
|
||||
|
||||
try testing.expectEqualSentinel(u8, 0, "zig", r.language);
|
||||
|
||||
const data = [_:99]i32{ 1, 2, 3 };
|
||||
try testing.expectEqualSentinel(i32, 99, data[0..data.len], r.data);
|
||||
|
||||
// Make sure that arrays who aren't supposed to have a sentinel still parse without one.
|
||||
try testing.expectEqual(@as(?i32, null), std.meta.sentinel(@TypeOf(r.simple_data)));
|
||||
try testing.expectEqual(@as(?u8, null), std.meta.sentinel(@TypeOf(r.language_without_sentinel)));
|
||||
}
|
||||
|
||||
test "parse into struct with duplicate field" {
|
||||
// allow allocator to detect double frees by keeping bucket in use
|
||||
const ballast = try testing.allocator.alloc(u64, 1);
|
||||
defer testing.allocator.free(ballast);
|
||||
|
||||
const options_first = ParseOptions{ .allocator = testing.allocator, .duplicate_field_behavior = .UseFirst };
|
||||
|
||||
const options_last = ParseOptions{
|
||||
.allocator = testing.allocator,
|
||||
.duplicate_field_behavior = .UseLast,
|
||||
};
|
||||
|
||||
const str = "{ \"a\": 1, \"a\": 0.25 }";
|
||||
|
||||
const T1 = struct { a: *u64 };
|
||||
// both .UseFirst and .UseLast should fail because second "a" value isn't a u64
|
||||
var ts = TokenStream.init(str);
|
||||
try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_first));
|
||||
ts = TokenStream.init(str);
|
||||
try testing.expectError(error.InvalidNumber, parse(T1, &ts, options_last));
|
||||
|
||||
const T2 = struct { a: f64 };
|
||||
ts = TokenStream.init(str);
|
||||
try testing.expectEqual(T2{ .a = 1.0 }, try parse(T2, &ts, options_first));
|
||||
ts = TokenStream.init(str);
|
||||
try testing.expectEqual(T2{ .a = 0.25 }, try parse(T2, &ts, options_last));
|
||||
|
||||
const T3 = struct { comptime a: f64 = 1.0 };
|
||||
// .UseFirst should succeed because second "a" value is unconditionally ignored (even though != 1.0)
|
||||
const t3 = T3{ .a = 1.0 };
|
||||
ts = TokenStream.init(str);
|
||||
try testing.expectEqual(t3, try parse(T3, &ts, options_first));
|
||||
// .UseLast should fail because second "a" value is 0.25 which is not equal to default value of 1.0
|
||||
ts = TokenStream.init(str);
|
||||
try testing.expectError(error.UnexpectedValue, parse(T3, &ts, options_last));
|
||||
}
|
||||
|
||||
test "parse into struct ignoring unknown fields" {
|
||||
const T = struct {
|
||||
int: i64,
|
||||
language: []const u8,
|
||||
};
|
||||
|
||||
const ops = ParseOptions{
|
||||
.allocator = testing.allocator,
|
||||
.ignore_unknown_fields = true,
|
||||
};
|
||||
|
||||
var ts = TokenStream.init(
|
||||
\\{
|
||||
\\ "int": 420,
|
||||
\\ "float": 3.14,
|
||||
\\ "with\\escape": true,
|
||||
\\ "with\u0105unicode\ud83d\ude02": false,
|
||||
\\ "optional": null,
|
||||
\\ "static_array": [66.6, 420.420, 69.69],
|
||||
\\ "dynamic_array": [66.6, 420.420, 69.69],
|
||||
\\ "complex": {
|
||||
\\ "nested": "zig"
|
||||
\\ },
|
||||
\\ "veryComplex": [
|
||||
\\ {
|
||||
\\ "foo": "zig"
|
||||
\\ }, {
|
||||
\\ "foo": "rocks"
|
||||
\\ }
|
||||
\\ ],
|
||||
\\ "a_union": 100000,
|
||||
\\ "language": "zig"
|
||||
\\}
|
||||
);
|
||||
const r = try parse(T, &ts, ops);
|
||||
defer parseFree(T, r, ops);
|
||||
|
||||
try testing.expectEqual(@as(i64, 420), r.int);
|
||||
try testing.expectEqualSlices(u8, "zig", r.language);
|
||||
}
|
||||
|
||||
const ParseIntoRecursiveUnionDefinitionValue = union(enum) {
|
||||
integer: i64,
|
||||
array: []const ParseIntoRecursiveUnionDefinitionValue,
|
||||
};
|
||||
|
||||
test "parse into recursive union definition" {
|
||||
const T = struct {
|
||||
values: ParseIntoRecursiveUnionDefinitionValue,
|
||||
};
|
||||
const ops = ParseOptions{ .allocator = testing.allocator };
|
||||
|
||||
var ts = TokenStream.init("{\"values\":[58]}");
|
||||
const r = try parse(T, &ts, ops);
|
||||
defer parseFree(T, r, ops);
|
||||
|
||||
try testing.expectEqual(@as(i64, 58), r.values.array[0].integer);
|
||||
}
|
||||
|
||||
const ParseIntoDoubleRecursiveUnionValueFirst = union(enum) {
|
||||
integer: i64,
|
||||
array: []const ParseIntoDoubleRecursiveUnionValueSecond,
|
||||
};
|
||||
|
||||
const ParseIntoDoubleRecursiveUnionValueSecond = union(enum) {
|
||||
boolean: bool,
|
||||
array: []const ParseIntoDoubleRecursiveUnionValueFirst,
|
||||
};
|
||||
|
||||
test "parse into double recursive union definition" {
|
||||
const T = struct {
|
||||
values: ParseIntoDoubleRecursiveUnionValueFirst,
|
||||
};
|
||||
const ops = ParseOptions{ .allocator = testing.allocator };
|
||||
|
||||
var ts = TokenStream.init("{\"values\":[[58]]}");
|
||||
const r = try parse(T, &ts, ops);
|
||||
defer parseFree(T, r, ops);
|
||||
|
||||
try testing.expectEqual(@as(i64, 58), r.values.array[0].array[0].integer);
|
||||
}
|
||||
|
||||
test "json.parser.dynamic" {
|
||||
var p = Parser.init(testing.allocator, false);
|
||||
defer p.deinit();
|
||||
|
||||
const s =
|
||||
\\{
|
||||
\\ "Image": {
|
||||
\\ "Width": 800,
|
||||
\\ "Height": 600,
|
||||
\\ "Title": "View from 15th Floor",
|
||||
\\ "Thumbnail": {
|
||||
\\ "Url": "http://www.example.com/image/481989943",
|
||||
\\ "Height": 125,
|
||||
\\ "Width": 100
|
||||
\\ },
|
||||
\\ "Animated" : false,
|
||||
\\ "IDs": [116, 943, 234, 38793],
|
||||
\\ "ArrayOfObject": [{"n": "m"}],
|
||||
\\ "double": 1.3412,
|
||||
\\ "LargeInt": 18446744073709551615
|
||||
\\ }
|
||||
\\}
|
||||
;
|
||||
|
||||
var tree = try p.parse(s);
|
||||
defer tree.deinit();
|
||||
|
||||
var root = tree.root;
|
||||
|
||||
var image = root.Object.get("Image").?;
|
||||
|
||||
const width = image.Object.get("Width").?;
|
||||
try testing.expect(width.Integer == 800);
|
||||
|
||||
const height = image.Object.get("Height").?;
|
||||
try testing.expect(height.Integer == 600);
|
||||
|
||||
const title = image.Object.get("Title").?;
|
||||
try testing.expect(mem.eql(u8, title.String, "View from 15th Floor"));
|
||||
|
||||
const animated = image.Object.get("Animated").?;
|
||||
try testing.expect(animated.Bool == false);
|
||||
|
||||
const array_of_object = image.Object.get("ArrayOfObject").?;
|
||||
try testing.expect(array_of_object.Array.items.len == 1);
|
||||
|
||||
const obj0 = array_of_object.Array.items[0].Object.get("n").?;
|
||||
try testing.expect(mem.eql(u8, obj0.String, "m"));
|
||||
|
||||
const double = image.Object.get("double").?;
|
||||
try testing.expect(double.Float == 1.3412);
|
||||
|
||||
const large_int = image.Object.get("LargeInt").?;
|
||||
try testing.expect(mem.eql(u8, large_int.NumberString, "18446744073709551615"));
|
||||
}
|
||||
|
||||
test "write json then parse it" {
|
||||
var out_buffer: [1000]u8 = undefined;
|
||||
|
||||
var fixed_buffer_stream = std.io.fixedBufferStream(&out_buffer);
|
||||
const out_stream = fixed_buffer_stream.writer();
|
||||
var jw = writeStream(out_stream, 4);
|
||||
|
||||
try jw.beginObject();
|
||||
|
||||
try jw.objectField("f");
|
||||
try jw.emitBool(false);
|
||||
|
||||
try jw.objectField("t");
|
||||
try jw.emitBool(true);
|
||||
|
||||
try jw.objectField("int");
|
||||
try jw.emitNumber(1234);
|
||||
|
||||
try jw.objectField("array");
|
||||
try jw.beginArray();
|
||||
|
||||
try jw.arrayElem();
|
||||
try jw.emitNull();
|
||||
|
||||
try jw.arrayElem();
|
||||
try jw.emitNumber(12.34);
|
||||
|
||||
try jw.endArray();
|
||||
|
||||
try jw.objectField("str");
|
||||
try jw.emitString("hello");
|
||||
|
||||
try jw.endObject();
|
||||
|
||||
var parser = Parser.init(testing.allocator, false);
|
||||
defer parser.deinit();
|
||||
var tree = try parser.parse(fixed_buffer_stream.getWritten());
|
||||
defer tree.deinit();
|
||||
|
||||
try testing.expect(tree.root.Object.get("f").?.Bool == false);
|
||||
try testing.expect(tree.root.Object.get("t").?.Bool == true);
|
||||
try testing.expect(tree.root.Object.get("int").?.Integer == 1234);
|
||||
try testing.expect(tree.root.Object.get("array").?.Array.items[0].Null == {});
|
||||
try testing.expect(tree.root.Object.get("array").?.Array.items[1].Float == 12.34);
|
||||
try testing.expect(mem.eql(u8, tree.root.Object.get("str").?.String, "hello"));
|
||||
}
|
||||
|
||||
fn testParse(arena_allocator: std.mem.Allocator, json_str: []const u8) !Value {
|
||||
var p = Parser.init(arena_allocator, false);
|
||||
return (try p.parse(json_str)).root;
|
||||
}
|
||||
|
||||
test "parsing empty string gives appropriate error" {
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
try testing.expectError(error.UnexpectedEndOfJson, testParse(arena_allocator.allocator(), ""));
|
||||
}
|
||||
|
||||
test "integer after float has proper type" {
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const parsed = try testParse(arena_allocator.allocator(),
|
||||
\\{
|
||||
\\ "float": 3.14,
|
||||
\\ "ints": [1, 2, 3]
|
||||
\\}
|
||||
);
|
||||
try std.testing.expect(parsed.Object.get("ints").?.Array.items[0] == .Integer);
|
||||
}
|
||||
|
||||
test "parse exponential into int" {
|
||||
const T = struct { int: i64 };
|
||||
var ts = TokenStream.init("{ \"int\": 4.2e2 }");
|
||||
const r = try parse(T, &ts, ParseOptions{});
|
||||
try testing.expectEqual(@as(i64, 420), r.int);
|
||||
ts = TokenStream.init("{ \"int\": 0.042e2 }");
|
||||
try testing.expectError(error.InvalidNumber, parse(T, &ts, ParseOptions{}));
|
||||
ts = TokenStream.init("{ \"int\": 18446744073709551616.0 }");
|
||||
try testing.expectError(error.Overflow, parse(T, &ts, ParseOptions{}));
|
||||
}
|
||||
|
||||
test "escaped characters" {
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const input =
|
||||
\\{
|
||||
\\ "backslash": "\\",
|
||||
\\ "forwardslash": "\/",
|
||||
\\ "newline": "\n",
|
||||
\\ "carriagereturn": "\r",
|
||||
\\ "tab": "\t",
|
||||
\\ "formfeed": "\f",
|
||||
\\ "backspace": "\b",
|
||||
\\ "doublequote": "\"",
|
||||
\\ "unicode": "\u0105",
|
||||
\\ "surrogatepair": "\ud83d\ude02"
|
||||
\\}
|
||||
;
|
||||
|
||||
const obj = (try testParse(arena_allocator.allocator(), input)).Object;
|
||||
|
||||
try testing.expectEqualSlices(u8, obj.get("backslash").?.String, "\\");
|
||||
try testing.expectEqualSlices(u8, obj.get("forwardslash").?.String, "/");
|
||||
try testing.expectEqualSlices(u8, obj.get("newline").?.String, "\n");
|
||||
try testing.expectEqualSlices(u8, obj.get("carriagereturn").?.String, "\r");
|
||||
try testing.expectEqualSlices(u8, obj.get("tab").?.String, "\t");
|
||||
try testing.expectEqualSlices(u8, obj.get("formfeed").?.String, "\x0C");
|
||||
try testing.expectEqualSlices(u8, obj.get("backspace").?.String, "\x08");
|
||||
try testing.expectEqualSlices(u8, obj.get("doublequote").?.String, "\"");
|
||||
try testing.expectEqualSlices(u8, obj.get("unicode").?.String, "ą");
|
||||
try testing.expectEqualSlices(u8, obj.get("surrogatepair").?.String, "😂");
|
||||
}
|
||||
|
||||
test "string copy option" {
|
||||
const input =
|
||||
\\{
|
||||
\\ "noescape": "aą😂",
|
||||
\\ "simple": "\\\/\n\r\t\f\b\"",
|
||||
\\ "unicode": "\u0105",
|
||||
\\ "surrogatepair": "\ud83d\ude02"
|
||||
\\}
|
||||
;
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const allocator = arena_allocator.allocator();
|
||||
|
||||
var parser = Parser.init(allocator, false);
|
||||
const tree_nocopy = try parser.parse(input);
|
||||
const obj_nocopy = tree_nocopy.root.Object;
|
||||
|
||||
parser = Parser.init(allocator, true);
|
||||
const tree_copy = try parser.parse(input);
|
||||
const obj_copy = tree_copy.root.Object;
|
||||
|
||||
for ([_][]const u8{ "noescape", "simple", "unicode", "surrogatepair" }) |field_name| {
|
||||
try testing.expectEqualSlices(u8, obj_nocopy.get(field_name).?.String, obj_copy.get(field_name).?.String);
|
||||
}
|
||||
|
||||
const nocopy_addr = &obj_nocopy.get("noescape").?.String[0];
|
||||
const copy_addr = &obj_copy.get("noescape").?.String[0];
|
||||
|
||||
var found_nocopy = false;
|
||||
for (input) |_, index| {
|
||||
try testing.expect(copy_addr != &input[index]);
|
||||
if (nocopy_addr == &input[index]) {
|
||||
found_nocopy = true;
|
||||
}
|
||||
}
|
||||
try testing.expect(found_nocopy);
|
||||
}
|
||||
|
||||
test "stringify alloc" {
|
||||
const allocator = std.testing.allocator;
|
||||
const expected =
|
||||
\\{"foo":"bar","answer":42,"my_friend":"sammy"}
|
||||
;
|
||||
const actual = try stringifyAlloc(allocator, .{ .foo = "bar", .answer = 42, .my_friend = "sammy" }, .{});
|
||||
defer allocator.free(actual);
|
||||
|
||||
try std.testing.expectEqualStrings(expected, actual);
|
||||
}
|
||||
|
||||
test "json.serialize issue #5959" {
|
||||
var parser: StreamingParser = undefined;
|
||||
// StreamingParser has multiple internal fields set to undefined. This causes issues when using
|
||||
// expectEqual so these are zeroed. We are testing for equality here only because this is a
|
||||
// known small test reproduction which hits the relevant LLVM issue.
|
||||
std.mem.set(u8, @ptrCast([*]u8, &parser)[0..@sizeOf(StreamingParser)], 0);
|
||||
try std.testing.expectEqual(parser, parser);
|
||||
}
|
||||
|
||||
fn checkNext(p: *TokenStream, id: std.meta.Tag(Token)) !void {
|
||||
const token = (p.next() catch unreachable).?;
|
||||
try testing.expect(std.meta.activeTag(token) == id);
|
||||
}
|
||||
|
||||
test "json.token" {
|
||||
const s =
|
||||
\\{
|
||||
\\ "Image": {
|
||||
\\ "Width": 800,
|
||||
\\ "Height": 600,
|
||||
\\ "Title": "View from 15th Floor",
|
||||
\\ "Thumbnail": {
|
||||
\\ "Url": "http://www.example.com/image/481989943",
|
||||
\\ "Height": 125,
|
||||
\\ "Width": 100
|
||||
\\ },
|
||||
\\ "Animated" : false,
|
||||
\\ "IDs": [116, 943, 234, 38793]
|
||||
\\ }
|
||||
\\}
|
||||
;
|
||||
|
||||
var p = TokenStream.init(s);
|
||||
|
||||
try checkNext(&p, .ObjectBegin);
|
||||
try checkNext(&p, .String); // Image
|
||||
try checkNext(&p, .ObjectBegin);
|
||||
try checkNext(&p, .String); // Width
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .String); // Height
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .String); // Title
|
||||
try checkNext(&p, .String);
|
||||
try checkNext(&p, .String); // Thumbnail
|
||||
try checkNext(&p, .ObjectBegin);
|
||||
try checkNext(&p, .String); // Url
|
||||
try checkNext(&p, .String);
|
||||
try checkNext(&p, .String); // Height
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .String); // Width
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .ObjectEnd);
|
||||
try checkNext(&p, .String); // Animated
|
||||
try checkNext(&p, .False);
|
||||
try checkNext(&p, .String); // IDs
|
||||
try checkNext(&p, .ArrayBegin);
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .ArrayEnd);
|
||||
try checkNext(&p, .ObjectEnd);
|
||||
try checkNext(&p, .ObjectEnd);
|
||||
|
||||
try testing.expect((try p.next()) == null);
|
||||
}
|
||||
|
||||
test "json.token mismatched close" {
|
||||
var p = TokenStream.init("[102, 111, 111 }");
|
||||
try checkNext(&p, .ArrayBegin);
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .Number);
|
||||
try checkNext(&p, .Number);
|
||||
try testing.expectError(error.UnexpectedClosingBrace, p.next());
|
||||
}
|
||||
|
||||
test "json.token premature object close" {
|
||||
var p = TokenStream.init("{ \"key\": }");
|
||||
try checkNext(&p, .ObjectBegin);
|
||||
try checkNext(&p, .String);
|
||||
try testing.expectError(error.InvalidValueBegin, p.next());
|
||||
}
|
||||
|
||||
test "json.validate" {
|
||||
try testing.expectEqual(true, validate("{}"));
|
||||
try testing.expectEqual(true, validate("[]"));
|
||||
try testing.expectEqual(true, validate("[{[[[[{}]]]]}]"));
|
||||
try testing.expectEqual(false, validate("{]"));
|
||||
try testing.expectEqual(false, validate("[}"));
|
||||
try testing.expectEqual(false, validate("{{{{[]}}}]"));
|
||||
}
|
||||
|
||||
test "Value.jsonStringify" {
|
||||
{
|
||||
var buffer: [10]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
try @as(Value, .Null).jsonStringify(.{}, fbs.writer());
|
||||
try testing.expectEqualSlices(u8, fbs.getWritten(), "null");
|
||||
}
|
||||
{
|
||||
var buffer: [10]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
try (Value{ .Bool = true }).jsonStringify(.{}, fbs.writer());
|
||||
try testing.expectEqualSlices(u8, fbs.getWritten(), "true");
|
||||
}
|
||||
{
|
||||
var buffer: [10]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.writer());
|
||||
try testing.expectEqualSlices(u8, fbs.getWritten(), "42");
|
||||
}
|
||||
{
|
||||
var buffer: [10]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
try (Value{ .NumberString = "43" }).jsonStringify(.{}, fbs.writer());
|
||||
try testing.expectEqualSlices(u8, fbs.getWritten(), "43");
|
||||
}
|
||||
{
|
||||
var buffer: [10]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.writer());
|
||||
try testing.expectEqualSlices(u8, fbs.getWritten(), "4.2e+01");
|
||||
}
|
||||
{
|
||||
var buffer: [10]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.writer());
|
||||
try testing.expectEqualSlices(u8, fbs.getWritten(), "\"weeee\"");
|
||||
}
|
||||
{
|
||||
var buffer: [10]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
var vals = [_]Value{
|
||||
.{ .Integer = 1 },
|
||||
.{ .Integer = 2 },
|
||||
.{ .NumberString = "3" },
|
||||
};
|
||||
try (Value{
|
||||
.Array = Array.fromOwnedSlice(undefined, &vals),
|
||||
}).jsonStringify(.{}, fbs.writer());
|
||||
try testing.expectEqualSlices(u8, fbs.getWritten(), "[1,2,3]");
|
||||
}
|
||||
{
|
||||
var buffer: [10]u8 = undefined;
|
||||
var fbs = std.io.fixedBufferStream(&buffer);
|
||||
var obj = ObjectMap.init(testing.allocator);
|
||||
defer obj.deinit();
|
||||
try obj.putNoClobber("a", .{ .String = "b" });
|
||||
try (Value{ .Object = obj }).jsonStringify(.{}, fbs.writer());
|
||||
try testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
|
||||
}
|
||||
}
|
||||
|