Last active
August 15, 2024 11:05
-
-
Save travisstaloch/822c2ecb2043443e2349cab1dd765dee to your computer and use it in GitHub Desktop.
zonny - a toy zon parser
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//! | |
//! tests written for zonny.zig | |
//! | |
//! there is an example of parsing a build.zig.zon file at the bottom of this file. | |
//! | |
const std = @import("std"); | |
const testing = std.testing; | |
const zonny = @import("zonny"); | |
const talloc = testing.allocator; | |
/// Parse `source` into a `T` using the shared test allocator,
/// reporting any diagnostics to stderr.
fn parse(
    comptime T: type,
    source: [:0]const u8,
) !zonny.ParseResult(T) {
    const stderr = std.io.getStdErr().writer();
    return zonny.parseLeaky(T, talloc, source, "<test>", stderr);
}
/// Parse `source` and deeply compare the result against `expected`.
fn expect(
    comptime T: type,
    source: [:0]const u8,
    expected: T,
) !void {
    const result = try parse(T, source);
    defer result.deinit(talloc);
    try testing.expectEqualDeep(expected, result.value);
}
/// Parse `source` and compare the raw bytes of the result against
/// `expected` — used for untagged unions, which expectEqualDeep
/// cannot inspect.
fn expectEqualBytes(
    comptime T: type,
    source: [:0]const u8,
    expected: T,
) !void {
    const result = try parse(T, source);
    defer result.deinit(talloc);
    const want = std.mem.asBytes(&expected);
    const got = std.mem.asBytes(&result.value);
    try testing.expectEqualStrings(want, got);
}
/// Expect parsing `source` as a `T` to fail with `expected`.
/// Diagnostics are suppressed via null_writer.
fn expectError(
    comptime T: type,
    source: [:0]const u8,
    expected: zonny.ParseError,
) !void {
    const result = zonny.parseLeaky(
        T,
        talloc,
        source,
        "<test>",
        std.io.null_writer,
    );
    try testing.expectError(expected, result);
}
test "value types" {
    // bools: only the literals true/false are accepted
    try expectError(bool, "foo", error.InvalidBool);
    try expect(bool, "true", true);
    try expectError(u8, "foo", error.UnexpectedToken);
    // ints: number literals and char literals both coerce to integer types
    try expectError(u1, "2", error.InvalidInt); // out of range for u1
    try expect(u8, "1", 1);
    try expect(u8, "'1'", '1'); // char literal -> its code point
    try expect(u21, "'💩'", '💩');
    try expectError(u8, "'💩'", error.InvalidInt); // code point too large for u8
    try expect(i8, "-1", -1);
    // floats: accepts int and float literals plus the inf/nan identifiers
    try expectError(f32, "foo", error.UnexpectedToken);
    try expect(f32, "1", 1);
    try expect(f32, "1.0", 1);
    try expect(f32, "-1", -1);
    try expect(f32, "-1.0", -1);
    // nan != nan, so check with isNan() instead of expect()
    const r = try parse(f32, "nan");
    defer r.deinit(talloc);
    try testing.expect(std.math.isNan(r.value));
    try expect(f32, "inf", std.math.inf(f32));
    try expect(f32, "-inf", -std.math.inf(f32));
    // enums: must be written as enum literals (.foo)
    const E = enum { foo, bar };
    try expectError(E, "foo", error.UnexpectedToken); // missing leading dot
    try expectError(E, ".baz", error.InvalidEnum);
    try expect(E, ".foo", .foo);
    try expect(E, ".bar", .bar);
    try expectError(E, ".baz", error.InvalidEnum);
    // strings, slices: ".{...}" and "&.{...}" both parse as slices
    try expect([]const u8, "\"foo\"", "foo");
    try expect([]const u8, ".{1,2}", &.{ 1, 2 });
    try expect([]const u8, "&.{1,2}", &.{ 1, 2 });
    try expect([]const u21, "&.{'⚡','💩'}", &.{ '⚡', '💩' });
    // arrays
    try expect([2]u8, ".{1,2}", .{ 1, 2 });
    // void: an empty block
    try expect(void, "{ }", {});
    try expect(void, "{}", {});
    // optional
    try expect(?u8, "null", null);
    try expect(?u8, "10", 10);
    // null
    // try expect(@Type(.{ .Null = {} }), "null", null);
}
test "structs" {
    const S = struct { foo: u8, bar: u1 };
    try expectError(S, "baz", error.UnexpectedToken); // not a struct init
    try expectError(S, ".{.baz}", error.InvalidStruct); // positional init, but S is not a tuple
    try expectError(S, ".{.baz = 0}", error.InvalidFieldName);
    try expectError(S, ".{.foo = 0, .foo = 0}", error.DuplicateField);
    // fields may appear in any order
    try expect(S, ".{.bar = 0, .foo = 42}", .{ .foo = 42, .bar = 0 });
    // tuple: positional ".{...}" init
    const T = struct { u8, i8 };
    try expect(T, ".{42, 0}", .{ 42, 0 });
    // @ fields: quoted identifiers are unquoted before the field lookup
    const S2 = struct { @"struct": bool };
    try expect(S2, ".{.@\"struct\" = true}", .{ .@"struct" = true });
}
// standalone reproduction of the field-lookup machinery used by
// zonny.parseInner(): build a comptime name->index map, then use
// FieldEnum + inline else to assign a field whose name is only
// known at runtime.
test "@ fields" {
    const S2 = struct { @"struct": u8 };
    var s2: S2 = undefined;
    const info = @typeInfo(S2).Struct;
    // smallest unsigned int type that can index S2's fields
    const FieldIndex = @Type(.{ .Int = .{
        .signedness = .unsigned,
        .bits = comptime std.math.log2_int_ceil(usize, info.fields.len),
    } });
    const Fe = std.meta.FieldEnum(S2);
    // comptime map: field name (without @"" quoting) -> field index
    const field_indexes = comptime blk: {
        var kvs_list: [info.fields.len]struct { []const u8, FieldIndex } = undefined;
        for (info.fields, 0..) |field, i|
            kvs_list[i] = .{ field.name, i };
        break :blk std.StaticStringMap(FieldIndex).initComptime(kvs_list);
    };
    // runtime-known name; `_ = &name` keeps it from being comptime-known
    var name: []const u8 = "struct";
    _ = &name;
    try testing.expectEqual(0, field_indexes.get(name));
    const field_idx = field_indexes.get(name) orelse return error.MissingField;
    const fe: Fe = @enumFromInt(field_idx);
    switch (fe) {
        // inline else materializes one branch per field at comptime
        inline else => |tag| @field(s2, info.fields[@intFromEnum(tag)].name) = 42,
    }
    try testing.expectEqual(42, s2.@"struct");
}
test "unions" {
    // tagged union: expectEqualDeep can compare it directly
    const U = union(enum) { foo: u8, bar: u0 };
    try expect(U, ".{.bar = 0}", .{ .bar = 0 });
    try expect(U, ".{.foo = 42}", .{ .foo = 42 });
    // bare (untagged) union: no tag to inspect, so compare raw bytes
    const U2 = union { foo: u8, bar: u0 };
    const r = try parse(U2, ".{.bar = 0}");
    defer r.deinit(talloc);
    try expectEqualBytes(U2, ".{.bar = 0}", .{ .bar = 0 });
    try expectEqualBytes(U2, ".{.foo = 42}", .{ .foo = 42 });
    // @ fields work for unions too
    const U3 = union { @"struct": bool };
    try expectEqualBytes(U3, ".{.@\"struct\" = true}", .{ .@"struct" = true });
}
/// Mirrors the layout of a build.zig.zon manifest.
const BuildZon = struct {
    name: []const u8,
    version: []const u8,
    minimum_zig_version: []const u8,
    // zonny.Map parses the dependency struct-init as name -> Dep entries
    dependencies: zonny.Map(Dep),
    paths: []const []const u8,
};
/// A single build.zig.zon dependency: either remote (url + hash) or
/// local (path), optionally lazy.
const Dep = struct {
    data: Data,
    /// false when the manifest omits the `.lazy` field
    lazy: bool = false,

    const Data = union(enum) {
        remote: Remote,
        local: []const u8,
    };
    const Remote = struct {
        url: []const u8,
        hash: []const u8,
    };

    /// Custom parse hook: a Dep's zon form is a flat struct-init
    /// (`.{.url = .., .hash = ..}` or `.{.path = ..}`), not a tagged
    /// union, so it cannot be decoded generically.
    pub fn zonnyParse(comptime W: type, ctx: zonny.Ctx(W)) !Dep {
        zonny.debug("Dep.zonnyParse()\n", .{});
        // fix: start from the declared defaults so an omitted `.lazy`
        // field reads as false instead of undefined; `data` is assigned
        // while parsing and validated before returning.
        var dep: Dep = .{ .data = undefined };
        const Field = enum { url, hash, path, lazy };
        const field_names = comptime std.meta.fieldNames(Field);
        // smallest unsigned int type able to index the fields
        const FieldIndex = @Type(.{ .Int = .{
            .signedness = .unsigned,
            .bits = comptime std.math.log2_int_ceil(usize, field_names.len),
        } });
        // comptime map: field name -> field index
        const field_indexes = comptime blk: {
            var kvs_list: [field_names.len]struct { []const u8, FieldIndex } = undefined;
            for (0..field_names.len) |i|
                kvs_list[i] = .{ field_names[i], i };
            break :blk std.StaticStringMap(FieldIndex).initComptime(kvs_list);
        };
        var fields_seen = std.EnumSet(Field).initEmpty();
        var buf: [2]std.zig.Ast.Node.Index = undefined;
        if (ctx.tree.fullStructInit(&buf, ctx.idx)) |v| {
            for (v.ast.fields) |fidx| {
                // the field-name token sits 2 tokens before the value
                // node: `.name = <value>`
                if (ctx.tree.firstToken(fidx) < 2) unreachable;
                const field_name = ctx.tree.tokenSlice(ctx.tree.firstToken(fidx) - 2);
                zonny.debug("dep field={s}\n", .{field_name});
                const field_idx = field_indexes.get(field_name) orelse
                    return zonny.err(
                    "field: '{s}'\n",
                    .{field_name},
                    W,
                    ctx.withIdx(fidx),
                    error.InvalidFieldName,
                );
                const fe: Field = @enumFromInt(field_idx);
                if (fields_seen.contains(fe)) {
                    return zonny.err(
                        "duplicate field: '{s}'\n",
                        .{field_name},
                        W,
                        ctx.withIdx(fidx),
                        error.DuplicateField,
                    );
                }
                fields_seen.insert(fe);
                switch (fe) {
                    .lazy => dep.lazy = try zonny.parseInner(bool, W, ctx.withIdx(fidx)),
                    .url => {
                        // url/hash are mutually exclusive with path
                        if (fields_seen.contains(.path)) {
                            return zonny.err(
                                "unexpected field: '{s}'\n",
                                .{field_name},
                                W,
                                ctx.withIdx(fidx),
                                error.UnexpectedField,
                            );
                        }
                        // first remote field seen activates the remote arm
                        if (!fields_seen.contains(.hash))
                            dep.data = .{ .remote = undefined };
                        dep.data.remote.url = try zonny.parseInner([]const u8, W, ctx.withIdx(fidx));
                    },
                    .hash => {
                        if (fields_seen.contains(.path)) {
                            return zonny.err(
                                "unexpected field: '{s}'\n",
                                .{field_name},
                                W,
                                ctx.withIdx(fidx),
                                error.UnexpectedField,
                            );
                        }
                        if (!fields_seen.contains(.url))
                            dep.data = .{ .remote = undefined };
                        dep.data.remote.hash = try zonny.parseInner([]const u8, W, ctx.withIdx(fidx));
                    },
                    .path => {
                        if (fields_seen.contains(.hash) or fields_seen.contains(.url)) {
                            return zonny.err(
                                "unexpected field: '{s}'\n",
                                .{field_name},
                                W,
                                ctx.withIdx(fidx),
                                error.UnexpectedField,
                            );
                        }
                        dep.data = .{ .local = try zonny.parseInner([]const u8, W, ctx.withIdx(fidx)) };
                    },
                }
            }
        } else {
            const slice = ctx.tree.tokenSlice(ctx.tree.firstToken(ctx.idx) - 2);
            return zonny.err(
                "unexpected: '{s}'\n",
                .{slice},
                W,
                ctx,
                error.UnexpectedToken,
            );
        }
        // fix: reject incomplete dependencies instead of returning an
        // undefined `data` payload
        const is_local = fields_seen.contains(.path);
        const is_remote = fields_seen.contains(.url) and fields_seen.contains(.hash);
        if (!is_local and !is_remote)
            return zonny.err(
                "incomplete dependency: expected '.path' or both '.url' and '.hash'\n",
                .{},
                W,
                ctx,
                error.InvalidStruct,
            );
        return dep;
    }
};
// end-to-end example: parse a real build.zig.zon manifest.
// requires examples/build.zig.zon to exist next to this file.
test "build.zig.zon" {
    const source = @embedFile("examples/build.zig.zon");
    const r = try zonny.parseLeaky(
        BuildZon,
        talloc,
        source,
        "build.zig.zon",
        std.io.getStdErr().writer(),
    );
    defer r.deinit(talloc);
    try testing.expectEqualStrings("zonny", r.value.name);
    try testing.expectEqualStrings("0.0.0", r.value.version);
    try testing.expectEqualStrings("0.12.0", r.value.minimum_zig_version);
    // paths rendered via {s} on a slice of strings; first entry is ""
    try testing.expectFmt("{ , build.zig, src }", "{s}", .{r.value.paths});
    try testing.expectEqual(2, r.value.dependencies.map.count());
    // example1: a remote (url + hash) dependency
    try testing.expectEqualDeep(
        Dep{ .data = .{ .remote = .{
            .url = "https://example.com/foo.tar.gz",
            .hash = "...",
        } } },
        r.value.dependencies.map.get("example1"),
    );
    // example2: a local (path) dependency, marked lazy
    try testing.expectEqualDeep(
        Dep{ .data = .{ .local = "foo" }, .lazy = true },
        r.value.dependencies.map.get("example2"),
    );
}
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//! zonny - zon file parser which relies on std.zig.Ast.parse() | |
//! | |
//! references | |
//! | |
//! https://github.com/ziglang/zig/pull/17731 | |
//! | |
//! | |
const std = @import("std"); | |
const mem = std.mem; | |
/// Error set shared by all zonny parsing routines. Each variant maps
/// to a specific mismatch between the zon AST and the requested type.
pub const ParseError = error{
    ParseFailure,
    InvalidFieldName,
    InvalidBool,
    InvalidInt,
    InvalidSlice,
    InvalidEnum,
    InvalidUnion,
    InvalidStruct,
    InvalidVoid,
    DuplicateField,
    UnexpectedToken,
    UnexpectedField,
};
/// Parsing context threaded through every zonny routine. `W` is the
/// concrete error-writer type (e.g. a std.fs.File.Writer).
pub fn Ctx(comptime W: type) type {
    return struct {
        arena: mem.Allocator,
        tree: std.zig.Ast,
        idx: std.zig.Ast.Node.Index,
        filename: []const u8,
        error_writer: W,

        const Self = @This();

        /// Bundle the given values into a context.
        pub fn init(
            arena: mem.Allocator,
            tree: std.zig.Ast,
            idx: std.zig.Ast.Node.Index,
            filename: []const u8,
            error_writer: anytype,
        ) Self {
            return .{
                .arena = arena,
                .tree = tree,
                .idx = idx,
                .filename = filename,
                .error_writer = error_writer,
            };
        }

        /// A copy of this context pointing at a different AST node.
        pub fn withIdx(self: Self, idx: std.zig.Ast.Node.Index) Self {
            return .{
                .arena = self.arena,
                .tree = self.tree,
                .idx = idx,
                .filename = self.filename,
                .error_writer = self.error_writer,
            };
        }
    };
}
/// A string-keyed map whose entries are parsed from a zon struct-init:
/// `.{ .key1 = <V>, .key2 = <V> }`. Keys are duped into the parse arena.
pub fn Map(comptime V: type) type {
    return struct {
        map: std.StringArrayHashMapUnmanaged(V),
        const Self = @This();
        /// Custom parse hook; parseInner() dispatches here via its
        /// hasFn("zonnyParse") check.
        pub fn zonnyParse(
            comptime W: type,
            ctx: Ctx(W),
        ) !Self {
            var map: std.StringArrayHashMapUnmanaged(V) = .{};
            var buf: [2]std.zig.Ast.Node.Index = undefined;
            if (ctx.tree.fullStructInit(&buf, ctx.idx)) |v| {
                for (v.ast.fields) |fidx| {
                    // the field-name token sits 2 tokens before the
                    // value node: `.name = <value>`
                    if (ctx.tree.firstToken(fidx) < 2) unreachable;
                    const slice = ctx.tree.tokenSlice(ctx.tree.firstToken(fidx) - 2);
                    // strip @"..." quoting from the key if present
                    const field_name = if (mem.startsWith(u8, slice, "@\""))
                        slice[2 .. slice.len - 1]
                    else
                        slice;
                    const val = try parseInner(V, W, ctx.withIdx(fidx));
                    // dupe: tokenSlice points into the source buffer,
                    // which may not outlive the parse result
                    try map.put(ctx.arena, try ctx.arena.dupe(u8, field_name), val);
                }
            } else return err("", .{}, W, ctx, error.UnexpectedToken);
            return .{ .map = map };
        }
    };
}
// uncomment to enable debug logging:
// const show_debug = false;

/// Print to stderr, but only when a `show_debug` decl exists in this
/// file (see the commented-out const above).
pub fn debug(comptime fmt: []const u8, args: anytype) void {
    const enabled = comptime @hasDecl(@This(), "show_debug");
    if (enabled) std.debug.print(fmt, args);
}
/// Result of a parse: the decoded value plus the arena that owns every
/// allocation it references (strings, slices, map entries).
pub fn ParseResult(comptime T: type) type {
    return struct {
        arena: *std.heap.ArenaAllocator,
        value: T,

        /// Free the arena and its heap slot. Invalidates `value`.
        pub fn deinit(self: @This(), gpa: mem.Allocator) void {
            self.arena.deinit();
            gpa.destroy(self.arena);
        }
    };
}
/// Parse `source` (zon) into a `T`. All allocations for the returned
/// value live in an arena owned by the result; free everything with
/// `result.deinit(gpa)`. Parse diagnostics go to `error_writer`.
pub fn parseLeaky(
    comptime T: type,
    gpa: mem.Allocator,
    source: [:0]const u8,
    filename: []const u8,
    error_writer: anytype,
) !ParseResult(T) {
    var tree = try std.zig.Ast.parse(gpa, source, .zon);
    defer tree.deinit(gpa);
    const W = @TypeOf(error_writer);
    if (tree.errors.len > 0) {
        if (W != void) {
            for (tree.errors) |e| {
                const loc = tree.tokenLocation(tree.errorOffset(e), e.token);
                try error_writer.print("error: {s}:{}:{}: ", .{ filename, loc.line, loc.column });
                try tree.renderError(e, error_writer);
                try error_writer.writeAll("\n");
            }
        }
        // fix: previously returned error.ParseFailed, which is not a
        // member of the declared ParseError set (ParseFailure is)
        return error.ParseFailure;
    }
    // the arena is heap-allocated so ParseResult can own it by pointer
    const arena = try gpa.create(std.heap.ArenaAllocator);
    arena.* = std.heap.ArenaAllocator.init(gpa);
    errdefer {
        arena.deinit();
        gpa.destroy(arena);
    }
    return .{
        // nodes.items(.data)[0].lhs is the zon root expression node
        .value = try parseInner(T, W, .{
            .arena = arena.allocator(),
            .tree = tree,
            .idx = tree.nodes.items(.data)[0].lhs,
            .filename = filename,
            .error_writer = error_writer,
        }),
        .arena = arena,
    };
}
/// Recursively decode the AST node at `ctx.idx` into a value of type
/// `T`. A struct/union type may override decoding by declaring
/// `pub fn zonnyParse(comptime W: type, ctx: Ctx(W)) !T`.
pub fn parseInner(
    comptime T: type,
    comptime W: type,
    ctx: Ctx(W),
) !T {
    var t: T = undefined;
    var buf: [2]std.zig.Ast.Node.Index = undefined;
    switch (@typeInfo(T)) {
        .Struct => |info| {
            // user types can take over parsing entirely
            if (std.meta.hasFn(T, "zonnyParse")) return T.zonnyParse(W, ctx);
            // smallest unsigned int type able to index info.fields
            const I = @Type(.{ .Int = .{
                .signedness = .unsigned,
                .bits = comptime std.math.log2_int_ceil(usize, info.fields.len),
            } });
            const Fe = std.meta.FieldEnum(T);
            // comptime map: field name -> field index
            const field_indexes = comptime blk: {
                var kvs_list: [info.fields.len]struct { []const u8, I } = undefined;
                for (info.fields, 0..) |field, i|
                    kvs_list[i] = .{ field.name, i };
                break :blk std.StaticStringMap(I).initComptime(kvs_list);
            };
            var fields_seen = std.StaticBitSet(info.fields.len).initEmpty();
            if (ctx.tree.fullStructInit(&buf, ctx.idx)) |v| {
                // named init: .{.a = ..., .b = ...}
                for (v.ast.fields) |fidx| {
                    // the field-name token sits 2 tokens before the value
                    const slice = ctx.tree.tokenSlice(ctx.tree.firstToken(fidx) - 2);
                    // strip @"..." quoting
                    const field_name = if (mem.startsWith(u8, slice, "@\""))
                        slice[2 .. slice.len - 1]
                    else
                        slice;
                    const field_idx = field_indexes.get(field_name) orelse
                        return err("field: '{s}'\n", .{field_name}, W, ctx.withIdx(fidx), error.InvalidFieldName);
                    if (fields_seen.isSet(field_idx)) {
                        return err(
                            "duplicate field: '{s}'\n",
                            .{field_name},
                            W,
                            ctx.withIdx(fidx),
                            error.DuplicateField,
                        );
                    }
                    fields_seen.set(field_idx);
                    const fe: Fe = @enumFromInt(field_idx);
                    switch (fe) {
                        // inline else: one comptime branch per field
                        inline else => |tag| {
                            const i: I = @intFromEnum(tag);
                            @field(t, info.fields[i].name) = try parseInner(
                                info.fields[i].type,
                                W,
                                ctx.withIdx(fidx),
                            );
                        },
                    }
                }
                // NOTE(review): fields absent from the init are left
                // undefined (declared defaults are not applied) — the
                // input must supply every field.
            } else if (ctx.tree.fullArrayInit(&buf, ctx.idx)) |v| {
                // positional init: .{a, b} — only valid for tuples
                if (!info.is_tuple)
                    return err("expected tuple\n", .{}, W, ctx, error.InvalidStruct);
                for (v.ast.elements, 0..) |node, nodei| {
                    const slice = ctx.tree.tokenSlice(ctx.tree.firstToken(node));
                    debug("slice {s}\n", .{slice});
                    const field_idx = std.math.cast(I, nodei) orelse
                        return err(
                        "invalid field: '{}'\n",
                        .{nodei},
                        W,
                        ctx.withIdx(node),
                        error.InvalidFieldName,
                    );
                    if (fields_seen.isSet(field_idx)) {
                        return err(
                            "duplicate field: '{s}'\n",
                            .{slice},
                            W,
                            ctx.withIdx(node),
                            error.DuplicateField,
                        );
                    }
                    fields_seen.set(field_idx);
                    const fe: Fe = @enumFromInt(field_idx);
                    switch (fe) {
                        inline else => |tag| {
                            const i: I = @intFromEnum(tag);
                            @field(t, info.fields[i].name) =
                                try parseInner(info.fields[i].type, W, ctx.withIdx(node));
                        },
                    }
                }
            } else {
                return err("", .{}, W, ctx, error.UnexpectedToken);
            }
        },
        .Union => |info| {
            // smallest unsigned int type able to index info.fields
            const FieldIndex = @Type(.{ .Int = .{
                .signedness = .unsigned,
                .bits = comptime std.math.log2_int_ceil(usize, info.fields.len),
            } });
            const Fe = std.meta.FieldEnum(T);
            const field_indexes = comptime blk: {
                var kvs_list: [info.fields.len]struct { []const u8, FieldIndex } = undefined;
                for (info.fields, 0..) |field, i|
                    kvs_list[i] = .{ field.name, i };
                break :blk std.StaticStringMap(FieldIndex).initComptime(kvs_list);
            };
            if (ctx.tree.fullStructInit(&buf, ctx.idx)) |v| {
                // a union is written as a single-field struct init
                if (v.ast.fields.len != 1) {
                    const token = ctx.tree.nodes.items(.main_token)[ctx.idx];
                    return err("error: {s}: InvalidUnion expected 1 field. found {} token {}", .{ ctx.filename, v.ast.fields.len, token }, W, ctx, error.InvalidUnion);
                }
                const fidx = v.ast.fields[0];
                const slice = ctx.tree.tokenSlice(ctx.tree.firstToken(fidx) - 2);
                // strip @"..." quoting
                const field_name = if (mem.startsWith(u8, slice, "@\""))
                    slice[2 .. slice.len - 1]
                else
                    slice;
                const field_idx = field_indexes.get(field_name) orelse
                    return err("field: '{s}'\n", .{field_name}, W, ctx.withIdx(fidx), error.InvalidFieldName);
                const fe: Fe = @enumFromInt(field_idx);
                switch (fe) {
                    inline else => |tag| {
                        const i = @intFromEnum(tag);
                        return @unionInit(T, info.fields[i].name, try parseInner(
                            info.fields[i].type,
                            W,
                            ctx.withIdx(fidx),
                        ));
                    },
                }
            } else return err("", .{}, W, ctx, error.UnexpectedToken);
        },
        .Pointer => |x| switch (x.size) {
            .Slice => {
                const token_index = ctx.tree.nodes.items(.main_token)[ctx.idx];
                const slice = ctx.tree.tokenSlice(token_index);
                const node_tags = ctx.tree.nodes.items(.tag);
                if (ctx.tree.fullArrayInit(&buf, ctx.idx)) |v| {
                    // .{a, b, c}
                    var items = std.ArrayList(x.child).init(ctx.arena);
                    for (v.ast.elements) |i| {
                        const item = try parseInner(x.child, W, ctx.withIdx(i));
                        try items.append(item);
                    }
                    return items.toOwnedSlice();
                } else if (node_tags[ctx.idx] == .address_of) {
                    // &.{a, b, c}: unwrap the address-of node
                    const node_datas = ctx.tree.nodes.items(.data);
                    if (ctx.tree.fullArrayInit(&buf, node_datas[ctx.idx].lhs)) |v| {
                        var items = std.ArrayList(x.child).init(ctx.arena);
                        for (v.ast.elements) |i| {
                            const item = try parseInner(x.child, W, ctx.withIdx(i));
                            try items.append(item);
                        }
                        return items.toOwnedSlice();
                    }
                }
                // "..." string literal, only meaningful for []const u8
                if (x.child == u8) {
                    switch (slice[0]) {
                        '"' => return std.zig.string_literal.parseAlloc(ctx.arena, slice),
                        else => {},
                    }
                }
                return err("{s} {s}\n", .{ @typeName(x.child), slice }, W, ctx, error.InvalidSlice);
            },
            else => |y| @compileError(std.fmt.comptimePrint("TODO Pointer {s}", .{@tagName(y)})),
        },
        .Array => |x| {
            // fix: previously a non-array-init node fell through and
            // returned an undefined value; also check the element count
            // so the loop can never index past the array's bounds.
            if (ctx.tree.fullArrayInit(&buf, ctx.idx)) |v| {
                if (v.ast.elements.len != x.len)
                    return err(
                        "expected {} array elements, found {}\n",
                        .{ x.len, v.ast.elements.len },
                        W,
                        ctx,
                        error.UnexpectedToken,
                    );
                var a: T = undefined;
                for (v.ast.elements, 0..) |i, n| {
                    a[n] = try parseInner(x.child, W, ctx.withIdx(i));
                }
                return a;
            } else return err("expected array init\n", .{}, W, ctx, error.UnexpectedToken);
        },
        .Bool => {
            const slice = ctx.tree.tokenSlice(ctx.tree.nodes.items(.main_token)[ctx.idx]);
            const map = std.StaticStringMap(bool).initComptime(.{
                .{ "true", true },
                .{ "false", false },
            });
            return if (map.get(slice)) |v|
                v
            else
                err("expected true or false, found '{s}'", .{slice}, W, ctx, error.InvalidBool);
        },
        .Int => |info| {
            const slice = ctx.tree.tokenSlice(ctx.tree.nodes.items(.main_token)[ctx.idx]);
            const node_tags = ctx.tree.nodes.items(.tag);
            return switch (node_tags[ctx.idx]) {
                // `-x`: recurse on the operand and negate
                .negation => {
                    if (info.signedness == .unsigned)
                        return err("negation of unsigned integer '{s}'", .{slice}, W, ctx, error.UnexpectedToken);
                    const node_datas = ctx.tree.nodes.items(.data);
                    return -try parseInner(T, W, ctx.withIdx(node_datas[ctx.idx].lhs));
                },
                .number_literal => switch (std.zig.number_literal.parseNumberLiteral(slice)) {
                    .int => |v| std.math.cast(T, v) orelse
                        err("type '{s}' cannot represent value '{s}'", .{ @typeName(T), slice }, W, ctx, error.InvalidInt),
                    .big_int => fatal("TODO big_int, '{s}'", .{slice}, W, ctx, error.UnexpectedToken),
                    else => |tag| err("expected integer, found {s}: '{s}'", .{ @tagName(tag), slice }, W, ctx, error.UnexpectedToken),
                },
                // char literals coerce to their code point
                .char_literal => switch (std.zig.parseCharLiteral(slice)) {
                    .success => |v| std.math.cast(T, v) orelse
                        err("integer cast, expected {s} found '{s}'", .{ @typeName(T), slice }, W, ctx, error.InvalidInt),
                    .failure => err("invalid char literal '{s}'", .{slice}, W, ctx, error.UnexpectedToken),
                },
                else => |tag| err("expected integer, found {s}: '{s}'", .{ @tagName(tag), slice }, W, ctx, error.UnexpectedToken),
            };
        },
        .Float => {
            const slice = ctx.tree.tokenSlice(ctx.tree.nodes.items(.main_token)[ctx.idx]);
            const node_tags = ctx.tree.nodes.items(.tag);
            return switch (node_tags[ctx.idx]) {
                .negation => {
                    const node_datas = ctx.tree.nodes.items(.data);
                    return -try parseInner(T, W, ctx.withIdx(node_datas[ctx.idx].lhs));
                },
                .number_literal => switch (std.zig.number_literal.parseNumberLiteral(slice)) {
                    .float => return std.fmt.parseFloat(T, slice),
                    .int => |v| @floatFromInt(v),
                    else => |tag| err("expected float, found {s}: '{s}'", .{ @tagName(tag), slice }, W, ctx, error.UnexpectedToken),
                },
                // the special identifiers inf and nan
                .identifier => {
                    const Ident = enum { inf, nan };
                    const ident = std.meta.stringToEnum(Ident, slice) orelse
                        return err("expected float, found identifier '{s}'", .{slice}, W, ctx, error.UnexpectedToken);
                    return switch (ident) {
                        .inf => std.math.inf(T),
                        .nan => std.math.nan(T),
                    };
                },
                else => |tag| err("expected float, found {s}: '{s}'", .{ @tagName(tag), slice }, W, ctx, error.UnexpectedToken),
            };
        },
        .Enum => {
            const slice = ctx.tree.tokenSlice(ctx.tree.nodes.items(.main_token)[ctx.idx]);
            const node_tags = ctx.tree.nodes.items(.tag);
            // enums must be written as enum literals: .name
            if (node_tags[ctx.idx] != .enum_literal)
                return err("expected enum. found {s} '{s}'", .{ @tagName(node_tags[ctx.idx]), slice }, W, ctx, error.UnexpectedToken);
            return std.meta.stringToEnum(T, slice) orelse
                err("invalid enum '{s}'", .{slice}, W, ctx, error.InvalidEnum);
        },
        .Void => {
            const slice = ctx.tree.tokenSlice(ctx.tree.nodes.items(.main_token)[ctx.idx]);
            const node_tags = ctx.tree.nodes.items(.tag);
            const data = ctx.tree.nodes.items(.data);
            // void parses from an empty block: {} or { }
            switch (node_tags[ctx.idx]) {
                .block_two => if (data[ctx.idx].lhs != 0 or data[ctx.idx].rhs != 0) {
                    return err("invalid void '{s}'", .{slice}, W, ctx, error.InvalidVoid);
                },
                .block => if (data[ctx.idx].lhs != data[ctx.idx].rhs) {
                    return err("invalid void '{s}'", .{slice}, W, ctx, error.InvalidVoid);
                },
                else => return err("expected void literal. found '{s}'", .{slice}, W, ctx, error.UnexpectedToken),
            }
        },
        .Optional => |x| {
            const tags = ctx.tree.nodes.items(.tag);
            // the identifier `null` yields null; anything else parses
            // as the child type
            if (tags[ctx.idx] == .identifier) {
                const main_tokens = ctx.tree.nodes.items(.main_token);
                const token = main_tokens[ctx.idx];
                const bytes = ctx.tree.tokenSlice(token);
                if (std.mem.eql(u8, bytes, "null"))
                    return null;
            }
            return try parseInner(x.child, W, ctx);
        },
        else => |x| @compileError(std.fmt.comptimePrint("TODO {s}", .{@tagName(x)})),
    }
    return t;
}
/// Report a parse error at the current node's source location on
/// ctx.error_writer, then return `e`.
pub fn err(
    comptime fmt: []const u8,
    args: anytype,
    comptime W: type,
    ctx: Ctx(W),
    e: ParseError,
) (ParseError || W.Error) {
    const token = ctx.tree.nodes.items(.main_token)[ctx.idx];
    const loc = ctx.tree.tokenLocation(ctx.tree.tokenToSpan(token).start, token);
    const w = ctx.error_writer;
    try w.print("error: {s}:{}:{}\n", .{ ctx.filename, loc.line, loc.column });
    try w.print(fmt ++ "\n", args);
    return e;
}
/// Like err() but panics instead of returning — used for unrecoverable
/// "TODO" paths.
pub fn fatal(
    comptime fmt: []const u8,
    args: anytype,
    comptime W: type,
    ctx: Ctx(W),
    e: ParseError,
) noreturn {
    // `e` is unused but kept so fatal() stays call-compatible with err()
    _ = e;
    const token = ctx.tree.nodes.items(.main_token)[ctx.idx];
    const start = ctx.tree.tokenToSpan(token).start;
    const loc = ctx.tree.tokenLocation(start, token);
    // fix: std.debug.panic is noreturn, so the original `try ...` and
    // the unreachable `return e;` were both compile errors
    std.debug.panic("error: {s}:{}:{}\n" ++ fmt ++ "\n", .{ ctx.filename, loc.line, loc.column } ++ args);
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment