diff --git a/build.zig b/build.zig index 7dfc785ce39c..b3d79386b820 100644 --- a/build.zig +++ b/build.zig @@ -428,7 +428,7 @@ pub fn build(b: *std.Build) !void { const optimization_modes = chosen_opt_modes_buf[0..chosen_mode_index]; const fmt_include_paths = &.{ "lib", "src", "test", "tools", "build.zig", "build.zig.zon" }; - const fmt_exclude_paths = &.{"test/cases"}; + const fmt_exclude_paths = &.{ "test/cases", "test/behavior/zon" }; const do_fmt = b.addFmt(.{ .paths = fmt_include_paths, .exclude_paths = fmt_exclude_paths, diff --git a/lib/std/fmt.zig b/lib/std/fmt.zig index 2f16d849b051..c79b089832e9 100644 --- a/lib/std/fmt.zig +++ b/lib/std/fmt.zig @@ -1581,7 +1581,8 @@ test parseInt { try std.testing.expectEqual(@as(i5, -16), try std.fmt.parseInt(i5, "-10", 16)); } -fn parseIntWithSign( +/// Like `parseIntWithGenericCharacter`, but with a sign argument. +pub fn parseIntWithSign( comptime Result: type, comptime Character: type, buf: []const Character, diff --git a/lib/std/std.zig b/lib/std/std.zig index cc61111746aa..20543b67d893 100644 --- a/lib/std/std.zig +++ b/lib/std/std.zig @@ -44,6 +44,7 @@ pub const Thread = @import("Thread.zig"); pub const Treap = @import("treap.zig").Treap; pub const Tz = tz.Tz; pub const Uri = @import("Uri.zig"); +pub const zon = @import("zon.zig"); pub const array_hash_map = @import("array_hash_map.zig"); pub const atomic = @import("atomic.zig"); diff --git a/lib/std/zig/Ast.zig b/lib/std/zig/Ast.zig index e6f456290da8..9adcef1db5f8 100644 --- a/lib/std/zig/Ast.zig +++ b/lib/std/zig/Ast.zig @@ -7,12 +7,13 @@ /// Reference to externally-owned data. source: [:0]const u8, +mode: Mode, + tokens: TokenList.Slice, /// The root AST node is assumed to be index 0. Since there can be no /// references to the root node, this means 0 is available to indicate null. 
nodes: NodeList.Slice, extra_data: []Node.Index, -mode: Mode = .zig, errors: []const Error, diff --git a/lib/std/zig/AstGen.zig b/lib/std/zig/AstGen.zig index a9b518357bfc..be76583f69cf 100644 --- a/lib/std/zig/AstGen.zig +++ b/lib/std/zig/AstGen.zig @@ -130,6 +130,8 @@ fn appendRefsAssumeCapacity(astgen: *AstGen, refs: []const Zir.Inst.Ref) void { } pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir { + assert(tree.mode == .zig); + var arena = std.heap.ArenaAllocator.init(gpa); defer arena.deinit(); @@ -8812,36 +8814,22 @@ fn numberLiteral(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index, source_node: } } -fn failWithNumberError(astgen: *AstGen, err: std.zig.number_literal.Error, token: Ast.TokenIndex, bytes: []const u8) InnerError { - const is_float = std.mem.indexOfScalar(u8, bytes, '.') != null; - switch (err) { - .leading_zero => if (is_float) { - return astgen.failTok(token, "number '{s}' has leading zero", .{bytes}); - } else { - return astgen.failTokNotes(token, "number '{s}' has leading zero", .{bytes}, &.{ - try astgen.errNoteTok(token, "use '0o' prefix for octal literals", .{}), - }); - }, - .digit_after_base => return astgen.failTok(token, "expected a digit after base prefix", .{}), - .upper_case_base => |i| return astgen.failOff(token, @intCast(i), "base prefix must be lowercase", .{}), - .invalid_float_base => |i| return astgen.failOff(token, @intCast(i), "invalid base for float literal", .{}), - .repeated_underscore => |i| return astgen.failOff(token, @intCast(i), "repeated digit separator", .{}), - .invalid_underscore_after_special => |i| return astgen.failOff(token, @intCast(i), "expected digit before digit separator", .{}), - .invalid_digit => |info| return astgen.failOff(token, @intCast(info.i), "invalid digit '{c}' for {s} base", .{ bytes[info.i], @tagName(info.base) }), - .invalid_digit_exponent => |i| return astgen.failOff(token, @intCast(i), "invalid digit '{c}' in exponent", .{bytes[i]}), - .duplicate_exponent => |i| return astgen.failOff(token, @intCast(i), "duplicate exponent", .{}), - .exponent_after_underscore => |i| return astgen.failOff(token, @intCast(i), "expected digit before exponent", .{}), - .special_after_underscore => |i| return astgen.failOff(token, @intCast(i), "expected digit before '{c}'", .{bytes[i]}), - .trailing_special => |i| return astgen.failOff(token, @intCast(i), "expected digit after '{c}'", .{bytes[i - 1]}), - .trailing_underscore => |i| return astgen.failOff(token, @intCast(i), "trailing digit separator", .{}), - .duplicate_period => unreachable, // Validated by tokenizer - .invalid_character => unreachable, // Validated by tokenizer - .invalid_exponent_sign => |i| { - assert(bytes.len >= 2 and bytes[0] == '0' and bytes[1] == 'x'); // Validated by tokenizer - return astgen.failOff(token, @intCast(i), "sign '{c}' cannot follow digit '{c}' in hex base", .{ bytes[i], bytes[i - 1] }); - }, - .period_after_exponent => |i| return astgen.failOff(token, @intCast(i), "unexpected period after exponent", .{}), - } +fn failWithNumberError( + astgen: *AstGen, + err: std.zig.number_literal.Error, + token: Ast.TokenIndex, + bytes: []const u8, +) InnerError { + const note = err.noteWithSource(bytes); + const notes: []const u32 = if (note) |n| &.{try astgen.errNoteTok(token, "{s}", .{n})} else &.{}; + try astgen.appendErrorTokNotesOff( + token, + @as(u32, @intCast(err.offset())), + "{}", + .{err.fmtWithSource(bytes)}, + notes, + ); + return error.AnalysisFail; } fn asmExpr( @@ -9336,7 +9324,18 @@ fn builtinCall( } else if (str.len == 0) { 
return astgen.failTok(str_lit_token, "import path cannot be empty", .{}); } - const result = try gz.addStrTok(.import, str.index, str_lit_token); + const res_ty = try ri.rl.resultType(gz, node) orelse .none; + const payload_index = try addExtra(gz.astgen, Zir.Inst.Import{ + .res_ty = res_ty, + .path = str.index, + }); + const result = try gz.add(.{ + .tag = .import, + .data = .{ .pl_tok = .{ + .src_tok = gz.tokenIndexToRelative(str_lit_token), + .payload_index = payload_index, + } }, + }); const gop = try astgen.imports.getOrPut(astgen.gpa, str.index); if (!gop.found_existing) { gop.value_ptr.* = str_lit_token; @@ -11422,85 +11421,20 @@ fn parseStrLit( } } -fn failWithStrLitError(astgen: *AstGen, err: std.zig.string_literal.Error, token: Ast.TokenIndex, bytes: []const u8, offset: u32) InnerError { +fn failWithStrLitError( + astgen: *AstGen, + err: std.zig.string_literal.Error, + token: Ast.TokenIndex, + bytes: []const u8, + offset: u32, +) InnerError { const raw_string = bytes[offset..]; - switch (err) { - .invalid_escape_character => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "invalid escape character: '{c}'", - .{raw_string[bad_index]}, - ); - }, - .expected_hex_digit => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected hex digit, found '{c}'", - .{raw_string[bad_index]}, - ); - }, - .empty_unicode_escape_sequence => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "empty unicode escape sequence", - .{}, - ); - }, - .expected_hex_digit_or_rbrace => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected hex digit or '}}', found '{c}'", - .{raw_string[bad_index]}, - ); - }, - .invalid_unicode_codepoint => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "unicode escape does not correspond to a valid unicode scalar value", - .{}, - ); - }, - .expected_lbrace => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected '{{', found '{c}", - .{raw_string[bad_index]}, - ); - }, - .expected_rbrace => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected '}}', found '{c}", - .{raw_string[bad_index]}, - ); - }, - .expected_single_quote => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected single quote ('), found '{c}", - .{raw_string[bad_index]}, - ); - }, - .invalid_character => |bad_index| { - return astgen.failOff( - token, - offset + @as(u32, @intCast(bad_index)), - "invalid byte in string or character literal: '{c}'", - .{raw_string[bad_index]}, - ); - }, - .empty_char_literal => { - return astgen.failOff(token, offset, "empty character literal", .{}); - }, - } + return astgen.failOff( + token, + offset + @as(u32, @intCast(err.offset())), + "{}", + .{err.fmtWithSource(raw_string)}, + ); } fn failNode( @@ -11618,7 +11552,7 @@ fn appendErrorTokNotesOff( comptime format: []const u8, args: anytype, notes: []const u32, -) !void { +) Allocator.Error!void { @branchHint(.cold); const gpa = astgen.gpa; const string_bytes = &astgen.string_bytes; @@ -11814,32 +11748,17 @@ fn strLitAsString(astgen: *AstGen, str_lit_token: Ast.TokenIndex) !IndexSlice { } fn strLitNodeAsString(astgen: *AstGen, node: Ast.Node.Index) !IndexSlice { - const tree = astgen.tree; - const node_datas = tree.nodes.items(.data); - - const start = 
node_datas[node].lhs; - const end = node_datas[node].rhs; - const gpa = astgen.gpa; + const data = astgen.tree.nodes.items(.data); const string_bytes = &astgen.string_bytes; const str_index = string_bytes.items.len; - // First line: do not append a newline. - var tok_i = start; - { - const slice = tree.tokenSlice(tok_i); - const line_bytes = slice[2..]; - try string_bytes.appendSlice(gpa, line_bytes); - tok_i += 1; - } - // Following lines: each line prepends a newline. - while (tok_i <= end) : (tok_i += 1) { - const slice = tree.tokenSlice(tok_i); - const line_bytes = slice[2..]; - try string_bytes.ensureUnusedCapacity(gpa, line_bytes.len + 1); - string_bytes.appendAssumeCapacity('\n'); - string_bytes.appendSliceAssumeCapacity(line_bytes); + var parser = std.zig.string_literal.multilineParser(string_bytes.writer(gpa)); + var tok_i = data[node].lhs; + while (tok_i <= data[node].rhs) : (tok_i += 1) { + try parser.line(astgen.tree.tokenSlice(tok_i)); } + const len = string_bytes.items.len - str_index; try string_bytes.append(gpa, 0); return IndexSlice{ diff --git a/lib/std/zig/Zir.zig b/lib/std/zig/Zir.zig index f2c103f8358d..715f020ba663 100644 --- a/lib/std/zig/Zir.zig +++ b/lib/std/zig/Zir.zig @@ -1667,7 +1667,7 @@ pub const Inst = struct { .func = .pl_node, .func_inferred = .pl_node, .func_fancy = .pl_node, - .import = .str_tok, + .import = .pl_tok, .int = .int, .int_big = .str, .float = .float, @@ -3574,6 +3574,13 @@ pub const Inst = struct { /// If `.none`, restore unconditionally. operand: Ref, }; + + pub const Import = struct { + /// The result type of the import, or `.none` if none was available. + res_ty: Ref, + /// The import path. + path: NullTerminatedString, + }; }; pub const SpecialProng = enum { none, @"else", under }; diff --git a/lib/std/zig/number_literal.zig b/lib/std/zig/number_literal.zig index a4dc33eb91c3..40b8c44c176d 100644 --- a/lib/std/zig/number_literal.zig +++ b/lib/std/zig/number_literal.zig @@ -58,8 +58,83 @@ pub const Error = union(enum) { invalid_exponent_sign: usize, /// Period comes directly after exponent. 
period_after_exponent: usize, + + pub fn fmtWithSource(self: Error, bytes: []const u8) std.fmt.Formatter(formatErrorWithSource) { + return .{ .data = .{ .err = self, .bytes = bytes } }; + } + + pub fn noteWithSource(self: Error, bytes: []const u8) ?[]const u8 { + if (self == .leading_zero) { + const is_float = std.mem.indexOfScalar(u8, bytes, '.') != null; + if (!is_float) return "use '0o' prefix for octal literals"; + } + return null; + } + + pub fn offset(self: Error) usize { + return switch (self) { + .leading_zero => 0, + .digit_after_base => 0, + .upper_case_base => |i| i, + .invalid_float_base => |i| i, + .repeated_underscore => |i| i, + .invalid_underscore_after_special => |i| i, + .invalid_digit => |e| e.i, + .invalid_digit_exponent => |i| i, + .duplicate_period => 0, + .duplicate_exponent => |i| i, + .exponent_after_underscore => |i| i, + .special_after_underscore => |i| i, + .trailing_special => |i| i, + .trailing_underscore => |i| i, + .invalid_character => |i| i, + .invalid_exponent_sign => |i| i, + .period_after_exponent => |i| i, + }; + } +}; + +const FormatWithSource = struct { + bytes: []const u8, + err: Error, }; +fn formatErrorWithSource( + self: FormatWithSource, + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, +) !void { + _ = options; + _ = fmt; + switch (self.err) { + .leading_zero => try writer.print("number '{s}' has leading zero", .{self.bytes}), + .digit_after_base => try writer.writeAll("expected a digit after base prefix"), + .upper_case_base => try writer.writeAll("base prefix must be lowercase"), + .invalid_float_base => try writer.writeAll("invalid base for float literal"), + .repeated_underscore => try writer.writeAll("repeated digit separator"), + .invalid_underscore_after_special => try writer.writeAll("expected digit before digit separator"), + .invalid_digit => |info| try writer.print("invalid digit '{c}' for {s} base", .{ self.bytes[info.i], @tagName(info.base) }), + .invalid_digit_exponent => |i| try writer.print("invalid digit '{c}' in exponent", .{self.bytes[i]}), + .duplicate_exponent => try writer.writeAll("duplicate exponent"), + .exponent_after_underscore => try writer.writeAll("expected digit before exponent"), + .special_after_underscore => |i| try writer.print("expected digit before '{c}'", .{self.bytes[i]}), + .trailing_special => |i| try writer.print("expected digit after '{c}'", .{self.bytes[i - 1]}), + .trailing_underscore => try writer.writeAll("trailing digit separator"), + .duplicate_period => try writer.writeAll("duplicate period"), + .invalid_character => try writer.writeAll("invalid character"), + .invalid_exponent_sign => |i| { + const hex = self.bytes.len >= 2 and self.bytes[0] == '0' and self.bytes[1] == 'x'; + if (hex) { + try writer.print("sign '{c}' cannot follow digit '{c}' in hex base", .{ self.bytes[i], self.bytes[i - 1] }); + } else { + try writer.print("sign '{c}' cannot follow digit '{c}' in current base", .{ self.bytes[i], self.bytes[i - 1] }); + } + }, + .period_after_exponent => try writer.writeAll("unexpected period after exponent"), + } +} + /// Parse Zig number literal accepted by fmt.parseInt, fmt.parseFloat and big_int.setString. /// Valid for any input. pub fn parseNumberLiteral(bytes: []const u8) Result { diff --git a/lib/std/zig/string_literal.zig b/lib/std/zig/string_literal.zig index 69178098379b..96cf49001cb3 100644 --- a/lib/std/zig/string_literal.zig +++ b/lib/std/zig/string_literal.zig @@ -38,8 +38,74 @@ pub const Error = union(enum) { invalid_character: usize, /// `''`. 
Not returned for string literals. empty_char_literal, + + pub fn fmtWithSource(self: Error, raw_string: []const u8) std.fmt.Formatter(formatErrorWithSource) { + return .{ .data = .{ .err = self, .raw_string = raw_string } }; + } + + pub fn offset(self: Error) usize { + return switch (self) { + .invalid_escape_character => |i| i, + .expected_hex_digit => |i| i, + .empty_unicode_escape_sequence => |i| i, + .expected_hex_digit_or_rbrace => |i| i, + .invalid_unicode_codepoint => |i| i, + .expected_lbrace => |i| i, + .expected_rbrace => |i| i, + .expected_single_quote => |i| i, + .invalid_character => |i| i, + .empty_char_literal => 0, + }; + } }; +const FormatWithSource = struct { + raw_string: []const u8, + err: Error, +}; + +fn formatErrorWithSource( + self: FormatWithSource, + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, +) !void { + _ = options; + _ = fmt; + switch (self.err) { + .invalid_escape_character => |bad_index| { + try writer.print("invalid escape character: '{c}'", .{self.raw_string[bad_index]}); + }, + .expected_hex_digit => |bad_index| { + try writer.print("expected hex digit, found '{c}'", .{self.raw_string[bad_index]}); + }, + .empty_unicode_escape_sequence => { + try writer.writeAll("empty unicode escape sequence"); + }, + .expected_hex_digit_or_rbrace => |bad_index| { + try writer.print("expected hex digit or '}}', found '{c}'", .{self.raw_string[bad_index]}); + }, + .invalid_unicode_codepoint => { + try writer.writeAll("unicode escape does not correspond to a valid unicode scalar value"); + }, + .expected_lbrace => |bad_index| { + try writer.print("expected '{{', found '{c}", .{self.raw_string[bad_index]}); + }, + .expected_rbrace => |bad_index| { + try writer.print("expected '}}', found '{c}", .{self.raw_string[bad_index]}); + }, + .expected_single_quote => |bad_index| { + try writer.print("expected single quote ('), found '{c}", .{self.raw_string[bad_index]}); + }, + .invalid_character => |bad_index| { + try writer.print("invalid byte in string or character literal: '{c}'", .{self.raw_string[bad_index]}); + }, + .empty_char_literal => { + try writer.print("empty character literal", .{}); + }, + } +} + /// Asserts the slice starts and ends with single-quotes. /// Returns an error if there is not exactly one UTF-8 codepoint in between. pub fn parseCharLiteral(slice: []const u8) ParsedCharLiteral { @@ -247,7 +313,7 @@ test parseCharLiteral { /// Parses `bytes` as a Zig string literal and writes the result to the std.io.Writer type. /// Asserts `bytes` has '"' at beginning and end. -pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result { +pub fn parseWrite(writer: anytype, bytes: []const u8) !Result { assert(bytes.len >= 2 and bytes[0] == '"' and bytes[bytes.len - 1] == '"'); var index: usize = 1; @@ -312,3 +378,151 @@ test parseAlloc { try expect(eql(u8, "foo", try parseAlloc(alloc, "\"f\x6f\x6f\""))); try expect(eql(u8, "f💯", try parseAlloc(alloc, "\"f\u{1f4af}\""))); } + +/// Parses one line at a time of a multiline Zig string literal to a std.io.Writer type. Does not append a null terminator. +pub fn MultilineParser(comptime Writer: type) type { + return struct { + writer: Writer, + first_line: bool, + + pub fn init(writer: Writer) @This() { + return .{ + .writer = writer, + .first_line = true, + }; + } + + /// Parse one line of a multiline string, writing the result to the writer prepending a + /// newline if necessary. + /// + /// Asserts bytes begins with "\\". 
The line may be terminated with '\n' or "\r\n", but may + /// not contain any interior newlines. + pub fn line(self: *@This(), bytes: []const u8) Writer.Error!void { + assert(bytes.len >= 2 and bytes[0] == '\\' and bytes[1] == '\\'); + var terminator_len: usize = 0; + terminator_len += @intFromBool(bytes[bytes.len - 1] == '\n'); + terminator_len += @intFromBool(bytes[bytes.len - 2] == '\r'); + if (self.first_line) { + self.first_line = false; + } else { + try self.writer.writeByte('\n'); + } + try self.writer.writeAll(bytes[2 .. bytes.len - terminator_len]); + } + }; +} + +pub fn multilineParser(writer: anytype) MultilineParser(@TypeOf(writer)) { + return MultilineParser(@TypeOf(writer)).init(writer); +} + +test "parse multiline" { + // Varying newlines + { + { + var parsed = std.ArrayList(u8).init(std.testing.allocator); + defer parsed.deinit(); + const writer = parsed.writer(); + var parser = multilineParser(writer); + try parser.line("\\\\foo"); + try std.testing.expectEqualStrings("foo", parsed.items); + try parser.line("\\\\bar"); + try std.testing.expectEqualStrings("foo\nbar", parsed.items); + } + + { + const temp = + \\foo + \\bar + ; + try std.testing.expectEqualStrings("foo\nbar", temp); + var parsed = std.ArrayList(u8).init(std.testing.allocator); + defer parsed.deinit(); + const writer = parsed.writer(); + var parser = multilineParser(writer); + try parser.line("\\\\foo"); + try std.testing.expectEqualStrings("foo", parsed.items); + // XXX: this adds the newline but like...does the input ever actually have a newline there? + try parser.line("\\\\bar\n"); + try std.testing.expectEqualStrings("foo\nbar", parsed.items); + } + + { + var parsed = std.ArrayList(u8).init(std.testing.allocator); + defer parsed.deinit(); + const writer = parsed.writer(); + var parser = multilineParser(writer); + try parser.line("\\\\foo"); + try std.testing.expectEqualStrings("foo", parsed.items); + try parser.line("\\\\bar\r\n"); + try std.testing.expectEqualStrings("foo\nbar", parsed.items); + } + + { + var parsed = std.ArrayList(u8).init(std.testing.allocator); + defer parsed.deinit(); + const writer = parsed.writer(); + var parser = multilineParser(writer); + try parser.line("\\\\foo\n"); + try std.testing.expectEqualStrings("foo", parsed.items); + try parser.line("\\\\bar"); + try std.testing.expectEqualStrings("foo\nbar", parsed.items); + } + + { + var parsed = std.ArrayList(u8).init(std.testing.allocator); + defer parsed.deinit(); + const writer = parsed.writer(); + var parser = multilineParser(writer); + try parser.line("\\\\foo\r\n"); + try std.testing.expectEqualStrings("foo", parsed.items); + try parser.line("\\\\bar"); + try std.testing.expectEqualStrings("foo\nbar", parsed.items); + } + } + + // Empty lines + { + { + var parsed = std.ArrayList(u8).init(std.testing.allocator); + defer parsed.deinit(); + const writer = parsed.writer(); + var parser = multilineParser(writer); + try parser.line("\\\\"); + try std.testing.expectEqualStrings("", parsed.items); + try parser.line("\\\\"); + try std.testing.expectEqualStrings("\n", parsed.items); + try parser.line("\\\\foo"); + try std.testing.expectEqualStrings("\n\nfoo", parsed.items); + try parser.line("\\\\bar"); + try std.testing.expectEqualStrings("\n\nfoo\nbar", parsed.items); + } + + { + var parsed = std.ArrayList(u8).init(std.testing.allocator); + defer parsed.deinit(); + const writer = parsed.writer(); + var parser = multilineParser(writer); + try parser.line("\\\\foo"); + try std.testing.expectEqualStrings("foo", parsed.items); + try 
parser.line("\\\\"); + try std.testing.expectEqualStrings("foo\n", parsed.items); + try parser.line("\\\\bar"); + try std.testing.expectEqualStrings("foo\n\nbar", parsed.items); + try parser.line("\\\\"); + try std.testing.expectEqualStrings("foo\n\nbar\n", parsed.items); + } + } + + // No escapes + { + var parsed = std.ArrayList(u8).init(std.testing.allocator); + defer parsed.deinit(); + const writer = parsed.writer(); + var parser = multilineParser(writer); + try parser.line("\\\\no \\n escape"); + try std.testing.expectEqualStrings("no \\n escape", parsed.items); + try parser.line("\\\\still no \\n escape"); + try std.testing.expectEqualStrings("no \\n escape\nstill no \\n escape", parsed.items); + } +} diff --git a/lib/std/zon.zig b/lib/std/zon.zig new file mode 100644 index 000000000000..232229eb3df8 --- /dev/null +++ b/lib/std/zon.zig @@ -0,0 +1,97 @@ +//! ZON serialization and deserialization. +//! +//! # ZON +//! ZON, or Zig Object Notation, is a subset* of Zig used for data storage. ZON contains no type +//! names. +//! +//! Supported Zig primitives: +//! * boolean literals +//! * number literals (including `nan` and `inf`) +//! * character literals +//! * enum literals +//! * `null` and `void` literals +//! * string literals +//! * multiline string literals +//! +//! Supported Zig containers: +//! * anonymous struct literals +//! * anonymous tuple literals +//! * slices +//! * notated as a reference to a tuple literal +//! * this syntax will likely be removed in the future, at which point ZON will not distinguish +//! between slices and tuples +//! +//! Here is an example ZON object: +//! ```zon +//! .{ +//! .a = 1.5, +//! .b = "hello, world!", +//! .c = .{ true, false }, +//! .d = &.{ 1, 2, 3 }, +//! } +//! ``` +//! +//! Individual primitives are also valid ZON, for example: +//! ```zon +//! "This string is a valid ZON object." +//! ``` +//! +//! \* ZON is not currently a true subset of Zig, because it supports `nan` and +//! `inf` literals, which Zig does not. +//! +//! # Deserialization +//! +//! The simplest way to deserialize ZON at runtime is `parseFromSlice`. (For reading ZON at +//! comptime, you can use `@import`.) +//! +//! If you need lower level control, or more detailed diagnostics, you can generate the AST yourself +//! with `std.zig.Ast.parse` and then deserialize it with: +//! * `parseFromAst` +//! * `parseFromAstNoAlloc` +//! +//! If you'd like to deserialize just part of an AST, you can use: +//! * `parseFromAstNode` +//! * `parseFromAstNodeNoAlloc` +//! +//! If you need lower level control than provided by this module, you can operate directly on the +//! results of `std.zig.Ast.parse`. +//! +//! +//! # Serialization +//! +//! The simplest way to serialize to ZON is to call `stringify`. +//! +//! If you need to serialize recursive types, the following functions are also provided: +//! * `stringifyMaxDepth` +//! * `stringifyArbitraryDepth` +//! +//! If you need more control over the serialization process, for example to control which fields are +//! serialized, configure fields individually, or to stringify ZON values that do not exist in +//! memory, you can use `Stringifier`. +//! +//! Note that serializing floats with more than 64 bits may result in a loss of precision +//! (see https://github.com/ziglang/zig/issues/1181). 
+ +pub const ParseOptions = @import("zon/parse.zig").ParseOptions; +pub const ParseStatus = @import("zon/parse.zig").ParseStatus; +pub const parseFromSlice = @import("zon/parse.zig").parseFromSlice; +pub const parseFromAst = @import("zon/parse.zig").parseFromAst; +pub const parseFromAstNoAlloc = @import("zon/parse.zig").parseFromAstNoAlloc; +pub const parseFromAstNode = @import("zon/parse.zig").parseFromAstNode; +pub const parseFromAstNodeNoAlloc = @import("zon/parse.zig").parseFromAstNodeNoAlloc; +pub const parseFree = @import("zon/parse.zig").parseFree; + +pub const StringifierOptions = @import("zon/stringify.zig").StringifierOptions; +pub const StringifyValueOptions = @import("zon/stringify.zig").StringifyValueOptions; +pub const StringifyOptions = @import("zon/stringify.zig").StringifyOptions; +pub const StringifyContainerOptions = @import("zon/stringify.zig").StringifyContainerOptions; +pub const Stringifier = @import("zon/stringify.zig").Stringifier; +pub const stringify = @import("zon/stringify.zig").stringify; +pub const stringifyMaxDepth = @import("zon/stringify.zig").stringifyMaxDepth; +pub const stringifyArbitraryDepth = @import("zon/stringify.zig").stringifyArbitraryDepth; +pub const stringifier = @import("zon/stringify.zig").stringifier; + +test { + _ = @import("zon/parse.zig"); + _ = @import("zon/stringify.zig"); +} diff --git a/lib/std/zon/parse.zig b/lib/std/zon/parse.zig new file mode 100644 index 000000000000..ecc783994ff6 --- /dev/null +++ b/lib/std/zon/parse.zig @@ -0,0 +1,2843 @@ +const std = @import("std"); +const Allocator = std.mem.Allocator; +const Ast = std.zig.Ast; +const NodeIndex = std.zig.Ast.Node.Index; +const TokenIndex = std.zig.Ast.TokenIndex; +const Base = std.zig.number_literal.Base; +const StringLiteralError = std.zig.string_literal.Error; +const NumberLiteralError = std.zig.number_literal.Error; +const assert = std.debug.assert; +const ArrayListUnmanaged = std.ArrayListUnmanaged; + +gpa: Allocator, +ast: *const Ast, +status: ?*ParseStatus, +ident_buf: []u8, + +/// Configuration for the runtime parser. +pub const ParseOptions = struct { + /// If true, unknown fields do not error. + ignore_unknown_fields: bool = false, + /// If true, the parser cleans up partially parsed values on error. This requires some extra + /// bookkeeping, so you may want to turn it off if you don't need this feature (e.g. because + /// you're using arena allocation.) + free_on_error: bool = true, +}; + +/// Information about the success or failure of a parse. +pub const ParseStatus = union { + success: void, + failure: ParseFailure, +}; + +/// Information about a parse failure for presentation to the user via the format functions. 
+pub const ParseFailure = struct { + ast: *const Ast, + token: TokenIndex, + reason: Reason, + + const Reason = union(enum) { + out_of_memory: void, + expected_union: void, + expected_struct: void, + expected_primitive: struct { type_name: []const u8 }, + expected_enum: void, + expected_tuple_with_fields: struct { + fields: usize, + }, + expected_tuple: void, + expected_string: void, + cannot_represent: struct { type_name: []const u8 }, + negative_integer_zero: void, + invalid_string_literal: struct { + err: StringLiteralError, + }, + invalid_number_literal: struct { + err: NumberLiteralError, + }, + unexpected_field: struct { + fields: []const []const u8, + }, + missing_field: struct { + field_name: []const u8, + }, + duplicate_field: void, + type_expr: void, + address_of: void, + }; + + pub fn fmtLocation(self: *const @This()) std.fmt.Formatter(formatLocation) { + return .{ .data = self }; + } + + fn formatLocation( + self: *const @This(), + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, + ) !void { + _ = options; + _ = fmt; + const l = self.ast.tokenLocation(0, self.token); + const offset = switch (self.reason) { + .invalid_string_literal => |r| r.err.offset(), + .invalid_number_literal => |r| r.err.offset(), + else => 0, + }; + try writer.print("{}:{}", .{ l.line + 1, l.column + 1 + offset }); + } + + pub fn fmtError(self: *const @This()) std.fmt.Formatter(formatError) { + return .{ .data = self }; + } + + fn formatError( + self: *const @This(), + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, + ) !void { + _ = options; + _ = fmt; + return switch (self.reason) { + .out_of_memory => writer.writeAll("out of memory"), + .expected_union => writer.writeAll("expected union"), + .expected_struct => writer.writeAll("expected struct"), + .expected_primitive => |r| writer.print("expected {s}", .{r.type_name}), + .expected_enum => writer.writeAll("expected enum literal"), + .expected_tuple_with_fields => |r| { + const plural = if (r.fields == 1) "" else "s"; + try writer.print("expected tuple with {} field{s}", .{ r.fields, plural }); + }, + .expected_tuple => writer.writeAll("expected tuple"), + .expected_string => writer.writeAll("expected string"), + .cannot_represent => |r| writer.print("{s} cannot represent value", .{r.type_name}), + .negative_integer_zero => writer.writeAll("integer literal '-0' is ambiguous"), + .invalid_string_literal => |r| writer.print("{}", .{r.err.fmtWithSource(self.ast.tokenSlice(self.token))}), + .invalid_number_literal => |r| writer.print("{}", .{r.err.fmtWithSource(self.ast.tokenSlice(self.token))}), + .unexpected_field => |r| { + try writer.writeAll("unexpected field, "); + if (r.fields.len == 0) { + try writer.writeAll("no fields expected"); + } else { + try writer.writeAll("supported fields: "); + for (0..r.fields.len) |i| { + if (i != 0) try writer.writeAll(", "); + try writer.print("{}", .{std.zig.fmtId(r.fields[i])}); + } + } + }, + .missing_field => |r| writer.print("missing required field {s}", .{r.field_name}), + .duplicate_field => writer.writeAll("duplicate field"), + .type_expr => writer.writeAll("ZON cannot contain type expressions"), + .address_of => writer.writeAll("ZON cannot take the address of a value"), + }; + } + + pub fn noteCount(self: *const @This()) usize { + switch (self.reason) { + .invalid_number_literal => |r| { + const source = self.ast.tokenSlice(self.token); + return if (r.err.noteWithSource(source) != null) 1 else 0; + }, + else => return 0, + } + } + + const FormatNote 
= struct { + failure: *const ParseFailure, + index: usize, + }; + + pub fn fmtNote(self: *const @This(), index: usize) std.fmt.Formatter(formatNote) { + return .{ .data = .{ .failure = self, .index = index } }; + } + + fn formatNote( + self: FormatNote, + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, + ) !void { + _ = options; + _ = fmt; + switch (self.failure.reason) { + .invalid_number_literal => |r| { + std.debug.assert(self.index == 0); + const source = self.failure.ast.tokenSlice(self.failure.token); + try writer.writeAll(r.err.noteWithSource(source).?); + return; + }, + else => {}, + } + + unreachable; + } + + pub fn format( + self: @This(), + comptime fmt: []const u8, + options: std.fmt.FormatOptions, + writer: anytype, + ) !void { + _ = fmt; + _ = options; + try writer.print("{}: {}", .{ self.fmtLocation(), self.fmtError() }); + } +}; + +test "std.zon failure/oom formatting" { + const gpa = std.testing.allocator; + + // Generate a failure + var ast = try std.zig.Ast.parse(gpa, "\"foo\"", .zon); + defer ast.deinit(gpa); + var failing_allocator = std.testing.FailingAllocator.init(gpa, .{ + .fail_index = 0, + .resize_fail_index = 0, + }); + var status: ParseStatus = undefined; + try std.testing.expectError(error.OutOfMemory, parseFromAst( + []const u8, + failing_allocator.allocator(), + &ast, + &status, + .{}, + )); + + // Verify that we can format the entire failure. + const full = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(full); + try std.testing.expectEqualStrings("1:1: out of memory", full); + try std.testing.expectEqual(0, status.failure.noteCount()); + + // Verify that we can format the location by itself + const location = try std.fmt.allocPrint(gpa, "{}", .{status.failure.fmtLocation()}); + defer gpa.free(location); + try std.testing.expectEqualStrings("1:1", location); + + // Verify that we can format the reason by itself + const reason = try std.fmt.allocPrint(gpa, "{}", .{status.failure.fmtError()}); + defer std.testing.allocator.free(reason); + try std.testing.expectEqualStrings("out of memory", reason); +} + +/// Parses the given ZON source. +/// +/// Returns `error.OutOfMemory` on allocator failure, a `error.Type` error if the ZON could not be +/// deserialized into `T`, or `error.Syntax` error if the ZON was invalid. +/// +/// If detailed failure information is needed, see `parseFromAst`. +pub fn parseFromSlice( + /// The type to deserialize into. May only transitively contain the following supported types: + /// * bools + /// * fixed sized numeric types + /// * enums + /// * slices + /// * arrays + /// * structures + /// * unions + /// * optionals + /// * null + comptime T: type, + /// The allocator. Used to temporarily allocate an AST, and to allocate any parts of `T` that + /// require dynamic allocation. + gpa: Allocator, + /// The ZON source. + source: [:0]const u8, + /// Options for the parser. + comptime options: ParseOptions, +) error{ OutOfMemory, Type, Syntax }!T { + if (@inComptime()) { + // Happens if given e.g. @typeOf(null), the default error we get is hard + // to understand. 
+ @compileError("Runtime parser cannot run at comptime."); + } + var ast = try std.zig.Ast.parse(gpa, source, .zon); + defer ast.deinit(gpa); + if (ast.errors.len != 0) return error.Syntax; + return parseFromAst(T, gpa, &ast, null, options); +} + +test "std.zon parseFromSlice syntax error" { + try std.testing.expectError(error.Syntax, parseFromSlice(u8, std.testing.allocator, ".{", .{})); +} + +/// Like `parseFromSlice`, but operates on an AST instead of on ZON source. Asserts that the AST's +/// `errors` field is empty. +/// +/// Returns `error.OutOfMemory` if allocation fails, or `error.Type` if the ZON could not be +/// deserialized into `T`. +/// +/// If `status` is not null, its success field will be set on success, and its failure field will be +/// set on failure. See `ParseFailure` for formatting ZON parse failures in a human readable +/// manner. For formatting AST errors, see `std.zig.Ast.renderError`. +pub fn parseFromAst(comptime T: type, gpa: Allocator, ast: *const Ast, status: ?*ParseStatus, comptime options: ParseOptions) error{ OutOfMemory, Type }!T { + assert(ast.errors.len == 0); + const data = ast.nodes.items(.data); + const root = data[0].lhs; + return parseFromAstNode(T, gpa, ast, root, status, options); +} + +/// Like `parseFromAst`, but does not take an allocator. +/// +/// Asserts at comptime that no value of type `T` requires dynamic allocation. +pub fn parseFromAstNoAlloc(comptime T: type, ast: *const Ast, status: ?*ParseStatus, comptime options: ParseOptions) error{Type}!T { + assert(ast.errors.len == 0); + const data = ast.nodes.items(.data); + const root = data[0].lhs; + return parseFromAstNodeNoAlloc(T, ast, root, status, options); +} + +test "std.zon parseFromAstNoAlloc" { + var ast = try std.zig.Ast.parse(std.testing.allocator, ".{ .x = 1.5, .y = 2.5 }", .zon); + defer ast.deinit(std.testing.allocator); + try std.testing.expectEqual(ast.errors.len, 0); + + const S = struct { x: f32, y: f32 }; + const found = try parseFromAstNoAlloc(S, &ast, null, .{}); + try std.testing.expectEqual(S{ .x = 1.5, .y = 2.5 }, found); +} + +/// Like `parseFromAst`, but the parse starts on `node` instead of on the root of the AST. +pub fn parseFromAstNode(comptime T: type, gpa: Allocator, ast: *const Ast, node: NodeIndex, status: ?*ParseStatus, comptime options: ParseOptions) error{ OutOfMemory, Type }!T { + assert(ast.errors.len == 0); + var ident_buf: [maxIdentLength(T)]u8 = undefined; + var parser = @This(){ + .gpa = gpa, + .ast = ast, + .status = status, + .ident_buf = &ident_buf, + }; + + // Attempt the parse, setting status and returning early if it fails + const result = parser.parseExpr(T, options, node) catch |err| switch (err) { + error.ParserOutOfMemory => return error.OutOfMemory, + error.Type => return error.Type, + }; + + // Set status to success and return the result + if (status) |s| s.* = .{ .success = {} }; + return result; +} + +/// Like `parseFromAstNode`, but does not take an allocator. +/// +/// Asserts at comptime that no value of type `T` requires dynamic allocation. 
+pub fn parseFromAstNodeNoAlloc(comptime T: type, ast: *const Ast, node: NodeIndex, status: ?*ParseStatus, comptime options: ParseOptions) error{Type}!T { + assert(ast.errors.len == 0); + if (comptime requiresAllocator(T)) { + @compileError(@typeName(T) ++ ": requires allocator"); + } + var buffer: [0]u8 = .{}; + var fba = std.heap.FixedBufferAllocator.init(&buffer); + return parseFromAstNode(T, fba.allocator(), ast, node, status, options) catch |e| switch (e) { + error.OutOfMemory => unreachable, // No allocations + else => |other| return other, + }; +} + +test "std.zon parseFromAstNode and parseFromAstNodeNoAlloc" { + const gpa = std.testing.allocator; + + var ast = try std.zig.Ast.parse(gpa, ".{ .vec = .{ .x = 1.5, .y = 2.5 } }", .zon); + defer ast.deinit(gpa); + try std.testing.expect(ast.errors.len == 0); + + const data = ast.nodes.items(.data); + const root = data[0].lhs; + var buf: [2]NodeIndex = undefined; + const init = ast.fullStructInit(&buf, root).?; + + const Vec2 = struct { x: f32, y: f32 }; + const parsed = try parseFromAstNode(Vec2, gpa, &ast, init.ast.fields[0], null, .{}); + const parsed_no_alloc = try parseFromAstNodeNoAlloc(Vec2, &ast, init.ast.fields[0], null, .{}); + try std.testing.expectEqual(Vec2{ .x = 1.5, .y = 2.5 }, parsed); + try std.testing.expectEqual(Vec2{ .x = 1.5, .y = 2.5 }, parsed_no_alloc); +} + +fn requiresAllocator(comptime T: type) bool { + // Keep in sync with parseFree, stringify, and requiresAllocator. + return switch (@typeInfo(T)) { + .pointer => true, + .array => |array| requiresAllocator(array.child), + .@"struct" => |@"struct"| inline for (@"struct".fields) |field| { + if (requiresAllocator(field.type)) { + break true; + } + } else false, + .@"union" => |@"union"| inline for (@"union".fields) |field| { + if (requiresAllocator(field.type)) { + break true; + } + } else false, + .optional => |optional| requiresAllocator(optional.child), + else => false, + }; +} + +test "std.zon requiresAllocator" { + try std.testing.expect(!requiresAllocator(u8)); + try std.testing.expect(!requiresAllocator(f32)); + try std.testing.expect(!requiresAllocator(enum { foo })); + try std.testing.expect(!requiresAllocator(struct { f32 })); + try std.testing.expect(!requiresAllocator(struct { x: f32 })); + try std.testing.expect(!requiresAllocator([2]u8)); + try std.testing.expect(!requiresAllocator(union { x: f32, y: f32 })); + try std.testing.expect(!requiresAllocator(union(enum) { x: f32, y: f32 })); + try std.testing.expect(!requiresAllocator(?f32)); + try std.testing.expect(!requiresAllocator(void)); + try std.testing.expect(!requiresAllocator(@TypeOf(null))); + + try std.testing.expect(requiresAllocator([]u8)); + try std.testing.expect(requiresAllocator(*struct { u8, u8 })); + try std.testing.expect(requiresAllocator([1][]const u8)); + try std.testing.expect(requiresAllocator(struct { x: i32, y: []u8 })); + try std.testing.expect(requiresAllocator(union { x: i32, y: []u8 })); + try std.testing.expect(requiresAllocator(union(enum) { x: i32, y: []u8 })); + try std.testing.expect(requiresAllocator(?[]u8)); +} + +fn maxIdentLength(comptime T: type) usize { + // Keep in sync with `parseExpr`. 
+ comptime var max = 0; + switch (@typeInfo(T)) { + .bool, .int, .float, .null, .void => {}, + .pointer => |pointer| max = comptime maxIdentLength(pointer.child), + .array => |array| if (array.len > 0) { + max = comptime maxIdentLength(array.child); + }, + .@"struct" => |@"struct"| inline for (@"struct".fields) |field| { + if (!@"struct".is_tuple) { + max = @max(max, field.name.len); + } + max = @max(max, comptime maxIdentLength(field.type)); + }, + .@"union" => |@"union"| inline for (@"union".fields) |field| { + max = @max(max, field.name.len); + max = @max(max, comptime maxIdentLength(field.type)); + }, + .@"enum" => |@"enum"| inline for (@"enum".fields) |field| { + max = @max(max, field.name.len); + }, + .optional => |optional| max = comptime maxIdentLength(optional.child), + else => unreachable, + } + return max; +} + +test "std.zon maxIdentLength" { + // Primitives + try std.testing.expectEqual(0, maxIdentLength(bool)); + try std.testing.expectEqual(0, maxIdentLength(u8)); + try std.testing.expectEqual(0, maxIdentLength(f32)); + try std.testing.expectEqual(0, maxIdentLength(@TypeOf(null))); + try std.testing.expectEqual(0, maxIdentLength(void)); + + // Arrays + try std.testing.expectEqual(0, maxIdentLength([0]u8)); + try std.testing.expectEqual(0, maxIdentLength([5]u8)); + try std.testing.expectEqual(3, maxIdentLength([5]struct { abc: f32 })); + try std.testing.expectEqual(0, maxIdentLength([0]struct { abc: f32 })); + + // Structs + try std.testing.expectEqual(0, maxIdentLength(struct {})); + try std.testing.expectEqual(1, maxIdentLength(struct { a: f32, b: f32 })); + try std.testing.expectEqual(3, maxIdentLength(struct { abc: f32, a: f32 })); + try std.testing.expectEqual(3, maxIdentLength(struct { a: f32, abc: f32 })); + + try std.testing.expectEqual(1, maxIdentLength(struct { a: struct { a: f32 }, b: struct { a: f32 } })); + try std.testing.expectEqual(3, maxIdentLength(struct { a: struct { abc: f32 }, b: struct { a: f32 } })); + try std.testing.expectEqual(3, maxIdentLength(struct { a: struct { a: f32 }, b: struct { abc: f32 } })); + + // Tuples + try std.testing.expectEqual(0, maxIdentLength(struct { f32, u32 })); + try std.testing.expectEqual(3, maxIdentLength(struct { struct { a: u32 }, struct { abc: u32 } })); + try std.testing.expectEqual(3, maxIdentLength(struct { struct { abc: u32 }, struct { a: u32 } })); + + // Unions + try std.testing.expectEqual(0, maxIdentLength(union {})); + + try std.testing.expectEqual(1, maxIdentLength(union { a: f32, b: f32 })); + try std.testing.expectEqual(3, maxIdentLength(union { abc: f32, a: f32 })); + try std.testing.expectEqual(3, maxIdentLength(union { a: f32, abc: f32 })); + + try std.testing.expectEqual(1, maxIdentLength(union { a: union { a: f32 }, b: union { a: f32 } })); + try std.testing.expectEqual(3, maxIdentLength(union { a: union { abc: f32 }, b: union { a: f32 } })); + try std.testing.expectEqual(3, maxIdentLength(union { a: union { a: f32 }, b: union { abc: f32 } })); + + // Enums + try std.testing.expectEqual(0, maxIdentLength(enum {})); + try std.testing.expectEqual(3, maxIdentLength(enum { a, abc })); + try std.testing.expectEqual(3, maxIdentLength(enum { abc, a })); + try std.testing.expectEqual(1, maxIdentLength(enum { a, b })); + + // Optionals + try std.testing.expectEqual(0, maxIdentLength(?u32)); + try std.testing.expectEqual(3, maxIdentLength(?struct { abc: u32 })); + + // Pointers + try std.testing.expectEqual(0, maxIdentLength(*u32)); + try std.testing.expectEqual(3, maxIdentLength(*struct { abc: u32 })); +} + +/// 
Frees values created by the runtime parser. +/// +/// Provided for convenience, you may also free these values on your own using the same allocator +/// passed into the parser. +/// +/// Asserts at comptime that sufficient information is available to free this type of value. +/// Untagged unions, for example, can be parsed but not freed. +pub fn parseFree(gpa: Allocator, value: anytype) void { + const Value = @TypeOf(value); + + // Keep in sync with parseFree, stringify, and requiresAllocator. + switch (@typeInfo(Value)) { + .bool, .int, .float, .@"enum" => {}, + .pointer => |pointer| { + switch (pointer.size) { + .One, .Many, .C => if (comptime requiresAllocator(Value)) { + @compileError(@typeName(Value) ++ ": parseFree cannot free non slice pointers"); + }, + .Slice => for (value) |item| { + parseFree(gpa, item); + }, + } + return gpa.free(value); + }, + .array => for (value) |item| { + parseFree(gpa, item); + }, + .@"struct" => |@"struct"| inline for (@"struct".fields) |field| { + parseFree(gpa, @field(value, field.name)); + }, + .@"union" => |@"union"| if (@"union".tag_type == null) { + if (comptime requiresAllocator(Value)) { + @compileError(@typeName(Value) ++ ": parseFree cannot free untagged unions"); + } + } else switch (value) { + inline else => |_, tag| { + parseFree(gpa, @field(value, @tagName(tag))); + }, + }, + .optional => if (value) |some| { + parseFree(gpa, some); + }, + .void => {}, + .null => {}, + else => @compileError(@typeName(Value) ++ ": parseFree cannot free this type"), + } +} + +fn parseExpr( + self: *@This(), + comptime T: type, + comptime options: ParseOptions, + node: NodeIndex, +) error{ ParserOutOfMemory, Type }!T { + // Check for address of up front so we can emit a friendlier error (otherwise it will just say + // that the type is wrong, which may be confusing.) + const tags = self.ast.nodes.items(.tag); + if (tags[node] == .address_of) { + const main_tokens = self.ast.nodes.items(.main_token); + const token = main_tokens[node]; + return self.fail(token, .address_of); + } + + // Keep in sync with parseFree, stringify, and requiresAllocator. 
+ switch (@typeInfo(T)) { + .bool => return self.parseBool(node), + .int, .float => return self.parseNumber(T, node), + .@"enum" => return self.parseEnumLiteral(T, node), + .pointer => return self.parsePointer(T, options, node), + .array => return self.parseArray(T, options, node), + .@"struct" => |@"struct"| if (@"struct".is_tuple) + return self.parseTuple(T, options, node) + else + return self.parseStruct(T, options, node), + .@"union" => return self.parseUnion(T, options, node), + .optional => return self.parseOptional(T, options, node), + .void => return self.parseVoid(node), + + else => @compileError(@typeName(T) ++ ": cannot parse this type"), + } +} + +fn parseVoid(self: @This(), node: NodeIndex) error{ ParserOutOfMemory, Type }!void { + const main_tokens = self.ast.nodes.items(.main_token); + const token = main_tokens[node]; + const tags = self.ast.nodes.items(.tag); + const data = self.ast.nodes.items(.data); + switch (tags[node]) { + .block_two => if (data[node].lhs != 0 or data[node].rhs != 0) { + return self.fail(token, .{ .expected_primitive = .{ .type_name = "void" } }); + }, + .block => if (data[node].lhs != data[node].rhs) { + return self.fail(token, .{ .expected_primitive = .{ .type_name = "void" } }); + }, + else => return self.fail(token, .{ .expected_primitive = .{ .type_name = "void" } }), + } +} + +test "std.zon void" { + const gpa = std.testing.allocator; + + const parsed: void = try parseFromSlice(void, gpa, "{}", .{}); + _ = parsed; + + // Freeing void is a noop, but it should compile! + const free: void = try parseFromSlice(void, gpa, "{}", .{}); + defer parseFree(gpa, free); + + // Other type + { + var ast = try std.zig.Ast.parse(gpa, "123", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(void, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected void", formatted); + } +} + +fn parseOptional(self: *@This(), comptime T: type, comptime options: ParseOptions, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const optional = @typeInfo(T).optional; + + const tags = self.ast.nodes.items(.tag); + if (tags[node] == .identifier) { + const main_tokens = self.ast.nodes.items(.main_token); + const token = main_tokens[node]; + const bytes = self.ast.tokenSlice(token); + if (std.mem.eql(u8, bytes, "null")) { + return null; + } + } + + return try self.parseExpr(optional.child, options, node); +} + +test "std.zon optional" { + const gpa = std.testing.allocator; + + // Basic usage + { + const none = try parseFromSlice(?u32, gpa, "null", .{}); + try std.testing.expect(none == null); + const some = try parseFromSlice(?u32, gpa, "1", .{}); + try std.testing.expect(some.? 
== 1); + } + + // Deep free + { + const none = try parseFromSlice(?[]const u8, gpa, "null", .{}); + try std.testing.expect(none == null); + const some = try parseFromSlice(?[]const u8, gpa, "\"foo\"", .{}); + defer parseFree(gpa, some); + try std.testing.expectEqualStrings("foo", some.?); + } +} + +fn parseUnion(self: *@This(), comptime T: type, comptime options: ParseOptions, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const @"union" = @typeInfo(T).@"union"; + const field_infos = @"union".fields; + + if (field_infos.len == 0) { + @compileError(@typeName(T) ++ ": cannot parse unions with no fields"); + } + + // Gather info on the fields + const field_indices = b: { + comptime var kvs_list: [field_infos.len]struct { []const u8, usize } = undefined; + inline for (field_infos, 0..) |field, i| { + kvs_list[i] = .{ field.name, i }; + } + break :b std.StaticStringMap(usize).initComptime(kvs_list); + }; + + // Parse the union + const main_tokens = self.ast.nodes.items(.main_token); + const token = main_tokens[node]; + const tags = self.ast.nodes.items(.tag); + if (tags[node] == .enum_literal) { + // The union must be tagged for an enum literal to coerce to it + if (@"union".tag_type == null) { + return self.fail(main_tokens[node], .expected_union); + } + + // Get the index of the named field. We don't use `parseEnum` here as + // the order of the enum and the order of the union might not match! + const field_index = b: { + const bytes = try self.parseIdent(T, token); + break :b field_indices.get(bytes) orelse + return self.failUnexpectedField(T, token); + }; + + // Initialize the union from the given field. + switch (field_index) { + inline 0...field_infos.len - 1 => |i| { + // Fail if the field is not void + if (field_infos[i].type != void) + return self.fail(token, .expected_union); + + // Instantiate the union + return @unionInit(T, field_infos[i].name, {}); + }, + else => unreachable, // Can't be out of bounds + } + } else { + var buf: [2]NodeIndex = undefined; + const field_nodes = try self.fields(T, &buf, node); + + if (field_nodes.len != 1) { + return self.fail(token, .expected_union); + } + + // Fill in the field we found + const field_node = field_nodes[0]; + const field_token = self.ast.firstToken(field_node) - 2; + const field_index = b: { + const name = try self.parseIdent(T, field_token); + break :b field_indices.get(name) orelse + return self.failUnexpectedField(T, field_token); + }; + + switch (field_index) { + inline 0...field_infos.len - 1 => |i| { + const value = try self.parseExpr(field_infos[i].type, options, field_node); + return @unionInit(T, field_infos[i].name, value); + }, + else => unreachable, // Can't be out of bounds + } + } +} + +test "std.zon unions" { + const gpa = std.testing.allocator; + + // Unions + { + const Tagged = union(enum) { x: f32, @"y y": bool, z, @"z z" }; + const Untagged = union { x: f32, @"y y": bool, z: void, @"z z": void }; + + const tagged_x = try parseFromSlice(Tagged, gpa, ".{.x = 1.5}", .{}); + try std.testing.expectEqual(Tagged{ .x = 1.5 }, tagged_x); + const tagged_y = try parseFromSlice(Tagged, gpa, ".{.@\"y y\" = true}", .{}); + try std.testing.expectEqual(Tagged{ .@"y y" = true }, tagged_y); + const tagged_z_shorthand = try parseFromSlice(Tagged, gpa, ".z", .{}); + try std.testing.expectEqual(@as(Tagged, .z), tagged_z_shorthand); + const tagged_zz_shorthand = try parseFromSlice(Tagged, gpa, ".@\"z z\"", .{}); + try std.testing.expectEqual(@as(Tagged, .@"z z"), tagged_zz_shorthand); + const tagged_z_explicit = try 
parseFromSlice(Tagged, gpa, ".{.z = {}}", .{}); + try std.testing.expectEqual(Tagged{ .z = {} }, tagged_z_explicit); + const tagged_zz_explicit = try parseFromSlice(Tagged, gpa, ".{.@\"z z\" = {}}", .{}); + try std.testing.expectEqual(Tagged{ .@"z z" = {} }, tagged_zz_explicit); + + const untagged_x = try parseFromSlice(Untagged, gpa, ".{.x = 1.5}", .{}); + try std.testing.expect(untagged_x.x == 1.5); + const untagged_y = try parseFromSlice(Untagged, gpa, ".{.@\"y y\" = true}", .{}); + try std.testing.expect(untagged_y.@"y y"); + } + + // Deep free + { + const Union = union(enum) { bar: []const u8, baz: bool }; + + const noalloc = try parseFromSlice(Union, gpa, ".{.baz = false}", .{}); + try std.testing.expectEqual(Union{ .baz = false }, noalloc); + + const alloc = try parseFromSlice(Union, gpa, ".{.bar = \"qux\"}", .{}); + defer parseFree(gpa, alloc); + try std.testing.expectEqualDeep(Union{ .bar = "qux" }, alloc); + } + + // Unknown field + { + const Union = union { x: f32, y: f32 }; + var ast = try std.zig.Ast.parse(gpa, ".{.z=2.5}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Union, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:4: unexpected field, supported fields: x, y", formatted); + } + + // Unknown field with name that's too long for parse + { + const Union = union { x: f32, y: f32 }; + var ast = try std.zig.Ast.parse(gpa, ".{.@\"abc\"=2.5}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Union, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:4: unexpected field, supported fields: x, y", formatted); + } + + // Extra field + { + const Union = union { x: f32, y: bool }; + var ast = try std.zig.Ast.parse(gpa, ".{.x = 1.5, .y = true}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Union, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected union", formatted); + } + + // No fields + { + const Union = union { x: f32, y: bool }; + var ast = try std.zig.Ast.parse(gpa, ".{}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Union, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected union", formatted); + } + + // Enum literals cannot coerce into untagged unions + { + const Union = union { x: void }; + var ast = try std.zig.Ast.parse(gpa, ".x", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Union, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected union", formatted); + } + + // Unknown field for enum literal coercion + { + const Union = union(enum) { x: void }; + var ast = try std.zig.Ast.parse(gpa, ".y", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + 
try std.testing.expectError(error.Type, parseFromAst(Union, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: unexpected field, supported fields: x", formatted); + } + + // Unknown field for enum literal coercion that's too long for parse + { + const Union = union(enum) { x: void }; + var ast = try std.zig.Ast.parse(gpa, ".@\"abc\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Union, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: unexpected field, supported fields: x", formatted); + } + + // Non void field for enum literal coercion + { + const Union = union(enum) { x: f32 }; + var ast = try std.zig.Ast.parse(gpa, ".x", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Union, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected union", formatted); + } + + // Union field with @ + { + const U = union(enum) { x: void }; + const tag = try parseFromSlice(U, gpa, ".@\"x\"", .{}); + try std.testing.expectEqual(@as(U, .x), tag); + const initializer = try parseFromSlice(U, gpa, ".{.@\"x\" = {}}", .{}); + try std.testing.expectEqual(U{ .x = {} }, initializer); + } +} + +fn elements( + self: @This(), + comptime T: type, + buf: *[2]NodeIndex, + node: NodeIndex, +) error{Type}![]const NodeIndex { + const main_tokens = self.ast.nodes.items(.main_token); + + // Attempt to parse as an array + if (self.ast.fullArrayInit(buf, node)) |init| { + if (init.ast.type_expr != 0) { + return self.failTypeExpr(main_tokens[init.ast.type_expr]); + } + return init.ast.elements; + } + + // Attempt to parse as a struct with no fields + if (self.ast.fullStructInit(buf, node)) |init| { + if (init.ast.type_expr != 0) { + return self.failTypeExpr(main_tokens[init.ast.type_expr]); + } + if (init.ast.fields.len == 0) { + return init.ast.fields; + } + } + + return self.failExpectedContainer(T, main_tokens[node]); +} + +fn fields( + self: @This(), + comptime T: type, + buf: *[2]NodeIndex, + node: NodeIndex, +) error{Type}![]const NodeIndex { + const main_tokens = self.ast.nodes.items(.main_token); + + // Attempt to parse as a struct + if (self.ast.fullStructInit(buf, node)) |init| { + if (init.ast.type_expr != 0) { + return self.failTypeExpr(main_tokens[init.ast.type_expr]); + } + return init.ast.fields; + } + + // Attempt to parse as a zero length array + if (self.ast.fullArrayInit(buf, node)) |init| { + if (init.ast.type_expr != 0) { + return self.failTypeExpr(main_tokens[init.ast.type_expr]); + } + if (init.ast.elements.len != 0) { + return self.failExpectedContainer(T, main_tokens[node]); + } + return init.ast.elements; + } + + // Fail otherwise + return self.failExpectedContainer(T, main_tokens[node]); +} + +fn parseStruct(self: *@This(), comptime T: type, comptime options: ParseOptions, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const @"struct" = @typeInfo(T).@"struct"; + const field_infos = @"struct".fields; + + // Gather info on the fields + const field_indices = b: { + comptime var kvs_list: [field_infos.len]struct { []const u8, usize } = undefined; + inline for (field_infos, 0..) 
|field, i| { + kvs_list[i] = .{ field.name, i }; + } + break :b std.StaticStringMap(usize).initComptime(kvs_list); + }; + + // Parse the struct + var buf: [2]NodeIndex = undefined; + const field_nodes = try self.fields(T, &buf, node); + + var result: T = undefined; + var field_found: [field_infos.len]bool = .{false} ** field_infos.len; + + // If we fail partway through, free all already initialized fields + var initialized: usize = 0; + errdefer if (options.free_on_error and field_infos.len > 0) { + for (field_nodes[0..initialized]) |initialized_field_node| { + const name_runtime = self.parseIdent(T, self.ast.firstToken(initialized_field_node) - 2) catch unreachable; + switch (field_indices.get(name_runtime) orelse continue) { + inline 0...(field_infos.len - 1) => |name_index| { + const name = field_infos[name_index].name; + parseFree(self.gpa, @field(result, name)); + }, + else => unreachable, // Can't be out of bounds + } + } + }; + + // Fill in the fields we found + for (field_nodes) |field_node| { + const name_token = self.ast.firstToken(field_node) - 2; + const i = b: { + const name = try self.parseIdent(T, name_token); + break :b field_indices.get(name) orelse if (options.ignore_unknown_fields) { + continue; + } else { + return self.failUnexpectedField(T, name_token); + }; + }; + + // We now know the array is not zero sized (assert this so the code compiles) + if (field_found.len == 0) unreachable; + + if (field_found[i]) { + return self.failDuplicateField(name_token); + } + field_found[i] = true; + + switch (i) { + inline 0...(field_infos.len - 1) => |j| @field(result, field_infos[j].name) = try self.parseExpr(field_infos[j].type, options, field_node), + else => unreachable, // Can't be out of bounds + } + + initialized += 1; + } + + // Fill in any missing default fields + inline for (field_found, 0..) 
|found, i| { + if (!found) { + const field_info = @"struct".fields[i]; + if (field_info.default_value) |default| { + const typed: *const field_info.type = @ptrCast(@alignCast(default)); + @field(result, field_info.name) = typed.*; + } else { + const main_tokens = self.ast.nodes.items(.main_token); + return self.failMissingField(field_infos[i].name, main_tokens[node]); + } + } + } + + return result; +} + +test "std.zon structs" { + const gpa = std.testing.allocator; + + // Structs (various sizes tested since they're parsed differently) + { + const Vec0 = struct {}; + const Vec1 = struct { x: f32 }; + const Vec2 = struct { x: f32, y: f32 }; + const Vec3 = struct { x: f32, y: f32, z: f32 }; + + const zero = try parseFromSlice(Vec0, gpa, ".{}", .{}); + try std.testing.expectEqual(Vec0{}, zero); + + const one = try parseFromSlice(Vec1, gpa, ".{.x = 1.2}", .{}); + try std.testing.expectEqual(Vec1{ .x = 1.2 }, one); + + const two = try parseFromSlice(Vec2, gpa, ".{.x = 1.2, .y = 3.4}", .{}); + try std.testing.expectEqual(Vec2{ .x = 1.2, .y = 3.4 }, two); + + const three = try parseFromSlice(Vec3, gpa, ".{.x = 1.2, .y = 3.4, .z = 5.6}", .{}); + try std.testing.expectEqual(Vec3{ .x = 1.2, .y = 3.4, .z = 5.6 }, three); + } + + // Deep free (structs and arrays) + { + const Foo = struct { bar: []const u8, baz: []const []const u8 }; + + const parsed = try parseFromSlice(Foo, gpa, ".{.bar = \"qux\", .baz = .{\"a\", \"b\"}}", .{}); + defer parseFree(gpa, parsed); + try std.testing.expectEqualDeep(Foo{ .bar = "qux", .baz = &.{ "a", "b" } }, parsed); + } + + // Unknown field + { + const Vec2 = struct { x: f32, y: f32 }; + var ast = try std.zig.Ast.parse(gpa, ".{.x=1.5, .z=2.5}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Vec2, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:12: unexpected field, supported fields: x, y", formatted); + } + + // Unknown field too long for parse + { + const Vec2 = struct { x: f32, y: f32 }; + var ast = try std.zig.Ast.parse(gpa, ".{.x=1.5, .@\"abc\"=2.5}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Vec2, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:12: unexpected field, supported fields: x, y", formatted); + } + + // Duplicate field + { + const Vec2 = struct { x: f32, y: f32 }; + var ast = try std.zig.Ast.parse(gpa, ".{.x=1.5, .x=2.5}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Vec2, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:12: duplicate field", formatted); + } + + // Ignore unknown fields + { + const Vec2 = struct { x: f32, y: f32 = 2.0 }; + const parsed = try parseFromSlice(Vec2, gpa, ".{ .x = 1.0, .z = 3.0 }", .{ + .ignore_unknown_fields = true, + }); + try std.testing.expectEqual(Vec2{ .x = 1.0, .y = 2.0 }, parsed); + } + + // Unknown field when struct has no fields (regression test) + { + const Vec2 = struct {}; + var ast = try std.zig.Ast.parse(gpa, ".{.x=1.5, .z=2.5}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try 
std.testing.expectError(error.Type, parseFromAst(Vec2, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:4: unexpected field, no fields expected", formatted); + } + + // Missing field + { + const Vec2 = struct { x: f32, y: f32 }; + var ast = try std.zig.Ast.parse(gpa, ".{.x=1.5}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Vec2, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: missing required field y", formatted); + } + + // Default field + { + const Vec2 = struct { x: f32, y: f32 = 1.5 }; + const parsed = try parseFromSlice(Vec2, gpa, ".{.x = 1.2}", .{}); + try std.testing.expectEqual(Vec2{ .x = 1.2, .y = 1.5 }, parsed); + } + + // Enum field (regression test, we were previously getting the field name in an + // incorrect way that broke for enum values) + { + const Vec0 = struct { x: enum { x } }; + const parsed = try parseFromSlice(Vec0, gpa, ".{ .x = .x }", .{}); + try std.testing.expectEqual(Vec0{ .x = .x }, parsed); + } + + // Enum field and struct field with @ + { + const Vec0 = struct { @"x x": enum { @"x x" } }; + const parsed = try parseFromSlice(Vec0, gpa, ".{ .@\"x x\" = .@\"x x\" }", .{}); + try std.testing.expectEqual(Vec0{ .@"x x" = .@"x x" }, parsed); + } + + // Type expressions are not allowed + { + // Structs + { + const Empty = struct {}; + + var ast = try std.zig.Ast.parse(gpa, "Empty{}", .zon); + defer ast.deinit(gpa); + + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Empty, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: ZON cannot contain type expressions", formatted); + } + + // Arrays + { + var ast = try std.zig.Ast.parse(gpa, "[3]u8{1, 2, 3}", .zon); + defer ast.deinit(gpa); + + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([3]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: ZON cannot contain type expressions", formatted); + } + + // Slices + { + var ast = try std.zig.Ast.parse(gpa, "[]u8{1, 2, 3}", .zon); + defer ast.deinit(gpa); + + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: ZON cannot contain type expressions", formatted); + } + + // Tuples + { + const Tuple = struct { i32, i32, i32 }; + var ast = try std.zig.Ast.parse(gpa, "Tuple{1, 2, 3}", .zon); + defer ast.deinit(gpa); + + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Tuple, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: ZON cannot contain type expressions", formatted); + } + } +} + +fn parseTuple(self: *@This(), comptime T: type, comptime options: ParseOptions, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const field_infos = 
@typeInfo(T).@"struct".fields; + + var result: T = undefined; + + // Parse the struct + var buf: [2]NodeIndex = undefined; + const field_nodes = try self.elements(T, &buf, node); + + if (field_nodes.len != field_infos.len) { + const main_tokens = self.ast.nodes.items(.main_token); + return self.failExpectedContainer(T, main_tokens[node]); + } + + inline for (field_infos, field_nodes, 0..) |field_info, field_node, initialized| { + // If we fail to parse this field, free all fields before it + errdefer if (options.free_on_error) { + inline for (0..field_infos.len) |i| { + if (i >= initialized) break; + parseFree(self.gpa, result[i]); + } + }; + + result[initialized] = try self.parseExpr(field_info.type, options, field_node); + } + + return result; +} + +test "std.zon tuples" { + const gpa = std.testing.allocator; + + // Structs (various sizes tested since they're parsed differently) + { + const Tuple0 = struct {}; + const Tuple1 = struct { f32 }; + const Tuple2 = struct { f32, bool }; + const Tuple3 = struct { f32, bool, u8 }; + + const zero = try parseFromSlice(Tuple0, gpa, ".{}", .{}); + try std.testing.expectEqual(Tuple0{}, zero); + + const one = try parseFromSlice(Tuple1, gpa, ".{1.2}", .{}); + try std.testing.expectEqual(Tuple1{1.2}, one); + + const two = try parseFromSlice(Tuple2, gpa, ".{1.2, true}", .{}); + try std.testing.expectEqual(Tuple2{ 1.2, true }, two); + + const three = try parseFromSlice(Tuple3, gpa, ".{1.2, false, 3}", .{}); + try std.testing.expectEqual(Tuple3{ 1.2, false, 3 }, three); + } + + // Deep free + { + const Tuple = struct { []const u8, []const u8 }; + const parsed = try parseFromSlice(Tuple, gpa, ".{\"hello\", \"world\"}", .{}); + defer parseFree(gpa, parsed); + try std.testing.expectEqualDeep(Tuple{ "hello", "world" }, parsed); + } + + // Extra field + { + const Tuple = struct { f32, bool }; + var ast = try std.zig.Ast.parse(gpa, ".{0.5, true, 123}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Tuple, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected tuple with 2 fields", formatted); + } + + // Extra field + { + const Tuple = struct { f32, bool }; + var ast = try std.zig.Ast.parse(gpa, ".{0.5}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Tuple, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected tuple with 2 fields", formatted); + } + + // Tuple with unexpected field names + { + const Tuple = struct { f32 }; + var ast = try std.zig.Ast.parse(gpa, ".{.foo = 10.0}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Tuple, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected tuple with 1 field", formatted); + } + + // Struct with missing field names + { + const Struct = struct { foo: f32 }; + var ast = try std.zig.Ast.parse(gpa, ".{10.0}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Struct, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, 
"{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected struct", formatted); + } +} + +fn parseArray(self: *@This(), comptime T: type, comptime options: ParseOptions, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const array_info = @typeInfo(T).array; + // Parse the array + var array: T = undefined; + var buf: [2]NodeIndex = undefined; + const element_nodes = try self.elements(T, &buf, node); + + // Check if the size matches + if (element_nodes.len != array_info.len) { + const main_tokens = self.ast.nodes.items(.main_token); + return self.failExpectedContainer(T, main_tokens[node]); + } + + // Parse the elements and return the array + for (&array, element_nodes, 0..) |*element, element_node, initialized| { + // If we fail to parse this field, free all fields before it + errdefer if (options.free_on_error) { + for (array[0..initialized]) |initialized_item| { + parseFree(self.gpa, initialized_item); + } + }; + + element.* = try self.parseExpr(array_info.child, options, element_node); + } + return array; +} + +// Test sizes 0 to 3 since small sizes get parsed differently +test "std.zon arrays and slices" { + // Issue: https://github.com/ziglang/zig/issues/20881 + if (@import("builtin").zig_backend == .stage2_c) return error.SkipZigTest; + + const gpa = std.testing.allocator; + + // Literals + { + // Arrays + { + const zero = try parseFromSlice([0]u8, gpa, ".{}", .{}); + try std.testing.expectEqualSlices(u8, &@as([0]u8, .{}), &zero); + + const one = try parseFromSlice([1]u8, gpa, ".{'a'}", .{}); + try std.testing.expectEqualSlices(u8, &@as([1]u8, .{'a'}), &one); + + const two = try parseFromSlice([2]u8, gpa, ".{'a', 'b'}", .{}); + try std.testing.expectEqualSlices(u8, &@as([2]u8, .{ 'a', 'b' }), &two); + + const two_comma = try parseFromSlice([2]u8, gpa, ".{'a', 'b',}", .{}); + try std.testing.expectEqualSlices(u8, &@as([2]u8, .{ 'a', 'b' }), &two_comma); + + const three = try parseFromSlice([3]u8, gpa, ".{'a', 'b', 'c'}", .{}); + try std.testing.expectEqualSlices(u8, &.{ 'a', 'b', 'c' }, &three); + + const sentinel = try parseFromSlice([3:'z']u8, gpa, ".{'a', 'b', 'c'}", .{}); + const expected_sentinel: [3:'z']u8 = .{ 'a', 'b', 'c' }; + try std.testing.expectEqualSlices(u8, &expected_sentinel, &sentinel); + } + + // Slice literals + { + const zero = try parseFromSlice([]const u8, gpa, ".{}", .{}); + defer parseFree(gpa, zero); + try std.testing.expectEqualSlices(u8, @as([]const u8, &.{}), zero); + + const one = try parseFromSlice([]u8, gpa, ".{'a'}", .{}); + defer parseFree(gpa, one); + try std.testing.expectEqualSlices(u8, &.{'a'}, one); + + const two = try parseFromSlice([]const u8, gpa, ".{'a', 'b'}", .{}); + defer parseFree(gpa, two); + try std.testing.expectEqualSlices(u8, &.{ 'a', 'b' }, two); + + const two_comma = try parseFromSlice([]const u8, gpa, ".{'a', 'b',}", .{}); + defer parseFree(gpa, two_comma); + try std.testing.expectEqualSlices(u8, &.{ 'a', 'b' }, two_comma); + + const three = try parseFromSlice([]u8, gpa, ".{'a', 'b', 'c'}", .{}); + defer parseFree(gpa, three); + try std.testing.expectEqualSlices(u8, &.{ 'a', 'b', 'c' }, three); + + const sentinel = try parseFromSlice([:'z']const u8, gpa, ".{'a', 'b', 'c'}", .{}); + defer parseFree(gpa, sentinel); + const expected_sentinel: [:'z']const u8 = &.{ 'a', 'b', 'c' }; + try std.testing.expectEqualSlices(u8, expected_sentinel, sentinel); + } + } + + // Deep free + { + // Arrays + { + const parsed = try parseFromSlice([1][]const u8, gpa, ".{\"abc\"}", .{}); + defer 
parseFree(gpa, parsed); + const expected: [1][]const u8 = .{"abc"}; + try std.testing.expectEqualDeep(expected, parsed); + } + + // Slice literals + { + const parsed = try parseFromSlice([]const []const u8, gpa, ".{\"abc\"}", .{}); + defer parseFree(gpa, parsed); + const expected: []const []const u8 = &.{"abc"}; + try std.testing.expectEqualDeep(expected, parsed); + } + } + + // Sentinels and alignment + { + // Arrays + { + const sentinel = try parseFromSlice([1:2]u8, gpa, ".{1}", .{}); + try std.testing.expectEqual(@as(usize, 1), sentinel.len); + try std.testing.expectEqual(@as(u8, 1), sentinel[0]); + try std.testing.expectEqual(@as(u8, 2), sentinel[1]); + } + + // Slice literals + { + const sentinel = try parseFromSlice([:2]align(4) u8, gpa, ".{1}", .{}); + defer parseFree(gpa, sentinel); + try std.testing.expectEqual(@as(usize, 1), sentinel.len); + try std.testing.expectEqual(@as(u8, 1), sentinel[0]); + try std.testing.expectEqual(@as(u8, 2), sentinel[1]); + } + } + + // Expect 0 find 3 + { + var ast = try std.zig.Ast.parse(gpa, ".{'a', 'b', 'c'}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([0]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected tuple with 0 fields", formatted); + } + + // Expect 1 find 2 + { + var ast = try std.zig.Ast.parse(gpa, ".{'a', 'b'}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([1]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected tuple with 1 field", formatted); + } + + // Expect 2 find 1 + { + var ast = try std.zig.Ast.parse(gpa, ".{'a'}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([2]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected tuple with 2 fields", formatted); + } + + // Expect 3 find 0 + { + var ast = try std.zig.Ast.parse(gpa, ".{}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([3]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected tuple with 3 fields", formatted); + } + + // Wrong inner type + { + // Array + { + var ast = try std.zig.Ast.parse(gpa, ".{'a', 'b', 'c'}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([3]bool, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:3: expected bool", formatted); + } + + // Slice + { + var ast = try std.zig.Ast.parse(gpa, ".{'a', 'b', 'c'}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]bool, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:3: expected bool", 
formatted); + } + } + + // Complete wrong type + { + // Array + { + var ast = try std.zig.Ast.parse(gpa, "'a'", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([3]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple with 3 fields", formatted); + } + + // Slice + { + var ast = try std.zig.Ast.parse(gpa, "'a'", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + } + + // Address of is not allowed (indirection for slices in ZON is implicit) + { + var ast = try std.zig.Ast.parse(gpa, "&.{'a', 'b', 'c'}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([3]bool, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: ZON cannot take the address of a value", formatted); + } +} + +fn parsePointer(self: *@This(), comptime T: type, comptime options: ParseOptions, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const tags = self.ast.nodes.items(.tag); + return switch (tags[node]) { + .string_literal => try self.parseStringLiteral(T, node), + .multiline_string_literal => try self.parseMultilineStringLiteral(T, node), + else => self.parseSlice(T, options, node), + }; +} + +fn parseSlice(self: *@This(), comptime T: type, comptime options: ParseOptions, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const pointer = @typeInfo(T).pointer; + // Make sure we're working with a slice + switch (pointer.size) { + .Slice => {}, + .One, .Many, .C => @compileError(@typeName(T) ++ ": non slice pointers not supported"), + } + + // Parse the array literal + const main_tokens = self.ast.nodes.items(.main_token); + var buf: [2]NodeIndex = undefined; + const element_nodes = try self.elements(T, &buf, node); + + // Allocate the slice + const sentinel = if (pointer.sentinel) |s| @as(*const pointer.child, @ptrCast(s)).* else null; + const slice = self.gpa.allocWithOptions( + pointer.child, + element_nodes.len, + pointer.alignment, + sentinel, + ) catch |err| switch (err) { + error.OutOfMemory => return self.failOutOfMemory(main_tokens[node]), + }; + errdefer self.gpa.free(slice); + + // Parse the elements and return the slice + for (slice, element_nodes, 0..) 
|*element, element_node, initialized| { + errdefer if (options.free_on_error) { + for (0..initialized) |i| { + parseFree(self.gpa, slice[i]); + } + }; + element.* = try self.parseExpr(pointer.child, options, element_node); + } + return slice; +} + +fn parseStringLiteral(self: *@This(), comptime T: type, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const pointer = @typeInfo(T).pointer; + + if (pointer.size != .Slice) { + @compileError(@typeName(T) ++ ": cannot parse pointers that are not slices"); + } + + const main_tokens = self.ast.nodes.items(.main_token); + const token = main_tokens[node]; + const raw = self.ast.tokenSlice(token); + + if (pointer.child != u8 or !pointer.is_const or pointer.alignment != 1) { + return self.failExpectedContainer(T, token); + } + var buf = std.ArrayListUnmanaged(u8){}; + defer buf.deinit(self.gpa); + const parse_write_result = std.zig.string_literal.parseWrite( + buf.writer(self.gpa), + raw, + ) catch |err| switch (err) { + error.OutOfMemory => return self.failOutOfMemory(token), + }; + switch (parse_write_result) { + .success => {}, + .failure => |reason| return self.failInvalidStringLiteral(token, reason), + } + + if (pointer.sentinel) |sentinel| { + if (@as(*const u8, @ptrCast(sentinel)).* != 0) { + return self.failExpectedContainer(T, token); + } + return buf.toOwnedSliceSentinel(self.gpa, 0) catch |err| switch (err) { + error.OutOfMemory => return self.failOutOfMemory(token), + }; + } + + return buf.toOwnedSlice(self.gpa) catch |err| switch (err) { + error.OutOfMemory => return self.failOutOfMemory(token), + }; +} + +fn parseMultilineStringLiteral(self: *@This(), comptime T: type, node: NodeIndex) error{ ParserOutOfMemory, Type }!T { + const main_tokens = self.ast.nodes.items(.main_token); + + const pointer = @typeInfo(T).pointer; + + if (pointer.size != .Slice) { + @compileError(@typeName(T) ++ ": cannot parse pointers that are not slices"); + } + + if (pointer.child != u8 or !pointer.is_const or pointer.alignment != 1) { + return self.failExpectedContainer(T, main_tokens[node]); + } + + var buf = std.ArrayListUnmanaged(u8){}; + defer buf.deinit(self.gpa); + const writer = buf.writer(self.gpa); + + var parser = std.zig.string_literal.multilineParser(writer); + const data = self.ast.nodes.items(.data); + var tok_i = data[node].lhs; + while (tok_i <= data[node].rhs) : (tok_i += 1) { + const token_slice = self.ast.tokenSlice(tok_i); + parser.line(token_slice) catch |err| switch (err) { + error.OutOfMemory => return self.failOutOfMemory(tok_i), + }; + } + + if (pointer.sentinel) |sentinel| { + if (@as(*const u8, @ptrCast(sentinel)).* != 0) { + return self.failExpectedContainer(T, main_tokens[node]); + } + return buf.toOwnedSliceSentinel(self.gpa, 0) catch |err| switch (err) { + error.OutOfMemory => return self.failOutOfMemory(main_tokens[node]), + }; + } else { + return buf.toOwnedSlice(self.gpa) catch |err| switch (err) { + error.OutOfMemory => return self.failOutOfMemory(main_tokens[node]), + }; + } +} + +test "std.zon string literal" { + const gpa = std.testing.allocator; + + // Basic string literal + { + const parsed = try parseFromSlice([]const u8, gpa, "\"abc\"", .{}); + defer parseFree(gpa, parsed); + try std.testing.expectEqualStrings(@as([]const u8, "abc"), parsed); + } + + // String literal with escape characters + { + const parsed = try parseFromSlice([]const u8, gpa, "\"ab\\nc\"", .{}); + defer parseFree(gpa, parsed); + try std.testing.expectEqualStrings(@as([]const u8, "ab\nc"), parsed); + } + + // String literal with embedded null 
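+ // (escape sequences are expanded by std.zig.string_literal.parseWrite, so "\x00" in
+ // the source produces a real NUL byte in the parsed slice; identifiers, by contrast,
+ // reject embedded NULs in parseIdent)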
+ { + const parsed = try parseFromSlice([]const u8, gpa, "\"ab\\x00c\"", .{}); + defer parseFree(gpa, parsed); + try std.testing.expectEqualStrings(@as([]const u8, "ab\x00c"), parsed); + } + + // Passing a string literal to a mutable slice + { + { + var ast = try std.zig.Ast.parse(gpa, "\"abcd\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + + { + var ast = try std.zig.Ast.parse(gpa, "\\\\abcd", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + } + + // Passing a string literal to an array + { + { + var ast = try std.zig.Ast.parse(gpa, "\"abcd\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([4:0]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple with 4 fields", formatted); + } + + { + var ast = try std.zig.Ast.parse(gpa, "\\\\abcd", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([4:0]u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple with 4 fields", formatted); + } + } + + // Zero-terminated slices + { + { + const parsed: [:0]const u8 = try parseFromSlice([:0]const u8, gpa, "\"abc\"", .{}); + defer parseFree(gpa, parsed); + try std.testing.expectEqualStrings("abc", parsed); + try std.testing.expectEqual(@as(u8, 0), parsed[3]); + } + + { + const parsed: [:0]const u8 = try parseFromSlice([:0]const u8, gpa, "\\\\abc", .{}); + defer parseFree(gpa, parsed); + try std.testing.expectEqualStrings("abc", parsed); + try std.testing.expectEqual(@as(u8, 0), parsed[3]); + } + } + + // Other value terminated slices + { + { + var ast = try std.zig.Ast.parse(gpa, "\"foo\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([:1]const u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + + { + var ast = try std.zig.Ast.parse(gpa, "\\\\foo", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([:1]const u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + } + + // Expecting string literal, getting something else + { + var ast = try std.zig.Ast.parse(gpa, "true", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]const u8, gpa, &ast, &status, .{})); + const formatted = try 
std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected string", formatted); + } + + // Expecting string literal, getting an incompatible tuple + { + var ast = try std.zig.Ast.parse(gpa, ".{false}", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]const u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:3: expected u8", formatted); + } + + // Invalid string literal + { + var ast = try std.zig.Ast.parse(gpa, "\"\\a\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]const u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:3: invalid escape character: 'a'", formatted); + } + + // Slice wrong child type + { + { + var ast = try std.zig.Ast.parse(gpa, "\"a\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]const i8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + + { + var ast = try std.zig.Ast.parse(gpa, "\\\\a", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]const i8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + } + + // Bad alignment + { + { + var ast = try std.zig.Ast.parse(gpa, "\"abc\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]align(2) const u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + + { + var ast = try std.zig.Ast.parse(gpa, "\\\\abc", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst([]align(2) const u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected tuple", formatted); + } + } + + // Multi line strings + inline for (.{ []const u8, [:0]const u8 }) |String| { + // Nested + { + const S = struct { + message: String, + message2: String, + message3: String, + }; + const parsed = try parseFromSlice(S, gpa, + \\.{ + \\ .message = + \\ \\hello, world! + \\ + \\ \\this is a multiline string! + \\ \\ + \\ \\... + \\ + \\ , + \\ .message2 = + \\ \\this too...sort of. + \\ , + \\ .message3 = + \\ \\ + \\ \\and this. 
+ \\} + , .{}); + defer parseFree(gpa, parsed); + try std.testing.expectEqualStrings("hello, world!\nthis is a multiline string!\n\n...", parsed.message); + try std.testing.expectEqualStrings("this too...sort of.", parsed.message2); + try std.testing.expectEqualStrings("\nand this.", parsed.message3); + } + } +} + +fn parseEnumLiteral(self: @This(), comptime T: type, node: NodeIndex) error{Type}!T { + const tags = self.ast.nodes.items(.tag); + const main_tokens = self.ast.nodes.items(.main_token); + const token = main_tokens[node]; + + switch (tags[node]) { + .enum_literal => { + // Create a comptime string map for the enum fields + const enum_fields = @typeInfo(T).@"enum".fields; + comptime var kvs_list: [enum_fields.len]struct { []const u8, T } = undefined; + inline for (enum_fields, 0..) |field, i| { + kvs_list[i] = .{ field.name, @enumFromInt(field.value) }; + } + const enum_tags = std.StaticStringMap(T).initComptime(kvs_list); + + // Get the tag if it exists + const bytes = try self.parseIdent(T, token); + return enum_tags.get(bytes) orelse + self.failUnexpectedField(T, token); + }, + else => return self.fail(token, .expected_enum), + } +} + +// Note that `parseIdent` may reuse the same buffer when called repeatedly, invalidating +// previous results. +// The resulting bytes may reference a buffer on `self` that can be reused in future calls to +// `parseIdent`. They should only be held onto temporarily. +fn parseIdent(self: @This(), T: type, token: TokenIndex) error{Type}![]const u8 { + var unparsed = self.ast.tokenSlice(token); + + if (unparsed[0] == '@' and unparsed[1] == '"') { + var fba = std.heap.FixedBufferAllocator.init(self.ident_buf); + const alloc = fba.allocator(); + var parsed = std.ArrayListUnmanaged(u8).initCapacity(alloc, self.ident_buf.len) catch unreachable; + + const raw = unparsed[1..unparsed.len]; + const result = std.zig.string_literal.parseWrite(parsed.writer(alloc), raw) catch |err| switch (err) { + // If it's too long for our preallocated buffer, it must be incorrect + error.OutOfMemory => return self.failUnexpectedField(T, token), + }; + switch (result) { + .failure => |reason| return self.failInvalidStringLiteral(token, reason), + .success => {}, + } + if (std.mem.indexOfScalar(u8, parsed.items, 0) != null) { + return self.failUnexpectedField(T, token); + } + return parsed.items; + } + + return unparsed; +} + +test "std.zon enum literals" { + const gpa = std.testing.allocator; + + const Enum = enum { + foo, + bar, + baz, + @"ab\nc", + }; + + // Tags that exist + try std.testing.expectEqual(Enum.foo, try parseFromSlice(Enum, gpa, ".foo", .{})); + try std.testing.expectEqual(Enum.bar, try parseFromSlice(Enum, gpa, ".bar", .{})); + try std.testing.expectEqual(Enum.baz, try parseFromSlice(Enum, gpa, ".baz", .{})); + try std.testing.expectEqual(Enum.@"ab\nc", try parseFromSlice(Enum, gpa, ".@\"ab\\nc\"", .{})); + + // Bad tag + { + var ast = try std.zig.Ast.parse(gpa, ".qux", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Enum, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: unexpected field, supported fields: foo, bar, baz, @\"ab\\nc\"", formatted); + } + + // Bad tag that's too long for parser + { + var ast = try std.zig.Ast.parse(gpa, ".@\"foobarbaz\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try 
std.testing.expectError(error.Type, parseFromAst(Enum, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: unexpected field, supported fields: foo, bar, baz, @\"ab\\nc\"", formatted); + } + + // Bad type + { + var ast = try std.zig.Ast.parse(gpa, "true", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(Enum, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected enum literal", formatted); + } + + // Test embedded nulls in an identifier + { + var ast = try std.zig.Ast.parse(gpa, ".@\"\\x00\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(enum { a }, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: unexpected field, supported fields: a", formatted); + } +} + +fn fail(self: @This(), token: TokenIndex, reason: ParseFailure.Reason) error{Type} { + @branchHint(.cold); + if (self.status) |s| s.* = .{ .failure = .{ + .ast = self.ast, + .token = token, + .reason = reason, + } }; + return error.Type; +} + +fn failOutOfMemory(self: *@This(), token: TokenIndex) error{ParserOutOfMemory} { + // Set our failure state, but ignore the type error because users may want to handle out of + // memory separately from other input errors + self.fail(token, .out_of_memory) catch {}; + + // We don't return error.OutOfMemory directly so that we can't forget to call this function, + // this error will be converted to error.OutOfMemory before returning to the user + return error.ParserOutOfMemory; +} + +fn failInvalidStringLiteral(self: @This(), token: TokenIndex, err: StringLiteralError) error{Type} { + @branchHint(.cold); + return self.fail(token, .{ + .invalid_string_literal = .{ .err = err }, + }); +} + +fn failInvalidNumberLiteral(self: @This(), token: TokenIndex, err: NumberLiteralError) error{Type} { + @branchHint(.cold); + return self.fail(token, .{ + .invalid_number_literal = .{ .err = err }, + }); +} + +fn failCannotRepresent(self: @This(), comptime T: type, token: TokenIndex) error{Type} { + @branchHint(.cold); + return self.fail(token, .{ + .cannot_represent = .{ .type_name = @typeName(T) }, + }); +} + +fn failNegativeIntegerZero(self: @This(), token: TokenIndex) error{Type} { + @branchHint(.cold); + return self.fail(token, .negative_integer_zero); +} + +fn failUnexpectedField(self: @This(), T: type, token: TokenIndex) error{Type} { + @branchHint(.cold); + switch (@typeInfo(T)) { + .@"struct", .@"union", .@"enum" => return self.fail(token, .{ .unexpected_field = .{ + .fields = std.meta.fieldNames(T), + } }), + else => @compileError("unreachable, should not be called for type " ++ @typeName(T)), + } +} + +fn failExpectedContainer(self: @This(), T: type, token: TokenIndex) error{Type} { + @branchHint(.cold); + switch (@typeInfo(T)) { + .@"struct" => |@"struct"| if (@"struct".is_tuple) { + return self.fail(token, .{ .expected_tuple_with_fields = .{ + .fields = @"struct".fields.len, + } }); + } else { + return self.fail(token, .expected_struct); + }, + .@"union" => return self.fail(token, .expected_union), + .array => |array| return self.fail(token, .{ .expected_tuple_with_fields = .{ + 
.fields = array.len, + } }), + .pointer => |pointer| { + if (pointer.child == u8 and + pointer.size == .Slice and + pointer.is_const and + (pointer.sentinel == null or @as(*const u8, @ptrCast(pointer.sentinel)).* == 0) and + pointer.alignment == 1) + { + return self.fail(token, .expected_string); + } else { + return self.fail(token, .expected_tuple); + } + }, + else => @compileError("unreachable, should not be called for type " ++ @typeName(T)), + } +} + +fn failMissingField(self: @This(), name: []const u8, token: TokenIndex) error{Type} { + @branchHint(.cold); + return self.fail(token, .{ .missing_field = .{ .field_name = name } }); +} + +fn failDuplicateField(self: @This(), token: TokenIndex) error{Type} { + @branchHint(.cold); + return self.fail(token, .duplicate_field); +} + +fn failTypeExpr(self: @This(), token: TokenIndex) error{Type} { + @branchHint(.cold); + return self.fail(token, .type_expr); +} + +fn parseBool(self: @This(), node: NodeIndex) error{Type}!bool { + const tags = self.ast.nodes.items(.tag); + const main_tokens = self.ast.nodes.items(.main_token); + const token = main_tokens[node]; + switch (tags[node]) { + .identifier => { + const bytes = self.ast.tokenSlice(token); + const map = std.StaticStringMap(bool).initComptime(.{ + .{ "true", true }, + .{ "false", false }, + }); + if (map.get(bytes)) |value| { + return value; + } + }, + else => {}, + } + return self.fail(token, .{ .expected_primitive = .{ .type_name = "bool" } }); +} + +test "std.zon parse bool" { + const gpa = std.testing.allocator; + + // Valid bools + try std.testing.expectEqual(true, try parseFromSlice(bool, gpa, "true", .{})); + try std.testing.expectEqual(false, try parseFromSlice(bool, gpa, "false", .{})); + + // Errors + { + var ast = try std.zig.Ast.parse(gpa, " foo", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(bool, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:2: expected bool", formatted); + } + { + var ast = try std.zig.Ast.parse(gpa, "123", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(bool, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected bool", formatted); + } +} + +fn parseNumber( + self: @This(), + comptime T: type, + node: NodeIndex, +) error{Type}!T { + const main_tokens = self.ast.nodes.items(.main_token); + const num_lit_node = self.numLitNode(node); + const tags = self.ast.nodes.items(.tag); + switch (tags[num_lit_node]) { + .number_literal => return self.parseNumberLiteral(T, node), + .char_literal => return self.parseCharLiteral(T, node), + .identifier => switch (@typeInfo(T)) { + .float => { + const token = main_tokens[num_lit_node]; + const bytes = self.ast.tokenSlice(token); + const Ident = enum { inf, nan }; + const map = std.StaticStringMap(Ident).initComptime(.{ + .{ "inf", .inf }, + .{ "nan", .nan }, + }); + if (map.get(bytes)) |value| { + switch (value) { + .inf => if (self.isNegative(node)) { + return -std.math.inf(T); + } else { + return std.math.inf(T); + }, + .nan => return std.math.nan(T), + } + } + }, + else => {}, + }, + else => {}, + } + return self.fail(main_tokens[node], .{ + .expected_primitive = .{ .type_name = @typeName(T) }, + }); +} + +fn 
parseNumberLiteral(self: @This(), comptime T: type, node: NodeIndex) error{Type}!T { + const num_lit_node = self.numLitNode(node); + const main_tokens = self.ast.nodes.items(.main_token); + const num_lit_token = main_tokens[num_lit_node]; + const token_bytes = self.ast.tokenSlice(num_lit_token); + const number = std.zig.number_literal.parseNumberLiteral(token_bytes); + + switch (number) { + .int => |int| return self.applySignToInt(T, node, int), + .big_int => |base| return self.parseBigNumber(T, node, base), + .float => return self.parseFloat(T, node), + .failure => |reason| return self.failInvalidNumberLiteral(main_tokens[node], reason), + } +} + +fn applySignToInt(self: @This(), comptime T: type, node: NodeIndex, int: anytype) error{Type}!T { + const main_tokens = self.ast.nodes.items(.main_token); + if (self.isNegative(node)) { + if (int == 0) { + return self.failNegativeIntegerZero(main_tokens[node]); + } + switch (@typeInfo(T)) { + .int => |int_info| switch (int_info.signedness) { + .signed => { + const In = @TypeOf(int); + if (std.math.maxInt(In) > std.math.maxInt(T) and int == @as(In, std.math.maxInt(T)) + 1) { + return std.math.minInt(T); + } + + return -(std.math.cast(T, int) orelse return self.failCannotRepresent(T, main_tokens[node])); + }, + .unsigned => return self.failCannotRepresent(T, main_tokens[node]), + }, + .float => return -@as(T, @floatFromInt(int)), + else => @compileError("internal error: expected numeric type"), + } + } else { + switch (@typeInfo(T)) { + .int => return std.math.cast(T, int) orelse + self.failCannotRepresent(T, main_tokens[node]), + .float => return @as(T, @floatFromInt(int)), + else => @compileError("internal error: expected numeric type"), + } + } +} + +fn parseBigNumber( + self: @This(), + comptime T: type, + node: NodeIndex, + base: Base, +) error{Type}!T { + switch (@typeInfo(T)) { + .int => return self.parseBigInt(T, node, base), + .float => { + const result = @as(T, @floatCast(try self.parseFloat(f128, node))); + if (std.math.isNegativeZero(result)) { + const main_tokens = self.ast.nodes.items(.main_token); + return self.failNegativeIntegerZero(main_tokens[node]); + } + return result; + }, + else => @compileError("internal error: expected integer or float type"), + } +} + +fn parseBigInt(self: @This(), comptime T: type, node: NodeIndex, base: Base) error{Type}!T { + const num_lit_node = self.numLitNode(node); + const main_tokens = self.ast.nodes.items(.main_token); + const num_lit_token = main_tokens[num_lit_node]; + const prefix_offset: usize = if (base == .decimal) 0 else 2; + const bytes = self.ast.tokenSlice(num_lit_token)[prefix_offset..]; + const result = if (self.isNegative(node)) + std.fmt.parseIntWithSign(T, u8, bytes, @intFromEnum(base), .neg) + else + std.fmt.parseIntWithSign(T, u8, bytes, @intFromEnum(base), .pos); + return result catch |err| switch (err) { + error.InvalidCharacter => unreachable, + error.Overflow => return self.failCannotRepresent(T, main_tokens[node]), + }; +} + +fn parseFloat( + self: @This(), + comptime T: type, + node: NodeIndex, +) error{Type}!T { + const num_lit_node = self.numLitNode(node); + const main_tokens = self.ast.nodes.items(.main_token); + const num_lit_token = main_tokens[num_lit_node]; + const bytes = self.ast.tokenSlice(num_lit_token); + const Float = if (@typeInfo(T) == .float) T else f128; + const unsigned_float = std.fmt.parseFloat(Float, bytes) catch unreachable; // Already validated + const result = if (self.isNegative(node)) -unsigned_float else unsigned_float; + switch (@typeInfo(T)) { + 
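+ // Integer result types are accepted here too so that whole-number float
+ // literals such as "123.0" can coerce to ints; intFromFloatExact below
+ // rejects values that are out of range, NaN, or not a whole number.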
.float => return @as(T, @floatCast(result)), + .int => return intFromFloatExact(T, result) orelse + return self.failCannotRepresent(T, main_tokens[node]), + else => @compileError("internal error: expected integer or float type"), + } +} + +fn parseCharLiteral(self: @This(), comptime T: type, node: NodeIndex) error{Type}!T { + const num_lit_node = self.numLitNode(node); + const main_tokens = self.ast.nodes.items(.main_token); + const num_lit_token = main_tokens[num_lit_node]; + const token_bytes = self.ast.tokenSlice(num_lit_token); + const char = std.zig.string_literal.parseCharLiteral(token_bytes).success; + return self.applySignToInt(T, node, char); +} + +fn isNegative(self: *const @This(), node: NodeIndex) bool { + const tags = self.ast.nodes.items(.tag); + return tags[node] == .negation; +} + +fn numLitNode(self: *const @This(), node: NodeIndex) NodeIndex { + if (self.isNegative(node)) { + const data = self.ast.nodes.items(.data); + return data[node].lhs; + } else { + return node; + } +} + +fn intFromFloatExact(comptime T: type, value: anytype) ?T { + switch (@typeInfo(@TypeOf(value))) { + .float => {}, + else => @compileError(@typeName(@TypeOf(value)) ++ " is not a runtime floating point type"), + } + switch (@typeInfo(T)) { + .int => {}, + else => @compileError(@typeName(T) ++ " is not a runtime integer type"), + } + + if (value > std.math.maxInt(T) or value < std.math.minInt(T)) { + return null; + } + + if (std.math.isNan(value) or std.math.trunc(value) != value) { + return null; + } + + return @as(T, @intFromFloat(value)); +} + +test "std.zon intFromFloatExact" { + // Valid conversions + try std.testing.expectEqual(@as(u8, 10), intFromFloatExact(u8, @as(f32, 10.0)).?); + try std.testing.expectEqual(@as(i8, -123), intFromFloatExact(i8, @as(f64, @as(f64, -123.0))).?); + try std.testing.expectEqual(@as(i16, 45), intFromFloatExact(i16, @as(f128, @as(f128, 45.0))).?); + + // Out of range + try std.testing.expectEqual(@as(?u4, null), intFromFloatExact(u4, @as(f32, 16.0))); + try std.testing.expectEqual(@as(?i4, null), intFromFloatExact(i4, @as(f64, -17.0))); + try std.testing.expectEqual(@as(?u8, null), intFromFloatExact(u8, @as(f128, -2.0))); + + // Not a whole number + try std.testing.expectEqual(@as(?u8, null), intFromFloatExact(u8, @as(f32, 0.5))); + try std.testing.expectEqual(@as(?i8, null), intFromFloatExact(i8, @as(f64, 0.01))); + + // Infinity and NaN + try std.testing.expectEqual(@as(?u8, null), intFromFloatExact(u8, std.math.inf(f32))); + try std.testing.expectEqual(@as(?u8, null), intFromFloatExact(u8, -std.math.inf(f32))); + try std.testing.expectEqual(@as(?u8, null), intFromFloatExact(u8, std.math.nan(f32))); +} + +test "std.zon parse int" { + const gpa = std.testing.allocator; + + // Test various numbers and types + try std.testing.expectEqual(@as(u8, 10), try parseFromSlice(u8, gpa, "10", .{})); + try std.testing.expectEqual(@as(i16, 24), try parseFromSlice(i16, gpa, "24", .{})); + try std.testing.expectEqual(@as(i14, -4), try parseFromSlice(i14, gpa, "-4", .{})); + try std.testing.expectEqual(@as(i32, -123), try parseFromSlice(i32, gpa, "-123", .{})); + + // Test limits + try std.testing.expectEqual(@as(i8, 127), try parseFromSlice(i8, gpa, "127", .{})); + try std.testing.expectEqual(@as(i8, -128), try parseFromSlice(i8, gpa, "-128", .{})); + + // Test characters + try std.testing.expectEqual(@as(u8, 'a'), try parseFromSlice(u8, gpa, "'a'", .{})); + try std.testing.expectEqual(@as(u8, 'z'), try parseFromSlice(u8, gpa, "'z'", .{})); + try std.testing.expectEqual(@as(i16, 
-'a'), try parseFromSlice(i16, gpa, "-'a'", .{})); + try std.testing.expectEqual(@as(i16, -'z'), try parseFromSlice(i16, gpa, "-'z'", .{})); + + // Test big integers + try std.testing.expectEqual( + @as(u65, 36893488147419103231), + try parseFromSlice(u65, gpa, "36893488147419103231", .{}), + ); + try std.testing.expectEqual( + @as(u65, 36893488147419103231), + try parseFromSlice(u65, gpa, "368934_881_474191032_31", .{}), + ); + + // Test big integer limits + try std.testing.expectEqual( + @as(i66, 36893488147419103231), + try parseFromSlice(i66, gpa, "36893488147419103231", .{}), + ); + try std.testing.expectEqual( + @as(i66, -36893488147419103232), + try parseFromSlice(i66, gpa, "-36893488147419103232", .{}), + ); + { + var ast = try std.zig.Ast.parse(gpa, "36893488147419103232", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(i66, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: i66 cannot represent value", formatted); + } + { + var ast = try std.zig.Ast.parse(gpa, "-36893488147419103233", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(i66, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: i66 cannot represent value", formatted); + } + + // Test parsing whole number floats as integers + try std.testing.expectEqual(@as(i8, -1), try parseFromSlice(i8, gpa, "-1.0", .{})); + try std.testing.expectEqual(@as(i8, 123), try parseFromSlice(i8, gpa, "123.0", .{})); + + // Test non-decimal integers + try std.testing.expectEqual(@as(i16, 0xff), try parseFromSlice(i16, gpa, "0xff", .{})); + try std.testing.expectEqual(@as(i16, -0xff), try parseFromSlice(i16, gpa, "-0xff", .{})); + try std.testing.expectEqual(@as(i16, 0o77), try parseFromSlice(i16, gpa, "0o77", .{})); + try std.testing.expectEqual(@as(i16, -0o77), try parseFromSlice(i16, gpa, "-0o77", .{})); + try std.testing.expectEqual(@as(i16, 0b11), try parseFromSlice(i16, gpa, "0b11", .{})); + try std.testing.expectEqual(@as(i16, -0b11), try parseFromSlice(i16, gpa, "-0b11", .{})); + + // Test non-decimal big integers + try std.testing.expectEqual(@as(u65, 0x1ffffffffffffffff), try parseFromSlice( + u65, + gpa, + "0x1ffffffffffffffff", + .{}, + )); + try std.testing.expectEqual(@as(i66, 0x1ffffffffffffffff), try parseFromSlice( + i66, + gpa, + "0x1ffffffffffffffff", + .{}, + )); + try std.testing.expectEqual(@as(i66, -0x1ffffffffffffffff), try parseFromSlice( + i66, + gpa, + "-0x1ffffffffffffffff", + .{}, + )); + try std.testing.expectEqual(@as(u65, 0x1ffffffffffffffff), try parseFromSlice( + u65, + gpa, + "0o3777777777777777777777", + .{}, + )); + try std.testing.expectEqual(@as(i66, 0x1ffffffffffffffff), try parseFromSlice( + i66, + gpa, + "0o3777777777777777777777", + .{}, + )); + try std.testing.expectEqual(@as(i66, -0x1ffffffffffffffff), try parseFromSlice( + i66, + gpa, + "-0o3777777777777777777777", + .{}, + )); + try std.testing.expectEqual(@as(u65, 0x1ffffffffffffffff), try parseFromSlice( + u65, + gpa, + "0b11111111111111111111111111111111111111111111111111111111111111111", + .{}, + )); + try std.testing.expectEqual(@as(i66, 0x1ffffffffffffffff), try parseFromSlice( + i66, + gpa, + 
"0b11111111111111111111111111111111111111111111111111111111111111111", + .{}, + )); + try std.testing.expectEqual(@as(i66, -0x1ffffffffffffffff), try parseFromSlice( + i66, + gpa, + "-0b11111111111111111111111111111111111111111111111111111111111111111", + .{}, + )); + + // Number with invalid character in the middle + { + var ast = try std.zig.Ast.parse(gpa, "32a32", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:3: invalid digit 'a' for decimal base", formatted); + } + + // Failing to parse as int + { + var ast = try std.zig.Ast.parse(gpa, "true", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected u8", formatted); + } + + // Failing because an int is out of range + { + var ast = try std.zig.Ast.parse(gpa, "256", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: u8 cannot represent value", formatted); + } + + // Failing because a negative int is out of range + { + var ast = try std.zig.Ast.parse(gpa, "-129", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(i8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: i8 cannot represent value", formatted); + } + + // Failing because an unsigned int is negative + { + var ast = try std.zig.Ast.parse(gpa, "-1", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: u8 cannot represent value", formatted); + } + + // Failing because a float is non-whole + { + var ast = try std.zig.Ast.parse(gpa, "1.5", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: u8 cannot represent value", formatted); + } + + // Failing because a float is negative + { + var ast = try std.zig.Ast.parse(gpa, "-1.0", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: u8 cannot represent value", formatted); + } + + // Negative integer zero + { + var ast = try std.zig.Ast.parse(gpa, "-0", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try 
std.testing.expectError(error.Type, parseFromAst(i8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: integer literal '-0' is ambiguous", formatted); + } + + // Negative integer zero casted to float + { + var ast = try std.zig.Ast.parse(gpa, "-0", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(f32, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: integer literal '-0' is ambiguous", formatted); + } + + // Negative float 0 is allowed + try std.testing.expect(std.math.isNegativeZero(try parseFromSlice(f32, gpa, "-0.0", .{}))); + try std.testing.expect(std.math.isPositiveZero(try parseFromSlice(f32, gpa, "0.0", .{}))); + + // Double negation is not allowed + { + var ast = try std.zig.Ast.parse(gpa, "--2", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(i8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected i8", formatted); + } + + { + var ast = try std.zig.Ast.parse(gpa, "--2.0", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(f32, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected f32", formatted); + } + + // Invalid int literal + { + var ast = try std.zig.Ast.parse(gpa, "0xg", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(u8, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:3: invalid digit 'g' for hex base", formatted); + } + + // Notes on invalid int literal + { + var ast = try std.zig.Ast.parse(gpa, "0123", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(u8, gpa, &ast, &status, .{})); + try std.testing.expectFmt("1:1: number '0123' has leading zero", "{}", .{status.failure}); + try std.testing.expectEqual(1, status.failure.noteCount()); + try std.testing.expectFmt("use '0o' prefix for octal literals", "{}", .{status.failure.fmtNote(0)}); + } +} + +test "std.zon parse float" { + const gpa = std.testing.allocator; + + // Test decimals + try std.testing.expectEqual(@as(f16, 0.5), try parseFromSlice(f16, gpa, "0.5", .{})); + try std.testing.expectEqual(@as(f32, 123.456), try parseFromSlice(f32, gpa, "123.456", .{})); + try std.testing.expectEqual(@as(f64, -123.456), try parseFromSlice(f64, gpa, "-123.456", .{})); + try std.testing.expectEqual(@as(f128, 42.5), try parseFromSlice(f128, gpa, "42.5", .{})); + + // Test whole numbers with and without decimals + try std.testing.expectEqual(@as(f16, 5.0), try parseFromSlice(f16, gpa, "5.0", .{})); + try std.testing.expectEqual(@as(f16, 5.0), try parseFromSlice(f16, gpa, "5", .{})); + try std.testing.expectEqual(@as(f32, -102), try parseFromSlice(f32, gpa, "-102.0", .{})); + try std.testing.expectEqual(@as(f32, -102), try parseFromSlice(f32, gpa, 
"-102", .{})); + + // Test characters and negated characters + try std.testing.expectEqual(@as(f32, 'a'), try parseFromSlice(f32, gpa, "'a'", .{})); + try std.testing.expectEqual(@as(f32, 'z'), try parseFromSlice(f32, gpa, "'z'", .{})); + try std.testing.expectEqual(@as(f32, -'z'), try parseFromSlice(f32, gpa, "-'z'", .{})); + + // Test big integers + try std.testing.expectEqual( + @as(f32, 36893488147419103231), + try parseFromSlice(f32, gpa, "36893488147419103231", .{}), + ); + try std.testing.expectEqual( + @as(f32, -36893488147419103231), + try parseFromSlice(f32, gpa, "-36893488147419103231", .{}), + ); + try std.testing.expectEqual(@as(f128, 0x1ffffffffffffffff), try parseFromSlice( + f128, + gpa, + "0x1ffffffffffffffff", + .{}, + )); + try std.testing.expectEqual(@as(f32, 0x1ffffffffffffffff), try parseFromSlice( + f32, + gpa, + "0x1ffffffffffffffff", + .{}, + )); + + // Exponents, underscores + try std.testing.expectEqual(@as(f32, 123.0E+77), try parseFromSlice(f32, gpa, "12_3.0E+77", .{})); + + // Hexadecimal + try std.testing.expectEqual(@as(f32, 0x103.70p-5), try parseFromSlice(f32, gpa, "0x103.70p-5", .{})); + try std.testing.expectEqual(@as(f32, -0x103.70), try parseFromSlice(f32, gpa, "-0x103.70", .{})); + try std.testing.expectEqual( + @as(f32, 0x1234_5678.9ABC_CDEFp-10), + try parseFromSlice(f32, gpa, "0x1234_5678.9ABC_CDEFp-10", .{}), + ); + + // inf, nan + try std.testing.expect(std.math.isPositiveInf(try parseFromSlice(f32, gpa, "inf", .{}))); + try std.testing.expect(std.math.isNegativeInf(try parseFromSlice(f32, gpa, "-inf", .{}))); + try std.testing.expect(std.math.isNan(try parseFromSlice(f32, gpa, "nan", .{}))); + try std.testing.expect(std.math.isNan(try parseFromSlice(f32, gpa, "-nan", .{}))); + + // Bad identifier as float + { + var ast = try std.zig.Ast.parse(gpa, "foo", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(f32, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected f32", formatted); + } + + { + var ast = try std.zig.Ast.parse(gpa, "-foo", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(f32, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected f32", formatted); + } + + // Non float as float + { + var ast = try std.zig.Ast.parse(gpa, "\"foo\"", .zon); + defer ast.deinit(gpa); + var status: ParseStatus = undefined; + try std.testing.expectError(error.Type, parseFromAst(f32, gpa, &ast, &status, .{})); + const formatted = try std.fmt.allocPrint(gpa, "{}", .{status.failure}); + defer gpa.free(formatted); + try std.testing.expectEqualStrings("1:1: expected f32", formatted); + } +} + +test "std.zon free on error" { + // Test freeing partially allocated structs + { + const Struct = struct { + x: []const u8, + y: []const u8, + z: bool, + }; + try std.testing.expectError(error.Type, parseFromSlice(Struct, std.testing.allocator, + \\.{ + \\ .x = "hello", + \\ .y = "world", + \\ .z = "fail", + \\} + , .{})); + } + + // Test freeing partially allocated tuples + { + const Struct = struct { + []const u8, + []const u8, + bool, + }; + try std.testing.expectError(error.Type, parseFromSlice(Struct, std.testing.allocator, + \\.{ + \\ "hello", + \\ "world", + \\ 
"fail", + \\} + , .{})); + } + + // Test freeing structs with missing fields + { + const Struct = struct { + x: []const u8, + y: bool, + }; + try std.testing.expectError(error.Type, parseFromSlice(Struct, std.testing.allocator, + \\.{ + \\ .x = "hello", + \\} + , .{})); + } + + // Test freeing partially allocated arrays + { + try std.testing.expectError(error.Type, parseFromSlice([3][]const u8, std.testing.allocator, + \\.{ + \\ "hello", + \\ false, + \\ false, + \\} + , .{})); + } + + // Test freeing partially allocated slices + { + try std.testing.expectError(error.Type, parseFromSlice([][]const u8, std.testing.allocator, + \\.{ + \\ "hello", + \\ "world", + \\ false, + \\} + , .{})); + } + + // We can parse types that can't be freed, as long as they contain no allocations, e.g. untagged + // unions. + try std.testing.expectEqual( + @as(f32, 1.5), + (try parseFromSlice(union { x: f32 }, std.testing.allocator, ".{ .x = 1.5 }", .{})).x, + ); + + // We can also parse types that can't be freed if it's impossible for an error to occur after + // the allocation, as is the case here. + { + const result = try parseFromSlice(union { x: []const u8 }, std.testing.allocator, ".{ .x = \"foo\" }", .{}); + defer parseFree(std.testing.allocator, result.x); + try std.testing.expectEqualStrings("foo", result.x); + } + + // However, if it's possible we could get an error requiring we free the value, but the value + // cannot be freed (e.g. untagged unions) then we need to turn off `free_on_error` for it to + // compile. + { + const S = struct { + union { x: []const u8 }, + bool, + }; + const result = try parseFromSlice(S, std.testing.allocator, ".{ .{ .x = \"foo\" }, true }", .{ + .free_on_error = false, + }); + defer parseFree(std.testing.allocator, result[0].x); + try std.testing.expectEqualStrings("foo", result[0].x); + try std.testing.expect(result[1]); + } + + // Again but for structs. + { + const S = struct { + a: union { x: []const u8 }, + b: bool, + }; + const result = try parseFromSlice(S, std.testing.allocator, ".{ .a = .{ .x = \"foo\" }, .b = true }", .{ + .free_on_error = false, + }); + defer parseFree(std.testing.allocator, result.a.x); + try std.testing.expectEqualStrings("foo", result.a.x); + try std.testing.expect(result.b); + } + + // Again but for arrays. + { + const S = [2]union { x: []const u8 }; + const result = try parseFromSlice(S, std.testing.allocator, ".{ .{ .x = \"foo\" }, .{ .x = \"bar\" } }", .{ + .free_on_error = false, + }); + defer parseFree(std.testing.allocator, result[0].x); + defer parseFree(std.testing.allocator, result[1].x); + try std.testing.expectEqualStrings("foo", result[0].x); + try std.testing.expectEqualStrings("bar", result[1].x); + } + + // Again but for slices. + { + const S = []union { x: []const u8 }; + const result = try parseFromSlice(S, std.testing.allocator, ".{ .{ .x = \"foo\" }, .{ .x = \"bar\" } }", .{ + .free_on_error = false, + }); + defer std.testing.allocator.free(result); + defer parseFree(std.testing.allocator, result[0].x); + defer parseFree(std.testing.allocator, result[1].x); + try std.testing.expectEqualStrings("foo", result[0].x); + try std.testing.expectEqualStrings("bar", result[1].x); + } +} diff --git a/lib/std/zon/stringify.zig b/lib/std/zon/stringify.zig new file mode 100644 index 000000000000..2f6c7e7b761e --- /dev/null +++ b/lib/std/zon/stringify.zig @@ -0,0 +1,1920 @@ +const std = @import("std"); + +/// Configuration for stringification. +/// +/// See `StringifyOptions` for more details. 
+pub const StringifierOptions = struct { + /// If false, only syntactically necessary whitespace is emitted. + whitespace: bool = true, +}; + +/// Options for stringification of an individual value. +/// +/// See `StringifyOptions` for more details. +pub const StringifyValueOptions = struct { + emit_utf8_codepoints: bool = false, + emit_strings_as_containers: bool = false, + emit_default_optional_fields: bool = true, +}; + +/// All stringify options. +pub const StringifyOptions = struct { + /// If false, all whitespace is emitted. Otherwise, whitespace is emitted in the standard Zig + /// style when possible. + whitespace: bool = true, + /// If true, unsigned integers with <= 21 bits are written as their corresponding UTF8 codepoint + /// instead of a numeric literal if one exists. + emit_utf8_codepoints: bool = false, + /// If true, slices of u8s, and pointers to arrays of u8s are serialized as containers. + /// Otherwise they are serialized as string literals. + emit_strings_as_containers: bool = false, + /// If false, struct fields are not written if they are equal to their default value. Comparison + /// is done by `std.meta.eql`. + emit_default_optional_fields: bool = true, +}; + +/// Options for manual serializaation of container types. +pub const StringifyContainerOptions = struct { + /// The whitespace style that should be used for this container. Ignored if whitespace is off. + whitespace_style: union(enum) { + /// If true, wrap every field/item. If false do not. + wrap: bool, + /// Automatically decide whether to wrap or not based on the number of fields. Following + /// the standard rule of thumb, containers with more than two fields are wrapped. + fields: usize, + } = .{ .wrap = true }, + + fn shouldWrap(self: StringifyContainerOptions) bool { + return switch (self.whitespace_style) { + .wrap => |wrap| wrap, + .fields => |fields| fields > 2, + }; + } +}; + +/// Serialize the given value to ZON. +/// +/// It is asserted at comptime that `@TypeOf(val)` is not a recursive type. +pub fn stringify( + /// The value to serialize. May only transitively contain the following supported types: + /// * bools + /// * fixed sized numeric types + /// * exhaustive enums, enum literals + /// * Non-exhaustive enums may hold values that have no literal representation, and + /// therefore cannot be stringified in a way that allows round trips back through the + /// parser. There are plans to resolve this in the future. + /// * slices + /// * arrays + /// * structures + /// * tagged unions + /// * optionals + /// * null + val: anytype, + comptime options: StringifyOptions, + writer: anytype, +) @TypeOf(writer).Error!void { + var serializer = stringifier(writer, .{ + .whitespace = options.whitespace, + }); + try serializer.value(val, .{ + .emit_utf8_codepoints = options.emit_utf8_codepoints, + .emit_strings_as_containers = options.emit_strings_as_containers, + .emit_default_optional_fields = options.emit_default_optional_fields, + }); +} + +/// Like `stringify`, but recursive types are allowed. +/// +/// Returns `error.MaxDepth` if `depth` is exceeded. 
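// A minimal usage sketch for `stringify` as declared above, using only the declarations in
// this file; the expected string assumes the default whitespace rules exercised by the
// tests further below (containers with more than two fields wrap).
test "stringify usage sketch" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    try stringify(.{ .x = 1.5, .y = 2.5 }, .{}, buf.writer());
    try std.testing.expectEqualStrings(".{ .x = 1.5, .y = 2.5 }", buf.items);
}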
+pub fn stringifyMaxDepth(val: anytype, comptime options: StringifyOptions, writer: anytype, depth: usize) Stringifier(@TypeOf(writer)).MaxDepthError!void { + var serializer = stringifier(writer, .{ + .whitespace = options.whitespace, + }); + try serializer.valueMaxDepth(val, .{ + .emit_utf8_codepoints = options.emit_utf8_codepoints, + .emit_strings_as_containers = options.emit_strings_as_containers, + .emit_default_optional_fields = options.emit_default_optional_fields, + }, depth); +} + +/// Like `stringify`, but recursive types are allowed. +/// +/// It is the caller's responsibility to ensure that `val` does not contain cycles. +pub fn stringifyArbitraryDepth(val: anytype, comptime options: StringifyOptions, writer: anytype) @TypeOf(writer).Error!void { + var serializer = stringifier(writer, .{ + .whitespace = options.whitespace, + }); + try serializer.valueArbitraryDepth(val, .{ + .emit_utf8_codepoints = options.emit_utf8_codepoints, + .emit_strings_as_containers = options.emit_strings_as_containers, + .emit_default_optional_fields = options.emit_default_optional_fields, + }); +} + +const RecursiveTypeBuffer = [32]type; + +fn typeIsRecursive(comptime T: type) bool { + comptime var buf: RecursiveTypeBuffer = undefined; + return comptime typeIsRecursiveImpl(T, buf[0..0]); +} + +fn typeIsRecursiveImpl(comptime T: type, comptime visited_arg: []type) bool { + comptime var visited = visited_arg; + + // Check if we've already seen this type + inline for (visited) |found| { + if (T == found) { + return true; + } + } + + // Add this type to the stack + if (visited.len >= @typeInfo(RecursiveTypeBuffer).array.len) { + @compileError("recursion limit"); + } + visited.ptr[visited.len] = T; + visited.len += 1; + + // Recurse + switch (@typeInfo(T)) { + .pointer => |pointer| return typeIsRecursiveImpl(pointer.child, visited), + .array => |array| return typeIsRecursiveImpl(array.child, visited), + .@"struct" => |@"struct"| inline for (@"struct".fields) |field| { + if (typeIsRecursiveImpl(field.type, visited)) { + return true; + } + }, + .@"union" => |@"union"| inline for (@"union".fields) |field| { + if (typeIsRecursiveImpl(field.type, visited)) { + return true; + } + }, + .optional => |optional| return typeIsRecursiveImpl(optional.child, visited), + else => {}, + } + return false; +} + +test "std.zon typeIsRecursive" { + try std.testing.expect(!typeIsRecursive(bool)); + try std.testing.expect(!typeIsRecursive(struct { x: i32, y: i32 })); + try std.testing.expect(!typeIsRecursive(struct { i32, i32 })); + try std.testing.expect(typeIsRecursive(struct { x: i32, y: i32, z: *@This() })); + try std.testing.expect(typeIsRecursive(struct { + a: struct { + const A = @This(); + b: struct { + c: *struct { + a: ?A, + }, + }, + }, + })); + try std.testing.expect(typeIsRecursive(struct { + a: [3]*@This(), + })); + try std.testing.expect(typeIsRecursive(struct { + a: union { a: i32, b: *@This() }, + })); +} + +fn checkValueDepth(val: anytype, depth: usize) error{MaxDepth}!void { + if (depth == 0) return error.MaxDepth; + const child_depth = depth - 1; + + switch (@typeInfo(@TypeOf(val))) { + .pointer => |pointer| switch (pointer.size) { + .One => try checkValueDepth(val.*, child_depth), + .Slice => for (val) |item| { + try checkValueDepth(item, child_depth); + }, + .C, .Many => {}, + }, + .array => for (val) |item| { + try checkValueDepth(item, child_depth); + }, + .@"struct" => |@"struct"| inline for (@"struct".fields) |field_info| { + try checkValueDepth(@field(val, field_info.name), child_depth); + }, + 
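        // Untagged unions are treated as leaves here: the active field is not knowable at
        // runtime, so only tagged unions recurse into their payload below.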
.@"union" => |@"union"| if (@"union".tag_type == null) { + return; + } else switch (val) { + inline else => |payload| { + return checkValueDepth(payload, child_depth); + }, + }, + .optional => if (val) |inner| try checkValueDepth(inner, child_depth), + else => {}, + } +} + +fn expectValueDepthEquals(expected: usize, value: anytype) !void { + try checkValueDepth(value, expected); + try std.testing.expectError(error.MaxDepth, checkValueDepth(value, expected - 1)); +} + +test "std.zon checkValueDepth" { + try expectValueDepthEquals(1, 10); + try expectValueDepthEquals(2, .{ .x = 1, .y = 2 }); + try expectValueDepthEquals(2, .{ 1, 2 }); + try expectValueDepthEquals(3, .{ 1, .{ 2, 3 } }); + try expectValueDepthEquals(3, .{ .{ 1, 2 }, 3 }); + try expectValueDepthEquals(3, .{ .x = 0, .y = 1, .z = .{ .x = 3 } }); + try expectValueDepthEquals(3, .{ .x = 0, .y = .{ .x = 1 }, .z = 2 }); + try expectValueDepthEquals(3, .{ .x = .{ .x = 0 }, .y = 1, .z = 2 }); + try expectValueDepthEquals(2, @as(?u32, 1)); + try expectValueDepthEquals(1, @as(?u32, null)); + try expectValueDepthEquals(1, null); + try expectValueDepthEquals(2, &1); + try expectValueDepthEquals(3, &@as(?u32, 1)); + + const Union = union(enum) { + x: u32, + y: struct { x: u32 }, + }; + try expectValueDepthEquals(2, Union{ .x = 1 }); + try expectValueDepthEquals(3, Union{ .y = .{ .x = 1 } }); + + const Recurse = struct { r: ?*const @This() }; + try expectValueDepthEquals(2, Recurse{ .r = null }); + try expectValueDepthEquals(5, Recurse{ .r = &Recurse{ .r = null } }); + try expectValueDepthEquals(8, Recurse{ .r = &Recurse{ .r = &Recurse{ .r = null } } }); + + try expectValueDepthEquals(2, @as([]const u8, &.{ 1, 2, 3 })); + try expectValueDepthEquals(3, @as([]const []const u8, &.{&.{ 1, 2, 3 }})); +} + +/// Lower level control over stringification, you can create a new instance with `stringifier`. +/// +/// Useful when you want control over which fields/items are stringified, how they're represented, +/// or want to write a ZON object that does not exist in memory. +/// +/// You can serialize values with `value`. To serialize recursive types, the following are provided: +/// * `valueMaxDepth` +/// * `valueArbitraryDepth` +/// +/// You can also serialize values using specific notations: +/// * `int` +/// * `float` +/// * `utf8Codepoint` +/// * `slice` +/// * `sliceMaxDepth` +/// * `sliceArbitraryDepth` +/// * `string` +/// * `multilineString` +/// +/// For manual serialization of containers, see: +/// * `startStruct` +/// * `startTuple` +/// * `startSlice` +/// +/// # Example +/// ```zig +/// var serializer = stringifier(writer, .{}); +/// var vec2 = try serializer.startStruct(.{}); +/// try vec2.field("x", 1.5, .{}); +/// try vec2.fieldPrefix(); +/// try serializer.value(2.5); +/// try vec2.finish(); +/// ``` +pub fn Stringifier(comptime Writer: type) type { + return struct { + const Self = @This(); + + pub const MaxDepthError = error{MaxDepth} || Writer.Error; + + options: StringifierOptions, + indent_level: u8, + writer: Writer, + + /// Initialize a stringifier. + fn init(writer: Writer, options: StringifierOptions) Self { + return .{ + .options = options, + .writer = writer, + .indent_level = 0, + }; + } + + /// Serialize a value, similar to `stringify`. + pub fn value(self: *Self, val: anytype, options: StringifyValueOptions) Writer.Error!void { + comptimeAssertNoRecursion(@TypeOf(val)); + return self.valueArbitraryDepth(val, options); + } + + /// Serialize a value, similar to `stringifyMaxDepth`. 
+ pub fn valueMaxDepth(self: *Self, val: anytype, options: StringifyValueOptions, depth: usize) MaxDepthError!void { + try checkValueDepth(val, depth); + return self.valueArbitraryDepth(val, options); + } + + /// Serialize a value, similar to `stringifyArbitraryDepth`. + pub fn valueArbitraryDepth(self: *Self, val: anytype, options: StringifyValueOptions) Writer.Error!void { + switch (@typeInfo(@TypeOf(val))) { + .int => |int_info| if (options.emit_utf8_codepoints and + int_info.signedness == .unsigned and + int_info.bits <= 21 and std.unicode.utf8ValidCodepoint(val)) + { + self.utf8Codepoint(val) catch |err| switch (err) { + error.InvalidCodepoint => unreachable, // Already validated + else => |e| return e, + }; + } else { + try self.int(val); + }, + .comptime_int => if (options.emit_utf8_codepoints and + val > 0 and + val <= std.math.maxInt(u21) and + std.unicode.utf8ValidCodepoint(val)) + { + self.utf8Codepoint(val) catch |err| switch (err) { + error.InvalidCodepoint => unreachable, // Already validated + else => |e| return e, + }; + } else { + try self.int(val); + }, + .float, .comptime_float => try self.float(val), + .bool, .null => try std.fmt.format(self.writer, "{}", .{val}), + .enum_literal => { + try self.writer.writeByte('.'); + try self.ident(@tagName(val)); + }, + .@"enum" => |@"enum"| if (@"enum".is_exhaustive) { + try self.writer.writeByte('.'); + try self.ident(@tagName(val)); + } else { + @compileError(@typeName(@TypeOf(val)) ++ ": cannot stringify non-exhaustive enums"); + }, + .void => try self.writer.writeAll("{}"), + .pointer => |pointer| { + const child_type = switch (@typeInfo(pointer.child)) { + .array => |array| array.child, + else => if (pointer.size != .Slice) @compileError(@typeName(@TypeOf(val)) ++ ": cannot stringify pointer to this type") else pointer.child, + }; + if (child_type == u8 and !options.emit_strings_as_containers) { + try self.string(val); + } else { + try self.sliceImpl(val, options); + } + }, + .array => { + var container = try self.startTuple(.{ .whitespace_style = .{ .fields = val.len } }); + for (val) |item_val| { + try container.fieldArbitraryDepth(item_val, options); + } + try container.finish(); + }, + .@"struct" => |@"struct"| if (@"struct".is_tuple) { + var container = try self.startTuple(.{ .whitespace_style = .{ .fields = @"struct".fields.len } }); + inline for (val) |field_value| { + try container.fieldArbitraryDepth(field_value, options); + } + try container.finish(); + } else { + // Decide which fields to emit + const fields, const skipped = if (options.emit_default_optional_fields) b: { + break :b .{ @"struct".fields.len, [1]bool{false} ** @"struct".fields.len }; + } else b: { + var fields = @"struct".fields.len; + var skipped = [1]bool{false} ** @"struct".fields.len; + inline for (@"struct".fields, &skipped) |field_info, *skip| { + if (field_info.default_value) |default_field_value_opaque| { + const field_value = @field(val, field_info.name); + const default_field_value: *const @TypeOf(field_value) = @ptrCast(@alignCast(default_field_value_opaque)); + if (std.meta.eql(field_value, default_field_value.*)) { + skip.* = true; + fields -= 1; + } + } + } + break :b .{ fields, skipped }; + }; + + // Emit those fields + var container = try self.startStruct(.{ .whitespace_style = .{ .fields = fields } }); + inline for (@"struct".fields, skipped) |field_info, skip| { + if (!skip) { + try container.fieldArbitraryDepth(field_info.name, @field(val, field_info.name), options); + } + } + try container.finish(); + }, + .@"union" => |@"union"| if 
(@"union".tag_type == null) { + @compileError(@typeName(@TypeOf(val)) ++ ": cannot stringify untagged unions"); + } else { + var container = try self.startStruct(.{ .whitespace_style = .{ .fields = 1 } }); + switch (val) { + inline else => |pl, tag| try container.fieldArbitraryDepth(@tagName(tag), pl, options), + } + try container.finish(); + }, + .optional => if (val) |inner| { + try self.valueArbitraryDepth(inner, options); + } else { + try self.writer.writeAll("null"); + }, + + else => @compileError(@typeName(@TypeOf(val)) ++ ": cannot stringify this type"), + } + } + + /// Serialize an integer. + pub fn int(self: *Self, val: anytype) Writer.Error!void { + try std.fmt.formatInt(val, 10, .lower, .{}, self.writer); + } + + /// Serialize a float. + pub fn float(self: *Self, val: anytype) Writer.Error!void { + switch (@typeInfo(@TypeOf(val))) { + .float, .comptime_float => if (std.math.isNan(val)) { + return self.writer.writeAll("nan"); + } else if (@as(f128, val) == std.math.inf(f128)) { + return self.writer.writeAll("inf"); + } else if (@as(f128, val) == -std.math.inf(f128)) { + return self.writer.writeAll("-inf"); + } else { + try std.fmt.format(self.writer, "{d}", .{val}); + }, + else => @compileError(@typeName(@TypeOf(val)) ++ ": expected float"), + } + } + + fn identNeedsEscape(name: []const u8) bool { + std.debug.assert(name.len != 0); + for (name, 0..) |c, i| { + switch (c) { + 'A'...'Z', 'a'...'z', '_' => {}, + '0'...'9' => if (i == 0) return true, + else => return true, + } + } + return std.zig.Token.keywords.has(name); + } + + /// Serialize `name` as an identifier. + /// + /// Escapes the identifier if necessary. + pub fn ident(self: *Self, name: []const u8) Writer.Error!void { + if (identNeedsEscape(name)) { + try self.writer.writeAll("@\""); + try self.writer.writeAll(name); + try self.writer.writeByte('"'); + } else { + try self.writer.writeAll(name); + } + } + + /// Serialize `val` as a UTF8 codepoint. + /// + /// Returns `error.InvalidCodepoint` if `val` is not a valid UTF8 codepoint. + pub fn utf8Codepoint(self: *Self, val: u21) (Writer.Error || error{InvalidCodepoint})!void { + var buf: [8]u8 = undefined; + const len = std.unicode.utf8Encode(val, &buf) catch return error.InvalidCodepoint; + const str = buf[0..len]; + try std.fmt.format(self.writer, "'{'}'", .{std.zig.fmtEscapes(str)}); + } + + /// Like `value`, but always serializes `val` as a slice. + /// + /// Will fail at comptime if `val` is not an array or slice. + pub fn slice(self: *Self, val: anytype, options: StringifyValueOptions) Writer.Error!void { + comptimeAssertNoRecursion(@TypeOf(val)); + try self.sliceArbitraryDepth(val, options); + } + + /// Like `value`, but recursive types are allowed. + /// + /// Returns `error.MaxDepthError` if `depth` is exceeded. + pub fn sliceMaxDepth(self: *Self, val: anytype, options: StringifyValueOptions, depth: usize) MaxDepthError!void { + try checkValueDepth(val, depth); + try self.sliceArbitraryDepth(val, options); + } + + /// Like `value`, but recursive types are allowed. + /// + /// It is the caller's responsibility to ensure that `val` does not contain cycles. 
+ pub fn sliceArbitraryDepth(self: *Self, val: anytype, options: StringifyValueOptions) Writer.Error!void { + try self.sliceImpl(val, options); + } + + fn sliceImpl(self: *Self, val: anytype, options: StringifyValueOptions) Writer.Error!void { + var container = try self.startSlice(.{ .whitespace_style = .{ .fields = val.len } }); + for (val) |item_val| { + try container.itemArbitraryDepth(item_val, options); + } + try container.finish(); + } + + /// Like `value`, but always serializes `val` as a string. + pub fn string(self: *Self, val: []const u8) Writer.Error!void { + try std.fmt.format(self.writer, "\"{}\"", .{std.zig.fmtEscapes(val)}); + } + + /// Options for formatting multiline strings. + pub const MultilineStringOptions = struct { + /// If top level is true, whitespace before and after the multiline string is elided. + /// If it is true, a newline is printed, then the value, followed by a newline, and if + /// whitespace is true any necessary indentation follows. + top_level: bool = false, + }; + + /// Like `value`, but always serializes to a multiline string literal. + /// + /// Returns `error.InnerCarriageReturn` if `val` contains a CR not followed by a newline, + /// since multiline strings cannot represent CR without a following newline. + pub fn multilineString(self: *Self, val: []const u8, options: MultilineStringOptions) (Writer.Error || error{InnerCarriageReturn})!void { + // Make sure the string does not contain any carriage returns not followed by a newline + var i: usize = 0; + while (i < val.len) : (i += 1) { + if (val[i] == '\r') { + if (i + 1 < val.len) { + if (val[i + 1] == '\n') { + i += 1; + continue; + } + } + return error.InnerCarriageReturn; + } + } + + if (!options.top_level) { + try self.newline(); + try self.indent(); + } + + try self.writer.writeAll("\\\\"); + for (val) |c| { + if (c != '\r') { + try self.writer.writeByte(c); // We write newlines here even if whitespace off + if (c == '\n') { + try self.indent(); + try self.writer.writeAll("\\\\"); + } + } + } + + if (!options.top_level) { + try self.writer.writeByte('\n'); // Even if whitespace off + try self.indent(); + } + } + + /// Create a `Struct` for writing ZON structs field by field. + pub fn startStruct(self: *Self, options: StringifyContainerOptions) Writer.Error!Struct { + return Struct.start(self, options); + } + + /// Creates a `Tuple` for writing ZON tuples field by field. + pub fn startTuple(self: *Self, options: StringifyContainerOptions) Writer.Error!Tuple { + return Tuple.start(self, options); + } + + /// Creates a `Slice` for writing ZON slices item by item. + pub fn startSlice(self: *Self, options: StringifyContainerOptions) Writer.Error!Slice { + return Slice.start(self, options); + } + + fn indent(self: *Self) Writer.Error!void { + if (self.options.whitespace) { + try self.writer.writeByteNTimes(' ', 4 * self.indent_level); + } + } + + fn newline(self: *Self) Writer.Error!void { + if (self.options.whitespace) { + try self.writer.writeByte('\n'); + } + } + + fn newlineOrSpace(self: *Self, len: usize) Writer.Error!void { + if (self.containerShouldWrap(len)) { + try self.newline(); + } else { + try self.space(); + } + } + + fn space(self: *Self) Writer.Error!void { + if (self.options.whitespace) { + try self.writer.writeByte(' '); + } + } + + /// Writes ZON tuples field by field. 
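        // Sketch of `multilineString`: every line is prefixed with \\, and when `top_level`
        // is false a surrounding newline plus indentation is also emitted so the literal can
        // be embedded inside a container.
        test "multilineString sketch" {
            var buf = std.ArrayList(u8).init(std.testing.allocator);
            defer buf.deinit();
            var serializer = stringifier(buf.writer(), .{});
            try serializer.multilineString("hello\nworld", .{ .top_level = true });
            try std.testing.expectEqualStrings("\\\\hello\n\\\\world", buf.items);
        }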
+ pub const Tuple = struct { + container: Container, + + fn start(parent: *Self, options: StringifyContainerOptions) Writer.Error!Tuple { + return .{ + .container = try Container.start(parent, .anon, options), + }; + } + + /// Finishes serializing the tuple. + /// + /// Prints a trailing comma as configured when appropriate, and the closing bracket. + pub fn finish(self: *Tuple) Writer.Error!void { + try self.container.finish(); + self.* = undefined; + } + + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`. + pub fn field(self: *Tuple, val: anytype, options: StringifyValueOptions) Writer.Error!void { + try self.container.field(null, val, options); + } + + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`. + pub fn fieldMaxDepth(self: *Tuple, val: anytype, options: StringifyValueOptions, depth: usize) MaxDepthError!void { + try self.container.fieldMaxDepth(null, val, options, depth); + } + + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueArbitraryDepth`. + pub fn fieldArbitraryDepth(self: *Tuple, val: anytype, options: StringifyValueOptions) Writer.Error!void { + try self.container.fieldArbitraryDepth(null, val, options); + } + + /// Print a field prefix. This prints any necessary commas, and whitespace as + /// configured. Useful if you want to serialize the field value yourself. + pub fn fieldPrefix(self: *Tuple) Writer.Error!void { + try self.container.fieldPrefix(null); + } + }; + + /// Writes ZON structs field by field. + pub const Struct = struct { + container: Container, + + fn start(parent: *Self, options: StringifyContainerOptions) Writer.Error!Struct { + return .{ + .container = try Container.start(parent, .named, options), + }; + } + + /// Finishes serializing the struct. + /// + /// Prints a trailing comma as configured when appropriate, and the closing bracket. + pub fn finish(self: *Struct) Writer.Error!void { + try self.container.finish(); + self.* = undefined; + } + + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `value`. + pub fn field(self: *Struct, name: []const u8, val: anytype, options: StringifyValueOptions) Writer.Error!void { + try self.container.field(name, val, options); + } + + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueMaxDepth`. + pub fn fieldMaxDepth(self: *Struct, name: []const u8, val: anytype, options: StringifyValueOptions, depth: usize) MaxDepthError!void { + try self.container.fieldMaxDepth(name, val, options, depth); + } + + /// Serialize a field. Equivalent to calling `fieldPrefix` followed by `valueArbitraryDepth`. + pub fn fieldArbitraryDepth(self: *Struct, name: []const u8, val: anytype, options: StringifyValueOptions) Writer.Error!void { + try self.container.fieldArbitraryDepth(name, val, options); + } + + /// Print a field prefix. This prints any necessary commas, the field name (escaped if + /// necessary) and whitespace as configured. Useful if you want to serialize the field + /// value yourself. + pub fn fieldPrefix(self: *Struct, name: []const u8) Writer.Error!void { + try self.container.fieldPrefix(name); + } + }; + + /// Writes ZON slices field by field. + pub const Slice = struct { + container: Container, + + fn start(parent: *Self, options: StringifyContainerOptions) Writer.Error!Slice { + try parent.writer.writeByte('&'); + return .{ + .container = try Container.start(parent, .anon, options), + }; + } + + /// Finishes serializing the slice. 
+ /// + /// Prints a trailing comma as configured when appropriate, and the closing bracket. + pub fn finish(self: *Slice) Writer.Error!void { + try self.container.finish(); + self.* = undefined; + } + + /// Serialize an item. Equivalent to calling `itemPrefix` followed by `value`. + pub fn item(self: *Slice, val: anytype, options: StringifyValueOptions) Writer.Error!void { + try self.container.field(null, val, options); + } + + /// Serialize an item. Equivalent to calling `itemPrefix` followed by `valueMaxDepth`. + pub fn itemMaxDepth(self: *Slice, val: anytype, options: StringifyValueOptions, depth: usize) MaxDepthError!void { + try self.container.fieldMaxDepth(null, val, options, depth); + } + + /// Serialize an item. Equivalent to calling `itemPrefix` followed by `valueArbitraryDepth`. + pub fn itemArbitraryDepth(self: *Slice, val: anytype, options: StringifyValueOptions) Writer.Error!void { + try self.container.fieldArbitraryDepth(null, val, options); + } + + /// Print a field prefix. This prints any necessary commas, and whitespace as + /// configured. Useful if you want to serialize the item value yourself. + pub fn itemPrefix(self: *Slice) Writer.Error!void { + try self.container.fieldPrefix(null); + } + }; + + const Container = struct { + const FieldStyle = enum { named, anon }; + + serializer: *Self, + field_style: FieldStyle, + options: StringifyContainerOptions, + empty: bool, + + fn start(serializer: *Self, field_style: FieldStyle, options: StringifyContainerOptions) Writer.Error!Container { + if (options.shouldWrap()) serializer.indent_level +|= 1; + try serializer.writer.writeAll(".{"); + return .{ + .serializer = serializer, + .field_style = field_style, + .options = options, + .empty = true, + }; + } + + fn finish(self: *Container) Writer.Error!void { + if (self.options.shouldWrap()) self.serializer.indent_level -|= 1; + if (!self.empty) { + if (self.options.shouldWrap()) { + if (self.serializer.options.whitespace) { + try self.serializer.writer.writeByte(','); + } + try self.serializer.newline(); + try self.serializer.indent(); + } else if (!self.shouldElideSpaces()) { + try self.serializer.space(); + } + } + try self.serializer.writer.writeByte('}'); + self.* = undefined; + } + + fn fieldPrefix(self: *Container, name: ?[]const u8) Writer.Error!void { + if (!self.empty) { + try self.serializer.writer.writeByte(','); + } + self.empty = false; + if (self.options.shouldWrap()) { + try self.serializer.newline(); + } else if (!self.shouldElideSpaces()) { + try self.serializer.space(); + } + if (self.options.shouldWrap()) try self.serializer.indent(); + if (name) |n| { + try self.serializer.writer.writeByte('.'); + try self.serializer.ident(n); + try self.serializer.space(); + try self.serializer.writer.writeByte('='); + try self.serializer.space(); + } + } + + fn field(self: *Container, name: ?[]const u8, val: anytype, options: StringifyValueOptions) Writer.Error!void { + comptimeAssertNoRecursion(@TypeOf(val)); + try self.fieldArbitraryDepth(name, val, options); + } + + fn fieldMaxDepth(self: *Container, name: ?[]const u8, val: anytype, options: StringifyValueOptions, depth: usize) MaxDepthError!void { + try checkValueDepth(val, depth); + try self.fieldArbitraryDepth(name, val, options); + } + + fn fieldArbitraryDepth(self: *Container, name: ?[]const u8, val: anytype, options: StringifyValueOptions) Writer.Error!void { + try self.fieldPrefix(name); + try self.serializer.valueArbitraryDepth(val, options); + } + + fn shouldElideSpaces(self: *const Container) bool { + return 
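                // Spaces inside the braces are elided only for anonymous containers (tuples
                // and slices) declared with exactly one field up front via `.fields = 1`,
                // producing `.{1}` rather than `.{ 1 }`.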
switch (self.options.whitespace_style) { + .fields => |fields| self.field_style != .named and fields == 1, + else => false, + }; + } + }; + + fn comptimeAssertNoRecursion(comptime T: type) void { + if (comptime typeIsRecursive(T)) { + @compileError(@typeName(T) ++ ": recursive type stringified without depth limit"); + } + } + }; +} + +/// Creates an instance of `Stringifier`. +pub fn stringifier(writer: anytype, options: StringifierOptions) Stringifier(@TypeOf(writer)) { + return Stringifier(@TypeOf(writer)).init(writer, options); +} + +fn expectStringifyEqual(expected: []const u8, value: anytype, comptime options: StringifyOptions) !void { + var buf = std.ArrayList(u8).init(std.testing.allocator); + defer buf.deinit(); + try stringify(value, options, buf.writer()); + try std.testing.expectEqualStrings(expected, buf.items); +} + +test "std.zon stringify whitespace, high level API" { + try expectStringifyEqual(".{}", .{}, .{}); + try expectStringifyEqual(".{}", .{}, .{ .whitespace = false }); + + try expectStringifyEqual(".{1}", .{1}, .{}); + try expectStringifyEqual(".{1}", .{1}, .{ .whitespace = false }); + + try expectStringifyEqual(".{1}", @as([1]u32, .{1}), .{}); + try expectStringifyEqual(".{1}", @as([1]u32, .{1}), .{ .whitespace = false }); + + try expectStringifyEqual("&.{1}", @as([]const u32, &.{1}), .{}); + try expectStringifyEqual("&.{1}", @as([]const u32, &.{1}), .{ .whitespace = false }); + + try expectStringifyEqual(".{ .x = 1 }", .{ .x = 1 }, .{}); + try expectStringifyEqual(".{.x=1}", .{ .x = 1 }, .{ .whitespace = false }); + + try expectStringifyEqual(".{ 1, 2 }", .{ 1, 2 }, .{}); + try expectStringifyEqual(".{1,2}", .{ 1, 2 }, .{ .whitespace = false }); + + try expectStringifyEqual(".{ 1, 2 }", @as([2]u32, .{ 1, 2 }), .{}); + try expectStringifyEqual(".{1,2}", @as([2]u32, .{ 1, 2 }), .{ .whitespace = false }); + + try expectStringifyEqual("&.{ 1, 2 }", @as([]const u32, &.{ 1, 2 }), .{}); + try expectStringifyEqual("&.{1,2}", @as([]const u32, &.{ 1, 2 }), .{ .whitespace = false }); + + try expectStringifyEqual(".{ .x = 1, .y = 2 }", .{ .x = 1, .y = 2 }, .{}); + try expectStringifyEqual(".{.x=1,.y=2}", .{ .x = 1, .y = 2 }, .{ .whitespace = false }); + + try expectStringifyEqual( + \\.{ + \\ 1, + \\ 2, + \\ 3, + \\} + , .{ 1, 2, 3 }, .{}); + try expectStringifyEqual(".{1,2,3}", .{ 1, 2, 3 }, .{ .whitespace = false }); + + try expectStringifyEqual( + \\.{ + \\ 1, + \\ 2, + \\ 3, + \\} + , @as([3]u32, .{ 1, 2, 3 }), .{}); + try expectStringifyEqual(".{1,2,3}", @as([3]u32, .{ 1, 2, 3 }), .{ .whitespace = false }); + + try expectStringifyEqual( + \\&.{ + \\ 1, + \\ 2, + \\ 3, + \\} + , @as([]const u32, &.{ 1, 2, 3 }), .{}); + try expectStringifyEqual("&.{1,2,3}", @as([]const u32, &.{ 1, 2, 3 }), .{ .whitespace = false }); + + try expectStringifyEqual( + \\.{ + \\ .x = 1, + \\ .y = 2, + \\ .z = 3, + \\} + , .{ .x = 1, .y = 2, .z = 3 }, .{}); + try expectStringifyEqual(".{.x=1,.y=2,.z=3}", .{ .x = 1, .y = 2, .z = 3 }, .{ .whitespace = false }); + + const Union = union(enum) { a: bool, b: i32, c: u8 }; + + try expectStringifyEqual(".{ .b = 1 }", Union{ .b = 1 }, .{}); + try expectStringifyEqual(".{.b=1}", Union{ .b = 1 }, .{ .whitespace = false }); + + // Nested indentation where outer object doesn't wrap + try expectStringifyEqual( + \\.{ .inner = .{ + \\ 1, + \\ 2, + \\ 3, + \\} } + , .{ .inner = .{ 1, 2, 3 } }, .{}); +} + +test "std.zon stringify whitespace, low level API" { + var buffer = std.ArrayList(u8).init(std.testing.allocator); + defer buffer.deinit(); + const writer = 
buffer.writer(); + var serializer = stringifier(writer, .{}); + + inline for (.{ true, false }) |whitespace| { + serializer.options = .{ .whitespace = whitespace }; + + // Empty containers + { + var container = try serializer.startStruct(.{}); + try container.finish(); + try std.testing.expectEqualStrings(".{}", buffer.items); + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{}); + try container.finish(); + try std.testing.expectEqualStrings(".{}", buffer.items); + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .wrap = false } }); + try container.finish(); + try std.testing.expectEqualStrings(".{}", buffer.items); + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{ .whitespace_style = .{ .wrap = false } }); + try container.finish(); + try std.testing.expectEqualStrings(".{}", buffer.items); + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .fields = 0 } }); + try container.finish(); + try std.testing.expectEqualStrings(".{}", buffer.items); + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{ .whitespace_style = .{ .fields = 0 } }); + try container.finish(); + try std.testing.expectEqualStrings(".{}", buffer.items); + buffer.clearRetainingCapacity(); + } + + // Size 1 + { + var container = try serializer.startStruct(.{}); + try container.field("a", 1, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings( + \\.{ + \\ .a = 1, + \\} + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{}); + try container.field(1, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings( + \\.{ + \\ 1, + \\} + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{1}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .wrap = false } }); + try container.field("a", 1, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ .a = 1 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + // We get extra spaces here, since we didn't know up front that there would only be one + // field. 
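            // Declaring the size up front with `.whitespace_style = .{ .fields = 1 }`, as in
            // a later case below, avoids the extra spaces and yields `.{1}`.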
+ var container = try serializer.startTuple(.{ .whitespace_style = .{ .wrap = false } }); + try container.field(1, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ 1 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{1}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .fields = 1 } }); + try container.field("a", 1, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ .a = 1 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{ .whitespace_style = .{ .fields = 1 } }); + try container.field(1, .{}); + try container.finish(); + try std.testing.expectEqualStrings(".{1}", buffer.items); + buffer.clearRetainingCapacity(); + } + + // Size 2 + { + var container = try serializer.startStruct(.{}); + try container.field("a", 1, .{}); + try container.field("b", 2, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings( + \\.{ + \\ .a = 1, + \\ .b = 2, + \\} + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1,.b=2}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{}); + try container.field(1, .{}); + try container.field(2, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings( + \\.{ + \\ 1, + \\ 2, + \\} + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{1,2}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .wrap = false } }); + try container.field("a", 1, .{}); + try container.field("b", 2, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ .a = 1, .b = 2 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1,.b=2}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{ .whitespace_style = .{ .wrap = false } }); + try container.field(1, .{}); + try container.field(2, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ 1, 2 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{1,2}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .fields = 2 } }); + try container.field("a", 1, .{}); + try container.field("b", 2, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ .a = 1, .b = 2 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1,.b=2}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{ .whitespace_style = .{ .fields = 2 } }); + try container.field(1, .{}); + try container.field(2, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ 1, 2 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{1,2}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + // Size 3 + { + var container = try serializer.startStruct(.{}); + try container.field("a", 1, .{}); + try container.field("b", 2, .{}); + try container.field("c", 3, .{}); + try container.finish(); + if (whitespace) { + try 
std.testing.expectEqualStrings( + \\.{ + \\ .a = 1, + \\ .b = 2, + \\ .c = 3, + \\} + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{}); + try container.field(1, .{}); + try container.field(2, .{}); + try container.field(3, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings( + \\.{ + \\ 1, + \\ 2, + \\ 3, + \\} + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{1,2,3}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .wrap = false } }); + try container.field("a", 1, .{}); + try container.field("b", 2, .{}); + try container.field("c", 3, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ .a = 1, .b = 2, .c = 3 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{ .whitespace_style = .{ .wrap = false } }); + try container.field(1, .{}); + try container.field(2, .{}); + try container.field(3, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings(".{ 1, 2, 3 }", buffer.items); + } else { + try std.testing.expectEqualStrings(".{1,2,3}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .fields = 3 } }); + try container.field("a", 1, .{}); + try container.field("b", 2, .{}); + try container.field("c", 3, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings( + \\.{ + \\ .a = 1, + \\ .b = 2, + \\ .c = 3, + \\} + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{.a=1,.b=2,.c=3}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + { + var container = try serializer.startTuple(.{ .whitespace_style = .{ .fields = 3 } }); + try container.field(1, .{}); + try container.field(2, .{}); + try container.field(3, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings( + \\.{ + \\ 1, + \\ 2, + \\ 3, + \\} + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{1,2,3}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + + // Nested objects where the outer container doesn't wrap but the inner containers do + { + var container = try serializer.startStruct(.{ .whitespace_style = .{ .wrap = false } }); + try container.field("first", .{ 1, 2, 3 }, .{}); + try container.field("second", .{ 4, 5, 6 }, .{}); + try container.finish(); + if (whitespace) { + try std.testing.expectEqualStrings( + \\.{ .first = .{ + \\ 1, + \\ 2, + \\ 3, + \\}, .second = .{ + \\ 4, + \\ 5, + \\ 6, + \\} } + , buffer.items); + } else { + try std.testing.expectEqualStrings(".{.first=.{1,2,3},.second=.{4,5,6}}", buffer.items); + } + buffer.clearRetainingCapacity(); + } + } +} + +test "std.zon stringify utf8 codepoints" { + var buffer = std.ArrayList(u8).init(std.testing.allocator); + defer buffer.deinit(); + const writer = buffer.writer(); + var serializer = stringifier(writer, .{}); + + // Minimal case + try serializer.utf8Codepoint('a'); + try std.testing.expectEqualStrings("'a'", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.int('a'); + try std.testing.expectEqualStrings("97", buffer.items); + 
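    // `int` always prints the numeric value; only `value` with `.emit_utf8_codepoints = true`
    // (exercised below) switches to the character-literal form.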
buffer.clearRetainingCapacity(); + + try serializer.value('a', .{ .emit_utf8_codepoints = true }); + try std.testing.expectEqualStrings("'a'", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value('a', .{ .emit_utf8_codepoints = false }); + try std.testing.expectEqualStrings("97", buffer.items); + buffer.clearRetainingCapacity(); + + // Short escaped codepoint + try serializer.utf8Codepoint('\n'); + try std.testing.expectEqualStrings("'\\n'", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.int('\n'); + try std.testing.expectEqualStrings("10", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value('\n', .{ .emit_utf8_codepoints = true }); + try std.testing.expectEqualStrings("'\\n'", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value('\n', .{ .emit_utf8_codepoints = false }); + try std.testing.expectEqualStrings("10", buffer.items); + buffer.clearRetainingCapacity(); + + // Large codepoint + try serializer.utf8Codepoint('⚡'); + try std.testing.expectEqualStrings("'\\xe2\\x9a\\xa1'", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.int('⚡'); + try std.testing.expectEqualStrings("9889", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value('⚡', .{ .emit_utf8_codepoints = true }); + try std.testing.expectEqualStrings("'\\xe2\\x9a\\xa1'", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value('⚡', .{ .emit_utf8_codepoints = false }); + try std.testing.expectEqualStrings("9889", buffer.items); + buffer.clearRetainingCapacity(); + + // Invalid codepoint + try std.testing.expectError(error.InvalidCodepoint, serializer.utf8Codepoint(0x110000 + 1)); + + try serializer.int(0x110000 + 1); + try std.testing.expectEqualStrings("1114113", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value(0x110000 + 1, .{ .emit_utf8_codepoints = true }); + try std.testing.expectEqualStrings("1114113", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value(0x110000 + 1, .{ .emit_utf8_codepoints = false }); + try std.testing.expectEqualStrings("1114113", buffer.items); + buffer.clearRetainingCapacity(); + + // Valid codepoint, not a codepoint type + try serializer.value(@as(u22, 'a'), .{ .emit_utf8_codepoints = true }); + try std.testing.expectEqualStrings("97", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value(@as(i32, 'a'), .{ .emit_utf8_codepoints = false }); + try std.testing.expectEqualStrings("97", buffer.items); + buffer.clearRetainingCapacity(); + + // Make sure value options are passed to children + try serializer.value(.{ .c = '⚡' }, .{ .emit_utf8_codepoints = true }); + try std.testing.expectEqualStrings(".{ .c = '\\xe2\\x9a\\xa1' }", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value(.{ .c = '⚡' }, .{ .emit_utf8_codepoints = false }); + try std.testing.expectEqualStrings(".{ .c = 9889 }", buffer.items); + buffer.clearRetainingCapacity(); +} + +test "std.zon stringify strings" { + var buffer = std.ArrayList(u8).init(std.testing.allocator); + defer buffer.deinit(); + const writer = buffer.writer(); + var serializer = stringifier(writer, .{}); + + // Minimal case + try serializer.string("abc⚡\n"); + try std.testing.expectEqualStrings("\"abc\\xe2\\x9a\\xa1\\n\"", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.slice("abc⚡\n", .{}); + try std.testing.expectEqualStrings( + \\&.{ + \\ 97, + \\ 98, + \\ 99, + \\ 226, + \\ 154, + \\ 161, + \\ 10, + \\} + , 
buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value("abc⚡\n", .{}); + try std.testing.expectEqualStrings("\"abc\\xe2\\x9a\\xa1\\n\"", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value("abc⚡\n", .{ .emit_strings_as_containers = true }); + try std.testing.expectEqualStrings( + \\&.{ + \\ 97, + \\ 98, + \\ 99, + \\ 226, + \\ 154, + \\ 161, + \\ 10, + \\} + , buffer.items); + buffer.clearRetainingCapacity(); + + // Value options are inherited by children + try serializer.value(.{ .str = "abc" }, .{}); + try std.testing.expectEqualStrings(".{ .str = \"abc\" }", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.value(.{ .str = "abc" }, .{ .emit_strings_as_containers = true }); + try std.testing.expectEqualStrings( + \\.{ .str = &.{ + \\ 97, + \\ 98, + \\ 99, + \\} } + , buffer.items); + buffer.clearRetainingCapacity(); + + // Arrays (rather than pointers to arrays) of u8s are not considered strings, so that data can round trip + // correctly. + try serializer.value("abc".*, .{}); + try std.testing.expectEqualStrings( + \\.{ + \\ 97, + \\ 98, + \\ 99, + \\} + , buffer.items); + buffer.clearRetainingCapacity(); +} + +test "std.zon stringify multiline strings" { + var buf = std.ArrayList(u8).init(std.testing.allocator); + defer buf.deinit(); + const writer = buf.writer(); + var serializer = stringifier(writer, .{}); + + inline for (.{ true, false }) |whitespace| { + serializer.options.whitespace = whitespace; + + { + try serializer.multilineString("", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\", buf.items); + buf.clearRetainingCapacity(); + } + + { + try serializer.multilineString("abc⚡", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\abc⚡", buf.items); + buf.clearRetainingCapacity(); + } + + { + try serializer.multilineString("abc⚡\ndef", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\abc⚡\n\\\\def", buf.items); + buf.clearRetainingCapacity(); + } + + { + try serializer.multilineString("abc⚡\r\ndef", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\abc⚡\n\\\\def", buf.items); + buf.clearRetainingCapacity(); + } + + { + try serializer.multilineString("\nabc⚡", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\\n\\\\abc⚡", buf.items); + buf.clearRetainingCapacity(); + } + + { + try serializer.multilineString("\r\nabc⚡", .{ .top_level = true }); + try std.testing.expectEqualStrings("\\\\\n\\\\abc⚡", buf.items); + buf.clearRetainingCapacity(); + } + + { + try serializer.multilineString("abc\ndef", .{}); + if (whitespace) { + try std.testing.expectEqualStrings("\n\\\\abc\n\\\\def\n", buf.items); + } else { + try std.testing.expectEqualStrings("\\\\abc\n\\\\def\n", buf.items); + } + buf.clearRetainingCapacity(); + } + + { + const str: []const u8 = &.{ 'a', '\r', 'c' }; + try serializer.string(str); + try std.testing.expectEqualStrings("\"a\\rc\"", buf.items); + buf.clearRetainingCapacity(); + } + + { + try std.testing.expectError(error.InnerCarriageReturn, serializer.multilineString(@as([]const u8, &.{ 'a', '\r', 'c' }), .{})); + try std.testing.expectError(error.InnerCarriageReturn, serializer.multilineString(@as([]const u8, &.{ 'a', '\r', 'c', '\n' }), .{})); + try std.testing.expectError(error.InnerCarriageReturn, serializer.multilineString(@as([]const u8, &.{ 'a', '\r', 'c', '\r', '\n' }), .{})); + try std.testing.expectEqualStrings("", buf.items); + buf.clearRetainingCapacity(); + } + } +} + +test "std.zon stringify skip default 
fields" { + const Struct = struct { + x: i32 = 2, + y: i8, + z: u32 = 4, + inner1: struct { a: u8 = 'z', b: u8 = 'y', c: u8 } = .{ + .a = '1', + .b = '2', + .c = '3', + }, + inner2: struct { u8, u8, u8 } = .{ + 'a', + 'b', + 'c', + }, + inner3: struct { u8, u8, u8 } = .{ + 'a', + 'b', + 'c', + }, + }; + + // Not skipping if not set + try expectStringifyEqual( + \\.{ + \\ .x = 2, + \\ .y = 3, + \\ .z = 4, + \\ .inner1 = .{ + \\ .a = '1', + \\ .b = '2', + \\ .c = '3', + \\ }, + \\ .inner2 = .{ + \\ 'a', + \\ 'b', + \\ 'c', + \\ }, + \\ .inner3 = .{ + \\ 'a', + \\ 'b', + \\ 'd', + \\ }, + \\} + , + Struct{ + .y = 3, + .z = 4, + .inner1 = .{ + .a = '1', + .b = '2', + .c = '3', + }, + .inner3 = .{ + 'a', + 'b', + 'd', + }, + }, + .{ .emit_utf8_codepoints = true }, + ); + + // Top level defaults + try expectStringifyEqual( + \\.{ .y = 3, .inner3 = .{ + \\ 'a', + \\ 'b', + \\ 'd', + \\} } + , + Struct{ + .y = 3, + .z = 4, + .inner1 = .{ + .a = '1', + .b = '2', + .c = '3', + }, + .inner3 = .{ + 'a', + 'b', + 'd', + }, + }, + .{ + .emit_default_optional_fields = false, + .emit_utf8_codepoints = true, + }, + ); + + // Inner types having defaults, and defaults changing the number of fields affecting the formatting + try expectStringifyEqual( + \\.{ + \\ .y = 3, + \\ .inner1 = .{ .b = '2', .c = '3' }, + \\ .inner3 = .{ + \\ 'a', + \\ 'b', + \\ 'd', + \\ }, + \\} + , + Struct{ + .y = 3, + .z = 4, + .inner1 = .{ + .a = 'z', + .b = '2', + .c = '3', + }, + .inner3 = .{ + 'a', + 'b', + 'd', + }, + }, + .{ + .emit_default_optional_fields = false, + .emit_utf8_codepoints = true, + }, + ); + + const DefaultStrings = struct { + foo: []const u8 = "abc", + }; + try expectStringifyEqual( + \\.{} + , + DefaultStrings{ .foo = "abc" }, + .{ .emit_default_optional_fields = false }, + ); + try expectStringifyEqual( + \\.{ .foo = "abcd" } + , + DefaultStrings{ .foo = "abcd" }, + .{ .emit_default_optional_fields = false }, + ); +} + +test "std.zon depth limits" { + var buf = std.ArrayList(u8).init(std.testing.allocator); + defer buf.deinit(); + + const Recurse = struct { r: []const @This() }; + + // Normal operation + try stringifyMaxDepth(.{ 1, .{ 2, 3 } }, .{}, buf.writer(), 16); + try std.testing.expectEqualStrings(".{ 1, .{ 2, 3 } }", buf.items); + buf.clearRetainingCapacity(); + + try stringifyArbitraryDepth(.{ 1, .{ 2, 3 } }, .{}, buf.writer()); + try std.testing.expectEqualStrings(".{ 1, .{ 2, 3 } }", buf.items); + buf.clearRetainingCapacity(); + + // Max depth failing on non recursive type + try std.testing.expectError(error.MaxDepth, stringifyMaxDepth(.{ 1, .{ 2, .{ 3, 4 } } }, .{}, buf.writer(), 3)); + try std.testing.expectEqualStrings("", buf.items); + buf.clearRetainingCapacity(); + + // Max depth passing on recursive type + { + const maybe_recurse = Recurse{ .r = &.{} }; + try stringifyMaxDepth(maybe_recurse, .{}, buf.writer(), 2); + try std.testing.expectEqualStrings(".{ .r = &.{} }", buf.items); + buf.clearRetainingCapacity(); + } + + // Unchecked passing on recursive type + { + const maybe_recurse = Recurse{ .r = &.{} }; + try stringifyArbitraryDepth(maybe_recurse, .{}, buf.writer()); + try std.testing.expectEqualStrings(".{ .r = &.{} }", buf.items); + buf.clearRetainingCapacity(); + } + + // Max depth failing on recursive type due to depth + { + var maybe_recurse = Recurse{ .r = &.{} }; + maybe_recurse.r = &.{.{ .r = &.{} }}; + try std.testing.expectError(error.MaxDepth, stringifyMaxDepth(maybe_recurse, .{}, buf.writer(), 2)); + try std.testing.expectEqualStrings("", buf.items); + 
buf.clearRetainingCapacity(); + } + + // Same but for a slice + { + var temp: [1]Recurse = .{.{ .r = &.{} }}; + const maybe_recurse: []const Recurse = &temp; + + try std.testing.expectError(error.MaxDepth, stringifyMaxDepth(maybe_recurse, .{}, buf.writer(), 2)); + try std.testing.expectEqualStrings("", buf.items); + buf.clearRetainingCapacity(); + + var serializer = stringifier(buf.writer(), .{}); + + try std.testing.expectError(error.MaxDepth, serializer.sliceMaxDepth(maybe_recurse, .{}, 2)); + try std.testing.expectEqualStrings("", buf.items); + buf.clearRetainingCapacity(); + + try serializer.sliceArbitraryDepth(maybe_recurse, .{}); + try std.testing.expectEqualStrings("&.{.{ .r = &.{} }}", buf.items); + buf.clearRetainingCapacity(); + } + + // A slice succeeding + { + var temp: [1]Recurse = .{.{ .r = &.{} }}; + const maybe_recurse: []const Recurse = &temp; + + try stringifyMaxDepth(maybe_recurse, .{}, buf.writer(), 3); + try std.testing.expectEqualStrings("&.{.{ .r = &.{} }}", buf.items); + buf.clearRetainingCapacity(); + + var serializer = stringifier(buf.writer(), .{}); + + try serializer.sliceMaxDepth(maybe_recurse, .{}, 3); + try std.testing.expectEqualStrings("&.{.{ .r = &.{} }}", buf.items); + buf.clearRetainingCapacity(); + + try serializer.sliceArbitraryDepth(maybe_recurse, .{}); + try std.testing.expectEqualStrings("&.{.{ .r = &.{} }}", buf.items); + buf.clearRetainingCapacity(); + } + + // Max depth failing on recursive type due to recursion + { + var temp: [1]Recurse = .{.{ .r = &.{} }}; + temp[0].r = &temp; + const maybe_recurse: []const Recurse = &temp; + + try std.testing.expectError(error.MaxDepth, stringifyMaxDepth(maybe_recurse, .{}, buf.writer(), 128)); + try std.testing.expectEqualStrings("", buf.items); + buf.clearRetainingCapacity(); + + var serializer = stringifier(buf.writer(), .{}); + try std.testing.expectError(error.MaxDepth, serializer.sliceMaxDepth(maybe_recurse, .{}, 128)); + try std.testing.expectEqualStrings("", buf.items); + buf.clearRetainingCapacity(); + } + + // Max depth on other parts of the lower level API + { + const writer = buf.writer(); + var serializer = stringifier(writer, .{}); + + const maybe_recurse: []const Recurse = &.{}; + + try std.testing.expectError(error.MaxDepth, serializer.valueMaxDepth(1, .{}, 0)); + try serializer.valueMaxDepth(2, .{}, 1); + try serializer.value(3, .{}); + try serializer.valueArbitraryDepth(maybe_recurse, .{}); + + var s = try serializer.startStruct(.{}); + try std.testing.expectError(error.MaxDepth, s.fieldMaxDepth("a", 1, .{}, 0)); + try s.fieldMaxDepth("b", 4, .{}, 1); + try s.field("c", 5, .{}); + try s.fieldArbitraryDepth("d", maybe_recurse, .{}); + try s.finish(); + + var t = try serializer.startTuple(.{}); + try std.testing.expectError(error.MaxDepth, t.fieldMaxDepth(1, .{}, 0)); + try t.fieldMaxDepth(6, .{}, 1); + try t.field(7, .{}); + try t.fieldArbitraryDepth(maybe_recurse, .{}); + try t.finish(); + + var a = try serializer.startSlice(.{}); + try std.testing.expectError(error.MaxDepth, a.itemMaxDepth(1, .{}, 0)); + try a.itemMaxDepth(8, .{}, 1); + try a.item(9, .{}); + try a.itemArbitraryDepth(maybe_recurse, .{}); + try a.finish(); + + try std.testing.expectEqualStrings( + \\23&.{}.{ + \\ .b = 4, + \\ .c = 5, + \\ .d = &.{}, + \\}.{ + \\ 6, + \\ 7, + \\ &.{}, + \\}&.{ + \\ 8, + \\ 9, + \\ &.{}, + \\} + , buf.items); + } +} + +test "std.zon stringify primitives" { + // Issue: https://github.com/ziglang/zig/issues/20880 + if (@import("builtin").zig_backend == .stage2_c) return error.SkipZigTest; + + try 
expectStringifyEqual( + \\.{ + \\ .a = 1.5, + \\ .b = 0.3333333333333333333333333333333333, + \\ .c = 3.1415926535897932384626433832795028, + \\ .d = 0, + \\ .e = -0, + \\ .f = inf, + \\ .g = -inf, + \\ .h = nan, + \\} + , + .{ + .a = @as(f128, 1.5), // Make sure explicit f128s work + .b = 1.0 / 3.0, + .c = std.math.pi, + .d = 0.0, + .e = -0.0, + .f = std.math.inf(f32), + .g = -std.math.inf(f32), + .h = std.math.nan(f32), + }, + .{}, + ); + + try expectStringifyEqual( + \\.{ + \\ .a = 18446744073709551616, + \\ .b = -18446744073709551616, + \\ .c = 680564733841876926926749214863536422912, + \\ .d = -680564733841876926926749214863536422912, + \\ .e = 0, + \\} + , + .{ + .a = 18446744073709551616, + .b = -18446744073709551616, + .c = 680564733841876926926749214863536422912, + .d = -680564733841876926926749214863536422912, + .e = 0, + }, + .{}, + ); + + try expectStringifyEqual( + \\.{ + \\ .a = true, + \\ .b = false, + \\ .c = .foo, + \\ .d = {}, + \\ .e = null, + \\} + , + .{ + .a = true, + .b = false, + .c = .foo, + .d = {}, + .e = null, + }, + .{}, + ); + + const Struct = struct { x: f32, y: f32 }; + try expectStringifyEqual( + ".{ .a = .{ .x = 1, .y = 2 }, .b = null }", + .{ + .a = @as(?Struct, .{ .x = 1, .y = 2 }), + .b = @as(?Struct, null), + }, + .{}, + ); + + const E = enum(u8) { + foo, + bar, + }; + try expectStringifyEqual( + ".{ .a = .foo, .b = .foo }", + .{ + .a = .foo, + .b = E.foo, + }, + .{}, + ); +} + +test "std.zon stringify ident" { + var buffer = std.ArrayList(u8).init(std.testing.allocator); + defer buffer.deinit(); + const writer = buffer.writer(); + var serializer = stringifier(writer, .{}); + + try serializer.ident("a"); + try std.testing.expectEqualStrings("a", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.ident("foo_1"); + try std.testing.expectEqualStrings("foo_1", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.ident("_foo_1"); + try std.testing.expectEqualStrings("_foo_1", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.ident("foo bar"); + try std.testing.expectEqualStrings("@\"foo bar\"", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.ident("1foo"); + try std.testing.expectEqualStrings("@\"1foo\"", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.ident("var"); + try std.testing.expectEqualStrings("@\"var\"", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.ident("true"); + try std.testing.expectEqualStrings("true", buffer.items); + buffer.clearRetainingCapacity(); + + try serializer.ident("_"); + try std.testing.expectEqualStrings("_", buffer.items); + buffer.clearRetainingCapacity(); + + const Enum = enum { + @"foo bar", + }; + try expectStringifyEqual(".{ .@\"var\" = .@\"foo bar\", .@\"1\" = .@\"foo bar\" }", .{ + .@"var" = .@"foo bar", + .@"1" = Enum.@"foo bar", + }, .{}); +} diff --git a/src/Air.zig b/src/Air.zig index 4589bb1557cf..883991488c7c 100644 --- a/src/Air.zig +++ b/src/Air.zig @@ -1020,6 +1020,11 @@ pub const Inst = struct { pub fn toType(ref: Ref) Type { return Type.fromInterned(ref.toInterned().?); } + + pub fn toTypeAllowNone(ref: Ref) ?Type { + if (ref == .none) return null; + return ref.toType(); + } }; /// All instructions have an 8-byte payload, which is contained within diff --git a/src/Compilation.zig b/src/Compilation.zig index ad3fcc1c829b..992e8fbda0ce 100644 --- a/src/Compilation.zig +++ b/src/Compilation.zig @@ -2192,7 +2192,10 @@ pub fn update(comp: *Compilation, main_progress_node: std.Progress.Node) !void { 
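+ // Only .zig files are queued for AstGen below; ZON files have no ZIR and are instead parsed on demand when they are imported.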
try comp.astgen_work_queue.ensureUnusedCapacity(zcu.import_table.count()); for (zcu.import_table.values()) |file_index| { if (zcu.fileByIndex(file_index).mod.isBuiltin()) continue; - comp.astgen_work_queue.writeItemAssumeCapacity(file_index); + const file = zcu.fileByIndex(file_index); + if (file.mode == .zig) { + comp.astgen_work_queue.writeItemAssumeCapacity(file_index); + } } if (comp.file_system_inputs) |fsi| { for (zcu.import_table.values()) |file_index| { @@ -4150,6 +4153,7 @@ fn workerAstGenFile( wg: *WaitGroup, src: Zcu.AstGenSrc, ) void { + assert(file.mode == .zig); const child_prog_node = prog_node.start(file.sub_file_path, 0); defer child_prog_node.end(); @@ -4202,7 +4206,7 @@ fn workerAstGenFile( const imported_path_digest = pt.zcu.filePathDigest(res.file_index); break :blk .{ res, imported_path_digest }; }; - if (import_result.is_new) { + if (import_result.is_new and import_result.file.mode == .zig) { log.debug("AstGen of {s} has import '{s}'; queuing AstGen of {s}", .{ file.sub_file_path, import_path, import_result.file.sub_file_path, }); diff --git a/src/Package/Manifest.zig b/src/Package/Manifest.zig index 4eed6cc386e6..7691ac3405e1 100644 --- a/src/Package/Manifest.zig +++ b/src/Package/Manifest.zig @@ -14,6 +14,7 @@ pub const Digest = [Hash.digest_length]u8; pub const multihash_len = 1 + 1 + Hash.digest_length; pub const multihash_hex_digest_len = 2 * multihash_len; pub const MultiHashHexDigest = [multihash_hex_digest_len]u8; +const AstGen = std.zig.AstGen; pub const Dependency = struct { location: Location, @@ -456,7 +457,6 @@ const Parse = struct { return duped; } - /// TODO: try to DRY this with AstGen.parseStrLit fn parseStrLit( p: *Parse, token: Ast.TokenIndex, @@ -470,95 +470,13 @@ const Parse = struct { buf.* = buf_managed.moveToUnmanaged(); switch (try result) { .success => {}, - .failure => |err| try p.appendStrLitError(err, token, bytes, offset), - } - } - - /// TODO: try to DRY this with AstGen.failWithStrLitError - fn appendStrLitError( - p: *Parse, - err: std.zig.string_literal.Error, - token: Ast.TokenIndex, - bytes: []const u8, - offset: u32, - ) Allocator.Error!void { - const raw_string = bytes[offset..]; - switch (err) { - .invalid_escape_character => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "invalid escape character: '{c}'", - .{raw_string[bad_index]}, - ); - }, - .expected_hex_digit => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected hex digit, found '{c}'", - .{raw_string[bad_index]}, - ); - }, - .empty_unicode_escape_sequence => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "empty unicode escape sequence", - .{}, - ); - }, - .expected_hex_digit_or_rbrace => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected hex digit or '}}', found '{c}'", - .{raw_string[bad_index]}, - ); - }, - .invalid_unicode_codepoint => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "unicode escape does not correspond to a valid unicode scalar value", - .{}, - ); - }, - .expected_lbrace => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected '{{', found '{c}", - .{raw_string[bad_index]}, - ); - }, - .expected_rbrace => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected '}}', found '{c}", - .{raw_string[bad_index]}, - ); - }, - 
.expected_single_quote => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "expected single quote ('), found '{c}", - .{raw_string[bad_index]}, - ); - }, - .invalid_character => |bad_index| { - try p.appendErrorOff( - token, - offset + @as(u32, @intCast(bad_index)), - "invalid byte in string or character literal: '{c}'", - .{raw_string[bad_index]}, - ); - }, - .empty_char_literal => { - try p.appendErrorOff(token, offset, "empty character literal", .{}); - }, + .failure => |err| try appendErrorOff( + p, + token, + offset + @as(u32, @intCast(err.offset())), + "{}", + .{err.fmtWithSource(raw_string)}, + ), } } diff --git a/src/Package/Module.zig b/src/Package/Module.zig index 996e58ddfb6c..ed6f6041c817 100644 --- a/src/Package/Module.zig +++ b/src/Package/Module.zig @@ -478,6 +478,7 @@ pub fn create(arena: Allocator, options: CreateOptions) !*Package.Module { .status = .never_loaded, .prev_status = .never_loaded, .mod = new, + .mode = .zig, }; break :b new; }; diff --git a/src/Sema.zig b/src/Sema.zig index 1ec384809f09..2a69fff2610e 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -192,6 +192,7 @@ const Alignment = InternPool.Alignment; const AnalUnit = InternPool.AnalUnit; const ComptimeAllocIndex = InternPool.ComptimeAllocIndex; const Cache = std.Build.Cache; +const zon = @import("zon.zig"); pub const default_branch_quota = 1000; pub const default_reference_trace_len = 2; @@ -14474,9 +14475,10 @@ fn zirImport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. const pt = sema.pt; const zcu = pt.zcu; - const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].str_tok; + const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_tok; + const extra = sema.code.extraData(Zir.Inst.Import, inst_data.payload_index).data; const operand_src = block.tokenOffset(inst_data.src_tok); - const operand = inst_data.get(sema.code); + const operand = sema.code.nullTerminatedString(extra.path); const result = pt.importFile(block.getFileScope(zcu), operand) catch |err| switch (err) { error.ImportOutsideModulePath => { @@ -14493,12 +14495,41 @@ fn zirImport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air. return sema.fail(block, operand_src, "unable to open '{s}': {s}", .{ operand, @errorName(err) }); }, }; - try sema.declareDependency(.{ .file = result.file_index }); - try pt.ensureFileAnalyzed(result.file_index); - const ty = zcu.fileRootType(result.file_index); - try sema.declareDependency(.{ .interned = ty }); - try sema.addTypeReferenceEntry(operand_src, ty); - return Air.internedToRef(ty); + switch (result.file.mode) { + .zig => { + try sema.declareDependency(.{ .file = result.file_index }); + try pt.ensureFileAnalyzed(result.file_index); + const ty = zcu.fileRootType(result.file_index); + try sema.declareDependency(.{ .interned = ty }); + try sema.addTypeReferenceEntry(operand_src, ty); + return Air.internedToRef(ty); + }, + .zon => { + _ = result.file.getTree(zcu.gpa) catch |err| { + // TODO: these errors are file system errors; make sure an update() will + // retry this and not cache the file system error, which may be transient.
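+ // For now, report the failure as a compile error at the @import call site.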
+ return sema.fail(block, operand_src, "unable to open '{s}': {s}", .{ result.file.sub_file_path, @errorName(err) }); + }; + + if (extra.res_ty == .none) { + return sema.fail(block, operand_src, "import ZON must have a known result type", .{}); + } + const res_ty_inst = try sema.resolveInst(extra.res_ty); + const res_ty = try sema.analyzeAsType(block, operand_src, res_ty_inst); + if (res_ty.isGenericPoison()) { + return sema.fail(block, operand_src, "import ZON must have a known result type", .{}); + } + + const interned = try zon.lower( + sema, + result.file, + result.file_index, + res_ty, + operand_src, + ); + return Air.internedToRef(interned); + }, + } } fn zirEmbedFile(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref { diff --git a/src/Zcu.zig b/src/Zcu.zig index d03eb4cc9a0e..b2a1ccf5ed86 100644 --- a/src/Zcu.zig +++ b/src/Zcu.zig @@ -453,6 +453,9 @@ pub const File = struct { /// successful, this field is unloaded. prev_zir: ?*Zir = null, + /// Whether the file is Zig or ZON. This field is always populated. + mode: Ast.Mode, + pub const Status = enum { never_loaded, retryable_failure, @@ -472,6 +475,17 @@ pub const File = struct { root: *Package.Module, }; + pub fn modeFromPath(path: []const u8) Ast.Mode { + if (std.mem.endsWith(u8, path, ".zon")) { + return .zon; + } else if (std.mem.endsWith(u8, path, ".zig")) { + return .zig; + } else { + // `Module.importFile` rejects all other extensions + unreachable; + } + } + pub fn unload(file: *File, gpa: Allocator) void { file.unloadTree(gpa); file.unloadSource(gpa); @@ -546,7 +560,7 @@ pub const File = struct { if (file.tree_loaded) return &file.tree; const source = try file.getSource(gpa); - file.tree = try Ast.parse(gpa, source.bytes, .zig); + file.tree = try Ast.parse(gpa, source.bytes, file.mode); file.tree_loaded = true; return &file.tree; } @@ -663,6 +677,7 @@ pub const File = struct { pub const Index = InternPool.FileIndex; }; +/// Represents the contents of a file loaded with `@embedFile`. pub const EmbedFile = struct { /// Relative to the owning module's root directory. sub_file_path: InternPool.NullTerminatedString, @@ -2087,6 +2102,12 @@ pub const LazySrcLoc = struct { break :inst .{ info.file, info.inst }; }; const file = zcu.fileByIndex(file_index); + + // If we're relative to .main_struct_inst, we know the AST node is the root and don't need to resolve the ZIR, + // which may not exist e.g. in the case of errors in ZON files. + if (zir_inst == .main_struct_inst) return .{ file, 0 }; + + // Otherwise, make sure ZIR is loaded.
assert(file.zir_loaded); const zir = file.zir; diff --git a/src/Zcu/PerThread.zig b/src/Zcu/PerThread.zig index c6a8b58f41a0..5fd9a99a9e72 100644 --- a/src/Zcu/PerThread.zig +++ b/src/Zcu/PerThread.zig @@ -1067,6 +1067,7 @@ fn semaFile(pt: Zcu.PerThread, file_index: Zcu.File.Index) Zcu.SemaError!void { const zcu = pt.zcu; const gpa = zcu.gpa; const file = zcu.fileByIndex(file_index); + assert(file.mode == .zig); assert(zcu.fileRootType(file_index) == .none); if (file.status != .success_zir) { @@ -1464,6 +1465,7 @@ pub fn importPkg(pt: Zcu.PerThread, mod: *Module) !Zcu.ImportFileResult { .status = .never_loaded, .prev_status = .never_loaded, .mod = mod, + .mode = Zcu.File.modeFromPath(sub_file_path), }; try new_file.addReference(zcu, .{ .root = mod }); @@ -1494,7 +1496,9 @@ pub fn importFile( if (mod.deps.get(import_string)) |pkg| { return pt.importPkg(pkg); } - if (!std.mem.endsWith(u8, import_string, ".zig")) { + if (!std.mem.endsWith(u8, import_string, ".zig") and + !std.mem.endsWith(u8, import_string, ".zon")) + { return error.ModuleNotFound; } const gpa = zcu.gpa; @@ -1575,6 +1579,7 @@ pub fn importFile( .status = .never_loaded, .prev_status = .never_loaded, .mod = mod, + .mode = Zcu.File.modeFromPath(sub_file_path), }; return .{ diff --git a/src/main.zig b/src/main.zig index 0d239f1f99f0..4aeba62413b4 100644 --- a/src/main.zig +++ b/src/main.zig @@ -6038,6 +6038,7 @@ fn cmdAstCheck( .tree = undefined, .zir = undefined, .mod = undefined, + .mode = .zig, }; if (zig_source_file) |file_name| { var f = fs.cwd().openFile(file_name, .{}) catch |err| { @@ -6362,6 +6363,7 @@ fn cmdDumpZir( .tree = undefined, .zir = try Zcu.loadZirCache(gpa, f), .mod = undefined, + .mode = .zig, }; defer file.zir.deinit(gpa); @@ -6434,6 +6436,7 @@ fn cmdChangelist( .tree = undefined, .zir = undefined, .mod = undefined, + .mode = Zcu.File.modeFromPath(old_source_file), }; file.mod = try Package.Module.createLimited(arena, .{ diff --git a/src/print_zir.zig b/src/print_zir.zig index 808ead0e7991..b3ea54bfab44 100644 --- a/src/print_zir.zig +++ b/src/print_zir.zig @@ -488,7 +488,6 @@ const Writer = struct { .enum_literal, .decl_ref, .decl_val, - .import, .ret_err_value, .ret_err_value_code, .param_anytype, @@ -515,6 +514,8 @@ const Writer = struct { .declaration => try self.writeDeclaration(stream, inst), .extended => try self.writeExtended(stream, inst), + + .import => try self.writeImport(stream, inst), } } @@ -2981,4 +2982,13 @@ const Writer = struct { try stream.writeByte('\n'); } } + + fn writeImport(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void { + const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_tok; + const extra = self.code.extraData(Zir.Inst.Import, inst_data.payload_index).data; + try self.writeInstRef(stream, extra.res_ty); + const import_path = self.code.nullTerminatedString(extra.path); + try stream.print(", \"{}\") ", .{std.zig.fmtEscapes(import_path)}); + try self.writeSrcTok(stream, inst_data.src_tok); + } }; diff --git a/src/zon.zig b/src/zon.zig new file mode 100644 index 000000000000..0caa6c22994a --- /dev/null +++ b/src/zon.zig @@ -0,0 +1,1134 @@ +const std = @import("std"); +const Zcu = @import("Zcu.zig"); +const Sema = @import("Sema.zig"); +const InternPool = @import("InternPool.zig"); +const Type = @import("Type.zig"); +const Zir = std.zig.Zir; +const AstGen = std.zig.AstGen; +const CompileError = Zcu.CompileError; +const Ast = std.zig.Ast; +const Allocator = std.mem.Allocator; +const assert = std.debug.assert; +const File = Zcu.File; +const 
LazySrcLoc = Zcu.LazySrcLoc; +const Ref = std.zig.Zir.Inst.Ref; +const NullTerminatedString = InternPool.NullTerminatedString; +const NumberLiteralError = std.zig.number_literal.Error; +const NodeIndex = std.zig.Ast.Node.Index; + +const LowerZon = @This(); + +sema: *Sema, +file: *File, +file_index: Zcu.File.Index, +import_loc: LazySrcLoc, + +/// Lowers the given file as ZON. +pub fn lower( + sema: *Sema, + file: *File, + file_index: Zcu.File.Index, + res_ty: Type, + import_loc: LazySrcLoc, +) CompileError!InternPool.Index { + const lower_zon: LowerZon = .{ + .sema = sema, + .file = file, + .file_index = file_index, + .import_loc = import_loc, + }; + const tree = lower_zon.file.getTree(lower_zon.sema.gpa) catch unreachable; // Already validated + if (tree.errors.len != 0) { + return lower_zon.lowerAstErrors(); + } + + const data = tree.nodes.items(.data); + const root = data[0].lhs; + return lower_zon.lowerExpr(root, res_ty); +} + +fn lazySrcLoc(self: LowerZon, loc: LazySrcLoc.Offset) !LazySrcLoc { + return .{ + .base_node_inst = try self.sema.pt.zcu.intern_pool.trackZir( + self.sema.pt.zcu.gpa, + .main, + .{ .file = self.file_index, .inst = .main_struct_inst }, + ), + .offset = loc, + }; +} + +fn fail( + self: LowerZon, + loc: LazySrcLoc.Offset, + comptime format: []const u8, + args: anytype, +) (Allocator.Error || error{AnalysisFail}) { + @branchHint(.cold); + const src_loc = try self.lazySrcLoc(loc); + const err_msg = try Zcu.ErrorMsg.create(self.sema.pt.zcu.gpa, src_loc, format, args); + try self.sema.pt.zcu.errNote(self.import_loc, err_msg, "imported here", .{}); + try self.sema.pt.zcu.failed_files.putNoClobber(self.sema.pt.zcu.gpa, self.file, err_msg); + return error.AnalysisFail; +} + +fn lowerAstErrors(self: LowerZon) CompileError { + const tree = self.file.tree; + assert(tree.errors.len > 0); + + const gpa = self.sema.gpa; + const ip = &self.sema.pt.zcu.intern_pool; + const parse_err = tree.errors[0]; + + var buf: std.ArrayListUnmanaged(u8) = .{}; + defer buf.deinit(gpa); + + // Create the main error + buf.clearRetainingCapacity(); + try tree.renderError(parse_err, buf.writer(gpa)); + const err_msg = try Zcu.ErrorMsg.create( + gpa, + .{ + .base_node_inst = try ip.trackZir(gpa, .main, .{ + .file = self.file_index, + .inst = .main_struct_inst, + }), + .offset = .{ .token_abs = parse_err.token + @intFromBool(parse_err.token_is_prev) }, + }, + "{s}", + .{buf.items}, + ); + + // Check for invalid bytes + const token_starts = tree.tokens.items(.start); + const token_tags = tree.tokens.items(.tag); + if (token_tags[parse_err.token + @intFromBool(parse_err.token_is_prev)] == .invalid) { + const bad_off: u32 = @intCast(tree.tokenSlice(parse_err.token + @intFromBool(parse_err.token_is_prev)).len); + const byte_abs = token_starts[parse_err.token + @intFromBool(parse_err.token_is_prev)] + bad_off; + try self.sema.pt.zcu.errNote( + .{ + .base_node_inst = try ip.trackZir(gpa, .main, .{ + .file = self.file_index, + .inst = .main_struct_inst, + }), + .offset = .{ .byte_abs = byte_abs }, + }, + err_msg, + "invalid byte: '{'}'", + .{std.zig.fmtEscapes(tree.source[byte_abs..][0..1])}, + ); + } + + // Create the notes + for (tree.errors[1..]) |note| { + if (!note.is_note) break; + + buf.clearRetainingCapacity(); + try tree.renderError(note, buf.writer(gpa)); + try self.sema.pt.zcu.errNote( + .{ + .base_node_inst = try ip.trackZir(gpa, .main, .{ + .file = self.file_index, + .inst = .main_struct_inst, + }), + .offset = .{ .token_abs = note.token + @intFromBool(note.token_is_prev) }, + }, + err_msg, + 
"{s}", + .{buf.items}, + ); + } + + try self.sema.pt.zcu.failed_files.putNoClobber(gpa, self.file, err_msg); + return error.AnalysisFail; +} + +const Ident = struct { + bytes: []const u8, + owned: bool, + + fn deinit(self: *Ident, allocator: Allocator) void { + if (self.owned) { + allocator.free(self.bytes); + } + self.* = undefined; + } +}; + +fn ident(self: LowerZon, token: Ast.TokenIndex) !Ident { + var bytes = self.file.tree.tokenSlice(token); + + if (bytes[0] == '@' and bytes[1] == '"') { + const gpa = self.sema.gpa; + + const raw_string = bytes[1..bytes.len]; + var parsed = std.ArrayListUnmanaged(u8){}; + defer parsed.deinit(gpa); + + switch (try std.zig.string_literal.parseWrite(parsed.writer(gpa), raw_string)) { + .success => { + if (std.mem.indexOfScalar(u8, parsed.items, 0) != null) { + return self.fail(.{ .token_abs = token }, "identifier cannot contain null bytes", .{}); + } + return .{ + .bytes = try parsed.toOwnedSlice(gpa), + .owned = true, + }; + }, + .failure => |err| { + const offset = self.file.tree.tokens.items(.start)[token]; + return self.fail( + .{ .byte_abs = offset + @as(u32, @intCast(err.offset())) }, + "{}", + .{err.fmtWithSource(raw_string)}, + ); + }, + } + } + + return .{ + .bytes = bytes, + .owned = false, + }; +} + +fn identAsNullTerminatedString(self: LowerZon, token: Ast.TokenIndex) !NullTerminatedString { + var parsed = try self.ident(token); + defer parsed.deinit(self.sema.gpa); + const ip = &self.sema.pt.zcu.intern_pool; + return ip.getOrPutString(self.sema.gpa, self.sema.pt.tid, parsed.bytes, .no_embedded_nulls); +} + +const FieldTypes = union(enum) { + st: struct { + ty: Type, + loaded: InternPool.LoadedStructType, + }, + un: struct { + ty: Type, + loaded: InternPool.LoadedEnumType, + }, + none, + + fn init(ty: ?Type, sema: *Sema) !@This() { + const t = ty orelse return .none; + const ip = &sema.pt.zcu.intern_pool; + switch (t.zigTypeTagOrPoison(sema.pt.zcu) catch return .none) { + .@"struct" => { + try t.resolveFully(sema.pt); + const loaded_struct_type = ip.loadStructType(t.toIntern()); + return .{ .st = .{ + .ty = t, + .loaded = loaded_struct_type, + } }; + }, + .@"union" => { + try t.resolveFully(sema.pt); + const loaded_union_type = ip.loadUnionType(t.toIntern()); + const loaded_tag_type = loaded_union_type.loadTagType(ip); + return .{ .un = .{ + .ty = t, + .loaded = loaded_tag_type, + } }; + }, + else => return .none, + } + } + + fn get(self: *const @This(), name: NullTerminatedString, zcu: *Zcu) ?Type { + const ip = &zcu.intern_pool; + const self_ty, const index = switch (self.*) { + .st => |st| .{ st.ty, st.loaded.nameIndex(ip, name) orelse return null }, + .un => |un| .{ un.ty, un.loaded.nameIndex(ip, name) orelse return null }, + .none => return null, + }; + return self_ty.fieldType(index, zcu); + } +}; + +fn lowerExpr(self: LowerZon, node: Ast.Node.Index, res_ty: Type) CompileError!InternPool.Index { + switch (Type.zigTypeTag(res_ty, self.sema.pt.zcu)) { + .void => return self.lowerVoid(node), + .bool => return self.lowerBool(node), + .int, .comptime_int => return self.lowerInt(node, res_ty), + .float, .comptime_float => return self.lowerFloat(node, res_ty), + .optional => return self.lowerOptional(node, res_ty), + .null => return self.lowerNull(node), + .@"enum" => return self.lowerEnum(node, res_ty), + .enum_literal => return self.lowerEnumLiteral(node, res_ty), + .array => return self.lowerArray(node, res_ty), + .@"struct" => return self.lowerStructOrTuple(node, res_ty), + .@"union" => return self.lowerUnion(node, res_ty), + .pointer => 
return self.lowerPointer(node, res_ty), + + .type, + .noreturn, + .undefined, + .error_union, + .error_set, + .@"fn", + .@"opaque", + .frame, + .@"anyframe", + .vector, + => return self.fail(.{ .node_abs = node }, "invalid ZON value", .{}), + } +} + +fn lowerVoid(self: LowerZon, node: Ast.Node.Index) !InternPool.Index { + const tags = self.file.tree.nodes.items(.tag); + const data = self.file.tree.nodes.items(.data); + + if (tags[node] == .block_two and data[node].lhs == 0 and data[node].rhs == 0) { + return .void_value; + } + + return self.fail(.{ .node_abs = node }, "expected type 'void'", .{}); +} + +fn lowerBool(self: LowerZon, node: Ast.Node.Index) !InternPool.Index { + const gpa = self.sema.gpa; + const tags = self.file.tree.nodes.items(.tag); + const main_tokens = self.file.tree.nodes.items(.main_token); + + if (tags[node] == .identifier) { + const token = main_tokens[node]; + var litIdent = try self.ident(token); + defer litIdent.deinit(gpa); + + const BoolIdent = enum { true, false }; + const values = std.StaticStringMap(BoolIdent).initComptime(.{ + .{ "true", .true }, + .{ "false", .false }, + }); + if (values.get(litIdent.bytes)) |value| { + return switch (value) { + .true => .bool_true, + .false => .bool_false, + }; + } + } + return self.fail(.{ .node_abs = node }, "expected type 'bool'", .{}); +} + +fn lowerInt( + self: LowerZon, + node: Ast.Node.Index, + res_ty: Type, +) !InternPool.Index { + @setFloatMode(.strict); + + const gpa = self.sema.gpa; + const tags = self.file.tree.nodes.items(.tag); + const main_tokens = self.file.tree.nodes.items(.main_token); + const num_lit_node, const is_negative = if (tags[node] == .negation) b: { + const data = self.file.tree.nodes.items(.data); + break :b .{ + data[node].lhs, + node, + }; + } else .{ + node, + null, + }; + switch (tags[num_lit_node]) { + .char_literal => { + const token = main_tokens[num_lit_node]; + const token_bytes = self.file.tree.tokenSlice(token); + const char = switch (std.zig.string_literal.parseCharLiteral(token_bytes)) { + .success => |char| char, + .failure => |err| { + const offset = self.file.tree.tokens.items(.start)[token]; + return self.fail( + .{ .byte_abs = offset + @as(u32, @intCast(err.offset())) }, + "{}", + .{err.fmtWithSource(token_bytes)}, + ); + }, + }; + return self.sema.pt.zcu.intern_pool.get(gpa, self.sema.pt.tid, .{ + .int = .{ + .ty = res_ty.toIntern(), + .storage = .{ .i64 = if (is_negative == null) char else -@as(i64, char) }, + }, + }); + }, + .number_literal => { + const token = main_tokens[num_lit_node]; + const token_bytes = self.file.tree.tokenSlice(token); + const parsed = std.zig.number_literal.parseNumberLiteral(token_bytes); + switch (parsed) { + .int => |unsigned| { + if (is_negative) |negative_node| { + if (unsigned == 0) { + return self.fail(.{ .node_abs = negative_node }, "integer literal '-0' is ambiguous", .{}); + } + const signed = std.math.negateCast(unsigned) catch { + var result = try std.math.big.int.Managed.initSet(gpa, unsigned); + defer result.deinit(); + result.negate(); + + if (Type.zigTypeTag(res_ty, self.sema.pt.zcu) == .int) { + const int_info = res_ty.intInfo(self.sema.pt.zcu); + if (!result.fitsInTwosComp(int_info.signedness, int_info.bits)) { + return self.fail( + .{ .node_abs = num_lit_node }, + "type '{}' cannot represent integer value '-{}'", + .{ res_ty.fmt(self.sema.pt), unsigned }, + ); + } + } + + return self.sema.pt.zcu.intern_pool.get(gpa, self.sema.pt.tid, .{ .int = .{ + .ty = res_ty.toIntern(), + .storage = .{ .big_int = result.toConst() }, + } }); + 
}; + + if (Type.zigTypeTag(res_ty, self.sema.pt.zcu) == .int) { + const int_info = res_ty.intInfo(self.sema.pt.zcu); + if (std.math.cast(u6, int_info.bits)) |bits| { + const min_int: i64 = if (int_info.signedness == .unsigned) 0 else -(@as(i64, 1) << (bits - 1)); + if (signed < min_int) { + return self.fail( + .{ .node_abs = num_lit_node }, + "type '{}' cannot represent integer value '{}'", + .{ res_ty.fmt(self.sema.pt), unsigned }, + ); + } + } + } + + return self.sema.pt.zcu.intern_pool.get(gpa, self.sema.pt.tid, .{ .int = .{ + .ty = res_ty.toIntern(), + .storage = .{ .i64 = signed }, + } }); + } else { + if (Type.zigTypeTag(res_ty, self.sema.pt.zcu) == .int) { + const int_info = res_ty.intInfo(self.sema.pt.zcu); + if (std.math.cast(u6, int_info.bits)) |bits| { + const max_int: u64 = (@as(u64, 1) << (bits - @intFromBool(int_info.signedness == .signed))) - 1; + if (unsigned > max_int) { + return self.fail( + .{ .node_abs = num_lit_node }, + "type '{}' cannot represent integer value '{}'", + .{ res_ty.fmt(self.sema.pt), unsigned }, + ); + } + } + } + return self.sema.pt.zcu.intern_pool.get(gpa, self.sema.pt.tid, .{ .int = .{ + .ty = res_ty.toIntern(), + .storage = .{ .u64 = unsigned }, + } }); + } + }, + .big_int => |base| { + var big_int = try std.math.big.int.Managed.init(gpa); + defer big_int.deinit(); + + const prefix_offset: usize = if (base == .decimal) 0 else 2; + big_int.setString(@intFromEnum(base), token_bytes[prefix_offset..]) catch |err| switch (err) { + error.InvalidCharacter => unreachable, // caught in `parseNumberLiteral` + error.InvalidBase => unreachable, // we only pass 16, 8, 2, see above + error.OutOfMemory => return error.OutOfMemory, + }; + + assert(big_int.isPositive()); + + if (is_negative != null) big_int.negate(); + + if (Type.zigTypeTag(res_ty, self.sema.pt.zcu) == .int) { + const int_info = res_ty.intInfo(self.sema.pt.zcu); + if (!big_int.fitsInTwosComp(int_info.signedness, int_info.bits)) { + return self.fail( + .{ .node_abs = num_lit_node }, + "type '{}' cannot represent integer value '{}'", + .{ res_ty.fmt(self.sema.pt), big_int }, + ); + } + } + + return self.sema.pt.zcu.intern_pool.get(gpa, self.sema.pt.tid, .{ .int = .{ + .ty = res_ty.toIntern(), + .storage = .{ .big_int = big_int.toConst() }, + } }); + }, + .float => { + const unsigned_float = std.fmt.parseFloat(f128, token_bytes) catch { + // Validated by tokenizer + unreachable; + }; + const float = if (is_negative == null) unsigned_float else -unsigned_float; + + // Check for fractional components + if (@rem(float, 1) != 0) { + return self.fail( + .{ .node_abs = num_lit_node }, + "fractional component prevents float value '{}' from coercion to type '{}'", + .{ float, res_ty.fmt(self.sema.pt) }, + ); + } + + // Create a rational representation of the float + var rational = try std.math.big.Rational.init(gpa); + defer rational.deinit(); + rational.setFloat(f128, float) catch |err| switch (err) { + error.NonFiniteFloat => unreachable, + error.OutOfMemory => return error.OutOfMemory, + }; + + // The float is reduced in rational.setFloat, so we assert that denominator is equal to one + const big_one = std.math.big.int.Const{ .limbs = &.{1}, .positive = true }; + assert(rational.q.toConst().eqlAbs(big_one)); + if (is_negative != null) rational.negate(); + + // Check that the result is in range of the result type + const int_info = res_ty.intInfo(self.sema.pt.zcu); + if (!rational.p.fitsInTwosComp(int_info.signedness, int_info.bits)) { + return self.fail( + .{ .node_abs = num_lit_node }, + "float value '{}' 
cannot be stored in integer type '{}'", + .{ float, res_ty.fmt(self.sema.pt) }, + ); + } + + return self.sema.pt.zcu.intern_pool.get(gpa, self.sema.pt.tid, .{ + .int = .{ + .ty = res_ty.toIntern(), + .storage = .{ .big_int = rational.p.toConst() }, + }, + }); + }, + .failure => |err| return self.failWithNumberError(token, err), + } + }, + .identifier => { + unreachable; // Decide what error to give here + }, + else => return self.fail(.{ .node_abs = node }, "expected type '{}'", .{res_ty.fmt(self.sema.pt)}), + } +} + +fn lowerFloat( + self: LowerZon, + node: Ast.Node.Index, + res_ty: Type, +) !InternPool.Index { + @setFloatMode(.strict); + + const tags = self.file.tree.nodes.items(.tag); + const main_tokens = self.file.tree.nodes.items(.main_token); + const num_lit_node, const is_negative = if (tags[node] == .negation) b: { + const data = self.file.tree.nodes.items(.data); + break :b .{ + data[node].lhs, + node, + }; + } else .{ + node, + null, + }; + switch (tags[num_lit_node]) { + .char_literal => { + const token = main_tokens[num_lit_node]; + const token_bytes = self.file.tree.tokenSlice(token); + var char: i64 = switch (std.zig.string_literal.parseCharLiteral(token_bytes)) { + .success => |char| char, + .failure => |err| { + const offset = self.file.tree.tokens.items(.start)[token]; + return self.fail( + .{ .byte_abs = offset + @as(u32, @intCast(err.offset())) }, + "{}", + .{err.fmtWithSource(token_bytes)}, + ); + }, + }; + if (is_negative != null) char = -char; + return self.sema.pt.intern(.{ .float = .{ + .ty = res_ty.toIntern(), + .storage = switch (res_ty.toIntern()) { + .f16_type => .{ .f16 = @floatFromInt(char) }, + .f32_type => .{ .f32 = @floatFromInt(char) }, + .f64_type => .{ .f64 = @floatFromInt(char) }, + .f80_type => .{ .f80 = @floatFromInt(char) }, + .f128_type, .comptime_float_type => .{ .f128 = @floatFromInt(char) }, + else => unreachable, + }, + } }); + }, + .number_literal => { + const token = main_tokens[num_lit_node]; + const token_bytes = self.file.tree.tokenSlice(token); + + var float = std.fmt.parseFloat(f128, token_bytes) catch |err| switch (err) { + error.InvalidCharacter => return self.fail(.{ .node_abs = num_lit_node }, "invalid character", .{}), + }; + if (is_negative != null) float = -float; + + return self.sema.pt.intern(.{ + .float = .{ + .ty = res_ty.toIntern(), + .storage = switch (res_ty.toIntern()) { + .f16_type => .{ .f16 = @floatCast(float) }, + .f32_type => .{ .f32 = @floatCast(float) }, + .f64_type => .{ .f64 = @floatCast(float) }, + .f80_type => .{ .f80 = @floatCast(float) }, + .f128_type, .comptime_float_type => .{ .f128 = float }, + else => unreachable, + }, + }, + }); + }, + .identifier => { + switch (Type.zigTypeTag(res_ty, self.sema.pt.zcu)) { + .float, .comptime_float => {}, + else => return self.fail(.{ .node_abs = num_lit_node }, "invalid ZON value", .{}), + } + const token = main_tokens[num_lit_node]; + const bytes = self.file.tree.tokenSlice(token); + const LitIdent = enum { nan, inf }; + const values = std.StaticStringMap(LitIdent).initComptime(.{ + .{ "nan", .nan }, + .{ "inf", .inf }, + }); + if (values.get(bytes)) |value| { + return switch (value) { + .nan => self.sema.pt.intern(.{ + .float = .{ + .ty = res_ty.toIntern(), + .storage = switch (res_ty.toIntern()) { + .f16_type => .{ .f16 = std.math.nan(f16) }, + .f32_type => .{ .f32 = std.math.nan(f32) }, + .f64_type => .{ .f64 = std.math.nan(f64) }, + .f80_type => .{ .f80 = std.math.nan(f80) }, + .f128_type, .comptime_float_type => .{ .f128 = std.math.nan(f128) }, + else => 
unreachable, + }, + }, + }), + .inf => self.sema.pt.intern(.{ + .float = .{ + .ty = res_ty.toIntern(), + .storage = switch (res_ty.toIntern()) { + .f16_type => .{ .f16 = if (is_negative == null) std.math.inf(f16) else -std.math.inf(f16) }, + .f32_type => .{ .f32 = if (is_negative == null) std.math.inf(f32) else -std.math.inf(f32) }, + .f64_type => .{ .f64 = if (is_negative == null) std.math.inf(f64) else -std.math.inf(f64) }, + .f80_type => .{ .f80 = if (is_negative == null) std.math.inf(f80) else -std.math.inf(f80) }, + .f128_type, .comptime_float_type => .{ .f128 = if (is_negative == null) std.math.inf(f128) else -std.math.inf(f128) }, + else => unreachable, + }, + }, + }), + }; + } + return self.fail(.{ .node_abs = num_lit_node }, "use of unknown identifier '{s}'", .{bytes}); + }, + else => return self.fail(.{ .node_abs = node }, "invalid ZON value", .{}), + } +} + +fn lowerOptional(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const tags = self.file.tree.nodes.items(.tag); + const main_tokens = self.file.tree.nodes.items(.main_token); + + if (tags[node] == .identifier) { + const token = main_tokens[node]; + const bytes = self.file.tree.tokenSlice(token); + if (std.mem.eql(u8, bytes, "null")) return .null_value; + } + + return self.sema.pt.intern(.{ .opt = .{ + .ty = res_ty.toIntern(), + .val = try self.lowerExpr(node, res_ty.optionalChild(self.sema.pt.zcu)), + } }); +} + +fn lowerNull(self: LowerZon, node: Ast.Node.Index) !InternPool.Index { + const tags = self.file.tree.nodes.items(.tag); + const main_tokens = self.file.tree.nodes.items(.main_token); + + if (tags[node] == .identifier) { + const token = main_tokens[node]; + const bytes = self.file.tree.tokenSlice(token); + if (std.mem.eql(u8, bytes, "null")) return .null_value; + } + + return self.fail(.{ .node_abs = node }, "invalid ZON value", .{}); +} + +fn lowerArray(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const gpa = self.sema.gpa; + + const array_info = res_ty.arrayInfo(self.sema.pt.zcu); + var buf: [2]NodeIndex = undefined; + const elem_nodes = try self.elements(res_ty, &buf, node); + + if (elem_nodes.len != array_info.len) { + return self.fail(.{ .node_abs = node }, "expected type '{}'", .{res_ty.fmt(self.sema.pt)}); + } + + const elems = try gpa.alloc(InternPool.Index, elem_nodes.len + @intFromBool(array_info.sentinel != null)); + defer gpa.free(elems); + + for (elem_nodes, 0..) 
|elem_node, i| { + elems[i] = try self.lowerExpr(elem_node, array_info.elem_type); + } + + if (array_info.sentinel) |sentinel| { + elems[elems.len - 1] = sentinel.toIntern(); + } + + return self.sema.pt.intern(.{ .aggregate = .{ + .ty = res_ty.toIntern(), + .storage = .{ .elems = elems }, + } }); +} + +fn lowerEnum(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const main_tokens = self.file.tree.nodes.items(.main_token); + const tags = self.file.tree.nodes.items(.tag); + const ip = &self.sema.pt.zcu.intern_pool; + + if (tags[node] != .enum_literal) { + return self.fail(.{ .node_abs = node }, "expected type '{}'", .{res_ty.fmt(self.sema.pt)}); + } + + const field_name = try self.identAsNullTerminatedString(main_tokens[node]); + const field_index = res_ty.enumFieldIndex(field_name, self.sema.pt.zcu) orelse { + return self.fail(.{ .node_abs = node }, "enum {} has no member named '{}'", .{ + res_ty.fmt(self.sema.pt), + field_name.fmt(ip), + }); + }; + + const value = try self.sema.pt.enumValueFieldIndex(res_ty, field_index); + + return value.toIntern(); +} + +fn lowerEnumLiteral(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const main_tokens = self.file.tree.nodes.items(.main_token); + const tags = self.file.tree.nodes.items(.tag); + const ip = &self.sema.pt.zcu.intern_pool; + const gpa = self.sema.gpa; + + if (tags[node] != .enum_literal) { + return self.fail(.{ .node_abs = node }, "expected type '{}'", .{res_ty.fmt(self.sema.pt)}); + } + + return ip.get(gpa, self.sema.pt.tid, .{ + .enum_literal = try self.identAsNullTerminatedString(main_tokens[node]), + }); +} + +fn lowerStructOrTuple(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const ip = &self.sema.pt.zcu.intern_pool; + return switch (ip.indexToKey(res_ty.toIntern())) { + .tuple_type => self.lowerTuple(node, res_ty), + .struct_type => self.lowerStruct(node, res_ty), + else => self.fail(.{ .node_abs = node }, "expected type '{}'", .{res_ty.fmt(self.sema.pt)}), + }; +} + +fn lowerTuple(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const ip = &self.sema.pt.zcu.intern_pool; + const gpa = self.sema.gpa; + + const tuple_info = ip.indexToKey(res_ty.toIntern()).tuple_type; + + var buf: [2]Ast.Node.Index = undefined; + const elem_nodes = try self.elements(res_ty, &buf, node); + + const field_types = tuple_info.types.get(ip); + if (elem_nodes.len < field_types.len) { + return self.fail(.{ .node_abs = node }, "missing tuple field with index {}", .{elem_nodes.len}); + } else if (elem_nodes.len > field_types.len) { + return self.fail(.{ .node_abs = node }, "index {} outside tuple of length {}", .{ + field_types.len, + elem_nodes[field_types.len], + }); + } + + const elems = try gpa.alloc(InternPool.Index, field_types.len); + defer gpa.free(elems); + + for (elems, elem_nodes, field_types) |*elem, elem_node, field_type| { + elem.* = try self.lowerExpr(elem_node, Type.fromInterned(field_type)); + } + + return self.sema.pt.intern(.{ .aggregate = .{ + .ty = res_ty.toIntern(), + .storage = .{ .elems = elems }, + } }); +} + +fn lowerStruct(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const ip = &self.sema.pt.zcu.intern_pool; + const gpa = self.sema.gpa; + + try res_ty.resolveFully(self.sema.pt); + const struct_info = self.sema.pt.zcu.typeToStruct(res_ty).?; + + var buf: [2]Ast.Node.Index = undefined; + const field_nodes = try self.fields(res_ty, &buf, node); + + const field_values = try gpa.alloc(InternPool.Index, 
struct_info.field_names.len); + defer gpa.free(field_values); + + const field_defaults = struct_info.field_inits.get(ip); + for (0..field_values.len) |i| { + field_values[i] = if (i < field_defaults.len) field_defaults[i] else .none; + } + + for (field_nodes) |field_node| { + const field_name_token = self.file.tree.firstToken(field_node) - 2; + const field_name = try self.identAsNullTerminatedString(field_name_token); + + const name_index = struct_info.nameIndex(ip, field_name) orelse { + return self.fail( + .{ .node_abs = field_node }, + "unexpected field '{}'", + .{field_name.fmt(ip)}, + ); + }; + + const field_type = Type.fromInterned(struct_info.field_types.get(ip)[name_index]); + if (field_values[name_index] != .none) { + return self.fail( + .{ .token_abs = field_name_token }, + "duplicate field '{}'", + .{field_name.fmt(ip)}, + ); + } + field_values[name_index] = try self.lowerExpr(field_node, field_type); + } + + const field_names = struct_info.field_names.get(ip); + for (field_values, field_names) |*value, name| { + if (value.* == .none) return self.fail( + .{ .node_abs = node }, + "missing field {}", + .{name.fmt(ip)}, + ); + } + + return self.sema.pt.intern(.{ .aggregate = .{ .ty = res_ty.toIntern(), .storage = .{ + .elems = field_values, + } } }); +} + +fn lowerPointer(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const tags = self.file.tree.nodes.items(.tag); + const ip = &self.sema.pt.zcu.intern_pool; + const gpa = self.sema.gpa; + + const ptr_info = res_ty.ptrInfo(self.sema.pt.zcu); + + if (ptr_info.flags.size != .Slice) { + return self.fail( + .{ .node_abs = node }, + "ZON import cannot be coerced to non slice pointer", + .{}, + ); + } + + // String literals + const string_alignment = ptr_info.flags.alignment == .none or ptr_info.flags.alignment == .@"1"; + const string_sentinel = ptr_info.sentinel == .none or ptr_info.sentinel == .zero_u8; + if (string_alignment and ptr_info.child == .u8_type and string_sentinel) { + if (tags[node] == .string_literal or tags[node] == .multiline_string_literal) { + return self.lowerStringLiteral(node, res_ty); + } + } + + // Slice literals + var buf: [2]NodeIndex = undefined; + const elem_nodes = try self.elements(res_ty, &buf, node); + + const elems = try gpa.alloc(InternPool.Index, elem_nodes.len + @intFromBool(ptr_info.sentinel != .none)); + defer gpa.free(elems); + + for (elem_nodes, 0..) 
|elem_node, i| { + elems[i] = try self.lowerExpr(elem_node, Type.fromInterned(ptr_info.child)); + } + + if (ptr_info.sentinel != .none) { + elems[elems.len - 1] = ptr_info.sentinel; + } + + const array_ty = try self.sema.pt.intern(.{ .array_type = .{ + .len = elems.len, + .sentinel = ptr_info.sentinel, + .child = ptr_info.child, + } }); + + const array = try self.sema.pt.intern(.{ .aggregate = .{ + .ty = array_ty, + .storage = .{ .elems = elems }, + } }); + + const many_item_ptr_type = try ip.get(gpa, self.sema.pt.tid, .{ .ptr_type = .{ + .child = ptr_info.child, + .sentinel = ptr_info.sentinel, + .flags = b: { + var flags = ptr_info.flags; + flags.size = .Many; + break :b flags; + }, + .packed_offset = ptr_info.packed_offset, + } }); + + const many_item_ptr = try ip.get(gpa, self.sema.pt.tid, .{ + .ptr = .{ + .ty = many_item_ptr_type, + .base_addr = .{ + .uav = .{ + .orig_ty = res_ty.toIntern(), + .val = array, + }, + }, + .byte_offset = 0, + }, + }); + + const len = (try self.sema.pt.intValue(Type.usize, elems.len)).toIntern(); + + return ip.get(gpa, self.sema.pt.tid, .{ .slice = .{ + .ty = res_ty.toIntern(), + .ptr = many_item_ptr, + .len = len, + } }); +} + +fn lowerStringLiteral(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const gpa = self.sema.gpa; + const ip = &self.sema.pt.zcu.intern_pool; + const main_tokens = self.file.tree.nodes.items(.main_token); + const tags = self.file.tree.nodes.items(.tag); + const data = self.file.tree.nodes.items(.data); + + const token = main_tokens[node]; + const raw_string = self.file.tree.tokenSlice(token); + + var bytes = std.ArrayListUnmanaged(u8){}; + defer bytes.deinit(gpa); + switch (tags[node]) { + .string_literal => switch (try std.zig.string_literal.parseWrite(bytes.writer(gpa), raw_string)) { + .success => {}, + .failure => |err| { + const offset = self.file.tree.tokens.items(.start)[token]; + return self.fail( + .{ .byte_abs = offset + @as(u32, @intCast(err.offset())) }, + "{}", + .{err.fmtWithSource(raw_string)}, + ); + }, + }, + .multiline_string_literal => { + var parser = std.zig.string_literal.multilineParser(bytes.writer(gpa)); + var tok_i = data[node].lhs; + while (tok_i <= data[node].rhs) : (tok_i += 1) { + try parser.line(self.file.tree.tokenSlice(tok_i)); + } + }, + else => unreachable, + } + + const string = try ip.getOrPutString(gpa, self.sema.pt.tid, bytes.items, .maybe_embedded_nulls); + const array_ty = try self.sema.pt.intern(.{ .array_type = .{ + .len = bytes.items.len, + .sentinel = .zero_u8, + .child = .u8_type, + } }); + const array_val = try self.sema.pt.intern(.{ .aggregate = .{ + .ty = array_ty, + .storage = .{ .bytes = string }, + } }); + return self.sema.pt.intern(.{ .slice = .{ + .ty = res_ty.toIntern(), + .ptr = try self.sema.pt.intern(.{ .ptr = .{ + .ty = .manyptr_const_u8_sentinel_0_type, + .base_addr = .{ .uav = .{ + .orig_ty = .slice_const_u8_sentinel_0_type, + .val = array_val, + } }, + .byte_offset = 0, + } }), + .len = (try self.sema.pt.intValue(Type.usize, bytes.items.len)).toIntern(), + } }); +} + +fn lowerUnion(self: LowerZon, node: Ast.Node.Index, res_ty: Type) !InternPool.Index { + const tags = self.file.tree.nodes.items(.tag); + const ip = &self.sema.pt.zcu.intern_pool; + const main_tokens = self.file.tree.nodes.items(.main_token); + + try res_ty.resolveFully(self.sema.pt); + const union_info = self.sema.pt.zcu.typeToUnion(res_ty).?; + const enum_tag_info = union_info.loadTagType(ip); + + const field_name, const maybe_field_node = if (tags[node] == .enum_literal) b: { + 
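+ // A bare enum literal names the union field directly and carries no payload; the field's type must be void, which is checked below.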
const field_name = try self.identAsNullTerminatedString(main_tokens[node]); + break :b .{ field_name, null }; + } else b: { + var buf: [2]Ast.Node.Index = undefined; + const field_nodes = try self.fields(res_ty, &buf, node); + if (field_nodes.len > 1) { + return self.fail(.{ .node_abs = node }, "expected type '{}'", .{res_ty.fmt(self.sema.pt)}); + } + const field_node = field_nodes[0]; + const field_name_token = self.file.tree.firstToken(field_node) - 2; + const field_name = try self.identAsNullTerminatedString(field_name_token); + break :b .{ field_name, field_node }; + }; + + const name_index = enum_tag_info.nameIndex(ip, field_name) orelse { + return self.fail(.{ .node_abs = node }, "expected type '{}'", .{res_ty.fmt(self.sema.pt)}); + }; + const tag_int = if (enum_tag_info.values.len == 0) b: { + // Auto numbered fields + break :b try self.sema.pt.intern(.{ .int = .{ + .ty = enum_tag_info.tag_ty, + .storage = .{ .u64 = name_index }, + } }); + } else b: { + // Explicitly numbered fields + break :b enum_tag_info.values.get(ip)[name_index]; + }; + const tag = try self.sema.pt.intern(.{ .enum_tag = .{ + .ty = union_info.enum_tag_ty, + .int = tag_int, + } }); + const field_type = Type.fromInterned(union_info.field_types.get(ip)[name_index]); + const val = if (maybe_field_node) |field_node| b: { + break :b try self.lowerExpr(field_node, field_type); + } else b: { + if (field_type.toIntern() != .void_type) { + return self.fail(.{ .node_abs = node }, "expected type '{}'", .{field_type.fmt(self.sema.pt)}); + } + break :b .void_value; + }; + return ip.getUnion(self.sema.pt.zcu.gpa, self.sema.pt.tid, .{ + .ty = res_ty.toIntern(), + .tag = tag, + .val = val, + }); +} + +fn fields( + self: LowerZon, + container: Type, + buf: *[2]NodeIndex, + node: NodeIndex, +) ![]const NodeIndex { + if (self.file.tree.fullStructInit(buf, node)) |init| { + if (init.ast.type_expr != 0) { + return self.fail( + .{ .node_abs = init.ast.type_expr }, + "ZON cannot contain type expressions", + .{}, + ); + } + return init.ast.fields; + } + + if (self.file.tree.fullArrayInit(buf, node)) |init| { + if (init.ast.type_expr != 0) { + return self.fail( + .{ .node_abs = init.ast.type_expr }, + "ZON cannot contain type expressions", + .{}, + ); + } + if (init.ast.elements.len != 0) { + return self.fail( + .{ .node_abs = node }, + "expected type '{}'", + .{container.fmt(self.sema.pt)}, + ); + } + return init.ast.elements; + } + + return self.fail(.{ .node_abs = node }, "expected type '{}'", .{container.fmt(self.sema.pt)}); +} + +fn elements( + self: LowerZon, + container: Type, + buf: *[2]NodeIndex, + node: NodeIndex, +) ![]const NodeIndex { + if (self.file.tree.fullArrayInit(buf, node)) |init| { + if (init.ast.type_expr != 0) { + return self.fail( + .{ .node_abs = init.ast.type_expr }, + "ZON cannot contain type expressions", + .{}, + ); + } + return init.ast.elements; + } + + if (self.file.tree.fullStructInit(buf, node)) |init| { + if (init.ast.type_expr != 0) { + return self.fail( + .{ .node_abs = init.ast.type_expr }, + "ZON cannot contain type expressions", + .{}, + ); + } + if (init.ast.fields.len == 0) { + return init.ast.fields; + } + } + + return self.fail(.{ .node_abs = node }, "expected type '{}'", .{container.fmt(self.sema.pt)}); +} + +fn createErrorWithOptionalNote( + self: LowerZon, + src_loc: LazySrcLoc, + comptime fmt: []const u8, + args: anytype, + note: ?[]const u8, +) error{OutOfMemory}!*Zcu.ErrorMsg { + const notes = try self.sema.pt.zcu.gpa.alloc(Zcu.ErrorMsg, if (note == null) 0 else 1); + errdefer 
self.sema.pt.zcu.gpa.free(notes); + if (note) |n| { + notes[0] = try Zcu.ErrorMsg.init( + self.sema.pt.zcu.gpa, + src_loc, + "{s}", + .{n}, + ); + } + + const err_msg = try Zcu.ErrorMsg.create( + self.sema.pt.zcu.gpa, + src_loc, + fmt, + args, + ); + err_msg.*.notes = notes; + return err_msg; +} + +fn failWithNumberError( + self: LowerZon, + token: Ast.TokenIndex, + err: NumberLiteralError, +) (Allocator.Error || error{AnalysisFail}) { + const offset = self.file.tree.tokens.items(.start)[token]; + const src_loc = try self.lazySrcLoc(.{ .byte_abs = offset + @as(u32, @intCast(err.offset())) }); + const token_bytes = self.file.tree.tokenSlice(token); + const err_msg = try self.createErrorWithOptionalNote( + src_loc, + "{}", + .{err.fmtWithSource(token_bytes)}, + err.noteWithSource(token_bytes), + ); + try self.sema.pt.zcu.failed_files.putNoClobber(self.sema.pt.zcu.gpa, self.file, err_msg); + return error.AnalysisFail; +} diff --git a/test/behavior/zon.zig b/test/behavior/zon.zig new file mode 100644 index 000000000000..f0408fb18cfb --- /dev/null +++ b/test/behavior/zon.zig @@ -0,0 +1,321 @@ +const std = @import("std"); + +const expect = std.testing.expect; +const expectEqual = std.testing.expectEqual; +const expectEqualDeep = std.testing.expectEqualDeep; +const expectEqualSlices = std.testing.expectEqualSlices; +const expectEqualStrings = std.testing.expectEqualStrings; + +test "void" { + try expectEqual({}, @as(void, @import("zon/void.zon"))); +} + +test "bool" { + try expectEqual(true, @as(bool, @import("zon/true.zon"))); + try expectEqual(false, @as(bool, @import("zon/false.zon"))); +} + +test "optional" { + const some: ?u32 = @import("zon/some.zon"); + const none: ?u32 = @import("zon/none.zon"); + const @"null": @TypeOf(null) = @import("zon/none.zon"); + try expectEqual(@as(u32, 10), some); + try expectEqual(@as(?u32, null), none); + try expectEqual(null, @"null"); +} + +test "union" { + // No tag + { + const Union = union { + x: f32, + y: bool, + z: void, + }; + + const union1: Union = @import("zon/union1.zon"); + const union2: Union = @import("zon/union2.zon"); + const union3: Union = @import("zon/union3.zon"); + const union4: Union = @import("zon/union4.zon"); + + try expectEqual(union1.x, 1.5); + try expectEqual(union2.y, true); + try expectEqual(union3.z, {}); + try expectEqual(union4.z, {}); + } + + // Inferred tag + { + const Union = union(enum) { + x: f32, + y: bool, + z: void, + }; + + const union1: Union = @import("zon/union1.zon"); + const union2: Union = @import("zon/union2.zon"); + const union3: Union = @import("zon/union3.zon"); + const union4: Union = @import("zon/union4.zon"); + + try expectEqual(union1.x, 1.5); + try expectEqual(union2.y, true); + try expectEqual(union3.z, {}); + try expectEqual(union4.z, {}); + } + + // Explicit tag + { + const Tag = enum(i128) { + x = -1, + y = 2, + z = 1, + }; + const Union = union(Tag) { + x: f32, + y: bool, + z: void, + }; + + const union1: Union = @import("zon/union1.zon"); + const union2: Union = @import("zon/union2.zon"); + const union3: Union = @import("zon/union3.zon"); + const union4: Union = @import("zon/union4.zon"); + + try expectEqual(union1.x, 1.5); + try expectEqual(union2.y, true); + try expectEqual(union3.z, {}); + try expectEqual(union4.z, {}); + } +} + +test "struct" { + const Vec0 = struct {}; + const Vec1 = struct { x: f32 }; + const Vec2 = struct { x: f32, y: f32 }; + const Escaped = struct { @"0": f32, foo: f32 }; + try expectEqual(Vec0{}, @as(Vec0, @import("zon/vec0.zon"))); + try expectEqual(Vec1{ .x = 1.5 }, 
diff --git a/test/behavior/zon.zig b/test/behavior/zon.zig
new file mode 100644
index 000000000000..f0408fb18cfb
--- /dev/null
+++ b/test/behavior/zon.zig
@@ -0,0 +1,321 @@
+const std = @import("std");
+
+const expect = std.testing.expect;
+const expectEqual = std.testing.expectEqual;
+const expectEqualDeep = std.testing.expectEqualDeep;
+const expectEqualSlices = std.testing.expectEqualSlices;
+const expectEqualStrings = std.testing.expectEqualStrings;
+
+test "void" {
+    try expectEqual({}, @as(void, @import("zon/void.zon")));
+}
+
+test "bool" {
+    try expectEqual(true, @as(bool, @import("zon/true.zon")));
+    try expectEqual(false, @as(bool, @import("zon/false.zon")));
+}
+
+test "optional" {
+    const some: ?u32 = @import("zon/some.zon");
+    const none: ?u32 = @import("zon/none.zon");
+    const @"null": @TypeOf(null) = @import("zon/none.zon");
+    try expectEqual(@as(u32, 10), some);
+    try expectEqual(@as(?u32, null), none);
+    try expectEqual(null, @"null");
+}
+
+test "union" {
+    // No tag
+    {
+        const Union = union {
+            x: f32,
+            y: bool,
+            z: void,
+        };
+
+        const union1: Union = @import("zon/union1.zon");
+        const union2: Union = @import("zon/union2.zon");
+        const union3: Union = @import("zon/union3.zon");
+        const union4: Union = @import("zon/union4.zon");
+
+        try expectEqual(union1.x, 1.5);
+        try expectEqual(union2.y, true);
+        try expectEqual(union3.z, {});
+        try expectEqual(union4.z, {});
+    }
+
+    // Inferred tag
+    {
+        const Union = union(enum) {
+            x: f32,
+            y: bool,
+            z: void,
+        };
+
+        const union1: Union = @import("zon/union1.zon");
+        const union2: Union = @import("zon/union2.zon");
+        const union3: Union = @import("zon/union3.zon");
+        const union4: Union = @import("zon/union4.zon");
+
+        try expectEqual(union1.x, 1.5);
+        try expectEqual(union2.y, true);
+        try expectEqual(union3.z, {});
+        try expectEqual(union4.z, {});
+    }
+
+    // Explicit tag
+    {
+        const Tag = enum(i128) {
+            x = -1,
+            y = 2,
+            z = 1,
+        };
+        const Union = union(Tag) {
+            x: f32,
+            y: bool,
+            z: void,
+        };
+
+        const union1: Union = @import("zon/union1.zon");
+        const union2: Union = @import("zon/union2.zon");
+        const union3: Union = @import("zon/union3.zon");
+        const union4: Union = @import("zon/union4.zon");
+
+        try expectEqual(union1.x, 1.5);
+        try expectEqual(union2.y, true);
+        try expectEqual(union3.z, {});
+        try expectEqual(union4.z, {});
+    }
+}
+
+test "struct" {
+    const Vec0 = struct {};
+    const Vec1 = struct { x: f32 };
+    const Vec2 = struct { x: f32, y: f32 };
+    const Escaped = struct { @"0": f32, foo: f32 };
+    try expectEqual(Vec0{}, @as(Vec0, @import("zon/vec0.zon")));
+    try expectEqual(Vec1{ .x = 1.5 }, @as(Vec1, @import("zon/vec1.zon")));
+    try expectEqual(Vec2{ .x = 1.5, .y = 2 }, @as(Vec2, @import("zon/vec2.zon")));
+    try expectEqual(Escaped{ .@"0" = 1.5, .foo = 2 }, @as(Escaped, @import("zon/escaped_struct.zon")));
+}
+
+test "struct default fields" {
+    const Vec3 = struct {
+        x: f32,
+        y: f32,
+        z: f32 = 123.4,
+    };
+    try expectEqual(Vec3{ .x = 1.5, .y = 2.0, .z = 123.4 }, @as(Vec3, @import("zon/vec2.zon")));
+    const ascribed: Vec3 = @import("zon/vec2.zon");
+    try expectEqual(Vec3{ .x = 1.5, .y = 2.0, .z = 123.4 }, ascribed);
+}
+
+test "struct enum field" {
+    const Struct = struct {
+        x: enum { x, y, z },
+    };
+    try expectEqual(Struct{ .x = .z }, @as(Struct, @import("zon/enum_field.zon")));
+}
+
+test "tuple" {
+    const Tuple = struct { f32, bool, []const u8, u16 };
+    try expectEqualDeep(Tuple{ 1.2, true, "hello", 3 }, @as(Tuple, @import("zon/tuple.zon")));
+}
+
+test "char" {
+    try expectEqual(@as(u8, 'a'), @as(u8, @import("zon/a.zon")));
+    try expectEqual(@as(u8, 'z'), @as(u8, @import("zon/z.zon")));
+    try expectEqual(@as(i8, -'a'), @as(i8, @import("zon/a_neg.zon")));
+}
+
+test "arrays" {
+    try expectEqual([0]u8{}, @as([0]u8, @import("zon/vec0.zon")));
+    try expectEqual([0:1]u8{}, @as([0:1]u8, @import("zon/vec0.zon")));
+    try expectEqual(1, @as([0:1]u8, @import("zon/vec0.zon"))[0]);
+    try expectEqual([4]u8{ 'a', 'b', 'c', 'd' }, @as([4]u8, @import("zon/array.zon")));
+    try expectEqual([4:2]u8{ 'a', 'b', 'c', 'd' }, @as([4:2]u8, @import("zon/array.zon")));
+    try expectEqual(2, @as([4:2]u8, @import("zon/array.zon"))[4]);
+}
+
+test "slices, arrays, tuples" {
+    {
+        const expected_slice: []const u8 = &.{};
+        const found_slice: []const u8 = @import("zon/slice-empty.zon");
+        try expectEqualSlices(u8, expected_slice, found_slice);
+
+        const expected_array: [0]u8 = .{};
+        const found_array: [0]u8 = @import("zon/slice-empty.zon");
+        try expectEqual(expected_array, found_array);
+
+        const T = struct {};
+        const expected_tuple: T = .{};
+        const found_tuple: T = @import("zon/slice-empty.zon");
+        try expectEqual(expected_tuple, found_tuple);
+    }
+
+    {
+        const expected_slice: []const u8 = &.{1};
+        const found_slice: []const u8 = @import("zon/slice-1.zon");
+        try expectEqualSlices(u8, expected_slice, found_slice);
+
+        const expected_array: [1]u8 = .{1};
+        const found_array: [1]u8 = @import("zon/slice-1.zon");
+        try expectEqual(expected_array, found_array);
+
+        const T = struct { u8 };
+        const expected_tuple: T = .{1};
+        const found_tuple: T = @import("zon/slice-1.zon");
+        try expectEqual(expected_tuple, found_tuple);
+    }
+
+    {
+        const expected_slice: []const u8 = &.{ 'a', 'b', 'c' };
+        const found_slice: []const u8 = @import("zon/slice-abc.zon");
+        try expectEqualSlices(u8, expected_slice, found_slice);
+
+        const expected_array: [3]u8 = .{ 'a', 'b', 'c' };
+        const found_array: [3]u8 = @import("zon/slice-abc.zon");
+        try expectEqual(expected_array, found_array);
+
+        const T = struct { u8, u8, u8 };
+        const expected_tuple: T = .{ 'a', 'b', 'c' };
+        const found_tuple: T = @import("zon/slice-abc.zon");
+        try expectEqual(expected_tuple, found_tuple);
+    }
+}
+
+test "string literals" {
+    try expectEqualSlices(u8, "abc", @import("zon/abc.zon"));
+    try expectEqualSlices(u8, "ab\\c", @import("zon/abc-escaped.zon"));
+    const zero_terminated: [:0]const u8 = @import("zon/abc.zon");
+    try expectEqualDeep(zero_terminated, "abc");
+    try expectEqual(0, zero_terminated[zero_terminated.len]);
+    try expectEqualStrings(
+        \\Hello, world!
+        \\This is a multiline string!
+        \\ There are no escapes, we can, for example, include \n in the string
+    , @import("zon/multiline_string.zon"));
+    try expectEqualStrings("a\nb\x00c", @import("zon/string_embedded_null.zon"));
+}
+
+test "enum literals" {
+    const Enum = enum {
+        foo,
+        bar,
+        baz,
+        @"0\na",
+    };
+    try expectEqual(Enum.foo, @as(Enum, @import("zon/foo.zon")));
+    try expectEqual(.foo, @as(@TypeOf(.foo), @import("zon/foo.zon")));
+    try expectEqual(Enum.@"0\na", @as(Enum, @import("zon/escaped_enum.zon")));
+}
+
+test "int" {
+    const expected = .{
+        // Test various numbers and types
+        @as(u8, 10),
+        @as(i16, 24),
+        @as(i14, -4),
+        @as(i32, -123),
+
+        // Test limits
+        @as(i8, 127),
+        @as(i8, -128),
+
+        // Test characters
+        @as(u8, 'a'),
+        @as(u8, 'z'),
+
+        // Test big integers
+        @as(u65, 36893488147419103231),
+        @as(u65, 36893488147419103231),
+        @as(i128, -18446744073709551615), // Only a big int due to negation
+        @as(i128, -9223372036854775809), // Only a big int due to negation
+
+        // Test big integer limits
+        @as(i66, 36893488147419103231),
+        @as(i66, -36893488147419103232),
+
+        // Test parsing whole number floats as integers
+        @as(i8, -1),
+        @as(i8, 123),
+
+        // Test non-decimal integers
+        @as(i16, 0xff),
+        @as(i16, -0xff),
+        @as(i16, 0o77),
+        @as(i16, -0o77),
+        @as(i16, 0b11),
+        @as(i16, -0b11),
+
+        // Test non-decimal big integers
+        @as(u65, 0x1ffffffffffffffff),
+        @as(i66, 0x1ffffffffffffffff),
+        @as(i66, -0x1ffffffffffffffff),
+        @as(u65, 0x1ffffffffffffffff),
+        @as(i66, 0x1ffffffffffffffff),
+        @as(i66, -0x1ffffffffffffffff),
+        @as(u65, 0x1ffffffffffffffff),
+        @as(i66, 0x1ffffffffffffffff),
+        @as(i66, -0x1ffffffffffffffff),
+    };
+    const actual: @TypeOf(expected) = @import("zon/ints.zon");
+    try expectEqual(expected, actual);
+}
+
+test "floats" {
+    const expected = .{
+        // Test decimals
+        @as(f16, 0.5),
+        @as(f32, 123.456),
+        @as(f64, -123.456),
+        @as(f128, 42.5),
+
+        // Test whole numbers with and without decimals
+        @as(f16, 5.0),
+        @as(f16, 5.0),
+        @as(f32, -102),
+        @as(f32, -102),
+
+        // Test characters and negated characters
+        @as(f32, 'a'),
+        @as(f32, 'z'),
+        @as(f32, -'z'),
+
+        // Test big integers
+        @as(f32, 36893488147419103231),
+        @as(f32, -36893488147419103231),
+        @as(f128, 0x1ffffffffffffffff),
+        @as(f32, 0x1ffffffffffffffff),
+
+        // Exponents, underscores
+        @as(f32, 123.0E+77),
+
+        // Hexadecimal
+        @as(f32, 0x103.70p-5),
+        @as(f32, -0x103.70),
+        @as(f32, 0x1234_5678.9ABC_CDEFp-10),
+    };
+    const actual: @TypeOf(expected) = @import("zon/floats.zon");
+    try expectEqual(actual, expected);
+}
+
+test "inf and nan" {
+    // comptime float
+    {
+        const actual: struct { comptime_float, comptime_float, comptime_float, comptime_float } = @import("zon/inf_and_nan.zon");
+        try expect(std.math.isNan(actual[0]));
+        try expect(std.math.isNan(actual[1]));
+        try expect(std.math.isPositiveInf(@as(f128, @floatCast(actual[2]))));
+        try expect(std.math.isNegativeInf(@as(f128, @floatCast(actual[3]))));
+    }
+
+    // f32
+    {
+        const actual: struct { f32, f32, f32, f32 } = @import("zon/inf_and_nan.zon");
+        try expect(std.math.isNan(actual[0]));
+        try expect(std.math.isNan(actual[1]));
+        try expect(std.math.isPositiveInf(actual[2]));
+        try expect(std.math.isNegativeInf(actual[3]));
+    }
+}
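Note how the behavior tests rely on the result type at each `@import` site: the ZON file itself is untyped, so the same fixture can lower to a slice, an array, a tuple, or a struct with defaulted fields. A small illustrative fragment (hypothetical Point/Point3 declarations) built on the `zon/vec2.zon` fixture added below, which contains `.{ .x = 1.5, .y = 2 }`:

const Point = struct { x: f32, y: f32 };
const Point3 = struct { x: f32, y: f32, z: f32 = 0 };

// The annotated result type drives lowering; fields missing from the file fall back to their defaults.
const p: Point = @import("zon/vec2.zon");
const q: Point3 = @import("zon/vec2.zon"); // q.z == 0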
diff --git a/test/behavior/zon/a.zon b/test/behavior/zon/a.zon
new file mode 100644
index 000000000000..67fe32dafeb1
--- /dev/null
+++ b/test/behavior/zon/a.zon
@@ -0,0 +1 @@
+'a'
diff --git a/test/behavior/zon/a_neg.zon b/test/behavior/zon/a_neg.zon
new file mode 100644
index 000000000000..b14b16f3d6e9
--- /dev/null
+++ b/test/behavior/zon/a_neg.zon
@@ -0,0 +1 @@
+-'a'
diff --git a/test/behavior/zon/abc-escaped.zon b/test/behavior/zon/abc-escaped.zon
new file mode 100644
index 000000000000..8672bb944874
--- /dev/null
+++ b/test/behavior/zon/abc-escaped.zon
@@ -0,0 +1 @@
+"ab\\c"
diff --git a/test/behavior/zon/abc.zon b/test/behavior/zon/abc.zon
new file mode 100644
index 000000000000..d1cc1b4e5215
--- /dev/null
+++ b/test/behavior/zon/abc.zon
@@ -0,0 +1 @@
+"abc"
diff --git a/test/behavior/zon/array.zon b/test/behavior/zon/array.zon
new file mode 100644
index 000000000000..8ee5ebe0f5f5
--- /dev/null
+++ b/test/behavior/zon/array.zon
@@ -0,0 +1 @@
+.{ 'a', 'b', 'c', 'd' }
diff --git a/test/behavior/zon/enum_field.zon b/test/behavior/zon/enum_field.zon
new file mode 100644
index 000000000000..33011e2f6589
--- /dev/null
+++ b/test/behavior/zon/enum_field.zon
@@ -0,0 +1 @@
+.{ .x = .z }
diff --git a/test/behavior/zon/escaped_enum.zon b/test/behavior/zon/escaped_enum.zon
new file mode 100644
index 000000000000..14e46d587c42
--- /dev/null
+++ b/test/behavior/zon/escaped_enum.zon
@@ -0,0 +1 @@
+.@"0\na"
diff --git a/test/behavior/zon/escaped_struct.zon b/test/behavior/zon/escaped_struct.zon
new file mode 100644
index 000000000000..c5cb978f3303
--- /dev/null
+++ b/test/behavior/zon/escaped_struct.zon
@@ -0,0 +1,2 @@
+// zig fmt: off
+.{ .@"0" = 1.5, .@"foo" = 2 }
diff --git a/test/behavior/zon/false.zon b/test/behavior/zon/false.zon
new file mode 100644
index 000000000000..0064d7bc7d22
--- /dev/null
+++ b/test/behavior/zon/false.zon
@@ -0,0 +1,4 @@
+// Comment
+false // Another comment
+// Yet another comment
+
diff --git a/test/behavior/zon/floats.zon b/test/behavior/zon/floats.zon
new file mode 100644
index 000000000000..4ea199087977
--- /dev/null
+++ b/test/behavior/zon/floats.zon
@@ -0,0 +1,26 @@
+.{
+    0.5,
+    123.456,
+    -123.456,
+    42.5,
+
+    5.0,
+    5,
+    -102.0,
+    -102,
+
+    'a',
+    'z',
+    -'z',
+
+    36893488147419103231,
+    -36893488147419103231,
+    0x1ffffffffffffffff,
+    0x1ffffffffffffffff,
+
+    12_3.0E+77,
+
+    0x103.70p-5,
+    -0x103.70,
+    0x1234_5678.9ABC_CDEFp-10,
+}
diff --git a/test/behavior/zon/foo.zon b/test/behavior/zon/foo.zon
new file mode 100644
index 000000000000..1e8ea91de539
--- /dev/null
+++ b/test/behavior/zon/foo.zon
@@ -0,0 +1 @@
+.foo
diff --git a/test/behavior/zon/inf_and_nan.zon b/test/behavior/zon/inf_and_nan.zon
new file mode 100644
index 000000000000..0b264f8ded4d
--- /dev/null
+++ b/test/behavior/zon/inf_and_nan.zon
@@ -0,0 +1,6 @@
+.{
+    nan,
+    -nan,
+    inf,
+    -inf,
+}
diff --git a/test/behavior/zon/ints.zon b/test/behavior/zon/ints.zon
new file mode 100644
index 000000000000..fb1060324e2b
--- /dev/null
+++ b/test/behavior/zon/ints.zon
@@ -0,0 +1,40 @@
+.{
+    10,
+    24,
+    -4,
+    -123,
+
+    127,
+    -128,
+
+    'a',
+    'z',
+
+    36893488147419103231,
+    368934_881_474191032_31,
+    -18446744073709551615,
+    -9223372036854775809,
+
+    36893488147419103231,
+    -36893488147419103232,
+
+    -1.0,
+    123.0,
+
+    0xff,
+    -0xff,
+    0o77,
+    -0o77,
+    0b11,
+    -0b11,
+
+    0x1ffffffffffffffff,
+    0x1ffffffffffffffff,
+    -0x1ffffffffffffffff,
+    0o3777777777777777777777,
+    0o3777777777777777777777,
+    -0o3777777777777777777777,
+    0b11111111111111111111111111111111111111111111111111111111111111111,
+    0b11111111111111111111111111111111111111111111111111111111111111111,
+    -0b11111111111111111111111111111111111111111111111111111111111111111,
+}
diff --git a/test/behavior/zon/multiline_string.zon b/test/behavior/zon/multiline_string.zon
new file mode 100644
index 000000000000..5908802ecc65
--- /dev/null
+++ b/test/behavior/zon/multiline_string.zon
@@ -0,0 +1,4 @@
+// zig fmt: off
+ \\Hello, world!
+\\This is a multiline string!
+ \\ There are no escapes, we can, for example, include \n in the string
diff --git a/test/behavior/zon/none.zon b/test/behavior/zon/none.zon
new file mode 100644
index 000000000000..19765bd501b6
--- /dev/null
+++ b/test/behavior/zon/none.zon
@@ -0,0 +1 @@
+null
diff --git a/test/behavior/zon/slice-1.zon b/test/behavior/zon/slice-1.zon
new file mode 100644
index 000000000000..7714116d4567
--- /dev/null
+++ b/test/behavior/zon/slice-1.zon
@@ -0,0 +1 @@
+.{ 1 }
\ No newline at end of file
diff --git a/test/behavior/zon/slice-abc.zon b/test/behavior/zon/slice-abc.zon
new file mode 100644
index 000000000000..e033b2e6ff87
--- /dev/null
+++ b/test/behavior/zon/slice-abc.zon
@@ -0,0 +1 @@
+.{'a', 'b', 'c'}
\ No newline at end of file
diff --git a/test/behavior/zon/slice-empty.zon b/test/behavior/zon/slice-empty.zon
new file mode 100644
index 000000000000..c1ab9cdd5018
--- /dev/null
+++ b/test/behavior/zon/slice-empty.zon
@@ -0,0 +1 @@
+.{}
\ No newline at end of file
diff --git a/test/behavior/zon/some.zon b/test/behavior/zon/some.zon
new file mode 100644
index 000000000000..f599e28b8ab0
--- /dev/null
+++ b/test/behavior/zon/some.zon
@@ -0,0 +1 @@
+10
diff --git a/test/behavior/zon/string_embedded_null.zon b/test/behavior/zon/string_embedded_null.zon
new file mode 100644
index 000000000000..420316636402
--- /dev/null
+++ b/test/behavior/zon/string_embedded_null.zon
@@ -0,0 +1 @@
+"a\nb\x00c"
diff --git a/test/behavior/zon/true.zon b/test/behavior/zon/true.zon
new file mode 100644
index 000000000000..27ba77ddaf61
--- /dev/null
+++ b/test/behavior/zon/true.zon
@@ -0,0 +1 @@
+true
diff --git a/test/behavior/zon/tuple.zon b/test/behavior/zon/tuple.zon
new file mode 100644
index 000000000000..61e6be9fcf9d
--- /dev/null
+++ b/test/behavior/zon/tuple.zon
@@ -0,0 +1 @@
+.{ 1.2, true, "hello", 3 }
diff --git a/test/behavior/zon/union1.zon b/test/behavior/zon/union1.zon
new file mode 100644
index 000000000000..3dc052f89239
--- /dev/null
+++ b/test/behavior/zon/union1.zon
@@ -0,0 +1 @@
+.{ .x = 1.5 }
diff --git a/test/behavior/zon/union2.zon b/test/behavior/zon/union2.zon
new file mode 100644
index 000000000000..5c25d1569001
--- /dev/null
+++ b/test/behavior/zon/union2.zon
@@ -0,0 +1 @@
+.{ .y = true }
diff --git a/test/behavior/zon/union3.zon b/test/behavior/zon/union3.zon
new file mode 100644
index 000000000000..3baf4ac17349
--- /dev/null
+++ b/test/behavior/zon/union3.zon
@@ -0,0 +1 @@
+.z
diff --git a/test/behavior/zon/union4.zon b/test/behavior/zon/union4.zon
new file mode 100644
index 000000000000..4224d9968bd1
--- /dev/null
+++ b/test/behavior/zon/union4.zon
@@ -0,0 +1 @@
+.{ .z = {} }
diff --git a/test/behavior/zon/vec0.zon b/test/behavior/zon/vec0.zon
new file mode 100644
index 000000000000..47c47bc057a0
--- /dev/null
+++ b/test/behavior/zon/vec0.zon
@@ -0,0 +1 @@
+.{}
diff --git a/test/behavior/zon/vec1.zon b/test/behavior/zon/vec1.zon
new file mode 100644
index 000000000000..3dc052f89239
--- /dev/null
+++ b/test/behavior/zon/vec1.zon
@@ -0,0 +1 @@
+.{ .x = 1.5 }
diff --git a/test/behavior/zon/vec2.zon b/test/behavior/zon/vec2.zon
new file mode 100644
index 000000000000..cc4bff59b9a9
--- /dev/null
+++ b/test/behavior/zon/vec2.zon
@@ -0,0 +1 @@
+.{ .x = 1.5, .y = 2 }
diff --git a/test/behavior/zon/void.zon b/test/behavior/zon/void.zon
new file mode 100644
index 000000000000..9e26dfeeb6e6
--- /dev/null
+++ b/test/behavior/zon/void.zon
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/test/behavior/zon/z.zon b/test/behavior/zon/z.zon
new file mode 100644
index 000000000000..6ad22b40ef89
--- /dev/null
+++ b/test/behavior/zon/z.zon
@@ -0,0 +1 @@
+'z'
diff --git a/test/cases/compile_errors/@import_zon_addr_slice.zig b/test/cases/compile_errors/@import_zon_addr_slice.zig
new file mode 100644
index 000000000000..dca859e6581f
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_addr_slice.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: struct { value: []const i32 } = @import("zon/addr_slice.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/addr_slice.zon
+//
+// addr_slice.zon:2:14: error: expected type '[]const i32'
+// tmp.zig:2:54: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_array_len.zig b/test/cases/compile_errors/@import_zon_array_len.zig
new file mode 100644
index 000000000000..342504d5540b
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_array_len.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: [4]u8 = @import("zon/array.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/array.zon
+//
+// array.zon:1:2: error: expected type '[4]u8'
+// tmp.zig:2:30: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_bad_import.zig b/test/cases/compile_errors/@import_zon_bad_import.zig
new file mode 100644
index 000000000000..a84f24dd8250
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_bad_import.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    _ = @import(
+        "bogus-does-not-exist.zon",
+    );
+}
+
+// error
+// backend=stage2
+// target=native
+// output_mode=Exe
+//
+// :3:9: error: unable to open 'bogus-does-not-exist.zon': FileNotFound
diff --git a/test/cases/compile_errors/@import_zon_coerce_pointer.zig b/test/cases/compile_errors/@import_zon_coerce_pointer.zig
new file mode 100644
index 000000000000..fc1b94639f60
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_coerce_pointer.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: *struct { u8, u8, u8 } = @import("zon/array.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/array.zon
+//
+// array.zon:1:2: error: ZON import cannot be coerced to non slice pointer
+// tmp.zig:2:47: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_double_negation_float.zig b/test/cases/compile_errors/@import_zon_double_negation_float.zig
new file mode 100644
index 000000000000..fdcbe5138ca2
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_double_negation_float.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: f32 = @import("zon/double_negation_float.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/double_negation_float.zon
+//
+// double_negation_float.zon:1:1: error: invalid ZON value
+// tmp.zig:2:28: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_double_negation_int.zig b/test/cases/compile_errors/@import_zon_double_negation_int.zig
new file mode 100644
index 000000000000..2201b09fdda6
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_double_negation_int.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: i32 = @import("zon/double_negation_int.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/double_negation_int.zon
+//
+// double_negation_int.zon:1:1: error: expected type 'i32'
+// tmp.zig:2:28: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_enum_embedded_null.zig b/test/cases/compile_errors/@import_zon_enum_embedded_null.zig
new file mode 100644
index 000000000000..c7d1dccf5c29
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_enum_embedded_null.zig
@@ -0,0 +1,14 @@
+const std = @import("std");
+pub fn main() void {
+    const E = enum { foo };
+    const f: struct { E, E } = @import("zon/enum_embedded_null.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/enum_embedded_null.zon
+//
+// enum_embedded_null.zon:2:6: error: identifier cannot contain null bytes
+// tmp.zig:4:40: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_invalid_character.zig b/test/cases/compile_errors/@import_zon_invalid_character.zig
new file mode 100644
index 000000000000..a2bf474f63ea
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_invalid_character.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: u8 = @import("zon/invalid_character.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/invalid_character.zon
+//
+// invalid_character.zon:1:3: error: invalid escape character: 'a'
+// tmp.zig:2:27: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_invalid_number.zig b/test/cases/compile_errors/@import_zon_invalid_number.zig
new file mode 100644
index 000000000000..a18f1631de2f
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_invalid_number.zig
@@ -0,0 +1,11 @@
+pub fn main() void {
+    const f: u128 = @import("zon/invalid_number.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/invalid_number.zon
+//
+// invalid_number.zon:1:19: error: invalid digit 'a' for decimal base
diff --git a/test/cases/compile_errors/@import_zon_invalid_string.zig b/test/cases/compile_errors/@import_zon_invalid_string.zig
new file mode 100644
index 000000000000..e103a4507447
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_invalid_string.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: []const u8 = @import("zon/invalid_string.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/invalid_string.zon
+//
+// invalid_string.zon:1:5: error: invalid escape character: 'a'
+// tmp.zig:2:35: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_leading_zero_in_integer.zig b/test/cases/compile_errors/@import_zon_leading_zero_in_integer.zig
new file mode 100644
index 000000000000..d901d5621a92
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_leading_zero_in_integer.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: u128 = @import("zon/leading_zero_in_integer.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/leading_zero_in_integer.zon
+//
+// leading_zero_in_integer.zon:1:1: error: number '0012' has leading zero
+// leading_zero_in_integer.zon:1:1: note: use '0o' prefix for octal literals
diff --git a/test/cases/compile_errors/@import_zon_negative_zero.zig b/test/cases/compile_errors/@import_zon_negative_zero.zig
new file mode 100644
index 000000000000..5e935098a066
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_negative_zero.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: i8 = @import("zon/negative_zero.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/negative_zero.zon
+//
+// negative_zero.zon:1:1: error: integer literal '-0' is ambiguous
+// tmp.zig:2:27: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_number_fail_limits.zig b/test/cases/compile_errors/@import_zon_number_fail_limits.zig
new file mode 100644
index 000000000000..9d04ce1b74ad
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_number_fail_limits.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: i66 = @import("zon/large_number.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/large_number.zon
+//
+// large_number.zon:1:1: error: type 'i66' cannot represent integer value '36893488147419103232'
+// tmp.zig:2:28: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_struct_dup_field.zig b/test/cases/compile_errors/@import_zon_struct_dup_field.zig
new file mode 100644
index 000000000000..fa35aa268fb0
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_struct_dup_field.zig
@@ -0,0 +1,13 @@
+const std = @import("std");
+pub fn main() void {
+    const f: struct { name: u8 } = @import("zon/struct_dup_field.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/struct_dup_field.zon
+//
+// struct_dup_field.zon:3:6: error: duplicate field 'name'
+// tmp.zig:3:44: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_syntax_error.zig b/test/cases/compile_errors/@import_zon_syntax_error.zig
new file mode 100644
index 000000000000..0035b5da288b
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_syntax_error.zig
@@ -0,0 +1,11 @@
+pub fn main() void {
+    const f: bool = @import("zon/syntax_error.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/syntax_error.zon
+//
+// syntax_error.zon:3:13: error: expected ',' after initializer
diff --git a/test/cases/compile_errors/@import_zon_type_decl.zig b/test/cases/compile_errors/@import_zon_type_decl.zig
new file mode 100644
index 000000000000..5d680249d2c0
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_type_decl.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: struct { foo: type } = @import("zon/type_decl.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/type_decl.zon
+//
+// type_decl.zon:2:12: error: invalid ZON value
+// tmp.zig:2:45: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_type_expr_array.zig b/test/cases/compile_errors/@import_zon_type_expr_array.zig
new file mode 100644
index 000000000000..994b986ea96e
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_type_expr_array.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: [3]i32 = @import("zon/type_expr_array.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/type_expr_array.zon
+//
+// type_expr_array.zon:1:1: error: ZON cannot contain type expressions
+// tmp.zig:2:31: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_type_expr_fn.zig b/test/cases/compile_errors/@import_zon_type_expr_fn.zig
new file mode 100644
index 000000000000..dfc012339ffd
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_type_expr_fn.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: i32 = @import("zon/type_expr_fn.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/type_expr_fn.zon
+//
+// type_expr_fn.zon:1:15: error: expected type 'i32'
+// tmp.zig:2:28: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_type_expr_struct.zig b/test/cases/compile_errors/@import_zon_type_expr_struct.zig
new file mode 100644
index 000000000000..bedc9ea37715
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_type_expr_struct.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: struct { x: f32, y: f32 } = @import("zon/type_expr_struct.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/type_expr_struct.zon
+//
+// type_expr_struct.zon:1:1: error: ZON cannot contain type expressions
+// tmp.zig:2:50: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_type_expr_tuple.zig b/test/cases/compile_errors/@import_zon_type_expr_tuple.zig
new file mode 100644
index 000000000000..53bb497ff7de
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_type_expr_tuple.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: struct { f32, f32 } = @import("zon/type_expr_tuple.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/type_expr_tuple.zon
+//
+// type_expr_tuple.zon:1:1: error: ZON cannot contain type expressions
+// tmp.zig:2:44: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_type_mismatch.zig b/test/cases/compile_errors/@import_zon_type_mismatch.zig
new file mode 100644
index 000000000000..530ee5c147a6
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_type_mismatch.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: bool = @import("zon/struct.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/struct.zon
+//
+// struct.zon:1:2: error: expected type 'bool'
+// tmp.zig:2:29: note: imported here
diff --git a/test/cases/compile_errors/@import_zon_unescaped_newline.zig b/test/cases/compile_errors/@import_zon_unescaped_newline.zig
new file mode 100644
index 000000000000..a342802ecb54
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_unescaped_newline.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: i8 = @import("zon/unescaped_newline.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/unescaped_newline.zon
+//
+// unescaped_newline.zon:1:1: error: expected expression, found 'invalid token'
+// unescaped_newline.zon:1:3: note: invalid byte: '\n'
diff --git a/test/cases/compile_errors/@import_zon_unknown_ident.zig b/test/cases/compile_errors/@import_zon_unknown_ident.zig
new file mode 100644
index 000000000000..cbc37f3a7623
--- /dev/null
+++ b/test/cases/compile_errors/@import_zon_unknown_ident.zig
@@ -0,0 +1,12 @@
+pub fn main() void {
+    const f: struct { value: bool } = @import("zon/unknown_ident.zon");
+    _ = f;
+}
+
+// error
+// backend=stage2
+// output_mode=Exe
+// imports=zon/unknown_ident.zon
+//
+// unknown_ident.zon:2:14: error: expected type 'bool'
+// tmp.zig:2:47: note: imported here
diff --git a/test/cases/compile_errors/zon/addr_slice.zon b/test/cases/compile_errors/zon/addr_slice.zon
new file mode 100644
index 000000000000..a0bfcba08fee
--- /dev/null
+++ b/test/cases/compile_errors/zon/addr_slice.zon
@@ -0,0 +1,3 @@
+.{
+    .value = &.{ 1, 2, 3 },
+}
diff --git a/test/cases/compile_errors/zon/array.zon b/test/cases/compile_errors/zon/array.zon
new file mode 100644
index 000000000000..8c639ac4cd2a
--- /dev/null
+++ b/test/cases/compile_errors/zon/array.zon
@@ -0,0 +1 @@
+.{ 'a', 'b', 'c' }
diff --git a/test/cases/compile_errors/zon/desktop.ini b/test/cases/compile_errors/zon/desktop.ini
new file mode 100644
index 000000000000..0a364e9f2edf
--- /dev/null
+++ b/test/cases/compile_errors/zon/desktop.ini
@@ -0,0 +1,3 @@
+[LocalizedFileNames]
+invalid_zon_2.zig=@invalid_zon_2.zig,0
+invalid_zon_1.zig=@invalid_zon_1.zig,0
diff --git a/test/cases/compile_errors/zon/double_negation_float.zon b/test/cases/compile_errors/zon/double_negation_float.zon
new file mode 100644
index 000000000000..646b67f8f817
--- /dev/null
+++ b/test/cases/compile_errors/zon/double_negation_float.zon
@@ -0,0 +1 @@
+--1.0
\ No newline at end of file
diff --git a/test/cases/compile_errors/zon/double_negation_int.zon b/test/cases/compile_errors/zon/double_negation_int.zon
new file mode 100644
index 000000000000..bfbbe2bf6abb
--- /dev/null
+++ b/test/cases/compile_errors/zon/double_negation_int.zon
@@ -0,0 +1 @@
+--1
\ No newline at end of file
diff --git a/test/cases/compile_errors/zon/enum_embedded_null.zon b/test/cases/compile_errors/zon/enum_embedded_null.zon
new file mode 100644
index 000000000000..9e5b888e1205
--- /dev/null
+++ b/test/cases/compile_errors/zon/enum_embedded_null.zon
@@ -0,0 +1,4 @@
+.{
+    .@"\x00",
+    10,
+}
diff --git a/test/cases/compile_errors/zon/invalid_character.zon b/test/cases/compile_errors/zon/invalid_character.zon
new file mode 100644
index 000000000000..7d2e60640845
--- /dev/null
+++ b/test/cases/compile_errors/zon/invalid_character.zon
@@ -0,0 +1 @@
+'\a'
diff --git a/test/cases/compile_errors/zon/invalid_number.zon b/test/cases/compile_errors/zon/invalid_number.zon
new file mode 100644
index 000000000000..0c96bf6190c3
--- /dev/null
+++ b/test/cases/compile_errors/zon/invalid_number.zon
@@ -0,0 +1 @@
+368934881474191032a32
diff --git a/test/cases/compile_errors/zon/invalid_string.zon b/test/cases/compile_errors/zon/invalid_string.zon
new file mode 100644
index 000000000000..aed60487ca94
--- /dev/null
+++ b/test/cases/compile_errors/zon/invalid_string.zon
@@ -0,0 +1 @@
+"\"\a\""
diff --git a/test/cases/compile_errors/zon/large_number.zon b/test/cases/compile_errors/zon/large_number.zon
new file mode 100644
index 000000000000..1ce484120aa1
--- /dev/null
+++ b/test/cases/compile_errors/zon/large_number.zon
@@ -0,0 +1 @@
+36893488147419103232
diff --git a/test/cases/compile_errors/zon/leading_zero_in_integer.zon b/test/cases/compile_errors/zon/leading_zero_in_integer.zon
new file mode 100644
index 000000000000..58aba07363df
--- /dev/null
+++ b/test/cases/compile_errors/zon/leading_zero_in_integer.zon
@@ -0,0 +1 @@
+0012
\ No newline at end of file
diff --git a/test/cases/compile_errors/zon/negative_zero.zon b/test/cases/compile_errors/zon/negative_zero.zon
new file mode 100644
index 000000000000..16593f0b75b7
--- /dev/null
+++ b/test/cases/compile_errors/zon/negative_zero.zon
@@ -0,0 +1 @@
+-0
\ No newline at end of file
diff --git a/test/cases/compile_errors/zon/struct.zon b/test/cases/compile_errors/zon/struct.zon
new file mode 100644
index 000000000000..85ce0281afd5
--- /dev/null
+++ b/test/cases/compile_errors/zon/struct.zon
@@ -0,0 +1,4 @@
+.{
+    .boolean = true,
+    .number = 123,
+}
diff --git a/test/cases/compile_errors/zon/struct_dup_field.zon b/test/cases/compile_errors/zon/struct_dup_field.zon
new file mode 100644
index 000000000000..9363e2f53e14
--- /dev/null
+++ b/test/cases/compile_errors/zon/struct_dup_field.zon
@@ -0,0 +1,4 @@
+.{
+    .name = 10,
+    .name = 20,
+}
diff --git a/test/cases/compile_errors/zon/syntax_error.zon b/test/cases/compile_errors/zon/syntax_error.zon
new file mode 100644
index 000000000000..237d3445c8eb
--- /dev/null
+++ b/test/cases/compile_errors/zon/syntax_error.zon
@@ -0,0 +1,4 @@
+.{
+    .boolean = true
+    .number = 123,
+}
diff --git a/test/cases/compile_errors/zon/type_decl.zon b/test/cases/compile_errors/zon/type_decl.zon
new file mode 100644
index 000000000000..bdac762f5475
--- /dev/null
+++ b/test/cases/compile_errors/zon/type_decl.zon
@@ -0,0 +1,3 @@
+.{
+    .foo = struct {},
+}
diff --git a/test/cases/compile_errors/zon/type_expr_array.zon b/test/cases/compile_errors/zon/type_expr_array.zon
new file mode 100644
index 000000000000..2c76347ae081
--- /dev/null
+++ b/test/cases/compile_errors/zon/type_expr_array.zon
@@ -0,0 +1 @@
+[3]i32{1, 2, 3}
\ No newline at end of file
diff --git a/test/cases/compile_errors/zon/type_expr_fn.zon b/test/cases/compile_errors/zon/type_expr_fn.zon
new file mode 100644
index 000000000000..8c614f11a8e8
--- /dev/null
+++ b/test/cases/compile_errors/zon/type_expr_fn.zon
@@ -0,0 +1 @@
+fn foo() void {}
diff --git a/test/cases/compile_errors/zon/type_expr_struct.zon b/test/cases/compile_errors/zon/type_expr_struct.zon
new file mode 100644
index 000000000000..24d9a64e1168
--- /dev/null
+++ b/test/cases/compile_errors/zon/type_expr_struct.zon
@@ -0,0 +1 @@
+Vec2{ .x = 1.0, .y = 2.0 }
\ No newline at end of file
diff --git a/test/cases/compile_errors/zon/type_expr_tuple.zon b/test/cases/compile_errors/zon/type_expr_tuple.zon
new file mode 100644
index 000000000000..4281dce0f579
--- /dev/null
+++ b/test/cases/compile_errors/zon/type_expr_tuple.zon
@@ -0,0 +1 @@
+Vec2{1.0, 2.0}
\ No newline at end of file
diff --git a/test/cases/compile_errors/zon/unescaped_newline.zon b/test/cases/compile_errors/zon/unescaped_newline.zon
new file mode 100644
index 000000000000..f53e156553ac
--- /dev/null
+++ b/test/cases/compile_errors/zon/unescaped_newline.zon
@@ -0,0 +1,2 @@
+"a
+b"
\ No newline at end of file
diff --git a/test/cases/compile_errors/zon/unknown_ident.zon b/test/cases/compile_errors/zon/unknown_ident.zon
new file mode 100644
index 000000000000..3e49454f1baa
--- /dev/null
+++ b/test/cases/compile_errors/zon/unknown_ident.zon
@@ -0,0 +1,3 @@
+.{
+    .value = truefalse,
+}
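The harness changes below wire the new `imports=` manifest key through test/src/Cases.zig: each listed file is copied into the same WriteFile directory as the generated tmp.zig so the relative `@import("zon/...")` path resolves. A hedged sketch of that mechanism in isolation (hypothetical helper name and paths, loosely mirroring what lowerToBuildSteps does):

const std = @import("std");

// Sketch: the case source and the copied fixture end up side by side in one
// WriteFile step, so the compile-error case can @import("zon/example.zon").
fn addCaseWithImport(
    b: *std.Build,
    case_src: []const u8,
    zon_abs_path: []const u8,
) std.Build.LazyPath {
    const writefiles = b.addWriteFiles();
    const root_source_file = writefiles.add("tmp.zig", case_src);
    _ = writefiles.addCopyFile(.{ .cwd_relative = zon_abs_path }, "zon/example.zon");
    return root_source_file;
}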
diff --git a/test/src/Cases.zig b/test/src/Cases.zig
index 208e8a6ce17d..c1fee4e30672 100644
--- a/test/src/Cases.zig
+++ b/test/src/Cases.zig
@@ -90,6 +90,11 @@ pub const Case = struct {
     link_libc: bool = false,
     pic: ?bool = null,
     pie: ?bool = null,
+    /// A list of imports to cache alongside the source file.
+    imports: []const []const u8 = &.{},
+    /// Where to look for imports relative to the `cases_dir_path` given to
+    /// `lower_to_build_steps`. If null, file imports will assert.
+    import_path: ?[]const u8 = null,

    deps: std.ArrayList(DepModule),
@@ -414,6 +419,7 @@
    const pic = try manifest.getConfigForKeyAssertSingle("pic", ?bool);
    const pie = try manifest.getConfigForKeyAssertSingle("pie", ?bool);
    const emit_bin = try manifest.getConfigForKeyAssertSingle("emit_bin", bool);
+    const imports = try manifest.getConfigForKeyAlloc(ctx.arena, "imports", []const u8);

    if (manifest.type == .translate_c) {
        for (c_frontends) |c_frontend| {
@@ -471,7 +477,7 @@ fn addFromDirInner(
            const next = ctx.cases.items.len;
            try ctx.cases.append(.{
                .name = std.fs.path.stem(filename),
-                .target = resolved_target,
+                .import_path = std.fs.path.dirname(filename),
                .backend = backend,
                .updates = std.ArrayList(Cases.Update).init(ctx.cases.allocator),
                .emit_bin = emit_bin,
@@ -481,6 +487,8 @@ fn addFromDirInner(
                .pic = pic,
                .pie = pie,
                .deps = std.ArrayList(DepModule).init(ctx.cases.allocator),
+                .imports = imports,
+                .target = b.resolveTargetQuery(target_query),
            });
            try cases.append(next);
        }
@@ -617,6 +625,7 @@ pub fn lowerToBuildSteps(
) void {
    const host = std.zig.system.resolveTargetQuery(.{}) catch |err|
        std.debug.panic("unable to detect native host: {s}\n", .{@errorName(err)});
+    const cases_dir_path = b.build_root.join(b.allocator, &.{ "test", "cases" }) catch @panic("OOM");

    for (self.incremental_cases.items) |incr_case| {
        if (true) {
@@ -660,6 +669,15 @@
            file_sources.put(file.path, writefiles.add(file.path, file.src)) catch @panic("OOM");
        }

+        for (case.imports) |import_rel| {
+            const import_abs = std.fs.path.join(b.allocator, &.{
+                cases_dir_path,
+                case.import_path orelse @panic("import_path not set"),
+                import_rel,
+            }) catch @panic("OOM");
+            _ = writefiles.addCopyFile(.{ .cwd_relative = import_abs }, import_rel);
+        }
+
        const artifact = if (case.is_test) b.addTest(.{
            .root_source_file = root_source_file,
            .name = case.name,
@@ -962,6 +980,8 @@ const TestManifestConfigDefaults = struct {
            return "null";
        } else if (std.mem.eql(u8, key, "pie")) {
            return "null";
+        } else if (std.mem.eql(u8, key, "imports")) {
+            return "";
+        } else unreachable;
    }
};
@@ -998,6 +1018,7 @@ const TestManifest = struct {
        .{ "backend", {} },
        .{ "pic", {} },
        .{ "pie", {} },
+        .{ "imports", {} },
    });

    const Type = enum {
@@ -1020,7 +1041,7 @@ fn ConfigValueIterator(comptime T: type) type {
        return struct {
-            inner: std.mem.SplitIterator(u8, .scalar),
+            inner: std.mem.TokenIterator(u8, .scalar),

            fn next(self: *@This()) !?T {
                const next_raw = self.inner.next() orelse return null;
@@ -1098,7 +1119,9 @@
                // Parse key=value(s)
                var kv_it = std.mem.splitScalar(u8, trimmed, '=');
                const key = kv_it.first();
-                if (!valid_keys.has(key)) return error.InvalidKey;
+                if (!valid_keys.has(key)) {
+                    return error.InvalidKey;
+                }
                try manifest.config_map.putNoClobber(key, kv_it.next() orelse return error.MissingValuesForConfig);
            }
@@ -1115,7 +1138,7 @@
    ) ConfigValueIterator(T) {
        const bytes = self.config_map.get(key) orelse TestManifestConfigDefaults.get(self.type, key);
        return ConfigValueIterator(T){
-            .inner = std.mem.splitScalar(u8, bytes, ','),
+            .inner = std.mem.tokenizeScalar(u8, bytes, ','),
        };
    }
@@ -1232,6 +1255,18 @@
                        return try getDefaultParser(o.child)(str);
                    }
                }.parse,
+            .@"struct" => @compileError("no default parser for " ++ @typeName(T)),
+            .pointer => {
+                if (T == []const u8) {
+                    return struct {
+                        fn parse(str: []const u8) anyerror!T {
+                            return str;
+                        }
+                    }.parse;
+                } else {
+                    @compileError("no default parser for " ++ @typeName(T));
+                }
+            },
            else => @compileError("no default parser for " ++ @typeName(T)),
        }
    }
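One subtlety in the manifest parsing above: the default value for `imports` is the empty string, and `ConfigValueIterator` now tokenizes rather than splits, presumably so an empty default yields zero entries instead of a single empty one. A minimal sketch of that difference in isolation:

const std = @import("std");

test "split vs tokenize on an empty config value" {
    var split = std.mem.splitScalar(u8, "", ',');
    try std.testing.expectEqualStrings("", split.next().?); // one empty entry

    var tokens = std.mem.tokenizeScalar(u8, "", ',');
    try std.testing.expectEqual(null, tokens.next()); // no entries at all
}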