Skip to content

Commit 62e0a3d

Browse files
der-teufel-programming and ikskuh
authored and committed
Other small updates
1 parent 7501c4b commit 62e0a3d

File tree

5 files changed

+24
-15
lines changed

5 files changed

+24
-15
lines changed

src/Diagnostics.zig

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,11 @@ const Location = @import("Location.zig");
66
const Self = @This();
77

88
memory: std.heap.ArenaAllocator,
9-
errors: std.ArrayListUnmanaged(Error) = .{},
9+
errors: std.ArrayList(Error) = .{},
1010

1111
pub fn init(allocator: std.mem.Allocator) Self {
1212
return Self{
13-
.memory = std.heap.ArenaAllocator.init(allocator),
13+
.memory = .init(allocator),
1414
};
1515
}
1616

@@ -22,20 +22,26 @@ pub fn deinit(self: *Self) void {
2222
pub fn print(self: Self, writer: anytype) !void {
2323
for (self.errors.items) |err| {
2424
const source = err.location.source orelse "???";
25-
try writer.print("{s}:{d}:{d}: {s}: {s}\n", .{
25+
try writer.print("{s}:{d}:{d}: {t}: {s}\n", .{
2626
source,
2727
err.location.line,
2828
err.location.column,
29-
@tagName(err.level),
29+
err.level,
3030
err.message,
3131
});
3232
}
3333
}
3434

35-
pub fn emit(self: *Self, location: Location, level: Error.Level, comptime fmt: []const u8, args: anytype) !void {
35+
pub fn emit(
36+
self: *Self,
37+
location: Location,
38+
level: Error.Level,
39+
comptime fmt: []const u8,
40+
args: anytype,
41+
) !void {
3642
const allocator = self.memory.allocator();
3743

38-
const str = try std.fmt.allocPrintZ(allocator, fmt, args);
44+
const str = try std.fmt.allocPrintSentinel(allocator, fmt, args, 0);
3945
errdefer allocator.free(str);
4046

4147
try self.errors.append(allocator, Error{

src/Location.zig

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,11 +13,12 @@ pub fn min(a: Location, b: Location) Location {
1313
if (!std.mem.eql(u8, a.source.?, b.source.?))
1414
@panic("a and b must be from the same source file!");
1515
}
16-
var loc = Location{
16+
var loc: Location = .{
1717
.line = undefined,
1818
.column = undefined,
1919
.source = a.source orelse b.source,
2020
};
21+
2122
if (a.line < b.line) {
2223
loc.line = a.line;
2324
loc.column = a.column;
@@ -36,11 +37,12 @@ pub fn max(a: Location, b: Location) Location {
3637
if (!std.mem.eql(u8, a.source.?, b.source.?))
3738
@panic("a and b must be from the same source file!");
3839
}
39-
var loc = Location{
40+
var loc: Location = .{
4041
.line = undefined,
4142
.column = undefined,
4243
.source = a.source orelse b.source,
4344
};
45+
4446
if (a.line > b.line) {
4547
loc.line = a.line;
4648
loc.column = a.column;

src/StringCache.zig

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ items: std.StringHashMapUnmanaged(void),
1111

1212
pub fn init(allocator: std.mem.Allocator) StringCache {
1313
return StringCache{
14-
.arena = std.heap.ArenaAllocator.init(allocator),
14+
.arena = .init(allocator),
1515
.items = .{},
1616
};
1717
}

src/parser_core.zig

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ pub fn ParserCore(comptime TokenizerT: type, comptime ignore_list: anytype) type
2626
/// The core will only reference the Tokenizer and will modify
2727
/// it's state.
2828
pub fn init(tokenizer: *Tokenizer) Self {
29-
return Self{
29+
return .{
3030
.tokenizer = tokenizer,
3131
};
3232
}

src/tokenizer.zig

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ pub fn Pattern(comptime TokenType: type) type {
1313
match: Matcher,
1414

1515
pub fn create(token_type: TokenType, match: Matcher) Self {
16-
return Self{
16+
return .{
1717
.type = token_type,
1818
.match = match,
1919
};
@@ -40,7 +40,7 @@ pub fn Tokenizer(comptime TokenTypeT: type, comptime patterns: []const Pattern(T
4040
return Self{
4141
.source = source,
4242
.offset = 0,
43-
.current_location = Location{
43+
.current_location = .{
4444
.source = file_name,
4545
.line = 1,
4646
.column = 1,
@@ -49,7 +49,7 @@ pub fn Tokenizer(comptime TokenTypeT: type, comptime patterns: []const Pattern(T
4949
}
5050

5151
pub fn saveState(self: Self) State {
52-
return State{
52+
return .{
5353
.offset = self.offset,
5454
.location = self.current_location,
5555
};
@@ -65,17 +65,18 @@ pub fn Tokenizer(comptime TokenTypeT: type, comptime patterns: []const Pattern(T
6565
const rest = self.source[self.offset..];
6666
if (rest.len == 0)
6767
return null;
68-
const maybe_token = for (patterns) |pat| {
68+
const maybe_token: ?Token = for (patterns) |pat| {
6969
if (pat.match(rest)) |len| {
7070
if (len > 0) {
71-
break Token{
71+
break .{
7272
.location = self.current_location,
7373
.text = rest[0..len],
7474
.type = pat.type,
7575
};
7676
}
7777
}
7878
} else null;
79+
7980
if (maybe_token) |token| {
8081
self.offset += token.text.len;
8182
self.current_location.advance(token.text);

0 commit comments

Comments (0)