| field | value | date |
|---|---|---|
| author | Baitinq <[email protected]> | 2025-01-11 01:30:08 +0100 |
| committer | Baitinq <[email protected]> | 2025-01-11 01:30:08 +0100 |
| commit | c4148145fd7e0411a1d58a0b076d5233f3788926 (patch) | |
| tree | b9f232ea9e51335bdf9bc29f92429c1a0963b299 /src/tokenizer.zig | |
| parent | Tokenizer: Fix parsing end-of-buf strings (diff) | |
| download | interpreter-c4148145fd7e0411a1d58a0b076d5233f3788926.tar.gz interpreter-c4148145fd7e0411a1d58a0b076d5233f3788926.tar.bz2 interpreter-c4148145fd7e0411a1d58a0b076d5233f3788926.zip | |
Tokenizer: Improve tests
Diffstat (limited to 'src/tokenizer.zig')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/tokenizer.zig | 69 |

1 file changed, 45 insertions, 24 deletions
```diff
diff --git a/src/tokenizer.zig b/src/tokenizer.zig
index 35da280..1d28f72 100644
--- a/src/tokenizer.zig
+++ b/src/tokenizer.zig
@@ -87,29 +87,50 @@ pub const Token = union(TokenType) {
 };
 
 test "simple" {
-    const buf =
-        \\ let i = 2;
-        \\
-        \\ print(i);
-    ;
-
-    var token_list = std.ArrayList(Token).init(std.testing.allocator);
-    defer token_list.deinit();
-
-    var tokenizer = try Tokenizer.init(@constCast(buf));
-    while (tokenizer.next()) |token| {
-        try token_list.append(token);
+    // TODO: Add invalid src test
+    const tests = [_]struct {
+        buf: []u8,
+        tokens: []const Token,
+    }{
+        .{
+            .buf = @constCast(
+                \\ let i = 2;
+                \\
+                \\ print(i);
+            ),
+            .tokens = &[_]Token{
+                Token{ .LET = {} },
+                Token{ .IDENTIFIER = @constCast("i") },
+                Token{ .EQUALS = {} },
+                Token{ .NUMBER = 2 },
+                Token{ .SEMICOLON = {} },
+                Token{ .PRINT = {} },
+                Token{ .LPAREN = {} },
+                Token{ .IDENTIFIER = @constCast("i") },
+                Token{ .RPAREN = {} },
+                Token{ .SEMICOLON = {} },
+            },
+        },
+        .{
+            .buf = @constCast(
+                \\
+                \\ let hello
+            ),
+            .tokens = &[_]Token{
+                Token{ .LET = {} },
+                Token{ .IDENTIFIER = @constCast("hello") },
+            },
+        },
+    };
+
+    for (tests) |t| {
+        var token_list = std.ArrayList(Token).init(std.testing.allocator);
+        defer token_list.deinit();
+
+        var tokenizer = try Tokenizer.init(t.buf);
+        while (tokenizer.next()) |token| {
+            try token_list.append(token);
+        }
+        try std.testing.expectEqualDeep(t.tokens, token_list.items);
     }
-    try std.testing.expectEqualDeep(&.{
-        Token{ .LET = void{} },
-        Token{ .IDENTIFIER = @constCast("i") },
-        Token{ .EQUALS = void{} },
-        Token{ .NUMBER = 2 },
-        Token{ .SEMICOLON = void{} },
-        Token{ .PRINT = void{} },
-        Token{ .LPAREN = void{} },
-        Token{ .IDENTIFIER = @constCast("i") },
-        Token{ .RPAREN = void{} },
-        Token{ .SEMICOLON = void{} },
-    }, token_list.items);
 }
```
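The commit turns the single "simple" test into a table-driven suite, so covering new inputs only takes another entry in `tests` (or another small test in the same style). As a minimal, hedged sketch that is not part of this commit, one more case might look like the block below; it assumes it lives in `src/tokenizer.zig`, where `std`, `Token`, and `Tokenizer` are already in scope, and the input and expected tokens are purely illustrative.

```zig
// Hypothetical extra test case (a sketch, not from the commit), written in the
// same style as the table-driven test above. Assumes it sits inside
// src/tokenizer.zig so that `std`, `Token`, and `Tokenizer` are already defined.
test "simple: extra case (sketch)" {
    const buf: []u8 = @constCast(
        \\ let x = 5;
    );
    // Expected token stream for the source above, mirroring the variants used
    // in the existing test (LET, IDENTIFIER, EQUALS, NUMBER, SEMICOLON).
    const expected: []const Token = &[_]Token{
        Token{ .LET = {} },
        Token{ .IDENTIFIER = @constCast("x") },
        Token{ .EQUALS = {} },
        Token{ .NUMBER = 5 },
        Token{ .SEMICOLON = {} },
    };

    var token_list = std.ArrayList(Token).init(std.testing.allocator);
    defer token_list.deinit();

    // Drive the tokenizer to exhaustion and collect everything it produces.
    var tokenizer = try Tokenizer.init(buf);
    while (tokenizer.next()) |token| {
        try token_list.append(token);
    }

    try std.testing.expectEqualDeep(expected, token_list.items);
}
```

Assuming a standard Zig project layout, the file's tests could then presumably be run with `zig test src/tokenizer.zig`, or through the project's `zig build test` step if one is defined.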