diff options
author | Baitinq <manuelpalenzuelamerino@gmail.com> | 2025-01-06 00:37:42 +0100 |
---|---|---|
committer | Baitinq <manuelpalenzuelamerino@gmail.com> | 2025-01-06 00:48:47 +0100 |
commit | 2567e2d506059bcf81940a75496c46d12ec8d856 (patch) | |
tree | 76b43cf2d070b6e2c72ed5cdffaf0003e0106465 | |
parent | File reading (diff) | |
download | interpreter-2567e2d506059bcf81940a75496c46d12ec8d856.tar.gz interpreter-2567e2d506059bcf81940a75496c46d12ec8d856.tar.bz2 interpreter-2567e2d506059bcf81940a75496c46d12ec8d856.zip |
Add tokenizer test
-rw-r--r-- | src/main.zig | 10 | ||||
-rw-r--r-- | src/tokenizer.zig | 67 |
2 files changed, 72 insertions, 5 deletions
diff --git a/src/main.zig b/src/main.zig index e90f453..41c33fa 100644 --- a/src/main.zig +++ b/src/main.zig @@ -18,6 +18,12 @@ pub fn main() !void { const buf = try file.readToEndAlloc(allocator, 1 * 1024 * 1024); defer allocator.free(buf); - var tknizer = try tokenizer.Tokenizer.init(buf); - std.debug.print("Next: {any}\n", .{tknizer.next()}); + std.debug.print("Buf:\n{s}\n", .{buf}); + + var sourceTokenizer = try tokenizer.Tokenizer.init(buf); + std.debug.print("Next: {any}\n", .{sourceTokenizer.next()}); +} + +test { + std.testing.refAllDecls(@This()); } diff --git a/src/tokenizer.zig b/src/tokenizer.zig index a69b4eb..f2b013e 100644 --- a/src/tokenizer.zig +++ b/src/tokenizer.zig @@ -1,3 +1,5 @@ +const std = @import("std"); + pub const Tokenizer = struct { buf: []u8, offset: u32, @@ -7,10 +9,69 @@ pub const Tokenizer = struct { } pub fn next(_: *Tokenizer) ?Token { - return Token.HI; + return null; + // return Token{ + // .LET = void{}, + // }; } }; -pub const Token = enum { - HI, +const TokenType = enum { + // Keywords + LET, + PRINT, + + // Identifiers + IDENTIFIER, + + // Literals + NUMBER, + STRING, + + // Operators + EQUALS, + + // Punctuation + SEMICOLON, + LPAREN, + RPAREN, }; + +pub const Token = union(TokenType) { + LET: void, + PRINT: void, + IDENTIFIER: []u8, + NUMBER: i64, + STRING: []u8, + EQUALS: void, + SEMICOLON: void, + LPAREN: void, + RPAREN: void, +}; + +test "simple" { + const buf = + \\ let i = 2; + \\ + \\ print(i); + ; + + var tokenizer = try Tokenizer.init(@constCast(buf)); + + var token_list = std.ArrayList(Token).init(std.testing.allocator); + while (tokenizer.next()) |token| { + try token_list.append(token); + } + try std.testing.expectEqualSlices(Token, &.{ + Token{ .LET = void{} }, + Token{ .IDENTIFIER = @constCast("i") }, + Token{ .EQUALS = void{} }, + Token{ .NUMBER = 2 }, + Token{ .SEMICOLON = void{} }, + Token{ .PRINT = void{} }, + Token{ .LPAREN = void{} }, + Token{ .IDENTIFIER = @constCast("i") }, + Token{ .RPAREN = void{} }, 
+ Token{ .SEMICOLON = void{} }, + }, token_list.items); +}