summary refs log tree commit diff
diff options
context:
space:
mode:
author    Baitinq <manuelpalenzuelamerino@gmail.com>  2025-01-11 01:30:08 +0100
committer Baitinq <manuelpalenzuelamerino@gmail.com>  2025-01-11 01:30:08 +0100
commit    ad6f89a8f2afebe0afbb0015cd5508baef7bbaaf (patch)
tree      b9f232ea9e51335bdf9bc29f92429c1a0963b299
parent    Tokenizer: Fix parsing end-of-buf strings (diff)
download  interpreter-ad6f89a8f2afebe0afbb0015cd5508baef7bbaaf.tar.gz
          interpreter-ad6f89a8f2afebe0afbb0015cd5508baef7bbaaf.tar.bz2
          interpreter-ad6f89a8f2afebe0afbb0015cd5508baef7bbaaf.zip
Tokenizer: Improve tests
-rw-r--r--  src/tokenizer.zig  69
1 file changed, 45 insertions(+), 24 deletions(-)
diff --git a/src/tokenizer.zig b/src/tokenizer.zig
index 35da280..1d28f72 100644
--- a/src/tokenizer.zig
+++ b/src/tokenizer.zig
@@ -87,29 +87,50 @@ pub const Token = union(TokenType) {
 };
 
 test "simple" {
-    const buf =
-        \\ let i = 2;
-        \\
-        \\ print(i);
-    ;
-
-    var token_list = std.ArrayList(Token).init(std.testing.allocator);
-    defer token_list.deinit();
-
-    var tokenizer = try Tokenizer.init(@constCast(buf));
-    while (tokenizer.next()) |token| {
-        try token_list.append(token);
+    // TODO: Add invalid src test
+    const tests = [_]struct {
+        buf: []u8,
+        tokens: []const Token,
+    }{
+        .{
+            .buf = @constCast(
+                \\ let i = 2;
+                \\
+                \\ print(i);
+            ),
+            .tokens = &[_]Token{
+                Token{ .LET = {} },
+                Token{ .IDENTIFIER = @constCast("i") },
+                Token{ .EQUALS = {} },
+                Token{ .NUMBER = 2 },
+                Token{ .SEMICOLON = {} },
+                Token{ .PRINT = {} },
+                Token{ .LPAREN = {} },
+                Token{ .IDENTIFIER = @constCast("i") },
+                Token{ .RPAREN = {} },
+                Token{ .SEMICOLON = {} },
+            },
+        },
+        .{
+            .buf = @constCast(
+                \\
+                \\ let hello
+            ),
+            .tokens = &[_]Token{
+                Token{ .LET = {} },
+                Token{ .IDENTIFIER = @constCast("hello") },
+            },
+        },
+    };
+
+    for (tests) |t| {
+        var token_list = std.ArrayList(Token).init(std.testing.allocator);
+        defer token_list.deinit();
+
+        var tokenizer = try Tokenizer.init(t.buf);
+        while (tokenizer.next()) |token| {
+            try token_list.append(token);
+        }
+        try std.testing.expectEqualDeep(t.tokens, token_list.items);
     }
-    try std.testing.expectEqualDeep(&.{
-        Token{ .LET = void{} },
-        Token{ .IDENTIFIER = @constCast("i") },
-        Token{ .EQUALS = void{} },
-        Token{ .NUMBER = 2 },
-        Token{ .SEMICOLON = void{} },
-        Token{ .PRINT = void{} },
-        Token{ .LPAREN = void{} },
-        Token{ .IDENTIFIER = @constCast("i") },
-        Token{ .RPAREN = void{} },
-        Token{ .SEMICOLON = void{} },
-    }, token_list.items);
 }