diff options
| author | Baitinq <[email protected]> | 2025-05-10 13:25:41 +0200 |
|---|---|---|
| committer | Baitinq <[email protected]> | 2025-05-10 13:25:50 +0200 |
| commit | 36f0b2f486fbec48972a96581576b5008062be6f (patch) | |
| tree | a3a74d72aea5df49059e7f14a63f17c62154ea42 /src/tokenizer.zig | |
| parent | Misc: Add gitattributes (diff) | |
| download | interpreter-36f0b2f486fbec48972a96581576b5008062be6f.tar.gz interpreter-36f0b2f486fbec48972a96581576b5008062be6f.tar.bz2 interpreter-36f0b2f486fbec48972a96581576b5008062be6f.zip | |
Feature: Add support for imports
Diffstat (limited to '')
| -rw-r--r-- | src/tokenizer.zig | 16 |
1 file changed, 15 insertions, 1 deletion
diff --git a/src/tokenizer.zig b/src/tokenizer.zig index 2b57b8d..53e5c63 100644 --- a/src/tokenizer.zig +++ b/src/tokenizer.zig @@ -6,6 +6,7 @@ const TokenizerError = error{ pub const TokenType = union(enum) { // Keywords + IMPORT: void, LET: void, EXTERN: void, IF: void, @@ -64,13 +65,26 @@ pub const Tokenizer = struct { return Tokenizer{ .buf = buf, .offset = 0, .arena = arena }; } - pub fn next(self: *Tokenizer) TokenizerError!?Token { + pub fn tokenize(self: *Tokenizer) ![]Token { + var token_list = std.ArrayList(Token).init(self.arena); + + while (try self.next()) |token| { + std.debug.print("{any}\n", .{token}); + try token_list.append(token); + } + + return token_list.items; + } + + fn next(self: *Tokenizer) TokenizerError!?Token { self.skip_whitespace(); self.skip_comments(); self.skip_whitespace(); if (self.offset >= self.buf.len) return null; + if (self.accept_string("import")) return self.create_token(.{ .IMPORT = void{} }); + if (self.accept_string("let")) return self.create_token(.{ .LET = void{} }); if (self.accept_string("extern")) return self.create_token(.{ .EXTERN = void{} }); if (self.accept_string("if")) return self.create_token(.{ .IF = void{} }); |