summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author    Baitinq <manuelpalenzuelamerino@gmail.com> 2025-01-06 12:47:53 +0100
committer Baitinq <manuelpalenzuelamerino@gmail.com> 2025-01-06 13:13:52 +0100
commit    c53b16cc32c26c1ecea2dfee38ca3240cde8e3f8 (patch)
tree      49cc589621dd3a84640786f2cc1ed92ae0b42dfc /src
parent    Add language grammar (diff)
download  interpreter-c53b16cc32c26c1ecea2dfee38ca3240cde8e3f8.tar.gz
          interpreter-c53b16cc32c26c1ecea2dfee38ca3240cde8e3f8.tar.bz2
          interpreter-c53b16cc32c26c1ecea2dfee38ca3240cde8e3f8.zip
Start writing parser
Diffstat (limited to 'src')
-rw-r--r--  src/main.zig   | 13
-rw-r--r--  src/parser.zig | 56
2 files changed, 69 insertions(+), 0 deletions(-)
diff --git a/src/main.zig b/src/main.zig
index 3be7b14..128ad7e 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -1,11 +1,14 @@
 const std = @import("std");
 const tokenizer = @import("tokenizer.zig");
+const parser = @import("parser.zig");
 
 pub fn main() !void {
     const pathLen = std.mem.len(std.os.argv[1]);
     const path = std.os.argv[1][0..pathLen];
     std.debug.print("Tokenizing! {s}\n", .{path});
 
+    //TODO: Repl mode
+
     const file = try std.fs.cwd().openFile(path, .{});
 
     var gpa = std.heap.GeneralPurposeAllocator(.{}){};
@@ -20,10 +23,20 @@ pub fn main() !void {
 
     std.debug.print("Buf:\n{s}\n", .{buf});
 
+    var token_list = std.ArrayList(tokenizer.Token).init(allocator);
+    defer token_list.deinit();
+
     var sourceTokenizer = try tokenizer.Tokenizer.init(buf);
     while (sourceTokenizer.next()) |token| {
+        try token_list.append(token);
+    }
+
+    for (token_list.items) |token| {
         std.debug.print("{any}\n", .{token});
     }
+
+    const ast = try parser.Parser.parse(token_list.items);
+    std.debug.print("AST: {any}\n", .{ast});
 }
 
 test {
diff --git a/src/parser.zig b/src/parser.zig
new file mode 100644
index 0000000..c92ca0c
--- /dev/null
+++ b/src/parser.zig
@@ -0,0 +1,56 @@
+const std = @import("std");
+const tokenizer = @import("tokenizer.zig");
+
+const NodeType = enum {
+    PROGRAM,
+    VARIABLE_STATEMENT,
+    PRINT_STATEMENT,
+    NUMBER,
+    IDENTIFIER,
+};
+
+pub const Node = union(NodeType) {
+    PROGRAM: struct {
+        statements: []*Node,
+    },
+    VARIABLE_STATEMENT: struct {
+        is_declaration: bool,
+        name: []const u8,
+        expression: *Node,
+    },
+    PRINT_STATEMENT: struct {
+        expression: *Node,
+    },
+    NUMBER: struct {
+        value: i32,
+    },
+    IDENTIFIER: struct {
+        name: []const u8,
+    },
+};
+
+pub const Parser = struct {
+    pub fn parse(_: []tokenizer.Token) !Node {
+        return Node{
+            .NUMBER = .{ .value = 9 },
+        };
+    }
+};
+
+test "simple" {
+    const tokens: []tokenizer.Token = @constCast(&[_]tokenizer.Token{
+        tokenizer.Token{ .LET = void{} },
+        tokenizer.Token{ .IDENTIFIER = @constCast("i") },
+        tokenizer.Token{ .EQUALS = void{} },
+        tokenizer.Token{ .NUMBER = 2 },
+        tokenizer.Token{ .SEMICOLON = void{} },
+    });
+
+    const ast = try Parser.parse(tokens);
+
+    try std.testing.expectEqualDeep(Node{ .PROGRAM = .{ .statements = @constCast(&[_]*Node{
+        @constCast(&Node{ .VARIABLE_STATEMENT = .{ .is_declaration = true, .name = @constCast("i"), .expression = @constCast(&Node{
+            .NUMBER = .{ .value = 2 },
+        }) } }),
+    }) } }, ast);
+}