about summary refs log tree commit diff
path: root/src/main.zig
diff options
context:
space:
mode:
Diffstat (limited to 'src/main.zig')
-rw-r--r--src/main.zig65
1 file changed, 15 insertions(+), 50 deletions(-)
diff --git a/src/main.zig b/src/main.zig
index 3ac7f39..5ddd750 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -1,13 +1,9 @@
 const std = @import("std");
 const tokenizer = @import("tokenizer.zig");
 const parser = @import("parser.zig");
-const evaluator = @import("evaluator.zig");
 const codegen = @import("codegen.zig");
 
 pub fn main() !void {
-    const stdout = std.io.getStdOut().writer();
-    const stdin = std.io.getStdIn().reader();
-
     const pathLen = std.mem.len(std.os.argv[1]);
     const path = std.os.argv[1][0..pathLen];
 
@@ -21,48 +17,22 @@ pub fn main() !void {
     var arena = std.heap.ArenaAllocator.init(allocator);
     defer arena.deinit();
 
-    const source_evaluator = try evaluator.Evaluator.init(arena.allocator());
-
-    if (std.mem.eql(u8, path, "-i")) {
-        while (true) {
-            try stdout.print("> ", .{});
-
-            const buf = try stdin.readUntilDelimiterAlloc(allocator, '\n', 1024);
-            defer allocator.free(buf);
-
-            process_buf(buf, allocator, arena.allocator(), source_evaluator, null) catch |err| {
-                try stdout.print("Error processing line: {any}\n", .{err});
-            };
-        }
-    } else {
-        std.debug.print("Tokenizing! {s}\n", .{path});
-        const file = try std.fs.cwd().openFile(path, .{});
-        const buf = try file.readToEndAlloc(allocator, 1 * 1024 * 1024);
-        defer allocator.free(buf);
-        if (std.os.argv.len < 3) {
-            try process_buf(
-                buf,
-                allocator,
-                arena.allocator(),
-                source_evaluator,
-                null,
-            );
-        } else {
-            const source_codegen = try codegen.CodeGen.init(arena.allocator());
-            defer source_codegen.deinit();
-            try process_buf(
-                buf,
-                allocator,
-                arena.allocator(),
-                source_evaluator,
-                source_codegen,
-            );
-            source_codegen.compile();
-        }
-    }
+    std.debug.print("Tokenizing! {s}\n", .{path});
+    const file = try std.fs.cwd().openFile(path, .{});
+    const buf = try file.readToEndAlloc(allocator, 1 * 1024 * 1024);
+    defer allocator.free(buf);
+    const source_codegen = try codegen.CodeGen.init(arena.allocator());
+    defer source_codegen.deinit();
+    try process_buf(
+        buf,
+        allocator,
+        arena.allocator(),
+        source_codegen,
+    );
+    source_codegen.compile();
 }
 
-fn process_buf(buf: []u8, allocator: std.mem.Allocator, arena: std.mem.Allocator, source_evaluator: *evaluator.Evaluator, source_codegen: ?*codegen.CodeGen) !void {
+fn process_buf(buf: []u8, allocator: std.mem.Allocator, arena: std.mem.Allocator, source_codegen: ?*codegen.CodeGen) !void {
     std.debug.print("Buf:\n{s}\n", .{buf});
 
     var token_list = std.ArrayList(tokenizer.Token).init(allocator);
@@ -78,12 +48,7 @@ fn process_buf(buf: []u8, allocator: std.mem.Allocator, arena: std.mem.Allocator
     const ast = try source_parser.parse();
     std.debug.print("AST: {any}\n", .{ast});
 
-    if (source_codegen != null) {
-        try source_codegen.?.generate(ast);
-    } else {
-        const result = try source_evaluator.evaluate_ast(ast);
-        std.debug.print("Evaluation result: {any}\n", .{result});
-    }
+    try source_codegen.?.generate(ast);
 }
 
 test {