author     Baitinq <manuelpalenzuelamerino@gmail.com>  2025-01-05 19:02:58 +0100
committer  Baitinq <manuelpalenzuelamerino@gmail.com>  2025-01-05 19:03:52 +0100
commit     255e702c0240ecb2ae85304c757013c654598c67
tree       5383d3b8a852c1800832e96290433795a6443286
parent     Initial commit
File reading
-rw-r--r--  examples/1.src      3
-rw-r--r--  src/main.zig       20
-rw-r--r--  src/tokenizer.zig  16
3 files changed, 38 insertions, 1 deletion
diff --git a/examples/1.src b/examples/1.src
new file mode 100644
index 0000000..b442d06
--- /dev/null
+++ b/examples/1.src
@@ -0,0 +1,3 @@
+let i = 2;
+
+print(i);
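
The new examples/1.src exercises exactly two constructs: a let binding and a print call. As a hypothetical sketch, this is the token stream it implies once the tokenizer grows real tokens; every name below is an assumption, since this commit only defines Token.HI:

// Assumed token kinds for: let i = 2; print(i);
const ExpectedToken = enum { LET, IDENT, EQUALS, NUMBER, SEMICOLON, LPAREN, RPAREN };

const expected_for_1_src = [_]ExpectedToken{
    .LET, .IDENT, .EQUALS, .NUMBER, .SEMICOLON, // let i = 2;
    .IDENT, .LPAREN, .IDENT, .RPAREN, .SEMICOLON, // print(i);
};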
diff --git a/src/main.zig b/src/main.zig
index f92e181..e90f453 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -1,5 +1,23 @@
 const std = @import("std");
+const tokenizer = @import("tokenizer.zig");
 
 pub fn main() !void {
-    std.debug.print("All your {s} are belong to us.\n", .{"codebase"});
+    const pathLen = std.mem.len(std.os.argv[1]);
+    const path = std.os.argv[1][0..pathLen];
+    std.debug.print("Tokenizing! {s}\n", .{path});
+
+    const file = try std.fs.cwd().openFile(path, .{});
+
+    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
+    const allocator = gpa.allocator();
+    defer {
+        const deinit_status = gpa.deinit();
+        if (deinit_status == .leak) @panic("Memory leak detected!");
+    }
+
+    const buf = try file.readToEndAlloc(allocator, 1 * 1024 * 1024);
+    defer allocator.free(buf);
+
+    var tknizer = try tokenizer.Tokenizer.init(buf);
+    std.debug.print("Next: {any}\n", .{tknizer.next()});
 }
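
Two things are worth noting in the new main.zig: the opened file handle is never closed, and std.os.argv[1] is indexed without checking that an argument was passed. A minimal hardened sketch of the same flow, using only the std APIs the commit already uses (this is not part of the commit):

const std = @import("std");
const tokenizer = @import("tokenizer.zig");

pub fn main() !void {
    // Fail cleanly instead of crashing when no path is given.
    if (std.os.argv.len < 2) return error.MissingSourcePath;
    // std.mem.span() collapses the commit's len() + slice steps into one call.
    const path = std.mem.span(std.os.argv[1]);
    std.debug.print("Tokenizing! {s}\n", .{path});

    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    defer {
        if (gpa.deinit() == .leak) @panic("Memory leak detected!");
    }

    const file = try std.fs.cwd().openFile(path, .{});
    defer file.close(); // the commit leaks this handle until process exit

    const buf = try file.readToEndAlloc(allocator, 1 * 1024 * 1024);
    defer allocator.free(buf);

    var tknizer = try tokenizer.Tokenizer.init(buf);
    std.debug.print("Next: {any}\n", .{tknizer.next()});
}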
diff --git a/src/tokenizer.zig b/src/tokenizer.zig
new file mode 100644
index 0000000..a69b4eb
--- /dev/null
+++ b/src/tokenizer.zig
@@ -0,0 +1,16 @@
+pub const Tokenizer = struct {
+    buf: []u8,
+    offset: u32,
+
+    pub fn init(buf: []u8) !Tokenizer {
+        return Tokenizer{ .buf = buf, .offset = 0 };
+    }
+
+    pub fn next(_: *Tokenizer) ?Token {
+        return Token.HI;
+    }
+};
+
+pub const Token = enum {
+    HI,
+};
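
Tokenizer.next() is a stub here: it ignores buf and offset and always yields Token.HI. A minimal sketch of a next() that actually walks the buffer, assuming a `const std = @import("std");` at the top of tokenizer.zig and Token members beyond HI, none of which exist in this commit:

pub fn next(self: *Tokenizer) ?Token {
    // Skip whitespace between tokens.
    while (self.offset < self.buf.len and std.ascii.isWhitespace(self.buf[self.offset])) {
        self.offset += 1;
    }
    if (self.offset >= self.buf.len) return null; // signal end of input

    const c = self.buf[self.offset];
    self.offset += 1;
    return switch (c) {
        '=' => .EQUALS, // assumed Token members; the commit only has HI
        ';' => .SEMICOLON,
        '(' => .LPAREN,
        ')' => .RPAREN,
        else => .HI, // placeholder until identifiers and numbers are lexed
    };
}

Returning null at end of input gives callers a natural stopping point and matches the ?Token signature the commit already declares.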