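//! Front-end driver: reads a source file, tokenizes it, and parses the
//! token stream into an AST.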
const std = @import("std");
const tokenizer = @import("tokenizer.zig");
const parser = @import("parser.zig");

pub fn main() !void {
    // Expect the path of the source file as the first CLI argument.
    if (std.os.argv.len < 2) {
        std.debug.print("Usage: {s} <source-file>\n", .{std.mem.span(std.os.argv[0])});
        return;
    }
    // argv entries are null-terminated; slice argv[1] into a []u8 path.
    const path_len = std.mem.len(std.os.argv[1]);
    const path = std.os.argv[1][0..path_len];
    std.debug.print("Tokenizing! {s}\n", .{path});
    // TODO: REPL mode
    const file = try std.fs.cwd().openFile(path, .{});
    defer file.close();
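
    // Leak-checking allocator; the deferred deinit below panics if any
    // allocation was never freed.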
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    defer {
        const deinit_status = gpa.deinit();
        if (deinit_status == .leak) @panic("Memory leak detected!");
    }
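
    // Read the whole source file into memory (capped at 1 MiB).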
    const buf = try file.readToEndAlloc(allocator, 1 * 1024 * 1024);
    defer allocator.free(buf);
    std.debug.print("Buf:\n{s}\n", .{buf});
    var token_list = std.ArrayList(tokenizer.Token).init(allocator);
    defer token_list.deinit();
    var source_tokenizer = try tokenizer.Tokenizer.init(buf);
    while (source_tokenizer.next()) |token| {
        try token_list.append(token);
    }
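
    // Dump the token stream for debugging.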
    for (token_list.items) |token| {
        std.debug.print("{any}\n", .{token});
    }
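
    // Parse the token stream into an AST and print it.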
    const source_parser = parser.Parser.init(token_list.items, allocator);
    defer source_parser.deinit();
    const ast = try source_parser.parse();
    std.debug.print("AST: {any}\n", .{ast});
}
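
// Referencing every declaration here makes `zig test` compile and run the
// tests declared in the imported tokenizer and parser modules as well.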
test {
    std.testing.refAllDecls(@This());
}