path: root/src/main.zig
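//! Entry point for the interpreter: tokenizes, parses, and evaluates either
//! a source file given on the command line or lines typed into a REPL (`-i`).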
const std = @import("std");
const tokenizer = @import("tokenizer.zig");
const parser = @import("parser.zig");
const evaluator = @import("evaluator.zig");

pub fn main() !void {
    const stdout = std.io.getStdOut().writer();
    const stdin = std.io.getStdIn().reader();

    // argv[1] is either "-i" for interactive mode or a path to a source file.
    if (std.os.argv.len < 2) {
        std.debug.print("Usage: {s} <source-file | -i>\n", .{std.os.argv[0]});
        std.process.exit(1);
    }
    const path = std.mem.span(std.os.argv[1]);

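    // Leak-checking allocator; the deferred deinit panics if anything leaked.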
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();
    defer {
        const deinit_status = gpa.deinit();
        if (deinit_status == .leak) @panic("Memory leak detected!");
    }

    const source_evaluator = try evaluator.Evaluator.init(allocator);
    defer source_evaluator.deinit();

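    // AST nodes are allocated into this arena. Note: it is never reset
    // between REPL lines, so AST allocations accumulate until exit.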
    var arena = std.heap.ArenaAllocator.init(allocator);
    defer arena.deinit();

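    // "-i" starts an interactive read-eval-print loop; any other argument
    // is treated as a path to a source file.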
    if (std.mem.eql(u8, path, "-i")) {
        while (true) {
            try stdout.print("> ", .{});

            // EOF (Ctrl-D) ends the REPL cleanly; other read errors propagate.
            const buf = stdin.readUntilDelimiterAlloc(allocator, '\n', 1024) catch |err| switch (err) {
                error.EndOfStream => break,
                else => return err,
            };
            defer allocator.free(buf);

            process_buf(buf, allocator, &arena, source_evaluator) catch |err| {
                try stdout.print("Error processing line: {any}\n", .{err});
            };
        }
    } else {
        std.debug.print("Tokenizing! {s}\n", .{path});
        const file = try std.fs.cwd().openFile(path, .{});
        const buf = try file.readToEndAlloc(allocator, 1 * 1024 * 1024);
        defer allocator.free(buf);
        try process_buf(buf, allocator, &arena, source_evaluator);
    }
}

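/// Runs one buffer of source text through the full pipeline:
/// tokenize -> parse -> evaluate, printing debug output at each stage.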
fn process_buf(buf: []u8, allocator: std.mem.Allocator, arena: *std.heap.ArenaAllocator, source_evaluator: *evaluator.Evaluator) !void {
    std.debug.print("Buf:\n{s}\n", .{buf});

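    // Collect all tokens up front so the parser can operate on a slice.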
    var token_list = std.ArrayList(tokenizer.Token).init(allocator);
    defer token_list.deinit();

    var source_tokenizer = try tokenizer.Tokenizer.init(buf);
    while (source_tokenizer.next()) |token| {
        try token_list.append(token);
    }

    for (token_list.items) |token| {
        std.debug.print("{any}\n", .{token});
    }

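    // The parser allocates AST nodes from the arena, so nothing here needs
    // to be freed individually.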
    const source_parser = try parser.Parser.init(token_list.items, arena.allocator());
    const ast = try source_parser.parse();
    std.debug.print("AST: {any}\n", .{ast});

    const result = try source_evaluator.evaluate_ast(ast);
    std.debug.print("Evaluation result: {any}\n", .{result});
}

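// Referencing every declaration pulls the imported modules into analysis
// so their tests also run under `zig test`.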
test {
    std.testing.refAllDecls(@This());
}