| field | value | date |
|---|---|---|
| author | Baitinq <[email protected]> | 2025-06-02 21:11:01 +0200 |
| committer | Baitinq <[email protected]> | 2025-06-02 21:11:01 +0200 |
| commit | 72f0d1610ee78d6fd928464c109103ac8d58c22d (patch) | |
| tree | 13cbac9d025f8367d6353d621e22a3008e9a5afb /src/bootstrap/main.src | |
| parent | Bootstrap: Tokenizer: Support missing token types (diff) | |
Bootstrap: Tokenizer: Cleanup using slices
Diffstat (limited to 'src/bootstrap/main.src')
| mode | file | changed lines |
|---|---|---|
| -rw-r--r-- | src/bootstrap/main.src | 39 |

1 file changed, 37 insertions, 2 deletions
diff --git a/src/bootstrap/main.src b/src/bootstrap/main.src
index bb840b5..c8338fb 100644
--- a/src/bootstrap/main.src
+++ b/src/bootstrap/main.src
@@ -1,8 +1,41 @@
+extern fopen = (*i8, *i8) => *i8;
+extern fgets = (*i8, i64, *i8) => void;
+extern feof = (*i8) => bool;
+extern fseek = (*i8, i64, i64) => i64;
+extern ftell = (*i8) => i64;
+extern fread = (*i8, i64, i64, *i8) => i64;
+extern fclose = (*i8) => *i8;
+
 import "!stdlib.src";
 import "!mem.src";
 
+let slice = struct {
+    data: *void,
+    data_len: i64,
+};
+
 import "tokenizer.src";
 
+let read_file = (filename: *i8, alloc: *arena) => slice {
+    let file = fopen(filename, "r");
+
+    fseek(file, 0, 2);
+    let file_size = ftell(file);
+    fseek(file, 0, 0);
+
+    let buf = cast(*i8, arena_alloc(alloc, file_size + 1));
+
+    let bytes_read = fread(buf, 1, file_size, file);
+    (*(buf + cast(*i8, bytes_read))) = '\0';
+
+    fclose(file);
+
+    let sl = slice{};
+    sl.data = cast(*void, buf);
+    sl.data_len = file_size;
+    return sl;
+};
+
 let main = (argc: i64, argv: **i8) => i64 {
     if argc < 2 {
         println("Need filename!");
@@ -15,8 +48,10 @@ let main = (argc: i64, argv: **i8) => i64 {
 
     let alloc = arena_init(999999999);
 
-    tokenizer_init(alloc, filename);
-    tokenizer_deinit();
+    let file = read_file(filename, alloc);
+
+    let t = tokenizer_init(alloc, file);
+    let ts = tokenizer_tokenize(t);
 
     arena_free(alloc);
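The new `read_file` uses the standard libc idiom for slurping a whole file through the extern declarations added at the top: seek to the end to learn the size, seek back to the start, read everything into one buffer, and NUL-terminate it. Below is a minimal C sketch of that same pattern for reference; it is not the pry-lang code, it uses `malloc` instead of the arena allocator, and the `slice_t` name is a hypothetical stand-in for the `slice` struct introduced in this commit.

```c
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical C counterpart of the `slice` struct added in this commit. */
typedef struct {
    void *data;
    long  data_len;
} slice_t;

/* Same read-whole-file idiom as read_file in main.src:
 * fseek to the end + ftell to get the size, fseek back to the start,
 * fread the contents, then NUL-terminate the buffer. */
slice_t read_file(const char *filename) {
    slice_t sl = {0};
    FILE *file = fopen(filename, "r");
    if (!file) return sl;

    fseek(file, 0, SEEK_END);          /* the diff passes the literal 2 (SEEK_END) */
    long file_size = ftell(file);
    fseek(file, 0, SEEK_SET);          /* the diff passes the literal 0 (SEEK_SET) */

    char *buf = malloc(file_size + 1); /* arena_alloc(alloc, file_size + 1) in the original */
    long bytes_read = (long)fread(buf, 1, file_size, file);
    buf[bytes_read] = '\0';

    fclose(file);

    sl.data = buf;
    sl.data_len = file_size;
    return sl;
}
```

Returning the contents as a (pointer, length) slice is what lets `tokenizer_init` take the already-read file instead of a filename, as the second hunk of the diff shows.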