import "!stdlib.src";

// Tokenizer state, shared across the helpers below.
// `buf` holds the raw file contents; `tokens` is a flat buffer of
// NUL-separated token strings, `tokens_len` its used length in bytes.
let file_size = 0;
let file = 0;
let buf = 0;
let offset = 0;
let tokens = 0;
let tokens_len = 0;

// Read the whole of `filename` into a freshly malloc'd, NUL-terminated
// buffer. Side effects: records the handle in `file`, the byte count in
// `file_size`, and the buffer in `buf`. Returns the buffer.
let read_file = (filename: *i8) => *i8 {
    file = fopen(filename, "r");
    fseek(file, 0, 2);                 // 2 == SEEK_END
    file_size = ftell(file);
    fseek(file, 0, 0);                 // 0 == SEEK_SET, rewind for reading
    buf = malloc(file_size + 1);
    let bytes_read = fread(buf, 1, file_size, file);
    // Terminate after what was ACTUALLY read, not after file_size.
    (*(buf + bytes_read)) = '\0';
    return buf;
};

// Append `token`, INCLUDING its trailing NUL, to the flat `tokens`
// buffer, advancing the global `tokens_len`. The NUL acts as the
// separator that print_tokens later renders as a newline.
let add_token = (tokens: *i8, token: *i8) => i64 {
    let i = 0;
    while true {
        let c = (*(token + i));
        (*(tokens + tokens_len)) = c;
        tokens_len = tokens_len + 1;
        i = i + 1;
        if c == '\0' { return 0; };
    };
    return 0;
};

// Print every stored token, one per line (the NUL separators written by
// add_token are translated to '\n').
let print_tokens = (tokens: *i8) => i64 {
    let i = 0;
    while i < tokens_len {
        let c = (*(tokens + i));
        if c == '\0' { c = '\n'; };
        printf("%c", c);
        i = i + 1;
    };
    return 0;
};

// Return the next one-character token as a heap-allocated string the
// caller must free, or the static literal "EOF" (do NOT free) once the
// input buffer is exhausted.
let tokenizer_next = () => *i8 {
    if offset >= file_size { return "EOF"; };
    let c = (*(buf + offset));
    offset = offset + 1;
    let t = malloc(2);
    (*(t + 0)) = c;
    (*(t + 1)) = '\0';
    return t;
};

// Load `filename`, tokenize it to completion, and print the tokens.
let tokenizer_init = (filename: *i8) => i64 {
    // Renamed from `buf`: the old local shadowed the global `buf` that
    // read_file assigns and that tokenizer_next/tokenizer_deinit use.
    let file_buf = read_file(filename);
    println("File size: %d", file_size);
    println("%s", file_buf);
    tokens = malloc(10000);
    while true {
        let t = tokenizer_next();
        // BUG FIX: strcmp returns 0 on equality. The old test
        // `if strcmp(t, "EOF")` broke out on the FIRST real token, and
        // at end of input it never broke — instead it stored "EOF" and
        // called free() on the string literal in an endless loop.
        if strcmp(t, "EOF") == 0 { break; };
        println("%s", t);
        add_token(tokens, t);
        free(t);   // token bytes were copied into `tokens`; temp is done
    };
    println("PRINT TOKENS");
    print_tokens(tokens);
    return 0;
};

// Release everything acquired by read_file / tokenizer_init.
let tokenizer_deinit = () => i64 {
    free(tokens);
    free(buf);
    fclose(file);
    return 0;
};