const std = @import("std");
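/// A minimal hand-written tokenizer for a tiny `let`/`print` language.
/// It never allocates: identifier tokens borrow slices of the caller's
/// input buffer, so the buffer must outlive the tokens.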
pub const Tokenizer = struct {
    buf: []const u8,
    offset: usize,

    pub fn init(buf: []const u8) Tokenizer {
        return Tokenizer{ .buf = buf, .offset = 0 };
    }
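    /// Returns the next token, or null once the input is exhausted.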
    pub fn next(self: *Tokenizer) ?Token {
        // Every branch leaves `offset` pointing at the last byte of the
        // returned token; the deferred increment steps past it.
        defer self.offset += 1;
        self.skip_whitespace();
        if (self.offset >= self.buf.len) return null;
        const c = self.buf[self.offset];
        if (c == ';') return Token{ .SEMICOLON = {} };
        if (c == '(') return Token{ .LPAREN = {} };
        if (c == ')') return Token{ .RPAREN = {} };
        if (c == '=') return Token{ .EQUALS = {} };
        // Anything else is a keyword, a number, or an identifier.
        const string = self.consume_string();
        // consume_string stops on the byte after the token, so back up
        // one; the deferred increment restores the position.
        self.offset -= 1;
        if (std.mem.eql(u8, string, "let")) return Token{ .LET = {} };
        if (std.mem.eql(u8, string, "print")) return Token{ .PRINT = {} };
        // Parse as i64 to match the NUMBER payload type.
        if (std.fmt.parseInt(i64, string, 10) catch null) |i| return Token{ .NUMBER = i };
        return Token{ .IDENTIFIER = string };
    }
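    /// Advances `offset` to the first non-whitespace byte, or to the end
    /// of the buffer.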
    fn skip_whitespace(self: *Tokenizer) void {
        while (self.offset < self.buf.len and std.ascii.isWhitespace(self.buf[self.offset])) {
            self.offset += 1;
        }
    }
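    /// Consumes a maximal run of alphanumeric bytes starting at `offset`
    /// and returns it as a slice borrowed from the input buffer.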
    fn consume_string(self: *Tokenizer) []const u8 {
        const start = self.offset;
        // Check bounds before indexing: a token may end exactly at the
        // end of the buffer.
        while (self.offset < self.buf.len and std.ascii.isAlphanumeric(self.buf[self.offset])) {
            self.offset += 1;
        }
        return self.buf[start..self.offset];
    }
};
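/// The kinds of token the tokenizer can produce.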
const TokenType = enum {
    // Keywords
    LET,
    PRINT,
    // Identifiers
    IDENTIFIER,
    // Literals
    NUMBER,
    // Operators
    EQUALS,
    // Punctuation
    SEMICOLON,
    LPAREN,
    RPAREN,
};
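/// A token tagged by `TokenType`. Only `IDENTIFIER` and `NUMBER` carry
/// payloads; the rest are bare markers.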
pub const Token = union(TokenType) {
    LET: void,
    PRINT: void,
    IDENTIFIER: []const u8,
    NUMBER: i64,
    EQUALS: void,
    SEMICOLON: void,
    LPAREN: void,
    RPAREN: void,
};
test "simple" {
const buf =
\\ let i = 2;
\\
\\ print(i);
;
var token_list = std.ArrayList(Token).init(std.testing.allocator);
defer token_list.deinit();
var tokenizer = try Tokenizer.init(@constCast(buf));
while (tokenizer.next()) |token| {
try token_list.append(token);
}
try std.testing.expectEqualDeep(&.{
Token{ .LET = void{} },
Token{ .IDENTIFIER = @constCast("i") },
Token{ .EQUALS = void{} },
Token{ .NUMBER = 2 },
Token{ .SEMICOLON = void{} },
Token{ .PRINT = void{} },
Token{ .LPAREN = void{} },
Token{ .IDENTIFIER = @constCast("i") },
Token{ .RPAREN = void{} },
Token{ .SEMICOLON = void{} },
}, token_list.items);
}
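
// An extra regression test, not part of the original listing: it assumes
// the intended behavior is that a token ending exactly at the end of the
// buffer is returned cleanly (the bounds check in consume_string makes
// this safe rather than an out-of-bounds read).
test "token at end of buffer" {
    var tokenizer = Tokenizer.init("print");
    const token = tokenizer.next().?;
    try std.testing.expectEqual(TokenType.PRINT, std.meta.activeTag(token));
    try std.testing.expect(tokenizer.next() == null);
}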