From b53e0f2add02cde1a1618332db89431101902c2f Mon Sep 17 00:00:00 2001
From: Baitinq
Date: Wed, 22 Jan 2025 23:42:57 +0100
Subject: Tokenizer: Improve Token type

---
 src/parser.zig | 34 +++++++++-------------------------
 1 file changed, 9 insertions(+), 25 deletions(-)

(limited to 'src/parser.zig')

diff --git a/src/parser.zig b/src/parser.zig
index b407c42..a13f1cc 100644
--- a/src/parser.zig
+++ b/src/parser.zig
@@ -6,23 +6,7 @@ const ParserError = error{
     OutOfMemory,
 };
 
-const NodeType = enum {
-    PROGRAM,
-    STATEMENT,
-    ASSIGNMENT_STATEMENT,
-    FUNCTION_CALL_STATEMENT,
-    IF_STATEMENT,
-    WHILE_STATEMENT,
-    EQUALITY_EXPRESSION,
-    ADDITIVE_EXPRESSION,
-    MULTIPLICATIVE_EXPRESSION,
-    UNARY_EXPRESSION,
-    PRIMARY_EXPRESSION,
-    FUNCTION_DEFINITION,
-    RETURN_STATEMENT,
-};
-
-pub const Node = union(NodeType) {
+pub const Node = union(enum) {
     PROGRAM: struct {
         statements: []*Node,
     },
@@ -159,7 +143,7 @@ pub const Parser = struct {
         return self.create_node(.{
             .ASSIGNMENT_STATEMENT = .{
                 .is_declaration = is_declaration,
-                .name = try self.allocator.dupe(u8, identifier.IDENTIFIER),
+                .name = try self.allocator.dupe(u8, identifier.type.IDENTIFIER),
                 .expression = @constCast(expression),
             },
         });
@@ -178,7 +162,7 @@ pub const Parser = struct {
         _ = try self.parse_token(tokenizer.TokenType.RPAREN);
 
         return self.create_node(.{ .FUNCTION_CALL_STATEMENT = .{
-            .name = try self.allocator.dupe(u8, identifier.IDENTIFIER),
+            .name = try self.allocator.dupe(u8, identifier.type.IDENTIFIER),
             .arguments = arguments,
         } });
     }
@@ -353,7 +337,7 @@ pub const Parser = struct {
 
         const token = self.consume_token() orelse return ParserError.ParsingError;
 
-        return switch (token) {
+        return switch (token.type) {
             .NUMBER => |number_token| try self.create_node(.{
                 .PRIMARY_EXPRESSION = .{
                     .NUMBER = .{
@@ -421,7 +405,7 @@ pub const Parser = struct {
             try node_list.append(try self.create_node(.{
                 .PRIMARY_EXPRESSION = .{
                     .IDENTIFIER = .{
-                        .name = try self.allocator.dupe(u8, ident.IDENTIFIER),
+                        .name = try self.allocator.dupe(u8, ident.type.IDENTIFIER),
                     },
                 },
             }));
@@ -444,11 +428,11 @@ pub const Parser = struct {
         });
     }
 
-    fn parse_token(self: *Parser, expected_token: tokenizer.TokenType) ParserError!tokenizer.Token {
+    fn parse_token(self: *Parser, expected_token: std.meta.Tag(tokenizer.TokenType)) ParserError!tokenizer.Token {
         errdefer if (!self.try_context) std.debug.print("Error accepting token: {any}\n", .{expected_token});
 
         const token = self.peek_token() orelse return ParserError.ParsingError;
-        if (token != expected_token) {
+        if (expected_token != std.meta.activeTag(token.type)) {
             if (!self.try_context) std.debug.print("Expected {any} - found {any}\n", .{ expected_token, token });
             return ParserError.ParsingError;
         }
@@ -469,9 +453,9 @@ pub const Parser = struct {
         return node;
     }
 
-    fn accept_token(self: *Parser, token: tokenizer.TokenType) ?tokenizer.Token {
+    fn accept_token(self: *Parser, token_type: std.meta.Tag(tokenizer.TokenType)) ?tokenizer.Token {
         const curr_token = self.peek_token() orelse return null;
-        if (curr_token == token) {
+        if (std.meta.activeTag(curr_token.type) == token_type) {
             return self.consume_token();
         }
         return null;
-- 
cgit 1.4.1
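
Note: tokenizer.zig is not part of this diff, so the new Token shape is only
implied by the parser changes above (token.type, std.meta.activeTag(token.type),
and std.meta.Tag(tokenizer.TokenType) in the helper signatures). Below is a
minimal sketch of what the tokenizer presumably exposes after this commit; the
payload types, the set of variants, and any extra fields are assumptions, not
the actual implementation:

    // Sketch only: TokenType stays a tagged union, and Token wraps it in a
    // struct so per-token metadata can ride alongside the variant.
    pub const TokenType = union(enum) {
        NUMBER: i64, // payload type assumed
        IDENTIFIER: []const u8,
        RPAREN,
        // ...remaining variants omitted
    };

    pub const Token = struct {
        type: TokenType,
        // e.g. a source offset could live here; the real extra fields (if any)
        // are not shown in this patch.
    };

Taking std.meta.Tag(tokenizer.TokenType) in parse_token/accept_token lets
callers name only the kind of token they expect, without constructing a
payload, and std.meta.activeTag(token.type) reduces the check to a plain enum
comparison.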