| author | Baitinq <[email protected]> | 2025-05-18 10:48:30 +0200 |
|---|---|---|
| committer | Baitinq <[email protected]> | 2025-05-18 10:49:44 +0200 |
| commit | 5f3f13a1b5a6cb4d69c7a958760d466be2b72f2a | |
| tree | f943a2a244fc40e66cc74bfcda78618485cfbae5 /src | |
| parent | Tokenizer: Cleanup consuming logic | |
Feature: Add support for continue statement
Diffstat (limited to 'src')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/bootstrap/tokenizer.src | 25 |
| -rw-r--r-- | src/codegen.zig | 17 |
| -rw-r--r-- | src/parser.zig | 30 |
| -rw-r--r-- | src/tokenizer.zig | 2 |
4 files changed, 50 insertions, 24 deletions
```diff
diff --git a/src/bootstrap/tokenizer.src b/src/bootstrap/tokenizer.src
index dee4a40..ff2f5a7 100644
--- a/src/bootstrap/tokenizer.src
+++ b/src/bootstrap/tokenizer.src
@@ -6,7 +6,6 @@ extern atoi = (*i8) => i64;
 
 import "!stdlib.src";
 
 let file_size = 0;
-let file = 0;
 let buf = 0;
 
@@ -16,7 +15,7 @@ let tokens = 0;
 let tokens_len = 0;
 
 let read_file = (filename: *i8) => *i8 {
-    file = fopen(filename, "r");
+    let file = fopen(filename, "r");
 
     fseek(file, 0, 2);
     file_size = ftell(file);
@@ -27,6 +26,8 @@ let read_file = (filename: *i8) => *i8 {
     let bytes_read = fread(buf, 1, file_size, file);
     (*(buf + bytes_read)) = '\0';
 
+    fclose(file);
+
     return buf;
 };
 
@@ -135,19 +136,18 @@ let tokenizer_consume_until_condition = (condition: (i8) => bool) => *i8 {
             offset = offset + 1;
             offset = offset + 1;
-        };
 
-        /* else / continue */
-        if !(c == '\\') {
-            if condition(c) {
-                return res;
-            };
-
-            (*(res + (offset - start))) = c;
-            (*(res + (offset - start + 1))) = '\0';
+            continue;
+        };
 
-            offset = offset + 1;
+        if condition(c) {
+            return res;
         };
+
+        (*(res + (offset - start))) = c;
+        (*(res + (offset - start + 1))) = '\0';
+
+        offset = offset + 1;
     };
 
     return null;
@@ -379,7 +379,6 @@ let tokenizer_init = (filename: *i8) => i64 {
 let tokenizer_deinit = () => i64 {
     free(tokens);
     free(buf);
-    fclose(file);
 
     return 0;
 };
diff --git a/src/codegen.zig b/src/codegen.zig
index e835fed..33ee3b5 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -23,6 +23,7 @@ pub const CodeGen = struct {
     arena: std.mem.Allocator,
 
     while_loop_exit: ?llvm.LLVMBasicBlockRef,
+    while_block: ?llvm.LLVMBasicBlockRef,
     current_function: ?llvm.LLVMValueRef,
 
     pub fn init(arena: std.mem.Allocator) !*CodeGen {
@@ -45,6 +46,7 @@ pub const CodeGen = struct {
             .arena = arena,
 
             .while_loop_exit = null,
+            .while_block = null,
             .current_function = null,
         };
 
@@ -121,6 +123,7 @@ pub const CodeGen = struct {
             },
             .RETURN_STATEMENT => |*return_statement| return try self.generate_return_statement(@ptrCast(return_statement)),
             .BREAK_STATEMENT => |*break_statement| return try self.generate_break_statement(@ptrCast(@alignCast(break_statement))),
+            .CONTINUE_STATEMENT => |*continue_statement| return try self.generate_continue_statement(@ptrCast(@alignCast(continue_statement))),
             .IF_STATEMENT => |*if_statement| return try self.generate_if_statement(@ptrCast(if_statement)),
             .WHILE_STATEMENT => |*while_statement| return try self.generate_while_statement(@ptrCast(while_statement)),
             .IMPORT_DECLARATION => |*import_declaration| return try self.generate_import_declaration(@ptrCast(import_declaration)),
@@ -261,6 +264,14 @@ pub const CodeGen = struct {
         _ = llvm.LLVMBuildBr(self.builder, self.while_loop_exit.?);
     }
 
+    fn generate_continue_statement(self: *CodeGen, statement: *parser.Node) !void {
+        errdefer std.debug.print("Error generating continue statement\n", .{});
+        std.debug.assert(statement.* == parser.Node.CONTINUE_STATEMENT);
+        std.debug.assert(self.while_block != null);
+
+        _ = llvm.LLVMBuildBr(self.builder, self.while_block.?);
+    }
+
     fn generate_if_statement(self: *CodeGen, statement: *parser.Node) !void {
         errdefer std.debug.print("Error generating if statement\n", .{});
         std.debug.assert(statement.* == parser.Node.IF_STATEMENT);
@@ -303,7 +314,11 @@ pub const CodeGen = struct {
         _ = llvm.LLVMBuildCondBr(self.builder, condition_value.value, inner_block, outer_block);
 
         self.while_loop_exit = outer_block;
-        defer self.while_loop_exit = null;
+        self.while_block = while_block;
+        defer {
+            self.while_block = null;
+            self.while_loop_exit = null;
+        }
 
         _ = llvm.LLVMPositionBuilderAtEnd(self.builder, inner_block);
         for (while_statement.statements) |stmt| {
diff --git a/src/parser.zig b/src/parser.zig
index ce1e4cb..6e07491 100644
--- a/src/parser.zig
+++ b/src/parser.zig
@@ -94,6 +94,7 @@ pub const Node = union(enum) {
         expression: ?*Node,
     },
     BREAK_STATEMENT: void,
+    CONTINUE_STATEMENT: void,
 };
 
 pub const EqualityExpressionType = enum {
@@ -148,24 +149,33 @@ pub const Parser = struct {
         } });
     }
 
-    // Statement ::= (AssignmentStatement | ImportDeclaration | ExternDeclaration | FunctionCallStatement | IfStatement | WhileStatement | ReturnStatement | "break") SEMICOLON
+    // Statement ::= (AssignmentStatement | ImportDeclaration | ExternDeclaration | FunctionCallStatement | IfStatement | WhileStatement | ReturnStatement | "break" | "continue") SEMICOLON
     fn parse_statement(self: *Parser) ParserError!*Node {
         errdefer if (!self.try_context) std.debug.print("Error parsing statement {any}\n", .{self.peek_token()});
 
-        var statement = self.accept_parse(parse_function_call_statement) orelse
+        const statement = self.accept_parse(parse_function_call_statement) orelse
             self.accept_parse(parse_if_statement) orelse
             self.accept_parse(parse_while_statement) orelse
             self.accept_parse(parse_return_statement) orelse
             self.accept_parse(parse_assignment_statement) orelse
             self.accept_parse(parse_import_declaration) orelse
-            self.accept_parse(parse_extern_declaration);
-
-        if (statement == null) {
-            _ = try self.parse_token(tokenizer.TokenType.BREAK);
-            statement = try self.create_node(.{
-                .BREAK_STATEMENT = void{},
-            });
-        }
+            self.accept_parse(parse_extern_declaration) orelse
+            self.accept_parse(struct {
+                fn parse_break_statement(iself: *Parser) ParserError!*Node {
+                    _ = try iself.parse_token(tokenizer.TokenType.BREAK);
+                    return try iself.create_node(.{
+                        .BREAK_STATEMENT = void{},
+                    });
+                }
+            }.parse_break_statement) orelse
+            self.accept_parse(struct {
+                fn parse_continue_statement(iself: *Parser) ParserError!*Node {
+                    _ = try iself.parse_token(tokenizer.TokenType.CONTINUE);
+                    return try iself.create_node(.{
+                        .CONTINUE_STATEMENT = void{},
+                    });
+                }
+            }.parse_continue_statement);
 
         _ = try self.parse_token(tokenizer.TokenType.SEMICOLON);
 
diff --git a/src/tokenizer.zig b/src/tokenizer.zig
index 4ae6316..32bdb19 100644
--- a/src/tokenizer.zig
+++ b/src/tokenizer.zig
@@ -13,6 +13,7 @@ pub const TokenType = union(enum) {
     WHILE: void,
     RETURN: void,
     BREAK: void,
+    CONTINUE: void,
     ARROW: void,
 
     // Identifiers
@@ -93,6 +94,7 @@ pub const Tokenizer = struct {
         if (self.accept_string("while")) return self.create_token(.{ .WHILE = void{} });
         if (self.accept_string("return")) return self.create_token(.{ .RETURN = void{} });
         if (self.accept_string("break")) return self.create_token(.{ .BREAK = void{} });
+        if (self.accept_string("continue")) return self.create_token(.{ .CONTINUE = void{} });
         if (self.accept_string("true")) return self.create_token(.{ .BOOLEAN = true });
         if (self.accept_string("false")) return self.create_token(.{ .BOOLEAN = false });
         if (self.accept_string("null")) return self.create_token(.{ .NULL = void{} });
```
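For context, here is a minimal sketch of what the new statement enables in the interpreter's own source language. Only `continue;` (and the existing `break;`) inside a `while` body come from this commit; the `count_non_spaces` function, the `while true` condition form, and the `' '` character literal are illustrative assumptions, not code from the repository.

```
/* Hypothetical example (not from this commit): count the non-space
   characters in a string, skipping spaces with the new continue. */
let count_non_spaces = (s: *i8) => i64 {
    let count = 0;
    let i = 0;

    while true {
        let c = (*(s + i));
        if c == '\0' {
            break;
        };

        i = i + 1;

        /* Skip spaces: continue jumps back to the loop's condition check. */
        if c == ' ' {
            continue;
        };

        count = count + 1;
    };

    return count;
};
```

In codegen terms, per the diff above, `continue` emits an unconditional branch to the stored `while_block` (the loop's condition-check basic block), while `break` keeps branching to `while_loop_exit`; both references are cleared again when the while statement finishes generating.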