author    Baitinq <manuelpalenzuelamerino@gmail.com>    2025-01-18 01:25:19 +0100
committer Baitinq <manuelpalenzuelamerino@gmail.com>    2025-01-18 01:25:19 +0100
commit    74ff39914bdc5c415be52160c63a4aba3dbfa125
tree      bec2abb6d7063e6aa5dd5946022a231955be1a8a
parent    Examples: new example
Lang: Start introducing support for function arguments
-rw-r--r--  examples/5.src     |  1
-rw-r--r--  grammar.ebnf       |  8
-rw-r--r--  src/evaluator.zig  |  1
-rw-r--r--  src/parser.zig     | 73
-rw-r--r--  src/tokenizer.zig  |  3
5 files changed, 73 insertions, 13 deletions
diff --git a/examples/5.src b/examples/5.src
index ad96168..573ee6a 100644
--- a/examples/5.src
+++ b/examples/5.src
@@ -3,7 +3,6 @@ let print_input = (input) => {
 	return input;
 };
 
-
 let main = () => {
 	return print_input(7);
 };
diff --git a/grammar.ebnf b/grammar.ebnf
index 65cd74e..406c2d0 100644
--- a/grammar.ebnf
+++ b/grammar.ebnf
@@ -6,7 +6,9 @@ AssignmentStatement ::= "let" IDENTIFIER EQUALS Expression
 
 PrintStatement ::= PRINT LPAREN Expression RPAREN -- TODO: this won't be needed once functions support arguments
 
-FunctionCallStatement ::= IDENTIFIER LPAREN RPAREN
+FunctionCallStatement ::= IDENTIFIER LPAREN FunctionArguments? RPAREN
+
+FunctionArguments ::= Expression ("," Expression)*
 
 Expression   ::= AdditiveExpression | FunctionDefinition
 
@@ -14,6 +16,8 @@ AdditiveExpression ::= PrimaryExpression ("+" AdditiveExpression)
 
 PrimaryExpression ::= NUMBER | IDENTIFIER | FunctionCallStatement
 
-FunctionDefinition ::= ARROW LBRACE Statement* ReturnStatement RBRACE
+FunctionDefinition ::= LPAREN FunctionParameters? RPAREN ARROW LBRACE Statement* ReturnStatement RBRACE
+
+FunctionParameters ::= IDENTIFIER ("," IDENTIFIER)*
 
 ReturnStatement ::= RETURN Expression SEMICOLON --TODO: I don't like this
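
With these rules, a program in the style of examples/5.src that the extended grammar is aiming to accept (not part of this commit, and only partially handled by it) would look like:

    let add = (a, b) => {
        return a + b;
    };

    let main = () => {
        return add(1, 2);
    };
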
diff --git a/src/evaluator.zig b/src/evaluator.zig
index 2dd6b91..4164f93 100644
--- a/src/evaluator.zig
+++ b/src/evaluator.zig
@@ -9,6 +9,7 @@ const EvaluatorError = error{
 pub const Evaluator = struct {
     ast: ?*parser.Node,
     variables: std.StringHashMap(?i64),
+    //TODO: Introduce a stack of scopes so variable lookups can search enclosing scopes
 
     allocator: std.mem.Allocator,
 
diff --git a/src/parser.zig b/src/parser.zig
index f7f90ba..e206403 100644
--- a/src/parser.zig
+++ b/src/parser.zig
@@ -36,6 +36,7 @@ pub const Node = union(NodeType) {
     },
     FUNCTION_CALL_STATEMENT: struct {
         name: []const u8,
+        arguments: []*Node,
     },
     EXPRESSION: union(enum) {
         ADDITIVE_EXPRESSION: struct {
@@ -62,6 +63,7 @@ pub const Node = union(NodeType) {
     },
     FUNCTION_DEFINITION: struct {
         statements: []*Node,
+        parameters: []*Node,
     },
     RETURN_STATEMENT: struct {
         expression: *Node,
@@ -126,7 +128,7 @@ pub const Parser = struct {
         errdefer if (!self.try_context) std.debug.print("Error parsing assignment statement\n", .{});
 
         var is_declaration: bool = false;
-        if (self.match_token(.LET)) {
+        if (self.match_token(.LET) != null) {
             is_declaration = true;
         }
 
@@ -163,16 +165,39 @@ pub const Parser = struct {
         });
     }
 
-    // FunctionCallStatement ::= IDENTIFIER LPAREN RPAREN
+    // FunctionCallStatement ::= IDENTIFIER LPAREN FunctionArguments? RPAREN
     fn parse_function_call_statement(self: *Parser) ParserError!*Node {
         errdefer if (!self.try_context) std.debug.print("Error parsing function call statement\n", .{});
 
         const identifier = try self.accept_token(tokenizer.TokenType.IDENTIFIER);
 
         _ = try self.accept_token(tokenizer.TokenType.LPAREN);
+
+        const arguments = try self.parse_function_arguments();
+
         _ = try self.accept_token(tokenizer.TokenType.RPAREN);
 
-        return self.create_node(.{ .FUNCTION_CALL_STATEMENT = .{ .name = try self.allocator.dupe(u8, identifier.IDENTIFIER) } });
+        return self.create_node(.{ .FUNCTION_CALL_STATEMENT = .{
+            .name = try self.allocator.dupe(u8, identifier.IDENTIFIER),
+            .arguments = arguments,
+        } });
+    }
+
+    // FunctionArguments ::= Expression ("," Expression)*
+    fn parse_function_arguments(self: *Parser) ParserError![]*Node {
+        errdefer if (!self.try_context) std.debug.print("Error parsing function arguments\n", .{});
+        var nodeList = std.ArrayList(*Node).init(self.allocator);
+
+        // Arguments are optional; parse expressions separated by commas.
+        while (self.accept_parse(parse_expression)) |a| {
+            try nodeList.append(a);
+            // A COMMA means another argument follows; otherwise the list is done.
+            if (self.match_token(tokenizer.TokenType.COMMA) == null) {
+                break;
+            }
+        }
+
+        return nodeList.items;
     }
 
     // Expression   ::= AdditiveExpression | FunctionDefinition
@@ -190,7 +215,7 @@ pub const Parser = struct {
 
         const lhs = try self.parse_primary_expression();
 
-        if (self.match_token(tokenizer.TokenType.PLUS)) {
+        if (self.match_token(tokenizer.TokenType.PLUS) != null) {
             const rhs = try self.parse_additive_expression();
             return self.create_node(.{ .ADDITIVE_EXPRESSION = .{
                 .lhs = lhs,
@@ -228,12 +253,16 @@ pub const Parser = struct {
         };
     }
 
-    // FunctionDefinition ::= ARROW LBRACE Statement* ReturnStatement RBRACE
+    // FunctionDefinition ::= LPAREN FunctionParameters? RPAREN ARROW LBRACE Statement* ReturnStatement RBRACE
     fn parse_function_definition(self: *Parser) ParserError!*Node {
         errdefer if (!self.try_context) std.debug.print("Error parsing function definition\n", .{});
 
         _ = try self.accept_token(tokenizer.TokenType.LPAREN);
+
+        const parameters = try self.parse_function_parameters();
+
         _ = try self.accept_token(tokenizer.TokenType.RPAREN);
+
         _ = try self.accept_token(tokenizer.TokenType.ARROW);
         _ = try self.accept_token(tokenizer.TokenType.LBRACE);
 
@@ -248,9 +277,34 @@ pub const Parser = struct {
 
         return self.create_node(.{ .FUNCTION_DEFINITION = .{
             .statements = nodes.items,
+            .parameters = parameters,
         } });
     }
 
+    // FunctionParameters ::= IDENTIFIER ("," IDENTIFIER)*
+    fn parse_function_parameters(self: *Parser) ParserError![]*Node {
+        errdefer if (!self.try_context) std.debug.print("Error parsing function parameters\n", .{});
+        var nodeList = std.ArrayList(*Node).init(self.allocator);
+
+        // Parameters are optional; parse identifiers separated by commas.
+        while (self.match_token(tokenizer.TokenType.IDENTIFIER)) |a| {
+            try nodeList.append(try self.create_node(.{
+                .PRIMARY_EXPRESSION = .{
+                    .IDENTIFIER = .{
+                        .name = try self.allocator.dupe(u8, a.IDENTIFIER),
+                    },
+                },
+            }));
+
+            // A COMMA means another parameter follows; otherwise the list is done.
+            if (self.match_token(tokenizer.TokenType.COMMA) == null) {
+                break;
+            }
+        }
+
+        return nodeList.items;
+    }
+
     // ReturnStatement ::= RETURN Expression
     fn parse_return_statement(self: *Parser) ParserError!*Node {
         errdefer if (!self.try_context) std.debug.print("Error parsing return statement\n", .{});
@@ -290,13 +344,12 @@ pub const Parser = struct {
         return self.consume_token() orelse unreachable;
     }
 
-    fn match_token(self: *Parser, token: tokenizer.TokenType) bool {
-        const curr_token = self.peek_token() orelse return false;
+    fn match_token(self: *Parser, token: tokenizer.TokenType) ?tokenizer.Token {
+        const curr_token = self.peek_token() orelse return null;
         if (curr_token == token) {
-            _ = self.consume_token();
-            return true;
+            return self.consume_token();
         }
-        return false;
+        return null;
     }
 
     fn consume_token(self: *Parser) ?tokenizer.Token {
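
To show what the new arguments field on FUNCTION_CALL_STATEMENT and parameters field on FUNCTION_DEFINITION are for, here is a standalone, hypothetical illustration (plain slices instead of the real Node type, not project code) of the binding step an evaluator would eventually perform before running a function body:

    const std = @import("std");

    // Bind each argument value to the parameter name in the same position;
    // the body can then look the names up like ordinary variables.
    pub fn main() !void {
        var gpa = std.heap.GeneralPurposeAllocator(.{}){};
        defer _ = gpa.deinit();
        const allocator = gpa.allocator();

        // add(1, 2) with: let add = (a, b) => { return a + b; };
        const parameters = [_][]const u8{ "a", "b" };
        const arguments = [_]i64{ 1, 2 };

        var scope = std.StringHashMap(i64).init(allocator);
        defer scope.deinit();

        for (parameters, arguments) |name, value| {
            try scope.put(name, value);
        }

        std.debug.print("a + b = {d}\n", .{scope.get("a").? + scope.get("b").?});
    }
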
diff --git a/src/tokenizer.zig b/src/tokenizer.zig
index 4eb807a..b5634a2 100644
--- a/src/tokenizer.zig
+++ b/src/tokenizer.zig
@@ -23,6 +23,7 @@ pub const TokenType = enum {
 
     // Punctuation
     SEMICOLON,
+    COMMA,
     LPAREN,
     RPAREN,
     LBRACE,
@@ -39,6 +40,7 @@ pub const Token = union(TokenType) {
     EQUALS: void,
     PLUS: void,
     SEMICOLON: void,
+    COMMA: void,
     LPAREN: void,
     RPAREN: void,
     LBRACE: void,
@@ -64,6 +66,7 @@ pub const Tokenizer = struct {
 
         if (self.accept_substr("=>")) return Token{ .ARROW = void{} };
         if (c == ';') return Token{ .SEMICOLON = void{} };
+        if (c == ',') return Token{ .COMMA = void{} };
         if (c == '(') return Token{ .LPAREN = void{} };
         if (c == ')') return Token{ .RPAREN = void{} };
         if (c == '{') return Token{ .LBRACE = void{} };
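
With COMMA in place, a call such as add(1, 2); is expected to tokenize (assuming the existing IDENTIFIER and NUMBER tokens) roughly as:

    IDENTIFIER("add") LPAREN NUMBER(1) COMMA NUMBER(2) RPAREN SEMICOLON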