path: root/src/bootstrap/tokenizer.src
author    Baitinq <[email protected]>  2025-05-13 09:24:37 +0200
committer Baitinq <[email protected]>  2025-05-14 18:27:53 +0200
commit    bdfd55720ce0703ba83b96fa45ba0c0ddb5b85ab (patch)
tree      eb5bd662c3f42c37d18291b56ce20d9a98837bc2 /src/bootstrap/tokenizer.src
parent    Feature: Add support for break statement (diff)
bootstrap: tokenizer: collect tokens into a flat buffer and add print_tokens
Diffstat (limited to 'src/bootstrap/tokenizer.src')
-rw-r--r--  src/bootstrap/tokenizer.src  |  47
1 file changed, 46 insertions(+), 1 deletion(-)
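
The patch below is the whole change. In outline: add_token appends each token, terminating '\0' included, to one flat heap buffer tracked by the globals tokens/tokens_len, and print_tokens replays that buffer, printing a newline in place of each '\0'. For readers following along in another language, here is a minimal C sketch of the same flat-buffer technique (the main driver and its sample tokens are illustrative, not from the repo):

    /* A minimal C sketch (not part of the repo) of the flat token buffer
     * the patch introduces: tokens are appended back to back, each kept
     * NUL-terminated, and printed by turning every NUL into a newline.
     * Like the patch, it does no bounds checking on the fixed buffer. */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    static char  *tokens;     /* one flat buffer, as in the patch */
    static size_t tokens_len; /* bytes used so far */

    static void add_token(const char *token) {
        size_t n = strlen(token) + 1;         /* copy the trailing '\0' too */
        memcpy(tokens + tokens_len, token, n);
        tokens_len += n;
    }

    static void print_tokens(void) {
        for (size_t i = 0; i < tokens_len; i++)
            putchar(tokens[i] == '\0' ? '\n' : tokens[i]);
    }

    int main(void) {
        tokens = malloc(10000);               /* fixed size, mirroring the patch */
        add_token("let");
        add_token("x");
        print_tokens();                       /* prints "let" and "x", one per line */
        free(tokens);
        return 0;
    }
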
diff --git a/src/bootstrap/tokenizer.src b/src/bootstrap/tokenizer.src
index 1583a6d..5ac8948 100644
--- a/src/bootstrap/tokenizer.src
+++ b/src/bootstrap/tokenizer.src
@@ -7,6 +7,9 @@ let buf = 0;
 
 let offset = 0;
 
+let tokens = 0;
+let tokens_len = 0;
+
 let read_file = (filename: *i8) => *i8 {
 	file = fopen(filename, "r");
 
@@ -22,6 +25,40 @@ let read_file = (filename: *i8) => *i8 {
 	return buf;
 };
 
+let add_token = (tokens: *i8, token: *i8) => i64 {
+	let i = 0;
+	while true {
+		let c = (*(token + i));
+
+		(*(tokens + tokens_len)) = c;
+
+		tokens_len = tokens_len + 1;
+		i = i + 1;
+
+		if c == '\0' {
+			return 0;
+		};
+	};
+
+	return 0;
+};
+
+let print_tokens = (tokens: *i8) => i64 {
+	let i = 0;
+	while i < tokens_len {
+		let c = (*(tokens + i));
+		if c == '\0' {
+			c = '\n';
+		};
+
+		printf("%c", c);
+
+		i = i + 1;
+	};
+
+	return 0;
+};
+
 let tokenizer_next = () => *i8 {
 	if offset >= file_size {
 		return "EOF";
@@ -45,19 +82,27 @@ let tokenizer_init = (filename: *i8) => i64 {
 
 	println("%s", buf);
 
+	tokens = malloc(10000);
+
 	while true {
 		let t = tokenizer_next();
 		if strcmp(t, "EOF") {
-			return 0;
+			break;
 		};
 		println("%s", t);
+		add_token(tokens, t);
 		free(t);
 	};
 
+	println("PRINT TOKENS");
+
+	print_tokens(tokens);
+
 	return 0;
 };
 
 let tokenizer_deinit = () => i64 {
+	free(tokens);
 	free(buf);
 	fclose(file);