diff options
| author | Nic Gaffney <gaffney_nic@protonmail.com> | 2024-08-12 00:44:26 -0500 |
|---|---|---|
| committer | Nic Gaffney <gaffney_nic@protonmail.com> | 2024-08-12 00:44:26 -0500 |
| commit | cf80bb7f1b6fb4ee1d08d3d6850966b4951274b5 (patch) | |
| tree | b7521e70e2e921aef64f58ca2ce04de89f408c8c /src/tokenize.zig | |
| parent | 013086f9c55e33f7bfcfd775c8b9d03fb2311de4 (diff) | |
| download | calico-cf80bb7f1b6fb4ee1d08d3d6850966b4951274b5.tar.gz | |
Calling functions and function args now work.
You can also import functions now.
Diffstat (limited to 'src/tokenize.zig')
| -rw-r--r-- | src/tokenize.zig | 50 |
1 file changed, 44 insertions(+), 6 deletions(-)
diff --git a/src/tokenize.zig b/src/tokenize.zig index 770f483..aa3788c 100644 --- a/src/tokenize.zig +++ b/src/tokenize.zig @@ -4,17 +4,21 @@ pub const TokenizeError = error{ UnknownToken, UnexpectedEOF, ExpectedToken, + TokenIteratorOnly, }; pub const TokenType = enum { // Runtime Values ident, + stringLit, intLit, + charLit, // Keywords constant, variable, exit, fun, + import, // Operators plus, minus, @@ -27,18 +31,25 @@ pub const TokenType = enum { closeBrace, openParen, closeParen, + openBracket, + closeBracket, + colon, + comma, arrow, }; pub const Token = union(TokenType) { //RuntimeVar ident: []const u8, + stringLit: []const u8, intLit: i32, + charLit: u8, // Keywords constant, variable, exit, fun, + import, // Operators plus, minus, @@ -51,6 +62,10 @@ pub const Token = union(TokenType) { closeBrace, openParen, closeParen, + openBracket, + closeBracket, + colon, + comma, arrow, pub fn fromChar(char: u8) !Token { @@ -65,7 +80,14 @@ pub const Token = union(TokenType) { '}' => .closeBrace, '(' => .openParen, ')' => .closeParen, - else => TokenizeError.UnknownToken, + '[' => .openBracket, + ']' => .closeBracket, + ':' => .colon, + ',' => .comma, + else => { + // std.debug.print("{c}: ", .{char}); + return TokenizeError.UnknownToken; + }, }; } @@ -73,8 +95,9 @@ pub const Token = union(TokenType) { const eql = std.mem.eql; if (eql(u8, str, "return")) return .exit; if (eql(u8, str, "const")) return .constant; - if (eql(u8, str, "var")) return .variable; + if (eql(u8, str, "varbl")) return .variable; if (eql(u8, str, "fn")) return .fun; + if (eql(u8, str, "import")) return .import; return Token{ .ident = str }; } }; @@ -114,10 +137,12 @@ pub fn Iterator(comptime typ: type) type { return ret; } - pub fn consume(self: *Iterator(typ), comptime expected: TokenType) !?typ { - if (typ != Token) return error.TokenIteratorOnly; - if (!checkType(self.peek().?, expected)) + pub fn consume(self: *Iterator(typ), comptime expected: TokenType) error{ ExpectedToken, 
TokenIteratorOnly }!?typ { + if (typ != Token) return TokenizeError.TokenIteratorOnly; + if (!checkType(self.peek().?, expected)) { + // std.debug.print("Got {}, expected {}\n", .{ self.peek().?, expected }); return TokenizeError.ExpectedToken; + } return self.next(); } @@ -147,8 +172,10 @@ pub const Tokenizer = struct { /// Releases allocated memory pub fn deinit(self: *Tokenizer) void { for (self.toks.items) |token| { - if (checkType(token, TokenType.ident)) + if (checkType(token, .ident)) self.allocator.free(token.ident); + if (checkType(token, .stringLit)) + self.allocator.free(token.stringLit); } self.toks.deinit(); } @@ -187,6 +214,17 @@ pub const Tokenizer = struct { if (!checkType(token, TokenType.ident)) self.allocator.free(str); buff.clearAndFree(); }, + '"' => { + _ = self.src.next(); + while (self.src.peek().? != '"') + try buff.append(self.src.next().?); + + _ = self.src.next(); + // std.debug.print("{c}\n", .{self.src.peek().?}); + const token = Token{ .stringLit = try buff.toOwnedSlice() }; + try self.toks.append(token); + buff.clearAndFree(); + }, else => self.toks.append(try Token.fromChar(self.src.next().?)), }; } |
