summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--  src/main.zig      64
-rw-r--r--  src/tokenize.zig  66
2 files changed, 130 insertions, 0 deletions
diff --git a/src/main.zig b/src/main.zig
new file mode 100644
index 0000000..18239b1
--- /dev/null
+++ b/src/main.zig
@@ -0,0 +1,64 @@
+const std = @import("std");
+const tok = @import("tokenize.zig");
+
// Errors the compiler driver itself can return.
// NoInputFile: the user did not pass exactly one source-file argument (see main).
// NOTE(review): Zig convention is TitleCase for error-set types
// (GftCompilerError); left unrenamed because main() references this name.
const gftCompilerError = error{NoInputFile};
+
/// Compiler driver: reads the single source file named on the command line,
/// tokenizes it, emits x86-64 assembly to out/out.asm, then assembles with
/// nasm and links with ld. Returns NoInputFile when argc != 2.
pub fn main() !void {
    if (std.os.argv.len != 2) return gftCompilerError.NoInputFile;
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    var args = std.process.args();
    _ = args.skip(); // skip the program name
    const inputFileName = args.next();
    const inputFile = try std.fs.cwd().openFile(inputFileName.?, .{});
    defer inputFile.close();

    // Create the output directory; tolerate it already existing.
    std.fs.cwd().makeDir("out") catch |err| {
        if (err != error.PathAlreadyExists) return err;
    };
    const outfile = try std.fs.cwd().createFile("out/out.asm", .{});
    defer outfile.close();
    const outWriter = outfile.writer();

    // Read the whole source file (capped at 2048 bytes, as before).
    const all = try inputFile.readToEndAlloc(allocator, 2048);
    defer allocator.free(all);

    const toks = try tok.tokenize(allocator, all);
    defer allocator.free(toks);
    // BUG FIX: the original freed a `.ret` payload only on the fully matched
    // ret/int/semicolon path, leaking every other identifier slice and
    // everything left after an early `break`. Free each one exactly once
    // on scope exit instead.
    defer {
        for (toks) |t| {
            if (t == .ret) allocator.free(t.ret);
        }
    }

    var tokIter = tok.TokenIterator{ .tokens = toks };
    // BUG FIX: the original wrote "global _start:" (stray colon) and never
    // emitted the `_start:` label itself, so the assembled object had no
    // defined entry point for ld. Emit a proper text section and label.
    try outWriter.print("global _start\nsection .text\n_start:\n", .{});
    while (tokIter.next()) |t| {
        switch (t) {
            .ret => {
                // Expect: <int-literal> ';' after the keyword; otherwise
                // stop compiling (matches the original's `break` behavior).
                const num = tokIter.next() orelse break;
                if (num != .intLit) break;
                const semi = tokIter.next() orelse break;
                if (semi != .semiCol) break;
                // exit(intLit) via the Linux exit syscall (rax = 60).
                try outWriter.print(
                    \\ mov rax, 60
                    \\ mov rdi, {}
                    \\ syscall
                    \\
                , .{num.intLit});
            },
            else => {},
        }
    }

    // Assemble and link the generated file; stdout/stderr buffers are
    // allocated by run() and must be freed by us.
    const nasmargv = [_][]const u8{ "nasm", "-felf64", "out/out.asm" };
    const nasmproc = try std.ChildProcess.run(.{ .argv = &nasmargv, .allocator = allocator });
    defer allocator.free(nasmproc.stdout);
    defer allocator.free(nasmproc.stderr);

    const ldargv = [_][]const u8{ "ld", "-o", "out/out", "out/out.o" };
    const ldproc = try std.ChildProcess.run(.{ .argv = &ldargv, .allocator = allocator });
    defer allocator.free(ldproc.stdout);
    defer allocator.free(ldproc.stderr);
}
diff --git a/src/tokenize.zig b/src/tokenize.zig
new file mode 100644
index 0000000..b5d5d23
--- /dev/null
+++ b/src/tokenize.zig
@@ -0,0 +1,66 @@
+const std = @import("std");
+
// Error set for the tokenizer.
// NOTE(review): currently unreferenced in this file — tokenize() only
// propagates allocator/parseInt errors. Confirm before removing.
const TokenError = error{UnknownToken};

// One lexical token; tagged union so the payload travels with its tag.
const Token = union(enum) {
    // Any alphanumeric identifier (not only "return"). The slice is
    // heap-allocated by tokenize(); the consumer is expected to free it.
    ret: []const u8,
    intLit: i32, // decimal integer literal
    semiCol: u8, // always ';'
    nil: void,
};
+
// Forward-only cursor over a slice of tokens.
pub const TokenIterator = struct {
    tokens: []const Token,
    index: usize = 0,

    /// Returns the token under the cursor and advances it, or null once the
    /// slice is exhausted. The index keeps advancing on repeated exhausted
    /// calls, matching the original deferred-increment behavior.
    pub fn next(self: *TokenIterator) ?Token {
        const at = self.index;
        self.index = at + 1;
        if (at >= self.tokens.len) return null;
        return self.tokens[at];
    }
};
+
/// Splits `buff` into tokens. Caller owns the returned slice and, for each
/// `.ret` token, its heap-allocated identifier payload.
pub fn tokenize(allocator: std.mem.Allocator, buff: []const u8) ![]const Token {
    var toks = std.ArrayList(Token).init(allocator);
    defer toks.deinit();
    // BUG FIX: on an error path (e.g. parseInt overflow or OOM) every
    // already-appended `.ret` slice used to leak; release them here.
    // On success the caller owns them.
    errdefer {
        for (toks.items) |t| {
            if (t == .ret) allocator.free(t.ret);
        }
    }
    var str = std.ArrayList(u8).init(allocator);
    defer str.deinit();

    var i: usize = 0;
    while (i < buff.len) {
        switch (buff[i]) {
            ' ', '\n', '\t' => i += 1, // skip whitespace
            '0'...'9' => {
                // BUG FIX: bound the scan so a number that runs to the end
                // of the buffer does not index past buff.len.
                while (i < buff.len and std.ascii.isDigit(buff[i])) : (i += 1) {
                    try str.append(buff[i]);
                }
                const num = try std.fmt.parseInt(i32, str.items, 10);
                try toks.append(.{ .intLit = num });
                // Reuse the scratch buffer instead of deinit + re-init.
                str.clearRetainingCapacity();
            },
            'a'...'z', 'A'...'Z' => {
                // Same bounds fix as the digit scan above.
                while (i < buff.len and std.ascii.isAlphanumeric(buff[i])) : (i += 1) {
                    try str.append(buff[i]);
                }
                try toks.append(.{ .ret = try str.toOwnedSlice() });
                // toOwnedSlice already reset `str` to an empty list.
            },
            ';' => {
                i += 1;
                try toks.append(.{ .semiCol = ';' });
            },
            '+', '-', '*', '/' => {
                // TODO: operators are recognized but not yet tokenized.
                // BUG FIX: the original never advanced `i` here, so any
                // operator byte spun this loop forever.
                i += 1;
            },
            else => {
                // BUG FIX: likewise, an unrecognized byte used to hang the
                // tokenizer; skip it and keep going.
                i += 1;
            },
        }
    }
    return toks.toOwnedSlice();
}