path: root/src/ast.zig
const std = @import("std");
const tok = @import("tokenize.zig");
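// AST node definitions plus a small recursive-descent parser over the token
// stream produced by tokenize.zig.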

/// Errors the parser can produce while building the AST.
const ParseError = error{ SyntaxError, ExpectedToken };

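/// Binary operators that can appear in an expression.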
pub const BinOp = enum {
    Add,
    Sub,
    Mul,
    Div,
};

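/// Literal values; only 32-bit integers are supported for now.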
pub const Literal = union(enum) {
    Int: i32,
};

pub const Ast = union(enum) {
    expr: Expr,
    stmt: Stmt,

    /// An expression node.
    pub const Expr = struct {
        kind: ExprKind,
    };

    /// A statement node.
    pub const Stmt = struct {
        kind: StmtKind,
    };
};

const StmtKind = union(enum) {
    /// `ret <expr>;`: exit with the value of the expression.
    exit: Ast.Expr,
};

const ExprKind = union(enum) {
    Literal: Literal,
    BinaryOp: struct {
        op: BinOp,
        // Operands live behind pointers so this recursive type has a finite size.
        left: *Ast.Expr,
        right: *Ast.Expr,
    },
};

/// Returns true when `token`'s active tag matches the wanted token type.
fn checkType(token: tok.Token, typ: tok.TokenType) bool {
    // Switch prongs must be comptime-known, so compare the active tag directly.
    return std.meta.activeTag(token) == typ;
}

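/// Recursive-descent parser that turns the token iterator into AST nodes.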
const AstParser = struct {
    tokens: tok.Iterator(tok.Token),

    /// Parses a single statement, dispatching on the next token.
    fn parseStmt(self: *AstParser) ParseError!Ast.Stmt {
        return switch (self.tokens.peek() orelse return error.ExpectedToken) {
            // Only `ret` statements exist so far.
            .ret => try self.exitStmt(),
            else => error.SyntaxError,
        };
    }

    /// Parses an expression. Minimal sketch: only integer literals are handled here,
    /// and the `intLit` tag with an i32 payload is an assumed name from tokenize.zig.
    fn parseExpr(self: *AstParser) ParseError!Ast.Expr {
        return switch (self.tokens.consume() orelse return error.ExpectedToken) {
            .intLit => |value| Ast.Expr{ .kind = ExprKind{ .Literal = Literal{ .Int = value } } },
            else => error.SyntaxError,
        };
    }

    /// Parses `ret <expr> ;` into an exit statement.
    fn exitStmt(self: *AstParser) ParseError!Ast.Stmt {
        const ret_tok = self.tokens.consume() orelse return error.ExpectedToken;
        if (!checkType(ret_tok, tok.TokenType.ret)) return error.ExpectedToken;

        const value = try self.parseExpr();

        const semi_tok = self.tokens.consume() orelse return error.ExpectedToken;
        if (!checkType(semi_tok, tok.TokenType.semiCol)) return error.ExpectedToken;

        const kind = StmtKind{ .exit = value };
        return Ast.Stmt{ .kind = kind };
    }
};
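
// Usage sketch, assuming tokenize.zig exposes some way to build a token iterator
// (the `tok.tokenize` name below is hypothetical, not defined in this repo):
//
//     var parser = AstParser{ .tokens = tok.tokenize(source) };
//     const stmt = try parser.parseStmt();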