summaryrefslogtreecommitdiff
path: root/src/parser.zig
blob: 4252e3bbab5cf5ab62b5190e22a5644ca2aacb3c (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
const std = @import("std");
const tok = @import("tokenizer.zig");
const Token = tok.Token;

/// A parsed type: either a function type or a primitive type.
/// Tagged (`union(enum)`) so values can be switched on and the
/// active field checked at runtime — a bare `union` makes any
/// wrong-field access silent undefined behavior and cannot be
/// used in a `switch`.
const typ = union(enum) {
    /// Function type. Ownership of the pointed-to `typ` values is
    /// not established here; presumably the parser's allocator owns
    /// them — TODO confirm once construction code exists.
    Funct: struct {
        inputtyp: []*typ,
        bodytyp: *typ,
        rettyp: *typ,
    },
    /// Primitive type, carrying a payload value of that type.
    Type: union(enum) {
        Int: u32,
        Float: f32,
        Bool: bool,
        String: []const u8,
    },
};

/// Maps type names to their parsed types.
/// Keys are `[]const u8` slices, so this must be `StringHashMap`
/// (which hashes the slice contents); `std.AutoHashMap` rejects
/// slice keys with a compile error. Note: StringHashMap does NOT
/// copy keys — the token text must outlive the map.
var typmap: std.StringHashMap(typ) = undefined;

/// Cursor into the token slice, shared by the parsing functions.
var index: u32 = 0;

/// Parse a token stream. Initializes the global `typmap`, then
/// dispatches on the first token. Errors from the sub-parsers are
/// logged rather than propagated so the public `void` signature
/// (and existing callers) are preserved.
pub fn parse(allocator: std.mem.Allocator, toks: []Token) void {
    typmap = std.StringHashMap(typ).init(allocator);

    // Empty input: nothing to parse.
    if (toks.len == 0) return;

    switch (toks[0]) {
        // `typedef` needs the allocator (the original call omitted it)
        // and returns `!Token`; discard the token, report the error.
        .LPAREN => _ = typedef(allocator, toks) catch |err| {
            std.log.err("typedef failed: {s}", .{@errorName(err)});
        },
        .BUILTIN => |b| _ = builtin(toks, b),
        .FUNC => |f| _ = funcdef(toks, f),
        // Any other leading token is not a valid top-level form.
        else => std.log.err("unexpected token at start of input", .{}),
    }
}

/// Parse a type definition starting just past the global `index`.
/// Advances `index` to the terminating `.PERIOD` token and returns it.
///
/// NOTE(review): the original body was work-in-progress ("fix this
/// please"); this rewrite fixes the compile errors (error set vs.
/// error value, missing `try`, zero-prong switch, ArrayList init)
/// and adds bounds checks, but the typ-construction logic is still
/// TODO — confirm the intended representation before relying on it.
fn typedef(allocator: std.mem.Allocator, toks: []Token) !Token {
    index += 1;
    if (index >= toks.len) return error.UnexpectedEndOfInput;
    const name = toks[index];

    switch (name) {
        .FUNC => {
            // Collect the tokens naming the function's input types.
            // TODO: convert each token into a `typ` value — the
            // original appended raw tokens to an ArrayList(typ),
            // which does not type-check.
            var inputArr: std.ArrayListUnmanaged(Token) = .empty;
            defer inputArr.deinit(allocator);

            while (index < toks.len and toks[index] != .RPAREN) : (index += 1) {
                // `error.UnexpectedToken` is an error VALUE; the
                // original returned the error SET `error{...}`.
                if (toks[index] != .LARROW) return error.UnexpectedToken;
                index += 1;
                if (index >= toks.len) return error.UnexpectedEndOfInput;
                // append can fail with OutOfMemory — must propagate.
                try inputArr.append(allocator, toks[index]);
            }
        },
        // Only function type definitions are handled so far.
        else => return error.UnexpectedToken,
    }

    // Skip forward to the terminating period.
    while (index < toks.len and toks[index] != .PERIOD) : (index += 1) {}
    if (index >= toks.len) return error.UnexpectedEndOfInput;
    return toks[index];
}
/// Stub: builtin-call parsing is not implemented yet.
/// The original declared a `Token` return type with no return
/// statement (a compile error); `@panic` (noreturn) satisfies the
/// signature without inventing a Token value.
fn builtin(toks: []Token, fun: []const u8) Token {
    _ = fun;
    _ = toks;
    @panic("builtin: not implemented");
}
/// Stub: function-definition parsing is not implemented yet.
/// The original declared a `Token` return type with no return
/// statement (a compile error); `@panic` (noreturn) satisfies the
/// signature without inventing a Token value.
fn funcdef(toks: []Token, fun: []const u8) Token {
    _ = fun;
    _ = toks;
    @panic("funcdef: not implemented");
}