Working more on the parser
author Brendan Hansen <brendan.f.hansen@gmail.com>
Tue, 19 May 2020 22:59:20 +0000 (17:59 -0500)
committer Brendan Hansen <brendan.f.hansen@gmail.com>
Tue, 19 May 2020 22:59:20 +0000 (17:59 -0500)
onyx
onyx.c
onyxlex.c
onyxlex.h
onyxmsgs.c
onyxparser.c
onyxparser.h
progs/minimal.onyx

diff --git a/onyx b/onyx
index 1cada87b18070428c1c4e5a7f8e4f39090250da9..bd859e2258a9ab525bdcccf169e25f0891856924 100755 (executable)
Binary files a/onyx and b/onyx differ
diff --git a/onyx.c b/onyx.c
index 8406fc07ec0206b25ad9e4faca790c114e19a07f..3840fa00fa648085a09dd87c9b73454774e8b40c 100644 (file)
--- a/onyx.c
+++ b/onyx.c
@@ -1,5 +1,5 @@
 #define BH_NO_STRING
-#define BH_DEBUG
+// #define BH_DEBUG
 #define BH_DEFINE
 #include "bh.h"
 
@@ -23,16 +23,16 @@ int main(int argc, char *argv[]) {
        bh_file_close(&source_file);
 
        OnyxTokenizer tokenizer = onyx_tokenizer_create(alloc, &fc);
-       onyx_parse_tokens(&tokenizer);
+       onyx_lex_tokens(&tokenizer);
        bh_arr(OnyxToken) token_arr = tokenizer.tokens;
 
-       printf("There are %d tokens (Allocated space for %d tokens)\n", bh_arr_length(token_arr), bh_arr_capacity(token_arr));
+       // printf("There are %d tokens (Allocated space for %d tokens)\n", bh_arr_length(token_arr), bh_arr_capacity(token_arr));
 
-       for (OnyxToken* it = token_arr; !bh_arr_end(token_arr, it); it++) {
-               onyx_token_null_toggle(*it);
-               printf("%s '%s' (%s:%ld:%ld)\n", onyx_get_token_type_name(*it), it->token, it->pos.filename, it->pos.line, it->pos.column);
-               onyx_token_null_toggle(*it);
-       }
+       // for (OnyxToken* it = token_arr; !bh_arr_end(token_arr, it); it++) {
+       //      onyx_token_null_toggle(*it);
+       //      printf("%s '%s' (%s:%ld:%ld)\n", onyx_get_token_type_name(*it), it->token, it->pos.filename, it->pos.line, it->pos.column);
+       //      onyx_token_null_toggle(*it);
+       // }
 
        bh_arena msg_arena;
        bh_arena_init(&msg_arena, alloc, 4096);
@@ -52,6 +52,7 @@ int main(int argc, char *argv[]) {
 
        bh_file_contents_delete(&fc);
        onyx_tokenizer_free(&tokenizer);
+       onyx_parser_free(&parser);
        bh_arena_free(&msg_arena);
        bh_arena_free(&ast_arena);
 
diff --git a/onyxlex.c b/onyxlex.c
index 2f790a1b88696caf9bda79f99eb2c8251251c155..ca877d6c70547382fae56cafbcd60bcd6b7cc887 100644 (file)
--- a/onyxlex.c
+++ b/onyxlex.c
@@ -7,45 +7,45 @@ static const char* onyx_token_type_names[] = {
 
        "TOKEN_TYPE_COMMENT",
 
-       "TOKEN_TYPE_KEYWORD_STRUCT",
-       "TOKEN_TYPE_KEYWORD_USE",
-       "TOKEN_TYPE_KEYWORD_EXPORT",
-       "TOKEN_TYPE_KEYWORD_IF",
-       "TOKEN_TYPE_KEYWORD_ELSE",
-       "TOKEN_TYPE_KEYWORD_FOR",
-       "TOKEN_TYPE_KEYWORD_DO",
-       "TOKEN_TYPE_KEYWORD_RETURN",
-       "TOKEN_TYPE_KEYWORD_FOREIGN",
-       "TOKEN_TYPE_KEYWORD_PROC",
-       "TOKEN_TYPE_KEYWORD_GLOBAL",
-
-       "TOKEN_TYPE_RIGHT_ARROW",
-       "TOKEN_TYPE_LEFT_ARROW",
-       "TOKEN_TYPE_OPEN_PAREN",
-       "TOKEN_TYPE_CLOSE_PAREN",
-       "TOKEN_TYPE_OPEN_BRACE",
-       "TOKEN_TYPE_CLOSE_BRACE",
-       "TOKEN_TYPE_OPEN_BRACKET",
-       "TOKEN_TYPE_CLOSE_BRACKET",
-       "TOKEN_TYPE_OPEN_ANGLE",
-       "TOKEN_TYPE_CLOSE_ANGLE",
-
-       "TOKEN_TYPE_SYM_PLUS",
-       "TOKEN_TYPE_SYM_MINUS",
-       "TOKEN_TYPE_SYM_STAR",
-       "TOKEN_TYPE_SYM_PERCENT",
-       "TOKEN_TYPE_SYM_DOT",
-       "TOKEN_TYPE_SYM_FSLASH",
-       "TOKEN_TYPE_SYM_BSLASH",
-       "TOKEN_TYPE_SYM_COLON",
-       "TOKEN_TYPE_SYM_SEMICOLON",
-       "TOKEN_TYPE_SYM_COMMA",
-       "TOKEN_TYPE_SYM_EQUALS",
-       "TOKEN_TYPE_SYM_GRAVE",
-       "TOKEN_TYPE_SYM_TILDE",
-       "TOKEN_TYPE_SYM_BANG",
-       "TOKEN_TYPE_SYM_CARET",
-       "TOKEN_TYPE_SYM_AMPERSAND",
+       "struct",               //"TOKEN_TYPE_KEYWORD_STRUCT",
+       "use",                  //"TOKEN_TYPE_KEYWORD_USE",
+       "export",               //"TOKEN_TYPE_KEYWORD_EXPORT",
+       "if",                   //"TOKEN_TYPE_KEYWORD_IF",
+       "else",                 //"TOKEN_TYPE_KEYWORD_ELSE",
+       "for",                  //"TOKEN_TYPE_KEYWORD_FOR",
+       "do",                   //"TOKEN_TYPE_KEYWORD_DO",
+       "return",               //"TOKEN_TYPE_KEYWORD_RETURN",
+       "foreign",              //"TOKEN_TYPE_KEYWORD_FOREIGN",
+       "proc",                 //"TOKEN_TYPE_KEYWORD_PROC",
+       "global",               //"TOKEN_TYPE_KEYWORD_GLOBAL",
+
+       "->", //"TOKEN_TYPE_RIGHT_ARROW",
+       "<-", //"TOKEN_TYPE_LEFT_ARROW",
+       "(",  //"TOKEN_TYPE_OPEN_PAREN",
+       ")",  //"TOKEN_TYPE_CLOSE_PAREN",
+       "{",  //"TOKEN_TYPE_OPEN_BRACE",
+       "}",  //"TOKEN_TYPE_CLOSE_BRACE",
+       "[",  //"TOKEN_TYPE_OPEN_BRACKET",
+       "]",  //"TOKEN_TYPE_CLOSE_BRACKET",
+       "<",  //"TOKEN_TYPE_OPEN_ANGLE",
+       ">",  //"TOKEN_TYPE_CLOSE_ANGLE",
+
+       "+",  // "TOKEN_TYPE_SYM_PLUS",
+       "-",  // "TOKEN_TYPE_SYM_MINUS",
+       "*",  // "TOKEN_TYPE_SYM_STAR",
+       "%",  // "TOKEN_TYPE_SYM_PERCENT",
+       ".",  // "TOKEN_TYPE_SYM_DOT",
+       "/",  // "TOKEN_TYPE_SYM_FSLASH",
+       "\\", // "TOKEN_TYPE_SYM_BSLASH",
+       ":",  // "TOKEN_TYPE_SYM_COLON",
+       ";",  // "TOKEN_TYPE_SYM_SEMICOLON",
+       ",",  // "TOKEN_TYPE_SYM_COMMA",
+       "=",  // "TOKEN_TYPE_SYM_EQUALS",
+       "`",  // "TOKEN_TYPE_SYM_GRAVE",
+       "~",  // "TOKEN_TYPE_SYM_TILDE",
+       "!",  // "TOKEN_TYPE_SYM_BANG",
+       "^",  // "TOKEN_TYPE_SYM_CARET",
+       "&",  // "TOKEN_TYPE_SYM_AMPERSAND",
 
        "TOKEN_TYPE_SYMBOL",
        "TOKEN_TYPE_LITERAL_STRING",
@@ -85,8 +85,8 @@ static b32 token_lit(OnyxTokenizer* tokenizer, OnyxToken* tk, char* lit, OnyxTok
        return 0;
 }
 
-const char* onyx_get_token_type_name(OnyxToken tkn) {
-       return onyx_token_type_names[tkn.type];
+const char* onyx_get_token_type_name(OnyxTokenType tkn_type) {
+       return onyx_token_type_names[tkn_type];
 }
 
 void onyx_token_null_toggle(OnyxToken tkn) {
@@ -267,7 +267,7 @@ void onyx_tokenizer_free(OnyxTokenizer* tokenizer) {
        bh_arr_free(tokenizer->tokens);
 }
 
-void onyx_parse_tokens(OnyxTokenizer* tokenizer) {
+void onyx_lex_tokens(OnyxTokenizer* tokenizer) {
        OnyxToken* tk;
        do {
                tk = onyx_get_token(tokenizer);
diff --git a/onyxlex.h b/onyxlex.h
index 9a3b3f1231cc6448fa2fc2ba0784006d5dfef0f6..1fbce3a91569b2e88d3ce0c7891551b6755e9f14 100644 (file)
--- a/onyxlex.h
+++ b/onyxlex.h
@@ -79,11 +79,11 @@ typedef struct OnyxTokenizer {
        bh_arr(OnyxToken) tokens;
 } OnyxTokenizer;
 
-const char* onyx_get_token_type_name(OnyxToken tkn);
+const char* onyx_get_token_type_name(OnyxTokenType tkn_type);
 void onyx_token_null_toggle(OnyxToken tkn);
 OnyxToken* onyx_get_token(OnyxTokenizer* tokenizer);
 OnyxTokenizer onyx_tokenizer_create(bh_allocator allocator, bh_file_contents *fc);
 void onyx_tokenizer_free(OnyxTokenizer* tokenizer);
-void onyx_parse_tokens(OnyxTokenizer* tokenizer);
+void onyx_lex_tokens(OnyxTokenizer* tokenizer);
 
 #endif
diff --git a/onyxmsgs.c b/onyxmsgs.c
index 9838fa020231d983407f3ea432febc7523aeae1d..81f7b1dffd816d5c81da6158b8c832f2e2d529f0 100644 (file)
--- a/onyxmsgs.c
+++ b/onyxmsgs.c
@@ -2,7 +2,7 @@
 #include "onyxmsgs.h"
 
 static const char* msg_formats[] = {
-       "expected token '%s'",
+       "expected token '%s', got '%s'",
        "unexpected token '%s'",
        "unknown type '%s'"
 };
diff --git a/onyxparser.c b/onyxparser.c
index 002dd6ce2aa0afef0bcd8307e998e4479e6b9203..f194266b75b4d79ce6add6e1fc0ef76cbf587b6c 100644 (file)
--- a/onyxparser.c
+++ b/onyxparser.c
@@ -5,24 +5,24 @@
 struct OnyxTypeInfo builtin_types[] = {
        { ONYX_TYPE_INFO_KIND_UNKNOWN, 0, "unknown" },
        { ONYX_TYPE_INFO_KIND_VOID, 0, "void" },
-
+    
        { ONYX_TYPE_INFO_KIND_BOOL, 1, "bool", 0, 0, 0, 1 },
-
+    
        { ONYX_TYPE_INFO_KIND_UINT8, 1, "u8", 1, 1, 0, 0 },
        { ONYX_TYPE_INFO_KIND_UINT16, 2, "u16", 1, 1, 0, 0 },
        { ONYX_TYPE_INFO_KIND_UINT32, 4, "u32", 1, 1, 0, 0 },
        { ONYX_TYPE_INFO_KIND_UINT64, 8, "u64", 1, 1, 0, 0 },
-
+    
        { ONYX_TYPE_INFO_KIND_INT8, 1, "i8", 1, 0, 0, 0 },
        { ONYX_TYPE_INFO_KIND_INT16, 2, "i16", 1, 0, 0, 0 },
        { ONYX_TYPE_INFO_KIND_INT32, 4, "i32", 1, 0, 0, 0 },
        { ONYX_TYPE_INFO_KIND_INT64, 8, "i64", 1, 0, 0, 0 },
-
+    
        { ONYX_TYPE_INFO_KIND_FLOAT32, 4, "f32", 0, 0, 1, 0 },
        { ONYX_TYPE_INFO_KIND_FLOAT64, 8, "f64", 0, 0, 1, 0 },
        { ONYX_TYPE_INFO_KIND_SOFT_FLOAT, 8, "sf64", 0, 0, 1, 0 },
-
-       { 0xffffffff }
+    
+       { 0xffffffff } // Sentinel
 };
 
 static OnyxAstNode error_node = { { ONYX_AST_NODE_KIND_ERROR, 0, NULL, &builtin_types[0], NULL, NULL, NULL } };
@@ -30,16 +30,17 @@ static OnyxAstNode error_node = { { ONYX_AST_NODE_KIND_ERROR, 0, NULL, &builtin_
 static void parser_next_token(OnyxParser* parser) {
        parser->prev_token = parser->curr_token;
        parser->curr_token++;
+       while (parser->curr_token->type == TOKEN_TYPE_COMMENT) parser->curr_token++;
 }
 
 static b32 is_terminating_token(OnyxTokenType token_type) {
        switch (token_type) {
-       case TOKEN_TYPE_SYM_SEMICOLON:
-       case TOKEN_TYPE_CLOSE_BRACE:
-       case TOKEN_TYPE_OPEN_BRACE:
-       case TOKEN_TYPE_END_STREAM:
+        case TOKEN_TYPE_SYM_SEMICOLON:
+        case TOKEN_TYPE_CLOSE_BRACE:
+        case TOKEN_TYPE_OPEN_BRACE:
+        case TOKEN_TYPE_END_STREAM:
                return 1;
-       default:
+        default:
                return 0;
        }
 }
@@ -47,12 +48,16 @@ static b32 is_terminating_token(OnyxTokenType token_type) {
 // Advances to next token no matter what
 static OnyxToken* expect(OnyxParser* parser, OnyxTokenType token_type) {
        OnyxToken* token = parser->curr_token;
+       parser_next_token(parser);
+    
        if (token->type != token_type) {
-               onyx_message_add(parser->msgs, ONYX_MESSAGE_TYPE_EXPECTED_TOKEN, token->pos, onyx_get_token_type_name(*token));
+               onyx_message_add(parser->msgs,
+                         ONYX_MESSAGE_TYPE_EXPECTED_TOKEN,
+                         token->pos,
+                         onyx_get_token_type_name(token_type), onyx_get_token_type_name(token->type));
                return NULL;
        }
-
-       parser_next_token(parser);
+    
        return token;
 }
 
@@ -64,175 +69,199 @@ static OnyxAstNode* parse_if_stmt(OnyxParser* parser) {
        return &error_node;
 }
 
-static OnyxAstNode* parse_block(OnyxParser* parser) {
-       assert(parser->curr_token->type == TOKEN_TYPE_OPEN_BRACE);
-
-       return &error_node;
-}
-
 static OnyxAstNode* parse_expression_statement(OnyxParser* parser) {
-
+    
 }
 
 static OnyxAstNode* parse_return_statement(OnyxParser* parser) {
-       // Only should get here with a return as the current token
-       assert(parser->curr_token->type == TOKEN_TYPE_KEYWORD_RETURN);
-
+       expect(parser, TOKEN_TYPE_KEYWORD_RETURN);
+    
        OnyxAstNode* expr = NULL;
-
-       OnyxToken* return_token = parser->curr_token;
+    
        parser_next_token(parser);
        if (parser->curr_token->type != TOKEN_TYPE_SYM_SEMICOLON) {
                expr = parse_expression(parser);
-
+        
                if (expr == &error_node) {
                        return &error_node;
                }
        }
 }
 
-static OnyxAstNode* parse_statement(OnyxParser* parser, b32 is_top_level) {
+static OnyxAstNodeBlock* parse_block(OnyxParser* parser);
+
+static OnyxAstNode* parse_statement(OnyxParser* parser) {
        switch (parser->curr_token->type) {
-       case TOKEN_TYPE_KEYWORD_RETURN:
+        case TOKEN_TYPE_KEYWORD_RETURN:
                return parse_return_statement(parser);
-
-       case TOKEN_TYPE_OPEN_BRACE:
+        
+        case TOKEN_TYPE_OPEN_BRACE:
                return (OnyxAstNode *) parse_block(parser);
-
-       case TOKEN_TYPE_SYMBOL:
-       case TOKEN_TYPE_OPEN_PAREN:
-       case TOKEN_TYPE_SYM_PLUS:
-       case TOKEN_TYPE_SYM_MINUS:
-       case TOKEN_TYPE_SYM_BANG:
-       case TOKEN_TYPE_LITERAL_NUMERIC:
-       case TOKEN_TYPE_LITERAL_STRING:
+        
+        case TOKEN_TYPE_SYMBOL:
+        case TOKEN_TYPE_OPEN_PAREN:
+        case TOKEN_TYPE_SYM_PLUS:
+        case TOKEN_TYPE_SYM_MINUS:
+        case TOKEN_TYPE_SYM_BANG:
+        case TOKEN_TYPE_LITERAL_NUMERIC:
+        case TOKEN_TYPE_LITERAL_STRING:
                return parse_expression_statement(parser);
-
-       case TOKEN_TYPE_KEYWORD_IF:
+        
+        case TOKEN_TYPE_KEYWORD_IF:
                return parse_if_stmt(parser);
-
-       case TOKEN_TYPE_SYM_SEMICOLON:
+        
+        case TOKEN_TYPE_SYM_SEMICOLON:
                return NULL;
-
-       default:
+        
+        default:
                printf("ERROR\n");
                parser_next_token(parser);
                return NULL;
        }
 }
 
+static OnyxAstNodeBlock* parse_block(OnyxParser* parser) {
+       // --- is for an empty block
+       if (parser->curr_token->type == TOKEN_TYPE_SYM_MINUS) {
+               expect(parser, TOKEN_TYPE_SYM_MINUS);
+               expect(parser, TOKEN_TYPE_SYM_MINUS);
+               expect(parser, TOKEN_TYPE_SYM_MINUS);
+               return NULL;
+       }
+    
+       expect(parser, TOKEN_TYPE_OPEN_BRACE);
+    
+       OnyxAstNodeBlock* block = (OnyxAstNodeBlock *) onyx_ast_node_new(parser->allocator, ONYX_AST_NODE_KIND_BLOCK);
+    
+       OnyxAstNode** next = &block->body;
+       OnyxAstNode* stmt = NULL;
+       while (parser->curr_token->type != TOKEN_TYPE_CLOSE_BRACE) {
+               stmt = parse_statement(parser);
+        
+               if (stmt != NULL && stmt->kind != ONYX_AST_NODE_KIND_ERROR) {
+                       *next = stmt;
+                       next = &stmt->next;
+               }
+        
+               expect(parser, TOKEN_TYPE_SYM_SEMICOLON);
+       }
+    
+       return block;
+}
+
 static OnyxTypeInfo* parse_type(OnyxParser* parser) {
        OnyxTypeInfo* type_info = &builtin_types[ONYX_TYPE_INFO_KIND_UNKNOWN];
-
+    
        OnyxToken* symbol = expect(parser, TOKEN_TYPE_SYMBOL);
        if (symbol == NULL) return type_info;
-
+    
        onyx_token_null_toggle(*symbol);
-
+    
        if (!bh_hash_has(OnyxAstNode*, parser->identifiers, symbol->token)) {
                onyx_message_add(parser->msgs, ONYX_MESSAGE_TYPE_UNKNOWN_TYPE, symbol->pos, symbol->token);
        } else {
                OnyxAstNode* type_info_node = bh_hash_get(OnyxAstNode*, parser->identifiers, symbol->token);
-
+        
                if (type_info_node->kind == ONYX_AST_NODE_KIND_TYPE) {
                        type_info = type_info_node->type;
                }
        }
-
+    
        onyx_token_null_toggle(*symbol);
        return type_info;
 }
 
 static OnyxAstNodeParam* parse_function_params(OnyxParser* parser) {
        expect(parser, TOKEN_TYPE_OPEN_PAREN);
-
+    
        if (parser->curr_token->type == TOKEN_TYPE_CLOSE_PAREN) {
                parser_next_token(parser);
                return NULL;
        }
-
+    
        OnyxAstNodeParam* first_param = NULL;
-
+    
        OnyxAstNodeParam* curr_param = NULL;
-       OnyxAstNodeParam** walker = NULL;
-
+       OnyxAstNodeParam* trailer = NULL;
+    
        OnyxToken* symbol;
        while (parser->curr_token->type != TOKEN_TYPE_CLOSE_PAREN) {
                if (parser->curr_token->type == TOKEN_TYPE_SYM_COMMA) parser_next_token(parser);
-
+        
                symbol = expect(parser, TOKEN_TYPE_SYMBOL);
+        
                curr_param = (OnyxAstNodeParam *) onyx_ast_node_new(parser->allocator, ONYX_AST_NODE_KIND_PARAM);
                curr_param->token = symbol;
                curr_param->type = parse_type(parser);
-
+        
+               if (first_param == NULL) first_param = curr_param;
+        
                curr_param->next = NULL;
-               if (first_param == NULL) {
-                       first_param = curr_param;
-               } else {
-                       (*walker)->next = curr_param;
-               }
-               walker = &curr_param;
+               if (trailer) trailer->next = curr_param;
+        
+               trailer = curr_param;
        }
-
+    
        parser_next_token(parser); // Skip the )
        return first_param;
 }
 
 static OnyxAstNodeFuncDef* parse_function_definition(OnyxParser* parser) {
        expect(parser, TOKEN_TYPE_KEYWORD_PROC);
-
+    
        OnyxAstNodeFuncDef* func_def = (OnyxAstNodeFuncDef *) onyx_ast_node_new(parser->allocator, ONYX_AST_NODE_KIND_FUNCDEF);
        func_def->param_count = 0;
-
+    
        OnyxAstNodeParam* params = parse_function_params(parser);
        func_def->params = params;
-
+    
        for (OnyxAstNode* walker = (OnyxAstNode *) params; walker != NULL; walker = walker->next)
                func_def->param_count++;
-
+    
        expect(parser, TOKEN_TYPE_RIGHT_ARROW);
-
+    
        OnyxTypeInfo* return_type = parse_type(parser);
        func_def->return_type = return_type;
-
-       func_def->body = NULL;
+    
+       func_def->body = parse_block(parser);
        return func_def;
 }
 
 
 static OnyxAstNode* parse_top_level_statement(OnyxParser* parser) {
        switch (parser->curr_token->type) {
-       case TOKEN_TYPE_KEYWORD_USE:
+        case TOKEN_TYPE_KEYWORD_USE:
                assert(0);
                break;
-
-       case TOKEN_TYPE_KEYWORD_EXPORT:
+        
+        case TOKEN_TYPE_KEYWORD_EXPORT:
                assert(0);
                break;  
-
-       case TOKEN_TYPE_SYMBOL: {
-               OnyxToken* symbol = parser->curr_token;
-               parser_next_token(parser);
-
-               expect(parser, TOKEN_TYPE_SYM_COLON);
-               expect(parser, TOKEN_TYPE_SYM_COLON);
-
-               if (parser->curr_token->type == TOKEN_TYPE_KEYWORD_PROC) {
-                       OnyxAstNodeFuncDef* func_def = parse_function_definition(parser);
-                       func_def->token = symbol;
-                       return (OnyxAstNode *) func_def;
-
-               } else if (parser->curr_token->type == TOKEN_TYPE_KEYWORD_STRUCT) {
-                       // Handle struct case
-                       assert(0);
-               } else {
-                       onyx_message_add(parser->msgs,
-                               ONYX_MESSAGE_TYPE_UNEXPECTED_TOKEN,
-                               parser->curr_token->pos,
-                               onyx_get_token_type_name(*parser->curr_token));
-               }
-       } break;
+        
+        case TOKEN_TYPE_SYMBOL: {
+            OnyxToken* symbol = parser->curr_token;
+            parser_next_token(parser);
+            
+            expect(parser, TOKEN_TYPE_SYM_COLON);
+            expect(parser, TOKEN_TYPE_SYM_COLON);
+            
+            if (parser->curr_token->type == TOKEN_TYPE_KEYWORD_PROC) {
+                OnyxAstNodeFuncDef* func_def = parse_function_definition(parser);
+                func_def->token = symbol;
+                return (OnyxAstNode *) func_def;
+                
+            } else if (parser->curr_token->type == TOKEN_TYPE_KEYWORD_STRUCT) {
+                // Handle struct case
+                assert(0);
+            } else {
+                onyx_message_add(parser->msgs,
+                                 ONYX_MESSAGE_TYPE_UNEXPECTED_TOKEN,
+                                 parser->curr_token->pos,
+                                 onyx_get_token_type_name(parser->curr_token->type));
+            }
+        } break;
        }
+    
        parser_next_token(parser);
        return NULL;
 }
@@ -246,15 +275,15 @@ static OnyxAstNode* parse_top_level_statement(OnyxParser* parser) {
 OnyxAstNode* onyx_ast_node_new(bh_allocator alloc, OnyxAstNodeKind kind) {\
        OnyxAstNode* node = (OnyxAstNode *) bh_alloc(alloc, sizeof(OnyxAstNode));
        node->kind = kind;
-
+    
        return node;
 }
 
 OnyxParser onyx_parser_create(bh_allocator alloc, OnyxTokenizer *tokenizer, OnyxMessages* msgs) {
        OnyxParser parser;
-
+    
        bh_hash_init(bh_heap_allocator(), parser.identifiers);
-
+    
        OnyxTypeInfo* it = &builtin_types[0];
        while (it->kind != 0xffffffff) {
                OnyxAstNode* tmp = onyx_ast_node_new(alloc, ONYX_AST_NODE_KIND_TYPE);
@@ -262,30 +291,34 @@ OnyxParser onyx_parser_create(bh_allocator alloc, OnyxTokenizer *tokenizer, Onyx
                bh_hash_put(OnyxAstNode*, parser.identifiers, (char *)it->name, tmp);
                it++;
        }
-
+    
        parser.allocator = alloc;
        parser.tokenizer = tokenizer;
        parser.curr_token = tokenizer->tokens;
        parser.prev_token = NULL;
        parser.msgs = msgs;
-
+    
        return parser;
 }
 
+void onyx_parser_free(OnyxParser* parser) {
+       bh_hash_free(parser->identifiers);
+}
+
 OnyxAstNode* onyx_parse(OnyxParser *parser) {
        OnyxAstNode* program = onyx_ast_node_new(parser->allocator, ONYX_AST_NODE_KIND_PROGRAM);
-
+    
        OnyxAstNode** prev_stmt = &program->next;
        OnyxAstNode* curr_stmt = NULL;
        while (parser->curr_token->type != TOKEN_TYPE_END_STREAM) {
                curr_stmt = parse_top_level_statement(parser);
-
+        
                // Building a linked list of statements down the "next" chain
                if (curr_stmt != NULL && curr_stmt != &error_node) {
                        *prev_stmt = curr_stmt;
                        prev_stmt = &curr_stmt->next;
                }
        }
-
+    
        return program;
 }
diff --git a/onyxparser.h b/onyxparser.h
index 5b2731fd9201401dbaf4e89eabfd44a436decf21..54c2553c291dfe85c5653ce87240add29bae440c 100644 (file)
--- a/onyxparser.h
+++ b/onyxparser.h
@@ -104,8 +104,8 @@ struct OnyxAstNodeBlock {
        OnyxToken *token;
        OnyxTypeInfo *return_type;
        OnyxAstNode *next;
+       OnyxAstNode *body;
        OnyxAstNode *unused1;
-       OnyxAstNode *unused2;
 };
 
 struct OnyxAstNodeParam {
@@ -142,10 +142,13 @@ union OnyxAstNode {
        };
 
        OnyxAstNodeBlock as_block;
+       OnyxAstNodeFuncDef as_funcdef;
+       OnyxAstNodeParam as_param;
 };
 
 OnyxAstNode* onyx_ast_node_new(bh_allocator alloc, OnyxAstNodeKind kind);
 OnyxParser onyx_parser_create(bh_allocator alloc, OnyxTokenizer *tokenizer, OnyxMessages* msgs);
+void onyx_parser_free(OnyxParser* parser);
 OnyxAstNode* onyx_parse(OnyxParser *parser);
 
 #endif // #ifndef ONYXPARSER_H
\ No newline at end of file
diff --git a/progs/minimal.onyx b/progs/minimal.onyx
index 0d4bee4b33677b668a55ef2e761bcf4811def043..9985f5a79f917bb357e26132c65eed7d5e7bdaf2 100644 (file)
--- a/progs/minimal.onyx
+++ b/progs/minimal.onyx
@@ -1,3 +1,8 @@
-add :: proc(a i32, b i32) -> i32 {
+/* This is a comment */
+
+log :: proc (a i32, b i64) -> i32 ---;
+
+add :: proc (a i32, b i32) -> i32 {
+       /* More comments */
        return a + b;
-};
+};
\ No newline at end of file