while (1) {
if (parser->hit_unexpected_token) return root;
- if (parser->curr->type == '^') {
- AstPointerType* new = make_node(AstPointerType, Ast_Kind_Pointer_Type);
- new->flags |= Basic_Flag_Pointer;
- new->token = expect_token(parser, '^');
- *next_insertion = (AstType *) new;
- next_insertion = &new->elem;
- }
-
- else if (parser->curr->type == '[') {
- AstType *new;
- OnyxToken *open_bracket = expect_token(parser, '[');
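+        // Dispatch on the current token: each case either adds one more type
+        // "layer" through next_insertion or completes the type so the loop can end.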
+ switch (parser->curr->type) {
+ case '^': {
+ AstPointerType* new = make_node(AstPointerType, Ast_Kind_Pointer_Type);
+ new->flags |= Basic_Flag_Pointer;
+ new->token = expect_token(parser, '^');
+
+ *next_insertion = (AstType *) new;
+ next_insertion = &new->elem;
+ break;
+ }
- if (parser->curr->type == ']') {
- new = make_node(AstSliceType, Ast_Kind_Slice_Type);
- new->token = open_bracket;
+ case '[': {
+ AstType *new;
+ OnyxToken *open_bracket = expect_token(parser, '[');
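+                // "[]" parses as a slice, "[..]" as a dynamic array, and
+                // "[expr]" as a fixed-size array with a count expression.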
- } else if (parser->curr->type == Token_Type_Dot_Dot) {
- new = make_node(AstDynArrType, Ast_Kind_DynArr_Type);
- new->token = open_bracket;
- consume_token(parser);
+ if (parser->curr->type == ']') {
+ new = make_node(AstSliceType, Ast_Kind_Slice_Type);
+ new->token = open_bracket;
- } else {
- new = make_node(AstArrayType, Ast_Kind_Array_Type);
- new->token = open_bracket;
+ } else if (parser->curr->type == Token_Type_Dot_Dot) {
+ new = make_node(AstDynArrType, Ast_Kind_DynArr_Type);
+ new->token = open_bracket;
+ consume_token(parser);
- if (parser->curr->type == '$') {
- AstType** insertion = (AstType **) &((AstArrayType *) new)->count_expr;
- parse_polymorphic_variable(parser, &insertion);
} else {
- ((AstArrayType *) new)->count_expr = parse_expression(parser);
+ new = make_node(AstArrayType, Ast_Kind_Array_Type);
+ new->token = open_bracket;
+
+ if (parser->curr->type == '$') {
+ AstType** insertion = (AstType **) &((AstArrayType *) new)->count_expr;
+ parse_polymorphic_variable(parser, &insertion);
+ } else {
+ ((AstArrayType *) new)->count_expr = parse_expression(parser);
+ }
}
- }
- expect_token(parser, ']');
- *next_insertion = (AstType *) new;
- next_insertion = &((AstSliceType *) new)->elem;
- }
+ expect_token(parser, ']');
+ *next_insertion = (AstType *) new;
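+                // The cast relies on 'elem' sitting at the same offset in the slice,
+                // dynamic-array, and fixed-array nodes, so one pointer covers all three.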
+ next_insertion = &((AstSliceType *) new)->elem;
+ break;
+ }
- else if (parser->curr->type == Token_Type_Keyword_Proc) {
- OnyxToken* proc_token = expect_token(parser, Token_Type_Keyword_Proc);
+ case Token_Type_Keyword_Proc: {
+ OnyxToken* proc_token = expect_token(parser, Token_Type_Keyword_Proc);
- bh_arr(AstType *) params = NULL;
- bh_arr_new(global_scratch_allocator, params, 4);
- bh_arr_set_length(params, 0);
+ bh_arr(AstType *) params = NULL;
+ bh_arr_new(global_scratch_allocator, params, 4);
+ bh_arr_set_length(params, 0);
- expect_token(parser, '(');
- while (parser->curr->type != ')') {
- if (parser->hit_unexpected_token) return root;
+ expect_token(parser, '(');
+ while (parser->curr->type != ')') {
+ if (parser->hit_unexpected_token) return root;
- AstType* param_type = parse_type(parser);
- bh_arr_push(params, param_type);
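+                    // Permit an optional "name:" before each parameter type and skip it.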
+ if ((parser->curr + 1)->type == ':') {
+ expect_token(parser, Token_Type_Symbol);
+ expect_token(parser, ':');
+ }
- if (parser->curr->type != ')')
- expect_token(parser, ',');
- }
- consume_token(parser);
+ AstType* param_type = parse_type(parser);
+ bh_arr_push(params, param_type);
- AstType* return_type = (AstType *) &basic_type_void;
- if (parser->curr->type == Token_Type_Right_Arrow) {
+ if (parser->curr->type != ')')
+ expect_token(parser, ',');
+ }
consume_token(parser);
- return_type = parse_type(parser);
- }
- i64 param_count = bh_arr_length(params);
- AstFunctionType* new = onyx_ast_node_new(parser->allocator,
- sizeof(AstFunctionType) + sizeof(AstType*) * param_count,
- Ast_Kind_Function_Type);
- new->token = proc_token;
- new->param_count = param_count;
- new->return_type = return_type;
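+                // The return type defaults to void; "->" introduces an explicit one.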
+ AstType* return_type = (AstType *) &basic_type_void;
+ if (parser->curr->type == Token_Type_Right_Arrow) {
+ consume_token(parser);
+ return_type = parse_type(parser);
+ }
- if (param_count > 0)
- fori (i, 0, param_count) new->params[i] = params[i];
+ i64 param_count = bh_arr_length(params);
+ AstFunctionType* new = onyx_ast_node_new(parser->allocator,
+ sizeof(AstFunctionType) + sizeof(AstType*) * param_count,
+ Ast_Kind_Function_Type);
+ new->token = proc_token;
+ new->param_count = param_count;
+ new->return_type = return_type;
- *next_insertion = (AstType *) new;
- next_insertion = NULL;
- }
+ if (param_count > 0)
+ fori (i, 0, param_count) new->params[i] = params[i];
- else if (parser->curr->type == '$') {
- parse_polymorphic_variable(parser, &next_insertion);
- }
+ *next_insertion = (AstType *) new;
+ next_insertion = NULL;
+ break;
+ }
- else if (parser->curr->type == Token_Type_Symbol) {
- AstNode* symbol_node = make_node(AstNode, Ast_Kind_Symbol);
- symbol_node->token = expect_token(parser, Token_Type_Symbol);
+ case '$': {
+ parse_polymorphic_variable(parser, &next_insertion);
+ break;
+ }
- *next_insertion = (AstType *) symbol_node;
+ case Token_Type_Symbol: {
+ AstNode* symbol_node = make_node(AstNode, Ast_Kind_Symbol);
+ symbol_node->token = expect_token(parser, Token_Type_Symbol);
- while (parser->curr->type == '.') {
- consume_token(parser);
- AstFieldAccess* field = make_node(AstFieldAccess, Ast_Kind_Field_Access);
- field->token = expect_token(parser, Token_Type_Symbol);
- field->expr = (AstTyped *) *next_insertion;
+ *next_insertion = (AstType *) symbol_node;
- *next_insertion = (AstType *) field;
- }
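+                // Fold any trailing ".symbol" accesses into field-access nodes around the type.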
+ while (parser->curr->type == '.') {
+ consume_token(parser);
+ AstFieldAccess* field = make_node(AstFieldAccess, Ast_Kind_Field_Access);
+ field->token = expect_token(parser, Token_Type_Symbol);
+ field->expr = (AstTyped *) *next_insertion;
- if (parser->curr->type == '(') {
- OnyxToken* paren_token = expect_token(parser, '(');
+ *next_insertion = (AstType *) field;
+ }
- bh_arr(AstNode *) params = NULL;
- bh_arr_new(global_heap_allocator, params, 2);
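+                // A parenthesized list of types after the symbol builds a polymorphic call type.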
+ if (parser->curr->type == '(') {
+ OnyxToken* paren_token = expect_token(parser, '(');
- while (parser->curr->type != ')') {
- if (parser->hit_unexpected_token) break;
+ bh_arr(AstNode *) params = NULL;
+ bh_arr_new(global_heap_allocator, params, 2);
- AstNode* t = (AstNode *) parse_type(parser);
- bh_arr_push(params, t);
+ while (parser->curr->type != ')') {
+ if (parser->hit_unexpected_token) break;
- if (parser->curr->type != ')')
- expect_token(parser, ',');
- }
- expect_token(parser, ')');
+ AstNode* t = (AstNode *) parse_type(parser);
+ bh_arr_push(params, t);
- AstPolyCallType* pc_type = make_node(AstPolyCallType, Ast_Kind_Poly_Call_Type);
- pc_type->token = paren_token;
- pc_type->callee = *next_insertion;
- pc_type->params = params;
+ if (parser->curr->type != ')')
+ expect_token(parser, ',');
+ }
+ expect_token(parser, ')');
- *next_insertion = (AstType *) pc_type;
- }
+ AstPolyCallType* pc_type = make_node(AstPolyCallType, Ast_Kind_Poly_Call_Type);
+ pc_type->token = paren_token;
+ pc_type->callee = *next_insertion;
+ pc_type->params = params;
- next_insertion = NULL;
- }
+ *next_insertion = (AstType *) pc_type;
+ }
- else if (parser->curr->type == Token_Type_Keyword_Struct) {
- AstStructType* s_node = parse_struct(parser);
- *next_insertion = (AstType *) s_node;
- next_insertion = NULL;
- }
+ next_insertion = NULL;
+ break;
+ }
- else if (parse_possible_directive(parser, "value")) {
- // :ValueDirectiveHack
- *next_insertion = (AstType *) parse_expression(parser);
- next_insertion = NULL;
- break;
- }
+ case Token_Type_Keyword_Struct: {
+ AstStructType* s_node = parse_struct(parser);
+ *next_insertion = (AstType *) s_node;
+ next_insertion = NULL;
+ break;
+ }
- else if (parser->curr->type == '<') {
- // :TypeValueInterchange
- expect_token(parser, '<');
- *next_insertion = (AstType *) parse_expression(parser);
- next_insertion = NULL;
- expect_token(parser, '>');
+ case '#': {
+ // :ValueDirectiveHack
+ if (parse_possible_directive(parser, "value")) {
+                    // It is very weird to put these case labels here, but it lets
+                    // literal tokens jump straight into the same expression parse as #value.
+ case Token_Type_Literal_Integer:
+ case Token_Type_Literal_String:
+ case Token_Type_Literal_Float:
+ case Token_Type_Literal_True:
+ case Token_Type_Literal_False:
+ *next_insertion = (AstType *) parse_expression(parser);
+ next_insertion = NULL;
+ break;
+ }
- break;
- }
+ }
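+            // No break here: an unrecognized '#' directive falls through to the
+            // default case and is reported as an unexpected token.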
- else {
- onyx_report_error(parser->curr->pos, "unexpected token '%b'.", parser->curr->text, parser->curr->length);
- consume_token(parser);
- break;
+ default:
+ onyx_report_error(parser->curr->pos, "unexpected token '%b'.", parser->curr->text, parser->curr->length);
+ consume_token(parser);
+ break;
}
if (next_insertion == NULL) break;