expect_no_stored_tags_pos(parser, parser->curr->pos);
}
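+// Parses any number of consecutive #tag / @ tag groups, pushing each tag
+// expression onto parser->stored_tags. Returns 1 if at least one tag was parsed.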
+static b32 parse_possible_tag(OnyxParser *parser) {
+ b32 parsed = 0;
+ while (parse_possible_directive(parser, "tag") || consume_token_if_next(parser, '@')) {
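+ // While tag_depth > 0, flush_stored_tags will not attach tags (see flush_stored_tags below).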
+ parser->tag_depth += 1;
+ parsed = 1;
+
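+ // A single #tag/@ may carry several comma-separated tag expressions.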
+ do {
+ AstTyped* expr = parse_expression(parser, 0);
+ bh_arr_push(parser->stored_tags, expr);
+ } while (consume_token_if_next(parser, ','));
+
+ parser->tag_depth -= 1;
+ }
+
+ return parsed;
+}
+
static void flush_stored_tags(OnyxParser *parser, bh_arr(AstTyped *) *out_arr) {
//
// When tag_depth > 0, no tags will be added to the element.
}
}
-static void parse_meta_tags(OnyxParser *parser, bh_arr(AstTyped *) *out_arr) {
- bh_arr(AstTyped *) meta_tags = NULL;
- while (parse_possible_directive(parser, "tag") || consume_token_if_next(parser, '@')) {
- if (meta_tags == NULL) bh_arr_new(global_heap_allocator, meta_tags, 1);
-
- parser->tag_depth += 1;
-
- do {
- AstTyped* expr = parse_expression(parser, 0);
- bh_arr_push(meta_tags, expr);
- } while (consume_token_if_next(parser, ','));
-
- parser->tag_depth -= 1;
- }
-
- *out_arr = meta_tags;
-}
-
static AstStructType* parse_struct(OnyxParser* parser) {
OnyxToken *s_token = expect_token(parser, Token_Type_Keyword_Struct);
while (!consume_token_if_next(parser, '}')) {
if (parser->hit_unexpected_token) return s_node;
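+ // Collect any #tag/@ tags written before this struct member.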
+ parse_possible_tag(parser);
+
if (parse_possible_directive(parser, "persist")) {
b32 thread_local = parse_possible_directive(parser, "thread_local");
}
bh_arr(AstTyped *) meta_tags=NULL;
- parse_meta_tags(parser, &meta_tags);
+ flush_stored_tags(parser, &meta_tags);
if (parser->curr->type == '}') {
consume_token(parser);
while (!consume_token_if_next(parser, '}')) {
if (parser->hit_unexpected_token) return u_node;
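+ // Collect any tags preceding this member.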
+ parse_possible_tag(parser);
+
if (next_tokens_are(parser, 3, Token_Type_Symbol, ':', ':')) {
OnyxToken* binding_name = expect_token(parser, Token_Type_Symbol);
consume_token(parser);
}
bh_arr(AstTyped *) meta_tags=NULL;
- parse_meta_tags(parser, &meta_tags);
+ flush_stored_tags(parser, &meta_tags);
if (parser->curr->type == '}') {
consume_token(parser);
AstBinding* binding = NULL;
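+ // Any top-level #tag/@ tags are stored on the parser; return so the tagged item is parsed as the next statement.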
+ if (parse_possible_tag(parser)) return;
+
switch ((u16) parser->curr->type) {
case Token_Type_Keyword_Use: {
OnyxToken *use_token = expect_token(parser, Token_Type_Keyword_Use);
ENTITY_SUBMIT(library);
return;
}
- else if (parse_possible_directive(parser, "tag")) {
- parser->tag_depth += 1;
-
- AstTyped *expr = parse_expression(parser, 0);
- bh_arr_push(parser->stored_tags, expr);
-
- parser->tag_depth -= 1;
- return;
- }
else if (parse_possible_directive(parser, "doc")) {
// This is a future feature I want to add to the language: proper docstrings.
// For now (and so I start documenting things...), #doc can be used anywhere
break;
}
- case '@': {
- consume_token(parser);
- parser->tag_depth += 1;
-
- AstTyped *expr = parse_expression(parser, 0);
- bh_arr_push(parser->stored_tags, expr);
-
- parser->tag_depth -= 1;
- return;
- }
-
default: break;
}