static void compile_opts_free(OnyxCompileOptions* opts) {
bh_arr_free(opts->files);
+ bh_arr_free(opts->included_folders);
}
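
included_folders joins files as a bh_arr on OnyxCompileOptions, so it must be released here as well. A minimal sketch of the matching setup on the options side (the allocator name and the "." default are assumptions, not shown in this diff):

    bh_arr_new(alloc, options.included_folders, 2);   // allocate the include search-path array
    bh_arr_push(options.included_folders, ".");       // seed it with the current directory
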
static char* lookup_included_file(CompilerState* cs, OnyxToken* filename) {
static char path[256];
- fori (i, 0, 511) path[i] = 0;
+ fori (i, 0, 255) path[i] = 0;
static char fn[128];
+ fori (i, 0, 127) fn[i] = 0;
token_toggle_end(filename);
if (!bh_str_ends_with(filename->text, ".onyx")) {
bh_snprintf(fn, 128, "%s.onyx", filename->text);
}
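
The remainder of lookup_included_file is elided above. A plausible sketch of how it could walk included_folders to resolve fn, assuming a bh_file_exists helper and that the elided else-branch copies the name into fn unchanged:

    // Hypothetical continuation, not part of this diff:
    bh_arr_each(const char *, folder, cs->options->included_folders) {
        bh_snprintf(path, 256, "%s/%s", *folder, fn);    // candidate path in this search folder
        if (bh_file_exists(path)) {
            token_toggle_end(filename);                  // restore the token before returning
            return path;
        }
    }

    token_toggle_end(filename);
    return fn;                                           // fall back to the bare filename
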
static void merge_parse_results(CompilerState* compiler_state, ParseResults* results) {
- bh_arr_each(AstIncludeFile *, include, results->files) {
- char* filename = lookup_included_file(compiler_state, (*include)->filename);
- char* formatted_name = bh_strdup(global_heap_allocator, filename);
-
- bh_arr_push(compiler_state->queued_files, formatted_name);
+ bh_arr_each(AstInclude *, include, results->includes) {
+ if ((*include)->kind == Ast_Kind_Include_File) {
+ char* filename = lookup_included_file(compiler_state, (*include)->name);
+ char* formatted_name = bh_strdup(global_heap_allocator, filename);
+
+ bh_arr_push(compiler_state->queued_files, formatted_name);
+ } else if ((*include)->kind == Ast_Kind_Include_Folder) {
+ const char* folder = bh_aprintf(global_heap_allocator, "%b", (*include)->name->text, (*include)->name->length);
+ bh_arr_push(compiler_state->options->included_folders, folder);
+ }
}
Entity ent;
is_private = 1;
}
- switch (parser->curr->type) {
+ switch ((u16) parser->curr->type) {
case Token_Type_Keyword_Use: {
OnyxToken* use_token = expect_token(parser, Token_Type_Keyword_Use);
- if (parser->curr->type == Token_Type_Keyword_Package) {
- consume_token(parser);
-
- AstUsePackage* upack = make_node(AstUsePackage, Ast_Kind_Use_Package);
- upack->token = use_token;
+ expect_token(parser, Token_Type_Keyword_Package);
- AstNode* pack_symbol = make_node(AstNode, Ast_Kind_Symbol);
- pack_symbol->token = expect_token(parser, Token_Type_Symbol);
- upack->package = (AstPackage *) pack_symbol;
+ AstUsePackage* upack = make_node(AstUsePackage, Ast_Kind_Use_Package);
+ upack->token = use_token;
- if (parser->curr->type == Token_Type_Keyword_As) {
- consume_token(parser);
- upack->alias = expect_token(parser, Token_Type_Symbol);
- }
+ AstNode* pack_symbol = make_node(AstNode, Ast_Kind_Symbol);
+ pack_symbol->token = expect_token(parser, Token_Type_Symbol);
+ upack->package = (AstPackage *) pack_symbol;
- if (parser->curr->type == '{') {
- consume_token(parser);
-
- bh_arr_new(global_heap_allocator, upack->only, 4);
+ if (parser->curr->type == Token_Type_Keyword_As) {
+ consume_token(parser);
+ upack->alias = expect_token(parser, Token_Type_Symbol);
+ }
- while (parser->curr->type != '}') {
- if (parser->hit_unexpected_token) return NULL;
+ if (parser->curr->type == '{') {
+ consume_token(parser);
- AstAlias* alias = make_node(AstAlias, Ast_Kind_Alias);
- alias->token = expect_token(parser, Token_Type_Symbol);
+ bh_arr_new(global_heap_allocator, upack->only, 4);
- if (parser->curr->type == Token_Type_Keyword_As) {
- consume_token(parser);
- alias->alias = expect_token(parser, Token_Type_Symbol);
- } else {
- alias->alias = alias->token;
- }
+ while (parser->curr->type != '}') {
+ if (parser->hit_unexpected_token) return NULL;
- bh_arr_push(upack->only, alias);
+ AstAlias* alias = make_node(AstAlias, Ast_Kind_Alias);
+ alias->token = expect_token(parser, Token_Type_Symbol);
- if (parser->curr->type != '}')
- expect_token(parser, ',');
+ if (parser->curr->type == Token_Type_Keyword_As) {
+ consume_token(parser);
+ alias->alias = expect_token(parser, Token_Type_Symbol);
+ } else {
+ alias->alias = alias->token;
}
- consume_token(parser);
- }
+ bh_arr_push(upack->only, alias);
- add_node_to_process(parser, (AstNode *) upack);
- return NULL;
-
- } else {
- AstIncludeFile* include = make_node(AstIncludeFile, Ast_Kind_Include_File);
- include->token = use_token;
- include->filename = expect_token(parser, Token_Type_Literal_String);
+ if (parser->curr->type != '}')
+ expect_token(parser, ',');
+ }
- return (AstNode *) include;
+ consume_token(parser);
}
+
+ add_node_to_process(parser, (AstNode *) upack);
+ return NULL;
}
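
With the old else-branch removed, use is always followed by the package keyword. From the expect_token/consume_token sequence above, the accepted source forms would look roughly like this (package and symbol names are placeholders):

    use package core
    use package core as c
    use package core { printf, open_file as open }
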
case Token_Type_Keyword_Proc:
}
}
+ case '#': {
+ while (parser->curr->type == '#') {
+ OnyxToken* dir_token = parser->curr;
+
+ if (parse_possible_directive(parser, "include_file")) {
+ AstInclude* include = make_node(AstInclude, Ast_Kind_Include_File);
+ include->token = dir_token;
+ include->name = expect_token(parser, Token_Type_Literal_String);
+
+ return (AstNode *) include;
+ }
+ else if (parse_possible_directive(parser, "include_folder")) {
+ AstInclude* include = make_node(AstInclude, Ast_Kind_Include_Folder);
+ include->token = dir_token;
+ include->name = expect_token(parser, Token_Type_Literal_String);
+
+ return (AstNode *) include;
+ }
+ else {
+ onyx_message_add(Msg_Type_Unknown_Directive,
+ dir_token->pos, dir_token->text, dir_token->length);
+ return NULL;
+ }
+ }
+ }
+
default: break;
}
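
File inclusion moves out of use and into explicit directives. From the parse_possible_directive calls above, the source-level spelling would be roughly as follows (paths are placeholders):

    #include_file "core/stdio"
    #include_folder "./modules"
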
parser.results = (ParseResults) {
.allocator = global_heap_allocator,
- .files = NULL,
+ .includes = NULL,
.nodes_to_process = NULL,
};
- bh_arr_new(parser.results.allocator, parser.results.files, 4);
+ bh_arr_new(parser.results.allocator, parser.results.includes, 4);
bh_arr_new(parser.results.allocator, parser.results.nodes_to_process, 4);
return parser;
while (curr_stmt != NULL) {
switch (curr_stmt->kind) {
- case Ast_Kind_Include_File: bh_arr_push(parser->results.files, (AstIncludeFile *) curr_stmt); break;
+ case Ast_Kind_Include_File:
+ case Ast_Kind_Include_Folder:
+ bh_arr_push(parser->results.includes, (AstInclude *) curr_stmt);
+ break;
case Ast_Kind_Binding: {
if (((AstBinding *) curr_stmt)->node->flags & Ast_Flag_Private_Package) {
symbol_introduce(parser->package->private_scope,