AstNode *body;
Scope *scope;
- bh_arr(AstTyped *) allocate_exprs;
-
Scope *binding_scope;
};
struct AstDefer { AstNode_base; AstNode *stmt; };
AstNode *stmt;
} DeferredStmt;
+typedef struct AllocatedSpace {
+ u64 depth;
+ AstTyped *expr;
+} AllocatedSpace;
+
typedef struct StrLitInfo {
u32 addr;
u32 len;
// NOTE: Mapping ptrs to elements
bh_imap elem_map;
- bh_arr(DeferredStmt) deferred_stmts;
+ bh_arr(DeferredStmt) deferred_stmts;
+ bh_arr(AllocatedSpace) local_allocations;
// NOTE: Used internally as a map from strings that represent function types,
// 0x7f 0x7f : 0x7f ( (i32, i32) -> i32 )
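A couple of worked encodings for the note above, assuming the standard single-byte wasm value-type codes (i32 = 0x7f, i64 = 0x7e, f32 = 0x7d, f64 = 0x7c) with ':' separating the parameter types from the return type, as the example in the note suggests:

    //   (i32, i32) -> i32   =>  0x7f 0x7f ':' 0x7f
    //   (f32, i64) -> f64   =>  0x7d 0x7e ':' 0x7c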
stmt->flags |= Ast_Flag_Expr_Ignored;
return check_binaryop((AstBinaryOp **) pstmt, 1);
+ // NOTE: Local variable declarations used to be removed after the symbol
+ // resolution phase because, long ago, all locals in a block had to be
+ // known up front in order to allocate enough space and registers for them
+ // all efficiently. Now, with LocalAllocator, this is no longer necessary,
+ // so locals stay in the tree and are passed along to code generation.
+ case Ast_Kind_Local: return Check_Success;
+
default:
stmt->flags |= Ast_Flag_Expr_Ignored;
return check_expression((AstTyped **) pstmt);
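One downstream consequence, sketched here with the forll iterator used later in this patch: any pass that still walks a block's statement chain will now see Ast_Kind_Local nodes and must treat them as ordinary (if inert) statements, rather than assuming symbol resolution removed them.

    // Sketch only: a statement walk that tolerates retained locals.
    forll (AstNode, stmt, block->body, next) {
        switch (stmt->kind) {
            case Ast_Kind_Local: break;   // stays in the tree; nothing to check
            default:             break;   // existing statement handling
        }
    }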
CheckStatus check_block(AstBlock* block) {
CHECK(statement_chain, &block->body);
- bh_arr_each(AstTyped *, value, block->allocate_exprs) {
- fill_in_type(*value);
-
- if ((*value)->kind == Ast_Kind_Local) {
- if ((*value)->type == NULL) {
- onyx_report_error((*value)->token->pos,
- "Unable to resolve type for local '%b'.",
- (*value)->token->text, (*value)->token->length);
- return Check_Error;
- }
-
- if ((*value)->type->kind == Type_Kind_Compound) {
- onyx_report_error((*value)->token->pos,
- "Compound type not allowed as local variable type. Try splitting this into multiple variables.");
- return Check_Error;
- }
- }
- }
+ // bh_arr_each(AstTyped *, value, block->allocate_exprs) {
+ // fill_in_type(*value);
+
+ // if ((*value)->kind == Ast_Kind_Local) {
+ // if ((*value)->type == NULL) {
+ // onyx_report_error((*value)->token->pos,
+ // "Unable to resolve type for local '%b'.",
+ // (*value)->token->text, (*value)->token->length);
+ // return Check_Error;
+ // }
+
+ // if ((*value)->type->kind == Type_Kind_Compound) {
+ // onyx_report_error((*value)->token->pos,
+ // "Compound type not allowed as local variable type. Try splitting this into multiple variables.");
+ // return Check_Error;
+ // }
+ // }
+ // }
return Check_Success;
}
case Ast_Kind_Block:
((AstBlock *) nn)->body = ast_clone_list(a, ((AstBlock *) node)->body);
- ((AstBlock *) nn)->allocate_exprs = NULL;
- bh_arr_new(global_heap_allocator, ((AstBlock *) nn)->allocate_exprs, 4);
break;
case Ast_Kind_Defer:
static AstBlock* parse_block(OnyxParser* parser) {
AstBlock* block = make_node(AstBlock, Ast_Kind_Block);
- bh_arr_new(global_heap_allocator, block->allocate_exprs, 4);
// NOTE: --- is for an empty block
if (parser->curr->type == Token_Type_Empty_Block) {
} SymresStatus;
static SymresStatus symres_type(AstType** type);
-static SymresStatus symres_local(AstLocal** local, b32 add_to_block_locals);
+static SymresStatus symres_local(AstLocal** local);
static SymresStatus symres_call(AstCall* call);
static SymresStatus symres_size_of(AstSizeOf* so);
static SymresStatus symres_align_of(AstAlignOf* so);
return Symres_Success;
}
-static SymresStatus symres_local(AstLocal** local, b32 add_to_block_locals) {
+static SymresStatus symres_local(AstLocal** local) {
SYMRES(type, &(*local)->type_node);
- // NOTE: This is a little gross, but it is allows for finer control
- // over when locals are in scope in a block, which reduces the number
- // of unique WASM locals and stack space needed.
- // - brendanfh 2020/12/16
- if (add_to_block_locals)
- bh_arr_push(bh_arr_last(block_stack)->allocate_exprs, (AstTyped *) *local);
-
bh_arr_push(curr_function->allocate_exprs, (AstTyped *) *local);
if ((*local)->token != NULL)
bh_arr_each(AstTyped *, expr, al->values)
SYMRES(expression, expr);
- if (bh_arr_length(block_stack) > 0) {
- bh_arr_push(bh_arr_last(block_stack)->allocate_exprs, (AstTyped *) al);
+ if (curr_function != NULL)
bh_arr_push(curr_function->allocate_exprs, (AstTyped *) al);
- }
return Symres_Success;
}
ifnode->scope = scope_create(context.ast_alloc, curr_scope, ifnode->token->pos);
scope_enter(ifnode->scope);
- SYMRES(local, &ifnode->local, 0);
+ SYMRES(local, &ifnode->local);
SYMRES(statement, (AstNode **) &ifnode->assignment, NULL);
}
whilenode->scope = scope_create(context.ast_alloc, curr_scope, whilenode->token->pos);
scope_enter(whilenode->scope);
- SYMRES(local, &whilenode->local, 0);
+ SYMRES(local, &whilenode->local);
SYMRES(statement, (AstNode **) &whilenode->assignment, NULL);
}
fornode->scope = scope_create(context.ast_alloc, curr_scope, fornode->token->pos);
scope_enter(fornode->scope);
SYMRES(expression, &fornode->iter);
- SYMRES(local, &fornode->var, 0);
+ SYMRES(local, &fornode->var);
SYMRES(block, fornode->stmt);
scope_leave();
case Ast_Kind_Jump: break;
case Ast_Kind_Local:
- if (remove) *remove = 1;
- SYMRES(local, (AstLocal **) stmt, 1);
+ // if (remove) *remove = 1;
+ SYMRES(local, (AstLocal **) stmt);
break;
case Ast_Kind_Use:
+//
+// There are several things I'm seeing in this file that I want to clean up.
+// They are:
+// [ ] remove the need to know if the stack is needed before generating the function.
+// Just leave 5 nops at the beginning because they will be automatically removed
+// by the WASM outputter.
+// [ ] remove the need to have "allocate_exprs" on blocks and in functions. This will
+// be easy once the above is done.
+// [ ] there should be a better way to emit pending deferred statements because there
+// is some code duplication between emit_return and emit_structured_jump.
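A rough sketch of the first item, assuming the WI(...) push macro and WI_NOP instruction kind already defined in onyxwasm: emit the placeholder slots unconditionally, patch them in place only if the function turns out to need stack space, and let the outputter drop any slots left as nops.

    // Sketch of the idea only, not part of the patch:
    fori (i, 0, 5) WI(WI_NOP);      // reserve 5 placeholder slots at entry
    // ... emit the function body ...
    // If the body needed stack space, overwrite the reserved slots in place
    // with the frame-setup instructions; untouched nops are then removed
    // when the code section is written out.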
+
#define BH_DEBUG
#include "onyxwasm.h"
#include "onyxutils.h"
EMIT_FUNC(function_body, AstFunction* fd);
EMIT_FUNC(block, AstBlock* block, b32 generate_block_headers);
EMIT_FUNC(statement, AstNode* stmt);
+EMIT_FUNC(local_allocation, AstTyped* stmt);
+EMIT_FUNC_NO_ARGS(free_local_allocations);
EMIT_FUNC(assignment, AstBinaryOp* assign);
EMIT_FUNC(assignment_of_array, AstTyped* left, AstTyped* right);
EMIT_FUNC(compound_assignment, AstBinaryOp* assign);
if (generate_block_headers)
emit_enter_structured_block(mod, &code, SBT_Breakable_Block);
- bh_arr_each(AstTyped *, expr, block->allocate_exprs)
- bh_imap_put(&mod->local_map, (u64) *expr, local_allocate(mod->local_alloc, *expr));
-
forll (AstNode, stmt, block->body, next) {
emit_statement(mod, &code, stmt);
}
emit_deferred_stmts(mod, &code, (AstNode *) block);
-
- bh_arr_each(AstTyped *, expr, block->allocate_exprs)
- local_free(mod->local_alloc, *expr);
+ emit_free_local_allocations(mod, &code);
if (generate_block_headers)
emit_leave_structured_block(mod, &code);
case Ast_Kind_Jump: emit_structured_jump(mod, &code, (AstJump *) stmt); break;
case Ast_Kind_Block: emit_block(mod, &code, (AstBlock *) stmt, 1); break;
case Ast_Kind_Defer: emit_defer(mod, &code, (AstDefer *) stmt); break;
+ case Ast_Kind_Local: emit_local_allocation(mod, &code, (AstTyped *) stmt); break;
default: emit_expression(mod, &code, (AstTyped *) stmt); break;
}
*pcode = code;
}
+EMIT_FUNC(local_allocation, AstTyped* stmt) {
+ bh_imap_put(&mod->local_map, (u64) stmt, local_allocate(mod->local_alloc, stmt));
+
+ bh_arr_push(mod->local_allocations, ((AllocatedSpace) {
+ .depth = bh_arr_length(mod->structured_jump_target),
+ .expr = stmt,
+ }));
+}
+
+EMIT_FUNC_NO_ARGS(free_local_allocations) {
+ if (bh_arr_length(mod->local_allocations) == 0) return;
+
+ u64 depth = bh_arr_length(mod->structured_jump_target);
+ while (bh_arr_length(mod->local_allocations) > 0 && bh_arr_last(mod->local_allocations).depth == depth) {
+ // CHECK: Not sure this next line is okay to be here...
+ bh_imap_delete(&mod->local_map, (u64) bh_arr_last(mod->local_allocations).expr);
+
+ local_free(mod->local_alloc, bh_arr_last(mod->local_allocations).expr);
+ bh_arr_pop(mod->local_allocations);
+ }
+}
+
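Why the depth field: bh_arr_length(mod->structured_jump_target) serves as the current block-nesting depth both when an allocation is recorded (emit_local_allocation) and when a block is exited, so emit_free_local_allocations pops only the entries made at the depth being left and keeps outer allocations alive. An illustrative trace with two nested blocks (relative depths):

    // depth 0: allocate x   -> local_allocations = [ {0, x} ]
    // enter inner block     -> depth becomes 1
    // depth 1: allocate y   -> local_allocations = [ {0, x}, {1, y} ]
    // leave inner block:    free_local_allocations pops {1, y},
    //                       stops at {0, x} (depth mismatch)
    // leave outer block:    free_local_allocations pops {0, x}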
EMIT_FUNC(assignment, AstBinaryOp* assign) {
bh_arr(WasmInstruction) code = *pcode;
}
case Ast_Kind_Array_Literal: {
+ emit_local_allocation(mod, &code, expr);
emit_array_literal(mod, &code, (AstArrayLiteral *) expr);
break;
}
if (type_get_param_pass(*param_type) == Param_Pass_By_Implicit_Pointer) {
*(t++) = (char) onyx_type_to_wasm_type(&basic_types[Basic_Kind_Rawptr]);
- } else {
- if (type_is_structlike_strict(*param_type)) {
- u32 mem_count = type_structlike_mem_count(*param_type);
- StructMember smem;
+ }
+ else if (type_is_structlike_strict(*param_type)) {
+ u32 mem_count = type_structlike_mem_count(*param_type);
+ StructMember smem;
- fori (i, 0, mem_count) {
- type_lookup_member_by_idx(*param_type, i, &smem);
- *(t++) = (char) onyx_type_to_wasm_type(smem.type);
- }
+ fori (i, 0, mem_count) {
+ type_lookup_member_by_idx(*param_type, i, &smem);
+ *(t++) = (char) onyx_type_to_wasm_type(smem.type);
+ }
- param_count += mem_count - 1;
+ param_count += mem_count - 1;
- } else {
- *(t++) = (char) onyx_type_to_wasm_type(*param_type);
- }
+ } else {
+ *(t++) = (char) onyx_type_to_wasm_type(*param_type);
}
param_type++;
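To make the count adjustment concrete: a struct-like parameter passed by value contributes one wasm param per member, so one logical parameter expands to mem_count wasm params and the running total is corrected by mem_count - 1. A worked example (illustrative member types):

    // struct { a: i32; b: f32; } passed by value:
    //   emitted wasm param types: 0x7f (i32), 0x7d (f32)
    //   param_count += 2 - 1;   // one logical parameter became two wasm params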
bh_imap_init(&module.elem_map, global_heap_allocator, 16);
bh_arr_new(global_heap_allocator, module.deferred_stmts, 4);
+ bh_arr_new(global_heap_allocator, module.local_allocations, 4);
WasmExport mem_export = {
.kind = WASM_FOREIGN_MEMORY,