s_type->Struct.alignment = alignment;
s_type->Struct.size = size;
- s_type->Struct.linear_members = NULL;
- bh_arr_new(global_heap_allocator, s_type->Struct.linear_members, s_type->Struct.mem_count);
- build_linear_types_with_offset(s_type, &s_type->Struct.linear_members, 0);
-
s_type->Struct.status = SPS_Members_Done;
return s_type;
}
type->Struct.alignment = alignment;
type->Struct.size = size;
- type->Struct.linear_members = NULL;
- bh_arr_new(global_heap_allocator, type->Struct.linear_members, type->Struct.mem_count);
- build_linear_types_with_offset(type, &type->Struct.linear_members, 0);
type->Struct.status = SPS_Uses_Done;
lit->generated_inferred_type = type;
}
void build_linear_types_with_offset(Type* type, bh_arr(TypeWithOffset)* pdest, u32 offset) {
+ // nocheckin :StructAsm
+ /*
if (type_is_structlike_strict(type)) {
u32 mem_count = type_structlike_mem_count(type);
StructMember smem = { 0 };
build_linear_types_with_offset(smem.type, pdest, offset + smem.offset);
}
- } else if (type->kind == Type_Kind_Compound) {
+ } else */
+
+ if (type->kind == Type_Kind_Compound) {
u32 elem_offset = 0;
fori (i, 0, type->Compound.count) {
build_linear_types_with_offset(type->Compound.types[i], pdest, offset + elem_offset);
case Type_Kind_VarArgs: return 2;
case Type_Kind_DynArray: return 5;
case Type_Kind_Compound: return bh_arr_length(type->Compound.linear_members);
- case Type_Kind_Struct: return bh_arr_length(type->Struct.linear_members);
default: return 1;
}
}
return 1;
}
case Type_Kind_Compound: *two = type->Compound.linear_members[idx]; return 1;
- case Type_Kind_Struct: *two = type->Struct.linear_members[idx]; return 1;
case Type_Kind_Distinct:
two->type = type->Distinct.base_type;
return -1;
}
- case Type_Kind_Struct: {
- i32 idx = 0;
- bh_arr_each(TypeWithOffset, two, type->Struct.linear_members) {
- if (two->offset == offset) return idx;
- idx++;
- }
-
+ default:
+ if (offset == 0) return 0;
return -1;
- }
- default: return -1;
}
}
-b32 type_struct_is_simple(Type* type) {
+/*b32 type_struct_is_simple(Type* type) {
if (type->kind != Type_Kind_Struct) return 0;
b32 is_simple = 1;
bh_arr_each(StructMember *, mem, type->Struct.memarr) {
- if (type_is_compound((*mem)->type) || (*mem)->type->kind == Type_Kind_Array) {
+ if (type_linear_member_count((*mem)->type) != 1
+     || (*mem)->type->kind == Type_Kind_Array) {
is_simple = 0;
break;
}
}
return is_simple;
-}
+}*/
b32 type_is_pointer(Type* type) {
if (type == NULL) return 0;
b32 type_is_compound(Type* type) {
if (type == NULL) return 0;
- if (type->kind == Type_Kind_Struct) {
- //
- // This is for the kind of common case where a structure simply wraps a
- // single non-compound value; in this situation, the structure can be
- // "dissolved" at compile-time and turn into the underlying type.
- //
-
- if (bh_arr_length(type->Struct.linear_members) != 1) return 1;
- return type_is_compound(type->Struct.linear_members[0].type);
- }
-
return type->kind != Type_Kind_Basic
&& type->kind != Type_Kind_Pointer
&& type->kind != Type_Kind_Enum
&& type->kind != Type_Kind_Function
&& type->kind != Type_Kind_Array
- && type->kind != Type_Kind_Distinct;
+ && type->kind != Type_Kind_Distinct
+ && type->kind != Type_Kind_Struct
+ && type->kind != Type_Kind_DynArray;
}
b32 type_is_simd(Type* type) {
u32 type_structlike_is_simple(Type* type) {
if (type == NULL) return 0;
switch (type->kind) {
- case Type_Kind_Struct: return type_struct_is_simple(type);
case Type_Kind_Slice: return 1;
case Type_Kind_VarArgs: return 1;
case Type_Kind_DynArray: return 0;
+ case Type_Kind_Struct: return 0; // :StructAsm type_struct_is_simple(type);
default: return 0;
}
}
#define WASM_TYPE_FUNC WASM_TYPE_INT32
#define WASM_TYPE_VOID 0x00
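+// Structs and dynamic arrays are now kept in linear memory and handled by address,
+// rather than being flattened into individual wasm values.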
+static b32 onyx_type_is_stored_in_memory(Type *type) {
+ if (type->kind == Type_Kind_Struct || type->kind == Type_Kind_DynArray) {
+ return 1;
+ }
+
+ return 0;
+}
+
static WasmType onyx_type_to_wasm_type(Type* type) {
- if (type->kind == Type_Kind_Struct) {
- if (type_linear_member_count(type) == 1) {
+ if (onyx_type_is_stored_in_memory(type)) {
+ /*if (type_linear_member_count(type) == 1) {
return onyx_type_to_wasm_type(type->Struct.linear_members[0].type);
- }
+ }*/
// :StructAsm
- return WASM_TYPE_VOID;
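+        // Memory-resident values are passed around by address, so they lower to a
+        // single pointer-sized wasm value.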
+ return WASM_TYPE_PTR;
}
if (type->kind == Type_Kind_Slice) {
return WASM_TYPE_VOID;
}
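+// Slices and varargs still expand to more than one wasm value (a data pointer plus a
+// count), so they cannot be treated as a single scalar.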
+static b32 onyx_type_is_multiple_wasm_values(Type *type) {
+ if (type->kind == Type_Kind_Slice || type->kind == Type_Kind_VarArgs) {
+ return 1;
+ }
+
+ // Dynamic arrays, unlike slices, are stored in memory and are therefore
+ // represented as a single pointer to the data for the structure.
+
+ return 0;
+}
+
static i32 generate_type_idx(OnyxWasmModule* mod, Type* ft);
static i32 get_element_idx(OnyxWasmModule* mod, AstFunction* func);
EMIT_FUNC(location_return_offset, AstTyped* expr, u64* offset_return);
EMIT_FUNC(location, AstTyped* expr);
EMIT_FUNC(compound_load, Type* type, u64 offset, i32 ignored_value_count);
-EMIT_FUNC(struct_lval, AstTyped* lval);
-EMIT_FUNC(struct_literal, AstStructLiteral* sl);
EMIT_FUNC(compound_store, Type* type, u64 offset, b32 location_first);
+EMIT_FUNC(struct_store, Type* type, u32 offset);
+EMIT_FUNC(struct_literal, AstStructLiteral* sl);
EMIT_FUNC(array_store, Type* type, u32 offset);
EMIT_FUNC(array_literal, AstArrayLiteral* al);
EMIT_FUNC(range_literal, AstRangeLiteral* range);
}
}
+ /*
if (lval->kind == Ast_Kind_Field_Access) {
// :StructAsm
// This code was never right anyway...
return;
}
}
+ */
if (lval->kind == Ast_Kind_Global) {
emit_expression(mod, &code, assign->right);
EMIT_FUNC(store_instruction, Type* type, u32 offset) {
bh_arr(WasmInstruction) code = *pcode;
- // :StructAsm
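+ // Memory-resident types (structs, dynamic arrays) expect a source pointer on the
+ // stack and are copied with emit_struct_store; arrays keep their own store path.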
+ if (onyx_type_is_stored_in_memory(type)) {
+ emit_struct_store(mod, pcode, type, offset);
+ return;
+ }
+
+ if (type->kind == Type_Kind_Array) {
+ emit_array_store(mod, pcode, type, offset);
+ return;
+ }
+
if (type_is_compound(type)) {
emit_compound_store(mod, pcode, type, offset, 0);
return;
}
- // :StructAsm
+ /*
if (type->kind == Type_Kind_Struct) {
assert(bh_arr_length(type->Struct.linear_members) == 1);
type = type->Struct.linear_members[0].type;
}
-
- if (type->kind == Type_Kind_Array) {
- emit_array_store(mod, pcode, type, offset);
- return;
- }
+ */
if (type->kind == Type_Kind_Enum) type = type->Enum.backing;
if (type->kind == Type_Kind_Distinct) type = type->Distinct.base_type;
u64 expr_tmp = local_raw_allocate(mod->local_alloc, wt);
WIL(token, WI_LOCAL_SET, expr_tmp);
- u64 offset = 0;
- emit_location_return_offset(mod, &code, lval, &offset);
+ // nocheckin
+ // emit_location_return_offset(mod, &code, lval, &offset);
+ emit_location(mod, &code, lval);
WIL(token, WI_LOCAL_GET, expr_tmp);
local_raw_free(mod->local_alloc, wt);
- emit_store_instruction(mod, &code, lval->type, offset);
+ emit_store_instruction(mod, &code, lval->type, 0);
*pcode = code;
return;
bh_arr(WasmInstruction) code = *pcode;
// If this is a structure, use the emit_struct_lval function.
- // :StructAsm
- if (type_is_structlike_strict(lval->type)) {
- emit_struct_lval(mod, &code, lval);
- }
+ // if (type_is_structlike_strict(lval->type)) {
+ // emit_struct_lval(mod, &code, lval);
+ // }
// If this is a WASM local, simply set the local and continue.
- else if (bh_imap_get(&mod->local_map, (u64) lval) & LOCAL_IS_WASM) {
+ if (bh_imap_get(&mod->local_map, (u64) lval) & LOCAL_IS_WASM) {
u64 localidx = bh_imap_get(&mod->local_map, (u64) lval);
WIL(token, WI_LOCAL_SET, localidx);
}
EMIT_FUNC(load_instruction, Type* type, u32 offset) {
bh_arr(WasmInstruction) code = *pcode;
- // :StructAsm
+ if (type->kind == Type_Kind_Array || onyx_type_is_stored_in_memory(type)) {
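+ // For memory-resident values, "loading" produces only the address of the value
+ // (adjusted by the constant offset); no data is actually read here.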
+ if (offset != 0) {
+ WID(NULL, WI_PTR_CONST, offset);
+ WI(NULL, WI_PTR_ADD);
+ }
+
+ *pcode = code;
+ return;
+ }
+
if (type_is_compound(type)) {
emit_compound_load(mod, pcode, type, offset, 0);
return;
}
- // :StructAsm
+ /*
if (type->kind == Type_Kind_Struct) {
assert(bh_arr_length(type->Struct.linear_members) == 1);
type = type->Struct.linear_members[0].type;
}
-
- if (type->kind == Type_Kind_Array) {
- if (offset != 0) {
- WID(NULL, WI_PTR_CONST, offset);
- WI(NULL, WI_PTR_ADD);
- }
-
- *pcode = code;
- return;
- }
+ */
if (type->kind == Type_Kind_Enum) type = type->Enum.backing;
if (type->kind == Type_Kind_Distinct) type = type->Distinct.base_type;
remove_info.iterator_data_ptr = iterator_data_ptr;
remove_info.iterator_remove_func = iterator_remove_func;
- TypeWithOffset remove_func_type;
- type_linear_member_lookup(for_node->iter->type, 3, &remove_func_type);
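+ // The iterator is an ordinary struct stored in memory now, so its members are
+ // looked up by struct-member index instead of by linear-member index.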
+ StructMember remove_func_type;
+ type_lookup_member_by_idx(for_node->iter->type, 3, &remove_func_type);
remove_info.remove_func_type_idx = generate_type_idx(mod, remove_func_type.type);
bh_arr_push(mod->for_remove_info, remove_info);
emit_enter_structured_block(mod, &code, SBT_Basic_Block, for_node->token);
if (!for_node->no_close) {
- TypeWithOffset close_func_type;
- type_linear_member_lookup(for_node->iter->type, 2, &close_func_type);
+ StructMember close_func_type;
+ type_lookup_member_by_idx(for_node->iter->type, 2, &close_func_type);
i32 close_type_idx = generate_type_idx(mod, close_func_type.type);
WasmInstruction* close_instructions = bh_alloc_array(global_heap_allocator, WasmInstruction, 8);
// CLEANUP: Calling a function is way too f-ing complicated. FACTOR IT!!
u64 stack_top_idx = bh_imap_get(&mod->index_map, (u64) &builtin_stack_top);
- // :StructAsm
- // :StructAsm
- // :StructAsm
- TypeWithOffset next_func_type;
- type_linear_member_lookup(for_node->iter->type, 1, &next_func_type);
+ StructMember next_func_type;
+ type_lookup_member_by_idx(for_node->iter->type, 1, &next_func_type);
Type* return_type = next_func_type.type->Function.return_type;
u32 return_size = type_size_of(return_type);
place_on_stack = 1;
}
- // :StructAsm
if (place_on_stack) WIL(call_token, WI_LOCAL_GET, stack_top_store_local);
emit_expression(mod, &code, arg->value);
}
if (arg->pass_as_any) {
- WIL(call_token, WI_I32_CONST, arg->value->type->id);
+ u32 arg_size = type_size_of(arg->value->type);
+
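+ // Arguments passed as 'any' are lowered to a (data pointer, type id) pair written
+ // into the reserved stack space at reserve_size + arg_size, just past the
+ // argument's own data.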
+ u64 ugly_temporary = local_raw_allocate(mod->local_alloc, WASM_TYPE_PTR);
+ WIL(call_token, WI_LOCAL_SET, ugly_temporary);
+
+ WIL(call_token, WI_LOCAL_GET, stack_top_store_local);
+ WIL(call_token, WI_LOCAL_GET, ugly_temporary);
+ emit_store_instruction(mod, &code, &basic_types[Basic_Kind_Rawptr], reserve_size + arg_size + 0);
+
+ WIL(call_token, WI_LOCAL_GET, stack_top_store_local);
+ WID(call_token, WI_I32_CONST, arg->value->type->id);
+ emit_store_instruction(mod, &code, &basic_types[Basic_Kind_Type_Index], reserve_size + arg_size + 4);
+
+ reserve_size += 2 * POINTER_SIZE;
}
reserve_size += type_size_of(arg->value->type);
}
if (cc == CC_Return_Stack) {
- // :StructAsm
- // :StructAsm
- // :StructAsm
- // :StructAsm
WID(call_token, WI_GLOBAL_GET, stack_top_idx);
emit_load_with_ignored_instruction(mod, &code, return_type, reserve_size - return_size, call->ignored_return_value_count);
}
*pcode = code;
}
-EMIT_FUNC(struct_lval, AstTyped* lval) {
- bh_arr(WasmInstruction) code = *pcode;
-
- // :StructAsm
- assert(type_is_structlike_strict(lval->type));
-
- u64 offset = 0;
- emit_location_return_offset(mod, &code, lval, &offset);
- emit_compound_store(mod, &code, lval->type, offset, 1);
-
- *pcode = code;
-}
-
EMIT_FUNC(compound_load, Type* type, u64 offset, i32 ignored_value_count) {
bh_arr(WasmInstruction) code = *pcode;
i32 mem_count = type_linear_member_count(type);
EMIT_FUNC(struct_literal, AstStructLiteral* sl) {
bh_arr(WasmInstruction) code = *pcode;
- // :StructAsm
+ if (!onyx_type_is_stored_in_memory(sl->type)) {
+ bh_arr_each(AstTyped *, val, sl->args.values) {
+ emit_expression(mod, &code, *val);
+ }
+
+ *pcode = code;
+ return;
+ }
+
+ emit_local_allocation(mod, &code, (AstTyped *) sl);
+
+ u64 local_offset = (u64) bh_imap_get(&mod->local_map, (u64) sl);
+ assert((local_offset & LOCAL_IS_WASM) == 0);
+
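+ // The literal is materialized in its reserved stack-frame slot: each member value
+ // is stored at its offset within that slot, and the slot's address is what the
+ // literal ultimately leaves on the wasm stack.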
+ i32 idx = 0;
+ StructMember smem;
bh_arr_each(AstTyped *, val, sl->args.values) {
+ type_lookup_member_by_idx(sl->type, idx, &smem);
+
+ // CLEANUP: When emitting a structure literal inside of a structure literal,
+ // there should be a separate path taken to reduce the amount of redundant memory.
+ WIL(sl->token, WI_LOCAL_GET, mod->stack_base_idx);
emit_expression(mod, &code, *val);
+ emit_store_instruction(mod, &code, (*val)->type, local_offset + smem.offset);
+
+ idx += 1;
+ }
+
+ WIL(sl->token, WI_LOCAL_GET, mod->stack_base_idx);
+
+ if (local_offset > 0) {
+ WIL(sl->token, WI_PTR_CONST, local_offset);
+ WI(sl->token, WI_PTR_ADD);
}
*pcode = code;
}
+// <src_ptr> <- top of stack
+// <dest_ptr>
+EMIT_FUNC(struct_store, Type *type, u32 offset) {
+ assert(onyx_type_is_stored_in_memory(type));
+ bh_arr(WasmInstruction) code = *pcode;
+
+ if (offset != 0) {
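+ // Fold the constant offset into the destination pointer: stash the source,
+ // add the offset to the destination underneath it, then restore the source.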
+ u64 rptr_local = local_raw_allocate(mod->local_alloc, WASM_TYPE_PTR);
+ WIL(NULL, WI_LOCAL_SET, rptr_local);
+
+ WIL(NULL, WI_PTR_CONST, offset);
+ WI(NULL, WI_PTR_ADD);
+ WIL(NULL, WI_LOCAL_GET, rptr_local);
+
+ local_raw_free(mod->local_alloc, WASM_TYPE_PTR);
+ }
+
+ u64 rptr_local = local_raw_allocate(mod->local_alloc, WASM_TYPE_PTR);
+ u64 lptr_local = local_raw_allocate(mod->local_alloc, WASM_TYPE_PTR);
+ WIL(NULL, WI_LOCAL_SET, rptr_local);
+ WIL(NULL, WI_LOCAL_SET, lptr_local);
+
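+ // A source pointer of zero means there is no data to copy: the else branch below
+ // fills the destination with zero bytes instead of copying from the source.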
+ WIL(NULL, WI_LOCAL_GET, rptr_local);
+ WID(NULL, WI_I32_CONST, 0);
+ WI(NULL, WI_I32_NE);
+ emit_enter_structured_block(mod, &code, SBT_Basic_If, NULL);
+
+ WIL(NULL, WI_LOCAL_GET, lptr_local);
+ WIL(NULL, WI_LOCAL_GET, rptr_local);
+ WIL(NULL, WI_I32_CONST, type_size_of(type));
+
+ // Use a simple memory copy if it is available.
+ if (context.options->use_post_mvp_features) {
+ WI(NULL, WI_MEMORY_COPY);
+ } else {
+ emit_intrinsic_memory_copy(mod, &code);
+ }
+
+ WI(NULL, WI_ELSE);
+
+ WIL(NULL, WI_LOCAL_GET, lptr_local);
+ WIL(NULL, WI_I32_CONST, 0);
+ WIL(NULL, WI_I32_CONST, type_size_of(type));
+
+ if (context.options->use_post_mvp_features) {
+ WI(NULL, WI_MEMORY_FILL);
+ } else {
+ emit_intrinsic_memory_fill(mod, &code);
+ }
+
+ emit_leave_structured_block(mod, &code);
+ local_raw_free(mod->local_alloc, WASM_TYPE_PTR);
+ local_raw_free(mod->local_alloc, WASM_TYPE_PTR);
+
+ *pcode = code;
+ return;
+}
+
EMIT_FUNC(array_store, Type* type, u32 offset) {
assert(type->kind == Type_Kind_Array);
bh_arr(WasmInstruction) code = *pcode;
EMIT_FUNC(array_literal, AstArrayLiteral* al) {
bh_arr(WasmInstruction) code = *pcode;
+ emit_local_allocation(mod, &code, (AstTyped *) al);
+
u64 local_offset = (u64) bh_imap_get(&mod->local_map, (u64) al);
assert((local_offset & LOCAL_IS_WASM) == 0);
expr = (AstTyped *) strip_aliases((AstNode *) expr);
switch (expr->kind) {
- // :StructAsm
case Ast_Kind_Param:
case Ast_Kind_Local:
case Ast_Kind_Array_Literal:
}
case Ast_Kind_Struct_Literal: {
- // :StructAsm
emit_struct_literal(mod, &code, (AstStructLiteral *) expr);
break;
}
case Ast_Kind_Array_Literal: {
- emit_local_allocation(mod, &code, expr);
emit_array_literal(mod, &code, (AstArrayLiteral *) expr);
break;
}
}
}
- if (is_lval((AstNode *) field->expr) || type_is_pointer(field->expr->type)) {
+ if (is_lval((AstNode *) field->expr)
+ || type_is_pointer(field->expr->type)
+ || onyx_type_is_stored_in_memory(field->expr->type)) {
u64 offset = 0;
emit_field_access_location(mod, &code, field, &offset);
emit_load_instruction(mod, &code, field->type, offset);
emit_generic_store_instruction(mod, &code, (AstTyped *) dest, NULL);
} else if (mod->curr_cc == CC_Return_Stack) {
- // :StructAsm
WIL(NULL, WI_LOCAL_GET, mod->stack_base_idx);
WID(NULL, WI_I32_CONST, type_size_of(ret->expr->type));
WI(NULL, WI_I32_SUB);
EMIT_FUNC(zero_value_for_type, Type* type, OnyxToken* where) {
bh_arr(WasmInstruction) code = *pcode;
- if (type_is_structlike_strict(type)) {
+ if (onyx_type_is_multiple_wasm_values(type)) {
i32 mem_count = type_linear_member_count(type);
TypeWithOffset two;
type_linear_member_lookup(type, i, &two);
emit_zero_value_for_type(mod, &code, two.type, where);
}
- }
- else if (type->kind == Type_Kind_Function) {
+
+ } else if (type->kind == Type_Kind_Function) {
WID(NULL, WI_I32_CONST, mod->null_proc_func_idx);
- }
- else {
+
+ } else {
WasmType wt = onyx_type_to_wasm_type(type);
if (wt == WASM_TYPE_VOID) {
onyx_report_error(where->pos, Error_Critical, "Cannot produce a zero-value for this type.");
i32 params_left = param_count;
while (params_left-- > 0) {
- if (type_get_param_pass(*param_type) == Param_Pass_By_Implicit_Pointer) {
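+ // Memory-resident struct parameters now lower to a single pointer-typed
+ // wasm parameter (see onyx_type_to_wasm_type).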
+ if ((*param_type)->kind == Type_Kind_Struct) {
+ *(t++) = (char) onyx_type_to_wasm_type(*param_type);
+ }
+
+ else if (type_get_param_pass(*param_type) == Param_Pass_By_Implicit_Pointer) {
*(t++) = (char) onyx_type_to_wasm_type(&basic_types[Basic_Kind_Rawptr]);
}
+
// :StructAsm
else if (type_is_structlike_strict(*param_type)) {
u32 mem_count = type_structlike_mem_count(*param_type);
}
*(t++) = ':';
- // HACK: Slightly: the wasm type for structs has to be 0x00
WasmType return_type = onyx_type_to_wasm_type(ft->Function.return_type);
- if (ft->Function.return_type->kind == Type_Kind_Struct && !type_is_compound(ft->Function.return_type)) {
+
+ /*
+ if (ft->Function.return_type->kind == Type_Kind_Struct
+ && type_linear_member_count(ft->Function.return_type) == 1) {
return_type = onyx_type_to_wasm_type(ft->Function.return_type->Struct.linear_members[0].type);
}
+ */
*(t++) = (char) return_type;
*t = '\0';
bh_arr_insert_end(wasm_func.code, 5);
fori (i, 0, 5) wasm_func.code[i] = (WasmInstruction) { WI_NOP, 0 };
- // TODO: Emit debug info for the above instructions
-
mod->stack_base_idx = local_raw_allocate(mod->local_alloc, WASM_TYPE_PTR);
debug_function_set_ptr_idx(mod, func_idx, mod->stack_base_idx);