WI_MEMORY_COPY = EXT_INSTR_MASK | 0x0a,
WI_MEMORY_FILL = EXT_INSTR_MASK | 0x0b,
+ WI_ATOMIC_NOTIFY = ATOMIC_INSTR_MASK | 0x00,
+ WI_ATOMIC_WAIT32 = ATOMIC_INSTR_MASK | 0x01,
+ WI_ATOMIC_WAIT64 = ATOMIC_INSTR_MASK | 0x02,
+
+ WI_ATOMIC_FENCE = ATOMIC_INSTR_MASK | 0x03,
+
+ WI_ATOMIC_I32_LOAD = ATOMIC_INSTR_MASK | 0x10,
+ WI_ATOMIC_I64_LOAD = ATOMIC_INSTR_MASK | 0x11,
+ WI_ATOMIC_I32_LOAD8_U = ATOMIC_INSTR_MASK | 0x12,
+ WI_ATOMIC_I32_LOAD16_U = ATOMIC_INSTR_MASK | 0x13,
+ WI_ATOMIC_I64_LOAD8_U = ATOMIC_INSTR_MASK | 0x14,
+ WI_ATOMIC_I64_LOAD16_U = ATOMIC_INSTR_MASK | 0x15,
+ WI_ATOMIC_I64_LOAD32_U = ATOMIC_INSTR_MASK | 0x16,
+
+ WI_ATOMIC_I32_STORE = ATOMIC_INSTR_MASK | 0x17,
+ WI_ATOMIC_I64_STORE = ATOMIC_INSTR_MASK | 0x18,
+ WI_ATOMIC_I32_STORE8 = ATOMIC_INSTR_MASK | 0x19,
+ WI_ATOMIC_I32_STORE16 = ATOMIC_INSTR_MASK | 0x1a,
+ WI_ATOMIC_I64_STORE8 = ATOMIC_INSTR_MASK | 0x1b,
+ WI_ATOMIC_I64_STORE16 = ATOMIC_INSTR_MASK | 0x1c,
+ WI_ATOMIC_I64_STORE32 = ATOMIC_INSTR_MASK | 0x1d,
+
+ WI_ATOMIC_I32_ADD = ATOMIC_INSTR_MASK | 0x1e,
+ WI_ATOMIC_I64_ADD = ATOMIC_INSTR_MASK | 0x1f,
+ WI_ATOMIC_I32_ADD8_U = ATOMIC_INSTR_MASK | 0x20,
+ WI_ATOMIC_I32_ADD16_U = ATOMIC_INSTR_MASK | 0x21,
+ WI_ATOMIC_I64_ADD8_U = ATOMIC_INSTR_MASK | 0x22,
+ WI_ATOMIC_I64_ADD16_U = ATOMIC_INSTR_MASK | 0x23,
+ WI_ATOMIC_I64_ADD32_U = ATOMIC_INSTR_MASK | 0x24,
+
+ WI_ATOMIC_I32_SUB = ATOMIC_INSTR_MASK | 0x25,
+ WI_ATOMIC_I64_SUB = ATOMIC_INSTR_MASK | 0x26,
+ WI_ATOMIC_I32_SUB8_U = ATOMIC_INSTR_MASK | 0x27,
+ WI_ATOMIC_I32_SUB16_U = ATOMIC_INSTR_MASK | 0x28,
+ WI_ATOMIC_I64_SUB8_U = ATOMIC_INSTR_MASK | 0x29,
+ WI_ATOMIC_I64_SUB16_U = ATOMIC_INSTR_MASK | 0x2a,
+ WI_ATOMIC_I64_SUB32_U = ATOMIC_INSTR_MASK | 0x2b,
+
+ WI_ATOMIC_I32_AND = ATOMIC_INSTR_MASK | 0x2c,
+ WI_ATOMIC_I64_AND = ATOMIC_INSTR_MASK | 0x2d,
+ WI_ATOMIC_I32_AND8_U = ATOMIC_INSTR_MASK | 0x2e,
+ WI_ATOMIC_I32_AND16_U = ATOMIC_INSTR_MASK | 0x2f,
+ WI_ATOMIC_I64_AND8_U = ATOMIC_INSTR_MASK | 0x30,
+ WI_ATOMIC_I64_AND16_U = ATOMIC_INSTR_MASK | 0x31,
+ WI_ATOMIC_I64_AND32_U = ATOMIC_INSTR_MASK | 0x32,
+
+ WI_ATOMIC_I32_OR = ATOMIC_INSTR_MASK | 0x33,
+ WI_ATOMIC_I64_OR = ATOMIC_INSTR_MASK | 0x34,
+ WI_ATOMIC_I32_OR8_U = ATOMIC_INSTR_MASK | 0x35,
+ WI_ATOMIC_I32_OR16_U = ATOMIC_INSTR_MASK | 0x36,
+ WI_ATOMIC_I64_OR8_U = ATOMIC_INSTR_MASK | 0x37,
+ WI_ATOMIC_I64_OR16_U = ATOMIC_INSTR_MASK | 0x38,
+ WI_ATOMIC_I64_OR32_U = ATOMIC_INSTR_MASK | 0x39,
+
+ WI_ATOMIC_I32_XOR = ATOMIC_INSTR_MASK | 0x3a,
+ WI_ATOMIC_I64_XOR = ATOMIC_INSTR_MASK | 0x3b,
+ WI_ATOMIC_I32_XOR8_U = ATOMIC_INSTR_MASK | 0x3c,
+ WI_ATOMIC_I32_XOR16_U = ATOMIC_INSTR_MASK | 0x3d,
+ WI_ATOMIC_I64_XOR8_U = ATOMIC_INSTR_MASK | 0x3e,
+ WI_ATOMIC_I64_XOR16_U = ATOMIC_INSTR_MASK | 0x3f,
+ WI_ATOMIC_I64_XOR32_U = ATOMIC_INSTR_MASK | 0x40,
+
+ WI_ATOMIC_I32_XCHG = ATOMIC_INSTR_MASK | 0x41,
+ WI_ATOMIC_I64_XCHG = ATOMIC_INSTR_MASK | 0x42,
+ WI_ATOMIC_I32_XCHG8_U = ATOMIC_INSTR_MASK | 0x43,
+ WI_ATOMIC_I32_XCHG16_U = ATOMIC_INSTR_MASK | 0x44,
+ WI_ATOMIC_I64_XCHG8_U = ATOMIC_INSTR_MASK | 0x45,
+ WI_ATOMIC_I64_XCHG16_U = ATOMIC_INSTR_MASK | 0x46,
+ WI_ATOMIC_I64_XCHG32_U = ATOMIC_INSTR_MASK | 0x47,
+
+ WI_ATOMIC_I32_CMPXCHG = ATOMIC_INSTR_MASK | 0x48,
+ WI_ATOMIC_I64_CMPXCHG = ATOMIC_INSTR_MASK | 0x49,
+ WI_ATOMIC_I32_CMPXCHG8_U = ATOMIC_INSTR_MASK | 0x4a,
+ WI_ATOMIC_I32_CMPXCHG16_U = ATOMIC_INSTR_MASK | 0x4b,
+ WI_ATOMIC_I64_CMPXCHG8_U = ATOMIC_INSTR_MASK | 0x4c,
+ WI_ATOMIC_I64_CMPXCHG16_U = ATOMIC_INSTR_MASK | 0x4d,
+ WI_ATOMIC_I64_CMPXCHG32_U = ATOMIC_INSTR_MASK | 0x4e,
} WasmInstructionType;
typedef union {
case ONYX_INTRINSIC_F32X4_CONVERT_I32X4_S: WI(WI_F32X4_CONVERT_I32X4_S); break;
case ONYX_INTRINSIC_F32X4_CONVERT_I32X4_U: WI(WI_F32X4_CONVERT_I32X4_U); break;
+ case ONYX_INTRINSIC_ATOMIC_WAIT: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_wait(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_NOTIFY: {
+ emit_intrinsic_atomic_notify(mod, &code);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_FENCE: {
+ emit_intrinsic_atomic_fence(mod, &code);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_LOAD: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_load(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_STORE: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_store(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_ADD: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_add(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_SUB: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_sub(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_AND: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_and(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_OR: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_or(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_XOR: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_xor(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_XCHG: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_xchg(mod, &code, atomic_type, call->token);
+ break;
+ }
+
+ case ONYX_INTRINSIC_ATOMIC_CMPXCHG: {
+ Type* atomic_type = ((AstArgument *) call->args.values[0])->value->type->Pointer.elem;
+ emit_intrinsic_atomic_cmpxchg(mod, &code, atomic_type, call->token);
+ break;
+ }
+
default: assert(("Unsupported intrinsic", 0));
}
type_get_name(type));
break;
}
-
+
*pcode = code;
}
// Emits a 'memory.atomic.wait32' or 'memory.atomic.wait64' instruction for the
// given atomic value type. The WasmInstructionData pair is the wasm memarg:
// { log2(alignment), offset } — atomics require natural alignment.
// Reports an error at `where` for any type other than a 32- or 64-bit integer.
EMIT_FUNC(intrinsic_atomic_wait, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_WAIT32, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_WAIT64, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic wait, '%s'. Only i32 and i64 are supported.", type_get_name(type));
}
-EMIT_FUNC(intrinsic_atomic_notify, Type* type, OnyxToken* where) {
+EMIT_FUNC_NO_ARGS(intrinsic_atomic_notify) {
+ bh_arr(WasmInstruction) code = *pcode;
+ WI(WI_ATOMIC_NOTIFY);
+ *pcode = code;
}
// Emits an 'atomic.fence' instruction, ordering all prior memory operations
// before all subsequent ones.
EMIT_FUNC_NO_ARGS(intrinsic_atomic_fence) {
    bh_arr(WasmInstruction) code = *pcode;
    WI(WI_ATOMIC_FENCE);
    *pcode = code;
}
// Emits the atomic load instruction matching `type`. Sub-word loads (u8/u16)
// zero-extend into an i32. The WasmInstructionData pair is the wasm memarg:
// { log2(alignment), offset } — atomics require natural alignment.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_load, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_LOAD8_U,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_LOAD16_U, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_LOAD, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_LOAD, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic load, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}
// Emits the atomic store instruction matching `type`. Sub-word stores (u8/u16)
// write the low bits of an i32 operand. The WasmInstructionData pair is the
// wasm memarg: { log2(alignment), offset }.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_store, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_STORE8,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_STORE16, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_STORE, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_STORE, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic store, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}
// Emits the atomic read-modify-write ADD instruction matching `type`
// (wasm 'iNN.atomic.rmw.add' family; returns the value loaded before the add).
// The WasmInstructionData pair is the wasm memarg: { log2(alignment), offset }.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_add, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_ADD8_U,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_ADD16_U, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_ADD, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_ADD, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic add, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}
// Emits the atomic read-modify-write SUB instruction matching `type`
// (wasm 'iNN.atomic.rmw.sub' family; returns the value loaded before the sub).
// The WasmInstructionData pair is the wasm memarg: { log2(alignment), offset }.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_sub, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_SUB8_U,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_SUB16_U, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_SUB, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_SUB, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic sub, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}
// Emits the atomic read-modify-write AND instruction matching `type`
// (wasm 'iNN.atomic.rmw.and' family; returns the value loaded before the and).
// The WasmInstructionData pair is the wasm memarg: { log2(alignment), offset }.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_and, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_AND8_U,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_AND16_U, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_AND, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_AND, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic and, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}
// Emits the atomic read-modify-write OR instruction matching `type`
// (wasm 'iNN.atomic.rmw.or' family; returns the value loaded before the or).
// The WasmInstructionData pair is the wasm memarg: { log2(alignment), offset }.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_or, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_OR8_U,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_OR16_U, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_OR, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_OR, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic or, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}
// Emits the atomic read-modify-write XOR instruction matching `type`
// (wasm 'iNN.atomic.rmw.xor' family; returns the value loaded before the xor).
// The WasmInstructionData pair is the wasm memarg: { log2(alignment), offset }.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_xor, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_XOR8_U,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_XOR16_U, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_XOR, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_XOR, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic xor, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}
// Emits the atomic exchange instruction matching `type`
// (wasm 'iNN.atomic.rmw.xchg' family; returns the value loaded before the swap).
// The WasmInstructionData pair is the wasm memarg: { log2(alignment), offset }.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_xchg, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_XCHG8_U,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_XCHG16_U, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_XCHG, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_XCHG, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic xchg, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}
// Emits the atomic compare-and-exchange instruction matching `type`
// (wasm 'iNN.atomic.rmw.cmpxchg' family; returns the value loaded before the
// conditional swap). The WasmInstructionData pair is the wasm memarg:
// { log2(alignment), offset }.
// Reports an error at `where` for unsupported types.
EMIT_FUNC(intrinsic_atomic_cmpxchg, Type* type, OnyxToken* where) {
    if (type->kind != Type_Kind_Basic) goto bad_type;

    bh_arr(WasmInstruction) code = *pcode;

    switch (type->Basic.kind) {
        case Basic_Kind_U8:  WID(WI_ATOMIC_I32_CMPXCHG8_U,  ((WasmInstructionData) { 0, 0 })); break;
        case Basic_Kind_U16: WID(WI_ATOMIC_I32_CMPXCHG16_U, ((WasmInstructionData) { 1, 0 })); break;

        case Basic_Kind_I32:
        case Basic_Kind_U32: WID(WI_ATOMIC_I32_CMPXCHG, ((WasmInstructionData) { 2, 0 })); break;

        case Basic_Kind_I64:
        case Basic_Kind_U64: WID(WI_ATOMIC_I64_CMPXCHG, ((WasmInstructionData) { 3, 0 })); break;

        default: goto bad_type;
    }

    *pcode = code;
    return;

bad_type:
    onyx_report_error(where->pos, "Bad type for atomic cmpxchg, '%s'. Only u8, u16, u32, i32, u64, and i64 are supported.", type_get_name(type));
}