*.html
session.vim
*.sublime-project
-*.sublime-workspace
\ No newline at end of file
+*.sublime-workspace
+*.pdb
+*.ilk
+*.obj
+*.rdbg
\ No newline at end of file
--- /dev/null
+@echo off
+
+del *.pdb > NUL 2> NUL
+del *.ilk > NUL 2> NUL
+
+cl.exe /Od /MDd /Z7 /I include /std:c17 /Tc src/onyx.c src/onyxbuiltins.c src/onyxchecker.c src/onyxclone.c src/onyxdoc.c src/onyxentities.c src/onyxerrors.c src/onyxlex.c src/onyxparser.c src/onyxsempass.c src/onyxsymres.c src/onyxtypes.c src/onyxutils.c src/onyxwasm.c /link /DEBUG /OUT:onyx.exe /incremental:no /opt:ref /subsystem:console
+
+del *.obj > NUL 2> NUL
\ No newline at end of file
case #char "i" {
n : i32;
- if !vararg_get(va, ^n) do return;
+ if !vararg_get(va, ^n) do return buffer.data[0 .. 0];
ibuf : [128] u8;
istr := i64_to_str(~~n, 10, ibuf[0 .. 128]);
case #char "l" {
n : i64;
- if !vararg_get(va, ^n) do return;
+ if !vararg_get(va, ^n) do return buffer.data[0 .. 0];
ibuf : [128] u8;
istr := i64_to_str(n, 10, ibuf[0 .. 128]);
case #char "f" {
n : f32;
- if !vararg_get(va, ^n) do return;
+ if !vararg_get(va, ^n) do return buffer.data[0 .. 0];
fbuf : [128] u8;
fstr := f64_to_str(~~n, fbuf[0 .. 128]);
case #char "d" {
n : f64;
- if !vararg_get(va, ^n) do return;
+ if !vararg_get(va, ^n) do return buffer.data[0 .. 0];
fbuf : [128] u8;
fstr := f64_to_str(n, fbuf[0 .. 128]);
case #char "s" {
s : str;
- if !vararg_get(va, ^s) do return;
+ if !vararg_get(va, ^s) do return buffer.data[0 .. 0];
for a: s {
buffer[len] = a;
case #char "p" {
n : rawptr;
- if !vararg_get(va, ^n) do return;
+ if !vararg_get(va, ^n) do return buffer.data[0 .. 0];
ibuf : [128] u8;
istr := i64_to_str(~~n, 16, ibuf[0 .. 128]);
case #char "c" {
c : u8;
- if !vararg_get(va, ^c) do return;
+ if !vararg_get(va, ^c) do return buffer.data[0 .. 0];
buffer[len] = c;
len += 1;
case #char "b" {
b : bool;
- if !vararg_get(va, ^b) do return;
+ if !vararg_get(va, ^b) do return buffer.data[0 .. 0];
s := "false";
if b do s = "true";
--- /dev/null
+package core.intrinsics.simd
+
+use package simd as simd
+
+i8x16 :: #type simd.i8x16
+i16x8 :: #type simd.i16x8
+i32x4 :: #type simd.i32x4
+i64x2 :: #type simd.i64x2
+f32x4 :: #type simd.f32x4
+f64x2 :: #type simd.f64x2
+v128 :: #type simd.v128
+
+// NOTE: These u8 values must be known at compile time.
+v128_const :: proc (b1: u8, b2: u8, b3: u8, b4: u8,
+ b5: u8, b6: u8, b7: u8, b8: u8,
+ b9: u8, b10: u8, b11: u8, b12: u8,
+ b13: u8, b14: u8, b15: u8, b16: u8) -> v128 #intrinsic ---
+
+i8x16_const :: proc (b1: i8, b2: i8, b3: i8, b4: i8,
+ b5: i8, b6: i8, b7: i8, b8: i8,
+ b9: i8, b10: i8, b11: i8, b12: i8,
+ b13: i8, b14: i8, b15: i8, b16: i8) -> i8x16 #intrinsic ---
+i16x8_const :: proc (b1: i16, b2: i16, b3: i16, b4: i16,
+ b5: i16, b6: i16, b7: i16, b8: i16) -> i16x8 #intrinsic ---
+i32x4_const :: proc (b1: i32, b2: i32, b3: i32, b4: i32) -> i32x4 #intrinsic ---
+i64x2_const :: proc (b1: i64, b2: i64) -> i64x2 #intrinsic ---
+f32x4_const :: proc (b1: f32, b2: f32, b3: f32, b4: f32) -> f32x4 #intrinsic ---
+f64x2_const :: proc (b1: f64, b2: f64) -> f64x2 #intrinsic ---
+
+// NOTE: These u8 values must be known at compile time.
+i8x16_shuffle :: proc (a: v128, b: v128,
+ b1: u8, b2: u8, b3: u8, b4: u8,
+ b5: u8, b6: u8, b7: u8, b8: u8,
+ b9: u8, b10: u8, b11: u8, b12: u8,
+ b13: u8, b14: u8, b15: u8, b16: u8) -> v128 #intrinsic ---
+
+i8x16_extract_lane_s :: proc (v: i8x16, l: u32) -> i8 #intrinsic ---
+i8x16_extract_lane_u :: proc (v: i8x16, l: u32) -> u8 #intrinsic ---
+i8x16_replace_lane :: proc (v: i8x16, l: u32, val: i8) -> i8x16 #intrinsic ---
+i16x8_extract_lane_s :: proc (v: i16x8, l: u32) -> i16 #intrinsic ---
+i16x8_extract_lane_u :: proc (v: i16x8, l: u32) -> u16 #intrinsic ---
+i16x8_replace_lane :: proc (v: i16x8, l: u32, val: i16) -> i16x8 #intrinsic ---
+i32x4_extract_lane :: proc (v: i32x4, l: u32) -> i32 #intrinsic ---
+i32x4_replace_lane :: proc (v: i32x4, l: u32, val: i32) -> i32x4 #intrinsic ---
+i64x2_extract_lane :: proc (v: i64x2, l: u32) -> i64 #intrinsic ---
+i64x2_replace_lane :: proc (v: i64x2, l: u32, val: i64) -> i64x2 #intrinsic ---
+f32x4_extract_lane :: proc (v: f32x4, l: u32) -> f32 #intrinsic ---
+f32x4_replace_lane :: proc (v: f32x4, l: u32, val: f32) -> f32x4 #intrinsic ---
+f64x2_extract_lane :: proc (v: f64x2, l: u32) -> f64 #intrinsic ---
+f64x2_replace_lane :: proc (v: f64x2, l: u32, val: f64) -> f64x2 #intrinsic ---
+
+i8x16_swizzle :: proc (v: v128, s: v128) -> v128 #intrinsic ---
+i8x16_splat :: proc (val: i8) -> i8x16 #intrinsic ---
+i16x8_splat :: proc (val: i16) -> i16x8 #intrinsic ---
+i32x4_splat :: proc (val: i32) -> i32x4 #intrinsic ---
+i64x2_splat :: proc (val: i64) -> i64x2 #intrinsic ---
+f32x4_splat :: proc (val: f32) -> f32x4 #intrinsic ---
+f64x2_splat :: proc (val: f64) -> f64x2 #intrinsic ---
+
+i8x16_eq :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_neq :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_lt_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_lt_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_gt_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_gt_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_le_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_le_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_ge_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_ge_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+
+i16x8_eq :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_neq :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_lt_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_lt_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_gt_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_gt_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_le_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_le_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_ge_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_ge_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+
+i32x4_eq :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_neq :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_lt_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_lt_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_gt_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_gt_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_le_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_le_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_ge_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_ge_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+
+f32x4_eq :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
+f32x4_neq :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
+f32x4_lt :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
+f32x4_gt :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
+f32x4_le :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
+f32x4_ge :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
+
+f64x2_eq :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_neq :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_lt :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_gt :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_le :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_ge :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+
+v128_not :: proc (v: v128) -> v128 #intrinsic ---
+v128_and :: proc (a: v128, b: v128) -> v128 #intrinsic ---
+v128_andnot :: proc (a: v128, b: v128) -> v128 #intrinsic ---
+v128_or :: proc (a: v128, b: v128) -> v128 #intrinsic ---
+v128_xor :: proc (a: v128, b: v128) -> v128 #intrinsic ---
+v128_bitselect :: proc (a: v128, b: v128, c: v128) -> v128 #intrinsic ---
+
+i8x16_abs :: proc (a: i8x16) -> i8x16 #intrinsic ---
+i8x16_neg :: proc (a: i8x16) -> i8x16 #intrinsic ---
+i8x16_any_true :: proc (a: i8x16) -> bool #intrinsic ---
+i8x16_all_true :: proc (a: i8x16) -> bool #intrinsic ---
+i8x16_bitmask :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_narrow_i16x8_s :: proc (a: i16x8) -> i8x16 #intrinsic ---
+i8x16_narrow_i16x8_u :: proc (a: i16x8) -> i8x16 #intrinsic ---
+i8x16_shl :: proc (a: i8x16, s: i32) -> i8x16 #intrinsic ---
+i8x16_shr_s :: proc (a: i8x16, s: i32) -> i8x16 #intrinsic ---
+i8x16_shr_u :: proc (a: i8x16, s: i32) -> i8x16 #intrinsic ---
+i8x16_add :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_add_sat_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_add_sat_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_sub :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_sub_sat_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_sub_sat_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_min_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_min_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_max_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_max_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
+i8x16_avgr_u :: proc (a: i8x16) -> i8x16 #intrinsic ---
+
+i16x8_abs :: proc (a: i16x8) -> i16x8 #intrinsic ---
+i16x8_neg :: proc (a: i16x8) -> i16x8 #intrinsic ---
+i16x8_any_true :: proc (a: i16x8) -> bool #intrinsic ---
+i16x8_all_true :: proc (a: i16x8) -> bool #intrinsic ---
+i16x8_bitmask :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_narrow_i32x4_s :: proc (a: i32x4) -> i16x8 #intrinsic ---
+i16x8_narrow_i32x4_u :: proc (a: i32x4) -> i16x8 #intrinsic ---
+i16x8_widen_low_i8x16_s :: proc (a: i8x16) -> i16x8 #intrinsic ---
+i16x8_widen_high_i8x16_s :: proc (a: i8x16) -> i16x8 #intrinsic ---
+i16x8_widen_low_i8x16_u :: proc (a: i8x16) -> i16x8 #intrinsic ---
+i16x8_widen_high_i8x16_u :: proc (a: i8x16) -> i16x8 #intrinsic ---
+i16x8_shl :: proc (a: i16x8, s: i32) -> i16x8 #intrinsic ---
+i16x8_shr_s :: proc (a: i16x8, s: i32) -> i16x8 #intrinsic ---
+i16x8_shr_u :: proc (a: i16x8, s: i32) -> i16x8 #intrinsic ---
+i16x8_add :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_add_sat_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_add_sat_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_sub :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_sub_sat_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_sub_sat_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_mul :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_min_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_min_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_max_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_max_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
+i16x8_avgr_u :: proc (a: i16x8) -> i16x8 #intrinsic ---
+
+i32x4_abs :: proc (a: i32x4) -> i32x4 #intrinsic ---
+i32x4_neg :: proc (a: i32x4) -> i32x4 #intrinsic ---
+i32x4_any_true :: proc (a: i32x4) -> bool #intrinsic ---
+i32x4_all_true :: proc (a: i32x4) -> bool #intrinsic ---
+i32x4_bitmask :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_widen_low_i16x8_s :: proc (a: i16x8) -> i32x4 #intrinsic ---
+i32x4_widen_high_i16x8_s :: proc (a: i16x8) -> i32x4 #intrinsic ---
+i32x4_widen_low_i16x8_u :: proc (a: i16x8) -> i32x4 #intrinsic ---
+i32x4_widen_high_i16x8_u :: proc (a: i16x8) -> i32x4 #intrinsic ---
+i32x4_shl :: proc (a: i32x4, s: i32) -> i32x4 #intrinsic ---
+i32x4_shr_s :: proc (a: i32x4, s: i32) -> i32x4 #intrinsic ---
+i32x4_shr_u              :: proc (a: i32x4, s: i32) -> i32x4 #intrinsic ---
+i32x4_add :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_sub :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_mul :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_min_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_min_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_max_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+i32x4_max_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
+
+i64x2_neg :: proc (a: i64x2) -> i64x2 #intrinsic ---
+i64x2_shl :: proc (a: i64x2, s: i32) -> i64x2 #intrinsic ---
+i64x2_shr_s :: proc (a: i64x2, s: i32) -> i64x2 #intrinsic ---
+i64x2_shr_u :: proc (a: i64x2, s: i32) -> i64x2 #intrinsic ---
+i64x2_add :: proc (a: i64x2, b: i64x2) -> i64x2 #intrinsic ---
+i64x2_sub :: proc (a: i64x2, b: i64x2) -> i64x2 #intrinsic ---
+i64x2_mul :: proc (a: i64x2, b: i64x2) -> i64x2 #intrinsic ---
+
+f32x4_abs :: proc (a: f32x4) -> f32x4 #intrinsic ---
+f32x4_neg :: proc (a: f32x4) -> f32x4 #intrinsic ---
+f32x4_sqrt :: proc (a: f32x4) -> f32x4 #intrinsic ---
+f32x4_add :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
+f32x4_sub :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
+f32x4_mul :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
+f32x4_div :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
+f32x4_min :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
+f32x4_max :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
+
+f64x2_abs :: proc (a: f64x2) -> f64x2 #intrinsic ---
+f64x2_neg :: proc (a: f64x2) -> f64x2 #intrinsic ---
+f64x2_sqrt :: proc (a: f64x2) -> f64x2 #intrinsic ---
+f64x2_add :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_sub :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_mul :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_div :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_min :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+f64x2_max :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
+
+// NOTE: These may be backwards
+i32x4_trunc_sat_f32x4_s :: proc (v: f32x4) -> i32x4 #intrinsic ---
+i32x4_trunc_sat_f32x4_u :: proc (v: f32x4) -> i32x4 #intrinsic ---
+f32x4_convert_i32x4_s :: proc (v: i32x4) -> f32x4 #intrinsic ---
+f32x4_convert_i32x4_u :: proc (v: i32x4) -> f32x4 #intrinsic ---
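// Illustrative sketch (not part of this change): a rough C analog of a few of
// the intrinsics declared above, using clang's <wasm_simd128.h> header, to
// show the underlying WebAssembly SIMD operations (i32x4.splat, i32x4.add,
// i32x4.extract_lane). The header and flags are toolchain assumptions, e.g.
// `clang --target=wasm32 -msimd128 -c simd_demo.c`.
#include <wasm_simd128.h>

int simd_demo(void) {
    v128_t a = wasm_i32x4_splat(3);         // i32x4.splat
    v128_t b = wasm_i32x4_splat(4);         // i32x4.splat
    v128_t c = wasm_i32x4_add(a, b);        // i32x4.add
    return wasm_i32x4_extract_lane(c, 0);   // i32x4.extract_lane; lane index must be constant
}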
+++ /dev/null
-package core.intrinsics.simd
-
-// NOTE: These u8 values must be compile time known values.
-v128_const :: proc (b1: u8, b2: u8, b3: u8, b4: u8,
- b5: u8, b6: u8, b7: u8, b8: u8,
- b9: u8, b10: u8, b11: u8, b12: u8,
- b13: u8, b14: u8, b15: u8, b16: u8) -> v128 #intrinsic ---
-
-i8x16_const :: proc (b1: i8, b2: i8, b3: i8, b4: i8,
- b5: i8, b6: i8, b7: i8, b8: i8,
- b9: i8, b10: i8, b11: i8, b12: i8,
- b13: i8, b14: i8, b15: i8, b16: i8) -> i8x16 #intrinsic ---
-i16x8_const :: proc (b1: i16, b2: i16, b3: i16, b4: i16,
- b5: i16, b6: i16, b7: i16, b8: i16) -> i16x8 #intrinsic ---
-i32x4_const :: proc (b1: i32, b2: i32, b3: i32, b4: i32) -> i32x4 #intrinsic ---
-i64x2_const :: proc (b1: i64, b2: i64) -> i64x2 #intrinsic ---
-f32x4_const :: proc (b1: f32, b2: f32, b3: f32, b4: f32) -> f32x4 #intrinsic ---
-f64x2_const :: proc (b1: f64, b2: f64) -> f64x2 #intrinsic ---
-
-// NOTE: These u8 values must be compile time known values.
-i8x16_shuffle :: proc (a: v128, b: v128,
- b1: u8, b2: u8, b3: u8, b4: u8,
- b5: u8, b6: u8, b7: u8, b8: u8,
- b9: u8, b10: u8, b11: u8, b12: u8,
- b13: u8, b14: u8, b15: u8, b16: u8) -> v128 #intrinsic ---
-
-i8x16_extract_lane_s :: proc (v: i8x16, l: u32) -> i8 #intrinsic ---
-i8x16_extract_lane_u :: proc (v: i8x16, l: u32) -> u8 #intrinsic ---
-i8x16_replace_lane :: proc (v: i8x16, l: u32, val: i8) -> i8x16 #intrinsic ---
-i16x8_extract_lane_s :: proc (v: i16x8, l: u32) -> i16 #intrinsic ---
-i16x8_extract_lane_u :: proc (v: i16x8, l: u32) -> u16 #intrinsic ---
-i16x8_replace_lane :: proc (v: i16x8, l: u32, val: i16) -> i16x8 #intrinsic ---
-i32x4_extract_lane :: proc (v: i32x4, l: u32) -> i32 #intrinsic ---
-i32x4_replace_lane :: proc (v: i32x4, l: u32, val: i32) -> i32x4 #intrinsic ---
-i64x2_extract_lane :: proc (v: i64x2, l: u32) -> i64 #intrinsic ---
-i64x2_replace_lane :: proc (v: i64x2, l: u32, val: i64) -> i64x2 #intrinsic ---
-f32x4_extract_lane :: proc (v: f32x4, l: u32) -> f32 #intrinsic ---
-f32x4_replace_lane :: proc (v: f32x4, l: u32, val: f32) -> f32x4 #intrinsic ---
-f64x2_extract_lane :: proc (v: f64x2, l: u32) -> f64 #intrinsic ---
-f64x2_replace_lane :: proc (v: f64x2, l: u32, val: f64) -> f64x2 #intrinsic ---
-
-i8x16_swizzle :: proc (v: v128, s: v128) -> v128 #intrinsic ---
-i8x16_splat :: proc (val: i8) -> i8x16 #intrinsic ---
-i16x8_splat :: proc (val: i16) -> i16x8 #intrinsic ---
-i32x4_splat :: proc (val: i32) -> i32x4 #intrinsic ---
-i64x2_splat :: proc (val: i64) -> i64x2 #intrinsic ---
-f32x4_splat :: proc (val: f32) -> f32x4 #intrinsic ---
-f64x2_splat :: proc (val: f64) -> f64x2 #intrinsic ---
-
-i8x16_eq :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_neq :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_lt_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_lt_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_gt_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_gt_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_le_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_le_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_ge_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_ge_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-
-i16x8_eq :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_neq :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_lt_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_lt_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_gt_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_gt_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_le_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_le_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_ge_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_ge_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-
-i32x4_eq :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_neq :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_lt_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_lt_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_gt_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_gt_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_le_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_le_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_ge_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_ge_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-
-f32x4_eq :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
-f32x4_neq :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
-f32x4_lt :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
-f32x4_gt :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
-f32x4_le :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
-f32x4_ge :: proc (a: f32x4, b: f32x4) -> i32x4 #intrinsic ---
-
-f64x2_eq :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_neq :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_lt :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_gt :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_le :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_ge :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-
-v128_not :: proc (v: v128) -> v128 #intrinsic ---
-v128_and :: proc (a: v128, b: v128) -> v128 #intrinsic ---
-v128_andnot :: proc (a: v128, b: v128) -> v128 #intrinsic ---
-v128_or :: proc (a: v128, b: v128) -> v128 #intrinsic ---
-v128_xor :: proc (a: v128, b: v128) -> v128 #intrinsic ---
-v128_bitselect :: proc (a: v128, b: v128, c: v128) -> v128 #intrinsic ---
-
-i8x16_abs :: proc (a: i8x16) -> i8x16 #intrinsic ---
-i8x16_neg :: proc (a: i8x16) -> i8x16 #intrinsic ---
-i8x16_any_true :: proc (a: i8x16) -> bool #intrinsic ---
-i8x16_all_true :: proc (a: i8x16) -> bool #intrinsic ---
-i8x16_bitmask :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_narrow_i16x8_s :: proc (a: i16x8) -> i8x16 #intrinsic ---
-i8x16_narrow_i16x8_u :: proc (a: i16x8) -> i8x16 #intrinsic ---
-i8x16_shl :: proc (a: i8x16, s: i32) -> i8x16 #intrinsic ---
-i8x16_shr_s :: proc (a: i8x16, s: i32) -> i8x16 #intrinsic ---
-i8x16_shr_u :: proc (a: i8x16, s: i32) -> i8x16 #intrinsic ---
-i8x16_add :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_add_sat_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_add_sat_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_sub :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_sub_sat_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_sub_sat_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_min_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_min_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_max_s :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_max_u :: proc (a: i8x16, b: i8x16) -> i8x16 #intrinsic ---
-i8x16_avgr_u :: proc (a: i8x16) -> i8x16 #intrinsic ---
-
-i16x8_abs :: proc (a: i16x8) -> i16x8 #intrinsic ---
-i16x8_neg :: proc (a: i16x8) -> i16x8 #intrinsic ---
-i16x8_any_true :: proc (a: i16x8) -> bool #intrinsic ---
-i16x8_all_true :: proc (a: i16x8) -> bool #intrinsic ---
-i16x8_bitmask :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_narrow_i32x4_s :: proc (a: i32x4) -> i16x8 #intrinsic ---
-i16x8_narrow_i32x4_u :: proc (a: i32x4) -> i16x8 #intrinsic ---
-i16x8_widen_low_i8x16_s :: proc (a: i8x16) -> i16x8 #intrinsic ---
-i16x8_widen_high_i8x16_s :: proc (a: i8x16) -> i16x8 #intrinsic ---
-i16x8_widen_low_i8x16_u :: proc (a: i8x16) -> i16x8 #intrinsic ---
-i16x8_widen_high_i8x16_u :: proc (a: i8x16) -> i16x8 #intrinsic ---
-i16x8_shl :: proc (a: i16x8, s: i32) -> i16x8 #intrinsic ---
-i16x8_shr_s :: proc (a: i16x8, s: i32) -> i16x8 #intrinsic ---
-i16x8_shr_u :: proc (a: i16x8, s: i32) -> i16x8 #intrinsic ---
-i16x8_add :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_add_sat_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_add_sat_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_sub :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_sub_sat_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_sub_sat_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_mul :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_min_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_min_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_max_s :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_max_u :: proc (a: i16x8, b: i16x8) -> i16x8 #intrinsic ---
-i16x8_avgr_u :: proc (a: i16x8) -> i16x8 #intrinsic ---
-
-i32x4_abs :: proc (a: i32x4) -> i32x4 #intrinsic ---
-i32x4_neg :: proc (a: i32x4) -> i32x4 #intrinsic ---
-i32x4_any_true :: proc (a: i32x4) -> bool #intrinsic ---
-i32x4_all_true :: proc (a: i32x4) -> bool #intrinsic ---
-i32x4_bitmask :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_widen_low_i16x8_s :: proc (a: i16x8) -> i32x4 #intrinsic ---
-i32x4_widen_high_i16x8_s :: proc (a: i16x8) -> i32x4 #intrinsic ---
-i32x4_widen_low_i16x8_u :: proc (a: i16x8) -> i32x4 #intrinsic ---
-i32x4_widen_high_i16x8_u :: proc (a: i16x8) -> i32x4 #intrinsic ---
-i32x4_shl :: proc (a: i32x4, s: i32) -> i32x4 #intrinsic ---
-i32x4_shr_s :: proc (a: i32x4, s: i32) -> i32x4 #intrinsic ---
-i32x4_shl_u :: proc (a: i32x4, s: i32) -> i32x4 #intrinsic ---
-i32x4_add :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_sub :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_mul :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_min_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_min_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_max_s :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-i32x4_max_u :: proc (a: i32x4, b: i32x4) -> i32x4 #intrinsic ---
-
-i64x2_neg :: proc (a: i64x2) -> i64x2 #intrinsic ---
-i64x2_shl :: proc (a: i64x2, s: i32) -> i64x2 #intrinsic ---
-i64x2_shr_s :: proc (a: i64x2, s: i32) -> i64x2 #intrinsic ---
-i64x2_shr_u :: proc (a: i64x2, s: i32) -> i64x2 #intrinsic ---
-i64x2_add :: proc (a: i64x2, b: i64x2) -> i64x2 #intrinsic ---
-i64x2_sub :: proc (a: i64x2, b: i64x2) -> i64x2 #intrinsic ---
-i64x2_mul :: proc (a: i64x2, b: i64x2) -> i64x2 #intrinsic ---
-
-f32x4_abs :: proc (a: f32x4) -> f32x4 #intrinsic ---
-f32x4_neg :: proc (a: f32x4) -> f32x4 #intrinsic ---
-f32x4_sqrt :: proc (a: f32x4) -> f32x4 #intrinsic ---
-f32x4_add :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
-f32x4_sub :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
-f32x4_mul :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
-f32x4_div :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
-f32x4_min :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
-f32x4_max :: proc (a: f32x4, b: f32x4) -> f32x4 #intrinsic ---
-
-f64x2_abs :: proc (a: f64x2) -> f64x2 #intrinsic ---
-f64x2_neg :: proc (a: f64x2) -> f64x2 #intrinsic ---
-f64x2_sqrt :: proc (a: f64x2) -> f64x2 #intrinsic ---
-f64x2_add :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_sub :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_mul :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_div :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_min :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-f64x2_max :: proc (a: f64x2, b: f64x2) -> f64x2 #intrinsic ---
-
-// NOTE: These may be backwards
-i32x4_trunc_sat_f32x4_s :: proc (v: f32x4) -> i32x4 #intrinsic ---
-i32x4_trunc_sat_f32x4_u :: proc (v: f32x4) -> i32x4 #intrinsic ---
-f32x4_convert_i32x4_s :: proc (v: i32x4) -> f32x4 #intrinsic ---
-f32x4_convert_i32x4_u :: proc (v: i32x4) -> f32x4 #intrinsic ---
#include "small_windows.h"
#endif
+#if defined(_WIN32) || defined(_WIN64)
+ #define _BH_WINDOWS 1
+#endif
+
+#if defined(__unix__)
+ #define _BH_LINUX 1
+#endif
+
//-------------------------------------------------------------------------------------
// Better types
#define forir(var, hi, lo) for (i64 var = (hi); var >= (lo); var--)
#define forll(T, var, start, step) for (T* var = (start); var != NULL; var = var->step)
-#ifdef BH_DEBUG
+#if defined(BH_DEBUG) && !defined(_BH_WINDOWS)
#define DEBUG_HERE __asm("int $3")
#else
#define DEBUG_HERE
BH_FILE_WHENCE_END = SEEK_END,
} bh_file_whence;
-#ifdef _WIN32
+#ifdef _BH_WINDOWS
typedef HANDLE bh_file_descriptor;
#else
typedef int bh_file_descriptor;
bh_file_error bh_file_new(bh_file* file, bh_file_descriptor fd, const char* filename);
b32 bh_file_read_at(bh_file* file, i64 offset, void* buffer, isize buff_size, isize* bytes_read);
b32 bh_file_write_at(bh_file* file, i64 offset, void const* buffer, isize buff_size, isize* bytes_wrote);
-static b32 bh__file_seek_wrapper(i32 fd, i64 offset, bh_file_whence whence, i64* new_offset);
+i64 bh_file_seek_to(bh_file* file, i64 offset);
i64 bh_file_seek_to_end(bh_file* file);
i64 bh_file_skip(bh_file* file, i64 bytes);
i64 bh_file_tell(bh_file* file);
-//-------------------------------------------------------------------------------------
-// Better debug functions
-//-------------------------------------------------------------------------------------
-#ifdef BH_DEBUG
-
-void* bh__debug_malloc(size_t size, const char* file, u64 line);
-i32 bh__debug_posix_memalign(void** ret, size_t alignment, size_t size, const char* file, u64 line);
-void bh__debug_free(void* ptr, const char* file, u64 line);
-void* bh__debug_realloc(void* ptr, size_t size, const char* file, u64 line);
-
-#ifdef BH_DEFINE
-
-void* bh__debug_malloc(size_t size, const char* file, u64 line) {
- void* p = malloc(size);
- bh_printf("[DEBUG] %p = malloc(%d) at %s:%d\n", p, size, file, line);
- return p;
-}
-
-i32 bh__debug_posix_memalign(void** ret, size_t alignment, size_t size, const char* file, u64 line) {
- i32 success = posix_memalign(ret, alignment, size);
- bh_printf("[DEBUG] %p = posix_memalign(%d, %d) at %s:%d\n", *ret, alignment, size, file, line);
- return success;
-}
-
-void bh__debug_free(void* ptr, const char* file, u64 line) {
- bh_printf("[DEBUG] free(%p) at %s:%d\n", ptr, file, line);
- free(ptr);
-}
-
-void* bh__debug_realloc(void* ptr, size_t size, const char* file, u64 line) {
- void* p = realloc(ptr, size);
- bh_printf("[DEBUG] %p = realloc(%p, %d) at %s:%d\n", p, ptr, size, file, line);
- return p;
-}
-
-#endif
-
-#define malloc(size) (bh__debug_malloc(size, __FILE__, __LINE__))
-#define posix_memalign(ret, alignment, size) (bh__debug_posix_memalign(ret, alignment, size, __FILE__, __LINE__))
-#define free(ptr) (bh__debug_free(ptr, __FILE__, __LINE__))
-#define realloc(ptr, size) (bh__debug_realloc(ptr, size, __FILE__, __LINE__))
-
-#endif
-
-
-
-
-
#define bh_arr_grow(arr, cap) (bh__arr_grow(bh_arr_allocator(arr), (void **) &(arr), sizeof(*(arr)), cap))
#define bh_arr_shrink(arr, cap) (bh__arr_shrink((void **) &(arr), sizeof(*(arr)), cap))
-#define bh_arr_set_length(arr, n) ( \
- bh__arr_grow(bh_arr_allocator(arr), (void **) &(arr), sizeof(*(arr)), n), \
- bh__arrhead(arr)->length = n)
+#define bh_arr_set_length(arr, n) (bh__arrhead(arr)->length = n)
#define bh_arr_insertn(arr, i, n) (bh__arr_insertn((void **) &(arr), sizeof(*(arr)), i, n))
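// Caller-side usage sketch for the macro change above: bh_arr_set_length no
// longer grows the array, so capacity is reserved first, mirroring the
// bh_arr_new + bh_arr_set_length pairing in the onyxchecker.c hunk further
// below. bh_heap_allocator() is assumed to be the header's general-purpose
// allocator; this is an illustration, not part of the patch.
#include "bh.h"

static void arr_set_length_example(i32 count) {
    bh_arr(i32) values = NULL;
    bh_arr_new(bh_heap_allocator(), values, count);  // reserve capacity up front
    bh_arr_set_length(values, count);                // now only records the length
    bh_arr_zero(values);                             // zero the live elements
    bh_arr_free(values);
}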
}
b32 char_in_range(const char lo, const char hi, const char a) {
- return lo <= a <= hi;
+ return lo <= a && a <= hi;
}
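// Illustration of the bug fixed above (standard C semantics): the old form
// `lo <= a <= hi` parses as `(lo <= a) <= hi`, so the first comparison
// collapses to 0 or 1 before being compared against hi.
static int chained_compare_demo(void) {
    char lo = 'a', hi = 'z', a = '!';
    int old_form = lo <= a <= hi;        // (97 <= 33) -> 0, then 0 <= 122 -> 1: wrongly "in range"
    int new_form = lo <= a && a <= hi;   // 0: correctly out of range
    return old_form != new_form;         // returns 1, demonstrating the difference
}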
i64 chars_match(char* ptr1, char* ptr2) {
switch (action) {
case bh_allocator_action_alloc: {
+#if defined(_BH_WINDOWS)
+ retval = _aligned_malloc(size, alignment);
+#elif defined(_BH_LINUX)
i32 success = posix_memalign(&retval, alignment, size);
-
+#endif
if (flags & bh_allocator_flag_clear && retval != NULL) {
memset(retval, 0, size);
}
case bh_allocator_action_resize: {
// TODO: Maybe replace with better custom function
+#if defined(_BH_WINDOWS)
+ retval = _aligned_realloc(prev_memory, size, alignment);
+#elif defined(_BH_LINUX)
retval = realloc(prev_memory, size);
+#endif
} break;
case bh_allocator_action_free: {
+#if defined(_BH_WINDOWS)
+ _aligned_free(prev_memory);
+#elif defined(_BH_LINUX)
free(prev_memory);
+#endif
} break;
}
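// Sketch of the "better custom function" the TODO above alludes to, assuming
// the caller can supply the previous allocation size (information the
// allocator action does not currently carry): plain realloc() is not
// guaranteed to preserve the alignment obtained from posix_memalign().
#if defined(_BH_LINUX)
static void* bh__aligned_resize_sketch(void* prev, isize old_size, isize new_size, isize alignment) {
    void* fresh = NULL;
    if (posix_memalign(&fresh, alignment, new_size) != 0) return NULL;
    if (prev != NULL) {
        memcpy(fresh, prev, old_size < new_size ? old_size : new_size);
        free(prev);
    }
    return fresh;
}
#endif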
void bh_managed_heap_free(bh_managed_heap* mh) {
bh_arr_each(bh__imap_entry, p, mh->ptrs.entries) {
+#if defined(_BH_WINDOWS)
+ _aligned_free((void *) p->key);
+#elif defined(_BH_LINUX)
free((void *) p->key);
+#endif
}
bh_imap_free(&mh->ptrs);
switch (action) {
case bh_allocator_action_alloc: {
+#if defined(_BH_WINDOWS)
+ retval = _aligned_malloc(size, alignment);
+#elif defined(_BH_LINUX)
i32 success = posix_memalign(&retval, alignment, size);
+#endif
if (flags & bh_allocator_flag_clear && retval != NULL) {
memset(retval, 0, size);
case bh_allocator_action_resize: {
bh_imap_delete(&mh->ptrs, (u64) prev_memory);
+#if defined(_BH_WINDOWS)
+ retval = _aligned_realloc(prev_memory, size, alignment);
+#elif defined(_BH_LINUX)
retval = realloc(prev_memory, size);
+#endif
bh_imap_put(&mh->ptrs, (u64) retval, 1);
} break;
case bh_allocator_action_free: {
bh_imap_delete(&mh->ptrs, (u64) prev_memory);
+#if defined(_BH_WINDOWS)
+ _aligned_free(prev_memory);
+#elif defined(_BH_LINUX)
free(prev_memory);
+#endif
} break;
}
//-------------------------------------------------------------------------------------
#ifndef BH_NO_FILE
+static b32 bh__file_seek_wrapper(bh_file_descriptor fd, i64 offset, bh_file_whence whence, i64* new_offset);
+
bh_file_error bh_file_get_standard(bh_file* file, bh_file_standard stand) {
const char* filename = NULL;
-#if defined(_WIN32)
+#if defined(_BH_WINDOWS)
bh_file_descriptor sd_fd;
switch (stand) {
default:
return BH_FILE_ERROR_BAD_FD;
}
+ file->fd = sd_fd;
-#elif defined(__linux__)
+#elif defined(_BH_LINUX)
i32 sd_fd = -1;
switch (stand) {
file->fd = sd_fd;
#endif
+
file->filename = filename;
return BH_FILE_ERROR_NONE;
}
}
bh_file_error bh_file_open_mode(bh_file* file, bh_file_mode mode, const char* filename) {
-#if _WIN32
+#if defined(_BH_WINDOWS)
DWORD desired_access;
DWORD creation_disposition;
- void *handle;
- wchar_t *w_text;
-
switch (mode & BH_FILE_MODE_MODES) {
case BH_FILE_MODE_READ:
desired_access = GENERIC_READ;
}
- file->fd = CreateFileA(filename, desired_access, FILE_SHARE_READ | FILE_SHARE_DELETE, NULL, creation_disposition, FILE_ATTRIBUTE_NORMAL, NULL);
+ file->fd = CreateFileA(filename,
+ desired_access,
+ FILE_SHARE_READ,
+ NULL,
+ creation_disposition,
+ FILE_ATTRIBUTE_NORMAL,
+ NULL);
+
+ if (file->fd == INVALID_HANDLE_VALUE) {
+ return BH_FILE_ERROR_INVALID;
+ }
file->filename = filename;
return BH_FILE_ERROR_NONE;
-#elif __linux__
+#elif defined(_BH_LINUX)
i32 os_mode = 0;
switch (mode & BH_FILE_MODE_MODES) {
file->filename = filename;
return BH_FILE_ERROR_NONE;
-#else
- return BH_FILE_ERROR_INVALID;
#endif
}
}
b32 bh_file_read_at(bh_file* file, i64 offset, void* buffer, isize buff_size, isize* bytes_read) {
+#if defined(_BH_WINDOWS)
+ bh_file_seek_to(file, offset);
+ BOOL res = ReadFile(file->fd, buffer, buff_size, (i32 *) bytes_read, NULL);
+ if (res) return 1;
+ else return 0;
+
+#elif defined(_BH_LINUX)
isize res = pread(file->fd, buffer, buff_size, offset);
if (res < 0) return 0;
if (bytes_read) *bytes_read = res;
return 1;
+#endif
}
b32 bh_file_write_at(bh_file* file, i64 offset, void const* buffer, isize buff_size, isize* bytes_wrote) {
isize res;
i64 current_offset = 0;
+
+#if defined(_BH_WINDOWS)
bh__file_seek_wrapper(file->fd, 0, BH_FILE_WHENCE_CURRENT, &current_offset);
+ res = (isize) WriteFile(file->fd, buffer, buff_size, (i32 *) bytes_wrote, NULL);
+ return res;
+
+#elif defined(_BH_LINUX)
if (current_offset == offset || file->fd == 1 || file->fd == 2) {
// Standard in and out do not like pwrite()
res = write(file->fd, buffer, buff_size);
if (bytes_wrote) *bytes_wrote = res;
return 1;
+#endif
}
-static b32 bh__file_seek_wrapper(i32 fd, i64 offset, bh_file_whence whence, i64* new_offset) {
+static b32 bh__file_seek_wrapper(bh_file_descriptor fd, i64 offset, bh_file_whence whence, i64* new_offset) {
+#if defined(_BH_WINDOWS)
+ LARGE_INTEGER new_file_pointer;
+ LARGE_INTEGER dest;
+ dest.QuadPart = offset;
+
+ BOOL res = SetFilePointerEx(fd, dest, &new_file_pointer, whence);
+ *new_offset = new_file_pointer.QuadPart;
+
+ return res;
+
+#elif defined(_BH_LINUX)
i64 res = lseek64(fd, offset, whence);
if (res < 0) return 0;
if (new_offset) *new_offset = res;
return 1;
+#endif
}
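// A minimal sketch of the bh_file_seek_to declared earlier (its definition is
// not part of these hunks), assuming a BH_FILE_WHENCE_BEGIN value (SEEK_SET /
// FILE_BEGIN) exists alongside the CURRENT and END members shown above.
i64 bh_file_seek_to(bh_file* file, i64 offset) {
    i64 new_offset = -1;
    bh__file_seek_wrapper(file->fd, offset, BH_FILE_WHENCE_BEGIN, &new_offset);
    return new_offset;
}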
// Returns new offset
bh_file_error bh_file_close(bh_file* file) {
bh_file_error err = BH_FILE_ERROR_NONE;
+
+#if defined(_BH_WINDOWS)
+ BOOL success = CloseHandle(file->fd);
+ if (!success) err = BH_FILE_ERROR_INVALID;
+
+ return err;
+
+#elif defined(_BH_LINUX)
i32 res = close(file->fd);
if (res < 0)
err = BH_FILE_ERROR_INVALID;
return err;
+#endif
}
b32 bh_file_read(bh_file* file, void* buffer, isize buff_size) {
bh__arr* arrptr = bh__arrhead(*arr);
bh_free(arrptr->allocator, arrptr);
*arr = NULL;
+ return 1;
}
void* bh__arr_copy(bh_allocator alloc, void *arr, i32 elemsize) {
bh_free((*table)->allocator, *table);
*table = NULL;
+ return 1;
}
// Assumes NULL terminated string for key
u64 bh_time_curr() {
+#if defined(_BH_WINDOWS)
+ return clock();
+
+#elif defined(_BH_LINUX)
struct timespec spec;
clock_gettime(CLOCK_REALTIME, &spec);
}
return sec * 1000 + ms;
+#endif
}
u64 bh_time_duration(u64 old) {
+#if defined(_BH_WINDOWS)
+ u64 curr = bh_time_curr();
+ return (u64) (((f64) (curr - old)) / CLOCKS_PER_SEC);
+
+#elif defined(_BH_LINUX)
u64 curr = bh_time_curr();
return curr - old;
+#endif
}
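// Unit note, shown only as an illustrative sketch: the Linux path above
// returns elapsed milliseconds, while the Windows path divides clock() ticks
// by CLOCKS_PER_SEC and so yields seconds. One way to report milliseconds on
// Windows as well, assuming GetTickCount64() is available to this build:
#if defined(_BH_WINDOWS)
static u64 bh__time_curr_ms_sketch() {
    return (u64) GetTickCount64();   // milliseconds since system start
}
#endif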
#endif // ifdef BH_DEFINE
} MONITORINFO;
#define INFINITE 0xffffffffl
-#define INVALID_HANDLE_VALUE ((void *)(intptr)(-1))
+#define INVALID_HANDLE_VALUE ((void *)(intptr_t)(-1))
typedef DWORD WINAPI THREAD_START_ROUTINE(void *parameter);
package main
#load "core/std/wasi"
-#load "core/simd_intrinsics"
+#load "core/intrinsics/simd"
use package core
-use package simd
+use package core.intrinsics.simd
main :: proc (args: [] cstr) {
init_positions();
#ifndef CORE_INSTALLATION
- #ifdef __unix__
+ #ifdef _BH_LINUX
#define CORE_INSTALLATION "/usr/share/onyx"
#elif defined(_WIN32) || defined(_WIN64)
#define CORE_INSTALLATION "C:\\Program Files\\Onyx"
output_dummy_progress_bar(compiler_state);
// Slowing things down for the effect
+#if defined(_BH_WINDOWS)
+ Sleep(1);
+#elif defined(_BH_LINUX)
usleep(1000);
+#endif
if (ent.expr->token) {
OnyxFilePos pos = ent.expr->token->pos;
case Ast_Kind_Directive_Solidify: return sizeof(AstDirectiveSolidify);
case Ast_Kind_Count: return 0;
}
+
+ return 0;
}
AstNode* ast_clone_list(bh_allocator a, void* n) {
OnyxTokenizer tknizer = {
.start = fc->data,
.curr = fc->data,
- .end = fc->data + fc->length,
+ .end = bh_pointer_add(fc->data, fc->length),
.filename = fc->filename,
}
if (bh_arr_length(sl->values) == 0) {
+ bh_arr_new(global_heap_allocator, sl->values, type_structlike_mem_count(sl->type));
bh_arr_set_length(sl->values, type_structlike_mem_count(sl->type));
bh_arr_zero(sl->values);
func_type->kind = Type_Kind_Function;
func_type->Function.param_count = param_count;
+ func_type->Function.needed_param_count = param_count;
func_type->Function.return_type = type_build_from_ast(alloc, ftype_node->return_type);
if (param_count > 0)
case Ast_Kind_Symbol:
assert(("symbol node in type expression", 0));
return NULL;
-
- default: return NULL;
}
+
+ return NULL;
}
Type* type_build_function_type(bh_allocator alloc, AstFunction* func) {
num->type = type;
return 1;
}
- if (value <= ((u64) 1 << (type->Basic.size * 8)) - 1) {
- num->type = type;
- return 1;
+ switch (type->Basic.size) {
+ case 1: if (value <= 255) {
+ num->type = type;
+ return 1;
+                    } break;
+ case 2: if (value <= 65535) {
+ num->type = type;
+ return 1;
+                    } break;
+ case 4: if (value <= 4294967295) {
+ num->type = type;
+ return 1;
+ }
}
- onyx_report_error(num->token->pos, "Integer constant with value '%l' does not fit into %d-bits.",
+ onyx_report_error(num->token->pos, "Unsigned integer constant with value '%l' does not fit into %d-bits.",
num->value.l,
type->Basic.size * 8);
} else {
i64 value = (i64) num->value.l;
switch (type->Basic.size) {
- case 1: if (-128 <= value && value <= 127) {
+ case 1: if (-128ll <= value && value <= 127ll) {
num->value.i = (i32) value;
num->type = type;
return 1;
} break;
- case 2: if (-32768 <= value && value <= 32767) {
+ case 2: if (-32768ll <= value && value <= 32767ll) {
num->value.i = (i32) value;
num->type = type;
return 1;
} break;
- case 4: if (-2147483648 <= value && value <= 2147483647) {
+ case 4: if (-2147483648ll <= value && value <= 2147483647ll) {
num->value.i = (i32) value;
num->type = type;
return 1;