From: Brendan Hansen
Date: Sun, 8 Jan 2023 05:32:21 +0000 (-0600)
Subject: added experimental ability to ignore some return values
X-Git-Url: https://git.brendanfh.com/?a=commitdiff_plain;h=753d140dff12cf599d5d68a034913749fd7aa0f2;p=onyx.git

added experimental ability to ignore some return values
---

diff --git a/compiler/build.sh b/compiler/build.sh
index 8fd62e5c..82af69d1 100755
--- a/compiler/build.sh
+++ b/compiler/build.sh
@@ -45,7 +45,6 @@ C_FILES="onyxrun wasm_runtime "
 echo "Compiling onyx-run..."
 $CC -o "../bin/onyx-run" \
     $FLAGS \
-    "-DCORE_INSTALLATION=\"CORE_DIR\"" \
     $INCLUDES \
     $(echo "$C_FILES" | sed 's/ /\n/g;s/\([a-zA-Z_0-9]*\)\n/src\/\1.c\n/g;s/\n/ /g') \
     $LIBS
diff --git a/compiler/include/astnodes.h b/compiler/include/astnodes.h
index de02e1ae..fdf8c45c 100644
--- a/compiler/include/astnodes.h
+++ b/compiler/include/astnodes.h
@@ -726,6 +726,7 @@ struct AstCall {
     };
 
     VarArgKind va_kind;
+    i32 ignored_return_value_count;
 };
 
 struct AstCompound {
     AstTyped_base;
@@ -1897,6 +1898,12 @@ static inline AstFunction* get_function_from_node(AstNode* node) {
     return NULL;
 }
 
+static inline AstCall *get_call_expr_from_node(AstNode *node) {
+    if (node->kind == Ast_Kind_Call)        return (AstCall *) node;
+    if (node->kind == Ast_Kind_Method_Call) return (AstCall *) ((AstBinaryOp *) node)->right;
+    return NULL;
+}
+
 static inline void convert_polyproc_to_function(AstFunction *func) {
     if (func->kind != Ast_Kind_Polymorphic_Proc) return;
diff --git a/compiler/src/astnodes.c b/compiler/src/astnodes.c
index 22f27ff2..1a880994 100644
--- a/compiler/src/astnodes.c
+++ b/compiler/src/astnodes.c
@@ -733,16 +733,16 @@ TypeMatch unify_node_and_type_(AstTyped** pnode, Type* type, b32 permanent) {
     // [N] T  -> [] T
     // [..] T -> [] T
     // ..T    -> [] T
-    else if (node_type && type->kind == Type_Kind_Slice) {
-        if (node_type->kind == Type_Kind_Array || node_type->kind == Type_Kind_DynArray || node_type->kind == Type_Kind_VarArgs) {
-            char* dummy;
-            b32 legal = cast_is_legal(node_type, type, &dummy);
-            if (permanent && legal) {
-                *pnode = (AstTyped *) make_cast(context.ast_alloc, node, type);
-            }
+    else if (node_type && type->kind == Type_Kind_Slice &&
+        (node_type->kind == Type_Kind_Array || node_type->kind == Type_Kind_DynArray || node_type->kind == Type_Kind_VarArgs)) {
 
-            return legal ? TYPE_MATCH_SUCCESS : TYPE_MATCH_FAILED;
+        char* dummy;
+        b32 legal = cast_is_legal(node_type, type, &dummy);
+        if (permanent && legal) {
+            *pnode = (AstTyped *) make_cast(context.ast_alloc, node, type);
         }
+
+        return legal ? TYPE_MATCH_SUCCESS : TYPE_MATCH_FAILED;
     }
 
     // If the node is a numeric literal, try to convert it to the destination type.
@@ -841,6 +841,38 @@ TypeMatch unify_node_and_type_(AstTyped** pnode, Type* type, b32 permanent) {
         }
     }
 
+    //
+    // This case enables the ability to have fewer values on the
+    // left hand side of an assignment than the right hand side
+    // call returns.
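+    //
+    // For example (an illustrative Onyx snippet, not taken from this
+    // commit; `returns_three` is a hypothetical procedure returning
+    // three values):
+    //
+    //     x, y = returns_three();    // the third value is ignored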
+    else if (node_type->kind == Type_Kind_Compound) {
+        AstCall *call = get_call_expr_from_node((AstNode *) node);
+        if (!call) return TYPE_MATCH_FAILED;
+
+        i32 keep = 0;
+
+        if (type->kind != Type_Kind_Compound) {
+            if (!types_are_compatible(node_type->Compound.types[0], type)) {
+                return TYPE_MATCH_FAILED;
+            }
+
+            keep += type_linear_member_count(type);
+
+        } else {
+            fori (i, 0, type->Compound.count) {
+                if (!types_are_compatible(node_type->Compound.types[i], type->Compound.types[i])) {
+                    return TYPE_MATCH_FAILED;
+                }
+
+                keep += type_linear_member_count(node_type->Compound.types[i]);
+            }
+        }
+
+        call->ignored_return_value_count = type_linear_member_count(node_type) - keep;
+
+        return TYPE_MATCH_SUCCESS;
+    }
+
     return TYPE_MATCH_FAILED;
 }
 
diff --git a/compiler/src/checker.c b/compiler/src/checker.c
index 27c6893d..d8fa6066 100644
--- a/compiler/src/checker.c
+++ b/compiler/src/checker.c
@@ -842,6 +842,28 @@ static AstCall* binaryop_try_operator_overload(AstBinaryOp* binop, AstTyped* thi
 }
 
+static CheckStatus assign_type_or_check(AstTyped **node, Type *type, OnyxToken *report_loc) {
+    if (node && (*node)->type == NULL) {
+        (*node)->type = type;
+
+    } else {
+        TYPE_CHECK(node, type) {
+            ERROR_(report_loc->pos,
+                    "Cannot assign value of type '%s' to a '%s'.",
+                    type_get_name(type),
+                    node_get_type_name(*node));
+            return Check_Error;
+        }
+    }
+
+    return Check_Success;
+}
+
+#define TRY_ASSIGN_TYPE_OR_FAIL(node, type, report) do { \
+        CheckStatus stat = assign_type_or_check((node), (type), (report)); \
+        if (stat != Check_Success) return stat; \
+    } while (0)
+
 CheckStatus check_binaryop_assignment(AstBinaryOp** pbinop) {
     AstBinaryOp* binop = *pbinop;
     if (current_checking_level == EXPRESSION_LEVEL)
@@ -880,20 +902,38 @@ CheckStatus check_binaryop_assignment(AstBinaryOp** pbinop) {
             ERROR(binop->token->pos, "Could not resolve type of right hand side to infer.");
 
         } else {
-            YIELD(binop->token->pos, "Trying to resolve try of right hand side.");
+            YIELD(binop->token->pos, "Trying to resolve type of right hand side.");
         }
     }
 
     if (right_type->kind == Type_Kind_Compound) {
         AstCompound* lhs = (AstCompound *) binop->left;
-        i32 expr_count = right_type->Compound.count;
-        if (lhs->kind != Ast_Kind_Compound || bh_arr_length(lhs->exprs) != expr_count) {
-            ERROR_(binop->token->pos, "Expected left hand side to have %d expressions.", expr_count);
+
+        i32 given_expr_count = right_type->Compound.count;
+        i32 store_expr_count = 1;
+        if (lhs->kind == Ast_Kind_Compound) {
+            store_expr_count = bh_arr_length(lhs->exprs);
+        }
+
+        if (get_call_expr_from_node((AstNode *) binop->right)) {
+            if (store_expr_count > given_expr_count) {
+                ERROR_(binop->token->pos, "Left hand side can only have %d expressions here.", given_expr_count);
+            }
+
+        } else if (store_expr_count != given_expr_count) {
+            ERROR_(binop->token->pos, "Expected left hand side to have %d expressions.", given_expr_count);
         }
 
-        fori (i, 0, expr_count) lhs->exprs[i]->type = right_type->Compound.types[i];
+        if (store_expr_count == 1 && lhs->kind != Ast_Kind_Compound) {
+            TRY_ASSIGN_TYPE_OR_FAIL(&binop->left, right_type->Compound.types[0], binop->token);
 
-        lhs->type = type_build_compound_type(context.ast_alloc, lhs);
+        } else {
+            fori (i, 0, store_expr_count) {
+                TRY_ASSIGN_TYPE_OR_FAIL(&lhs->exprs[i], right_type->Compound.types[i], binop->token);
+            }
+
+            lhs->type = type_build_compound_type(context.ast_alloc, lhs);
+        }
 
     } else {
         binop->left->type = right_type;
@@ -1194,11 +1234,7 @@ CheckStatus check_binaryop(AstBinaryOp** pbinop) {
     if (binop->operation == Binary_Op_Bool_And || binop->operation == Binary_Op_Bool_Or)
         return check_binaryop_bool(pbinop);
 
-    // NOTE: The left side cannot be compound.
-    //       The right side always is numeric.
-    //       The left side cannot be rawptr.
-    if (type_is_compound(binop->left->type))  goto bad_binaryop;
-    if (!type_is_numeric(binop->right->type)) goto bad_binaryop;
+    // NOTE: The left side cannot be rawptr.
     if (type_is_rawptr(binop->left->type)) {
         ERROR(binop->token->pos, "Cannot operate on a 'rawptr'. Cast it to another pointer type first.");
     }
diff --git a/compiler/src/wasm_emit.c b/compiler/src/wasm_emit.c
index ecced22e..78753bd5 100644
--- a/compiler/src/wasm_emit.c
+++ b/compiler/src/wasm_emit.c
@@ -492,7 +492,10 @@ EMIT_FUNC(assignment, AstBinaryOp* assign);
 EMIT_FUNC(assignment_of_array, AstTyped* left, AstTyped* right);
 EMIT_FUNC(compound_assignment, AstBinaryOp* assign);
 EMIT_FUNC(store_instruction, Type* type, u32 offset);
+EMIT_FUNC(flip_and_store_instruction, AstTyped *lval, OnyxToken *token);
+EMIT_FUNC(generic_store_instruction, AstTyped *lval, OnyxToken *token);
 EMIT_FUNC(load_instruction, Type* type, u32 offset);
+EMIT_FUNC(load_with_ignored_instruction, Type* type, u32 offset, i32 ignored_value_count);
 EMIT_FUNC(if, AstIfWhile* if_node);
 EMIT_FUNC(while, AstIfWhile* while_node);
 EMIT_FUNC(for, AstFor* for_node);
@@ -512,7 +515,7 @@ EMIT_FUNC(local_location, AstLocal* local, u64* offset_return);
 EMIT_FUNC(memory_reservation_location, AstMemRes* memres);
 EMIT_FUNC(location_return_offset, AstTyped* expr, u64* offset_return);
 EMIT_FUNC(location, AstTyped* expr);
-EMIT_FUNC(compound_load, Type* type, u64 offset);
+EMIT_FUNC(compound_load, Type* type, u64 offset, i32 ignored_value_count);
 EMIT_FUNC(struct_lval, AstTyped* lval);
 EMIT_FUNC(struct_literal, AstStructLiteral* sl);
 EMIT_FUNC(compound_store, Type* type, u64 offset, b32 location_first);
@@ -898,32 +901,21 @@ EMIT_FUNC(compound_assignment, AstBinaryOp* assign) {
     emit_expression(mod, &code, assign->right);
 
+    if (assign->left->kind != Ast_Kind_Compound) {
+        emit_generic_store_instruction(mod, &code, (AstTyped *) assign->left, assign->token);
+        *pcode = code;
+        return;
+    }
+
+    // It is assumed at this point that the correct number
+    // of expressions/values will be on the stack to consume.
+    //
+    // In reverse, for each location to store on the left hand side,
+    // store the values on the stack into their respective locations.
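+    //
+    // For example (illustrative only), for `a, b = f();` where `f`
+    // returns two values, `b` is stored first, then `a`.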
+    //
     AstCompound* compound_lval = (AstCompound *) assign->left;
     bh_arr_rev_each(AstTyped *, plval, compound_lval->exprs) {
-        AstTyped *lval = *plval;
-
-        if (type_is_structlike_strict(lval->type)) {
-            emit_struct_lval(mod, &code, lval);
-            continue;
-        }
-
-        if (lval->kind == Ast_Kind_Local || lval->kind == Ast_Kind_Param) {
-            if (bh_imap_get(&mod->local_map, (u64) lval) & LOCAL_IS_WASM) {
-                u64 localidx = bh_imap_get(&mod->local_map, (u64) lval);
-                WIL(assign->token, WI_LOCAL_SET, localidx);
-                continue;
-            }
-        }
-
-        WasmType wt = onyx_type_to_wasm_type(lval->type);
-        u64 expr_tmp = local_raw_allocate(mod->local_alloc, wt);
-        WIL(assign->token, WI_LOCAL_SET, expr_tmp);
-        u64 offset = 0;
-        emit_location_return_offset(mod, &code, lval, &offset);
-        WIL(assign->token, WI_LOCAL_GET, expr_tmp);
-
-        local_raw_free(mod->local_alloc, wt);
-        emit_store_instruction(mod, &code, lval->type, offset);
+        emit_generic_store_instruction(mod, &code, *plval, assign->token);
     }
 
     *pcode = code;
@@ -980,11 +972,76 @@ EMIT_FUNC(store_instruction, Type* type, u32 offset) {
     *pcode = code;
 }
 
+EMIT_FUNC(flip_and_store_instruction, AstTyped *lval, OnyxToken *token) {
+    bh_arr(WasmInstruction) code = *pcode;
+
+    WasmType wt = onyx_type_to_wasm_type(lval->type);
+    u64 expr_tmp = local_raw_allocate(mod->local_alloc, wt);
+    WIL(token, WI_LOCAL_SET, expr_tmp);
+
+    u64 offset = 0;
+    emit_location_return_offset(mod, &code, lval, &offset);
+    WIL(token, WI_LOCAL_GET, expr_tmp);
+
+    local_raw_free(mod->local_alloc, wt);
+    emit_store_instruction(mod, &code, lval->type, offset);
+
+    *pcode = code;
+    return;
+}
+
+//
+// What "store_instruction" should have been. This takes an l-value, assumes
+// a value of that type is on the value stack, and then stores it into the l-value,
+// doing whatever is necessary.
+EMIT_FUNC(generic_store_instruction, AstTyped *lval, OnyxToken *token) {
+    bh_arr(WasmInstruction) code = *pcode;
+
+    // If this is a structure, use the emit_struct_lval function.
+    if (type_is_structlike_strict(lval->type)) {
+        emit_struct_lval(mod, &code, lval);
+    }
+
+    // If this is a WASM local, simply set the local and continue.
+    else if (bh_imap_get(&mod->local_map, (u64) lval) & LOCAL_IS_WASM) {
+        u64 localidx = bh_imap_get(&mod->local_map, (u64) lval);
+        WIL(token, WI_LOCAL_SET, localidx);
+    }
+
+    else if (type_is_compound(lval->type)) {
+        u64 offset = 0;
+        emit_location_return_offset(mod, &code, lval, &offset);
+        emit_compound_store(mod, &code, lval->type, offset, 1);
+    }
+
+    // Otherwise, you have to do this "fun" sequence of instructions
+    // where you temporarily store the top value on the stack, emit
+    // the location, and then replace the value. If WASM had just
+    // decided to place the location parameter for the store
+    // instructions at the top of the stack, not second from the top,
+    // this would not be an issue.
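+    // Roughly, the emitted shape is (a sketch, not literal output):
+    //
+    //     local.set $tmp      ;; stash the value
+    //     ...push address...  ;; emit the destination location
+    //     local.get $tmp      ;; value back on top, above the address
+    //     t.store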
+    else {
+        emit_flip_and_store_instruction(mod, &code, lval, token);
+    }
+
+    *pcode = code;
+    return;
+}
+
+EMIT_FUNC(load_with_ignored_instruction, Type* type, u32 offset, i32 ignored_value_count) {
+    if (type_is_compound(type)) {
+        emit_compound_load(mod, pcode, type, offset, ignored_value_count);
+        return;
+    }
+
+    emit_load_instruction(mod, pcode, type, offset);
+}
+
 EMIT_FUNC(load_instruction, Type* type, u32 offset) {
     bh_arr(WasmInstruction) code = *pcode;
 
     if (type_is_compound(type)) {
-        emit_compound_load(mod, pcode, type, offset);
+        emit_compound_load(mod, pcode, type, offset, 0);
         return;
     }
 
@@ -1240,84 +1297,6 @@ EMIT_FUNC(for_range, AstFor* for_node, u64 iter_local) {
     *pcode = code;
 }
 
-EMIT_FUNC(for_array, AstFor* for_node, u64 iter_local) {
-    bh_arr(WasmInstruction) code = *pcode;
-
-    u64 end_ptr_local, ptr_local;
-    end_ptr_local = local_raw_allocate(mod->local_alloc, WASM_TYPE_PTR);
-
-    if (for_node->by_pointer) {
-        ptr_local = iter_local;
-    } else {
-        ptr_local = local_raw_allocate(mod->local_alloc, WASM_TYPE_PTR);
-    }
-
-    AstLocal* var = for_node->var;
-    b32 it_is_local = (b32) ((iter_local & LOCAL_IS_WASM) != 0);
-    u64 offset = 0;
-
-    u64 elem_size;
-    if (for_node->by_pointer) elem_size = type_size_of(var->type->Pointer.elem);
-    else                      elem_size = type_size_of(var->type);
-
-    WIL(for_node->token, WI_LOCAL_TEE, ptr_local);
-    WIL(for_node->token, WI_PTR_CONST, for_node->iter->type->Array.count * elem_size);
-    WI(for_node->token, WI_PTR_ADD);
-    WIL(for_node->token, WI_LOCAL_SET, end_ptr_local);
-
-    if (for_node->has_first) {
-        for_node->first_local = local_raw_allocate(mod->local_alloc, WASM_TYPE_INT32);
-        WIL(for_node->token, WI_I32_CONST, 1);
-        WIL(for_node->token, WI_LOCAL_SET, for_node->first_local);
-    }
-
-    emit_enter_structured_block(mod, &code, SBT_Breakable_Block, for_node->token);
-    emit_enter_structured_block(mod, &code, SBT_Basic_Loop, for_node->token);
-    emit_enter_structured_block(mod, &code, SBT_Continue_Block, for_node->token);
-
-    WIL(for_node->token, WI_LOCAL_GET, ptr_local);
-    WIL(for_node->token, WI_LOCAL_GET, end_ptr_local);
-    WI(for_node->token, WI_PTR_GE);
-    WID(for_node->token, WI_COND_JUMP, 0x02);
-
-    if (!for_node->by_pointer) {
-        if (!it_is_local) emit_local_location(mod, &code, var, &offset);
-
-        WIL(for_node->token, WI_LOCAL_GET, ptr_local);
-        emit_load_instruction(mod, &code, var->type, 0);
-
-        if (!it_is_local) emit_store_instruction(mod, &code, var->type, offset);
-        else              WIL(for_node->token, WI_LOCAL_SET, iter_local);
-    }
-
-    emit_block(mod, &code, for_node->stmt, 0);
-
-    emit_leave_structured_block(mod, &code);
-
-    WIL(for_node->token, WI_LOCAL_GET, ptr_local);
-    WIL(for_node->token, WI_PTR_CONST, elem_size);
-    WI(for_node->token, WI_PTR_ADD);
-    WIL(for_node->token, WI_LOCAL_SET, ptr_local);
-
-    if (for_node->has_first) {
-        WIL(NULL, WI_I32_CONST, 0);
-        WIL(NULL, WI_LOCAL_SET, for_node->first_local);
-    }
-
-    if (bh_arr_last(code).type != WI_JUMP)
-        WID(for_node->token, WI_JUMP, 0x00);
-
-    emit_leave_structured_block(mod, &code);
-    emit_leave_structured_block(mod, &code);
-
-    if (for_node->has_first) local_raw_free(mod->local_alloc, WASM_TYPE_INT32);
-
-    local_raw_free(mod->local_alloc, WASM_TYPE_PTR);
-    if (!for_node->by_pointer) local_raw_free(mod->local_alloc, WASM_TYPE_PTR);
-
-    *pcode = code;
-}
-
 EMIT_FUNC(for_slice, AstFor* for_node, u64 iter_local) {
     bh_arr(WasmInstruction) code = *pcode;
 
@@ -1551,12 +1530,25 @@ EMIT_FUNC(for, AstFor* for_node) {
 
     switch (for_node->loop_type) {
         case For_Loop_Range:  emit_for_range(mod, &code, for_node, iter_local); break;
-        case For_Loop_Array:  emit_for_array(mod, &code, for_node, iter_local); break;
+
+        // NOTE: For static arrays, simply outputting the size
+        // of the array right after the pointer to the start
+        // of the array essentially makes it a slice.
+        case For_Loop_Array:
+            WIL(NULL, WI_I32_CONST, for_node->iter->type->Array.count);
+            emit_for_slice(mod, &code, for_node, iter_local);
+            break;
+
         // NOTE: A dynamic array is just a slice with a capacity and allocator on the end.
         // Just dropping the extra fields will mean we can just use the slice implementation.
         //  - brendanfh   2020/09/04
         //  - brendanfh   2021/04/13
-        case For_Loop_DynArr: WI(for_node->token, WI_DROP); WI(for_node->token, WI_DROP); WI(for_node->token, WI_DROP);
+        case For_Loop_DynArr:
+            WI(for_node->token, WI_DROP);
+            WI(for_node->token, WI_DROP);
+            WI(for_node->token, WI_DROP);
+            // fallthrough
+
         case For_Loop_Slice:  emit_for_slice(mod, &code, for_node, iter_local); break;
         case For_Loop_Iterator: emit_for_iterator(mod, &code, for_node, iter_local); break;
         default: onyx_report_error(for_node->token->pos, Error_Critical, "Invalid for loop type. You should probably not be seeing this...");
     }
@@ -2091,7 +2083,7 @@ EMIT_FUNC(call, AstCall* call) {
 
     if (cc == CC_Return_Stack) {
         WID(call_token, WI_GLOBAL_GET, stack_top_idx);
-        emit_load_instruction(mod, &code, return_type, reserve_size - return_size);
+        emit_load_with_ignored_instruction(mod, &code, return_type, reserve_size - return_size, call->ignored_return_value_count);
     }
 
     local_raw_free(mod->local_alloc, WASM_TYPE_PTR);
@@ -2746,14 +2738,18 @@ EMIT_FUNC(struct_lval, AstTyped* lval) {
     *pcode = code;
 }
 
-EMIT_FUNC(compound_load, Type* type, u64 offset) {
+EMIT_FUNC(compound_load, Type* type, u64 offset, i32 ignored_value_count) {
     bh_arr(WasmInstruction) code = *pcode;
     i32 mem_count = type_linear_member_count(type);
     TypeWithOffset two;
 
+    assert(mem_count > ignored_value_count);
+    mem_count -= ignored_value_count;
+
     if (mem_count == 1) {
         type_linear_member_lookup(type, 0, &two);
         emit_load_instruction(mod, &code, two.type, offset + two.offset); // two.offset should be 0
+
     } else {
         u64 tmp_idx = local_raw_allocate(mod->local_alloc, WASM_TYPE_PTR);
         WIL(NULL, WI_LOCAL_TEE, tmp_idx);
@@ -2980,7 +2976,6 @@ EMIT_FUNC(range_literal, AstRangeLiteral* range) {
 EMIT_FUNC(if_expression, AstIfExpression* if_expr) {
     bh_arr(WasmInstruction) code = *pcode;
 
-    u64 offset = 0;
     u64 result_local    = local_allocate(mod->local_alloc, (AstTyped *) if_expr);
     b32 result_is_local = (b32) ((result_local & LOCAL_IS_WASM) != 0);
     bh_imap_put(&mod->local_map, (u64) if_expr, result_local);
@@ -2988,26 +2983,17 @@ EMIT_FUNC(if_expression, AstIfExpression* if_expr) {
     emit_expression(mod, &code, if_expr->cond);
     emit_enter_structured_block(mod, &code, SBT_Basic_If, if_expr->token);
 
-    if (!result_is_local) emit_local_location(mod, &code, (AstLocal *) if_expr, &offset);
-
     emit_expression(mod, &code, if_expr->true_expr);
+    emit_generic_store_instruction(mod, &code, (AstTyped *) if_expr, if_expr->token);
 
-    if (!result_is_local) emit_store_instruction(mod, &code, if_expr->type, offset);
-    else                  WIL(if_expr->token, WI_LOCAL_SET, result_local);
-
-    offset = 0;
     WI(if_expr->token, WI_ELSE);
 
-    if (!result_is_local) emit_local_location(mod, &code, (AstLocal *) if_expr, &offset);
-
     emit_expression(mod, &code, if_expr->false_expr);
-
-    if (!result_is_local) emit_store_instruction(mod, &code, if_expr->type, offset);
-    else                  WIL(if_expr->token, WI_LOCAL_SET, result_local);
+    emit_generic_store_instruction(mod, &code, (AstTyped *) if_expr, if_expr->token);
 
     emit_leave_structured_block(mod, &code);
 
-    offset = 0;
     if (!result_is_local) {
+        u64 offset = 0;
         emit_local_location(mod, &code, (AstLocal *) if_expr, &offset);
         emit_load_instruction(mod, &code, if_expr->type, offset);
 
@@ -3055,7 +3041,9 @@ EMIT_FUNC(location_return_offset, AstTyped* expr, u64* offset_return) {
         case Ast_Kind_Param:
         case Ast_Kind_Local:
        case Ast_Kind_Array_Literal:
-        case Ast_Kind_Struct_Literal: {
+        case Ast_Kind_Struct_Literal:
+        case Ast_Kind_Do_Block:
+        case Ast_Kind_If_Expression: {
             emit_local_location(mod, &code, (AstLocal *) expr, offset_return);
             break;
         }
@@ -3631,16 +3619,9 @@ EMIT_FUNC(return, AstReturn* ret) {
     if (ret->expr) {
         if (bh_arr_length(mod->return_location_stack) > 0) {
             AstLocal* dest = bh_arr_last(mod->return_location_stack);
-            u64 dest_loc = bh_imap_get(&mod->local_map, (u64) dest);
-            b32 dest_is_local = (b32) ((dest_loc & LOCAL_IS_WASM) != 0);
-
-            u64 offset = 0;
-            if (!dest_is_local) emit_local_location(mod, &code, dest, &offset);
 
             emit_expression(mod, &code, ret->expr);
-
-            if (!dest_is_local) emit_store_instruction(mod, &code, dest->type, offset);
-            else                WIL(NULL, WI_LOCAL_SET, dest_loc);
+            emit_generic_store_instruction(mod, &code, (AstTyped *) dest, NULL);
 
         } else if (mod->curr_cc == CC_Return_Stack) {
             WIL(NULL, WI_LOCAL_GET, mod->stack_base_idx);
diff --git a/core/container/array.onyx b/core/container/array.onyx
index 6c3655e3..4b4432e3 100644
--- a/core/container/array.onyx
+++ b/core/container/array.onyx
@@ -183,6 +183,9 @@ pop :: (arr: ^[..] $T) -> T {
     return arr.data[arr.count];
 }
 
+concat :: #match #local {}
+
+#overload
 concat :: (arr: ^[..] $T, other: [] T) {
     if !ensure_capacity(arr, arr.count + other.count) do return;
 
@@ -190,6 +193,13 @@ concat :: (arr: ^[..] $T, other: [] T) {
     arr.count += other.count;
 }
 
+#overload
+concat :: (arr: ^[..] $T, other: Iterator(T)) {
+    for other {
+        push(arr, it);
+    }
+}
+
 filter :: macro (arr: ^[..] $T, body: Code) {
     move := 0;
 
@@ -471,6 +481,7 @@ fold :: macro (arr: [] $T, init: $R, body: Code) -> R {
     return acc;
 }
 
+/*
 map :: #match #locked {
     macro (arr: [] $T, f: (^T) -> void)        do for ^it: arr do f(it);,
     macro (arr: [] $T, f: (T) -> T)            do for ^it: arr do *it = f(*it);,
@@ -478,6 +489,7 @@ map :: #match #locked {
     macro (arr: [] $T, data: $R, f: (^T, R) -> void) do for ^it: arr do f(it, data);,
     macro (arr: [] $T, data: $R, f: (T, R) -> T)     do for ^it: arr do *it = f(*it, data);,
 }
+*/
 
 every :: #match #local {}
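
---

A sketch of what the new behavior permits at the language level
(illustrative Onyx only; `get_three` is a hypothetical procedure, not
part of this commit):

    get_three :: () -> (i32, i32, i32) {
        return 1, 2, 3;
    }

    x: i32;
    y: i32;

    x, y = get_three();    // third return value ignored
    x    = get_three();    // second and third return values ignored

The checker records how many trailing return values were dropped in
ignored_return_value_count, and the emitter skips them when loading the
call's results from the stack.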
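The new concat overload similarly admits appending every value produced
by an Iterator(T) onto a dynamic array (again a sketch; `some_iterator`
is hypothetical):

    arr: [..] i32;
    array.concat(^arr, some_iterator);    // pushes each yielded value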