| | | |
|---|---|---|
| author | Damien George <damien.p.george@gmail.com> | 2014-08-16 22:31:57 +0100 |
| committer | Damien George <damien.p.george@gmail.com> | 2014-08-16 22:31:57 +0100 |
| commit | 7fe2191c9b72e16d271735ca24a9def7ba072217 (patch) | |
| tree | 70def977a706546272b0f4237bec92598b95054b /py/emitnative.c | |
| parent | 86de21b810693bccdd88d53aacb6d8acf26f09e0 (diff) | |
py: Code clean-up in native emitter; improve thumb native calls.
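The clean-up is mechanical: every `emit_call*` helper used to take both an `mp_fun_kind_t` index and a raw `void *fun` pointer, and the x64 path called through the raw pointer while the Thumb path already went through the table. After this commit both back ends resolve the target as `mp_fun_table[fun_kind]`, so the redundant pointer argument and the "TODO need to add function to runtime table" placeholders for `nlr_push`, `nlr_pop` and `mp_load_const_bytes` go away. The pattern being standardized on is indexed dispatch through a function table; a minimal, illustrative sketch of that idea (the names below are invented for the sketch, not MicroPython APIs):

```c
// Illustrative sketch of table-indexed dispatch, not MicroPython's actual code.
// Keeping a pointer/register aimed at the table lets generated code reach any
// runtime helper with a small index (e.g. one ldr + blx on Thumb) instead of
// embedding a full function address at every call site.
typedef void (*rt_fun_t)(void);

extern const rt_fun_t rt_fun_table[];   // hypothetical table of runtime helpers

static inline void call_helper(unsigned fun_kind) {
    // the emitter generates the machine-code equivalent of this line
    rt_fun_table[fun_kind]();
}
```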
Diffstat (limited to 'py/emitnative.c')
| | | |
|---|---|---|
| -rw-r--r-- | py/emitnative.c | 122 |

1 file changed, 61 insertions(+), 61 deletions(-)
```diff
diff --git a/py/emitnative.c b/py/emitnative.c
index 87329808e..8385f9905 100644
--- a/py/emitnative.c
+++ b/py/emitnative.c
@@ -530,55 +530,55 @@ STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, in
     emit_post_push_reg(emit, vtyped, regd);
 }
 
-STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind, void *fun) {
+STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
     need_reg_all(emit);
 #if N_X64
-    asm_x64_call_ind(emit->as, fun, REG_RAX);
+    asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
 #elif N_THUMB
     asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
 #endif
 }
 
-STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, void *fun, mp_int_t arg_val, int arg_reg) {
+STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
     need_reg_all(emit);
     ASM_MOV_IMM_TO_REG(arg_val, arg_reg);
 #if N_X64
-    asm_x64_call_ind(emit->as, fun, REG_RAX);
+    asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
 #elif N_THUMB
     asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
 #endif
 }
 
 // the first arg is stored in the code aligned on a mp_uint_t boundary
-STATIC void emit_call_with_imm_arg_aligned(emit_t *emit, mp_fun_kind_t fun_kind, void *fun, mp_int_t arg_val, int arg_reg) {
+STATIC void emit_call_with_imm_arg_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
     need_reg_all(emit);
     ASM_MOV_ALIGNED_IMM_TO_REG(arg_val, arg_reg);
 #if N_X64
-    asm_x64_call_ind(emit->as, fun, REG_RAX);
+    asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
 #elif N_THUMB
     asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
 #endif
 }
 
-STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, void *fun, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
+STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
     need_reg_all(emit);
     ASM_MOV_IMM_TO_REG(arg_val1, arg_reg1);
     ASM_MOV_IMM_TO_REG(arg_val2, arg_reg2);
 #if N_X64
-    asm_x64_call_ind(emit->as, fun, REG_RAX);
+    asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
 #elif N_THUMB
     asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
 #endif
 }
 
 // the first arg is stored in the code aligned on a mp_uint_t boundary
-STATIC void emit_call_with_3_imm_args_and_first_aligned(emit_t *emit, mp_fun_kind_t fun_kind, void *fun, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2, mp_int_t arg_val3, int arg_reg3) {
+STATIC void emit_call_with_3_imm_args_and_first_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2, mp_int_t arg_val3, int arg_reg3) {
     need_reg_all(emit);
     ASM_MOV_ALIGNED_IMM_TO_REG(arg_val1, arg_reg1);
     ASM_MOV_IMM_TO_REG(arg_val2, arg_reg2);
     ASM_MOV_IMM_TO_REG(arg_val3, arg_reg3);
 #if N_X64
-    asm_x64_call_ind(emit->as, fun, REG_RAX);
+    asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
 #elif N_THUMB
     asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
 #endif
@@ -631,7 +631,7 @@ STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_de
         if (si->vtype != VTYPE_PYOBJ) {
             mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
             ASM_MOV_LOCAL_TO_REG(local_num, REG_ARG_1);
-            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, mp_convert_native_to_obj, si->vtype, REG_ARG_2); // arg2 = type
+            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
             ASM_MOV_REG_TO_LOCAL(REG_RET, local_num);
             si->vtype = VTYPE_PYOBJ;
         }
@@ -684,7 +684,7 @@ STATIC void emit_native_import_name(emit_t *emit, qstr qst) {
     emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3); // arg2 = fromlist, arg3 = level
     assert(vtype_fromlist == VTYPE_PYOBJ);
     assert(vtype_level == VTYPE_PYOBJ);
-    emit_call_with_imm_arg(emit, MP_F_IMPORT_NAME, mp_import_name, qst, REG_ARG_1); // arg1 = import name
+    emit_call_with_imm_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -694,7 +694,7 @@ STATIC void emit_native_import_from(emit_t *emit, qstr qst) {
     vtype_kind_t vtype_module;
     emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
     assert(vtype_module == VTYPE_PYOBJ);
-    emit_call_with_imm_arg(emit, MP_F_IMPORT_FROM, mp_import_from, qst, REG_ARG_2); // arg2 = import name
+    emit_call_with_imm_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -703,7 +703,7 @@ STATIC void emit_native_import_star(emit_t *emit) {
     vtype_kind_t vtype_module;
     emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
     assert(vtype_module == VTYPE_PYOBJ);
-    emit_call(emit, MP_F_IMPORT_ALL, mp_import_all);
+    emit_call(emit, MP_F_IMPORT_ALL);
     emit_post(emit);
 }
 
@@ -745,14 +745,14 @@ STATIC void emit_native_load_const_int(emit_t *emit, qstr qst) {
     DEBUG_printf("load_const_int %s\n", qstr_str(st));
     // for viper: load integer, check fits in 32 bits
     emit_native_pre(emit);
-    emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_INT, mp_load_const_int, qst, REG_ARG_1);
+    emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_INT, qst, REG_ARG_1);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
 STATIC void emit_native_load_const_dec(emit_t *emit, qstr qstr) {
     // for viper, a float/complex is just a Python object
     emit_native_pre(emit);
-    emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_DEC, mp_load_const_dec, qstr, REG_ARG_1);
+    emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_DEC, qstr, REG_ARG_1);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -765,9 +765,9 @@ STATIC void emit_native_load_const_str(emit_t *emit, qstr qstr, bool bytes) {
         emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qstr));
     } else {
         if (bytes) {
-            emit_call_with_imm_arg(emit, 0, mp_load_const_bytes, qstr, REG_ARG_1); // TODO need to add function to runtime table
+            emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_BYTES, qstr, REG_ARG_1);
         } else {
-            emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_STR, mp_load_const_str, qstr, REG_ARG_1);
+            emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_STR, qstr, REG_ARG_1);
         }
         emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
     }
@@ -815,13 +815,13 @@ STATIC void emit_native_load_deref(emit_t *emit, qstr qstr, int local_num) {
 
 STATIC void emit_native_load_name(emit_t *emit, qstr qstr) {
     emit_native_pre(emit);
-    emit_call_with_imm_arg(emit, MP_F_LOAD_NAME, mp_load_name, qstr, REG_ARG_1);
+    emit_call_with_imm_arg(emit, MP_F_LOAD_NAME, qstr, REG_ARG_1);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
 STATIC void emit_native_load_global(emit_t *emit, qstr qstr) {
     emit_native_pre(emit);
-    emit_call_with_imm_arg(emit, MP_F_LOAD_GLOBAL, mp_load_global, qstr, REG_ARG_1);
+    emit_call_with_imm_arg(emit, MP_F_LOAD_GLOBAL, qstr, REG_ARG_1);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -833,7 +833,7 @@ STATIC void emit_native_load_attr(emit_t *emit, qstr qstr) {
     vtype_kind_t vtype_base;
     emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
     assert(vtype_base == VTYPE_PYOBJ);
-    emit_call_with_imm_arg(emit, MP_F_LOAD_ATTR, mp_load_attr, qstr, REG_ARG_2); // arg2 = attribute name
+    emit_call_with_imm_arg(emit, MP_F_LOAD_ATTR, qstr, REG_ARG_2); // arg2 = attribute name
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -842,12 +842,12 @@ STATIC void emit_native_load_method(emit_t *emit, qstr qstr) {
     emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
     assert(vtype_base == VTYPE_PYOBJ);
     emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
-    emit_call_with_imm_arg(emit, MP_F_LOAD_METHOD, mp_load_method, qstr, REG_ARG_2); // arg2 = method name
+    emit_call_with_imm_arg(emit, MP_F_LOAD_METHOD, qstr, REG_ARG_2); // arg2 = method name
 }
 
 STATIC void emit_native_load_build_class(emit_t *emit) {
     emit_native_pre(emit);
-    emit_call(emit, MP_F_LOAD_BUILD_CLASS, mp_load_build_class);
+    emit_call(emit, MP_F_LOAD_BUILD_CLASS);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -855,7 +855,7 @@ STATIC void emit_native_load_subscr(emit_t *emit) {
     vtype_kind_t vtype_lhs, vtype_rhs;
     emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
     if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
-        emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, mp_obj_subscr, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
+        emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
         emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
     } else {
         printf("ViperTypeError: can't do subscr of types %d and %d\n", vtype_lhs, vtype_rhs);
@@ -906,7 +906,7 @@ STATIC void emit_native_store_name(emit_t *emit, qstr qstr) {
     vtype_kind_t vtype;
     emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
     assert(vtype == VTYPE_PYOBJ);
-    emit_call_with_imm_arg(emit, MP_F_STORE_NAME, mp_store_name, qstr, REG_ARG_1); // arg1 = name
+    emit_call_with_imm_arg(emit, MP_F_STORE_NAME, qstr, REG_ARG_1); // arg1 = name
     emit_post(emit);
 }
 
@@ -916,10 +916,10 @@ STATIC void emit_native_store_global(emit_t *emit, qstr qstr) {
         emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
     } else {
         emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
-        emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, mp_convert_native_to_obj, vtype, REG_ARG_2); // arg2 = type
+        emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype, REG_ARG_2); // arg2 = type
         ASM_MOV_REG_TO_REG(REG_RET, REG_ARG_2);
     }
-    emit_call_with_imm_arg(emit, MP_F_STORE_GLOBAL, mp_store_global, qstr, REG_ARG_1); // arg1 = name
+    emit_call_with_imm_arg(emit, MP_F_STORE_GLOBAL, qstr, REG_ARG_1); // arg1 = name
     emit_post(emit);
 }
 
@@ -928,7 +928,7 @@ STATIC void emit_native_store_attr(emit_t *emit, qstr qstr) {
     emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
     assert(vtype_base == VTYPE_PYOBJ);
     assert(vtype_val == VTYPE_PYOBJ);
-    emit_call_with_imm_arg(emit, MP_F_STORE_ATTR, mp_store_attr, qstr, REG_ARG_2); // arg2 = attribute name
+    emit_call_with_imm_arg(emit, MP_F_STORE_ATTR, qstr, REG_ARG_2); // arg2 = attribute name
     emit_post(emit);
 }
 
@@ -942,7 +942,7 @@ STATIC void emit_native_store_subscr(emit_t *emit) {
     assert(vtype_index == VTYPE_PYOBJ);
     assert(vtype_base == VTYPE_PYOBJ);
     assert(vtype_value == VTYPE_PYOBJ);
-    emit_call(emit, MP_F_OBJ_SUBSCR, mp_obj_subscr);
+    emit_call(emit, MP_F_OBJ_SUBSCR);
 }
 
 STATIC void emit_native_delete_fast(emit_t *emit, qstr qstr, int local_num) {
@@ -972,7 +972,7 @@ STATIC void emit_native_delete_attr(emit_t *emit, qstr qstr) {
     vtype_kind_t vtype_base;
     emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
     assert(vtype_base == VTYPE_PYOBJ);
-    emit_call_with_2_imm_args(emit, MP_F_STORE_ATTR, mp_store_attr, qstr, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3); // arg2 = attribute name, arg3 = value (null for delete)
+    emit_call_with_2_imm_args(emit, MP_F_STORE_ATTR, qstr, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3); // arg2 = attribute name, arg3 = value (null for delete)
     emit_post(emit);
 }
 
@@ -981,7 +981,7 @@ STATIC void emit_native_delete_subscr(emit_t *emit) {
     emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1); // index, base
     assert(vtype_index == VTYPE_PYOBJ);
     assert(vtype_base == VTYPE_PYOBJ);
-    emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, mp_obj_subscr, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
+    emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
 }
 
 STATIC void emit_native_dup_top(emit_t *emit) {
@@ -1035,7 +1035,7 @@ STATIC void emit_native_jump_helper(emit_t *emit, uint label, bool pop) {
         }
     } else if (vtype == VTYPE_PYOBJ) {
         emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
-        emit_call(emit, MP_F_OBJ_IS_TRUE, mp_obj_is_true);
+        emit_call(emit, MP_F_OBJ_IS_TRUE);
         if (!pop) {
             emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
         }
@@ -1119,7 +1119,7 @@ STATIC void emit_native_setup_except(emit_t *emit, uint label) {
     // need to commit stack because we may jump elsewhere
     need_stack_settled(emit);
     emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_1, sizeof(nlr_buf_t) / sizeof(mp_uint_t)); // arg1 = pointer to nlr buf
-    emit_call(emit, 0, nlr_push); // TODO need to add function to runtime table
+    emit_call(emit, MP_F_NLR_PUSH);
 #if N_X64
     asm_x64_test_r8_with_r8(emit->as, REG_RET, REG_RET);
     asm_x64_jcc_label(emit->as, JCC_JNZ, label);
@@ -1145,7 +1145,7 @@ STATIC void emit_native_get_iter(emit_t *emit) {
     vtype_kind_t vtype;
     emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
     assert(vtype == VTYPE_PYOBJ);
-    emit_call(emit, MP_F_GETITER, mp_getiter);
+    emit_call(emit, MP_F_GETITER);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -1154,7 +1154,7 @@ STATIC void emit_native_for_iter(emit_t *emit, uint label) {
     vtype_kind_t vtype;
     emit_access_stack(emit, 1, &vtype, REG_ARG_1);
    assert(vtype == VTYPE_PYOBJ);
-    emit_call(emit, MP_F_ITERNEXT, mp_iternext);
+    emit_call(emit, MP_F_ITERNEXT);
     ASM_MOV_IMM_TO_REG((mp_uint_t)MP_OBJ_STOP_ITERATION, REG_TEMP1);
 #if N_X64
     asm_x64_cmp_r64_with_r64(emit->as, REG_RET, REG_TEMP1);
@@ -1175,7 +1175,7 @@ STATIC void emit_native_for_iter_end(emit_t *emit) {
 
 STATIC void emit_native_pop_block(emit_t *emit) {
     emit_native_pre(emit);
-    emit_call(emit, 0, nlr_pop); // TODO need to add function to runtime table
+    emit_call(emit, MP_F_NLR_POP);
     adjust_stack(emit, -(mp_int_t)(sizeof(nlr_buf_t) / sizeof(mp_uint_t)));
     emit_post(emit);
 }
@@ -1183,7 +1183,7 @@ STATIC void emit_native_pop_block(emit_t *emit) {
 STATIC void emit_native_pop_except(emit_t *emit) {
     /*
     emit_native_pre(emit);
-    emit_call(emit, 0, nlr_pop); // TODO need to add function to runtime table
+    emit_call(emit, MP_F_NLR_POP);
     adjust_stack(emit, -(mp_int_t)(sizeof(nlr_buf_t) / sizeof(mp_uint_t)));
     emit_post(emit);
     */
@@ -1197,7 +1197,7 @@ STATIC void emit_native_unary_op(emit_t *emit, mp_unary_op_t op) {
         vtype_kind_t vtype;
         emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
         assert(vtype == VTYPE_PYOBJ);
-        emit_call_with_imm_arg(emit, MP_F_UNARY_OP, mp_unary_op, op, REG_ARG_1);
+        emit_call_with_imm_arg(emit, MP_F_UNARY_OP, op, REG_ARG_1);
         emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
     }
 }
@@ -1230,7 +1230,7 @@ STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
             assert(0);
         }
     } else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
-        emit_call_with_imm_arg(emit, MP_F_BINARY_OP, mp_binary_op, op, REG_ARG_1);
+        emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
         emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
     } else {
         printf("ViperTypeError: can't do binary op between types %d and %d\n", vtype_lhs, vtype_rhs);
@@ -1243,14 +1243,14 @@ STATIC void emit_native_build_tuple(emit_t *emit, int n_args) {
     //   if wrapped in byte_array, or something, allocates memory and fills it
     emit_native_pre(emit);
     emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
-    emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE, mp_obj_new_tuple, n_args, REG_ARG_1);
+    emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE, n_args, REG_ARG_1);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new tuple
 }
 
 STATIC void emit_native_build_list(emit_t *emit, int n_args) {
     emit_native_pre(emit);
     emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
-    emit_call_with_imm_arg(emit, MP_F_BUILD_LIST, mp_obj_new_list, n_args, REG_ARG_1);
+    emit_call_with_imm_arg(emit, MP_F_BUILD_LIST, n_args, REG_ARG_1);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new list
 }
 
@@ -1261,13 +1261,13 @@ STATIC void emit_native_list_append(emit_t *emit, int list_index) {
     emit_access_stack(emit, list_index, &vtype_list, REG_ARG_1);
     assert(vtype_list == VTYPE_PYOBJ);
     assert(vtype_item == VTYPE_PYOBJ);
-    emit_call(emit, MP_F_LIST_APPEND, mp_obj_list_append);
+    emit_call(emit, MP_F_LIST_APPEND);
     emit_post(emit);
 }
 
 STATIC void emit_native_build_map(emit_t *emit, int n_args) {
     emit_native_pre(emit);
-    emit_call_with_imm_arg(emit, MP_F_BUILD_MAP, mp_obj_new_dict, n_args, REG_ARG_1);
+    emit_call_with_imm_arg(emit, MP_F_BUILD_MAP, n_args, REG_ARG_1);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new map
 }
 
@@ -1277,7 +1277,7 @@ STATIC void emit_native_store_map(emit_t *emit) {
     assert(vtype_key == VTYPE_PYOBJ);
     assert(vtype_value == VTYPE_PYOBJ);
     assert(vtype_map == VTYPE_PYOBJ);
-    emit_call(emit, MP_F_STORE_MAP, mp_obj_dict_store);
+    emit_call(emit, MP_F_STORE_MAP);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // map
 }
 
@@ -1289,14 +1289,14 @@ STATIC void emit_native_map_add(emit_t *emit, int map_index) {
     assert(vtype_map == VTYPE_PYOBJ);
     assert(vtype_key == VTYPE_PYOBJ);
     assert(vtype_value == VTYPE_PYOBJ);
-    emit_call(emit, MP_F_STORE_MAP, mp_obj_dict_store);
+    emit_call(emit, MP_F_STORE_MAP);
     emit_post(emit);
 }
 
 STATIC void emit_native_build_set(emit_t *emit, int n_args) {
     emit_native_pre(emit);
     emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
-    emit_call_with_imm_arg(emit, MP_F_BUILD_SET, mp_obj_new_set, n_args, REG_ARG_1);
+    emit_call_with_imm_arg(emit, MP_F_BUILD_SET, n_args, REG_ARG_1);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new set
 }
 
@@ -1307,7 +1307,7 @@ STATIC void emit_native_set_add(emit_t *emit, int set_index) {
     emit_access_stack(emit, set_index, &vtype_set, REG_ARG_1);
     assert(vtype_set == VTYPE_PYOBJ);
     assert(vtype_item == VTYPE_PYOBJ);
-    emit_call(emit, MP_F_STORE_SET, mp_obj_set_store);
+    emit_call(emit, MP_F_STORE_SET);
     emit_post(emit);
 }
 
@@ -1318,7 +1318,7 @@ STATIC void emit_native_build_slice(emit_t *emit, int n_args) {
         emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop
         assert(vtype_start == VTYPE_PYOBJ);
         assert(vtype_stop == VTYPE_PYOBJ);
-        emit_call_with_imm_arg(emit, MP_F_NEW_SLICE, mp_obj_new_slice, (mp_uint_t)mp_const_none, REG_ARG_3); // arg3 = step
+        emit_call_with_imm_arg(emit, MP_F_NEW_SLICE, (mp_uint_t)mp_const_none, REG_ARG_3); // arg3 = step
         emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
     } else {
         assert(n_args == 3);
@@ -1327,7 +1327,7 @@ STATIC void emit_native_build_slice(emit_t *emit, int n_args) {
         assert(vtype_start == VTYPE_PYOBJ);
         assert(vtype_stop == VTYPE_PYOBJ);
        assert(vtype_step == VTYPE_PYOBJ);
-        emit_call(emit, MP_F_NEW_SLICE, mp_obj_new_slice);
+        emit_call(emit, MP_F_NEW_SLICE);
         emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
     }
 }
@@ -1338,7 +1338,7 @@ STATIC void emit_native_unpack_sequence(emit_t *emit, int n_args) {
     emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
     assert(vtype_base == VTYPE_PYOBJ);
     emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args); // arg3 = dest ptr
-    emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, mp_unpack_sequence, n_args, REG_ARG_2); // arg2 = n_args
+    emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, n_args, REG_ARG_2); // arg2 = n_args
 }
 
 STATIC void emit_native_unpack_ex(emit_t *emit, int n_left, int n_right) {
@@ -1347,20 +1347,20 @@ STATIC void emit_native_unpack_ex(emit_t *emit, int n_left, int n_right) {
     emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
     assert(vtype_base == VTYPE_PYOBJ);
     emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1); // arg3 = dest ptr
-    emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, mp_unpack_ex, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left + n_right
+    emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left + n_right
 }
 
 STATIC void emit_native_make_function(emit_t *emit, scope_t *scope, uint n_pos_defaults, uint n_kw_defaults) {
     // call runtime, with type info for args, or don't support dict/default params, or only support Python objects for them
     emit_native_pre(emit);
     if (n_pos_defaults == 0 && n_kw_defaults == 0) {
-        emit_call_with_3_imm_args_and_first_aligned(emit, MP_F_MAKE_FUNCTION_FROM_RAW_CODE, mp_make_function_from_raw_code, (mp_uint_t)scope->raw_code, REG_ARG_1, (mp_uint_t)MP_OBJ_NULL, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
+        emit_call_with_3_imm_args_and_first_aligned(emit, MP_F_MAKE_FUNCTION_FROM_RAW_CODE, (mp_uint_t)scope->raw_code, REG_ARG_1, (mp_uint_t)MP_OBJ_NULL, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
     } else {
         vtype_kind_t vtype_def_tuple, vtype_def_dict;
         emit_pre_pop_reg_reg(emit, &vtype_def_dict, REG_ARG_3, &vtype_def_tuple, REG_ARG_2);
         assert(vtype_def_tuple == VTYPE_PYOBJ);
         assert(vtype_def_dict == VTYPE_PYOBJ);
-        emit_call_with_imm_arg_aligned(emit, MP_F_MAKE_FUNCTION_FROM_RAW_CODE, mp_make_function_from_raw_code, (mp_uint_t)scope->raw_code, REG_ARG_1);
+        emit_call_with_imm_arg_aligned(emit, MP_F_MAKE_FUNCTION_FROM_RAW_CODE, (mp_uint_t)scope->raw_code, REG_ARG_1);
     }
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
@@ -1380,20 +1380,20 @@ STATIC void emit_native_call_function(emit_t *emit, int n_positional, int n_keyw
         vtype_kind_t vtype_fun;
         emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
         assert(vtype_fun == VTYPE_PYOBJ);
-        emit_call(emit, MP_F_CALL_FUNCTION_0, mp_call_function_0);
+        emit_call(emit, MP_F_CALL_FUNCTION_0);
     } else if (n_positional == 1) {
         vtype_kind_t vtype_fun, vtype_arg1;
         emit_pre_pop_reg_reg(emit, &vtype_arg1, REG_ARG_2, &vtype_fun, REG_ARG_1); // the single argument, the function
         assert(vtype_fun == VTYPE_PYOBJ);
         assert(vtype_arg1 == VTYPE_PYOBJ);
-        emit_call(emit, MP_F_CALL_FUNCTION_1, mp_call_function_1);
+        emit_call(emit, MP_F_CALL_FUNCTION_1);
     } else if (n_positional == 2) {
         vtype_kind_t vtype_fun, vtype_arg1, vtype_arg2;
         emit_pre_pop_reg_reg_reg(emit, &vtype_arg2, REG_ARG_3, &vtype_arg1, REG_ARG_2, &vtype_fun, REG_ARG_1); // the second argument, the first argument, the function
         assert(vtype_fun == VTYPE_PYOBJ);
         assert(vtype_arg1 == VTYPE_PYOBJ);
         assert(vtype_arg2 == VTYPE_PYOBJ);
-        emit_call(emit, MP_F_CALL_FUNCTION_2, mp_call_function_2);
+        emit_call(emit, MP_F_CALL_FUNCTION_2);
     } else {
     */
@@ -1404,7 +1404,7 @@ STATIC void emit_native_call_function(emit_t *emit, int n_positional, int n_keyw
     vtype_kind_t vtype_fun;
     emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
     assert(vtype_fun == VTYPE_PYOBJ);
-    emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, mp_native_call_function_n_kw, n_positional | (n_keyword << 8), REG_ARG_2);
+    emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, n_positional | (n_keyword << 8), REG_ARG_2);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -1417,20 +1417,20 @@ STATIC void emit_native_call_method(emit_t *emit, int n_positional, int n_keywor
         emit_pre_pop_reg_reg(emit, &vtype_self, REG_ARG_2, &vtype_meth, REG_ARG_1); // the self object (or NULL), the method
         assert(vtype_meth == VTYPE_PYOBJ);
         assert(vtype_self == VTYPE_PYOBJ);
-        emit_call(emit, MP_F_CALL_METHOD_1, mp_call_method_1);
+        emit_call(emit, MP_F_CALL_METHOD_1);
     } else if (n_positional == 1) {
         vtype_kind_t vtype_meth, vtype_self, vtype_arg1;
         emit_pre_pop_reg_reg_reg(emit, &vtype_arg1, REG_ARG_3, &vtype_self, REG_ARG_2, &vtype_meth, REG_ARG_1); // the first argument, the self object (or NULL), the method
         assert(vtype_meth == VTYPE_PYOBJ);
         assert(vtype_self == VTYPE_PYOBJ);
         assert(vtype_arg1 == VTYPE_PYOBJ);
-        emit_call(emit, MP_F_CALL_METHOD_2, mp_call_method_2);
+        emit_call(emit, MP_F_CALL_METHOD_2);
     } else {
     */
     emit_native_pre(emit);
     emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword); // pointer to items, including meth and self
-    emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, mp_call_method_n_kw, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
+    emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -1467,7 +1467,7 @@ STATIC void emit_native_raise_varargs(emit_t *emit, int n_args) {
         printf("ViperTypeError: must raise an object\n");
     }
     // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
-    emit_call(emit, MP_F_NATIVE_RAISE, mp_native_raise);
+    emit_call(emit, MP_F_NATIVE_RAISE);
 }
 
 STATIC void emit_native_yield_value(emit_t *emit) {
```
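For reference, the `MP_F_*` constants used throughout the diff index MicroPython's table of runtime helpers, `mp_fun_table`, declared alongside `mp_fun_kind_t` in the `py/` sources. The exact entries and their ordering are defined there, not here; the trimmed sketch below is only illustrative of the shape the emitter relies on, including the entries this commit starts using (`MP_F_LOAD_CONST_BYTES`, `MP_F_NLR_PUSH`, `MP_F_NLR_POP`):

```c
// Trimmed, illustrative shape only - the real definitions live in MicroPython's
// py/ sources and contain many more entries in a fixed order.
typedef enum {
    MP_F_CONVERT_OBJ_TO_NATIVE = 0,
    MP_F_CONVERT_NATIVE_TO_OBJ,
    MP_F_LOAD_CONST_INT,
    // ... many more runtime helpers ...
    MP_F_LOAD_CONST_BYTES,   // used by emit_native_load_const_str for bytes
    MP_F_NLR_PUSH,           // replaces the direct call to nlr_push
    MP_F_NLR_POP,            // replaces the direct call to nlr_pop
    MP_F_NUMBER_OF,
} mp_fun_kind_t;

// One entry per MP_F_* value; emit_call() and friends index it with fun_kind.
extern const void *const mp_fun_table[MP_F_NUMBER_OF];
```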
