
py/asmarm: Simplify asm_arm_bl_ind to only load via index, not literal.

The maximum index into mp_fun_table is currently less than 1024 and should
stay that way to keep things efficient for all architectures, so there is
no need to handle loading the pointer directly via a literal in this
function.
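For context (a note of mine, not part of the commit message): the 0x1000 / 4 bound comes from the ARM ldr immediate encoding, whose offset field is 12 bits (byte offsets up to 0xFFF), while each mp_fun_table entry is a 4-byte pointer, so at most 1024 entries are addressable this way. A minimal sketch of the check and the resulting instruction word, assuming the AL condition that emit_al ORs into the opcode:

#include <assert.h>
#include <stdint.h>

// Sketch only: build the "ldr pc, [r7, #fun_id*4]" word that asm_arm_bl_ind
// emits via emit_al(as, 0x597f000 | (fun_id << 2)); 0xe0000000 is cond = AL.
static uint32_t encode_ldr_pc_r7(uint32_t fun_id) {
    assert(fun_id < (0x1000 / 4)); // 12-bit byte offset, 4 bytes per entry
    return 0xe0000000 | 0x597f000 | (fun_id << 2);
}
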
Damien George, 6 years ago
commit 5f1dd5b86b (branch: pull/4161/merge)

Files changed:
  py/asmarm.c     (18 lines changed)
  py/asmarm.h     (4 lines changed)
  py/emitnative.c (2 lines changed)

py/asmarm.c
@@ -362,19 +362,11 @@ void asm_arm_b_label(asm_arm_t *as, uint label) {
     asm_arm_bcc_label(as, ASM_ARM_CC_AL, label);
 }
 
-void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp) {
-    // If the table offset fits into the ldr instruction
-    if (fun_id < (0x1000 / 4)) {
-        emit_al(as, asm_arm_op_mov_reg(ASM_ARM_REG_LR, ASM_ARM_REG_PC)); // mov lr, pc
-        emit_al(as, 0x597f000 | (fun_id << 2)); // ldr pc, [r7, #fun_id*4]
-        return;
-    }
-
-    emit_al(as, 0x59f0004 | (reg_temp << 12)); // ldr rd, [pc, #4]
-    // Set lr after fun_ptr
-    emit_al(as, asm_arm_op_add_imm(ASM_ARM_REG_LR, ASM_ARM_REG_PC, 4)); // add lr, pc, #4
-    emit_al(as, asm_arm_op_mov_reg(ASM_ARM_REG_PC, reg_temp)); // mov pc, reg_temp
-    emit(as, (uint) fun_ptr);
+void asm_arm_bl_ind(asm_arm_t *as, uint fun_id, uint reg_temp) {
+    // The table offset should fit into the ldr instruction
+    assert(fun_id < (0x1000 / 4));
+    emit_al(as, asm_arm_op_mov_reg(ASM_ARM_REG_LR, ASM_ARM_REG_PC)); // mov lr, pc
+    emit_al(as, 0x597f000 | (fun_id << 2)); // ldr pc, [r7, #fun_id*4]
 }
 
 void asm_arm_bx_reg(asm_arm_t *as, uint reg_src) {
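
A side note on the new two-instruction sequence (my reading, not stated in the commit): reading pc on ARM yields the address of the current instruction plus 8, so the mov leaves lr pointing just past the ldr, which is the correct return address for the call made by loading pc from the function-table register r7. Roughly, at the C level, the emitted code behaves like this sketch (names are illustrative):

typedef void (*mp_fun_t)(void);

// Illustrative model of "mov lr, pc; ldr pc, [r7, #fun_id*4]": an indirect
// call through the function table, with execution resuming after the ldr.
static void call_via_table(const mp_fun_t *fun_table /* kept in r7 */, unsigned fun_id) {
    fun_table[fun_id]();
}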

py/asmarm.h
@@ -121,7 +121,7 @@ void asm_arm_pop(asm_arm_t *as, uint reglist);
 // control flow
 void asm_arm_bcc_label(asm_arm_t *as, int cond, uint label);
 void asm_arm_b_label(asm_arm_t *as, uint label);
-void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp);
+void asm_arm_bl_ind(asm_arm_t *as, uint fun_id, uint reg_temp);
 void asm_arm_bx_reg(asm_arm_t *as, uint reg_src);
 
 // Holds a pointer to mp_fun_table
@@ -174,7 +174,7 @@ void asm_arm_bx_reg(asm_arm_t *as, uint reg_src);
         asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
     } while (0)
 
 #define ASM_JUMP_REG(as, reg) asm_arm_bx_reg((as), (reg))
-#define ASM_CALL_IND(as, ptr, idx) asm_arm_bl_ind(as, ptr, idx, ASM_ARM_REG_R3)
+#define ASM_CALL_IND(as, ptr, idx) asm_arm_bl_ind(as, idx, ASM_ARM_REG_R3)
 #define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_arm_mov_local_reg((as), (local_num), (reg_src))
 #define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_arm_mov_reg_i32((as), (reg_dest), (imm))

py/emitnative.c
@@ -489,7 +489,7 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
         #if N_THUMB
         asm_thumb_bl_ind(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
         #elif N_ARM
-        asm_arm_bl_ind(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
+        asm_arm_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
         #else
         ASM_CALL_IND(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE);
         #endif
