
Merge pull request #833 from Vogtinator/arm-native

Basic native ARM emitter
Damien George committed 1ac6faa732, 10 years ago
  1. py/asmarm.c (327 lines changed)
  2. py/asmarm.h (104 lines changed)
  3. py/compile.c (7 lines changed)
  4. py/emit.h (3 lines changed)
  5. py/emitnative.c (125 lines changed)
  6. py/mpconfig.h (7 lines changed)
  7. py/py.mk (6 lines changed)
  8. py/qstrdefs.h (2 lines changed)

py/asmarm.c (327 lines changed)

@@ -0,0 +1,327 @@
/*
* This file is part of the Micro Python project, http://micropython.org/
*
* The MIT License (MIT)
*
* Copyright (c) 2014 Fabian Vogt
* Copyright (c) 2013, 2014 Damien P. George
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include "mpconfig.h"
#include "misc.h"
#include "asmarm.h"
// wrapper around everything in this file
#if MICROPY_EMIT_ARM
// true if x fits in a signed 24-bit field
#define SIGNED_FIT24(x) ((((x) & 0xff800000) == 0) || (((x) & 0xff000000) == 0xff000000))
struct _asm_arm_t {
uint pass;
uint code_offset;
uint code_size;
byte *code_base;
byte dummy_data[4];
uint max_num_labels;
int *label_offsets;
int num_locals;
uint push_reglist;
uint stack_adjust;
};
asm_arm_t *asm_arm_new(uint max_num_labels) {
asm_arm_t *as;
as = m_new0(asm_arm_t, 1);
as->max_num_labels = max_num_labels;
as->label_offsets = m_new(int, max_num_labels);
return as;
}
void asm_arm_free(asm_arm_t *as, bool free_code) {
if (free_code) {
m_del(byte, as->code_base, as->code_size);
}
m_del_obj(asm_arm_t, as);
}
void asm_arm_start_pass(asm_arm_t *as, uint pass) {
as->pass = pass;
as->code_offset = 0;
if (pass == ASM_ARM_PASS_COMPUTE) {
memset(as->label_offsets, -1, as->max_num_labels * sizeof(int));
}
}
void asm_arm_end_pass(asm_arm_t *as) {
if (as->pass == ASM_ARM_PASS_COMPUTE) {
// calculate size of code in bytes
as->code_size = as->code_offset;
as->code_base = m_new(byte, as->code_size);
}
}
// all functions must go through this one to emit bytes
// if as->pass < ASM_ARM_PASS_EMIT, this function only counts the bytes and returns a 4-byte dummy buffer
STATIC byte *asm_arm_get_cur_to_write_bytes(asm_arm_t *as, int num_bytes_to_write) {
if (as->pass < ASM_ARM_PASS_EMIT) {
as->code_offset += num_bytes_to_write;
return as->dummy_data;
} else {
assert(as->code_offset + num_bytes_to_write <= as->code_size);
byte *c = as->code_base + as->code_offset;
as->code_offset += num_bytes_to_write;
return c;
}
}
uint asm_arm_get_code_size(asm_arm_t *as) {
return as->code_size;
}
void *asm_arm_get_code(asm_arm_t *as) {
return as->code_base;
}
// Insert word into instruction flow
STATIC void emit(asm_arm_t *as, uint op) {
*(uint*)asm_arm_get_cur_to_write_bytes(as, 4) = op;
}
// Insert word into instruction flow, add "ALWAYS" condition code
STATIC void emit_al(asm_arm_t *as, uint op) {
emit(as, op | ARM_CC_AL);
}
// Basic instructions without condition code
STATIC uint asm_arm_op_push(uint reglist) {
// stmfd sp!, {reglist}
return 0x92d0000 | (reglist & 0xFFFF);
}
STATIC uint asm_arm_op_pop(uint reglist) {
// ldmfd sp!, {reglist}
return 0x8bd0000 | (reglist & 0xFFFF);
}
STATIC uint asm_arm_op_mov_reg(uint rd, uint rn) {
// mov rd, rn
return 0x1a00000 | (rd << 12) | rn;
}
STATIC uint asm_arm_op_mov_imm(uint rd, uint imm) {
// mov rd, #imm
return 0x3a00000 | (rd << 12) | imm;
}
STATIC uint asm_arm_op_mvn_imm(uint rd, uint imm) {
// mvn rd, #imm
return 0x3e00000 | (rd << 12) | imm;
}
STATIC uint asm_arm_op_add_imm(uint rd, uint rn, uint imm) {
// add rd, rn, #imm
return 0x2800000 | (rn << 16) | (rd << 12) | (imm & 0xFF);
}
STATIC uint asm_arm_op_add_reg(uint rd, uint rn, uint rm) {
// add rd, rn, rm
return 0x0800000 | (rn << 16) | (rd << 12) | rm;
}
STATIC uint asm_arm_op_sub_imm(uint rd, uint rn, uint imm) {
// sub rd, rn, #imm
return 0x2400000 | (rn << 16) | (rd << 12) | (imm & 0xFF);
}
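The op_* helpers above build only the low 28 bits of an instruction; the condition code is ORed in by emit_al() (or emit() with an explicit ARM_CC_*). A quick worked encoding, as an editor's illustration rather than part of the commit, assuming an asm_arm_t *as from asm_arm_new():
uint op = asm_arm_op_add_imm(REG_R0, REG_R1, 5); // 0x2800000 | (1 << 16) | (0 << 12) | 5 = 0x02810005
emit_al(as, op); // ORs in ARM_CC_AL (0xe << 28), emitting 0xE2810005 == "add r0, r1, #5"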
void asm_arm_bkpt(asm_arm_t *as) {
// bkpt #0
emit_al(as, 0x1200070);
}
// locals:
// - stored on the stack in ascending order
// - numbered 0 through as->num_locals-1
// - SP points to first local
//
//  | SP
//  v
//  l0  l1  l2  ...  l(n-1)
//  ^                ^
//  | low address    | high address in RAM
void asm_arm_entry(asm_arm_t *as, int num_locals) {
if (num_locals < 0) {
num_locals = 0;
}
as->stack_adjust = 0;
as->num_locals = num_locals;
as->push_reglist = 1 << REG_R1 | 1 << REG_R2 | 1 << REG_R3 | 1 << REG_R4
| 1 << REG_R5 | 1 << REG_R6 | 1 << REG_R7 | 1 << REG_R8;
// Only adjust the stack if there are more locals than usable registers
if(num_locals > 3) {
as->stack_adjust = num_locals * 4;
// Align stack to 8 bytes
if(as->num_locals & 1)
as->stack_adjust += 4;
}
emit_al(as, asm_arm_op_push(as->push_reglist | 1 << REG_LR));
if (as->stack_adjust > 0) {
emit_al(as, asm_arm_op_sub_imm(REG_SP, REG_SP, as->stack_adjust));
}
}
void asm_arm_exit(asm_arm_t *as) {
if (as->stack_adjust > 0) {
emit_al(as, asm_arm_op_add_imm(REG_SP, REG_SP, as->stack_adjust));
}
emit_al(as, asm_arm_op_pop(as->push_reglist | (1 << REG_PC)));
}
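As an illustration (editor's sketch, not part of the diff): with 5 locals, the count exceeds the 3 register-cached locals, so asm_arm_entry() reserves 5*4 = 20 bytes, rounded up to 24 to keep the stack 8-byte aligned, and asm_arm_exit() undoes it:
asm_arm_entry(as, 5);
// emits: stmfd sp!, {r1-r8, lr}
//        sub   sp, sp, #24
// ... function body ...
asm_arm_exit(as);
// emits: add   sp, sp, #24
//        ldmfd sp!, {r1-r8, pc}   (restores registers and returns)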
void asm_arm_label_assign(asm_arm_t *as, uint label) {
assert(label < as->max_num_labels);
if (as->pass < ASM_ARM_PASS_EMIT) {
// assign label offset
assert(as->label_offsets[label] == -1);
as->label_offsets[label] = as->code_offset;
} else {
// ensure label offset has not changed from PASS_COMPUTE to PASS_EMIT
assert(as->label_offsets[label] == as->code_offset);
}
}
void asm_arm_align(asm_arm_t* as, uint align) {
// TODO fill unused data with NOPs?
as->code_offset = (as->code_offset + align - 1) & (~(align - 1));
}
void asm_arm_data(asm_arm_t* as, uint bytesize, uint val) {
byte *c = asm_arm_get_cur_to_write_bytes(as, bytesize);
// only write to the buffer in the emit pass (otherwise we overflow dummy_data)
if (as->pass == ASM_ARM_PASS_EMIT) {
// little endian
for (uint i = 0; i < bytesize; i++) {
*c++ = val;
val >>= 8;
}
}
}
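For example (illustrative, assuming an asm_arm_t *as in scope), appending a 32-bit literal:
asm_arm_data(as, 4, 0x11223344); // appends the bytes 0x44, 0x33, 0x22, 0x11 (little endian)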
void asm_arm_mov_reg_reg(asm_arm_t *as, uint reg_dest, uint reg_src) {
emit_al(as, asm_arm_op_mov_reg(reg_dest, reg_src));
}
void asm_arm_mov_reg_i32(asm_arm_t *as, uint rd, int imm) {
// TODO: There are more variants of immediate values
if ((imm & 0xFF) == imm) {
emit_al(as, asm_arm_op_mov_imm(rd, imm));
} else if (imm < 0 && ((~imm) & 0xFF) == ~imm) {
// mvn is "move not", not "move negative": mvn rd, #x loads ~x, so pass ~imm
emit_al(as, asm_arm_op_mvn_imm(rd, ~imm));
} else {
// Insert immediate into code and jump over it
emit_al(as, 0x59f0000 | (rd << 12)); // ldr rd, [pc]
emit_al(as, 0xa000000); // b pc (branch over the immediate word that follows)
emit(as, imm);
}
}
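Illustrative cases (editor's sketch, not part of the diff) showing which of the three paths is taken:
asm_arm_mov_reg_i32(as, REG_R0, 0x7f);       // fits in 8 bits: mov r0, #0x7f
asm_arm_mov_reg_i32(as, REG_R0, -5);         // ~(-5) = 4 fits: mvn r0, #4 (loads 0xfffffffb)
asm_arm_mov_reg_i32(as, REG_R0, 0x12345678); // neither: ldr r0, [pc] ; b pc ; .word 0x12345678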
void asm_arm_mov_local_reg(asm_arm_t *as, int local_num, uint rd) {
// str rd, [sp, #local_num*4]
emit_al(as, 0x58d0000 | (rd << 12) | (local_num << 2));
}
void asm_arm_mov_reg_local(asm_arm_t *as, uint rd, int local_num) {
// ldr rd, [sp, #local_num*4]
emit_al(as, 0x59d0000 | (rd << 12) | (local_num << 2));
}
void asm_arm_cmp_reg_i8(asm_arm_t *as, uint rd, int imm) {
// cmp rd, #imm
emit_al(as, 0x3500000 | (rd << 16) | (imm & 0xFF));
}
void asm_arm_cmp_reg_reg(asm_arm_t *as, uint rd, uint rn) {
// cmp rd, rn
emit_al(as, 0x1500000 | (rd << 16) | rn);
}
void asm_arm_less_op(asm_arm_t *as, uint rd, uint rn) {
asm_arm_cmp_reg_reg(as, rd, rn); // cmp rd, rn
emit(as, asm_arm_op_mov_imm(REG_RET, 1) | ARM_CC_LT); // movlt REG_RET, #1
emit(as, asm_arm_op_mov_imm(REG_RET, 0) | ARM_CC_GE); // movge REG_RET, #0
}
void asm_arm_add_reg(asm_arm_t *as, uint rd, uint rn, uint rm) {
// add rd, rn, rm
emit_al(as, asm_arm_op_add_reg(rd, rn, rm));
}
void asm_arm_mov_reg_local_addr(asm_arm_t *as, uint rd, int local_num) {
// add rd, sp, #local_num*4
emit_al(as, asm_arm_op_add_imm(rd, REG_SP, local_num << 2));
}
void asm_arm_bcc_label(asm_arm_t *as, int cond, uint label) {
assert(label < as->max_num_labels);
int dest = as->label_offsets[label];
int rel = dest - as->code_offset;
rel -= 8; // account for instruction prefetch, PC is 8 bytes ahead of this instruction
rel >>= 2; // in ARM mode the branch target is 32-bit aligned, so the 2 LSB are omitted
if (SIGNED_FIT24(rel)) {
emit(as, cond | 0xa000000 | (rel & 0xffffff));
} else {
printf("asm_arm_bcc: branch does not fit in 24 bits\n");
}
}
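A worked offset calculation (editor's illustration; "label" here is a previously assigned label id, hypothetical) for a conditional branch whose target sits 0x40 bytes after the branch instruction:
asm_arm_bcc_label(as, ARM_CC_EQ, label); // label bound 0x40 bytes ahead of this branch
// rel = 0x40 - 8 = 0x38  (PC reads 8 bytes ahead of the branch)
// rel >>= 2      = 0x0e  (targets are word aligned, so the 2 LSB are dropped)
// emitted word: ARM_CC_EQ | 0xa000000 | 0x0e = 0x0A00000E == "beq <label>"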
void asm_arm_b_label(asm_arm_t *as, uint label) {
asm_arm_bcc_label(as, ARM_CC_AL, label);
}
void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp) {
// If the table offset fits into the ldr instruction
if(fun_id < (0x1000 / 4)) {
emit_al(as, asm_arm_op_mov_reg(REG_LR, REG_PC)); // mov lr, pc
emit_al(as, 0x597f000 | (fun_id << 2)); // ldr pc, [r7, #fun_id*4]
return;
}
emit_al(as, 0x59f0004 | (reg_temp << 12)); // ldr reg_temp, [pc, #4]
// Set lr after fun_ptr
emit_al(as, asm_arm_op_add_imm(REG_LR, REG_PC, 4)); // add lr, pc, #4
emit_al(as, asm_arm_op_mov_reg(REG_PC, reg_temp)); // mov pc, reg_temp
emit(as, (uint) fun_ptr);
}
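The fast path relies on r7 holding the function table (emitnative.c loads mp_fun_table into REG_R7 in its prologue), and the 0x1000/4 limit comes from the 12-bit immediate offset field of ldr. An illustrative expansion, not part of the diff, for fun_id = 5:
asm_arm_bl_ind(as, mp_fun_table[5], 5, REG_R3);
// emits (fast path, since 5 < 0x1000/4):
//   mov lr, pc          ; PC reads 8 bytes ahead, so lr = the instruction after the ldr
//   ldr pc, [r7, #20]   ; jump to mp_fun_table[5]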
#endif // MICROPY_EMIT_ARM

py/asmarm.h (104 lines changed)

@@ -0,0 +1,104 @@
/*
* This file is part of the Micro Python project, http://micropython.org/
*
* The MIT License (MIT)
*
* Copyright (c) 2014 Fabian Vogt
* Copyright (c) 2013, 2014 Damien P. George
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#define ASM_ARM_PASS_COMPUTE (1)
#define ASM_ARM_PASS_EMIT (2)
#define REG_R0 (0)
#define REG_R1 (1)
#define REG_R2 (2)
#define REG_R3 (3)
#define REG_R4 (4)
#define REG_R5 (5)
#define REG_R6 (6)
#define REG_R7 (7)
#define REG_R8 (8)
#define REG_R9 (9)
#define REG_R10 (10)
#define REG_R11 (11)
#define REG_R12 (12)
#define REG_R13 (13)
#define REG_R14 (14)
#define REG_R15 (15)
#define REG_SP (REG_R13)
#define REG_LR (REG_R14)
#define REG_PC (REG_R15)
#define REG_RET REG_R0
#define REG_ARG_1 REG_R0
#define REG_ARG_2 REG_R1
#define REG_ARG_3 REG_R2
#define REG_ARG_4 REG_R3
#define ARM_CC_EQ (0x0 << 28)
#define ARM_CC_NE (0x1 << 28)
#define ARM_CC_CS (0x2 << 28)
#define ARM_CC_CC (0x3 << 28)
#define ARM_CC_MI (0x4 << 28)
#define ARM_CC_PL (0x5 << 28)
#define ARM_CC_VS (0x6 << 28)
#define ARM_CC_VC (0x7 << 28)
#define ARM_CC_HI (0x8 << 28)
#define ARM_CC_LS (0x9 << 28)
#define ARM_CC_GE (0xa << 28)
#define ARM_CC_LT (0xb << 28)
#define ARM_CC_GT (0xc << 28)
#define ARM_CC_LE (0xd << 28)
#define ARM_CC_AL (0xe << 28)
typedef struct _asm_arm_t asm_arm_t;
asm_arm_t *asm_arm_new(uint max_num_labels);
void asm_arm_free(asm_arm_t *as, bool free_code);
void asm_arm_start_pass(asm_arm_t *as, uint pass);
void asm_arm_end_pass(asm_arm_t *as);
uint asm_arm_get_code_size(asm_arm_t *as);
void *asm_arm_get_code(asm_arm_t *as);
void asm_arm_entry(asm_arm_t *as, int num_locals);
void asm_arm_exit(asm_arm_t *as);
void asm_arm_label_assign(asm_arm_t *as, uint label);
void asm_arm_align(asm_arm_t* as, uint align);
void asm_arm_data(asm_arm_t* as, uint bytesize, uint val);
void asm_arm_bkpt(asm_arm_t *as);
void asm_arm_mov_reg_reg(asm_arm_t *as, uint reg_dest, uint reg_src);
void asm_arm_mov_reg_i32(asm_arm_t *as, uint rd, int imm);
void asm_arm_mov_local_reg(asm_arm_t *as, int local_num, uint rd);
void asm_arm_mov_reg_local(asm_arm_t *as, uint rd, int local_num);
void asm_arm_cmp_reg_i8(asm_arm_t *as, uint rd, int imm);
void asm_arm_cmp_reg_reg(asm_arm_t *as, uint rd, uint rn);
void asm_arm_less_op(asm_arm_t *as, uint rd, uint rn);
void asm_arm_add_reg(asm_arm_t *as, uint rd, uint rn, uint rm);
void asm_arm_mov_reg_local_addr(asm_arm_t *as, uint rd, int local_num);
void asm_arm_bcc_label(asm_arm_t *as, int cond, uint label);
void asm_arm_b_label(asm_arm_t *as, uint label);
void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp);
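Putting the API together, the intended calling pattern (a minimal sketch inferred from how emitnative.c drives its assemblers; not part of this commit) is one compute pass to size the buffer followed by an identical emit pass to fill it:
asm_arm_t *as = asm_arm_new(16); // up to 16 labels
for (uint pass = ASM_ARM_PASS_COMPUTE; pass <= ASM_ARM_PASS_EMIT; pass++) {
    asm_arm_start_pass(as, pass);
    asm_arm_entry(as, 0);                 // prologue, no stack locals
    asm_arm_mov_reg_i32(as, REG_RET, 42); // return value in r0
    asm_arm_exit(as);                     // epilogue: pops saved regs into pc
    asm_arm_end_pass(as);                 // after the compute pass this allocates code_base
}
void *code = asm_arm_get_code(as); // asm_arm_get_code_size(as) bytes of ARM machine code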

py/compile.c (7 lines changed)

@@ -3626,6 +3626,11 @@ mp_obj_t mp_compile(mp_parse_node_t pn, qstr source_file, uint emit_opt, bool is
emit_native = emit_native_thumb_new(max_num_labels);
}
comp->emit_method_table = &emit_native_thumb_method_table;
#elif MICROPY_EMIT_ARM
if (emit_native == NULL) {
emit_native = emit_native_arm_new(max_num_labels);
}
comp->emit_method_table = &emit_native_arm_method_table;
#endif
comp->emit = emit_native;
EMIT_ARG(set_native_type, MP_EMIT_NATIVE_TYPE_ENABLE, s->emit_options == MP_EMIT_OPT_VIPER, 0);
@@ -3669,6 +3674,8 @@ mp_obj_t mp_compile(mp_parse_node_t pn, qstr source_file, uint emit_opt, bool is
emit_native_x64_free(emit_native);
#elif MICROPY_EMIT_THUMB
emit_native_thumb_free(emit_native);
#elif MICROPY_EMIT_ARM
emit_native_arm_free(emit_native);
#endif
}
#endif

py/emit.h (3 lines changed)

@@ -161,17 +161,20 @@ extern const emit_method_table_t emit_cpython_method_table;
extern const emit_method_table_t emit_bc_method_table;
extern const emit_method_table_t emit_native_x64_method_table;
extern const emit_method_table_t emit_native_thumb_method_table;
extern const emit_method_table_t emit_native_arm_method_table;
emit_t *emit_pass1_new(void);
emit_t *emit_cpython_new(uint max_num_labels);
emit_t *emit_bc_new(uint max_num_labels);
emit_t *emit_native_x64_new(uint max_num_labels);
emit_t *emit_native_thumb_new(uint max_num_labels);
emit_t *emit_native_arm_new(uint max_num_labels);
void emit_pass1_free(emit_t *emit);
void emit_bc_free(emit_t *emit);
void emit_native_x64_free(emit_t *emit);
void emit_native_thumb_free(emit_t *emit);
void emit_native_arm_free(emit_t *emit);
typedef struct _emit_inline_asm_t emit_inline_asm_t;

py/emitnative.c (125 lines changed)

@@ -69,7 +69,7 @@
#endif
// wrapper around everything in this file
#if (MICROPY_EMIT_X64 && N_X64) || (MICROPY_EMIT_THUMB && N_THUMB)
#if (MICROPY_EMIT_X64 && N_X64) || (MICROPY_EMIT_THUMB && N_THUMB) || (MICROPY_EMIT_ARM && N_ARM)
#if N_X64
@@ -117,6 +117,30 @@
#define ASM_MOV_REG_TO_REG(reg_src, reg_dest) asm_thumb_mov_reg_reg(emit->as, (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(local_num, reg) asm_thumb_mov_reg_local_addr(emit->as, (reg), (local_num))
#elif N_ARM
// ARM specific stuff
#include "asmarm.h"
#define REG_LOCAL_1 (REG_R4)
#define REG_LOCAL_2 (REG_R5)
#define REG_LOCAL_3 (REG_R6)
#define REG_LOCAL_NUM (3)
#define EXPORT_FUN(name) emit_native_arm_##name
#define REG_TEMP0 (REG_R0)
#define REG_TEMP1 (REG_R1)
#define REG_TEMP2 (REG_R2)
#define ASM_MOV_REG_TO_LOCAL(reg, local_num) asm_arm_mov_local_reg(emit->as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(imm, reg) asm_arm_mov_reg_i32(emit->as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(imm, reg) asm_arm_mov_reg_i32(emit->as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(imm, local_num, reg_temp) do { asm_arm_mov_reg_i32(emit->as, (reg_temp), (imm)); asm_arm_mov_local_reg(emit->as, (local_num), (reg_temp)); } while (false)
#define ASM_MOV_LOCAL_TO_REG(local_num, reg) asm_arm_mov_reg_local(emit->as, (reg), (local_num))
#define ASM_MOV_REG_TO_REG(reg_src, reg_dest) asm_arm_mov_reg_reg(emit->as, (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(local_num, reg) asm_arm_mov_reg_local_addr(emit->as, (reg), (local_num))
#endif
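These macros are what keep the rest of emitnative.c architecture-neutral; for example (illustrative expansion, not part of the diff):
// ASM_MOV_IMM_TO_LOCAL_USING(123, 2, REG_TEMP0) expands to
//   asm_arm_mov_reg_i32(emit->as, REG_R0, 123); // mov r0, #123
//   asm_arm_mov_local_reg(emit->as, 2, REG_R0); // str r0, [sp, #8]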
typedef enum {
@@ -169,6 +193,8 @@ struct _emit_t {
asm_x64_t *as;
#elif N_THUMB
asm_thumb_t *as;
#elif N_ARM
asm_arm_t *as;
#endif
};
@@ -178,6 +204,8 @@ emit_t *EXPORT_FUN(new)(uint max_num_labels) {
emit->as = asm_x64_new(max_num_labels);
#elif N_THUMB
emit->as = asm_thumb_new(max_num_labels);
#elif N_ARM
emit->as = asm_arm_new(max_num_labels);
#endif
return emit;
}
@@ -187,6 +215,8 @@ void EXPORT_FUN(free)(emit_t *emit) {
asm_x64_free(emit->as, false);
#elif N_THUMB
asm_thumb_free(emit->as, false);
#elif N_ARM
asm_arm_free(emit->as, false);
#endif
m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
@@ -260,6 +290,8 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
asm_x64_start_pass(emit->as, pass == MP_PASS_EMIT ? ASM_X64_PASS_EMIT : ASM_X64_PASS_COMPUTE);
#elif N_THUMB
asm_thumb_start_pass(emit->as, pass == MP_PASS_EMIT ? ASM_THUMB_PASS_EMIT : ASM_THUMB_PASS_COMPUTE);
#elif N_ARM
asm_arm_start_pass(emit->as, pass == MP_PASS_EMIT ? ASM_ARM_PASS_EMIT : ASM_ARM_PASS_COMPUTE);
#endif
// entry to function
@@ -276,6 +308,8 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
asm_x64_entry(emit->as, num_locals);
#elif N_THUMB
asm_thumb_entry(emit->as, num_locals);
#elif N_ARM
asm_arm_entry(emit->as, num_locals);
#endif
// initialise locals from parameters
@@ -309,6 +343,23 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
}
asm_thumb_mov_reg_i32(emit->as, REG_R7, (mp_uint_t)mp_fun_table);
#elif N_ARM
for (int i = 0; i < scope->num_pos_args; i++) {
if (i == 0) {
asm_arm_mov_reg_reg(emit->as, REG_LOCAL_1, REG_ARG_1);
} else if (i == 1) {
asm_arm_mov_reg_reg(emit->as, REG_LOCAL_2, REG_ARG_2);
} else if (i == 2) {
asm_arm_mov_reg_reg(emit->as, REG_LOCAL_3, REG_ARG_3);
} else if (i == 3) {
asm_arm_mov_local_reg(emit->as, i - REG_LOCAL_NUM, REG_ARG_4);
} else {
// TODO not implemented
assert(0);
}
}
asm_arm_mov_reg_i32(emit->as, REG_R7, (mp_uint_t)mp_fun_table); // keep mp_fun_table in r7 for asm_arm_bl_ind's fast path
#endif
}
@@ -323,6 +374,11 @@ STATIC void emit_native_end_pass(emit_t *emit) {
asm_thumb_exit(emit->as);
}
asm_thumb_end_pass(emit->as);
#elif N_ARM
if (!emit->last_emit_was_return_value) {
asm_arm_exit(emit->as);
}
asm_arm_end_pass(emit->as);
#endif
// check stack is back to zero size
@@ -337,6 +393,9 @@ STATIC void emit_native_end_pass(emit_t *emit) {
#elif N_THUMB
void *f = asm_thumb_get_code(emit->as);
mp_uint_t f_len = asm_thumb_get_code_size(emit->as);
#elif N_ARM
void *f = asm_arm_get_code(emit->as);
mp_uint_t f_len = asm_arm_get_code_size(emit->as);
#endif
// compute type signature
@@ -536,6 +595,8 @@ STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
#elif N_THUMB
asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#elif N_ARM
asm_arm_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}
@@ -546,6 +607,8 @@ STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_
asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
#elif N_THUMB
asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#elif N_ARM
asm_arm_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}
@@ -557,6 +620,8 @@ STATIC void emit_call_with_imm_arg_aligned(emit_t *emit, mp_fun_kind_t fun_kind,
asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
#elif N_THUMB
asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#elif N_ARM
asm_arm_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}
@@ -568,6 +633,8 @@ STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_i
asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
#elif N_THUMB
asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#elif N_ARM
asm_arm_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}
@@ -581,6 +648,8 @@ STATIC void emit_call_with_3_imm_args_and_first_aligned(emit_t *emit, mp_fun_kin
asm_x64_call_ind(emit->as, mp_fun_table[fun_kind], REG_RAX);
#elif N_THUMB
asm_thumb_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#elif N_ARM
asm_arm_bl_ind(emit->as, mp_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}
@@ -673,6 +742,8 @@ STATIC void emit_native_label_assign(emit_t *emit, uint l) {
asm_x64_label_assign(emit->as, l);
#elif N_THUMB
asm_thumb_label_assign(emit->as, l);
#elif N_ARM
asm_arm_label_assign(emit->as, l);
#endif
emit_post(emit);
}
@@ -804,6 +875,18 @@ STATIC void emit_native_load_fast(emit_t *emit, qstr qstr, uint id_flags, int lo
asm_thumb_mov_reg_local(emit->as, REG_R0, local_num - REG_LOCAL_NUM);
emit_post_push_reg(emit, vtype, REG_R0);
}
#elif N_ARM
if (local_num == 0) {
emit_post_push_reg(emit, vtype, REG_LOCAL_1);
} else if (local_num == 1) {
emit_post_push_reg(emit, vtype, REG_LOCAL_2);
} else if (local_num == 2) {
emit_post_push_reg(emit, vtype, REG_LOCAL_3);
} else {
need_reg_single(emit, REG_R0, 0);
asm_arm_mov_reg_local(emit->as, REG_R0, local_num - REG_LOCAL_NUM);
emit_post_push_reg(emit, vtype, REG_R0);
}
#endif
}
@@ -882,6 +965,17 @@ STATIC void emit_native_store_fast(emit_t *emit, qstr qstr, int local_num) {
emit_pre_pop_reg(emit, &vtype, REG_R0);
asm_thumb_mov_local_reg(emit->as, local_num - REG_LOCAL_NUM, REG_R0);
}
#elif N_ARM
if (local_num == 0) {
emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
} else if (local_num == 1) {
emit_pre_pop_reg(emit, &vtype, REG_LOCAL_2);
} else if (local_num == 2) {
emit_pre_pop_reg(emit, &vtype, REG_LOCAL_3);
} else {
emit_pre_pop_reg(emit, &vtype, REG_R0);
asm_arm_mov_local_reg(emit->as, local_num - REG_LOCAL_NUM, REG_R0);
}
#endif
emit_post(emit);
@@ -1022,6 +1116,8 @@ STATIC void emit_native_jump(emit_t *emit, uint label) {
asm_x64_jmp_label(emit->as, label);
#elif N_THUMB
asm_thumb_b_label(emit->as, label);
#elif N_ARM
asm_arm_b_label(emit->as, label);
#endif
emit_post(emit);
}
@@ -1055,6 +1151,9 @@ STATIC void emit_native_pop_jump_if_true(emit_t *emit, uint label) {
#elif N_THUMB
asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
asm_thumb_bcc_label(emit->as, THUMB_CC_NE, label);
#elif N_ARM
asm_arm_cmp_reg_i8(emit->as, REG_RET, 0);
asm_arm_bcc_label(emit->as, ARM_CC_NE, label);
#endif
emit_post(emit);
}
@@ -1067,6 +1166,9 @@ STATIC void emit_native_pop_jump_if_false(emit_t *emit, uint label) {
#elif N_THUMB
asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
asm_thumb_bcc_label(emit->as, THUMB_CC_EQ, label);
#elif N_ARM
asm_arm_cmp_reg_i8(emit->as, REG_RET, 0);
asm_arm_bcc_label(emit->as, ARM_CC_EQ, label);
#endif
emit_post(emit);
}
@@ -1079,6 +1181,9 @@ STATIC void emit_native_jump_if_true_or_pop(emit_t *emit, uint label) {
#elif N_THUMB
asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
asm_thumb_bcc_label(emit->as, THUMB_CC_NE, label);
#elif N_ARM
asm_arm_cmp_reg_i8(emit->as, REG_RET, 0);
asm_arm_bcc_label(emit->as, ARM_CC_NE, label);
#endif
adjust_stack(emit, -1);
emit_post(emit);
@@ -1092,6 +1197,9 @@ STATIC void emit_native_jump_if_false_or_pop(emit_t *emit, uint label) {
#elif N_THUMB
asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
asm_thumb_bcc_label(emit->as, THUMB_CC_EQ, label);
#elif N_ARM
asm_arm_cmp_reg_i8(emit->as, REG_RET, 0);
asm_arm_bcc_label(emit->as, ARM_CC_EQ, label);
#endif
adjust_stack(emit, -1);
emit_post(emit);
@@ -1126,6 +1234,9 @@ STATIC void emit_native_setup_except(emit_t *emit, uint label) {
#elif N_THUMB
asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
asm_thumb_bcc_label(emit->as, THUMB_CC_NE, label);
#elif N_ARM
asm_arm_cmp_reg_i8(emit->as, REG_RET, 0);
asm_arm_bcc_label(emit->as, ARM_CC_NE, label);
#endif
emit_post(emit);
}
@@ -1162,6 +1273,9 @@ STATIC void emit_native_for_iter(emit_t *emit, uint label) {
#elif N_THUMB
asm_thumb_cmp_rlo_rlo(emit->as, REG_RET, REG_TEMP1);
asm_thumb_bcc_label(emit->as, THUMB_CC_EQ, label);
#elif N_ARM
asm_arm_cmp_reg_i8(emit->as, REG_RET, 0);
asm_arm_bcc_label(emit->as, ARM_CC_EQ, label);
#endif
emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}
@@ -1211,6 +1325,8 @@ STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
asm_x64_add_r64_to_r64(emit->as, REG_ARG_3, REG_ARG_2);
#elif N_THUMB
asm_thumb_add_rlo_rlo_rlo(emit->as, REG_ARG_2, REG_ARG_2, REG_ARG_3);
#elif N_ARM
asm_arm_add_reg(emit->as, REG_ARG_2, REG_ARG_2, REG_ARG_3);
#endif
emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
} else if (op == MP_BINARY_OP_LESS) {
@@ -1223,6 +1339,8 @@ STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
asm_thumb_ite_ge(emit->as);
asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0); // if r0 >= r1
asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1); // if r0 < r1
#elif N_ARM
asm_arm_less_op(emit->as, REG_ARG_2, REG_ARG_3);
#endif
emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
} else {
@@ -1456,6 +1574,9 @@ STATIC void emit_native_return_value(emit_t *emit) {
#elif N_THUMB
//asm_thumb_call_ind(emit->as, 0, REG_R0); to seg fault for debugging with gdb
asm_thumb_exit(emit->as);
#elif N_ARM
//asm_arm_bkpt(emit->as); to insert a bkpt and not segfault for debugging
asm_arm_exit(emit->as);
#endif
}
@@ -1584,4 +1705,4 @@ const emit_method_table_t EXPORT_FUN(method_table) = {
emit_native_end_except_handler,
};
#endif // (MICROPY_EMIT_X64 && N_X64) || (MICROPY_EMIT_THUMB && N_THUMB)
#endif // (MICROPY_EMIT_X64 && N_X64) || (MICROPY_EMIT_THUMB && N_THUMB) || (MICROPY_EMIT_ARM && N_ARM)

py/mpconfig.h (7 lines changed)

@@ -111,8 +111,13 @@
#define MICROPY_EMIT_INLINE_THUMB (0)
#endif
// Whether to emit ARM native code
#ifndef MICROPY_EMIT_ARM
#define MICROPY_EMIT_ARM (0)
#endif
// Convenience definition for whether any native emitter is enabled
#define MICROPY_EMIT_NATIVE (MICROPY_EMIT_X64 || MICROPY_EMIT_THUMB)
#define MICROPY_EMIT_NATIVE (MICROPY_EMIT_X64 || MICROPY_EMIT_THUMB || MICROPY_EMIT_ARM)
/*****************************************************************************/
/* Compiler configuration */
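With this default in place, a port opts in by defining the option before mpconfig.h picks its defaults, typically in the port's mpconfigport.h (illustrative snippet, not part of this commit); MICROPY_EMIT_NATIVE then evaluates to 1 as well:
// in a port's mpconfigport.h
#define MICROPY_EMIT_ARM (1)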

py/py.mk (6 lines changed)

@@ -38,6 +38,8 @@ PY_O_BASENAME = \
asmthumb.o \
emitnthumb.o \
emitinlinethumb.o \
asmarm.o \
emitnarm.o \
formatfloat.o \
parsenumbase.o \
parsenum.o \
@@ -145,6 +147,10 @@ $(PY_BUILD)/emitnthumb.o: CFLAGS += -DN_THUMB
$(PY_BUILD)/emitnthumb.o: py/emitnative.c
$(call compile_c)
$(PY_BUILD)/emitnarm.o: CFLAGS += -DN_ARM
$(PY_BUILD)/emitnarm.o: py/emitnative.c
$(call compile_c)
# optimising gc for speed; 5ms down to 4ms on pybv2
$(PY_BUILD)/gc.o: CFLAGS += $(CSUPEROPT)

py/qstrdefs.h (2 lines changed)

@@ -73,7 +73,7 @@ Q(micropython)
Q(bytecode)
Q(const)
#if MICROPY_EMIT_X64 || MICROPY_EMIT_THUMB
#if MICROPY_EMIT_X64 || MICROPY_EMIT_THUMB || MICROPY_EMIT_ARM
Q(native)
Q(viper)
Q(uint)
