Skip to content

Commit c8746e1

Browse files
committed
py: Move arch-specific assembler macros from emitnative to asmXXX.h.
1 parent c271359 commit c8746e1

5 files changed

Lines changed: 331 additions & 307 deletions

File tree

py/asmarm.h

Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -122,4 +122,84 @@ void asm_arm_bcc_label(asm_arm_t *as, int cond, uint label);
122122
void asm_arm_b_label(asm_arm_t *as, uint label);
123123
void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp);
124124

125+
#if GENERIC_ASM_API
126+
127+
// The following macros provide a (mostly) arch-independent API to
128+
// generate native code, and are used by the native emitter.
129+
130+
#define ASM_WORD_SIZE (4)
131+
132+
#define REG_RET ASM_ARM_REG_R0
133+
#define REG_ARG_1 ASM_ARM_REG_R0
134+
#define REG_ARG_2 ASM_ARM_REG_R1
135+
#define REG_ARG_3 ASM_ARM_REG_R2
136+
#define REG_ARG_4 ASM_ARM_REG_R3
137+
138+
#define REG_TEMP0 ASM_ARM_REG_R0
139+
#define REG_TEMP1 ASM_ARM_REG_R1
140+
#define REG_TEMP2 ASM_ARM_REG_R2
141+
142+
#define REG_LOCAL_1 ASM_ARM_REG_R4
143+
#define REG_LOCAL_2 ASM_ARM_REG_R5
144+
#define REG_LOCAL_3 ASM_ARM_REG_R6
145+
#define REG_LOCAL_NUM (3)
146+
147+
#define ASM_T asm_arm_t
148+
#define ASM_END_PASS asm_arm_end_pass
149+
#define ASM_ENTRY asm_arm_entry
150+
#define ASM_EXIT asm_arm_exit
151+
152+
#define ASM_JUMP asm_arm_b_label
153+
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
154+
do { \
155+
asm_arm_cmp_reg_i8(as, reg, 0); \
156+
asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
157+
} while (0)
158+
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
159+
do { \
160+
asm_arm_cmp_reg_i8(as, reg, 0); \
161+
asm_arm_bcc_label(as, ASM_ARM_CC_NE, label); \
162+
} while (0)
163+
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
164+
do { \
165+
asm_arm_cmp_reg_reg(as, reg1, reg2); \
166+
asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
167+
} while (0)
168+
#define ASM_CALL_IND(as, ptr, idx) asm_arm_bl_ind(as, ptr, idx, ASM_ARM_REG_R3)
169+
170+
#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_arm_mov_local_reg(as, (local_num), (reg))
171+
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
172+
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
173+
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
174+
do { \
175+
asm_arm_mov_reg_i32(as, (reg_temp), (imm)); \
176+
asm_arm_mov_local_reg(as, (local_num), (reg_temp)); \
177+
} while (false)
178+
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_arm_mov_reg_local(as, (reg), (local_num))
179+
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_arm_mov_reg_reg((as), (reg_dest), (reg_src))
180+
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_arm_mov_reg_local_addr(as, (reg), (local_num))
181+
182+
#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_arm_lsl_reg_reg((as), (reg_dest), (reg_shift))
183+
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_arm_asr_reg_reg((as), (reg_dest), (reg_shift))
184+
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_arm_orr_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
185+
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_arm_eor_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
186+
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_arm_and_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
187+
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_arm_add_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
188+
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_arm_sub_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
189+
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_arm_mul_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
190+
191+
#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 0)
192+
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))
193+
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_arm_ldrb_reg_reg((as), (reg_dest), (reg_base))
194+
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_arm_ldrh_reg_reg((as), (reg_dest), (reg_base))
195+
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 0)
196+
197+
#define ASM_STORE_REG_REG(as, reg_value, reg_base) asm_arm_str_reg_reg((as), (reg_value), (reg_base), 0)
198+
#define ASM_STORE_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_str_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))
199+
#define ASM_STORE8_REG_REG(as, reg_value, reg_base) asm_arm_strb_reg_reg((as), (reg_value), (reg_base))
200+
#define ASM_STORE16_REG_REG(as, reg_value, reg_base) asm_arm_strh_reg_reg((as), (reg_value), (reg_base))
201+
#define ASM_STORE32_REG_REG(as, reg_value, reg_base) asm_arm_str_reg_reg((as), (reg_value), (reg_base), 0)
202+
203+
#endif // GENERIC_ASM_API
204+
125205
#endif // __MICROPY_INCLUDED_PY_ASMARM_H__

py/asmthumb.h

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -237,4 +237,85 @@ void asm_thumb_b_label(asm_thumb_t *as, uint label); // convenience: picks narro
237237
void asm_thumb_bcc_label(asm_thumb_t *as, int cc, uint label); // convenience: picks narrow or wide branch
238238
void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp); // convenience
239239

240+
#if GENERIC_ASM_API
241+
242+
// The following macros provide a (mostly) arch-independent API to
243+
// generate native code, and are used by the native emitter.
244+
245+
#define ASM_WORD_SIZE (4)
246+
247+
#define REG_RET ASM_THUMB_REG_R0
248+
#define REG_ARG_1 ASM_THUMB_REG_R0
249+
#define REG_ARG_2 ASM_THUMB_REG_R1
250+
#define REG_ARG_3 ASM_THUMB_REG_R2
251+
#define REG_ARG_4 ASM_THUMB_REG_R3
252+
// rest of args go on stack
253+
254+
#define REG_TEMP0 ASM_THUMB_REG_R0
255+
#define REG_TEMP1 ASM_THUMB_REG_R1
256+
#define REG_TEMP2 ASM_THUMB_REG_R2
257+
258+
#define REG_LOCAL_1 ASM_THUMB_REG_R4
259+
#define REG_LOCAL_2 ASM_THUMB_REG_R5
260+
#define REG_LOCAL_3 ASM_THUMB_REG_R6
261+
#define REG_LOCAL_NUM (3)
262+
263+
#define ASM_T asm_thumb_t
264+
#define ASM_END_PASS asm_thumb_end_pass
265+
#define ASM_ENTRY asm_thumb_entry
266+
#define ASM_EXIT asm_thumb_exit
267+
268+
#define ASM_JUMP asm_thumb_b_label
269+
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
270+
do { \
271+
asm_thumb_cmp_rlo_i8(as, reg, 0); \
272+
asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
273+
} while (0)
274+
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
275+
do { \
276+
asm_thumb_cmp_rlo_i8(as, reg, 0); \
277+
asm_thumb_bcc_label(as, ASM_THUMB_CC_NE, label); \
278+
} while (0)
279+
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
280+
do { \
281+
asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \
282+
asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
283+
} while (0)
284+
#define ASM_CALL_IND(as, ptr, idx) asm_thumb_bl_ind(as, ptr, idx, ASM_THUMB_REG_R3)
285+
286+
#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_thumb_mov_local_reg(as, (local_num), (reg))
287+
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_optimised(as, (reg), (imm))
288+
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_aligned(as, (reg), (imm))
289+
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
290+
do { \
291+
asm_thumb_mov_reg_i32_optimised(as, (reg_temp), (imm)); \
292+
asm_thumb_mov_local_reg(as, (local_num), (reg_temp)); \
293+
} while (false)
294+
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local(as, (reg), (local_num))
295+
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src))
296+
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local_addr(as, (reg), (local_num))
297+
298+
#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSL, (reg_dest), (reg_shift))
299+
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ASR, (reg_dest), (reg_shift))
300+
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ORR, (reg_dest), (reg_src))
301+
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_EOR, (reg_dest), (reg_src))
302+
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_AND, (reg_dest), (reg_src))
303+
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_thumb_add_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
304+
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_thumb_sub_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
305+
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_MUL, (reg_dest), (reg_src))
306+
307+
#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
308+
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), (word_offset))
309+
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrb_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
310+
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrh_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
311+
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
312+
313+
#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
314+
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), (word_offset))
315+
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_thumb_strb_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
316+
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_thumb_strh_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
317+
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
318+
319+
#endif // GENERIC_ASM_API
320+
240321
#endif // __MICROPY_INCLUDED_PY_ASMTHUMB_H__

py/asmx64.h

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,4 +115,87 @@ void asm_x64_mov_r64_to_local(asm_x64_t* as, int src_r64, int dest_local_num);
115115
void asm_x64_mov_local_addr_to_r64(asm_x64_t* as, int local_num, int dest_r64);
116116
void asm_x64_call_ind(asm_x64_t* as, void* ptr, int temp_r32);
117117

118+
#if GENERIC_ASM_API
119+
120+
// The following macros provide a (mostly) arch-independent API to
121+
// generate native code, and are used by the native emitter.
122+
123+
#define ASM_WORD_SIZE (8)
124+
125+
#define REG_RET ASM_X64_REG_RAX
126+
#define REG_ARG_1 ASM_X64_REG_RDI
127+
#define REG_ARG_2 ASM_X64_REG_RSI
128+
#define REG_ARG_3 ASM_X64_REG_RDX
129+
#define REG_ARG_4 ASM_X64_REG_RCX
130+
#define REG_ARG_5 ASM_X64_REG_R08
131+
132+
// caller-save
133+
#define REG_TEMP0 ASM_X64_REG_RAX
134+
#define REG_TEMP1 ASM_X64_REG_RDI
135+
#define REG_TEMP2 ASM_X64_REG_RSI
136+
137+
// callee-save
138+
#define REG_LOCAL_1 ASM_X64_REG_RBX
139+
#define REG_LOCAL_2 ASM_X64_REG_R12
140+
#define REG_LOCAL_3 ASM_X64_REG_R13
141+
#define REG_LOCAL_NUM (3)
142+
143+
#define ASM_T asm_x64_t
144+
#define ASM_END_PASS asm_x64_end_pass
145+
#define ASM_ENTRY asm_x64_entry
146+
#define ASM_EXIT asm_x64_exit
147+
148+
#define ASM_JUMP asm_x64_jmp_label
149+
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
150+
do { \
151+
asm_x64_test_r8_with_r8(as, reg, reg); \
152+
asm_x64_jcc_label(as, ASM_X64_CC_JZ, label); \
153+
} while (0)
154+
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
155+
do { \
156+
asm_x64_test_r8_with_r8(as, reg, reg); \
157+
asm_x64_jcc_label(as, ASM_X64_CC_JNZ, label); \
158+
} while (0)
159+
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
160+
do { \
161+
asm_x64_cmp_r64_with_r64(as, reg1, reg2); \
162+
asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \
163+
} while (0)
164+
#define ASM_CALL_IND(as, ptr, idx) asm_x64_call_ind(as, ptr, ASM_X64_REG_RAX)
165+
166+
#define ASM_MOV_REG_TO_LOCAL asm_x64_mov_r64_to_local
167+
#define ASM_MOV_IMM_TO_REG asm_x64_mov_i64_to_r64_optimised
168+
#define ASM_MOV_ALIGNED_IMM_TO_REG asm_x64_mov_i64_to_r64_aligned
169+
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
170+
do { \
171+
asm_x64_mov_i64_to_r64_optimised(as, (imm), (reg_temp)); \
172+
asm_x64_mov_r64_to_local(as, (reg_temp), (local_num)); \
173+
} while (false)
174+
#define ASM_MOV_LOCAL_TO_REG asm_x64_mov_local_to_r64
175+
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src))
176+
#define ASM_MOV_LOCAL_ADDR_TO_REG asm_x64_mov_local_addr_to_r64
177+
178+
#define ASM_LSL_REG(as, reg) asm_x64_shl_r64_cl((as), (reg))
179+
#define ASM_ASR_REG(as, reg) asm_x64_sar_r64_cl((as), (reg))
180+
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x64_or_r64_r64((as), (reg_dest), (reg_src))
181+
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x64_xor_r64_r64((as), (reg_dest), (reg_src))
182+
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x64_and_r64_r64((as), (reg_dest), (reg_src))
183+
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x64_add_r64_r64((as), (reg_dest), (reg_src))
184+
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x64_sub_r64_r64((as), (reg_dest), (reg_src))
185+
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x64_mul_r64_r64((as), (reg_dest), (reg_src))
186+
187+
#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem64_to_r64((as), (reg_base), 0, (reg_dest))
188+
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x64_mov_mem64_to_r64((as), (reg_base), 8 * (word_offset), (reg_dest))
189+
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem8_to_r64zx((as), (reg_base), 0, (reg_dest))
190+
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem16_to_r64zx((as), (reg_base), 0, (reg_dest))
191+
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem32_to_r64zx((as), (reg_base), 0, (reg_dest))
192+
193+
#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 0)
194+
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 8 * (word_offset))
195+
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x64_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
196+
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x64_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
197+
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x64_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)
198+
199+
#endif // GENERIC_ASM_API
200+
118201
#endif // __MICROPY_INCLUDED_PY_ASMX64_H__

py/asmx86.h

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -113,4 +113,87 @@ void asm_x86_mov_r32_to_local(asm_x86_t* as, int src_r32, int dest_local_num);
113113
void asm_x86_mov_local_addr_to_r32(asm_x86_t* as, int local_num, int dest_r32);
114114
void asm_x86_call_ind(asm_x86_t* as, void* ptr, mp_uint_t n_args, int temp_r32);
115115

116+
#if GENERIC_ASM_API
117+
118+
// The following macros provide a (mostly) arch-independent API to
119+
// generate native code, and are used by the native emitter.
120+
121+
#define ASM_WORD_SIZE (4)
122+
123+
#define REG_RET ASM_X86_REG_EAX
124+
#define REG_ARG_1 ASM_X86_REG_ARG_1
125+
#define REG_ARG_2 ASM_X86_REG_ARG_2
126+
#define REG_ARG_3 ASM_X86_REG_ARG_3
127+
#define REG_ARG_4 ASM_X86_REG_ARG_4
128+
#define REG_ARG_5 ASM_X86_REG_ARG_5
129+
130+
// caller-save, so can be used as temporaries
131+
#define REG_TEMP0 ASM_X86_REG_EAX
132+
#define REG_TEMP1 ASM_X86_REG_ECX
133+
#define REG_TEMP2 ASM_X86_REG_EDX
134+
135+
// callee-save, so can be used as locals
136+
#define REG_LOCAL_1 ASM_X86_REG_EBX
137+
#define REG_LOCAL_2 ASM_X86_REG_ESI
138+
#define REG_LOCAL_3 ASM_X86_REG_EDI
139+
#define REG_LOCAL_NUM (3)
140+
141+
#define ASM_T asm_x86_t
142+
#define ASM_END_PASS asm_x86_end_pass
143+
#define ASM_ENTRY asm_x86_entry
144+
#define ASM_EXIT asm_x86_exit
145+
146+
#define ASM_JUMP asm_x86_jmp_label
147+
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
148+
do { \
149+
asm_x86_test_r8_with_r8(as, reg, reg); \
150+
asm_x86_jcc_label(as, ASM_X86_CC_JZ, label); \
151+
} while (0)
152+
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
153+
do { \
154+
asm_x86_test_r8_with_r8(as, reg, reg); \
155+
asm_x86_jcc_label(as, ASM_X86_CC_JNZ, label); \
156+
} while (0)
157+
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
158+
do { \
159+
asm_x86_cmp_r32_with_r32(as, reg1, reg2); \
160+
asm_x86_jcc_label(as, ASM_X86_CC_JE, label); \
161+
} while (0)
162+
#define ASM_CALL_IND(as, ptr, idx) asm_x86_call_ind(as, ptr, mp_f_n_args[idx], ASM_X86_REG_EAX)
163+
164+
#define ASM_MOV_REG_TO_LOCAL asm_x86_mov_r32_to_local
165+
#define ASM_MOV_IMM_TO_REG asm_x86_mov_i32_to_r32
166+
#define ASM_MOV_ALIGNED_IMM_TO_REG asm_x86_mov_i32_to_r32_aligned
167+
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
168+
do { \
169+
asm_x86_mov_i32_to_r32(as, (imm), (reg_temp)); \
170+
asm_x86_mov_r32_to_local(as, (reg_temp), (local_num)); \
171+
} while (false)
172+
#define ASM_MOV_LOCAL_TO_REG asm_x86_mov_local_to_r32
173+
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x86_mov_r32_r32((as), (reg_dest), (reg_src))
174+
#define ASM_MOV_LOCAL_ADDR_TO_REG asm_x86_mov_local_addr_to_r32
175+
176+
#define ASM_LSL_REG(as, reg) asm_x86_shl_r32_cl((as), (reg))
177+
#define ASM_ASR_REG(as, reg) asm_x86_sar_r32_cl((as), (reg))
178+
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x86_or_r32_r32((as), (reg_dest), (reg_src))
179+
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x86_xor_r32_r32((as), (reg_dest), (reg_src))
180+
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x86_and_r32_r32((as), (reg_dest), (reg_src))
181+
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x86_add_r32_r32((as), (reg_dest), (reg_src))
182+
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x86_sub_r32_r32((as), (reg_dest), (reg_src))
183+
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x86_mul_r32_r32((as), (reg_dest), (reg_src))
184+
185+
#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem32_to_r32((as), (reg_base), 0, (reg_dest))
186+
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x86_mov_mem32_to_r32((as), (reg_base), 4 * (word_offset), (reg_dest))
187+
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem8_to_r32zx((as), (reg_base), 0, (reg_dest))
188+
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem16_to_r32zx((as), (reg_base), 0, (reg_dest))
189+
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem32_to_r32((as), (reg_base), 0, (reg_dest))
190+
191+
#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)
192+
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 4 * (word_offset))
193+
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x86_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
194+
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x86_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
195+
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)
196+
197+
#endif // GENERIC_ASM_API
198+
116199
#endif // __MICROPY_INCLUDED_PY_ASMX86_H__

0 commit comments

Comments (0)