/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#ifndef MICROPY_INCLUDED_PY_ASMTHUMB_H
#define MICROPY_INCLUDED_PY_ASMTHUMB_H

#include <assert.h>

#include "py/misc.h"
#include "py/asmbase.h"

#define ASM_THUMB_REG_R0 (0)
#define ASM_THUMB_REG_R1 (1)
#define ASM_THUMB_REG_R2 (2)
#define ASM_THUMB_REG_R3 (3)
#define ASM_THUMB_REG_R4 (4)
#define ASM_THUMB_REG_R5 (5)
#define ASM_THUMB_REG_R6 (6)
#define ASM_THUMB_REG_R7 (7)
#define ASM_THUMB_REG_R8 (8)
#define ASM_THUMB_REG_R9 (9)
#define ASM_THUMB_REG_R10 (10)
#define ASM_THUMB_REG_R11 (11)
#define ASM_THUMB_REG_R12 (12)
#define ASM_THUMB_REG_R13 (13)
#define ASM_THUMB_REG_R14 (14)
#define ASM_THUMB_REG_R15 (15)
#define ASM_THUMB_REG_LR (ASM_THUMB_REG_R14)

#define ASM_THUMB_CC_EQ (0x0)
#define ASM_THUMB_CC_NE (0x1)
#define ASM_THUMB_CC_CS (0x2)
#define ASM_THUMB_CC_CC (0x3)
#define ASM_THUMB_CC_MI (0x4)
#define ASM_THUMB_CC_PL (0x5)
#define ASM_THUMB_CC_VS (0x6)
#define ASM_THUMB_CC_VC (0x7)
#define ASM_THUMB_CC_HI (0x8)
#define ASM_THUMB_CC_LS (0x9)
#define ASM_THUMB_CC_GE (0xa)
#define ASM_THUMB_CC_LT (0xb)
#define ASM_THUMB_CC_GT (0xc)
#define ASM_THUMB_CC_LE (0xd)

typedef struct _asm_thumb_t {
    mp_asm_base_t base;
    uint32_t push_reglist;
    uint32_t stack_adjust;
} asm_thumb_t;

void asm_thumb_end_pass(asm_thumb_t *as);

void asm_thumb_entry(asm_thumb_t *as, int num_locals);
void asm_thumb_exit(asm_thumb_t *as);

// argument order follows ARM, in general dest is first
// note there is a difference between movw and mov.w, and many others!

#define ASM_THUMB_OP_IT (0xbf00)
#define ASM_THUMB_OP_ITE_EQ (0xbf0c)
#define ASM_THUMB_OP_ITE_CS (0xbf2c)
#define ASM_THUMB_OP_ITE_MI (0xbf4c)
#define ASM_THUMB_OP_ITE_VS (0xbf6c)
#define ASM_THUMB_OP_ITE_HI (0xbf8c)
#define ASM_THUMB_OP_ITE_GE (0xbfac)
#define ASM_THUMB_OP_ITE_GT (0xbfcc)

#define ASM_THUMB_OP_NOP (0xbf00)
#define ASM_THUMB_OP_WFI (0xbf30)
#define ASM_THUMB_OP_CPSID_I (0xb672) // cpsid i, disable irq
#define ASM_THUMB_OP_CPSIE_I (0xb662) // cpsie i, enable irq

void asm_thumb_op16(asm_thumb_t *as, uint op);
void asm_thumb_op32(asm_thumb_t *as, uint op1, uint op2);

static inline void asm_thumb_it_cc(asm_thumb_t *as, uint cc, uint mask)
    { asm_thumb_op16(as, ASM_THUMB_OP_IT | (cc << 4) | mask); }
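
// Illustrative example (added comment, not from the original header): the low
// byte of the IT encoding is (cond << 4) | mask, where mask 0x8 guards a single
// following instruction, e.g.
//     asm_thumb_it_cc(as, ASM_THUMB_CC_EQ, 0x8);
// should emit 0xbf08, i.e. "it eq".  The ASM_THUMB_OP_ITE_xx opcodes above use
// mask 0xc, which guards a then/else pair of instructions.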

// FORMAT 1: move shifted register

#define ASM_THUMB_FORMAT_1_LSL (0x0000)
#define ASM_THUMB_FORMAT_1_LSR (0x0800)
#define ASM_THUMB_FORMAT_1_ASR (0x1000)

#define ASM_THUMB_FORMAT_1_ENCODE(op, rlo_dest, rlo_src, offset) \
    ((op) | ((offset) << 6) | ((rlo_src) << 3) | (rlo_dest))

static inline void asm_thumb_format_1(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src, uint offset) {
    assert(rlo_dest < ASM_THUMB_REG_R8);
    assert(rlo_src < ASM_THUMB_REG_R8);
    asm_thumb_op16(as, ASM_THUMB_FORMAT_1_ENCODE(op, rlo_dest, rlo_src, offset));
}
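
// Illustrative example (added comment, not from the original header): format 1
// shifts a low register by a 5-bit immediate, e.g.
//     asm_thumb_format_1(as, ASM_THUMB_FORMAT_1_LSL, ASM_THUMB_REG_R0, ASM_THUMB_REG_R1, 4);
// should assemble to "lsls r0, r1, #4".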

// FORMAT 2: add/subtract

#define ASM_THUMB_FORMAT_2_ADD (0x1800)
#define ASM_THUMB_FORMAT_2_SUB (0x1a00)
#define ASM_THUMB_FORMAT_2_REG_OPERAND (0x0000)
#define ASM_THUMB_FORMAT_2_IMM_OPERAND (0x0400)

#define ASM_THUMB_FORMAT_2_ENCODE(op, rlo_dest, rlo_src, src_b) \
    ((op) | ((src_b) << 6) | ((rlo_src) << 3) | (rlo_dest))

static inline void asm_thumb_format_2(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src, int src_b) {
    assert(rlo_dest < ASM_THUMB_REG_R8);
    assert(rlo_src < ASM_THUMB_REG_R8);
    asm_thumb_op16(as, ASM_THUMB_FORMAT_2_ENCODE(op, rlo_dest, rlo_src, src_b));
}

static inline void asm_thumb_add_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src_a, uint rlo_src_b)
    { asm_thumb_format_2(as, ASM_THUMB_FORMAT_2_ADD | ASM_THUMB_FORMAT_2_REG_OPERAND, rlo_dest, rlo_src_a, rlo_src_b); }
static inline void asm_thumb_add_rlo_rlo_i3(asm_thumb_t *as, uint rlo_dest, uint rlo_src_a, int i3_src)
    { asm_thumb_format_2(as, ASM_THUMB_FORMAT_2_ADD | ASM_THUMB_FORMAT_2_IMM_OPERAND, rlo_dest, rlo_src_a, i3_src); }
static inline void asm_thumb_sub_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src_a, uint rlo_src_b)
    { asm_thumb_format_2(as, ASM_THUMB_FORMAT_2_SUB | ASM_THUMB_FORMAT_2_REG_OPERAND, rlo_dest, rlo_src_a, rlo_src_b); }
static inline void asm_thumb_sub_rlo_rlo_i3(asm_thumb_t *as, uint rlo_dest, uint rlo_src_a, int i3_src)
    { asm_thumb_format_2(as, ASM_THUMB_FORMAT_2_SUB | ASM_THUMB_FORMAT_2_IMM_OPERAND, rlo_dest, rlo_src_a, i3_src); }
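
// Illustrative example (added comment, not from the original header): the third
// operand is either a low register or a 3-bit immediate, selected by the
// REG_OPERAND/IMM_OPERAND bit, e.g.
//     asm_thumb_add_rlo_rlo_i3(as, ASM_THUMB_REG_R0, ASM_THUMB_REG_R1, 4);
// should assemble to "adds r0, r1, #4".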

// FORMAT 3: move/compare/add/subtract immediate
// These instructions all do zero extension of the i8 value

#define ASM_THUMB_FORMAT_3_MOV (0x2000)
#define ASM_THUMB_FORMAT_3_CMP (0x2800)
#define ASM_THUMB_FORMAT_3_ADD (0x3000)
#define ASM_THUMB_FORMAT_3_SUB (0x3800)

#define ASM_THUMB_FORMAT_3_ENCODE(op, rlo, i8) ((op) | ((rlo) << 8) | (i8))

static inline void asm_thumb_format_3(asm_thumb_t *as, uint op, uint rlo, int i8) {
    assert(rlo < ASM_THUMB_REG_R8);
    asm_thumb_op16(as, ASM_THUMB_FORMAT_3_ENCODE(op, rlo, i8));
}

static inline void asm_thumb_mov_rlo_i8(asm_thumb_t *as, uint rlo, int i8) { asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_MOV, rlo, i8); }
static inline void asm_thumb_cmp_rlo_i8(asm_thumb_t *as, uint rlo, int i8) { asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_CMP, rlo, i8); }
static inline void asm_thumb_add_rlo_i8(asm_thumb_t *as, uint rlo, int i8) { asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_ADD, rlo, i8); }
static inline void asm_thumb_sub_rlo_i8(asm_thumb_t *as, uint rlo, int i8) { asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_SUB, rlo, i8); }
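
// Illustrative example (added comment, not from the original header): because
// the i8 value is zero-extended, only constants 0..255 fit here; for instance
//     asm_thumb_mov_rlo_i8(as, ASM_THUMB_REG_R2, 42);
// should assemble to "movs r2, #42".  Larger or negative constants need
// movw/movt or the asm_thumb_mov_reg_i32* convenience helpers declared below.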

// FORMAT 4: ALU operations

#define ASM_THUMB_FORMAT_4_AND (0x4000)
#define ASM_THUMB_FORMAT_4_EOR (0x4040)
#define ASM_THUMB_FORMAT_4_LSL (0x4080)
#define ASM_THUMB_FORMAT_4_LSR (0x40c0)
#define ASM_THUMB_FORMAT_4_ASR (0x4100)
#define ASM_THUMB_FORMAT_4_ADC (0x4140)
#define ASM_THUMB_FORMAT_4_SBC (0x4180)
#define ASM_THUMB_FORMAT_4_ROR (0x41c0)
#define ASM_THUMB_FORMAT_4_TST (0x4200)
#define ASM_THUMB_FORMAT_4_NEG (0x4240)
#define ASM_THUMB_FORMAT_4_CMP (0x4280)
#define ASM_THUMB_FORMAT_4_CMN (0x42c0)
#define ASM_THUMB_FORMAT_4_ORR (0x4300)
#define ASM_THUMB_FORMAT_4_MUL (0x4340)
#define ASM_THUMB_FORMAT_4_BIC (0x4380)
#define ASM_THUMB_FORMAT_4_MVN (0x43c0)

void asm_thumb_format_4(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src);

static inline void asm_thumb_cmp_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src) { asm_thumb_format_4(as, ASM_THUMB_FORMAT_4_CMP, rlo_dest, rlo_src); }
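
// Illustrative example (added comment, not from the original header; exact
// encoding lives in asm_thumb_format_4 in asmthumb.c): format 4 instructions
// are two-operand, with the first register as both destination and first
// source, so a call such as
//     asm_thumb_format_4(as, ASM_THUMB_FORMAT_4_MUL, ASM_THUMB_REG_R0, ASM_THUMB_REG_R1);
// should assemble to "muls r0, r1" (i.e. r0 = r0 * r1).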

// FORMAT 9: load/store with immediate offset
// For word transfers the offset must be aligned, and >>2

// FORMAT 10: load/store halfword
// The offset must be aligned, and >>1
// The load is zero extended into the register

#define ASM_THUMB_FORMAT_9_STR (0x6000)
#define ASM_THUMB_FORMAT_9_LDR (0x6800)
#define ASM_THUMB_FORMAT_9_WORD_TRANSFER (0x0000)
#define ASM_THUMB_FORMAT_9_BYTE_TRANSFER (0x1000)

#define ASM_THUMB_FORMAT_10_STRH (0x8000)
#define ASM_THUMB_FORMAT_10_LDRH (0x8800)

#define ASM_THUMB_FORMAT_9_10_ENCODE(op, rlo_dest, rlo_base, offset) \
    ((op) | (((offset) << 6) & 0x07c0) | ((rlo_base) << 3) | (rlo_dest))

static inline void asm_thumb_format_9_10(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_base, uint offset)
    { asm_thumb_op16(as, ASM_THUMB_FORMAT_9_10_ENCODE(op, rlo_dest, rlo_base, offset)); }

static inline void asm_thumb_str_rlo_rlo_i5(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint word_offset)
    { asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_9_STR | ASM_THUMB_FORMAT_9_WORD_TRANSFER, rlo_src, rlo_base, word_offset); }
static inline void asm_thumb_strb_rlo_rlo_i5(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint byte_offset)
    { asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_9_STR | ASM_THUMB_FORMAT_9_BYTE_TRANSFER, rlo_src, rlo_base, byte_offset); }
static inline void asm_thumb_strh_rlo_rlo_i5(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint byte_offset)
    { asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_10_STRH, rlo_src, rlo_base, byte_offset); }
static inline void asm_thumb_ldr_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint word_offset)
    { asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_9_LDR | ASM_THUMB_FORMAT_9_WORD_TRANSFER, rlo_dest, rlo_base, word_offset); }
static inline void asm_thumb_ldrb_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint byte_offset)
    { asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_9_LDR | ASM_THUMB_FORMAT_9_BYTE_TRANSFER, rlo_dest, rlo_base, byte_offset); }
static inline void asm_thumb_ldrh_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint byte_offset)
    { asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_10_LDRH, rlo_dest, rlo_base, byte_offset); }
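
// Illustrative example (added comment, not from the original header): per the
// comments above, the 5-bit offset for word transfers is given in words, not
// bytes, so loading a 32-bit value from [r1 + 8] would be written as
//     asm_thumb_ldr_rlo_rlo_i5(as, ASM_THUMB_REG_R0, ASM_THUMB_REG_R1, 8 >> 2);
// which should assemble to "ldr r0, [r1, #8]".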

// TODO convert these to above format style

#define ASM_THUMB_OP_MOVW (0xf240)
#define ASM_THUMB_OP_MOVT (0xf2c0)

void asm_thumb_mov_reg_reg(asm_thumb_t *as, uint reg_dest, uint reg_src);
void asm_thumb_mov_reg_i16(asm_thumb_t *as, uint mov_op, uint reg_dest, int i16_src);
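
// Illustrative note (added comment, not from the original header): movw writes
// a 16-bit immediate into the low half of a register (clearing the top half)
// and movt writes the top half, so a full 32-bit constant can be built with a
// pair of calls, roughly:
//     asm_thumb_mov_reg_i16(as, ASM_THUMB_OP_MOVW, ASM_THUMB_REG_R0, value & 0xffff);
//     asm_thumb_mov_reg_i16(as, ASM_THUMB_OP_MOVT, ASM_THUMB_REG_R0, value >> 16);
// The asm_thumb_mov_reg_i32* helpers declared below are the convenience entry
// points for loading full 32-bit values.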

// these return true if the destination is in range, false otherwise
bool asm_thumb_b_n_label(asm_thumb_t *as, uint label);
bool asm_thumb_bcc_nw_label(asm_thumb_t *as, int cond, uint label, bool wide);
bool asm_thumb_bl_label(asm_thumb_t *as, uint label);

void asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32_src); // convenience
void asm_thumb_mov_reg_i32_optimised(asm_thumb_t *as, uint reg_dest, int i32_src); // convenience
void asm_thumb_mov_reg_i32_aligned(asm_thumb_t *as, uint reg_dest, int i32); // convenience
void asm_thumb_mov_local_reg(asm_thumb_t *as, int local_num_dest, uint rlo_src); // convenience
void asm_thumb_mov_reg_local(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience
void asm_thumb_mov_reg_local_addr(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience
void asm_thumb_b_label(asm_thumb_t *as, uint label); // convenience: picks narrow or wide branch
void asm_thumb_bcc_label(asm_thumb_t *as, int cc, uint label); // convenience: picks narrow or wide branch
void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp); // convenience

#if GENERIC_ASM_API

// The following macros provide a (mostly) arch-independent API to
// generate native code, and are used by the native emitter.

#define ASM_WORD_SIZE (4)

#define REG_RET ASM_THUMB_REG_R0
#define REG_ARG_1 ASM_THUMB_REG_R0
#define REG_ARG_2 ASM_THUMB_REG_R1
#define REG_ARG_3 ASM_THUMB_REG_R2
#define REG_ARG_4 ASM_THUMB_REG_R3
// rest of args go on stack

#define REG_TEMP0 ASM_THUMB_REG_R0
#define REG_TEMP1 ASM_THUMB_REG_R1
#define REG_TEMP2 ASM_THUMB_REG_R2

#define REG_LOCAL_1 ASM_THUMB_REG_R4
#define REG_LOCAL_2 ASM_THUMB_REG_R5
#define REG_LOCAL_3 ASM_THUMB_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_T asm_thumb_t
#define ASM_END_PASS asm_thumb_end_pass
#define ASM_ENTRY asm_thumb_entry
#define ASM_EXIT asm_thumb_exit

#define ASM_JUMP asm_thumb_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_thumb_bl_ind(as, ptr, idx, ASM_THUMB_REG_R3)

#define ASM_MOV_LOCAL_REG(as, local_num, reg) asm_thumb_mov_local_reg((as), (local_num), (reg))
#define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_thumb_mov_reg_i32_optimised((as), (reg_dest), (imm))
#define ASM_MOV_REG_ALIGNED_IMM(as, reg_dest, imm) asm_thumb_mov_reg_i32_aligned((as), (reg_dest), (imm))
#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_thumb_mov_reg_local((as), (reg_dest), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_thumb_mov_reg_local_addr((as), (reg_dest), (local_num))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSL, (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ASR, (reg_dest), (reg_shift))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ORR, (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_EOR, (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_AND, (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_thumb_add_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_thumb_sub_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_MUL, (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrb_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrh_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_thumb_strb_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_thumb_strh_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)

#endif // GENERIC_ASM_API

#endif // MICROPY_INCLUDED_PY_ASMTHUMB_H