xref: /openbmc/qemu/tcg/tci.c (revision 9cbb6362)
1 /*
2  * Tiny Code Interpreter for QEMU
3  *
4  * Copyright (c) 2009, 2011, 2016 Stefan Weil
5  *
6  * This program is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation, either version 2 of the License, or
9  * (at your option) any later version.
10  *
11  * This program is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  * GNU General Public License for more details.
15  *
16  * You should have received a copy of the GNU General Public License
17  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
18  */
19 
20 #include "qemu/osdep.h"
21 
22 /* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
23  * Without assertions, the interpreter runs much faster. */
24 #if defined(CONFIG_DEBUG_TCG)
25 # define tci_assert(cond) assert(cond)
26 #else
27 # define tci_assert(cond) ((void)0)
28 #endif
29 
30 #include "qemu-common.h"
31 #include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
32 #include "exec/cpu_ldst.h"
33 #include "tcg-op.h"
34 
35 /* Marker for missing code. */
36 #define TODO() \
37     do { \
38         fprintf(stderr, "TODO %s:%u: %s()\n", \
39                 __FILE__, __LINE__, __func__); \
40         tcg_abort(); \
41     } while (0)
42 
43 #if MAX_OPC_PARAM_IARGS != 6
44 # error Fix needed, number of supported input arguments changed!
45 #endif
46 #if TCG_TARGET_REG_BITS == 32
47 typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
48                                     tcg_target_ulong, tcg_target_ulong,
49                                     tcg_target_ulong, tcg_target_ulong,
50                                     tcg_target_ulong, tcg_target_ulong,
51                                     tcg_target_ulong, tcg_target_ulong,
52                                     tcg_target_ulong, tcg_target_ulong);
53 #else
54 typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
55                                     tcg_target_ulong, tcg_target_ulong,
56                                     tcg_target_ulong, tcg_target_ulong);
57 #endif
58 
59 static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
60 {
61     tci_assert(index < TCG_TARGET_NB_REGS);
62     return regs[index];
63 }
64 
65 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
66 static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
67 {
68     return (int8_t)tci_read_reg(regs, index);
69 }
70 #endif
71 
72 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
73 static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
74 {
75     return (int16_t)tci_read_reg(regs, index);
76 }
77 #endif
78 
79 #if TCG_TARGET_REG_BITS == 64
80 static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
81 {
82     return (int32_t)tci_read_reg(regs, index);
83 }
84 #endif
85 
86 static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
87 {
88     return (uint8_t)tci_read_reg(regs, index);
89 }
90 
91 static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
92 {
93     return (uint16_t)tci_read_reg(regs, index);
94 }
95 
96 static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
97 {
98     return (uint32_t)tci_read_reg(regs, index);
99 }
100 
101 #if TCG_TARGET_REG_BITS == 64
102 static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
103 {
104     return tci_read_reg(regs, index);
105 }
106 #endif
107 
108 static void
109 tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
110 {
111     tci_assert(index < TCG_TARGET_NB_REGS);
112     tci_assert(index != TCG_AREG0);
113     tci_assert(index != TCG_REG_CALL_STACK);
114     regs[index] = value;
115 }
116 
117 #if TCG_TARGET_REG_BITS == 64
118 static void
119 tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
120 {
121     tci_write_reg(regs, index, value);
122 }
123 #endif
124 
125 static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
126 {
127     tci_write_reg(regs, index, value);
128 }
129 
130 static void
131 tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
132 {
133     tci_write_reg(regs, index, value);
134 }
135 
136 #if TCG_TARGET_REG_BITS == 32
137 static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
138                             uint32_t low_index, uint64_t value)
139 {
140     tci_write_reg(regs, low_index, value);
141     tci_write_reg(regs, high_index, value >> 32);
142 }
143 #elif TCG_TARGET_REG_BITS == 64
144 static void
145 tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
146 {
147     tci_write_reg(regs, index, value);
148 }
149 #endif
150 
151 #if TCG_TARGET_REG_BITS == 32
152 /* Create a 64 bit value from two 32 bit values. */
153 static uint64_t tci_uint64(uint32_t high, uint32_t low)
154 {
155     return ((uint64_t)high << 32) + low;
156 }
157 #endif
158 
159 /* Read constant (native size) from bytecode. */
160 static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
161 {
162     tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
163     *tb_ptr += sizeof(value);
164     return value;
165 }
166 
167 /* Read unsigned constant (32 bit) from bytecode. */
168 static uint32_t tci_read_i32(uint8_t **tb_ptr)
169 {
170     uint32_t value = *(uint32_t *)(*tb_ptr);
171     *tb_ptr += sizeof(value);
172     return value;
173 }
174 
175 /* Read signed constant (32 bit) from bytecode. */
176 static int32_t tci_read_s32(uint8_t **tb_ptr)
177 {
178     int32_t value = *(int32_t *)(*tb_ptr);
179     *tb_ptr += sizeof(value);
180     return value;
181 }
182 
183 #if TCG_TARGET_REG_BITS == 64
184 /* Read constant (64 bit) from bytecode. */
185 static uint64_t tci_read_i64(uint8_t **tb_ptr)
186 {
187     uint64_t value = *(uint64_t *)(*tb_ptr);
188     *tb_ptr += sizeof(value);
189     return value;
190 }
191 #endif
192 
193 /* Read indexed register (native size) from bytecode. */
194 static tcg_target_ulong
195 tci_read_r(const tcg_target_ulong *regs, uint8_t **tb_ptr)
196 {
197     tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
198     *tb_ptr += 1;
199     return value;
200 }
201 
202 /* Read indexed register (8 bit) from bytecode. */
203 static uint8_t tci_read_r8(const tcg_target_ulong *regs, uint8_t **tb_ptr)
204 {
205     uint8_t value = tci_read_reg8(regs, **tb_ptr);
206     *tb_ptr += 1;
207     return value;
208 }
209 
210 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
211 /* Read indexed register (8 bit signed) from bytecode. */
212 static int8_t tci_read_r8s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
213 {
214     int8_t value = tci_read_reg8s(regs, **tb_ptr);
215     *tb_ptr += 1;
216     return value;
217 }
218 #endif
219 
220 /* Read indexed register (16 bit) from bytecode. */
221 static uint16_t tci_read_r16(const tcg_target_ulong *regs, uint8_t **tb_ptr)
222 {
223     uint16_t value = tci_read_reg16(regs, **tb_ptr);
224     *tb_ptr += 1;
225     return value;
226 }
227 
228 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
229 /* Read indexed register (16 bit signed) from bytecode. */
230 static int16_t tci_read_r16s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
231 {
232     int16_t value = tci_read_reg16s(regs, **tb_ptr);
233     *tb_ptr += 1;
234     return value;
235 }
236 #endif
237 
238 /* Read indexed register (32 bit) from bytecode. */
239 static uint32_t tci_read_r32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
240 {
241     uint32_t value = tci_read_reg32(regs, **tb_ptr);
242     *tb_ptr += 1;
243     return value;
244 }
245 
246 #if TCG_TARGET_REG_BITS == 32
247 /* Read two indexed registers (2 * 32 bit) from bytecode. */
248 static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
249 {
250     uint32_t low = tci_read_r32(regs, tb_ptr);
251     return tci_uint64(tci_read_r32(regs, tb_ptr), low);
252 }
253 #elif TCG_TARGET_REG_BITS == 64
254 /* Read indexed register (32 bit signed) from bytecode. */
255 static int32_t tci_read_r32s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
256 {
257     int32_t value = tci_read_reg32s(regs, **tb_ptr);
258     *tb_ptr += 1;
259     return value;
260 }
261 
262 /* Read indexed register (64 bit) from bytecode. */
263 static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
264 {
265     uint64_t value = tci_read_reg64(regs, **tb_ptr);
266     *tb_ptr += 1;
267     return value;
268 }
269 #endif
270 
271 /* Read indexed register(s) with target address from bytecode. */
272 static target_ulong
273 tci_read_ulong(const tcg_target_ulong *regs, uint8_t **tb_ptr)
274 {
275     target_ulong taddr = tci_read_r(regs, tb_ptr);
276 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
277     taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
278 #endif
279     return taddr;
280 }
281 
282 /* Read indexed register or constant (native size) from bytecode. */
283 static tcg_target_ulong
284 tci_read_ri(const tcg_target_ulong *regs, uint8_t **tb_ptr)
285 {
286     tcg_target_ulong value;
287     TCGReg r = **tb_ptr;
288     *tb_ptr += 1;
289     if (r == TCG_CONST) {
290         value = tci_read_i(tb_ptr);
291     } else {
292         value = tci_read_reg(regs, r);
293     }
294     return value;
295 }
296 
297 /* Read indexed register or constant (32 bit) from bytecode. */
298 static uint32_t tci_read_ri32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
299 {
300     uint32_t value;
301     TCGReg r = **tb_ptr;
302     *tb_ptr += 1;
303     if (r == TCG_CONST) {
304         value = tci_read_i32(tb_ptr);
305     } else {
306         value = tci_read_reg32(regs, r);
307     }
308     return value;
309 }
310 
311 #if TCG_TARGET_REG_BITS == 32
312 /* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
313 static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
314 {
315     uint32_t low = tci_read_ri32(regs, tb_ptr);
316     return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
317 }
318 #elif TCG_TARGET_REG_BITS == 64
319 /* Read indexed register or constant (64 bit) from bytecode. */
320 static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
321 {
322     uint64_t value;
323     TCGReg r = **tb_ptr;
324     *tb_ptr += 1;
325     if (r == TCG_CONST) {
326         value = tci_read_i64(tb_ptr);
327     } else {
328         value = tci_read_reg64(regs, r);
329     }
330     return value;
331 }
332 #endif
333 
334 static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
335 {
336     tcg_target_ulong label = tci_read_i(tb_ptr);
337     tci_assert(label != 0);
338     return label;
339 }
340 
341 static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
342 {
343     bool result = false;
344     int32_t i0 = u0;
345     int32_t i1 = u1;
346     switch (condition) {
347     case TCG_COND_EQ:
348         result = (u0 == u1);
349         break;
350     case TCG_COND_NE:
351         result = (u0 != u1);
352         break;
353     case TCG_COND_LT:
354         result = (i0 < i1);
355         break;
356     case TCG_COND_GE:
357         result = (i0 >= i1);
358         break;
359     case TCG_COND_LE:
360         result = (i0 <= i1);
361         break;
362     case TCG_COND_GT:
363         result = (i0 > i1);
364         break;
365     case TCG_COND_LTU:
366         result = (u0 < u1);
367         break;
368     case TCG_COND_GEU:
369         result = (u0 >= u1);
370         break;
371     case TCG_COND_LEU:
372         result = (u0 <= u1);
373         break;
374     case TCG_COND_GTU:
375         result = (u0 > u1);
376         break;
377     default:
378         TODO();
379     }
380     return result;
381 }
382 
383 static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
384 {
385     bool result = false;
386     int64_t i0 = u0;
387     int64_t i1 = u1;
388     switch (condition) {
389     case TCG_COND_EQ:
390         result = (u0 == u1);
391         break;
392     case TCG_COND_NE:
393         result = (u0 != u1);
394         break;
395     case TCG_COND_LT:
396         result = (i0 < i1);
397         break;
398     case TCG_COND_GE:
399         result = (i0 >= i1);
400         break;
401     case TCG_COND_LE:
402         result = (i0 <= i1);
403         break;
404     case TCG_COND_GT:
405         result = (i0 > i1);
406         break;
407     case TCG_COND_LTU:
408         result = (u0 < u1);
409         break;
410     case TCG_COND_GEU:
411         result = (u0 >= u1);
412         break;
413     case TCG_COND_LEU:
414         result = (u0 <= u1);
415         break;
416     case TCG_COND_GTU:
417         result = (u0 > u1);
418         break;
419     default:
420         TODO();
421     }
422     return result;
423 }
424 
425 #ifdef CONFIG_SOFTMMU
426 # define qemu_ld_ub \
427     helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
428 # define qemu_ld_leuw \
429     helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
430 # define qemu_ld_leul \
431     helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
432 # define qemu_ld_leq \
433     helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
434 # define qemu_ld_beuw \
435     helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
436 # define qemu_ld_beul \
437     helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
438 # define qemu_ld_beq \
439     helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
440 # define qemu_st_b(X) \
441     helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
442 # define qemu_st_lew(X) \
443     helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
444 # define qemu_st_lel(X) \
445     helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
446 # define qemu_st_leq(X) \
447     helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
448 # define qemu_st_bew(X) \
449     helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
450 # define qemu_st_bel(X) \
451     helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
452 # define qemu_st_beq(X) \
453     helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
454 #else
455 # define qemu_ld_ub      ldub_p(g2h(taddr))
456 # define qemu_ld_leuw    lduw_le_p(g2h(taddr))
457 # define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
458 # define qemu_ld_leq     ldq_le_p(g2h(taddr))
459 # define qemu_ld_beuw    lduw_be_p(g2h(taddr))
460 # define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
461 # define qemu_ld_beq     ldq_be_p(g2h(taddr))
462 # define qemu_st_b(X)    stb_p(g2h(taddr), X)
463 # define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
464 # define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
465 # define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
466 # define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
467 # define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
468 # define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
469 #endif
470 
471 /* Interpret pseudo code in tb. */
472 uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
473 {
474     tcg_target_ulong regs[TCG_TARGET_NB_REGS];
475     long tcg_temps[CPU_TEMP_BUF_NLONGS];
476     uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
477     uintptr_t ret = 0;
478 
479     regs[TCG_AREG0] = (tcg_target_ulong)env;
480     regs[TCG_REG_CALL_STACK] = sp_value;
481     tci_assert(tb_ptr);
482 
483     for (;;) {
484         TCGOpcode opc = tb_ptr[0];
485 #if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
486         uint8_t op_size = tb_ptr[1];
487         uint8_t *old_code_ptr = tb_ptr;
488 #endif
489         tcg_target_ulong t0;
490         tcg_target_ulong t1;
491         tcg_target_ulong t2;
492         tcg_target_ulong label;
493         TCGCond condition;
494         target_ulong taddr;
495         uint8_t tmp8;
496         uint16_t tmp16;
497         uint32_t tmp32;
498         uint64_t tmp64;
499 #if TCG_TARGET_REG_BITS == 32
500         uint64_t v64;
501 #endif
502         TCGMemOpIdx oi;
503 
504 #if defined(GETPC)
505         tci_tb_ptr = (uintptr_t)tb_ptr;
506 #endif
507 
508         /* Skip opcode and size entry. */
509         tb_ptr += 2;
510 
511         switch (opc) {
512         case INDEX_op_call:
513             t0 = tci_read_ri(regs, &tb_ptr);
514 #if TCG_TARGET_REG_BITS == 32
515             tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
516                                           tci_read_reg(regs, TCG_REG_R1),
517                                           tci_read_reg(regs, TCG_REG_R2),
518                                           tci_read_reg(regs, TCG_REG_R3),
519                                           tci_read_reg(regs, TCG_REG_R5),
520                                           tci_read_reg(regs, TCG_REG_R6),
521                                           tci_read_reg(regs, TCG_REG_R7),
522                                           tci_read_reg(regs, TCG_REG_R8),
523                                           tci_read_reg(regs, TCG_REG_R9),
524                                           tci_read_reg(regs, TCG_REG_R10),
525                                           tci_read_reg(regs, TCG_REG_R11),
526                                           tci_read_reg(regs, TCG_REG_R12));
527             tci_write_reg(regs, TCG_REG_R0, tmp64);
528             tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
529 #else
530             tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
531                                           tci_read_reg(regs, TCG_REG_R1),
532                                           tci_read_reg(regs, TCG_REG_R2),
533                                           tci_read_reg(regs, TCG_REG_R3),
534                                           tci_read_reg(regs, TCG_REG_R5),
535                                           tci_read_reg(regs, TCG_REG_R6));
536             tci_write_reg(regs, TCG_REG_R0, tmp64);
537 #endif
538             break;
539         case INDEX_op_br:
540             label = tci_read_label(&tb_ptr);
541             tci_assert(tb_ptr == old_code_ptr + op_size);
542             tb_ptr = (uint8_t *)label;
543             continue;
544         case INDEX_op_setcond_i32:
545             t0 = *tb_ptr++;
546             t1 = tci_read_r32(regs, &tb_ptr);
547             t2 = tci_read_ri32(regs, &tb_ptr);
548             condition = *tb_ptr++;
549             tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
550             break;
551 #if TCG_TARGET_REG_BITS == 32
552         case INDEX_op_setcond2_i32:
553             t0 = *tb_ptr++;
554             tmp64 = tci_read_r64(regs, &tb_ptr);
555             v64 = tci_read_ri64(regs, &tb_ptr);
556             condition = *tb_ptr++;
557             tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
558             break;
559 #elif TCG_TARGET_REG_BITS == 64
560         case INDEX_op_setcond_i64:
561             t0 = *tb_ptr++;
562             t1 = tci_read_r64(regs, &tb_ptr);
563             t2 = tci_read_ri64(regs, &tb_ptr);
564             condition = *tb_ptr++;
565             tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
566             break;
567 #endif
568         case INDEX_op_mov_i32:
569             t0 = *tb_ptr++;
570             t1 = tci_read_r32(regs, &tb_ptr);
571             tci_write_reg32(regs, t0, t1);
572             break;
573         case INDEX_op_movi_i32:
574             t0 = *tb_ptr++;
575             t1 = tci_read_i32(&tb_ptr);
576             tci_write_reg32(regs, t0, t1);
577             break;
578 
579             /* Load/store operations (32 bit). */
580 
581         case INDEX_op_ld8u_i32:
582             t0 = *tb_ptr++;
583             t1 = tci_read_r(regs, &tb_ptr);
584             t2 = tci_read_s32(&tb_ptr);
585             tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
586             break;
587         case INDEX_op_ld8s_i32:
588         case INDEX_op_ld16u_i32:
589             TODO();
590             break;
591         case INDEX_op_ld16s_i32:
592             TODO();
593             break;
594         case INDEX_op_ld_i32:
595             t0 = *tb_ptr++;
596             t1 = tci_read_r(regs, &tb_ptr);
597             t2 = tci_read_s32(&tb_ptr);
598             tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
599             break;
600         case INDEX_op_st8_i32:
601             t0 = tci_read_r8(regs, &tb_ptr);
602             t1 = tci_read_r(regs, &tb_ptr);
603             t2 = tci_read_s32(&tb_ptr);
604             *(uint8_t *)(t1 + t2) = t0;
605             break;
606         case INDEX_op_st16_i32:
607             t0 = tci_read_r16(regs, &tb_ptr);
608             t1 = tci_read_r(regs, &tb_ptr);
609             t2 = tci_read_s32(&tb_ptr);
610             *(uint16_t *)(t1 + t2) = t0;
611             break;
612         case INDEX_op_st_i32:
613             t0 = tci_read_r32(regs, &tb_ptr);
614             t1 = tci_read_r(regs, &tb_ptr);
615             t2 = tci_read_s32(&tb_ptr);
616             tci_assert(t1 != sp_value || (int32_t)t2 < 0);
617             *(uint32_t *)(t1 + t2) = t0;
618             break;
619 
620             /* Arithmetic operations (32 bit). */
621 
622         case INDEX_op_add_i32:
623             t0 = *tb_ptr++;
624             t1 = tci_read_ri32(regs, &tb_ptr);
625             t2 = tci_read_ri32(regs, &tb_ptr);
626             tci_write_reg32(regs, t0, t1 + t2);
627             break;
628         case INDEX_op_sub_i32:
629             t0 = *tb_ptr++;
630             t1 = tci_read_ri32(regs, &tb_ptr);
631             t2 = tci_read_ri32(regs, &tb_ptr);
632             tci_write_reg32(regs, t0, t1 - t2);
633             break;
634         case INDEX_op_mul_i32:
635             t0 = *tb_ptr++;
636             t1 = tci_read_ri32(regs, &tb_ptr);
637             t2 = tci_read_ri32(regs, &tb_ptr);
638             tci_write_reg32(regs, t0, t1 * t2);
639             break;
640 #if TCG_TARGET_HAS_div_i32
641         case INDEX_op_div_i32:
642             t0 = *tb_ptr++;
643             t1 = tci_read_ri32(regs, &tb_ptr);
644             t2 = tci_read_ri32(regs, &tb_ptr);
645             tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
646             break;
647         case INDEX_op_divu_i32:
648             t0 = *tb_ptr++;
649             t1 = tci_read_ri32(regs, &tb_ptr);
650             t2 = tci_read_ri32(regs, &tb_ptr);
651             tci_write_reg32(regs, t0, t1 / t2);
652             break;
653         case INDEX_op_rem_i32:
654             t0 = *tb_ptr++;
655             t1 = tci_read_ri32(regs, &tb_ptr);
656             t2 = tci_read_ri32(regs, &tb_ptr);
657             tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
658             break;
659         case INDEX_op_remu_i32:
660             t0 = *tb_ptr++;
661             t1 = tci_read_ri32(regs, &tb_ptr);
662             t2 = tci_read_ri32(regs, &tb_ptr);
663             tci_write_reg32(regs, t0, t1 % t2);
664             break;
665 #elif TCG_TARGET_HAS_div2_i32
666         case INDEX_op_div2_i32:
667         case INDEX_op_divu2_i32:
668             TODO();
669             break;
670 #endif
671         case INDEX_op_and_i32:
672             t0 = *tb_ptr++;
673             t1 = tci_read_ri32(regs, &tb_ptr);
674             t2 = tci_read_ri32(regs, &tb_ptr);
675             tci_write_reg32(regs, t0, t1 & t2);
676             break;
677         case INDEX_op_or_i32:
678             t0 = *tb_ptr++;
679             t1 = tci_read_ri32(regs, &tb_ptr);
680             t2 = tci_read_ri32(regs, &tb_ptr);
681             tci_write_reg32(regs, t0, t1 | t2);
682             break;
683         case INDEX_op_xor_i32:
684             t0 = *tb_ptr++;
685             t1 = tci_read_ri32(regs, &tb_ptr);
686             t2 = tci_read_ri32(regs, &tb_ptr);
687             tci_write_reg32(regs, t0, t1 ^ t2);
688             break;
689 
690             /* Shift/rotate operations (32 bit). */
691 
692         case INDEX_op_shl_i32:
693             t0 = *tb_ptr++;
694             t1 = tci_read_ri32(regs, &tb_ptr);
695             t2 = tci_read_ri32(regs, &tb_ptr);
696             tci_write_reg32(regs, t0, t1 << (t2 & 31));
697             break;
698         case INDEX_op_shr_i32:
699             t0 = *tb_ptr++;
700             t1 = tci_read_ri32(regs, &tb_ptr);
701             t2 = tci_read_ri32(regs, &tb_ptr);
702             tci_write_reg32(regs, t0, t1 >> (t2 & 31));
703             break;
704         case INDEX_op_sar_i32:
705             t0 = *tb_ptr++;
706             t1 = tci_read_ri32(regs, &tb_ptr);
707             t2 = tci_read_ri32(regs, &tb_ptr);
708             tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
709             break;
710 #if TCG_TARGET_HAS_rot_i32
711         case INDEX_op_rotl_i32:
712             t0 = *tb_ptr++;
713             t1 = tci_read_ri32(regs, &tb_ptr);
714             t2 = tci_read_ri32(regs, &tb_ptr);
715             tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
716             break;
717         case INDEX_op_rotr_i32:
718             t0 = *tb_ptr++;
719             t1 = tci_read_ri32(regs, &tb_ptr);
720             t2 = tci_read_ri32(regs, &tb_ptr);
721             tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
722             break;
723 #endif
724 #if TCG_TARGET_HAS_deposit_i32
725         case INDEX_op_deposit_i32:
726             t0 = *tb_ptr++;
727             t1 = tci_read_r32(regs, &tb_ptr);
728             t2 = tci_read_r32(regs, &tb_ptr);
729             tmp16 = *tb_ptr++;
730             tmp8 = *tb_ptr++;
731             tmp32 = (((1 << tmp8) - 1) << tmp16);
732             tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
733             break;
734 #endif
735         case INDEX_op_brcond_i32:
736             t0 = tci_read_r32(regs, &tb_ptr);
737             t1 = tci_read_ri32(regs, &tb_ptr);
738             condition = *tb_ptr++;
739             label = tci_read_label(&tb_ptr);
740             if (tci_compare32(t0, t1, condition)) {
741                 tci_assert(tb_ptr == old_code_ptr + op_size);
742                 tb_ptr = (uint8_t *)label;
743                 continue;
744             }
745             break;
746 #if TCG_TARGET_REG_BITS == 32
747         case INDEX_op_add2_i32:
748             t0 = *tb_ptr++;
749             t1 = *tb_ptr++;
750             tmp64 = tci_read_r64(regs, &tb_ptr);
751             tmp64 += tci_read_r64(regs, &tb_ptr);
752             tci_write_reg64(regs, t1, t0, tmp64);
753             break;
754         case INDEX_op_sub2_i32:
755             t0 = *tb_ptr++;
756             t1 = *tb_ptr++;
757             tmp64 = tci_read_r64(regs, &tb_ptr);
758             tmp64 -= tci_read_r64(regs, &tb_ptr);
759             tci_write_reg64(regs, t1, t0, tmp64);
760             break;
761         case INDEX_op_brcond2_i32:
762             tmp64 = tci_read_r64(regs, &tb_ptr);
763             v64 = tci_read_ri64(regs, &tb_ptr);
764             condition = *tb_ptr++;
765             label = tci_read_label(&tb_ptr);
766             if (tci_compare64(tmp64, v64, condition)) {
767                 tci_assert(tb_ptr == old_code_ptr + op_size);
768                 tb_ptr = (uint8_t *)label;
769                 continue;
770             }
771             break;
772         case INDEX_op_mulu2_i32:
773             t0 = *tb_ptr++;
774             t1 = *tb_ptr++;
775             t2 = tci_read_r32(regs, &tb_ptr);
776             tmp64 = tci_read_r32(regs, &tb_ptr);
777             tci_write_reg64(regs, t1, t0, t2 * tmp64);
778             break;
779 #endif /* TCG_TARGET_REG_BITS == 32 */
780 #if TCG_TARGET_HAS_ext8s_i32
781         case INDEX_op_ext8s_i32:
782             t0 = *tb_ptr++;
783             t1 = tci_read_r8s(regs, &tb_ptr);
784             tci_write_reg32(regs, t0, t1);
785             break;
786 #endif
787 #if TCG_TARGET_HAS_ext16s_i32
788         case INDEX_op_ext16s_i32:
789             t0 = *tb_ptr++;
790             t1 = tci_read_r16s(regs, &tb_ptr);
791             tci_write_reg32(regs, t0, t1);
792             break;
793 #endif
794 #if TCG_TARGET_HAS_ext8u_i32
795         case INDEX_op_ext8u_i32:
796             t0 = *tb_ptr++;
797             t1 = tci_read_r8(regs, &tb_ptr);
798             tci_write_reg32(regs, t0, t1);
799             break;
800 #endif
801 #if TCG_TARGET_HAS_ext16u_i32
802         case INDEX_op_ext16u_i32:
803             t0 = *tb_ptr++;
804             t1 = tci_read_r16(regs, &tb_ptr);
805             tci_write_reg32(regs, t0, t1);
806             break;
807 #endif
808 #if TCG_TARGET_HAS_bswap16_i32
809         case INDEX_op_bswap16_i32:
810             t0 = *tb_ptr++;
811             t1 = tci_read_r16(regs, &tb_ptr);
812             tci_write_reg32(regs, t0, bswap16(t1));
813             break;
814 #endif
815 #if TCG_TARGET_HAS_bswap32_i32
816         case INDEX_op_bswap32_i32:
817             t0 = *tb_ptr++;
818             t1 = tci_read_r32(regs, &tb_ptr);
819             tci_write_reg32(regs, t0, bswap32(t1));
820             break;
821 #endif
822 #if TCG_TARGET_HAS_not_i32
823         case INDEX_op_not_i32:
824             t0 = *tb_ptr++;
825             t1 = tci_read_r32(regs, &tb_ptr);
826             tci_write_reg32(regs, t0, ~t1);
827             break;
828 #endif
829 #if TCG_TARGET_HAS_neg_i32
830         case INDEX_op_neg_i32:
831             t0 = *tb_ptr++;
832             t1 = tci_read_r32(regs, &tb_ptr);
833             tci_write_reg32(regs, t0, -t1);
834             break;
835 #endif
836 #if TCG_TARGET_REG_BITS == 64
837         case INDEX_op_mov_i64:
838             t0 = *tb_ptr++;
839             t1 = tci_read_r64(regs, &tb_ptr);
840             tci_write_reg64(regs, t0, t1);
841             break;
842         case INDEX_op_movi_i64:
843             t0 = *tb_ptr++;
844             t1 = tci_read_i64(&tb_ptr);
845             tci_write_reg64(regs, t0, t1);
846             break;
847 
848             /* Load/store operations (64 bit). */
849 
850         case INDEX_op_ld8u_i64:
851             t0 = *tb_ptr++;
852             t1 = tci_read_r(regs, &tb_ptr);
853             t2 = tci_read_s32(&tb_ptr);
854             tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
855             break;
856         case INDEX_op_ld8s_i64:
857         case INDEX_op_ld16u_i64:
858         case INDEX_op_ld16s_i64:
859             TODO();
860             break;
861         case INDEX_op_ld32u_i64:
862             t0 = *tb_ptr++;
863             t1 = tci_read_r(regs, &tb_ptr);
864             t2 = tci_read_s32(&tb_ptr);
865             tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
866             break;
867         case INDEX_op_ld32s_i64:
868             t0 = *tb_ptr++;
869             t1 = tci_read_r(regs, &tb_ptr);
870             t2 = tci_read_s32(&tb_ptr);
871             tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
872             break;
873         case INDEX_op_ld_i64:
874             t0 = *tb_ptr++;
875             t1 = tci_read_r(regs, &tb_ptr);
876             t2 = tci_read_s32(&tb_ptr);
877             tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
878             break;
879         case INDEX_op_st8_i64:
880             t0 = tci_read_r8(regs, &tb_ptr);
881             t1 = tci_read_r(regs, &tb_ptr);
882             t2 = tci_read_s32(&tb_ptr);
883             *(uint8_t *)(t1 + t2) = t0;
884             break;
885         case INDEX_op_st16_i64:
886             t0 = tci_read_r16(regs, &tb_ptr);
887             t1 = tci_read_r(regs, &tb_ptr);
888             t2 = tci_read_s32(&tb_ptr);
889             *(uint16_t *)(t1 + t2) = t0;
890             break;
891         case INDEX_op_st32_i64:
892             t0 = tci_read_r32(regs, &tb_ptr);
893             t1 = tci_read_r(regs, &tb_ptr);
894             t2 = tci_read_s32(&tb_ptr);
895             *(uint32_t *)(t1 + t2) = t0;
896             break;
897         case INDEX_op_st_i64:
898             t0 = tci_read_r64(regs, &tb_ptr);
899             t1 = tci_read_r(regs, &tb_ptr);
900             t2 = tci_read_s32(&tb_ptr);
901             tci_assert(t1 != sp_value || (int32_t)t2 < 0);
902             *(uint64_t *)(t1 + t2) = t0;
903             break;
904 
905             /* Arithmetic operations (64 bit). */
906 
907         case INDEX_op_add_i64:
908             t0 = *tb_ptr++;
909             t1 = tci_read_ri64(regs, &tb_ptr);
910             t2 = tci_read_ri64(regs, &tb_ptr);
911             tci_write_reg64(regs, t0, t1 + t2);
912             break;
913         case INDEX_op_sub_i64:
914             t0 = *tb_ptr++;
915             t1 = tci_read_ri64(regs, &tb_ptr);
916             t2 = tci_read_ri64(regs, &tb_ptr);
917             tci_write_reg64(regs, t0, t1 - t2);
918             break;
919         case INDEX_op_mul_i64:
920             t0 = *tb_ptr++;
921             t1 = tci_read_ri64(regs, &tb_ptr);
922             t2 = tci_read_ri64(regs, &tb_ptr);
923             tci_write_reg64(regs, t0, t1 * t2);
924             break;
925 #if TCG_TARGET_HAS_div_i64
926         case INDEX_op_div_i64:
927         case INDEX_op_divu_i64:
928         case INDEX_op_rem_i64:
929         case INDEX_op_remu_i64:
930             TODO();
931             break;
932 #elif TCG_TARGET_HAS_div2_i64
933         case INDEX_op_div2_i64:
934         case INDEX_op_divu2_i64:
935             TODO();
936             break;
937 #endif
938         case INDEX_op_and_i64:
939             t0 = *tb_ptr++;
940             t1 = tci_read_ri64(regs, &tb_ptr);
941             t2 = tci_read_ri64(regs, &tb_ptr);
942             tci_write_reg64(regs, t0, t1 & t2);
943             break;
944         case INDEX_op_or_i64:
945             t0 = *tb_ptr++;
946             t1 = tci_read_ri64(regs, &tb_ptr);
947             t2 = tci_read_ri64(regs, &tb_ptr);
948             tci_write_reg64(regs, t0, t1 | t2);
949             break;
950         case INDEX_op_xor_i64:
951             t0 = *tb_ptr++;
952             t1 = tci_read_ri64(regs, &tb_ptr);
953             t2 = tci_read_ri64(regs, &tb_ptr);
954             tci_write_reg64(regs, t0, t1 ^ t2);
955             break;
956 
957             /* Shift/rotate operations (64 bit). */
958 
959         case INDEX_op_shl_i64:
960             t0 = *tb_ptr++;
961             t1 = tci_read_ri64(regs, &tb_ptr);
962             t2 = tci_read_ri64(regs, &tb_ptr);
963             tci_write_reg64(regs, t0, t1 << (t2 & 63));
964             break;
965         case INDEX_op_shr_i64:
966             t0 = *tb_ptr++;
967             t1 = tci_read_ri64(regs, &tb_ptr);
968             t2 = tci_read_ri64(regs, &tb_ptr);
969             tci_write_reg64(regs, t0, t1 >> (t2 & 63));
970             break;
971         case INDEX_op_sar_i64:
972             t0 = *tb_ptr++;
973             t1 = tci_read_ri64(regs, &tb_ptr);
974             t2 = tci_read_ri64(regs, &tb_ptr);
975             tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
976             break;
977 #if TCG_TARGET_HAS_rot_i64
978         case INDEX_op_rotl_i64:
979             t0 = *tb_ptr++;
980             t1 = tci_read_ri64(regs, &tb_ptr);
981             t2 = tci_read_ri64(regs, &tb_ptr);
982             tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
983             break;
984         case INDEX_op_rotr_i64:
985             t0 = *tb_ptr++;
986             t1 = tci_read_ri64(regs, &tb_ptr);
987             t2 = tci_read_ri64(regs, &tb_ptr);
988             tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
989             break;
990 #endif
991 #if TCG_TARGET_HAS_deposit_i64
992         case INDEX_op_deposit_i64:
993             t0 = *tb_ptr++;
994             t1 = tci_read_r64(regs, &tb_ptr);
995             t2 = tci_read_r64(regs, &tb_ptr);
996             tmp16 = *tb_ptr++;
997             tmp8 = *tb_ptr++;
998             tmp64 = (((1ULL << tmp8) - 1) << tmp16);
999             tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
1000             break;
1001 #endif
1002         case INDEX_op_brcond_i64:
1003             t0 = tci_read_r64(regs, &tb_ptr);
1004             t1 = tci_read_ri64(regs, &tb_ptr);
1005             condition = *tb_ptr++;
1006             label = tci_read_label(&tb_ptr);
1007             if (tci_compare64(t0, t1, condition)) {
1008                 tci_assert(tb_ptr == old_code_ptr + op_size);
1009                 tb_ptr = (uint8_t *)label;
1010                 continue;
1011             }
1012             break;
1013 #if TCG_TARGET_HAS_ext8u_i64
1014         case INDEX_op_ext8u_i64:
1015             t0 = *tb_ptr++;
1016             t1 = tci_read_r8(regs, &tb_ptr);
1017             tci_write_reg64(regs, t0, t1);
1018             break;
1019 #endif
1020 #if TCG_TARGET_HAS_ext8s_i64
1021         case INDEX_op_ext8s_i64:
1022             t0 = *tb_ptr++;
1023             t1 = tci_read_r8s(regs, &tb_ptr);
1024             tci_write_reg64(regs, t0, t1);
1025             break;
1026 #endif
1027 #if TCG_TARGET_HAS_ext16s_i64
1028         case INDEX_op_ext16s_i64:
1029             t0 = *tb_ptr++;
1030             t1 = tci_read_r16s(regs, &tb_ptr);
1031             tci_write_reg64(regs, t0, t1);
1032             break;
1033 #endif
1034 #if TCG_TARGET_HAS_ext16u_i64
1035         case INDEX_op_ext16u_i64:
1036             t0 = *tb_ptr++;
1037             t1 = tci_read_r16(regs, &tb_ptr);
1038             tci_write_reg64(regs, t0, t1);
1039             break;
1040 #endif
1041 #if TCG_TARGET_HAS_ext32s_i64
1042         case INDEX_op_ext32s_i64:
1043 #endif
1044         case INDEX_op_ext_i32_i64:
1045             t0 = *tb_ptr++;
1046             t1 = tci_read_r32s(regs, &tb_ptr);
1047             tci_write_reg64(regs, t0, t1);
1048             break;
1049 #if TCG_TARGET_HAS_ext32u_i64
1050         case INDEX_op_ext32u_i64:
1051 #endif
1052         case INDEX_op_extu_i32_i64:
1053             t0 = *tb_ptr++;
1054             t1 = tci_read_r32(regs, &tb_ptr);
1055             tci_write_reg64(regs, t0, t1);
1056             break;
1057 #if TCG_TARGET_HAS_bswap16_i64
1058         case INDEX_op_bswap16_i64:
1059             t0 = *tb_ptr++;
1060             t1 = tci_read_r16(regs, &tb_ptr);
1061             tci_write_reg64(regs, t0, bswap16(t1));
1062             break;
1063 #endif
1064 #if TCG_TARGET_HAS_bswap32_i64
1065         case INDEX_op_bswap32_i64:
1066             t0 = *tb_ptr++;
1067             t1 = tci_read_r32(regs, &tb_ptr);
1068             tci_write_reg64(regs, t0, bswap32(t1));
1069             break;
1070 #endif
1071 #if TCG_TARGET_HAS_bswap64_i64
1072         case INDEX_op_bswap64_i64:
1073             t0 = *tb_ptr++;
1074             t1 = tci_read_r64(regs, &tb_ptr);
1075             tci_write_reg64(regs, t0, bswap64(t1));
1076             break;
1077 #endif
1078 #if TCG_TARGET_HAS_not_i64
1079         case INDEX_op_not_i64:
1080             t0 = *tb_ptr++;
1081             t1 = tci_read_r64(regs, &tb_ptr);
1082             tci_write_reg64(regs, t0, ~t1);
1083             break;
1084 #endif
1085 #if TCG_TARGET_HAS_neg_i64
1086         case INDEX_op_neg_i64:
1087             t0 = *tb_ptr++;
1088             t1 = tci_read_r64(regs, &tb_ptr);
1089             tci_write_reg64(regs, t0, -t1);
1090             break;
1091 #endif
1092 #endif /* TCG_TARGET_REG_BITS == 64 */
1093 
1094             /* QEMU specific operations. */
1095 
1096         case INDEX_op_exit_tb:
1097             ret = *(uint64_t *)tb_ptr;
1098             goto exit;
1099             break;
1100         case INDEX_op_goto_tb:
1101             /* Jump address is aligned */
1102             tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
1103             t0 = atomic_read((int32_t *)tb_ptr);
1104             tb_ptr += sizeof(int32_t);
1105             tci_assert(tb_ptr == old_code_ptr + op_size);
1106             tb_ptr += (int32_t)t0;
1107             continue;
1108         case INDEX_op_qemu_ld_i32:
1109             t0 = *tb_ptr++;
1110             taddr = tci_read_ulong(regs, &tb_ptr);
1111             oi = tci_read_i(&tb_ptr);
1112             switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
1113             case MO_UB:
1114                 tmp32 = qemu_ld_ub;
1115                 break;
1116             case MO_SB:
1117                 tmp32 = (int8_t)qemu_ld_ub;
1118                 break;
1119             case MO_LEUW:
1120                 tmp32 = qemu_ld_leuw;
1121                 break;
1122             case MO_LESW:
1123                 tmp32 = (int16_t)qemu_ld_leuw;
1124                 break;
1125             case MO_LEUL:
1126                 tmp32 = qemu_ld_leul;
1127                 break;
1128             case MO_BEUW:
1129                 tmp32 = qemu_ld_beuw;
1130                 break;
1131             case MO_BESW:
1132                 tmp32 = (int16_t)qemu_ld_beuw;
1133                 break;
1134             case MO_BEUL:
1135                 tmp32 = qemu_ld_beul;
1136                 break;
1137             default:
1138                 tcg_abort();
1139             }
1140             tci_write_reg(regs, t0, tmp32);
1141             break;
1142         case INDEX_op_qemu_ld_i64:
1143             t0 = *tb_ptr++;
1144             if (TCG_TARGET_REG_BITS == 32) {
1145                 t1 = *tb_ptr++;
1146             }
1147             taddr = tci_read_ulong(regs, &tb_ptr);
1148             oi = tci_read_i(&tb_ptr);
1149             switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
1150             case MO_UB:
1151                 tmp64 = qemu_ld_ub;
1152                 break;
1153             case MO_SB:
1154                 tmp64 = (int8_t)qemu_ld_ub;
1155                 break;
1156             case MO_LEUW:
1157                 tmp64 = qemu_ld_leuw;
1158                 break;
1159             case MO_LESW:
1160                 tmp64 = (int16_t)qemu_ld_leuw;
1161                 break;
1162             case MO_LEUL:
1163                 tmp64 = qemu_ld_leul;
1164                 break;
1165             case MO_LESL:
1166                 tmp64 = (int32_t)qemu_ld_leul;
1167                 break;
1168             case MO_LEQ:
1169                 tmp64 = qemu_ld_leq;
1170                 break;
1171             case MO_BEUW:
1172                 tmp64 = qemu_ld_beuw;
1173                 break;
1174             case MO_BESW:
1175                 tmp64 = (int16_t)qemu_ld_beuw;
1176                 break;
1177             case MO_BEUL:
1178                 tmp64 = qemu_ld_beul;
1179                 break;
1180             case MO_BESL:
1181                 tmp64 = (int32_t)qemu_ld_beul;
1182                 break;
1183             case MO_BEQ:
1184                 tmp64 = qemu_ld_beq;
1185                 break;
1186             default:
1187                 tcg_abort();
1188             }
1189             tci_write_reg(regs, t0, tmp64);
1190             if (TCG_TARGET_REG_BITS == 32) {
1191                 tci_write_reg(regs, t1, tmp64 >> 32);
1192             }
1193             break;
1194         case INDEX_op_qemu_st_i32:
1195             t0 = tci_read_r(regs, &tb_ptr);
1196             taddr = tci_read_ulong(regs, &tb_ptr);
1197             oi = tci_read_i(&tb_ptr);
1198             switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
1199             case MO_UB:
1200                 qemu_st_b(t0);
1201                 break;
1202             case MO_LEUW:
1203                 qemu_st_lew(t0);
1204                 break;
1205             case MO_LEUL:
1206                 qemu_st_lel(t0);
1207                 break;
1208             case MO_BEUW:
1209                 qemu_st_bew(t0);
1210                 break;
1211             case MO_BEUL:
1212                 qemu_st_bel(t0);
1213                 break;
1214             default:
1215                 tcg_abort();
1216             }
1217             break;
1218         case INDEX_op_qemu_st_i64:
1219             tmp64 = tci_read_r64(regs, &tb_ptr);
1220             taddr = tci_read_ulong(regs, &tb_ptr);
1221             oi = tci_read_i(&tb_ptr);
1222             switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
1223             case MO_UB:
1224                 qemu_st_b(tmp64);
1225                 break;
1226             case MO_LEUW:
1227                 qemu_st_lew(tmp64);
1228                 break;
1229             case MO_LEUL:
1230                 qemu_st_lel(tmp64);
1231                 break;
1232             case MO_LEQ:
1233                 qemu_st_leq(tmp64);
1234                 break;
1235             case MO_BEUW:
1236                 qemu_st_bew(tmp64);
1237                 break;
1238             case MO_BEUL:
1239                 qemu_st_bel(tmp64);
1240                 break;
1241             case MO_BEQ:
1242                 qemu_st_beq(tmp64);
1243                 break;
1244             default:
1245                 tcg_abort();
1246             }
1247             break;
1248         case INDEX_op_mb:
1249             /* Ensure ordering for all kinds */
1250             smp_mb();
1251             break;
1252         default:
1253             TODO();
1254             break;
1255         }
1256         tci_assert(tb_ptr == old_code_ptr + op_size);
1257     }
1258 exit:
1259     return ret;
1260 }
1261