/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/*
 * Enable TCI assertions only when debugging TCG (and without NDEBUG
 * defined).  Without assertions, the interpreter runs much faster.
 */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
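
/*
 * Helpers may take up to MAX_OPC_PARAM_IARGS (6) input arguments.  On a
 * 32-bit host each of those may be a 64-bit value passed as a register
 * pair, so the widest call signature needs twelve host words.
 */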
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

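/*
 * With the interpreter there is no host return address for a helper to
 * inspect, so GETPC() is redirected to this thread-local copy of the
 * bytecode pointer (see CONFIG_TCG_INTERPRETER in exec-all.h).
 */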
__thread uintptr_t tci_tb_ptr;

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}

/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

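/*
 * Bytecode format: every instruction starts with a one-byte opcode and a
 * one-byte total size, followed by its operands (one-byte register
 * indices, native-word immediates, 32-bit constants or offsets, label
 * addresses).  The tci_read_*() helpers below decode one operand each
 * and advance the bytecode pointer past it.
 */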
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(const uint8_t **tb_ptr)
{
    uint64_t value = *(const uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r(regs, tb_ptr);
    return tci_uint64(tci_read_r(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    return tci_read_r(regs, tb_ptr);
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}

static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

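/*
 * Shorthands for the softmmu guest memory accessors.  These macros
 * expand in place inside the interpreter loop and use the local
 * variables env, taddr, oi and tb_ptr; the suffix encodes endianness
 * and size (e.g. "leuw" = little-endian unsigned word).
 */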
#define qemu_ld_ub \
    cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leuw \
    cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leul \
    cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leq \
    cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beuw \
    cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beul \
    cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beq \
    cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_b(X) \
    cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lew(X) \
    cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lel(X) \
    cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_leq(X) \
    cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bew(X) \
    cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bel(X) \
    cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_beq(X) \
    cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)

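/*
 * On a 64-bit host most opcode bodies are identical for the _i32 and
 * _i64 variants, so CASE_32_64() lets one body serve both case labels;
 * CASE_64() compiles away entirely on 32-bit hosts.
 */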
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

/*
 * Interpret the pseudo code in tb.
 *
 * One possible operation in the pseudo code is a call to binary code,
 * so CFI checks must be disabled for the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
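    /*
     * tcg_temps backs the TCG call stack: TCG_REG_CALL_STACK is
     * initialized to the end of the buffer and the stack grows
     * downward from there.
     */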
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_i(&tb_ptr);
            tci_tb_ptr = (uintptr_t)tb_ptr;
#if TCG_TARGET_REG_BITS == 32
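            /*
             * Helper arguments are passed in consecutive registers; on
             * a 32-bit host each 64-bit argument occupies a register
             * pair, and the 64-bit result comes back in R0:R1.
             */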
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R4),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R4),
                                          tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        CASE_32_64(mov)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;

            /* Load/store operations (mixed 32/64 bit). */

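            /*
             * Operand pattern: value register, base register, 32-bit
             * signed offset; the host address accessed is base + offset.
             */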
        CASE_32_64(ld8u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        CASE_32_64(ld8s)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int8_t *)(t1 + t2));
            break;
        CASE_32_64(ld16u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        CASE_32_64(ld16s)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int16_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        CASE_32_64(st8)
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        CASE_32_64(st16)
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 + t2);
            break;
        CASE_32_64(sub)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 - t2);
            break;
        CASE_32_64(mul)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 * t2);
            break;
        CASE_32_64(and)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 & t2);
            break;
        CASE_32_64(or)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 | t2);
            break;
        CASE_32_64(xor)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 ^ t2);
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1 / (uint32_t)t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1 % (uint32_t)t2);
            break;

            /* Shift/rotate operations (32 bit). */

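            /*
             * Shift and rotate counts are masked to the operand width,
             * so an out-of-range count cannot trigger undefined C
             * behaviour in the interpreter.
             */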
        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1 >> (t2 & 31));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
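            /*
             * deposit dest, t1, t2, pos, len: copy the low len bits of
             * t2 into t1 at bit position pos (tmp16 = pos, tmp8 = len).
             */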
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r(regs, &tb_ptr);
            tmp64 = (uint32_t)tci_read_r(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, (uint32_t)t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int8_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
        CASE_32_64(ext16s)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int16_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint8_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint16_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
        CASE_32_64(neg)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_tci_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int64_t)t1 / (int64_t)t2);
            break;
        case INDEX_op_divu_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint64_t)t1 / (uint64_t)t2);
            break;
        case INDEX_op_rem_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int64_t)t1 % (int64_t)t2);
            break;
        case INDEX_op_remu_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint64_t)t1 % (uint64_t)t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1);
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap64(t1));
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

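        /*
         * exit_tb: the 64-bit operand is returned to the caller of
         * tcg_qemu_tb_exec(), i.e. the value given to tcg_gen_exit_tb()
         * (a TB pointer plus a link-slot index, or 0 for no TB).
         */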
        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
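            /*
             * The 32-bit displacement is patched by TB chaining (hence
             * the atomic read) and is relative to the end of the
             * operand; an unlinked TB has displacement 0 and simply
             * falls through.
             */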
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = qatomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
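            /* oi packs the MemOp flags and the softmmu MMU index. */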
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}