/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))

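/*
 * Return the constraint set describing the operands of @op.  The
 * interpreter places no restrictions on which register an operand may
 * use, so every operand takes a plain "r" constraint, except deposit,
 * whose first input must share the output register ("0"); only the
 * number of outputs and inputs differs between the groups below.
 */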
static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, 0, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        return C_O2_I4(r, r, r, r, r, r);
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
    case INDEX_op_mulu2_i32:
        return C_O2_I2(r, r, r, r);
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);
#endif

    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

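/*
 * Resolve a relocation by patching the absolute address of the label
 * directly into the bytecode stream, using 32 or 64 bits to match the
 * host register size.
 */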
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show the current bytecode. Used by the TCG interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/*
 * Write an opcode.  The second byte is a placeholder for the total
 * instruction length; each emitter patches it (old_code_ptr[1]) once
 * all operands have been written.
 */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        /* The label is already resolved: emit its absolute address. */
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        /* Forward reference: record a relocation and reserve space. */
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

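/*
 * Accesses relative to the stack pointer must stay within the temporary
 * buffer reserved below "sp" by tcg_target_init().
 */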
static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset < 0);
        tcg_debug_assert(offset >= -(CPU_TEMP_BUF_NLONGS * sizeof(long)));
    }
}

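/*
 * Load a value of the given type: opcode, destination register, base
 * register, 32-bit offset.  64-bit loads on a 32-bit host are not
 * implemented (TODO).
 */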
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    stack_bounds_check(arg1, arg2);
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_tci_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_tci_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_i(s, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

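/*
 * Emit one TCG opcode as interpreter bytecode: a one-byte opcode, a
 * one-byte total length (patched at the end of this function), then the
 * operands in the per-opcode formats handled below.
 */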
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            /* Align for atomic patching and thread safety */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        set_jmp_reset_offset(s, args[0]);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 t0, t1_low, t1_high, t2_low, t2_high, cond */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
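    /*
     * Loads and stores: destination/value register, base register,
     * 32-bit offset.
     */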
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        stack_bounds_check(args[1], args[2]);
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out8(s, args[4]);           /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
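    /*
     * qemu_ld/st: the data uses one or two registers and the guest
     * address uses one or two registers, depending on the host word
     * size and TARGET_LONG_BITS; the constant memory-operation index
     * follows.
     */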
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_mb:
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

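/*
 * Store a value of the given type: opcode, value register, base
 * register, 32-bit offset.  64-bit stores on a 32-bit host are not
 * implemented (TODO).
 */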
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    stack_bounds_check(arg1, arg2);
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    /* The bytecode cannot store constants directly; returning false
       makes the caller load the constant into a register first. */
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return exactly 0 or 1; zero vs. non-zero is sufficient. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments.  */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* The interpreter has no native prologue or epilogue to generate. */
}
