/* xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision 49bd751497f3b71550b152ef9da0e265a94a64c1) */
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
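
/*
 * On 32-bit hosts, TCG_CALL_ARG_EVEN requests that each argument start
 * on an even slot, i.e. an 8-byte boundary, which keeps 64-bit values
 * aligned per TCG_TARGET_STACK_ALIGN above (see the TCG_CALL_ARG_EVEN
 * handling in tcg.c).  On 64-bit hosts every slot is already 8 bytes
 * wide, so TCG_CALL_ARG_NORMAL suffices.
 */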

static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
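
/*
 * A note on the C_O<n>_I<m> shorthand used above: it names a constraint
 * set with <n> output operands followed by <m> input operands, each
 * letter giving that operand's constraint; 'r' accepts any general
 * register.  C_O1_I2(r, r, r) is thus "one register output, two
 * register inputs".  Plain 'r' everywhere is all the interpreter
 * needs, since no TCI operation is tied to a particular register.
 */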

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}
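
/*
 * With the scheme above, a 128-bit return value occupies r0-r1 on a
 * 64-bit host and r0-r3 on a 32-bit host; the same window is marked
 * call-clobbered in tcg_target_init() below.
 */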

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
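
/*
 * Relocation example: for a branch at code_ptr whose target lies 0x18
 * bytes past the following instruction, diff is 0x18.  That fits the
 * signed 20-bit field, so bits [12, 32) of the insn word are patched
 * with 0x18; an out-of-range target leaves the insn unmodified and
 * returns false.
 */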

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
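
/*
 * Each TCI instruction is one 32-bit word.  The emitters below all
 * place the TCGOpcode in bits [0, 8) and pack the operands above it:
 * register numbers in successive 4-bit fields starting at bit 8,
 * immediates as a sign-extended 20-bit field at bit 12 or a 16-bit
 * field at bit 16, and bit-field positions/lengths as 6-bit fields.
 * The suffix of each tcg_out_op_* helper spells the operand kinds in
 * order: r = register, i = signed immediate, s = signed offset,
 * b = bit-field position/length, c = condition, l = label,
 * m = MemOpIdx, p = pointer, v = no operands.
 */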

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
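
/*
 * For example, an offset of 0x12345 does not fit the signed 16-bit
 * field of tcg_out_op_rrs(), so it is first materialized into
 * TCG_REG_TMP, added to the base register, and the access is then
 * emitted with an offset of zero.
 */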

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
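
/*
 * For example, moving 0x7ffff fits the signed 20-bit immediate and
 * emits a single tci_movi; 0x80000 does not, so a constant-pool entry
 * holding the full value is created instead and tci_movl is emitted
 * with a 20-bit relocation to that entry.
 */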

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
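
/*
 * The 4-bit "which" field encodes the return kind: 0 for void, and
 * 1, 2 or 3 for a 4-, 8- or 16-byte value (e.g. ctz32(8) - 1 == 2),
 * so that the interpreter can tell how much of the ffi return buffer
 * is significant.  The function pointer and the ffi_cif describing
 * the signature do not fit in the insn word, so both go into the
 * constant pool and are reached through the same 20-bit relocation.
 */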

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
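
/*
 * For example, with TCG_TARGET_REG_BITS == 64, CASE_32_64(sub)
 * expands to
 *     case INDEX_op_sub_i64:
 *     case INDEX_op_sub_i32:
 * whereas on a 32-bit host only the _i32 case is generated and
 * CASE_64(x) expands to nothing.
 */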

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

static void tgen_or(TCGContext *s, TCGType type,
                    TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(xor)
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}
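
/*
 * Any MemOp is accepted here because the interpreter performs each
 * guest access in C and can byte-swap the value at run time; there is
 * no generated fast path that would need to be specialized.
 */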

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}