xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision 7498d882cbe39ae7df4315ea006830e640f0d47b)
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL

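/*
 * Operand constraints for the opcodes still selected through this
 * table.  Every TCI operand lives in a plain general register, so the
 * constraint sets differ only in arity: C_Om_In(r, ...) describes an
 * opcode with m register outputs and n register inputs.
 */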
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

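/*
 * Helper return values are delivered in consecutive registers starting
 * at R0, one slot per TCG_TARGET_REG_BITS: up to four 32-bit or two
 * 64-bit registers for the largest (128-bit) return value.
 */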
static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

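/*
 * The only relocation type is the 20-bit pc-relative operand used for
 * branches and constant-pool references ("type" is the field width).
 * The displacement is measured from the insn following the one being
 * patched and is stored in the top 20 bits of the insn word.
 */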
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

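/*
 * TCI instructions are fixed-size 32-bit words, assembled by the
 * helpers below with deposit32().  Common field positions:
 *
 *   bits  0..7    opcode
 *   bits  8..11   first register operand (or return-size code for call)
 *   bits 12..15   second register operand
 *   bits 16..19   third register operand
 *   bits 12..31   20-bit signed immediate or pc-relative displacement
 *   bits 16..31   16-bit signed offset or MemOpIdx
 *
 * The exact layout of the remaining fields depends on the format; see
 * the individual emitters.
 */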
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

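/*
 * Loads and stores carry a 16-bit signed displacement.  A wider offset
 * is first materialized into TCG_REG_TMP and added to the base, after
 * which a zero-displacement access is emitted.
 */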
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

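/*
 * Helper calls go through libffi.  The constant pool holds the
 * (function pointer, ffi_cif) pair, referenced via the 20-bit
 * relocation, and bits 8..11 carry the return-value size class for
 * the interpreter: 0 = void, 1 = 32-bit, 2 = 64-bit, 3 = 128-bit.
 */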
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

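/*
 * goto_tb always uses the indirect jump method: the insn references
 * the per-TB jump target slot, so re-chaining a TB only has to update
 * that slot and no generated code needs patching (see
 * tb_target_set_jmp_target below).
 */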
static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

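/*
 * Operations whose 32-bit behaviour differs from the 64-bit one
 * (clz/ctz, division, remainder, rotates, setcond/movcond) pick a
 * dedicated tci_*32 opcode for TCG_TYPE_I32; width-agnostic
 * operations share a single opcode for both types.
 */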
static void tgen_clz(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_clz32
                     : INDEX_op_clz);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_clz = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_clz,
};

static void tgen_ctz(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_ctz32
                     : INDEX_op_ctz);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_ctz = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_ctz,
};

static void tgen_divs(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_divs32
                     : INDEX_op_divs);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_divs = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_divs,
};

static const TCGOutOpDivRem outop_divs2 = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_divu(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_divu32
                     : INDEX_op_divu);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_divu = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_divu,
};

static const TCGOutOpDivRem outop_divu2 = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};

static void tgen_mul(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_mul, a0, a1, a2);
}

static const TCGOutOpBinary outop_mul = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_mul,
};

static TCGConstraintSetIndex cset_mul2(TCGType type, unsigned flags)
{
    return type == TCG_TYPE_REG ? C_O2_I2(r, r, r, r) : C_NotImplemented;
}

static void tgen_muls2(TCGContext *s, TCGType type,
                       TCGReg a0, TCGReg a1, TCGReg a2, TCGReg a3)
{
    tcg_out_op_rrrr(s, INDEX_op_muls2, a0, a1, a2, a3);
}

static const TCGOutOpMul2 outop_muls2 = {
    .base.static_constraint = C_Dynamic,
    .base.dynamic_constraint = cset_mul2,
    .out_rrrr = tgen_muls2,
};

static const TCGOutOpBinary outop_mulsh = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_mulu2(TCGContext *s, TCGType type,
                       TCGReg a0, TCGReg a1, TCGReg a2, TCGReg a3)
{
    tcg_out_op_rrrr(s, INDEX_op_mulu2, a0, a1, a2, a3);
}

static const TCGOutOpMul2 outop_mulu2 = {
    .base.static_constraint = C_Dynamic,
    .base.dynamic_constraint = cset_mul2,
    .out_rrrr = tgen_mulu2,
};

static const TCGOutOpBinary outop_muluh = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_nand(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};

static void tgen_nor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
}

static const TCGOutOpBinary outop_nor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nor,
};

static void tgen_or(TCGContext *s, TCGType type,
                    TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};

static void tgen_rems(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_rems32
                     : INDEX_op_rems);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_rems = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_rems,
};

static void tgen_remu(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_remu32
                     : INDEX_op_remu);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_remu = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_remu,
};

static void tgen_rotl(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_rotl32
                     : INDEX_op_rotl);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_rotl = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_rotl,
};

static void tgen_rotr(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_rotr32
                     : INDEX_op_rotr);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_rotr = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_rotr,
};

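/*
 * For 32-bit right shifts the input is first widened into TCG_REG_TMP,
 * sign-extended for sar and zero-extended for shr, so that the
 * full-register shift opcode produces the correct 32-bit result.
 */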
static void tgen_sar(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    if (type < TCG_TYPE_REG) {
        tcg_out_ext32s(s, TCG_REG_TMP, a1);
        a1 = TCG_REG_TMP;
    }
    tcg_out_op_rrr(s, INDEX_op_sar, a0, a1, a2);
}

static const TCGOutOpBinary outop_sar = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sar,
};

static void tgen_shl(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_shl, a0, a1, a2);
}

static const TCGOutOpBinary outop_shl = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_shl,
};

static void tgen_shr(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    if (type < TCG_TYPE_REG) {
        tcg_out_ext32u(s, TCG_REG_TMP, a1);
        a1 = TCG_REG_TMP;
    }
    tcg_out_op_rrr(s, INDEX_op_shr, a0, a1, a2);
}

static const TCGOutOpBinary outop_shr = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_shr,
};

static void tgen_sub(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
}

static const TCGOutOpSubtract outop_sub = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sub,
};

static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};

static void tgen_ctpop(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_ctpop, a0, a1);
}

static TCGConstraintSetIndex cset_ctpop(TCGType type, unsigned flags)
{
    return type == TCG_TYPE_REG ? C_O1_I1(r, r) : C_NotImplemented;
}

static const TCGOutOpUnary outop_ctpop = {
    .base.static_constraint = C_Dynamic,
    .base.dynamic_constraint = cset_ctpop,
    .out_rr = tgen_ctpop,
};

static void tgen_bswap16(TCGContext *s, TCGType type,
                         TCGReg a0, TCGReg a1, unsigned flags)
{
    tcg_out_op_rr(s, INDEX_op_bswap16, a0, a1);
    if (flags & TCG_BSWAP_OS) {
        tcg_out_sextract(s, TCG_TYPE_REG, a0, a0, 0, 16);
    }
}

static const TCGOutOpBswap outop_bswap16 = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_bswap16,
};

static void tgen_bswap32(TCGContext *s, TCGType type,
                         TCGReg a0, TCGReg a1, unsigned flags)
{
    tcg_out_op_rr(s, INDEX_op_bswap32, a0, a1);
    if (flags & TCG_BSWAP_OS) {
        tcg_out_sextract(s, TCG_TYPE_REG, a0, a0, 0, 32);
    }
}

static const TCGOutOpBswap outop_bswap32 = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_bswap32,
};

static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
}

static const TCGOutOpUnary outop_neg = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_neg,
};

static void tgen_not(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_not, a0, a1);
}

static const TCGOutOpUnary outop_not = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_not,
};

static void tgen_setcond(TCGContext *s, TCGType type, TCGCond cond,
                         TCGReg dest, TCGReg arg1, TCGReg arg2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_setcond32
                     : INDEX_op_setcond);
    tcg_out_op_rrrc(s, opc, dest, arg1, arg2, cond);
}

static const TCGOutOpSetcond outop_setcond = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_setcond,
};

static void tgen_negsetcond(TCGContext *s, TCGType type, TCGCond cond,
                            TCGReg dest, TCGReg arg1, TCGReg arg2)
{
    tgen_setcond(s, type, cond, dest, arg1, arg2);
    tgen_neg(s, type, dest, dest);
}

static const TCGOutOpSetcond outop_negsetcond = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_negsetcond,
};

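/*
 * The brcond insn encodes only one register and a label, so a
 * two-operand comparison is lowered to setcond into TCG_REG_TMP
 * followed by a branch on that result.
 */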
static void tgen_brcond(TCGContext *s, TCGType type, TCGCond cond,
                        TCGReg arg0, TCGReg arg1, TCGLabel *l)
{
    tgen_setcond(s, type, cond, TCG_REG_TMP, arg0, arg1);
    tcg_out_op_rl(s, INDEX_op_brcond, TCG_REG_TMP, l);
}

static const TCGOutOpBrcond outop_brcond = {
    .base.static_constraint = C_O0_I2(r, r),
    .out_rr = tgen_brcond,
};

static void tgen_movcond(TCGContext *s, TCGType type, TCGCond cond,
                         TCGReg ret, TCGReg c1, TCGArg c2, bool const_c2,
                         TCGArg vt, bool const_vt, TCGArg vf, bool const_vf)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_movcond32
                     : INDEX_op_movcond);
    tcg_out_op_rrrrrc(s, opc, ret, c1, c2, vt, vf, cond);
}

static const TCGOutOpMovcond outop_movcond = {
    .base.static_constraint = C_O1_I4(r, r, r, r, r),
    .out = tgen_movcond,
};

static void tgen_brcond2(TCGContext *s, TCGCond cond, TCGReg al, TCGReg ah,
                         TCGArg bl, bool const_bl,
                         TCGArg bh, bool const_bh, TCGLabel *l)
{
    tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                      al, ah, bl, bh, cond);
    tcg_out_op_rl(s, INDEX_op_brcond, TCG_REG_TMP, l);
}

#if TCG_TARGET_REG_BITS != 32
__attribute__((unused))
#endif
static const TCGOutOpBrcond2 outop_brcond2 = {
    .base.static_constraint = C_O0_I4(r, r, r, r),
    .out = tgen_brcond2,
};

static void tgen_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                          TCGReg al, TCGReg ah,
                          TCGArg bl, bool const_bl,
                          TCGArg bh, bool const_bh)
{
    tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, ret, al, ah, bl, bh, cond);
}

#if TCG_TARGET_REG_BITS != 32
__attribute__((unused))
#endif
static const TCGOutOpSetcond2 outop_setcond2 = {
    .base.static_constraint = C_O1_I4(r, r, r, r, r),
    .out = tgen_setcond2,
};

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

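    /*
     * On a 32-bit host the 64-bit qemu_ld/st ops need two data
     * registers plus the address, so the MemOpIdx (args[3]) is moved
     * into TCG_REG_TMP to fit the four-register insn format.
     */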
    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
1204