xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision 74dbd36f1f87bd7fc4705644d63c5561a23b0567)
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
/* On 32-bit hosts every argument kind is placed with even-slot alignment. */
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
/* On 64-bit hosts arguments use consecutive slots with no extra padding. */
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
38
/*
 * Return the register-constraint set for @op.  Every tci operand lives
 * in a general register, so the sets differ only in the count of
 * outputs and inputs; ops handled via dedicated out-hooks (add, and,
 * etc.) never reach this function and fall to C_NotImplemented.
 */
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        /* On 32-bit hosts a 64-bit load produces a register pair. */
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        /* On 32-bit hosts a 64-bit store consumes a register pair. */
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
137
/*
 * Register allocation preference order.  R4..R15 first; R0..R3 hold
 * function return values (see tcg_target_call_oarg_reg), so they are
 * tried last, in reverse order.
 */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};
157
/*
 * No call arguments via registers.  All will be stored on the "stack".
 * The empty array still satisfies the common-code interface.
 */
static const int tcg_target_call_iarg_regs[] = { };
160
161static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
162{
163    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
164    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
165    return TCG_REG_R0 + slot;
166}
167
#ifdef CONFIG_DEBUG_TCG
/* Register names for debug dumps; index matches TCG_REG_R0..R15. */
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif
188
/*
 * Resolve a relocation: patch the 20-bit pc-relative displacement into
 * the top 20 bits of the 32-bit insn at @code_ptr.  Returns false when
 * the displacement does not fit, so the caller can retry differently.
 */
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* Displacement is measured from the end of this insn word. */
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);   /* only 20-bit relocations are emitted */

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
203
204static void stack_bounds_check(TCGReg base, intptr_t offset)
205{
206    if (base == TCG_REG_CALL_STACK) {
207        tcg_debug_assert(offset >= 0);
208        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
209                                   TCG_STATIC_FRAME_SIZE));
210    }
211}
212
213static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
214{
215    tcg_insn_unit insn = 0;
216
217    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
218    insn = deposit32(insn, 0, 8, op);
219    tcg_out32(s, insn);
220}
221
/*
 * Emit an insn whose operand is a host code pointer, encoded as a
 * 20-bit displacement from the end of this insn (used by exit_tb and
 * goto_tb).  A NULL pointer is encoded as displacement 0.
 */
static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        /* A real target can never be exactly the next insn. */
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            /* Out of range: restart code generation with a smaller TB. */
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}
241
242static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
243{
244    tcg_insn_unit insn = 0;
245
246    insn = deposit32(insn, 0, 8, op);
247    insn = deposit32(insn, 8, 4, r0);
248    tcg_out32(s, insn);
249}
250
251static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
252{
253    tcg_out32(s, (uint8_t)op);
254}
255
256static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
257{
258    tcg_insn_unit insn = 0;
259
260    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
261    insn = deposit32(insn, 0, 8, op);
262    insn = deposit32(insn, 8, 4, r0);
263    insn = deposit32(insn, 12, 20, i1);
264    tcg_out32(s, insn);
265}
266
267static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
268{
269    tcg_insn_unit insn = 0;
270
271    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
272    insn = deposit32(insn, 0, 8, op);
273    insn = deposit32(insn, 8, 4, r0);
274    tcg_out32(s, insn);
275}
276
277static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
278{
279    tcg_insn_unit insn = 0;
280
281    insn = deposit32(insn, 0, 8, op);
282    insn = deposit32(insn, 8, 4, r0);
283    insn = deposit32(insn, 12, 4, r1);
284    tcg_out32(s, insn);
285}
286
287static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
288                           TCGReg r0, TCGReg r1, TCGArg m2)
289{
290    tcg_insn_unit insn = 0;
291
292    tcg_debug_assert(m2 == extract32(m2, 0, 16));
293    insn = deposit32(insn, 0, 8, op);
294    insn = deposit32(insn, 8, 4, r0);
295    insn = deposit32(insn, 12, 4, r1);
296    insn = deposit32(insn, 16, 16, m2);
297    tcg_out32(s, insn);
298}
299
300static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
301                           TCGReg r0, TCGReg r1, TCGReg r2)
302{
303    tcg_insn_unit insn = 0;
304
305    insn = deposit32(insn, 0, 8, op);
306    insn = deposit32(insn, 8, 4, r0);
307    insn = deposit32(insn, 12, 4, r1);
308    insn = deposit32(insn, 16, 4, r2);
309    tcg_out32(s, insn);
310}
311
312static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
313                           TCGReg r0, TCGReg r1, intptr_t i2)
314{
315    tcg_insn_unit insn = 0;
316
317    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
318    insn = deposit32(insn, 0, 8, op);
319    insn = deposit32(insn, 8, 4, r0);
320    insn = deposit32(insn, 12, 4, r1);
321    insn = deposit32(insn, 16, 16, i2);
322    tcg_out32(s, insn);
323}
324
325static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
326                            TCGReg r1, uint8_t b2, uint8_t b3)
327{
328    tcg_insn_unit insn = 0;
329
330    tcg_debug_assert(b2 == extract32(b2, 0, 6));
331    tcg_debug_assert(b3 == extract32(b3, 0, 6));
332    insn = deposit32(insn, 0, 8, op);
333    insn = deposit32(insn, 8, 4, r0);
334    insn = deposit32(insn, 12, 4, r1);
335    insn = deposit32(insn, 16, 6, b2);
336    insn = deposit32(insn, 22, 6, b3);
337    tcg_out32(s, insn);
338}
339
340static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
341                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
342{
343    tcg_insn_unit insn = 0;
344
345    insn = deposit32(insn, 0, 8, op);
346    insn = deposit32(insn, 8, 4, r0);
347    insn = deposit32(insn, 12, 4, r1);
348    insn = deposit32(insn, 16, 4, r2);
349    insn = deposit32(insn, 20, 4, c3);
350    tcg_out32(s, insn);
351}
352
353static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
354                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
355{
356    tcg_insn_unit insn = 0;
357
358    tcg_debug_assert(b3 == extract32(b3, 0, 6));
359    tcg_debug_assert(b4 == extract32(b4, 0, 6));
360    insn = deposit32(insn, 0, 8, op);
361    insn = deposit32(insn, 8, 4, r0);
362    insn = deposit32(insn, 12, 4, r1);
363    insn = deposit32(insn, 16, 4, r2);
364    insn = deposit32(insn, 20, 6, b3);
365    insn = deposit32(insn, 26, 6, b4);
366    tcg_out32(s, insn);
367}
368
369static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
370                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
371{
372    tcg_insn_unit insn = 0;
373
374    insn = deposit32(insn, 0, 8, op);
375    insn = deposit32(insn, 8, 4, r0);
376    insn = deposit32(insn, 12, 4, r1);
377    insn = deposit32(insn, 16, 4, r2);
378    insn = deposit32(insn, 20, 4, r3);
379    tcg_out32(s, insn);
380}
381
382static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
383                              TCGReg r0, TCGReg r1, TCGReg r2,
384                              TCGReg r3, TCGReg r4, TCGCond c5)
385{
386    tcg_insn_unit insn = 0;
387
388    insn = deposit32(insn, 0, 8, op);
389    insn = deposit32(insn, 8, 4, r0);
390    insn = deposit32(insn, 12, 4, r1);
391    insn = deposit32(insn, 16, 4, r2);
392    insn = deposit32(insn, 20, 4, r3);
393    insn = deposit32(insn, 24, 4, r4);
394    insn = deposit32(insn, 28, 4, c5);
395    tcg_out32(s, insn);
396}
397
398static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
399                              TCGReg r0, TCGReg r1, TCGReg r2,
400                              TCGReg r3, TCGReg r4, TCGReg r5)
401{
402    tcg_insn_unit insn = 0;
403
404    insn = deposit32(insn, 0, 8, op);
405    insn = deposit32(insn, 8, 4, r0);
406    insn = deposit32(insn, 12, 4, r1);
407    insn = deposit32(insn, 16, 4, r2);
408    insn = deposit32(insn, 20, 4, r3);
409    insn = deposit32(insn, 24, 4, r4);
410    insn = deposit32(insn, 28, 4, r5);
411    tcg_out32(s, insn);
412}
413
/*
 * Emit a load or store of @val at @base + @offset.  Offsets that do
 * not fit the signed 16-bit insn field are first materialized into
 * TCG_REG_TMP and added to the base register (clobbering TMP).
 */
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        /* TMP = offset; TMP += base; then access TMP + 0. */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
426
/* Load a value of @type from @base + @offset into register @val. */
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        /* No vector types on this backend. */
        g_assert_not_reached();
    }
}
443
444static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
445{
446    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
447    return true;
448}
449
/*
 * Load immediate @arg into @ret.  Values fitting in the signed 20-bit
 * insn field use tci_movi directly; anything larger goes through the
 * constant pool with a tci_movl insn whose 20-bit displacement is
 * patched by the relocation machinery.
 */
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        /* Normalize: a 32-bit constant is its sign-extended 64-bit form. */
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        /* Reserve a pool entry; the reloc is resolved at pool layout. */
        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
476
477static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
478                            TCGReg rs, unsigned pos, unsigned len)
479{
480    TCGOpcode opc = type == TCG_TYPE_I32 ?
481                    INDEX_op_extract_i32 :
482                    INDEX_op_extract_i64;
483    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
484}
485
486static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
487                             TCGReg rs, unsigned pos, unsigned len)
488{
489    TCGOpcode opc = type == TCG_TYPE_I32 ?
490                    INDEX_op_sextract_i32 :
491                    INDEX_op_sextract_i64;
492    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
493}
494
/* Sign-extend the low 8 bits of rs into rd. */
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

/* Zero-extend the low 8 bits of rs into rd. */
static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

/* Sign-extend the low 16 bits of rs into rd. */
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

/* Zero-extend the low 16 bits of rs into rd. */
static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}
514
/* Sign-extend the low 32 bits of rs into rd (64-bit hosts only). */
static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

/* Zero-extend the low 32 bits of rs into rd (64-bit hosts only). */
static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

/* i32 -> i64 conversion: sign-extending form. */
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

/* i32 -> i64 conversion: zero-extending form. */
static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}
536
/* i64 -> i32 truncation: a plain move suffices for the interpreter. */
static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

/* Register exchange is not supported; tell the allocator to spill. */
static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}
547
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    /* TCG_TARGET_CALL_ARG_I128 is NORMAL/EVEN, so this is never reached. */
    g_assert_not_reached();
}
554
/*
 * Emit a helper call.  The function pointer and its ffi_cif descriptor
 * are stored as a pair in the constant pool; the insn encodes only a
 * return-size selector in the 4-bit register field:
 *   0 = void, 1 = 32-bit, 2 = 64-bit, 3 = 128-bit.
 */
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        /* Map size 4/8/16 -> selector 1/2/3. */
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
575
/*
 * Helpers for writing switch cases that cover both the _i32 and _i64
 * variants of an opcode; CASE_64 expands to nothing on 32-bit hosts.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
587
/* Return from generated code to the main loop, with @arg as exit value. */
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}
592
/*
 * Emit the TB chaining jump for slot @which.  Only the indirect method
 * is used: the insn operand is the address of the jump-target slot,
 * so retargeting needs no code patching.
 */
static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}
599
/* Retarget a chained TB jump; see tcg_out_goto_tb. */
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
605
606static void tgen_add(TCGContext *s, TCGType type,
607                     TCGReg a0, TCGReg a1, TCGReg a2)
608{
609    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
610}
611
612static const TCGOutOpBinary outop_add = {
613    .base.static_constraint = C_O1_I2(r, r, r),
614    .out_rrr = tgen_add,
615};
616
617static void tgen_and(TCGContext *s, TCGType type,
618                     TCGReg a0, TCGReg a1, TCGReg a2)
619{
620    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
621}
622
623static const TCGOutOpBinary outop_and = {
624    .base.static_constraint = C_O1_I2(r, r, r),
625    .out_rrr = tgen_and,
626};
627
628static void tgen_andc(TCGContext *s, TCGType type,
629                      TCGReg a0, TCGReg a1, TCGReg a2)
630{
631    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
632}
633
634static const TCGOutOpBinary outop_andc = {
635    .base.static_constraint = C_O1_I2(r, r, r),
636    .out_rrr = tgen_andc,
637};
638
639static void tgen_divs(TCGContext *s, TCGType type,
640                      TCGReg a0, TCGReg a1, TCGReg a2)
641{
642    TCGOpcode opc = (type == TCG_TYPE_I32
643                     ? INDEX_op_tci_divs32
644                     : INDEX_op_divs);
645    tcg_out_op_rrr(s, opc, a0, a1, a2);
646}
647
648static const TCGOutOpBinary outop_divs = {
649    .base.static_constraint = C_O1_I2(r, r, r),
650    .out_rrr = tgen_divs,
651};
652
653static const TCGOutOpDivRem outop_divs2 = {
654    .base.static_constraint = C_NotImplemented,
655};
656
657static void tgen_divu(TCGContext *s, TCGType type,
658                      TCGReg a0, TCGReg a1, TCGReg a2)
659{
660    TCGOpcode opc = (type == TCG_TYPE_I32
661                     ? INDEX_op_tci_divu32
662                     : INDEX_op_divu);
663    tcg_out_op_rrr(s, opc, a0, a1, a2);
664}
665
666static const TCGOutOpBinary outop_divu = {
667    .base.static_constraint = C_O1_I2(r, r, r),
668    .out_rrr = tgen_divu,
669};
670
671static const TCGOutOpDivRem outop_divu2 = {
672    .base.static_constraint = C_NotImplemented,
673};
674
675static void tgen_eqv(TCGContext *s, TCGType type,
676                     TCGReg a0, TCGReg a1, TCGReg a2)
677{
678    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
679}
680
681static const TCGOutOpBinary outop_eqv = {
682    .base.static_constraint = C_O1_I2(r, r, r),
683    .out_rrr = tgen_eqv,
684};
685
686static void tgen_mul(TCGContext *s, TCGType type,
687                     TCGReg a0, TCGReg a1, TCGReg a2)
688{
689    tcg_out_op_rrr(s, INDEX_op_mul, a0, a1, a2);
690}
691
692static const TCGOutOpBinary outop_mul = {
693    .base.static_constraint = C_O1_I2(r, r, r),
694    .out_rrr = tgen_mul,
695};
696
697static const TCGOutOpBinary outop_mulsh = {
698    .base.static_constraint = C_NotImplemented,
699};
700
701static const TCGOutOpBinary outop_muluh = {
702    .base.static_constraint = C_NotImplemented,
703};
704
705static void tgen_nand(TCGContext *s, TCGType type,
706                     TCGReg a0, TCGReg a1, TCGReg a2)
707{
708    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
709}
710
711static const TCGOutOpBinary outop_nand = {
712    .base.static_constraint = C_O1_I2(r, r, r),
713    .out_rrr = tgen_nand,
714};
715
716static void tgen_nor(TCGContext *s, TCGType type,
717                     TCGReg a0, TCGReg a1, TCGReg a2)
718{
719    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
720}
721
722static const TCGOutOpBinary outop_nor = {
723    .base.static_constraint = C_O1_I2(r, r, r),
724    .out_rrr = tgen_nor,
725};
726
727static void tgen_or(TCGContext *s, TCGType type,
728                     TCGReg a0, TCGReg a1, TCGReg a2)
729{
730    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
731}
732
733static const TCGOutOpBinary outop_or = {
734    .base.static_constraint = C_O1_I2(r, r, r),
735    .out_rrr = tgen_or,
736};
737
738static void tgen_orc(TCGContext *s, TCGType type,
739                     TCGReg a0, TCGReg a1, TCGReg a2)
740{
741    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
742}
743
744static const TCGOutOpBinary outop_orc = {
745    .base.static_constraint = C_O1_I2(r, r, r),
746    .out_rrr = tgen_orc,
747};
748
749static void tgen_rems(TCGContext *s, TCGType type,
750                      TCGReg a0, TCGReg a1, TCGReg a2)
751{
752    TCGOpcode opc = (type == TCG_TYPE_I32
753                     ? INDEX_op_tci_rems32
754                     : INDEX_op_rems);
755    tcg_out_op_rrr(s, opc, a0, a1, a2);
756}
757
758static const TCGOutOpBinary outop_rems = {
759    .base.static_constraint = C_O1_I2(r, r, r),
760    .out_rrr = tgen_rems,
761};
762
763static void tgen_remu(TCGContext *s, TCGType type,
764                      TCGReg a0, TCGReg a1, TCGReg a2)
765{
766    TCGOpcode opc = (type == TCG_TYPE_I32
767                     ? INDEX_op_tci_remu32
768                     : INDEX_op_remu);
769    tcg_out_op_rrr(s, opc, a0, a1, a2);
770}
771
772static const TCGOutOpBinary outop_remu = {
773    .base.static_constraint = C_O1_I2(r, r, r),
774    .out_rrr = tgen_remu,
775};
776
777static void tgen_shl(TCGContext *s, TCGType type,
778                     TCGReg a0, TCGReg a1, TCGReg a2)
779{
780    tcg_out_op_rrr(s, INDEX_op_shl, a0, a1, a2);
781}
782
783static const TCGOutOpBinary outop_shl = {
784    .base.static_constraint = C_O1_I2(r, r, r),
785    .out_rrr = tgen_shl,
786};
787
static void tgen_shr(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    if (type < TCG_TYPE_REG) {
        /*
         * 32-bit right shift on a 64-bit host: zero the high bits of
         * the input first so the full-width shift produces the correct
         * 32-bit result.  TCG_REG_TMP is clobbered.
         */
        tcg_out_ext32u(s, TCG_REG_TMP, a1);
        a1 = TCG_REG_TMP;
    }
    tcg_out_op_rrr(s, INDEX_op_shr, a0, a1, a2);
}

static const TCGOutOpBinary outop_shr = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_shr,
};
802
803static void tgen_sub(TCGContext *s, TCGType type,
804                     TCGReg a0, TCGReg a1, TCGReg a2)
805{
806    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
807}
808
809static const TCGOutOpSubtract outop_sub = {
810    .base.static_constraint = C_O1_I2(r, r, r),
811    .out_rrr = tgen_sub,
812};
813
814static void tgen_xor(TCGContext *s, TCGType type,
815                     TCGReg a0, TCGReg a1, TCGReg a2)
816{
817    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
818}
819
820static const TCGOutOpBinary outop_xor = {
821    .base.static_constraint = C_O1_I2(r, r, r),
822    .out_rrr = tgen_xor,
823};
824
825static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
826{
827    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
828}
829
830static const TCGOutOpUnary outop_neg = {
831    .base.static_constraint = C_O1_I1(r, r),
832    .out_rr = tgen_neg,
833};
834
835static void tgen_not(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
836{
837    tcg_out_op_rr(s, INDEX_op_not, a0, a1);
838}
839
840static const TCGOutOpUnary outop_not = {
841    .base.static_constraint = C_O1_I1(r, r),
842    .out_rr = tgen_not,
843};
844
845
/*
 * Central emitter: encode one TCG op into tci bytecode.  Ops that are
 * emitted via dedicated out-hooks (add, sub, logicals, call, exit_tb,
 * goto_tb, the ext_* conversions) must not reach this function and are
 * rejected by the default case.
 */
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        /* Synthesized: setcond into TMP, then branch if TMP nonzero. */
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            /* Output-sign-extend requested: extend the swapped result. */
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        /* Synthesized: setcond2 into TMP, then branch if TMP nonzero. */
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            /* 64-bit value is a register pair; memop index goes via TMP. */
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            /* Ensure the 32-bit guest address has clean high bits. */
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}
982
/* Store register @val of @type to @base + @offset. */
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        /* No vector types on this backend. */
        g_assert_not_reached();
    }
}
999
/* Store-immediate is not supported; the allocator loads a reg instead. */
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}
1005
1006/* Test if a constant matches the constraint. */
1007static bool tcg_target_const_match(int64_t val, int ct,
1008                                   TCGType type, TCGCond cond, int vece)
1009{
1010    return ct & TCG_CT_CONST;
1011}
1012
1013static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
1014{
1015    memset(p, 0, sizeof(*p) * count);
1016}
1017
/* One-time backend initialization: register sets, clobbers, frame. */
static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
1044
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* The interpreter has no native prologue/epilogue to emit. */
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}
1054
1055bool tcg_target_has_memory_bswap(MemOp memop)
1056{
1057    return true;
1058}
1059
/* Out-of-line ldst slow paths are never generated by this backend. */
static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
1069