xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision c96447d838d67db509cde1a190132e14b8672055)
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
/* On a 32-bit host, align 64-bit (and wider) arguments to even slots. */
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
38
/*
 * Return the operand-constraint set for @op.  The interpreter places
 * no restrictions on register choice, so every operand is plain 'r';
 * only the operand counts differ between opcodes.
 */
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        /* A 64-bit load on a 32-bit host produces a register pair. */
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
127
/*
 * Register allocation preference order.  All 16 TCI registers are
 * interchangeable; prefer R4..R15 first.
 */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};
147
/* No call arguments via registers.  All will be stored on the "stack". */
/* NOTE(review): the call path (tcg_out_call) passes an ffi_cif; the
   interpreter presumably marshals arguments from stack slots — confirm. */
static const int tcg_target_call_iarg_regs[] = { };
150
151static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
152{
153    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
154    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
155    return TCG_REG_R0 + slot;
156}
157
#ifdef CONFIG_DEBUG_TCG
/* Register names used only by debug dumps. */
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif
178
/*
 * Patch a 20-bit pc-relative displacement into the insn at @code_ptr.
 * The displacement is measured from the *next* instruction, matching
 * the encoding used by tcg_out_op_p.  Returns false if @value is out
 * of range for the 20-bit field.
 */
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        /* Deposit the 20-bit field at bits [12, 32). */
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
193
194static void stack_bounds_check(TCGReg base, intptr_t offset)
195{
196    if (base == TCG_REG_CALL_STACK) {
197        tcg_debug_assert(offset >= 0);
198        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
199                                   TCG_STATIC_FRAME_SIZE));
200    }
201}
202
203static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
204{
205    tcg_insn_unit insn = 0;
206
207    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
208    insn = deposit32(insn, 0, 8, op);
209    tcg_out32(s, insn);
210}
211
/*
 * Emit <op, ptr> with the pointer encoded as a signed 20-bit
 * pc-relative displacement from the following instruction.
 */
static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            /* Target out of range: restart with a smaller TB. */
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}
231
232static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
233{
234    tcg_insn_unit insn = 0;
235
236    insn = deposit32(insn, 0, 8, op);
237    insn = deposit32(insn, 8, 4, r0);
238    tcg_out32(s, insn);
239}
240
/* Emit an opcode-only instruction with no operands (e.g. mb). */
static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}
245
246static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
247{
248    tcg_insn_unit insn = 0;
249
250    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
251    insn = deposit32(insn, 0, 8, op);
252    insn = deposit32(insn, 8, 4, r0);
253    insn = deposit32(insn, 12, 20, i1);
254    tcg_out32(s, insn);
255}
256
257static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
258{
259    tcg_insn_unit insn = 0;
260
261    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
262    insn = deposit32(insn, 0, 8, op);
263    insn = deposit32(insn, 8, 4, r0);
264    tcg_out32(s, insn);
265}
266
267static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
268{
269    tcg_insn_unit insn = 0;
270
271    insn = deposit32(insn, 0, 8, op);
272    insn = deposit32(insn, 8, 4, r0);
273    insn = deposit32(insn, 12, 4, r1);
274    tcg_out32(s, insn);
275}
276
277static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
278                           TCGReg r0, TCGReg r1, TCGArg m2)
279{
280    tcg_insn_unit insn = 0;
281
282    tcg_debug_assert(m2 == extract32(m2, 0, 16));
283    insn = deposit32(insn, 0, 8, op);
284    insn = deposit32(insn, 8, 4, r0);
285    insn = deposit32(insn, 12, 4, r1);
286    insn = deposit32(insn, 16, 16, m2);
287    tcg_out32(s, insn);
288}
289
290static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
291                           TCGReg r0, TCGReg r1, TCGReg r2)
292{
293    tcg_insn_unit insn = 0;
294
295    insn = deposit32(insn, 0, 8, op);
296    insn = deposit32(insn, 8, 4, r0);
297    insn = deposit32(insn, 12, 4, r1);
298    insn = deposit32(insn, 16, 4, r2);
299    tcg_out32(s, insn);
300}
301
302static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
303                           TCGReg r0, TCGReg r1, intptr_t i2)
304{
305    tcg_insn_unit insn = 0;
306
307    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
308    insn = deposit32(insn, 0, 8, op);
309    insn = deposit32(insn, 8, 4, r0);
310    insn = deposit32(insn, 12, 4, r1);
311    insn = deposit32(insn, 16, 16, i2);
312    tcg_out32(s, insn);
313}
314
315static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
316                            TCGReg r1, uint8_t b2, uint8_t b3)
317{
318    tcg_insn_unit insn = 0;
319
320    tcg_debug_assert(b2 == extract32(b2, 0, 6));
321    tcg_debug_assert(b3 == extract32(b3, 0, 6));
322    insn = deposit32(insn, 0, 8, op);
323    insn = deposit32(insn, 8, 4, r0);
324    insn = deposit32(insn, 12, 4, r1);
325    insn = deposit32(insn, 16, 6, b2);
326    insn = deposit32(insn, 22, 6, b3);
327    tcg_out32(s, insn);
328}
329
330static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
331                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
332{
333    tcg_insn_unit insn = 0;
334
335    insn = deposit32(insn, 0, 8, op);
336    insn = deposit32(insn, 8, 4, r0);
337    insn = deposit32(insn, 12, 4, r1);
338    insn = deposit32(insn, 16, 4, r2);
339    insn = deposit32(insn, 20, 4, c3);
340    tcg_out32(s, insn);
341}
342
343static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
344                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
345{
346    tcg_insn_unit insn = 0;
347
348    tcg_debug_assert(b3 == extract32(b3, 0, 6));
349    tcg_debug_assert(b4 == extract32(b4, 0, 6));
350    insn = deposit32(insn, 0, 8, op);
351    insn = deposit32(insn, 8, 4, r0);
352    insn = deposit32(insn, 12, 4, r1);
353    insn = deposit32(insn, 16, 4, r2);
354    insn = deposit32(insn, 20, 6, b3);
355    insn = deposit32(insn, 26, 6, b4);
356    tcg_out32(s, insn);
357}
358
359static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
360                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
361{
362    tcg_insn_unit insn = 0;
363
364    insn = deposit32(insn, 0, 8, op);
365    insn = deposit32(insn, 8, 4, r0);
366    insn = deposit32(insn, 12, 4, r1);
367    insn = deposit32(insn, 16, 4, r2);
368    insn = deposit32(insn, 20, 4, r3);
369    tcg_out32(s, insn);
370}
371
372static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
373                              TCGReg r0, TCGReg r1, TCGReg r2,
374                              TCGReg r3, TCGReg r4, TCGCond c5)
375{
376    tcg_insn_unit insn = 0;
377
378    insn = deposit32(insn, 0, 8, op);
379    insn = deposit32(insn, 8, 4, r0);
380    insn = deposit32(insn, 12, 4, r1);
381    insn = deposit32(insn, 16, 4, r2);
382    insn = deposit32(insn, 20, 4, r3);
383    insn = deposit32(insn, 24, 4, r4);
384    insn = deposit32(insn, 28, 4, c5);
385    tcg_out32(s, insn);
386}
387
388static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
389                              TCGReg r0, TCGReg r1, TCGReg r2,
390                              TCGReg r3, TCGReg r4, TCGReg r5)
391{
392    tcg_insn_unit insn = 0;
393
394    insn = deposit32(insn, 0, 8, op);
395    insn = deposit32(insn, 8, 4, r0);
396    insn = deposit32(insn, 12, 4, r1);
397    insn = deposit32(insn, 16, 4, r2);
398    insn = deposit32(insn, 20, 4, r3);
399    insn = deposit32(insn, 24, 4, r4);
400    insn = deposit32(insn, 28, 4, r5);
401    tcg_out32(s, insn);
402}
403
/*
 * Emit a load or store of @val at @base + @offset.  Offsets wider than
 * the signed 16-bit insn field are materialized into TCG_REG_TMP first.
 */
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        /* TMP = offset + base; then access TMP with a zero offset. */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
416
417static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
418                       intptr_t offset)
419{
420    switch (type) {
421    case TCG_TYPE_I32:
422        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
423        break;
424#if TCG_TARGET_REG_BITS == 64
425    case TCG_TYPE_I64:
426        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
427        break;
428#endif
429    default:
430        g_assert_not_reached();
431    }
432}
433
/* Register-to-register move; always representable, so always true. */
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}
439
/*
 * Load constant @arg into @ret.  Small constants use the 20-bit
 * immediate form (tci_movi); anything larger goes through the
 * constant pool (tci_movl), resolved by the 20-bit reloc.
 */
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        /* Truncate to the 32-bit value before the range test below. */
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        /* Pool entry filled by new_pool_label; insn gets only op + reg. */
        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
466
467static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
468                            TCGReg rs, unsigned pos, unsigned len)
469{
470    TCGOpcode opc = type == TCG_TYPE_I32 ?
471                    INDEX_op_extract_i32 :
472                    INDEX_op_extract_i64;
473    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
474}
475
476static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
477                             TCGReg rs, unsigned pos, unsigned len)
478{
479    TCGOpcode opc = type == TCG_TYPE_I32 ?
480                    INDEX_op_sextract_i32 :
481                    INDEX_op_sextract_i64;
482    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
483}
484
/* Sign-extend the low 8 bits of rs into rd. */
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}
489
/* Zero-extend the low 8 bits of rs into rd. */
static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}
494
/* Sign-extend the low 16 bits of rs into rd. */
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}
499
/* Zero-extend the low 16 bits of rs into rd. */
static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}
504
/* Sign-extend the low 32 bits of rs into rd (64-bit hosts only). */
static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}
510
/* Zero-extend the low 32 bits of rs into rd (64-bit hosts only). */
static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}
516
/* Widen i32 -> i64 with sign extension. */
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}
521
/* Widen i32 -> i64 with zero extension. */
static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}
526
/* Extract the low 32 bits of an i64: a plain move suffices here. */
static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}
532
/* Register exchange is not supported by this backend. */
static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}
537
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}
544
/*
 * Emit a helper call.  The function pointer and its libffi call
 * descriptor go into the constant pool as a pair; the insn itself
 * carries only the opcode and a code for the return-value size.
 */
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        /* Encode the return size: 4/8/16 bytes -> 1/2/3. */
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    /* Two pool words: target address and ffi_cif pointer. */
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
565
/* Expand to the _i32 (and, on 64-bit hosts, _i64) case labels for @x. */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
577
/* Exit the translation block, returning @arg to the caller. */
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}
582
/* Chain to another TB via the per-TB jump-target slot. */
static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}
589
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
595
/* a0 = a1 + a2 */
static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};
606
/* a0 = a1 & a2 */
static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};
617
/* a0 = a1 & ~a2 */
static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};
628
629static void tgen_clz(TCGContext *s, TCGType type,
630                      TCGReg a0, TCGReg a1, TCGReg a2)
631{
632    TCGOpcode opc = (type == TCG_TYPE_I32
633                     ? INDEX_op_tci_clz32
634                     : INDEX_op_clz);
635    tcg_out_op_rrr(s, opc, a0, a1, a2);
636}
637
638static const TCGOutOpBinary outop_clz = {
639    .base.static_constraint = C_O1_I2(r, r, r),
640    .out_rrr = tgen_clz,
641};
642
643static void tgen_ctz(TCGContext *s, TCGType type,
644                      TCGReg a0, TCGReg a1, TCGReg a2)
645{
646    TCGOpcode opc = (type == TCG_TYPE_I32
647                     ? INDEX_op_tci_ctz32
648                     : INDEX_op_ctz);
649    tcg_out_op_rrr(s, opc, a0, a1, a2);
650}
651
652static const TCGOutOpBinary outop_ctz = {
653    .base.static_constraint = C_O1_I2(r, r, r),
654    .out_rrr = tgen_ctz,
655};
656
657static void tgen_divs(TCGContext *s, TCGType type,
658                      TCGReg a0, TCGReg a1, TCGReg a2)
659{
660    TCGOpcode opc = (type == TCG_TYPE_I32
661                     ? INDEX_op_tci_divs32
662                     : INDEX_op_divs);
663    tcg_out_op_rrr(s, opc, a0, a1, a2);
664}
665
666static const TCGOutOpBinary outop_divs = {
667    .base.static_constraint = C_O1_I2(r, r, r),
668    .out_rrr = tgen_divs,
669};
670
/* Combined quotient/remainder form not provided. */
static const TCGOutOpDivRem outop_divs2 = {
    .base.static_constraint = C_NotImplemented,
};
674
675static void tgen_divu(TCGContext *s, TCGType type,
676                      TCGReg a0, TCGReg a1, TCGReg a2)
677{
678    TCGOpcode opc = (type == TCG_TYPE_I32
679                     ? INDEX_op_tci_divu32
680                     : INDEX_op_divu);
681    tcg_out_op_rrr(s, opc, a0, a1, a2);
682}
683
684static const TCGOutOpBinary outop_divu = {
685    .base.static_constraint = C_O1_I2(r, r, r),
686    .out_rrr = tgen_divu,
687};
688
/* Combined quotient/remainder form not provided. */
static const TCGOutOpDivRem outop_divu2 = {
    .base.static_constraint = C_NotImplemented,
};
692
/* a0 = ~(a1 ^ a2) */
static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};
703
/* a0 = a1 * a2 */
static void tgen_mul(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_mul, a0, a1, a2);
}

static const TCGOutOpBinary outop_mul = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_mul,
};
714
/* High-part multiplies are not provided; mul[su]2 is used instead. */
static const TCGOutOpBinary outop_mulsh = {
    .base.static_constraint = C_NotImplemented,
};

static const TCGOutOpBinary outop_muluh = {
    .base.static_constraint = C_NotImplemented,
};
722
/* a0 = ~(a1 & a2) */
static void tgen_nand(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};
733
/* a0 = ~(a1 | a2) */
static void tgen_nor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
}

static const TCGOutOpBinary outop_nor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nor,
};
744
/* a0 = a1 | a2 */
static void tgen_or(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};
755
/* a0 = a1 | ~a2 */
static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};
766
767static void tgen_rems(TCGContext *s, TCGType type,
768                      TCGReg a0, TCGReg a1, TCGReg a2)
769{
770    TCGOpcode opc = (type == TCG_TYPE_I32
771                     ? INDEX_op_tci_rems32
772                     : INDEX_op_rems);
773    tcg_out_op_rrr(s, opc, a0, a1, a2);
774}
775
776static const TCGOutOpBinary outop_rems = {
777    .base.static_constraint = C_O1_I2(r, r, r),
778    .out_rrr = tgen_rems,
779};
780
781static void tgen_remu(TCGContext *s, TCGType type,
782                      TCGReg a0, TCGReg a1, TCGReg a2)
783{
784    TCGOpcode opc = (type == TCG_TYPE_I32
785                     ? INDEX_op_tci_remu32
786                     : INDEX_op_remu);
787    tcg_out_op_rrr(s, opc, a0, a1, a2);
788}
789
790static const TCGOutOpBinary outop_remu = {
791    .base.static_constraint = C_O1_I2(r, r, r),
792    .out_rrr = tgen_remu,
793};
794
795static void tgen_rotl(TCGContext *s, TCGType type,
796                     TCGReg a0, TCGReg a1, TCGReg a2)
797{
798    TCGOpcode opc = (type == TCG_TYPE_I32
799                     ? INDEX_op_tci_rotl32
800                     : INDEX_op_rotl);
801    tcg_out_op_rrr(s, opc, a0, a1, a2);
802}
803
804static const TCGOutOpBinary outop_rotl = {
805    .base.static_constraint = C_O1_I2(r, r, r),
806    .out_rrr = tgen_rotl,
807};
808
809static void tgen_rotr(TCGContext *s, TCGType type,
810                     TCGReg a0, TCGReg a1, TCGReg a2)
811{
812    TCGOpcode opc = (type == TCG_TYPE_I32
813                     ? INDEX_op_tci_rotr32
814                     : INDEX_op_rotr);
815    tcg_out_op_rrr(s, opc, a0, a1, a2);
816}
817
818static const TCGOutOpBinary outop_rotr = {
819    .base.static_constraint = C_O1_I2(r, r, r),
820    .out_rrr = tgen_rotr,
821};
822
/* Arithmetic shift right. */
static void tgen_sar(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    if (type < TCG_TYPE_REG) {
        /* For a 32-bit op on a 64-bit host, sign-extend the input into
           TMP first so the full-register shift produces correct bits. */
        tcg_out_ext32s(s, TCG_REG_TMP, a1);
        a1 = TCG_REG_TMP;
    }
    tcg_out_op_rrr(s, INDEX_op_sar, a0, a1, a2);
}

static const TCGOutOpBinary outop_sar = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sar,
};
837
/* Shift left; no extension needed as low bits are unaffected by width. */
static void tgen_shl(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_shl, a0, a1, a2);
}

static const TCGOutOpBinary outop_shl = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_shl,
};
848
/* Logical shift right. */
static void tgen_shr(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    if (type < TCG_TYPE_REG) {
        /* For a 32-bit op on a 64-bit host, zero-extend the input into
           TMP first so the full-register shift produces correct bits. */
        tcg_out_ext32u(s, TCG_REG_TMP, a1);
        a1 = TCG_REG_TMP;
    }
    tcg_out_op_rrr(s, INDEX_op_shr, a0, a1, a2);
}

static const TCGOutOpBinary outop_shr = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_shr,
};
863
/* a0 = a1 - a2 */
static void tgen_sub(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
}

static const TCGOutOpSubtract outop_sub = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sub,
};
874
/* a0 = a1 ^ a2 */
static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};
885
/* a0 = -a1 */
static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
}

static const TCGOutOpUnary outop_neg = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_neg,
};
895
/* a0 = ~a1 */
static void tgen_not(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_not, a0, a1);
}

static const TCGOutOpUnary outop_not = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_not,
};
905
906
/*
 * Main opcode dispatcher: encode one TCG op into the TCI bytecode
 * stream, choosing the insn format appropriate to its operands.
 */
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        /* No direct brcond insn: materialize the comparison into TMP,
           then branch on TMP. */
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            /* Caller requested sign extension of the swapped value. */
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        /* As for brcond: compare into TMP, then branch on TMP. */
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            /* 64-bit value is a register pair; oi won't fit, pass via TMP. */
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            /* Zero-extend the 32-bit guest address before use. */
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}
1035
1036static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
1037                       intptr_t offset)
1038{
1039    switch (type) {
1040    case TCG_TYPE_I32:
1041        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
1042        break;
1043#if TCG_TARGET_REG_BITS == 64
1044    case TCG_TYPE_I64:
1045        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
1046        break;
1047#endif
1048    default:
1049        g_assert_not_reached();
1050    }
1051}
1052
/* Store-immediate is not supported; caller falls back to movi + st. */
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}
1058
/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    /* No constant constraints beyond the generic TCG_CT_CONST. */
    return ct & TCG_CT_CONST;
}
1065
1066static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
1067{
1068    memset(p, 0, sizeof(*p) * count);
1069}
1070
/* One-time backend initialization: register sets and the frame layout. */
static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
1097
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* The interpreter needs no native prologue/epilogue. */
}
1102
/* Hook called at the start of each translation block. */
static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}
1107
/* The interpreter handles byte-swapped memory ops for every MemOp. */
bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}
1112
/* No native slow path: guest loads are handled inside the interpreter. */
static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
1117
/* No native slow path: guest stores are handled inside the interpreter. */
static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
1122