xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision 0dd07ee1122abaf1adb4f1e00a8e0b89937f53bd)
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL

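/*
 * Every operand of every opcode accepted by the interpreter uses the
 * single general-register constraint "r", so the sets below only differ
 * in the number of outputs and inputs.  Anything that reaches the
 * default case is not implemented by the TCI backend.
 */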
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

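/*
 * The only relocation used by this backend is a signed 20-bit pc-relative
 * displacement stored in the top 20 bits (bits [31:12]) of an instruction
 * word.  The displacement is in bytes, measured from the end of the word
 * being patched (code_ptr + 1).
 */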
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

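/*
 * Accesses relative to the interpreter's stack pointer must stay inside
 * the static area reserved for call arguments and spilled temporaries;
 * see the tcg_set_frame() call in tcg_target_init() below.
 */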
static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

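/*
 * Each TCI instruction is a single 32-bit word.  The emitters below share
 * a common layout, sketched here from the deposit32() calls (not a formal
 * encoding description):
 *
 *    31            12 11    8 7      0
 *   +----------------+-------+--------+
 *   | imm20 / disp20 |  r0   | opcode |      ri / rl / p formats
 *   +----------------+-------+--------+
 *
 * Register operands occupy successive 4-bit fields starting at bit 8;
 * other formats pack a condition code, two 6-bit position/length fields,
 * or a 16-bit offset/memop index into the upper bits instead.
 */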
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

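/*
 * Emit a load or store.  An offset that does not fit the signed 16-bit
 * field of the rrs format is first materialized into TCG_REG_TMP and
 * added to the base register.
 */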
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

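/*
 * Constants that fit in a signed 20-bit immediate are emitted with
 * tci_movi; anything larger goes into the constant pool and is loaded
 * with tci_movl via a 20-bit pc-relative reference.
 */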
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

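/*
 * Calls go through libffi.  The instruction encodes a size class for the
 * return value in the first register field (0 = void, 1/2/3 = 4/8/16
 * bytes, from ctz32(size) - 1) and a 20-bit reference to a constant-pool
 * pair holding the function pointer and its ffi_cif.
 */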
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

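/*
 * Operations whose result depends on the operand width (clz, ctz,
 * division, remainder, rotates, setcond, movcond) have dedicated tci_*32
 * opcodes for TCG_TYPE_I32; the generic opcode operates at full register
 * width.
 */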
static void tgen_clz(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_clz32
                     : INDEX_op_clz);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_clz = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_clz,
};

static void tgen_ctz(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_ctz32
                     : INDEX_op_ctz);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_ctz = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_ctz,
};

static void tgen_divs(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_divs32
                     : INDEX_op_divs);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_divs = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_divs,
};

static const TCGOutOpDivRem outop_divs2 = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_divu(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_divu32
                     : INDEX_op_divu);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_divu = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_divu,
};

static const TCGOutOpDivRem outop_divu2 = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};

static void tgen_mul(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_mul, a0, a1, a2);
}

static const TCGOutOpBinary outop_mul = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_mul,
};

static TCGConstraintSetIndex cset_mul2(TCGType type, unsigned flags)
{
    return type == TCG_TYPE_REG ? C_O2_I2(r, r, r, r) : C_NotImplemented;
}

static void tgen_muls2(TCGContext *s, TCGType type,
                       TCGReg a0, TCGReg a1, TCGReg a2, TCGReg a3)
{
    tcg_out_op_rrrr(s, INDEX_op_muls2, a0, a1, a2, a3);
}

static const TCGOutOpMul2 outop_muls2 = {
    .base.static_constraint = C_Dynamic,
    .base.dynamic_constraint = cset_mul2,
    .out_rrrr = tgen_muls2,
};

static const TCGOutOpBinary outop_mulsh = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_mulu2(TCGContext *s, TCGType type,
                       TCGReg a0, TCGReg a1, TCGReg a2, TCGReg a3)
{
    tcg_out_op_rrrr(s, INDEX_op_mulu2, a0, a1, a2, a3);
}

static const TCGOutOpMul2 outop_mulu2 = {
    .base.static_constraint = C_Dynamic,
    .base.dynamic_constraint = cset_mul2,
    .out_rrrr = tgen_mulu2,
};

static const TCGOutOpBinary outop_muluh = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_nand(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};

static void tgen_nor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
}

static const TCGOutOpBinary outop_nor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nor,
};

static void tgen_or(TCGContext *s, TCGType type,
                    TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};

static void tgen_rems(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_rems32
                     : INDEX_op_rems);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_rems = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_rems,
};

static void tgen_remu(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_remu32
                     : INDEX_op_remu);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_remu = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_remu,
};

static void tgen_rotl(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_rotl32
                     : INDEX_op_rotl);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_rotl = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_rotl,
};

static void tgen_rotr(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_rotr32
                     : INDEX_op_rotr);
    tcg_out_op_rrr(s, opc, a0, a1, a2);
}

static const TCGOutOpBinary outop_rotr = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_rotr,
};

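/*
 * Right shifts have no separate 32-bit opcode.  For a 32-bit shift on a
 * 64-bit host, the input is widened first: sign-extended here for sar,
 * zero-extended in tgen_shr below, so that the register-width shift
 * produces the correct 32-bit result.  Left shifts need no widening
 * because only the low bits of the input matter.
 */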
static void tgen_sar(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    if (type < TCG_TYPE_REG) {
        tcg_out_ext32s(s, TCG_REG_TMP, a1);
        a1 = TCG_REG_TMP;
    }
    tcg_out_op_rrr(s, INDEX_op_sar, a0, a1, a2);
}

static const TCGOutOpBinary outop_sar = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sar,
};

static void tgen_shl(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_shl, a0, a1, a2);
}

static const TCGOutOpBinary outop_shl = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_shl,
};

static void tgen_shr(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    if (type < TCG_TYPE_REG) {
        tcg_out_ext32u(s, TCG_REG_TMP, a1);
        a1 = TCG_REG_TMP;
    }
    tcg_out_op_rrr(s, INDEX_op_shr, a0, a1, a2);
}

static const TCGOutOpBinary outop_shr = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_shr,
};

static void tgen_sub(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
}

static const TCGOutOpSubtract outop_sub = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sub,
};

static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};

static void tgen_ctpop(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_ctpop, a0, a1);
}

static TCGConstraintSetIndex cset_ctpop(TCGType type, unsigned flags)
{
    return type == TCG_TYPE_REG ? C_O1_I1(r, r) : C_NotImplemented;
}

static const TCGOutOpUnary outop_ctpop = {
    .base.static_constraint = C_Dynamic,
    .base.dynamic_constraint = cset_ctpop,
    .out_rr = tgen_ctpop,
};

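/*
 * The base tci bswap16 zero-extends its result; when the caller asks for
 * a sign-extended result (TCG_BSWAP_OS), the extension is emitted as a
 * separate sextract.
 */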
static void tgen_bswap16(TCGContext *s, TCGType type,
                         TCGReg a0, TCGReg a1, unsigned flags)
{
    tcg_out_op_rr(s, INDEX_op_bswap16, a0, a1);
    if (flags & TCG_BSWAP_OS) {
        tcg_out_sextract(s, TCG_TYPE_REG, a0, a0, 0, 16);
    }
}

static const TCGOutOpBswap outop_bswap16 = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_bswap16,
};

static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
}

static const TCGOutOpUnary outop_neg = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_neg,
};

static void tgen_not(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_not, a0, a1);
}

static const TCGOutOpUnary outop_not = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_not,
};

static void tgen_setcond(TCGContext *s, TCGType type, TCGCond cond,
                         TCGReg dest, TCGReg arg1, TCGReg arg2)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_setcond32
                     : INDEX_op_setcond);
    tcg_out_op_rrrc(s, opc, dest, arg1, arg2, cond);
}

static const TCGOutOpSetcond outop_setcond = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_setcond,
};

static void tgen_negsetcond(TCGContext *s, TCGType type, TCGCond cond,
                            TCGReg dest, TCGReg arg1, TCGReg arg2)
{
    tgen_setcond(s, type, cond, dest, arg1, arg2);
    tgen_neg(s, type, dest, dest);
}

static const TCGOutOpSetcond outop_negsetcond = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_negsetcond,
};

static void tgen_brcond(TCGContext *s, TCGType type, TCGCond cond,
                        TCGReg arg0, TCGReg arg1, TCGLabel *l)
{
    tgen_setcond(s, type, cond, TCG_REG_TMP, arg0, arg1);
    tcg_out_op_rl(s, INDEX_op_brcond, TCG_REG_TMP, l);
}

static const TCGOutOpBrcond outop_brcond = {
    .base.static_constraint = C_O0_I2(r, r),
    .out_rr = tgen_brcond,
};

static void tgen_movcond(TCGContext *s, TCGType type, TCGCond cond,
                         TCGReg ret, TCGReg c1, TCGArg c2, bool const_c2,
                         TCGArg vt, bool const_vt, TCGArg vf, bool const_vf)
{
    TCGOpcode opc = (type == TCG_TYPE_I32
                     ? INDEX_op_tci_movcond32
                     : INDEX_op_movcond);
    tcg_out_op_rrrrrc(s, opc, ret, c1, c2, vt, vf, cond);
}

static const TCGOutOpMovcond outop_movcond = {
    .base.static_constraint = C_O1_I4(r, r, r, r, r),
    .out = tgen_movcond,
};

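/*
 * On 32-bit hosts, a 64-bit comparison is done with setcond2_i32 on the
 * low/high register pairs; the boolean result lands in TCG_REG_TMP and
 * is then used for the conditional branch.
 */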
static void tgen_brcond2(TCGContext *s, TCGCond cond, TCGReg al, TCGReg ah,
                         TCGArg bl, bool const_bl,
                         TCGArg bh, bool const_bh, TCGLabel *l)
{
    tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                      al, ah, bl, bh, cond);
    tcg_out_op_rl(s, INDEX_op_brcond, TCG_REG_TMP, l);
}

#if TCG_TARGET_REG_BITS != 32
__attribute__((unused))
#endif
static const TCGOutOpBrcond2 outop_brcond2 = {
    .base.static_constraint = C_O0_I4(r, r, r, r),
    .out = tgen_brcond2,
};

static void tgen_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                          TCGReg al, TCGReg ah,
                          TCGArg bl, bool const_bl,
                          TCGArg bh, bool const_bh)
{
    tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, ret, al, ah, bl, bh, cond);
}

#if TCG_TARGET_REG_BITS != 32
__attribute__((unused))
#endif
static const TCGOutOpSetcond2 outop_setcond2 = {
    .base.static_constraint = C_O1_I4(r, r, r, r, r),
    .out = tgen_setcond2,
};

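/*
 * Opcodes still routed through tcg_out_op rather than the TCGOutOp*
 * tables above: loads and stores, deposit/extract, bswap, double-word
 * add/sub, guest memory accesses (qemu_ld/st) and the memory barrier.
 */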
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}