/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL

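/*
 * Operand-constraint sketch (the generic TCG convention, not specific to
 * this file): C_Ox_Iy(...) names x output and y input operands, and each
 * 'r' allows the operand in any general register.  So C_O1_I2(r, r, r)
 * below describes an op with one register output and two register inputs.
 */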
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
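
/*
 * Worked example (a sketch): type is always 20 here, so the displacement
 * lands in bits 12-31 of the instruction word (32 - 20 = 12).  A target
 * two insns (8 bytes) past the end of the insn being patched gives
 * diff = 8, which fits in 20 signed bits, so
 * deposit32(insn, 12, 20, 8) rewrites the operand field in place.
 */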

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
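
/*
 * Encoding summary (a sketch inferred from the deposit32() calls below;
 * the interpreter in tci.c is the authoritative decoder):
 *   bits  0-7  : opcode (hence the tcg_op_defs_max <= UINT8_MAX assert)
 *   bits  8-11 : first register operand, where present
 *   bits 12-15 : second register operand, or the start of a 20-bit
 *                signed immediate / branch displacement (bits 12-31)
 *   bits 16+   : further registers, 16-bit immediates, condition codes,
 *                or 6-bit position/length fields, depending on format
 */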

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
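
/*
 * Note: the rrs format carries only a 16-bit signed offset.  Wider
 * offsets are first materialized into TCG_REG_TMP via tcg_out_movi and
 * added to the base register, after which the load/store uses offset 0.
 */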

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
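
/*
 * Illustration: an argument such as 0x7ffff satisfies the signed 20-bit
 * check and becomes a single tci_movi, while 0x80000 does not and falls
 * back to tci_movl, whose operand is a constant-pool entry resolved by
 * the 20-bit relocation in patch_reloc().
 */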

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
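
/*
 * The 4-bit 'which' field encodes the helper's return size for the
 * interpreter: 0 = void, 1 = 32-bit, 2 = 64-bit, 3 = 128-bit, since
 * ctz32(4) - 1 == 1, ctz32(8) - 1 == 2 and ctz32(16) - 1 == 3.
 */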

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
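
/*
 * Example expansion: on a 64-bit host, CASE_32_64(ld8u) pastes to
 *     case INDEX_op_ld8u_i64:
 *     case INDEX_op_ld8u_i32:
 * whereas on a 32-bit host only the _i32 case is emitted and CASE_64
 * expands to nothing.
 */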

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* Indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do. */
}
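
/*
 * Because tcg_out_goto_tb encodes the address of the jump-target slot
 * rather than the target itself, and the interpreter reloads that slot
 * on every execution, retargeting a TB only requires the slot to be
 * rewritten; no code patching is needed here.
 */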

static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};

static void tgen_or(TCGContext *s, TCGType type,
                    TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};

static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};
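
/*
 * Each outop_* descriptor above pairs its emitter with a register-only
 * constraint set; since no constant constraints are offered, the common
 * layer always hands the tgen_* helpers three registers via .out_rrr.
 */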

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend and ignore the high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}
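
/*
 * TCI registers no target-specific constant constraints, so only the
 * generic TCG_CT_CONST bit can match: any value is accepted wherever a
 * constant operand is permitted at all.
 */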

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
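
/*
 * Sketch of the clobber mask above: 128 / TCG_TARGET_REG_BITS is 2 on a
 * 64-bit host and 4 on a 32-bit host, so exactly R0-R1 or R0-R3 are
 * marked call-clobbered, matching the "either 2 or 4" comment in
 * tcg_target_reg_alloc_order.
 */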

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* Nothing to do. */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
943