xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision 59379a45af1f4d62fc8c1ae0ddee988f47075787)
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
/* 32-bit host: align 64-bit (and wider) arguments to even stack slots. */
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
/* 64-bit host: no special slot alignment required. */
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
38
/*
 * Return the register-constraint set for opcode @op.
 * TCI places no restrictions on register choice, so every operand is
 * simply "r"; the only variation is the operand count, plus the 32-bit
 * host cases where a 64-bit value occupies a register pair.
 */
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    /* Double-word arithmetic: two outputs, four inputs. */
    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    /* On 32-bit hosts a 64-bit value needs a register pair. */
    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
159
/* Register allocation preference order for the interpreter "registers". */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};
179
/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };
182
/*
 * Map call-return slot @slot to the register holding that part of the
 * return value.  The interpreter assigns up to a 128-bit return value
 * to consecutive registers starting at R0.
 */
static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}
189
#ifdef CONFIG_DEBUG_TCG
/* Register names used only for debug dumps. */
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif
210
211static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
212                        intptr_t value, intptr_t addend)
213{
214    intptr_t diff = value - (intptr_t)(code_ptr + 1);
215
216    tcg_debug_assert(addend == 0);
217    tcg_debug_assert(type == 20);
218
219    if (diff == sextract32(diff, 0, type)) {
220        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
221        return true;
222    }
223    return false;
224}
225
226static void stack_bounds_check(TCGReg base, intptr_t offset)
227{
228    if (base == TCG_REG_CALL_STACK) {
229        tcg_debug_assert(offset >= 0);
230        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
231                                   TCG_STATIC_FRAME_SIZE));
232    }
233}
234
235static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
236{
237    tcg_insn_unit insn = 0;
238
239    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
240    insn = deposit32(insn, 0, 8, op);
241    tcg_out32(s, insn);
242}
243
244static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
245{
246    tcg_insn_unit insn = 0;
247    intptr_t diff;
248
249    /* Special case for exit_tb: map null -> 0. */
250    if (p0 == NULL) {
251        diff = 0;
252    } else {
253        diff = p0 - (void *)(s->code_ptr + 1);
254        tcg_debug_assert(diff != 0);
255        if (diff != sextract32(diff, 0, 20)) {
256            tcg_raise_tb_overflow(s);
257        }
258    }
259    insn = deposit32(insn, 0, 8, op);
260    insn = deposit32(insn, 12, 20, diff);
261    tcg_out32(s, insn);
262}
263
264static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
265{
266    tcg_insn_unit insn = 0;
267
268    insn = deposit32(insn, 0, 8, op);
269    insn = deposit32(insn, 8, 4, r0);
270    tcg_out32(s, insn);
271}
272
273static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
274{
275    tcg_out32(s, (uint8_t)op);
276}
277
278static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
279{
280    tcg_insn_unit insn = 0;
281
282    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
283    insn = deposit32(insn, 0, 8, op);
284    insn = deposit32(insn, 8, 4, r0);
285    insn = deposit32(insn, 12, 20, i1);
286    tcg_out32(s, insn);
287}
288
289static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
290{
291    tcg_insn_unit insn = 0;
292
293    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
294    insn = deposit32(insn, 0, 8, op);
295    insn = deposit32(insn, 8, 4, r0);
296    tcg_out32(s, insn);
297}
298
299static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
300{
301    tcg_insn_unit insn = 0;
302
303    insn = deposit32(insn, 0, 8, op);
304    insn = deposit32(insn, 8, 4, r0);
305    insn = deposit32(insn, 12, 4, r1);
306    tcg_out32(s, insn);
307}
308
309static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
310                           TCGReg r0, TCGReg r1, TCGArg m2)
311{
312    tcg_insn_unit insn = 0;
313
314    tcg_debug_assert(m2 == extract32(m2, 0, 16));
315    insn = deposit32(insn, 0, 8, op);
316    insn = deposit32(insn, 8, 4, r0);
317    insn = deposit32(insn, 12, 4, r1);
318    insn = deposit32(insn, 16, 16, m2);
319    tcg_out32(s, insn);
320}
321
322static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
323                           TCGReg r0, TCGReg r1, TCGReg r2)
324{
325    tcg_insn_unit insn = 0;
326
327    insn = deposit32(insn, 0, 8, op);
328    insn = deposit32(insn, 8, 4, r0);
329    insn = deposit32(insn, 12, 4, r1);
330    insn = deposit32(insn, 16, 4, r2);
331    tcg_out32(s, insn);
332}
333
334static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
335                           TCGReg r0, TCGReg r1, intptr_t i2)
336{
337    tcg_insn_unit insn = 0;
338
339    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
340    insn = deposit32(insn, 0, 8, op);
341    insn = deposit32(insn, 8, 4, r0);
342    insn = deposit32(insn, 12, 4, r1);
343    insn = deposit32(insn, 16, 16, i2);
344    tcg_out32(s, insn);
345}
346
347static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
348                            TCGReg r1, uint8_t b2, uint8_t b3)
349{
350    tcg_insn_unit insn = 0;
351
352    tcg_debug_assert(b2 == extract32(b2, 0, 6));
353    tcg_debug_assert(b3 == extract32(b3, 0, 6));
354    insn = deposit32(insn, 0, 8, op);
355    insn = deposit32(insn, 8, 4, r0);
356    insn = deposit32(insn, 12, 4, r1);
357    insn = deposit32(insn, 16, 6, b2);
358    insn = deposit32(insn, 22, 6, b3);
359    tcg_out32(s, insn);
360}
361
362static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
363                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
364{
365    tcg_insn_unit insn = 0;
366
367    insn = deposit32(insn, 0, 8, op);
368    insn = deposit32(insn, 8, 4, r0);
369    insn = deposit32(insn, 12, 4, r1);
370    insn = deposit32(insn, 16, 4, r2);
371    insn = deposit32(insn, 20, 4, c3);
372    tcg_out32(s, insn);
373}
374
375static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
376                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
377{
378    tcg_insn_unit insn = 0;
379
380    tcg_debug_assert(b3 == extract32(b3, 0, 6));
381    tcg_debug_assert(b4 == extract32(b4, 0, 6));
382    insn = deposit32(insn, 0, 8, op);
383    insn = deposit32(insn, 8, 4, r0);
384    insn = deposit32(insn, 12, 4, r1);
385    insn = deposit32(insn, 16, 4, r2);
386    insn = deposit32(insn, 20, 6, b3);
387    insn = deposit32(insn, 26, 6, b4);
388    tcg_out32(s, insn);
389}
390
391static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
392                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
393{
394    tcg_insn_unit insn = 0;
395
396    insn = deposit32(insn, 0, 8, op);
397    insn = deposit32(insn, 8, 4, r0);
398    insn = deposit32(insn, 12, 4, r1);
399    insn = deposit32(insn, 16, 4, r2);
400    insn = deposit32(insn, 20, 4, r3);
401    tcg_out32(s, insn);
402}
403
404static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
405                              TCGReg r0, TCGReg r1, TCGReg r2,
406                              TCGReg r3, TCGReg r4, TCGCond c5)
407{
408    tcg_insn_unit insn = 0;
409
410    insn = deposit32(insn, 0, 8, op);
411    insn = deposit32(insn, 8, 4, r0);
412    insn = deposit32(insn, 12, 4, r1);
413    insn = deposit32(insn, 16, 4, r2);
414    insn = deposit32(insn, 20, 4, r3);
415    insn = deposit32(insn, 24, 4, r4);
416    insn = deposit32(insn, 28, 4, c5);
417    tcg_out32(s, insn);
418}
419
420static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
421                              TCGReg r0, TCGReg r1, TCGReg r2,
422                              TCGReg r3, TCGReg r4, TCGReg r5)
423{
424    tcg_insn_unit insn = 0;
425
426    insn = deposit32(insn, 0, 8, op);
427    insn = deposit32(insn, 8, 4, r0);
428    insn = deposit32(insn, 12, 4, r1);
429    insn = deposit32(insn, 16, 4, r2);
430    insn = deposit32(insn, 20, 4, r3);
431    insn = deposit32(insn, 24, 4, r4);
432    insn = deposit32(insn, 28, 4, r5);
433    tcg_out32(s, insn);
434}
435
/*
 * Emit a load or store of @val at @base + @offset.
 * Offsets that do not fit in the 16-bit insn field are materialized
 * into TCG_REG_TMP first; note the movi must precede the add so that
 * the temp is not clobbered while it still holds the offset.
 */
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        /* Fold the oversized offset into TCG_REG_TMP: tmp = offset + base. */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
448
449static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
450                       intptr_t offset)
451{
452    switch (type) {
453    case TCG_TYPE_I32:
454        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
455        break;
456#if TCG_TARGET_REG_BITS == 64
457    case TCG_TYPE_I64:
458        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
459        break;
460#endif
461    default:
462        g_assert_not_reached();
463    }
464}
465
/* Register-to-register move; always representable on TCI. */
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}
471
/*
 * Load constant @arg into @ret.  Small constants use an immediate movi;
 * anything wider goes through the constant pool with a pc-relative
 * tci_movl whose 20-bit offset field is filled in by the pool reloc.
 */
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        /* Normalize: a 32-bit constant is stored sign-extended. */
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        /* Fits in the 20-bit immediate field. */
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        /* Pool entry; offset field (bits 12..31) patched by the reloc. */
        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
498
499static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
500                            TCGReg rs, unsigned pos, unsigned len)
501{
502    TCGOpcode opc = type == TCG_TYPE_I32 ?
503                    INDEX_op_extract_i32 :
504                    INDEX_op_extract_i64;
505    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
506}
507
508static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
509                             TCGReg rs, unsigned pos, unsigned len)
510{
511    TCGOpcode opc = type == TCG_TYPE_I32 ?
512                    INDEX_op_sextract_i32 :
513                    INDEX_op_sextract_i64;
514    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
515}
516
/* Sign-extend the low 8 bits of rs into rd. */
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}
521
/* Zero-extend the low 8 bits of rs into rd. */
static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}
526
/* Sign-extend the low 16 bits of rs into rd. */
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}
531
/* Zero-extend the low 16 bits of rs into rd. */
static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}
536
/* Sign-extend the low 32 bits of rs into rd; 64-bit hosts only. */
static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}
542
/* Zero-extend the low 32 bits of rs into rd; 64-bit hosts only. */
static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}
548
/* i32 -> i64 widening, sign-extended. */
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}
553
/* i32 -> i64 widening, zero-extended. */
static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}
558
/* Truncate i64 -> i32: a plain move suffices on TCI. */
static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}
564
/* No register-exchange insn; the register allocator falls back to moves. */
static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}
569
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}
576
/*
 * Emit a helper call.  The function pointer and its ffi_cif are stored
 * as a pair in the constant pool (reached via the 20-bit reloc); the
 * 4-bit "which" field encodes the return-value size so the interpreter
 * knows how many result registers to assign: 0 = void, 1 = 32-bit,
 * 2 = 64-bit, 3 = 128-bit (log2(size) - 1).
 */
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
597
/*
 * Case-label helpers: CASE_32_64(x) expands to both the _i32 and _i64
 * variants on 64-bit hosts, only _i32 on 32-bit hosts; CASE_64(x)
 * expands to nothing on 32-bit hosts.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
609
/* Exit the TB, returning @arg (0 or a TB pointer) to the caller. */
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}
614
/* Chain to another TB through the per-TB jump-target slot. */
static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}
621
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
627
/* Emit add: a0 = a1 + a2. */
static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};
638
/* Emit and: a0 = a1 & a2. */
static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};
649
/* Emit andc: a0 = a1 & ~a2. */
static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};
660
/* Emit eqv: a0 = ~(a1 ^ a2). */
static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};
671
/* Emit nand: a0 = ~(a1 & a2). */
static void tgen_nand(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};
682
/* Emit or: a0 = a1 | a2. */
static void tgen_or(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};
693
/* Emit orc: a0 = a1 | ~a2. */
static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};
704
/* Emit xor: a0 = a1 ^ a2. */
static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};
715
716
/*
 * Central opcode emitter: dispatch @opc to the matching encoding helper.
 * Ops without a native TCI encoding (brcond, brcond2) are synthesized
 * from setcond plus a conditional branch through TCG_REG_TMP.
 */
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        /* Synthesize: tmp = setcond(a0, a1); branch if tmp. */
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            /* Caller wants the result sign-extended from @width bits. */
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        /* Synthesize via setcond2 + conditional branch on TCG_REG_TMP. */
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: memop index won't fit; pass it via TCG_REG_TMP. */
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            /* Guest address is 32-bit: clear the high half first. */
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}
864
865static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
866                       intptr_t offset)
867{
868    switch (type) {
869    case TCG_TYPE_I32:
870        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
871        break;
872#if TCG_TARGET_REG_BITS == 64
873    case TCG_TYPE_I64:
874        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
875        break;
876#endif
877    default:
878        g_assert_not_reached();
879    }
880}
881
/* No store-immediate insn; the generic code falls back to movi + store. */
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}
887
/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    /* TCI accepts any constant wherever a constant is allowed at all. */
    return ct & TCG_CT_CONST;
}
894
895static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
896{
897    memset(p, 0, sizeof(*p) * count);
898}
899
/* One-time backend initialization: register sets and static frame. */
static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
926
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* The interpreter needs no native prologue/epilogue. */
}
931
static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}
936
/* The interpreter handles byte-swapped memory ops for every MemOp. */
bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}
941
/* TCI generates no out-of-line load slow paths; must never be called. */
static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
946
/* TCI generates no out-of-line store slow paths; must never be called. */
static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
951