xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision 5c62d3779b8b1075782672751165c0e4f716762f)
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
/* On 32-bit hosts, align 64/128-bit arguments to even stack slots. */
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
38
/*
 * Return the operand-constraint set for @op.
 * TCI places no restrictions on register usage, so every operand is a
 * plain "r"; only the arity differs between the cases.
 */
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    /* Double-word arithmetic: two outputs, four inputs. */
    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    /* 64-bit guest memory ops need a register pair on 32-bit hosts. */
    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
151
/*
 * Register allocation preference order: hand out the high registers
 * first and keep R0..R3 for last (see comment below).
 */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};
171
/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };
174
/* Call return values occupy consecutive registers R0..R(n-1),
   covering up to 128 bits in total. */
static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}
181
#ifdef CONFIG_DEBUG_TCG
/* Register names used only by debug dumps. */
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif
202
203static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
204                        intptr_t value, intptr_t addend)
205{
206    intptr_t diff = value - (intptr_t)(code_ptr + 1);
207
208    tcg_debug_assert(addend == 0);
209    tcg_debug_assert(type == 20);
210
211    if (diff == sextract32(diff, 0, type)) {
212        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
213        return true;
214    }
215    return false;
216}
217
218static void stack_bounds_check(TCGReg base, intptr_t offset)
219{
220    if (base == TCG_REG_CALL_STACK) {
221        tcg_debug_assert(offset >= 0);
222        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
223                                   TCG_STATIC_FRAME_SIZE));
224    }
225}
226
227static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
228{
229    tcg_insn_unit insn = 0;
230
231    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
232    insn = deposit32(insn, 0, 8, op);
233    tcg_out32(s, insn);
234}
235
236static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
237{
238    tcg_insn_unit insn = 0;
239    intptr_t diff;
240
241    /* Special case for exit_tb: map null -> 0. */
242    if (p0 == NULL) {
243        diff = 0;
244    } else {
245        diff = p0 - (void *)(s->code_ptr + 1);
246        tcg_debug_assert(diff != 0);
247        if (diff != sextract32(diff, 0, 20)) {
248            tcg_raise_tb_overflow(s);
249        }
250    }
251    insn = deposit32(insn, 0, 8, op);
252    insn = deposit32(insn, 12, 20, diff);
253    tcg_out32(s, insn);
254}
255
256static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
257{
258    tcg_insn_unit insn = 0;
259
260    insn = deposit32(insn, 0, 8, op);
261    insn = deposit32(insn, 8, 4, r0);
262    tcg_out32(s, insn);
263}
264
265static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
266{
267    tcg_out32(s, (uint8_t)op);
268}
269
270static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
271{
272    tcg_insn_unit insn = 0;
273
274    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
275    insn = deposit32(insn, 0, 8, op);
276    insn = deposit32(insn, 8, 4, r0);
277    insn = deposit32(insn, 12, 20, i1);
278    tcg_out32(s, insn);
279}
280
281static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
282{
283    tcg_insn_unit insn = 0;
284
285    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
286    insn = deposit32(insn, 0, 8, op);
287    insn = deposit32(insn, 8, 4, r0);
288    tcg_out32(s, insn);
289}
290
291static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
292{
293    tcg_insn_unit insn = 0;
294
295    insn = deposit32(insn, 0, 8, op);
296    insn = deposit32(insn, 8, 4, r0);
297    insn = deposit32(insn, 12, 4, r1);
298    tcg_out32(s, insn);
299}
300
301static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
302                           TCGReg r0, TCGReg r1, TCGArg m2)
303{
304    tcg_insn_unit insn = 0;
305
306    tcg_debug_assert(m2 == extract32(m2, 0, 16));
307    insn = deposit32(insn, 0, 8, op);
308    insn = deposit32(insn, 8, 4, r0);
309    insn = deposit32(insn, 12, 4, r1);
310    insn = deposit32(insn, 16, 16, m2);
311    tcg_out32(s, insn);
312}
313
314static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
315                           TCGReg r0, TCGReg r1, TCGReg r2)
316{
317    tcg_insn_unit insn = 0;
318
319    insn = deposit32(insn, 0, 8, op);
320    insn = deposit32(insn, 8, 4, r0);
321    insn = deposit32(insn, 12, 4, r1);
322    insn = deposit32(insn, 16, 4, r2);
323    tcg_out32(s, insn);
324}
325
326static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
327                           TCGReg r0, TCGReg r1, intptr_t i2)
328{
329    tcg_insn_unit insn = 0;
330
331    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
332    insn = deposit32(insn, 0, 8, op);
333    insn = deposit32(insn, 8, 4, r0);
334    insn = deposit32(insn, 12, 4, r1);
335    insn = deposit32(insn, 16, 16, i2);
336    tcg_out32(s, insn);
337}
338
339static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
340                            TCGReg r1, uint8_t b2, uint8_t b3)
341{
342    tcg_insn_unit insn = 0;
343
344    tcg_debug_assert(b2 == extract32(b2, 0, 6));
345    tcg_debug_assert(b3 == extract32(b3, 0, 6));
346    insn = deposit32(insn, 0, 8, op);
347    insn = deposit32(insn, 8, 4, r0);
348    insn = deposit32(insn, 12, 4, r1);
349    insn = deposit32(insn, 16, 6, b2);
350    insn = deposit32(insn, 22, 6, b3);
351    tcg_out32(s, insn);
352}
353
354static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
355                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
356{
357    tcg_insn_unit insn = 0;
358
359    insn = deposit32(insn, 0, 8, op);
360    insn = deposit32(insn, 8, 4, r0);
361    insn = deposit32(insn, 12, 4, r1);
362    insn = deposit32(insn, 16, 4, r2);
363    insn = deposit32(insn, 20, 4, c3);
364    tcg_out32(s, insn);
365}
366
367static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
368                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
369{
370    tcg_insn_unit insn = 0;
371
372    tcg_debug_assert(b3 == extract32(b3, 0, 6));
373    tcg_debug_assert(b4 == extract32(b4, 0, 6));
374    insn = deposit32(insn, 0, 8, op);
375    insn = deposit32(insn, 8, 4, r0);
376    insn = deposit32(insn, 12, 4, r1);
377    insn = deposit32(insn, 16, 4, r2);
378    insn = deposit32(insn, 20, 6, b3);
379    insn = deposit32(insn, 26, 6, b4);
380    tcg_out32(s, insn);
381}
382
383static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
384                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
385{
386    tcg_insn_unit insn = 0;
387
388    insn = deposit32(insn, 0, 8, op);
389    insn = deposit32(insn, 8, 4, r0);
390    insn = deposit32(insn, 12, 4, r1);
391    insn = deposit32(insn, 16, 4, r2);
392    insn = deposit32(insn, 20, 4, r3);
393    tcg_out32(s, insn);
394}
395
396static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
397                              TCGReg r0, TCGReg r1, TCGReg r2,
398                              TCGReg r3, TCGReg r4, TCGCond c5)
399{
400    tcg_insn_unit insn = 0;
401
402    insn = deposit32(insn, 0, 8, op);
403    insn = deposit32(insn, 8, 4, r0);
404    insn = deposit32(insn, 12, 4, r1);
405    insn = deposit32(insn, 16, 4, r2);
406    insn = deposit32(insn, 20, 4, r3);
407    insn = deposit32(insn, 24, 4, r4);
408    insn = deposit32(insn, 28, 4, c5);
409    tcg_out32(s, insn);
410}
411
412static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
413                              TCGReg r0, TCGReg r1, TCGReg r2,
414                              TCGReg r3, TCGReg r4, TCGReg r5)
415{
416    tcg_insn_unit insn = 0;
417
418    insn = deposit32(insn, 0, 8, op);
419    insn = deposit32(insn, 8, 4, r0);
420    insn = deposit32(insn, 12, 4, r1);
421    insn = deposit32(insn, 16, 4, r2);
422    insn = deposit32(insn, 20, 4, r3);
423    insn = deposit32(insn, 24, 4, r4);
424    insn = deposit32(insn, 28, 4, r5);
425    tcg_out32(s, insn);
426}
427
/*
 * Emit a load or store of @val at @base+@offset.  If the offset does
 * not fit the 16-bit immediate field, materialize the absolute address
 * in TCG_REG_TMP and use a zero offset instead.
 */
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
440
441static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
442                       intptr_t offset)
443{
444    switch (type) {
445    case TCG_TYPE_I32:
446        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
447        break;
448#if TCG_TARGET_REG_BITS == 64
449    case TCG_TYPE_I64:
450        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
451        break;
452#endif
453    default:
454        g_assert_not_reached();
455    }
456}
457
/* Register-to-register move; always succeeds on TCI. */
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}
463
/*
 * Load the constant @arg into @ret.  Small constants use the 20-bit
 * immediate form (tci_movi); larger ones are placed in the constant
 * pool and loaded pc-relative (tci_movl).
 */
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        /* Normalize 32-bit constants on a 64-bit host. */
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        /* The 20-bit field receives the pool displacement via reloc. */
        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
490
491static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
492                            TCGReg rs, unsigned pos, unsigned len)
493{
494    TCGOpcode opc = type == TCG_TYPE_I32 ?
495                    INDEX_op_extract_i32 :
496                    INDEX_op_extract_i64;
497    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
498}
499
500static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
501                             TCGReg rs, unsigned pos, unsigned len)
502{
503    TCGOpcode opc = type == TCG_TYPE_I32 ?
504                    INDEX_op_sextract_i32 :
505                    INDEX_op_sextract_i64;
506    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
507}
508
/* Sign-extend the low 8 bits of @rs into @rd. */
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

/* Zero-extend the low 8 bits of @rs into @rd. */
static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

/* Sign-extend the low 16 bits of @rs into @rd. */
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

/* Zero-extend the low 16 bits of @rs into @rd. */
static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

/* Sign-extend the low 32 bits of @rs into @rd (64-bit hosts only). */
static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

/* Zero-extend the low 32 bits of @rs into @rd (64-bit hosts only). */
static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

/* i32 -> i64 sign extension. */
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

/* i32 -> i64 zero extension. */
static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

/* Extract the low 32 bits of an i64: a plain move suffices. */
static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

/* No register-exchange instruction is available. */
static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}
561
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    /* NOTE(review): TCG_TARGET_CALL_ARG_I128 above is not BY_REF,
       so this path is presumably never taken — confirm. */
    g_assert_not_reached();
}
568
/*
 * Emit a helper call.  The function pointer and its ffi_cif descriptor
 * are stored as a pair in the constant pool; the insn's 4-bit field
 * encodes the return-value size: 0 = void, 1 = 32-bit, 2 = 64-bit,
 * 3 = 128-bit (ctz32(size) - 1 for sizes 4/8/16).
 */
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
589
/*
 * CASE_32_64(x) expands to case labels for both the _i32 and _i64
 * variants of an opcode on 64-bit hosts, or just _i32 on 32-bit hosts.
 * CASE_64(x) expands to the _i64 case label only where it exists.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
601
/* Emit exit_tb; the return value is encoded pc-relative (NULL -> 0). */
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}
613
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
619
/*
 * Emitters for the generic two- and three-operand integer ops.
 * Each simply encodes the matching TCI opcode with register operands;
 * the constraint for every one is plain registers.
 */
static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};

static void tgen_nand(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};

static void tgen_nor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
}

static const TCGOutOpBinary outop_nor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nor,
};

static void tgen_or(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};

static void tgen_sub(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
}

static const TCGOutOpSubtract outop_sub = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sub,
};

static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};

static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
}

static const TCGOutOpUnary outop_neg = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_neg,
};

static void tgen_not(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_not, a0, a1);
}

static const TCGOutOpUnary outop_not = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_not,
};
749
750
/*
 * Main opcode dispatcher: encode one TCG op into the TCI bytecode
 * stream, using the fixed-width emit helpers above.
 */
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(mul)
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        /* Lower to setcond into TMP, then branch on TMP. */
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        /* TCG_BSWAP_OS requests a sign-extended result instead. */
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        /* As for brcond: compute the condition into TMP, then branch. */
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: the 64-bit value is a register pair, and the
               memop index does not fit; pass it via TMP. */
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            /* Zero-extend a 32-bit guest address on a 64-bit host. */
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}
894
895static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
896                       intptr_t offset)
897{
898    switch (type) {
899    case TCG_TYPE_I32:
900        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
901        break;
902#if TCG_TARGET_REG_BITS == 64
903    case TCG_TYPE_I64:
904        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
905        break;
906#endif
907    default:
908        g_assert_not_reached();
909    }
910}
911
/* No store-immediate form; the caller must load a register first. */
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}
917
918/* Test if a constant matches the constraint. */
919static bool tcg_target_const_match(int64_t val, int ct,
920                                   TCGType type, TCGCond cond, int vece)
921{
922    return ct & TCG_CT_CONST;
923}
924
925static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
926{
927    memset(p, 0, sizeof(*p) * count);
928}
929
/* One-time backend initialization: register sets and frame layout. */
static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
956
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* Intentionally empty: TCI has no native prologue to emit. */
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}
966
/* Every memory operation supports byte-swapping. */
bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}
971
/* Out-of-line qemu_ld/st slow paths are never generated here. */
static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
981