/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
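/*
 * On 32-bit hosts, TCG_CALL_ARG_EVEN aligns 64-bit (and wider) call
 * arguments to even stack slots; on 64-bit hosts every argument fits
 * in a single slot, so no such alignment is needed.
 */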
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL

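/*
 * The interpreter places no restriction on which register an operand
 * may live in, so every opcode below uses an all-"r" constraint set;
 * only the operand counts differ.
 */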
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

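/*
 * The only relocation type used is a 20-bit displacement, counted in
 * bytes from the end of the current instruction and stored in the top
 * 20 bits of the instruction word (hence the "32 - type" below).
 */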
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

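/*
 * Accesses relative to TCG_REG_CALL_STACK must stay inside the fixed
 * frame laid out by tcg_set_frame() in tcg_target_init() below.
 */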
static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

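/*
 * Every TCI instruction is one 32-bit word.  The opcode occupies bits
 * 0-7, and the remaining 24 bits are packed with 4-bit register
 * numbers, condition codes, 6-bit position/length fields, or a 16- or
 * 20-bit immediate, as the deposit32() calls in the tcg_out_op_*
 * emitters below show.  As an illustrative example, an rrr-format
 * "add r2, r3, r4" would be encoded as
 *     INDEX_op_add | 2 << 8 | 3 << 12 | 4 << 16
 */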
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

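/*
 * Loads and stores encode a 16-bit signed offset.  Anything wider is
 * first materialized into TCG_REG_TMP and added to the base register.
 */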
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

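/*
 * Immediates that fit in the 20-bit field are encoded directly with
 * tci_movi; anything wider goes into the constant pool and is loaded
 * with a pc-relative tci_movl.
 */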
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

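/*
 * Calls are dispatched through libffi.  The (func, cif) pointer pair
 * is placed in the constant pool, and the 4-bit "which" field tells
 * the interpreter what result to copy back: 0 for void, and 1/2/3 for
 * a 4/8/16 byte return value.
 */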
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

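/*
 * On 64-bit hosts CASE_32_64(x) expands to both the _i32 and _i64
 * cases; on 32-bit hosts only the _i32 case exists, since 64-bit
 * values are handled as register pairs there.
 */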
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

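/*
 * Each TCGOutOp structure below pairs a generic binary or unary opcode
 * with the fully general all-register constraint and a single
 * three-register (or two-register) TCI instruction.
 */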
static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};

static void tgen_nand(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};

static void tgen_nor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
}

static const TCGOutOpBinary outop_nor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nor,
};

static void tgen_or(TCGContext *s, TCGType type,
                    TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};

static void tgen_sub(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
}

static const TCGOutOpSubtract outop_sub = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sub,
};

static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};

static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
}

static const TCGOutOpUnary outop_neg = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_neg,
};

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(mul)
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
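        /*
         * TCI has no compare-and-branch; lower brcond to a setcond
         * into TCG_REG_TMP followed by a branch on that temporary.
         */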
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend and ignore the input's high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
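        /*
         * On 32-bit hosts a 64-bit value occupies a register pair, so
         * the memop index no longer fits in the instruction word and
         * is passed through TCG_REG_TMP instead.
         */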
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
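        /*
         * For a 32-bit guest on a 64-bit host, the high 32 bits of the
         * address register are not guaranteed valid; zero-extend the
         * address before handing it to the interpreter.
         */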
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}