/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL

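/*
 * Constraint sets: C_On_Im(...) names a set with n output and m input
 * operands.  TCI has no special-purpose registers, so every operand
 * below uses the generic "r" (any register) constraint.
 */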
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

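/*
 * Helper return values land in consecutive registers starting at
 * TCG_REG_R0; e.g. a 128-bit value occupies r0-r1 on a 64-bit host
 * and r0-r3 on a 32-bit host, matching the slot bound asserted below.
 */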
static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

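/*
 * Branch and pool references use a 20-bit signed byte displacement,
 * measured from the end of the 32-bit instruction word and stored in
 * its top 20 bits (bit position 32 - type == 12).
 */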
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

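/*
 * All TCI instructions are single 32-bit words, assembled with
 * deposit32() by the tcg_out_op_* helpers below.  The common layout:
 *   bits [0, 8)    opcode
 *   bits [8, 12)   first register operand
 *   bits [12, 16)  second register operand
 *   bits [12, 32)  20-bit signed immediate or pc-relative displacement
 *   bits [16, 32)  16-bit immediate (load/store offset or memop index)
 * Additional 4-bit register and 6-bit position/length fields are
 * packed above bit 16 as each format requires.
 */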
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

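/*
 * Emit a load or store.  An offset that does not fit the 16-bit
 * immediate field is first materialized into TCG_REG_TMP and added
 * to the base register.
 */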
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

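/*
 * Load a constant: values that fit in 20 signed bits use tci_movi
 * directly; anything larger is spilled to the constant pool and
 * loaded with tci_movl through a 20-bit relocation.
 */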
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

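/*
 * The sign- and zero-extension helpers below are all built on the
 * generic (s)extract operations, with the field starting at bit 0.
 */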
static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

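/*
 * Calls are dispatched through libffi by the interpreter.  The insn
 * encodes a return-size code in the register field (0: void, 1: 32-bit,
 * 2: 64-bit, 3: 128-bit) and refers to a constant-pool pair holding
 * the function pointer and its ffi_cif descriptor.
 */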
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /*
     * Indirect jump method: the destination is reloaded from the jump
     * table slot on every execution, so no code patching is needed.
     */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

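    /*
     * TCI's brcond only tests a single register for nonzero: lower the
     * comparison to a setcond into TCG_REG_TMP, then branch on that.
     */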
    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend their result and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

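    /*
     * 64-bit data on a 32-bit host needs the rrrr format (data pair
     * plus address), which leaves no room for the 16-bit memop index,
     * so the index is loaded into TCG_REG_TMP as the fourth register.
     */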
    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/*
 * Test if a constant matches the constraint.  TCI uses only plain "r"
 * register constraints, so only the generic TCG_CT_CONST flag can match.
 */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers: r0-r1 on a 64-bit host, or r0-r3
     * on a 32-bit host.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}