/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL

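/*
 * Note: TCI accepts every operand in a plain general register ("r"), so
 * this table only selects the operand signature (outputs and inputs)
 * for each opcode.
 */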
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_a32_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_a64_i32:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O1_I2(r, r, r);
    case INDEX_op_qemu_ld_a32_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_ld_a64_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I2(r, r, r, r);
    case INDEX_op_qemu_st_a32_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_a64_i32:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
    case INDEX_op_qemu_st_a32_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
    case INDEX_op_qemu_st_a64_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I4(r, r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

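/*
 * Note: all relocations here are 20-bit pc-relative displacements,
 * patched into the top 20 bits of the 32-bit instruction word (the same
 * field that tcg_out_op_ri uses for immediates).  False is returned
 * when the displacement does not fit.
 */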
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

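/*
 * Note: accesses through the stack pointer must stay inside the fixed
 * frame reserved by tcg_target_init below (call arguments first, then
 * temp storage).
 */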
static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

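/*
 * Instruction encoders.  Every TCI instruction is a single 32-bit word
 * with the opcode in bits [7:0] and the operands packed above it; for
 * example, tcg_out_op_rrr produces
 *
 *     31 .. 20   19 .. 16   15 .. 12   11 .. 8   7 .. 0
 *     (unused)      r2         r1         r0     opcode
 *
 * The suffix letters name the operand fields: r = 4-bit register,
 * i = signed 20-bit immediate, s = signed 16-bit offset, l = label
 * (20-bit relocation), b = 6-bit bit-field position or length,
 * c = 4-bit condition, m = 16-bit memory operand index, p = 20-bit
 * pc-relative pointer displacement, v = no operands.
 */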
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrr(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGReg r4)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

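/*
 * Note: loads and stores carry a signed 16-bit displacement.  Larger
 * offsets are materialized in TCG_REG_TMP and added to the base first.
 */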
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}

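/*
 * Note: constants that fit in the signed 20-bit immediate field are
 * encoded directly with tci_movi; anything wider goes into the constant
 * pool and is fetched with tci_movl via a 20-bit relocation.
 */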
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
        tcg_out_op_rr(s, INDEX_op_ext8s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i64);
        tcg_out_op_rr(s, INDEX_op_ext8s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i64);
        tcg_out_op_rr(s, INDEX_op_ext8u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i32);
        tcg_out_op_rr(s, INDEX_op_ext8u_i32, rd, rs);
    }
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
        tcg_out_op_rr(s, INDEX_op_ext16s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i64);
        tcg_out_op_rr(s, INDEX_op_ext16s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i64);
        tcg_out_op_rr(s, INDEX_op_ext16u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i32);
        tcg_out_op_rr(s, INDEX_op_ext16u_i32, rd, rs);
    }
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32s_i64);
    tcg_out_op_rr(s, INDEX_op_ext32s_i64, rd, rs);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32u_i64);
    tcg_out_op_rr(s, INDEX_op_ext32u_i64, rd, rs);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

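/*
 * Note: calls are dispatched through libffi.  The instruction itself
 * carries only a code for the return-value size (0 = void, 1 = 32-bit,
 * 2 = 64-bit, 3 = 128-bit); the function pointer and its ffi_cif are
 * emitted as a pair into the constant pool.
 */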
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

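/*
 * Note: CASE_32_64 matches both the _i32 and _i64 forms of an opcode on
 * a 64-bit host, and only the _i32 form on a 32-bit host, where CASE_64
 * expands to nothing.
 */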
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* Indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do. */
}

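/*
 * Note: this emits one TCI instruction (or a short sequence) per TCG
 * opcode; e.g. brcond is synthesized as setcond into TCG_REG_TMP
 * followed by a conditional branch on that temporary.
 */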
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    TCGOpcode exts;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
        exts = INDEX_op_ext16s_i32;
        goto do_bswap;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        exts = INDEX_op_ext16s_i64;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        exts = INDEX_op_ext32s_i64;
    do_bswap:
        /* The base tci bswaps zero-extend and ignore the high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_op_rr(s, exts, args[0], args[0]);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_a32_i32:
    case INDEX_op_qemu_st_a32_i32:
        tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        break;
    case INDEX_op_qemu_ld_a64_i32:
    case INDEX_op_qemu_st_a64_i32:
    case INDEX_op_qemu_ld_a32_i64:
    case INDEX_op_qemu_st_a32_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
        }
        break;
    case INDEX_op_qemu_ld_a64_i64:
    case INDEX_op_qemu_st_a64_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
            tcg_out_op_rrrrr(s, opc, args[0], args[1],
                             args[2], args[3], TCG_REG_TMP);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext8s_i32:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* Nothing to do. */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}