/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
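/*
 * A note on the 32-bit layout below (editor's reading of the generic
 * layout code, not asserted by this file): TCG_CALL_ARG_EVEN aligns each
 * call argument to an even stack slot, so 64-bit and 128-bit values
 * always start on an 8-byte boundary; 64-bit hosts use the normal
 * one-slot-per-argument layout.
 */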
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL

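/*
 * Constraint sets are named C_O<n>_I<m> for n outputs and m inputs;
 * the interpreter accepts any general register ('r') for every operand.
 */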
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

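/*
 * Call return values land in consecutive registers starting at R0:
 * up to 128 bits, i.e. R0..R1 on a 64-bit host or R0..R3 on a
 * 32-bit host.
 */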
static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

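/*
 * TCI has a single relocation type: a signed 20-bit byte displacement,
 * measured from the instruction word that follows the one being patched,
 * stored in the high 20 bits of the word (the low 12 hold the opcode
 * and a register field).
 */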
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
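
/*
 * Every TCI instruction is a single 32-bit word.  The emitters below
 * pack operands with deposit32(): the opcode always sits in bits [0,8),
 * register numbers occupy 4-bit fields starting at bit 8, and trailing
 * immediates fill the high bits (20-bit i/p/l fields at bit 12, 16-bit
 * s/m fields at bit 16, 6-bit b fields for deposit/extract positions
 * and lengths).  For example, tcg_out_op_rrr() emits
 *     op | r0 << 8 | r1 << 12 | r2 << 16
 */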

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
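    /*
     * The rrs encoding has only a signed 16-bit displacement field; for
     * larger offsets, compute base + offset into TCG_REG_TMP and use a
     * zero displacement.
     */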
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

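        /*
         * The value does not fit in the 20-bit immediate field: put it
         * in the constant pool and emit tci_movl, whose pc-relative
         * displacement is filled in later by patch_reloc().
         */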
        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
        tcg_out_op_rr(s, INDEX_op_ext8s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i64);
        tcg_out_op_rr(s, INDEX_op_ext8s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i64);
        tcg_out_op_rr(s, INDEX_op_ext8u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i32);
        tcg_out_op_rr(s, INDEX_op_ext8u_i32, rd, rs);
    }
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
        tcg_out_op_rr(s, INDEX_op_ext16s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i64);
        tcg_out_op_rr(s, INDEX_op_ext16s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i64);
        tcg_out_op_rr(s, INDEX_op_ext16u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i32);
        tcg_out_op_rr(s, INDEX_op_ext16u_i32, rd, rs);
    }
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32s_i64);
    tcg_out_op_rr(s, INDEX_op_ext32s_i64, rd, rs);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32u_i64);
    tcg_out_op_rr(s, INDEX_op_ext32u_i64, rd, rs);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

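    /*
     * Encode the size of the ffi return value in the 4-bit field
     * normally used for a register: 0 for void, otherwise
     * log2(size) - 1, i.e. 1, 2 or 3 for 4-, 8- or 16-byte returns.
     */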
    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
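
/*
 * E.g. on a 64-bit host, CASE_32_64(add) expands to
 *     case INDEX_op_add_i64: case INDEX_op_add_i32:
 * while a 32-bit host gets only the _i32 case.
 */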

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* Indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do. */
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    TCGOpcode exts;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
        exts = INDEX_op_ext16s_i32;
        goto do_bswap;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        exts = INDEX_op_ext16s_i64;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        exts = INDEX_op_ext32s_i64;
    do_bswap:
        /* The base tci bswaps zero-extend and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_op_rr(s, exts, args[0], args[0]);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
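        /*
         * On a 32-bit host a 64-bit access already uses three register
         * operands (value low/high plus address), so there is no room
         * for the MemOpIdx in an immediate field; materialize args[3]
         * into TCG_REG_TMP and pass it as a fourth register.
         */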
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext8s_i32:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}