/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
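
/*
 * Note on the 32-bit case above: TCG_CALL_ARG_EVEN asks the generic
 * call lowering to align an argument to an even stack slot.  An
 * illustrative sketch (assuming the generic tcg.c slot layout, not
 * anything defined in this file): for helper(i32 a, i64 b), "a" takes
 * slot 0, slot 1 is skipped as padding, and "b" takes slots 2-3,
 * mirroring 32-bit host ABIs that pass 64-bit values in aligned
 * register pairs.  On 64-bit hosts no padding is needed, hence
 * TCG_CALL_ARG_NORMAL throughout.
 */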

static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
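    /*
     * Constraint-set shorthand, for reference: C_Om_In names m output
     * operands followed by n input operands, one constraint letter
     * each.  TCI only uses "r" (any register), so C_O1_I2(r, r, r)
     * reads "one register output, two register inputs".
     */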
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}
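
/*
 * Worked example for the asserts above: return values are assumed to
 * fit in 128 bits, so the valid slots are 0..1 (R0, R1) on a 64-bit
 * host and 0..3 (R0..R3) on a 32-bit host; these are the same "2 or 4"
 * call clobbered registers noted in tcg_target_reg_alloc_order.
 */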

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
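
/*
 * Worked example for patch_reloc: the only relocation type is 20, a
 * signed 20-bit byte displacement measured from the end of the 32-bit
 * instruction (code_ptr + 1) and deposited into insn bits [12, 31]
 * (32 - 20 = 12).  That reaches roughly +/-512 KiB; a farther target
 * fails the sextract32 check and the relocation is refused.
 */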

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

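/*
 * Emit helpers.  Every TCI instruction is a single 32-bit word; the
 * tcg_out_op_* helpers below differ only in how operand fields are
 * packed with deposit32.  Layout summary, read off the deposits below
 * (illustrative; the code is authoritative):
 *
 *   bits [0,7]    opcode (all formats)
 *   bits [8,11]   r0; further registers at [12,15], [16,19], ...
 *   bits [12,31]  20-bit signed immediate or pc-relative displacement
 *                 (_ri, _p, and the label relocations)
 *   bits [16,31]  16-bit offset or memop index (_rrs, _rrm)
 *
 * For example, tcg_out_op_rrr produces op | r0 << 8 | r1 << 12 | r2 << 16.
 */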
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
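
/*
 * Example of the fallback above (offset value illustrative): a signed
 * 16-bit field covers -32768..32767, so offset = 0x12345 would be
 * materialized into TCG_REG_TMP, added to the base register, and the
 * access emitted as a zero-offset load/store from the temporary.
 */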

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
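
/*
 * Two movi paths, by example: arg = 1000 satisfies the sextract32 test
 * and becomes one tci_movi with the value inline in bits [12, 31];
 * arg = 0x123456 exceeds the signed 20-bit range, so the value is
 * parked in the constant pool (new_pool_label) and a tci_movl with a
 * 20-bit relocation to the pool entry loads it instead.
 */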

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
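
/*
 * The "which" field, worked through: void returns encode 0; otherwise
 * ctz32(size) - 1 maps the permitted return sizes 4, 8 and 16 to
 * 1, 2 and 3 respectively, for the interpreter to decode on the far
 * side of INDEX_op_call.  The function pointer and the ffi_cif both
 * land in the constant pool via new_pool_l2.
 */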

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
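
/*
 * Expansion example: on a 64-bit host, CASE_32_64(sub) expands to
 *     case INDEX_op_sub_i64: case INDEX_op_sub_i32:
 * while on a 32-bit host only the _i32 case is generated and
 * CASE_64(x) vanishes entirely.
 */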

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /*
     * Indirect jump method: the interpreter reads the real target out
     * of the per-TB jump target slot, so retargeting a chained TB only
     * rewrites that pointer and tb_target_set_jmp_target below has
     * nothing to patch in the code stream.
     */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

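/*
 * The expanders below follow TCG's per-opcode output table pattern:
 * each operation supplies its register constraints via
 * base.static_constraint plus an emission hook (out_rrr for the
 * three-register form) that the register allocator invokes.  This is
 * a sketch of the contract as exercised here, not a complete
 * description of TCGOutOpBinary.
 */
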
static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

static void tgen_or(TCGContext *s, TCGType type,
                    TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(xor)
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
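
/*
 * Frame layout implied by tcg_set_frame above: offsets
 * [0, TCG_STATIC_CALL_ARGS_SIZE) from TCG_REG_CALL_STACK hold outgoing
 * helper-call arguments, and the following TCG_STATIC_FRAME_SIZE bytes
 * hold spilled temporaries; this is exactly the window that
 * stack_bounds_check accepts.
 */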

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
927