/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
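
/*
 * Note that with 32-bit registers, every argument class uses
 * TCG_CALL_ARG_EVEN, so each helper argument begins on an even stack
 * slot.  A plausible reading is that this lets the interpreter's
 * helper-call path treat every argument as an aligned 64-bit unit,
 * regardless of its declared type.
 */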

static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
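
/*
 * In the C_Ox_Iy(...) shorthand above, x is the number of output
 * operands and y the number of inputs; each 'r' constrains one
 * operand to any general register.  C_O1_I2(r, r, r), for instance,
 * is one register output fed by two register inputs.
 */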

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}
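
/*
 * A 128-bit return value occupies 128 / TCG_TARGET_REG_BITS slots,
 * so slot runs over R0..R1 on 64-bit hosts and R0..R3 on 32-bit
 * hosts.
 */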

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
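
/*
 * All relocations are thus signed 20-bit byte displacements measured
 * from the end of the 32-bit insn word, stored in its top bits
 * [31:12] (32 - type = 12).  For example, a target 0x100 bytes past
 * code_ptr + 1 gives diff = 0x100, which fits; a diff outside
 * [-0x80000, 0x7ffff] makes patch_reloc return false so the caller
 * can recover (e.g. by retrying with a smaller translation block).
 */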

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

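/*
 * The tcg_out_op_* emitters below each pack one 32-bit instruction
 * word with deposit32().  Reading the field positions off those
 * calls, the common layouts are:
 *
 *   v     [7:0] op
 *   r     [7:0] op  [11:8] r0
 *   rr    [7:0] op  [11:8] r0  [15:12] r1
 *   ri    [7:0] op  [11:8] r0  [31:12] simm20
 *   rrm   [7:0] op  [11:8] r0  [15:12] r1  [31:16] m2 (MemOpIdx)
 *   rrr   [7:0] op  [11:8] r0  [15:12] r1  [19:16] r2
 *   rrs   [7:0] op  [11:8] r0  [15:12] r1  [31:16] simm16
 *   rrbb  [7:0] op  [11:8] r0  [15:12] r1  [21:16] b2  [27:22] b3
 *
 * The longer forms continue at four bits per register or condition.
 * The label forms (_l, _rl) leave the 20-bit field at [31:12] zero
 * for patch_reloc to fill in; _p deposits the pc-relative
 * displacement directly.
 */
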
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
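
/*
 * For example, an offset of 0x12345 does not fit the signed 16-bit
 * displacement of the rrs form, so the path above emits roughly:
 *
 *     tci_movi/movl  tmp, 0x12345
 *     add            tmp, tmp, base
 *     ld/st          val, tmp, 0
 */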

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
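
/*
 * That is, immediates in [-0x80000, 0x7ffff] fit the signed 20-bit
 * field of tci_movi; anything wider goes to the constant pool and is
 * fetched with tci_movl through the same 20-bit pc-relative
 * relocation used for branches.
 */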

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
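
/*
 * 'which' encodes the return-value size: 0 for void, else
 * ctz32(size) - 1, i.e. 1, 2 or 3 for 4-, 8- or 16-byte returns.
 * The function pointer and its ffi_cif descriptor travel as a pair
 * in the constant pool, located via the usual 20-bit relocation.
 */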

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
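
/*
 * For example, on a 64-bit host:
 *     CASE_32_64(sub)  ->  case INDEX_op_sub_i64: case INDEX_op_sub_i32:
 *     CASE_64(st32)    ->  case INDEX_op_st32_i64:
 * On a 32-bit host only the _i32 case is generated and CASE_64(x)
 * expands to nothing.
 */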

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* Indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do. */
}

static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

static void tgen_or(TCGContext *s, TCGType type,
                    TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};

static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32-bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64-bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
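
/*
 * The clobber mask above works out to R0-R1 on 64-bit hosts and
 * R0-R3 on 32-bit hosts (128 / TCG_TARGET_REG_BITS registers
 * starting at R0), which is exactly the "either 2 or 4" set placed
 * last in tcg_target_reg_alloc_order.
 */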

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
935