/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
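/*
 * All helper arguments are passed on the stack (see
 * tcg_target_call_iarg_regs below); on 32-bit hosts every argument is
 * aligned to an even slot, presumably so the interpreter can treat
 * each argument as one 64-bit unit.
 */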

static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

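/*
 * Helper return values occupy R0 upward: the assert below allows up to
 * 128 bits, i.e. four 32-bit or two 64-bit registers.
 */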
static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

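    /*
     * The 20-bit pc-relative displacement lives in the top bits of the
     * 32-bit insn word, deposited at bit 32 - type = 12.
     */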
    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

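/*
 * All TCI instructions are one 32-bit word.  The low 8 bits hold the
 * opcode; register operands fill successive 4-bit fields from bit 8;
 * immediates occupy the remaining high bits, either a 20-bit signed
 * value at bit 12 or a 16-bit value at bit 16.  For example,
 * tcg_out_op_rrr() produces op | r0 << 8 | r1 << 12 | r2 << 16.
 */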
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

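/* Two registers and a 16-bit immediate (the MemOpIdx of qemu_ld/st). */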
static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

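/* Two registers and two 6-bit immediates: the bitfield position and
   length operands of extract/sextract. */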
static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
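    /* Offsets that do not fit the 16-bit field are added into TCG_REG_TMP. */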
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

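    /*
     * Constants that fit the 20-bit immediate use tci_movi directly;
     * wider values go to the constant pool and load via tci_movl.
     */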
    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

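/* No register-register swap: returning false makes the common code
   fall back (presumably via a scratch register). */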
static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
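    /* 'which' encodes the return kind: 0 = void, 1/2/3 = 32/64/128 bits. */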
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

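/*
 * Most of the binary-op emitters below map the TCG op directly onto a
 * TCI instruction of the same name; the 32-bit div/rem variants select
 * a dedicated tci_*32 opcode.  All use the plain C_O1_I2(r, r, r)
 * constraint.
 */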
609                     TCGReg a0, TCGReg a1, TCGReg a2)
610{
611    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
612}
613
614static const TCGOutOpBinary outop_add = {
615    .base.static_constraint = C_O1_I2(r, r, r),
616    .out_rrr = tgen_add,
617};
618
619static void tgen_and(TCGContext *s, TCGType type,
620                     TCGReg a0, TCGReg a1, TCGReg a2)
621{
622    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
623}
624
625static const TCGOutOpBinary outop_and = {
626    .base.static_constraint = C_O1_I2(r, r, r),
627    .out_rrr = tgen_and,
628};
629
630static void tgen_andc(TCGContext *s, TCGType type,
631                      TCGReg a0, TCGReg a1, TCGReg a2)
632{
633    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
634}
635
636static const TCGOutOpBinary outop_andc = {
637    .base.static_constraint = C_O1_I2(r, r, r),
638    .out_rrr = tgen_andc,
639};
640
641static void tgen_divs(TCGContext *s, TCGType type,
642                      TCGReg a0, TCGReg a1, TCGReg a2)
643{
644    TCGOpcode opc = (type == TCG_TYPE_I32
645                     ? INDEX_op_tci_divs32
646                     : INDEX_op_divs);
647    tcg_out_op_rrr(s, opc, a0, a1, a2);
648}
649
650static const TCGOutOpBinary outop_divs = {
651    .base.static_constraint = C_O1_I2(r, r, r),
652    .out_rrr = tgen_divs,
653};
654
655static const TCGOutOpDivRem outop_divs2 = {
656    .base.static_constraint = C_NotImplemented,
657};
658
659static void tgen_divu(TCGContext *s, TCGType type,
660                      TCGReg a0, TCGReg a1, TCGReg a2)
661{
662    TCGOpcode opc = (type == TCG_TYPE_I32
663                     ? INDEX_op_tci_divu32
664                     : INDEX_op_divu);
665    tcg_out_op_rrr(s, opc, a0, a1, a2);
666}
667
668static const TCGOutOpBinary outop_divu = {
669    .base.static_constraint = C_O1_I2(r, r, r),
670    .out_rrr = tgen_divu,
671};
672
673static const TCGOutOpDivRem outop_divu2 = {
674    .base.static_constraint = C_NotImplemented,
675};
676
677static void tgen_eqv(TCGContext *s, TCGType type,
678                     TCGReg a0, TCGReg a1, TCGReg a2)
679{
680    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
681}
682
683static const TCGOutOpBinary outop_eqv = {
684    .base.static_constraint = C_O1_I2(r, r, r),
685    .out_rrr = tgen_eqv,
686};
687
688static void tgen_mul(TCGContext *s, TCGType type,
689                     TCGReg a0, TCGReg a1, TCGReg a2)
690{
691    tcg_out_op_rrr(s, INDEX_op_mul, a0, a1, a2);
692}
693
694static const TCGOutOpBinary outop_mul = {
695    .base.static_constraint = C_O1_I2(r, r, r),
696    .out_rrr = tgen_mul,
697};
698
699static const TCGOutOpBinary outop_mulsh = {
700    .base.static_constraint = C_NotImplemented,
701};
702
703static const TCGOutOpBinary outop_muluh = {
704    .base.static_constraint = C_NotImplemented,
705};
706
707static void tgen_nand(TCGContext *s, TCGType type,
708                     TCGReg a0, TCGReg a1, TCGReg a2)
709{
710    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
711}
712
713static const TCGOutOpBinary outop_nand = {
714    .base.static_constraint = C_O1_I2(r, r, r),
715    .out_rrr = tgen_nand,
716};
717
718static void tgen_nor(TCGContext *s, TCGType type,
719                     TCGReg a0, TCGReg a1, TCGReg a2)
720{
721    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
722}
723
724static const TCGOutOpBinary outop_nor = {
725    .base.static_constraint = C_O1_I2(r, r, r),
726    .out_rrr = tgen_nor,
727};
728
729static void tgen_or(TCGContext *s, TCGType type,
730                     TCGReg a0, TCGReg a1, TCGReg a2)
731{
732    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
733}
734
735static const TCGOutOpBinary outop_or = {
736    .base.static_constraint = C_O1_I2(r, r, r),
737    .out_rrr = tgen_or,
738};
739
740static void tgen_orc(TCGContext *s, TCGType type,
741                     TCGReg a0, TCGReg a1, TCGReg a2)
742{
743    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
744}
745
746static const TCGOutOpBinary outop_orc = {
747    .base.static_constraint = C_O1_I2(r, r, r),
748    .out_rrr = tgen_orc,
749};
750
751static void tgen_rems(TCGContext *s, TCGType type,
752                      TCGReg a0, TCGReg a1, TCGReg a2)
753{
754    TCGOpcode opc = (type == TCG_TYPE_I32
755                     ? INDEX_op_tci_rems32
756                     : INDEX_op_rems);
757    tcg_out_op_rrr(s, opc, a0, a1, a2);
758}
759
760static const TCGOutOpBinary outop_rems = {
761    .base.static_constraint = C_O1_I2(r, r, r),
762    .out_rrr = tgen_rems,
763};
764
765static void tgen_remu(TCGContext *s, TCGType type,
766                      TCGReg a0, TCGReg a1, TCGReg a2)
767{
768    TCGOpcode opc = (type == TCG_TYPE_I32
769                     ? INDEX_op_tci_remu32
770                     : INDEX_op_remu);
771    tcg_out_op_rrr(s, opc, a0, a1, a2);
772}
773
774static const TCGOutOpBinary outop_remu = {
775    .base.static_constraint = C_O1_I2(r, r, r),
776    .out_rrr = tgen_remu,
777};
778
779static void tgen_shl(TCGContext *s, TCGType type,
780                     TCGReg a0, TCGReg a1, TCGReg a2)
781{
782    tcg_out_op_rrr(s, INDEX_op_shl, a0, a1, a2);
783}
784
785static const TCGOutOpBinary outop_shl = {
786    .base.static_constraint = C_O1_I2(r, r, r),
787    .out_rrr = tgen_shl,
788};
789
790static void tgen_sub(TCGContext *s, TCGType type,
791                     TCGReg a0, TCGReg a1, TCGReg a2)
792{
793    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
794}
795
796static const TCGOutOpSubtract outop_sub = {
797    .base.static_constraint = C_O1_I2(r, r, r),
798    .out_rrr = tgen_sub,
799};
800
801static void tgen_xor(TCGContext *s, TCGType type,
802                     TCGReg a0, TCGReg a1, TCGReg a2)
803{
804    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
805}
806
807static const TCGOutOpBinary outop_xor = {
808    .base.static_constraint = C_O1_I2(r, r, r),
809    .out_rrr = tgen_xor,
810};
811
812static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
813{
814    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
815}
816
817static const TCGOutOpUnary outop_neg = {
818    .base.static_constraint = C_O1_I1(r, r),
819    .out_rr = tgen_neg,
820};
821
822static void tgen_not(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
823{
824    tcg_out_op_rr(s, INDEX_op_not, a0, a1);
825}
826
827static const TCGOutOpUnary outop_not = {
828    .base.static_constraint = C_O1_I1(r, r),
829    .out_rr = tgen_not,
830};
831
832
833static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
834                       const TCGArg args[TCG_MAX_OP_ARGS],
835                       const int const_args[TCG_MAX_OP_ARGS])
836{
837    int width;
838
839    switch (opc) {
840    case INDEX_op_goto_ptr:
841        tcg_out_op_r(s, opc, args[0]);
842        break;
843
844    case INDEX_op_br:
845        tcg_out_op_l(s, opc, arg_label(args[0]));
846        break;
847
848    CASE_32_64(setcond)
849        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
850        break;
851
852    CASE_32_64(movcond)
853    case INDEX_op_setcond2_i32:
854        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
855                          args[3], args[4], args[5]);
856        break;
857
858    CASE_32_64(ld8u)
859    CASE_32_64(ld8s)
860    CASE_32_64(ld16u)
861    CASE_32_64(ld16s)
862    case INDEX_op_ld_i32:
863    CASE_64(ld32u)
864    CASE_64(ld32s)
865    CASE_64(ld)
866    CASE_32_64(st8)
867    CASE_32_64(st16)
868    case INDEX_op_st_i32:
869    CASE_64(st32)
870    CASE_64(st)
871        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
872        break;
873
874    CASE_32_64(shr)
875    CASE_32_64(sar)
876    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
877    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
878    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
879    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
880        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
881        break;
882
883    CASE_32_64(deposit)
884        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
885        break;
886
887    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
888    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
889        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
890        break;
891
892    CASE_32_64(brcond)
893        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
894                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
895                        TCG_REG_TMP, args[0], args[1], args[2]);
896        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
897        break;
898
899    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
900    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
901    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
902        tcg_out_op_rr(s, opc, args[0], args[1]);
903        break;
904
905    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
906    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
907        width = 16;
908        goto do_bswap;
909    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
910        width = 32;
911    do_bswap:
        /* The base tci bswaps zero-extend and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
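            /*
             * The three register operands leave no room for the
             * MemOpIdx in the insn word, so pass it in TCG_REG_TMP.
             */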
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}