/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
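/*
 * Note: TCG_CALL_ARG_EVEN aligns each argument to an even-numbered stack
 * slot, inserting a padding slot when required, so that on a 32-bit host
 * a 64-bit value always occupies an aligned slot pair.
 */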

static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

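/*
 * Helper return values are assigned to successive registers starting
 * at R0: a 128-bit value uses R0/R1 on a 64-bit host and R0..R3 on a
 * 32-bit host, matching the 128 / TCG_TARGET_REG_BITS bound below.
 */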
static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

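/*
 * The only relocation type used is "20": a signed 20-bit byte
 * displacement, measured from the end of the 32-bit instruction word
 * and stored in the top 20 bits of that word.
 */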
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

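/*
 * Instruction encoding, as implemented by the tcg_out_op_* helpers
 * below: each TCI instruction is a single 32-bit word holding the
 * opcode in bits [0,8), register operands in successive 4-bit fields
 * starting at bit 8, and any immediate, condition, or bit-field
 * position/length operands in the remaining high bits.
 */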
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

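/*
 * Loads and stores use the 16-bit signed displacement of the rrs
 * format.  A wider offset is first materialized into TCG_REG_TMP and
 * added to the base register (a movi plus an add).
 */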
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

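/*
 * Constants that fit the signed 20-bit immediate field are loaded with
 * a single tci_movi.  Anything wider goes into the constant pool and
 * is loaded with tci_movl, whose 20-bit pool displacement is filled in
 * later by patch_reloc().
 */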
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

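/*
 * Calls are made through libffi.  The r0 field encodes the size of the
 * return value: 0 = void, 1 = 32-bit, 2 = 64-bit, 3 = 128-bit.  The
 * function pointer and its ffi_cif are stored as an adjacent pair in
 * the constant pool.
 */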
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
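/*
 * For example, on a 64-bit host CASE_32_64(sub) expands to
 *     case INDEX_op_sub_i64: case INDEX_op_sub_i32:
 * while on a 32-bit host only the _i32 case remains.
 */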

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

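/*
 * goto_tb always uses the indirect method: the current jump target is
 * reloaded from the jump table on every execution, so retargeting a
 * chained TB needs no code patching (see tb_target_set_jmp_target).
 */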
static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

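/*
 * add and and are handled by the per-opcode TCGOutOpBinary descriptors
 * above, which pair a constraint set with emitter callbacks; all other
 * opcodes are still emitted through the switch below.
 */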
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend and ignore the high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

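    /*
     * With 32-bit registers, a 64-bit access needs two data registers
     * plus the address and the MemOpIdx; four register fields and a
     * 16-bit index do not fit in one insn word, so the MemOpIdx is
     * passed in TCG_REG_TMP instead.
     */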
    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
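/*
 * All operand constraints in this backend are plain "r", so there are
 * no special constant ranges to check here.
 */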
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

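/*
 * The TCI "stack" lives in the interpreter's frame: the first
 * TCG_STATIC_CALL_ARGS_SIZE bytes hold outgoing helper arguments,
 * followed by TCG_STATIC_FRAME_SIZE bytes of temporary storage,
 * matching the bounds enforced by stack_bounds_check().
 */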
static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

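/*
 * The interpreter performs any requested byte swap as part of the
 * memory access itself, so all MemOp orderings are accepted.
 */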
bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}