xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision 3a8c4e9e53c6f4aa7c590971950000b174e74fa1)
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
/*
 * On 32-bit hosts, 64-bit (and 128-bit) arguments are passed in
 * even/odd slot pairs; on 64-bit hosts no special alignment is needed.
 */
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
38
/*
 * Return the register-constraint set for opcode @op.
 * TCI is an interpreter, so every operand simply wants a plain
 * register ("r"); the only variation is the operand counts, and the
 * 32-bit-host splitting of 64-bit qemu_ld/st values into register pairs.
 */
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    /* Loads and single-operand ALU ops: one output, one input. */
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    /* Stores: value and base address. */
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    /* Two-input ALU ops: one output, two inputs. */
    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    /* Double-word add/sub: two outputs, four inputs. */
    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    /* Widening multiplies: double-word result. */
    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    /*
     * Guest memory access: on 32-bit hosts a 64-bit value occupies a
     * register pair, hence the extra output/input register.
     */
    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
157
/*
 * Register allocation preference order.  R0-R3 are used for the
 * (up to 128-bit) helper return value, so prefer the others first.
 */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};
177
/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };
180
181static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
182{
183    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
184    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
185    return TCG_REG_R0 + slot;
186}
187
#ifdef CONFIG_DEBUG_TCG
/* Register names for debug dumps only. */
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif
208
209static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
210                        intptr_t value, intptr_t addend)
211{
212    intptr_t diff = value - (intptr_t)(code_ptr + 1);
213
214    tcg_debug_assert(addend == 0);
215    tcg_debug_assert(type == 20);
216
217    if (diff == sextract32(diff, 0, type)) {
218        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
219        return true;
220    }
221    return false;
222}
223
224static void stack_bounds_check(TCGReg base, intptr_t offset)
225{
226    if (base == TCG_REG_CALL_STACK) {
227        tcg_debug_assert(offset >= 0);
228        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
229                                   TCG_STATIC_FRAME_SIZE));
230    }
231}
232
233static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
234{
235    tcg_insn_unit insn = 0;
236
237    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
238    insn = deposit32(insn, 0, 8, op);
239    tcg_out32(s, insn);
240}
241
242static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
243{
244    tcg_insn_unit insn = 0;
245    intptr_t diff;
246
247    /* Special case for exit_tb: map null -> 0. */
248    if (p0 == NULL) {
249        diff = 0;
250    } else {
251        diff = p0 - (void *)(s->code_ptr + 1);
252        tcg_debug_assert(diff != 0);
253        if (diff != sextract32(diff, 0, 20)) {
254            tcg_raise_tb_overflow(s);
255        }
256    }
257    insn = deposit32(insn, 0, 8, op);
258    insn = deposit32(insn, 12, 20, diff);
259    tcg_out32(s, insn);
260}
261
262static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
263{
264    tcg_insn_unit insn = 0;
265
266    insn = deposit32(insn, 0, 8, op);
267    insn = deposit32(insn, 8, 4, r0);
268    tcg_out32(s, insn);
269}
270
271static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
272{
273    tcg_out32(s, (uint8_t)op);
274}
275
276static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
277{
278    tcg_insn_unit insn = 0;
279
280    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
281    insn = deposit32(insn, 0, 8, op);
282    insn = deposit32(insn, 8, 4, r0);
283    insn = deposit32(insn, 12, 20, i1);
284    tcg_out32(s, insn);
285}
286
287static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
288{
289    tcg_insn_unit insn = 0;
290
291    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
292    insn = deposit32(insn, 0, 8, op);
293    insn = deposit32(insn, 8, 4, r0);
294    tcg_out32(s, insn);
295}
296
297static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
298{
299    tcg_insn_unit insn = 0;
300
301    insn = deposit32(insn, 0, 8, op);
302    insn = deposit32(insn, 8, 4, r0);
303    insn = deposit32(insn, 12, 4, r1);
304    tcg_out32(s, insn);
305}
306
307static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
308                           TCGReg r0, TCGReg r1, TCGArg m2)
309{
310    tcg_insn_unit insn = 0;
311
312    tcg_debug_assert(m2 == extract32(m2, 0, 16));
313    insn = deposit32(insn, 0, 8, op);
314    insn = deposit32(insn, 8, 4, r0);
315    insn = deposit32(insn, 12, 4, r1);
316    insn = deposit32(insn, 16, 16, m2);
317    tcg_out32(s, insn);
318}
319
320static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
321                           TCGReg r0, TCGReg r1, TCGReg r2)
322{
323    tcg_insn_unit insn = 0;
324
325    insn = deposit32(insn, 0, 8, op);
326    insn = deposit32(insn, 8, 4, r0);
327    insn = deposit32(insn, 12, 4, r1);
328    insn = deposit32(insn, 16, 4, r2);
329    tcg_out32(s, insn);
330}
331
332static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
333                           TCGReg r0, TCGReg r1, intptr_t i2)
334{
335    tcg_insn_unit insn = 0;
336
337    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
338    insn = deposit32(insn, 0, 8, op);
339    insn = deposit32(insn, 8, 4, r0);
340    insn = deposit32(insn, 12, 4, r1);
341    insn = deposit32(insn, 16, 16, i2);
342    tcg_out32(s, insn);
343}
344
345static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
346                            TCGReg r1, uint8_t b2, uint8_t b3)
347{
348    tcg_insn_unit insn = 0;
349
350    tcg_debug_assert(b2 == extract32(b2, 0, 6));
351    tcg_debug_assert(b3 == extract32(b3, 0, 6));
352    insn = deposit32(insn, 0, 8, op);
353    insn = deposit32(insn, 8, 4, r0);
354    insn = deposit32(insn, 12, 4, r1);
355    insn = deposit32(insn, 16, 6, b2);
356    insn = deposit32(insn, 22, 6, b3);
357    tcg_out32(s, insn);
358}
359
360static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
361                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
362{
363    tcg_insn_unit insn = 0;
364
365    insn = deposit32(insn, 0, 8, op);
366    insn = deposit32(insn, 8, 4, r0);
367    insn = deposit32(insn, 12, 4, r1);
368    insn = deposit32(insn, 16, 4, r2);
369    insn = deposit32(insn, 20, 4, c3);
370    tcg_out32(s, insn);
371}
372
373static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
374                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
375{
376    tcg_insn_unit insn = 0;
377
378    tcg_debug_assert(b3 == extract32(b3, 0, 6));
379    tcg_debug_assert(b4 == extract32(b4, 0, 6));
380    insn = deposit32(insn, 0, 8, op);
381    insn = deposit32(insn, 8, 4, r0);
382    insn = deposit32(insn, 12, 4, r1);
383    insn = deposit32(insn, 16, 4, r2);
384    insn = deposit32(insn, 20, 6, b3);
385    insn = deposit32(insn, 26, 6, b4);
386    tcg_out32(s, insn);
387}
388
389static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
390                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
391{
392    tcg_insn_unit insn = 0;
393
394    insn = deposit32(insn, 0, 8, op);
395    insn = deposit32(insn, 8, 4, r0);
396    insn = deposit32(insn, 12, 4, r1);
397    insn = deposit32(insn, 16, 4, r2);
398    insn = deposit32(insn, 20, 4, r3);
399    tcg_out32(s, insn);
400}
401
402static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
403                              TCGReg r0, TCGReg r1, TCGReg r2,
404                              TCGReg r3, TCGReg r4, TCGCond c5)
405{
406    tcg_insn_unit insn = 0;
407
408    insn = deposit32(insn, 0, 8, op);
409    insn = deposit32(insn, 8, 4, r0);
410    insn = deposit32(insn, 12, 4, r1);
411    insn = deposit32(insn, 16, 4, r2);
412    insn = deposit32(insn, 20, 4, r3);
413    insn = deposit32(insn, 24, 4, r4);
414    insn = deposit32(insn, 28, 4, c5);
415    tcg_out32(s, insn);
416}
417
418static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
419                              TCGReg r0, TCGReg r1, TCGReg r2,
420                              TCGReg r3, TCGReg r4, TCGReg r5)
421{
422    tcg_insn_unit insn = 0;
423
424    insn = deposit32(insn, 0, 8, op);
425    insn = deposit32(insn, 8, 4, r0);
426    insn = deposit32(insn, 12, 4, r1);
427    insn = deposit32(insn, 16, 4, r2);
428    insn = deposit32(insn, 20, 4, r3);
429    insn = deposit32(insn, 24, 4, r4);
430    insn = deposit32(insn, 28, 4, r5);
431    tcg_out32(s, insn);
432}
433
434static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
435                         TCGReg base, intptr_t offset)
436{
437    stack_bounds_check(base, offset);
438    if (offset != sextract32(offset, 0, 16)) {
439        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
440        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
441        base = TCG_REG_TMP;
442        offset = 0;
443    }
444    tcg_out_op_rrs(s, op, val, base, offset);
445}
446
/* Load a value of @type from base+offset into register @val. */
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}
463
/* Register-to-register move; always possible on TCI. */
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}
469
/*
 * Load constant @arg into @ret.  Values fitting the signed 20-bit
 * immediate use tci_movi; everything else goes through the constant
 * pool and is loaded with tci_movl.
 */
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        /* Canonicalize 32-bit values by sign-extension. */
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        /* Pool entry; the 20-bit displacement is patched at finalize. */
        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
496
497static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
498                            TCGReg rs, unsigned pos, unsigned len)
499{
500    TCGOpcode opc = type == TCG_TYPE_I32 ?
501                    INDEX_op_extract_i32 :
502                    INDEX_op_extract_i64;
503    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
504}
505
506static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
507                             TCGReg rs, unsigned pos, unsigned len)
508{
509    TCGOpcode opc = type == TCG_TYPE_I32 ?
510                    INDEX_op_sextract_i32 :
511                    INDEX_op_sextract_i64;
512    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
513}
514
/* Sign-extend the low 8 bits of rs into rd. */
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}
519
/* Zero-extend the low 8 bits of rs into rd. */
static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}
524
/* Sign-extend the low 16 bits of rs into rd. */
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}
529
/* Zero-extend the low 16 bits of rs into rd. */
static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}
534
/* Sign-extend the low 32 bits of rs into rd (64-bit hosts only). */
static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}
540
/* Zero-extend the low 32 bits of rs into rd (64-bit hosts only). */
static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}
546
/* i32 -> i64 sign-extension. */
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}
551
/* i32 -> i64 zero-extension. */
static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}
556
/* Extract the low 32 bits of an i64; a plain move suffices for TCI. */
static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}
562
/* Register exchange is not supported by this backend. */
static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}
567
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}
574
/*
 * Emit a helper call.  The function pointer and its ffi_cif go into the
 * constant pool as a pair; the insn carries a 2-bit "which" code for the
 * return-value size: 0 = void, 1/2/3 = 4/8/16 bytes (log2(size) - 1).
 */
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    /* Two pool entries: the callee address and the call descriptor. */
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
595
/*
 * Helpers to expand case labels for both the _i32 and _i64 variants of
 * an opcode; on 32-bit hosts the _i64 cases do not exist.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
607
/* Emit exit_tb with the return value encoded as a code pointer. */
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}
612
static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}
619
void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
625
/* a0 = a1 + a2; width selected by the interpreted opcode's type. */
static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

/* Three-register form only; TCI has no immediate ALU operands. */
static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};
636
/* a0 = a1 & a2. */
static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};
647
/* a0 = a1 & ~a2. */
static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};
658
/* a0 = ~(a1 ^ a2). */
static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};
669
/* a0 = ~(a1 & a2). */
static void tgen_nand(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};
680
/* a0 = ~(a1 | a2). */
static void tgen_nor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
}

static const TCGOutOpBinary outop_nor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nor,
};
691
/* a0 = a1 | a2. */
static void tgen_or(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};
702
/* a0 = a1 | ~a2. */
static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};
713
/* a0 = a1 ^ a2. */
static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};
724
725
/*
 * Main opcode dispatcher: encode one TCG op into the TCI bytecode
 * stream, choosing the encoder that matches the operand layout.
 * Branches are synthesized as setcond into TMP followed by a
 * conditional branch on TMP, since TCI has no fused compare-and-branch.
 */
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        /* Compare into TMP, then branch on TMP being nonzero. */
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            /* Caller requested a sign-extended result. */
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        /* Double-word compare into TMP, then branch on TMP. */
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            /* 64-bit value is a register pair; memop index goes via TMP. */
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            /* Zero-extend a 32-bit guest address before use. */
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}
872
/* Store register @val of @type to base+offset. */
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}
889
/* Store-immediate is not supported; force the value into a register. */
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}
895
/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    /* TCI only has the generic "any constant" constraint. */
    return ct & TCG_CT_CONST;
}
902
/* Fill @count insn slots with zeroes (opcode 0 is never executed). */
static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}
907
/* One-time backend initialization: register sets and frame layout. */
static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
934
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* The interpreter has no native prologue; nothing to emit. */
}
939
static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}
944
/* The interpreter performs any required byte swapping itself. */
bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}
949
/* TCI handles guest loads inline; no slow path is ever generated. */
static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
954
/* TCI handles guest stores inline; no slow path is ever generated. */
static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
959