/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
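
/*
 * On 32-bit hosts, TCG_CALL_ARG_EVEN pads call arguments to even stack
 * slots, which keeps 64-bit values aligned to TCG_TARGET_STACK_ALIGN
 * (8 bytes); 64-bit hosts need no such padding and use
 * TCG_CALL_ARG_NORMAL throughout.
 */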

static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
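
/*
 * As a reading aid: C_O<n>_I<m>(...) names a constraint set with <n>
 * output and <m> input operands, each "r" meaning any register, since
 * the interpreter places no restrictions on register operands.  E.g.
 * C_O1_I2(r, r, r) is one register output with two register inputs.
 */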

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}
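
/*
 * For a 128-bit return value this yields slots 0..1 (R0..R1) on 64-bit
 * hosts and slots 0..3 (R0..R3) on 32-bit hosts, which is why "either
 * 2 or 4" of the low registers are listed last in the allocation order
 * above.
 */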

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
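
/*
 * Every TCI instruction is a single 32-bit word, assembled with
 * deposit32 by the helpers below.  As encoded here, the layout is:
 *
 *   bits [0,8)    opcode
 *   bits [8,12)   first register operand (or the call return-size tag)
 *   bits [12,...) further 4-bit register or condition fields, or one
 *                 wide operand: a signed 20-bit immediate/displacement
 *                 at bit 12, a 16-bit field at bit 16, or 6-bit
 *                 position/length pairs for deposit/extract.
 *
 * patch_reloc above rewrites the 20-bit field at bit 12 (type == 20,
 * deposited at 32 - type) once a label's value is known.
 */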

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}
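
/*
 * The displacement stored by tcg_out_op_p is in bytes, measured from
 * the end of the current instruction (s->code_ptr + 1); it must fit
 * the same signed 20-bit field that patch_reloc fills in for labels.
 */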

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}
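
/*
 * For qemu_ld/st opcodes the 16-bit m2 operand carries the MemOpIdx
 * (combined MemOp and mmu index); the assertion above verifies, rather
 * than assumes, that it fits in 16 bits.
 */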

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
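
/*
 * Worked example of the fallback above: an offset such as 0x123456
 * does not fit the signed 16-bit field, so it is built in TCG_REG_TMP
 * via tcg_out_movi, added to the base register, and the access is then
 * emitted with an offset of zero.
 */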

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
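
/*
 * Thus a constant fitting in 20 signed bits becomes a single tci_movi,
 * while anything wider (e.g. most host pointers) is spilled to the
 * constant pool and loaded with tci_movl, whose operand is a 20-bit
 * pc-relative displacement later resolved through patch_reloc.
 */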

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
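
/*
 * The 4-bit "which" field tags the return-value size for the
 * interpreter: 0 for void, otherwise ctz32(size) - 1, i.e. 1, 2 or 3
 * for 4-, 8- and 16-byte returns.  The helper address and its ffi_cif
 * are stored as a pair in the constant pool (new_pool_l2) and located
 * through the usual 20-bit displacement.
 */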

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
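
/*
 * For example, on a 64-bit host CASE_32_64(ld8u) expands to
 *   case INDEX_op_ld8u_i64: case INDEX_op_ld8u_i32:
 * whereas on a 32-bit host only the _i32 case is emitted and
 * CASE_64(x) expands to nothing.
 */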

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* Indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do. */
}
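
/*
 * Since goto_tb reloads the destination from the jump-target table on
 * every execution, retargeting a chained TB is presumably just the
 * table store done by common code; there is no instruction to patch.
 */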

static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};
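
/*
 * The remaining binary and unary operations below all follow the same
 * shape as outop_add: a tgen_* emitter wrapping tcg_out_op_rrr (or
 * tcg_out_op_rr), paired with a descriptor constrained to
 * C_O1_I2(r, r, r) or C_O1_I1(r, r).  Operations the interpreter does
 * not implement, such as muluh, are marked C_NotImplemented.
 */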

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};

static void tgen_mul(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_mul, a0, a1, a2);
}

static const TCGOutOpBinary outop_mul = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_mul,
};

static const TCGOutOpBinary outop_muluh = {
    .base.static_constraint = C_NotImplemented,
};

static void tgen_nand(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};

static void tgen_nor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
}

static const TCGOutOpBinary outop_nor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nor,
};

static void tgen_or(TCGContext *s, TCGType type,
                    TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};

static void tgen_sub(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
}

static const TCGOutOpSubtract outop_sub = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sub,
};

static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};

static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
}

static const TCGOutOpUnary outop_neg = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_neg,
};

static void tgen_not(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_not, a0, a1);
}

static const TCGOutOpUnary outop_not = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_not,
};

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend and ignore the high input bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}
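
/*
 * TCI encodes no inline constant operands, so a constant matches only
 * through the generic TCG_CT_CONST flag; everything else is forced
 * into a register by the allocator.
 */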

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}
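
/*
 * The interpreter byte-swaps within its own load/store handlers, so
 * every MemOp is accepted and no out-of-line slow path is ever
 * generated; hence the unreachable stubs below.
 */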

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
993