xref: /openbmc/qemu/tcg/tci/tcg-target.c.inc (revision cd9acd2049a385e54314d58f35b4bfce7c031d55)
1/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2009, 2011 Stefan Weil
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
/* 32-bit host: align multi-word call arguments to even argument slots. */
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
/* 64-bit host: arguments occupy consecutive slots, no alignment padding. */
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
/* 128-bit return values are delivered in registers. */
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL
38
/*
 * Return the operand-constraint set for @op.  The interpreter places no
 * restriction on which registers an operand may use, so every operand is
 * the plain "r" constraint; only the output/input counts vary per opcode.
 */
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    /* One output, one input: loads, extensions, swaps, extracts, popcount. */
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    /* Stores: value plus base address, no outputs. */
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    /* One output, two inputs: shifts, rotates, setcond, deposit, clz/ctz. */
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    /* Double-word add/sub: two outputs, four inputs. */
    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    /* Widening multiplies: double-word result. */
    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    /* Guest memory ops; 64-bit data needs two registers on 32-bit hosts. */
    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}
141
/* Register allocation preference order: prefer the registers that are
 * never call-clobbered, so live temps survive helper calls. */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};
161
/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };
164
/*
 * Return the register holding call-return slot @slot.
 * Return values occupy consecutive registers starting at R0;
 * up to 128 bits of return value are supported.
 */
static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}
171
#ifdef CONFIG_DEBUG_TCG
/* Register names used only for debug dumps of generated code. */
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif
192
193static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
194                        intptr_t value, intptr_t addend)
195{
196    intptr_t diff = value - (intptr_t)(code_ptr + 1);
197
198    tcg_debug_assert(addend == 0);
199    tcg_debug_assert(type == 20);
200
201    if (diff == sextract32(diff, 0, type)) {
202        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
203        return true;
204    }
205    return false;
206}
207
208static void stack_bounds_check(TCGReg base, intptr_t offset)
209{
210    if (base == TCG_REG_CALL_STACK) {
211        tcg_debug_assert(offset >= 0);
212        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
213                                   TCG_STATIC_FRAME_SIZE));
214    }
215}
216
217static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
218{
219    tcg_insn_unit insn = 0;
220
221    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
222    insn = deposit32(insn, 0, 8, op);
223    tcg_out32(s, insn);
224}
225
226static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
227{
228    tcg_insn_unit insn = 0;
229    intptr_t diff;
230
231    /* Special case for exit_tb: map null -> 0. */
232    if (p0 == NULL) {
233        diff = 0;
234    } else {
235        diff = p0 - (void *)(s->code_ptr + 1);
236        tcg_debug_assert(diff != 0);
237        if (diff != sextract32(diff, 0, 20)) {
238            tcg_raise_tb_overflow(s);
239        }
240    }
241    insn = deposit32(insn, 0, 8, op);
242    insn = deposit32(insn, 12, 20, diff);
243    tcg_out32(s, insn);
244}
245
246static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
247{
248    tcg_insn_unit insn = 0;
249
250    insn = deposit32(insn, 0, 8, op);
251    insn = deposit32(insn, 8, 4, r0);
252    tcg_out32(s, insn);
253}
254
255static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
256{
257    tcg_out32(s, (uint8_t)op);
258}
259
260static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
261{
262    tcg_insn_unit insn = 0;
263
264    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
265    insn = deposit32(insn, 0, 8, op);
266    insn = deposit32(insn, 8, 4, r0);
267    insn = deposit32(insn, 12, 20, i1);
268    tcg_out32(s, insn);
269}
270
271static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
272{
273    tcg_insn_unit insn = 0;
274
275    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
276    insn = deposit32(insn, 0, 8, op);
277    insn = deposit32(insn, 8, 4, r0);
278    tcg_out32(s, insn);
279}
280
281static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
282{
283    tcg_insn_unit insn = 0;
284
285    insn = deposit32(insn, 0, 8, op);
286    insn = deposit32(insn, 8, 4, r0);
287    insn = deposit32(insn, 12, 4, r1);
288    tcg_out32(s, insn);
289}
290
291static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
292                           TCGReg r0, TCGReg r1, TCGArg m2)
293{
294    tcg_insn_unit insn = 0;
295
296    tcg_debug_assert(m2 == extract32(m2, 0, 16));
297    insn = deposit32(insn, 0, 8, op);
298    insn = deposit32(insn, 8, 4, r0);
299    insn = deposit32(insn, 12, 4, r1);
300    insn = deposit32(insn, 16, 16, m2);
301    tcg_out32(s, insn);
302}
303
304static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
305                           TCGReg r0, TCGReg r1, TCGReg r2)
306{
307    tcg_insn_unit insn = 0;
308
309    insn = deposit32(insn, 0, 8, op);
310    insn = deposit32(insn, 8, 4, r0);
311    insn = deposit32(insn, 12, 4, r1);
312    insn = deposit32(insn, 16, 4, r2);
313    tcg_out32(s, insn);
314}
315
316static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
317                           TCGReg r0, TCGReg r1, intptr_t i2)
318{
319    tcg_insn_unit insn = 0;
320
321    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
322    insn = deposit32(insn, 0, 8, op);
323    insn = deposit32(insn, 8, 4, r0);
324    insn = deposit32(insn, 12, 4, r1);
325    insn = deposit32(insn, 16, 16, i2);
326    tcg_out32(s, insn);
327}
328
329static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
330                            TCGReg r1, uint8_t b2, uint8_t b3)
331{
332    tcg_insn_unit insn = 0;
333
334    tcg_debug_assert(b2 == extract32(b2, 0, 6));
335    tcg_debug_assert(b3 == extract32(b3, 0, 6));
336    insn = deposit32(insn, 0, 8, op);
337    insn = deposit32(insn, 8, 4, r0);
338    insn = deposit32(insn, 12, 4, r1);
339    insn = deposit32(insn, 16, 6, b2);
340    insn = deposit32(insn, 22, 6, b3);
341    tcg_out32(s, insn);
342}
343
344static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
345                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
346{
347    tcg_insn_unit insn = 0;
348
349    insn = deposit32(insn, 0, 8, op);
350    insn = deposit32(insn, 8, 4, r0);
351    insn = deposit32(insn, 12, 4, r1);
352    insn = deposit32(insn, 16, 4, r2);
353    insn = deposit32(insn, 20, 4, c3);
354    tcg_out32(s, insn);
355}
356
357static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
358                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
359{
360    tcg_insn_unit insn = 0;
361
362    tcg_debug_assert(b3 == extract32(b3, 0, 6));
363    tcg_debug_assert(b4 == extract32(b4, 0, 6));
364    insn = deposit32(insn, 0, 8, op);
365    insn = deposit32(insn, 8, 4, r0);
366    insn = deposit32(insn, 12, 4, r1);
367    insn = deposit32(insn, 16, 4, r2);
368    insn = deposit32(insn, 20, 6, b3);
369    insn = deposit32(insn, 26, 6, b4);
370    tcg_out32(s, insn);
371}
372
373static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
374                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
375{
376    tcg_insn_unit insn = 0;
377
378    insn = deposit32(insn, 0, 8, op);
379    insn = deposit32(insn, 8, 4, r0);
380    insn = deposit32(insn, 12, 4, r1);
381    insn = deposit32(insn, 16, 4, r2);
382    insn = deposit32(insn, 20, 4, r3);
383    tcg_out32(s, insn);
384}
385
386static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
387                              TCGReg r0, TCGReg r1, TCGReg r2,
388                              TCGReg r3, TCGReg r4, TCGCond c5)
389{
390    tcg_insn_unit insn = 0;
391
392    insn = deposit32(insn, 0, 8, op);
393    insn = deposit32(insn, 8, 4, r0);
394    insn = deposit32(insn, 12, 4, r1);
395    insn = deposit32(insn, 16, 4, r2);
396    insn = deposit32(insn, 20, 4, r3);
397    insn = deposit32(insn, 24, 4, r4);
398    insn = deposit32(insn, 28, 4, c5);
399    tcg_out32(s, insn);
400}
401
402static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
403                              TCGReg r0, TCGReg r1, TCGReg r2,
404                              TCGReg r3, TCGReg r4, TCGReg r5)
405{
406    tcg_insn_unit insn = 0;
407
408    insn = deposit32(insn, 0, 8, op);
409    insn = deposit32(insn, 8, 4, r0);
410    insn = deposit32(insn, 12, 4, r1);
411    insn = deposit32(insn, 16, 4, r2);
412    insn = deposit32(insn, 20, 4, r3);
413    insn = deposit32(insn, 24, 4, r4);
414    insn = deposit32(insn, 28, 4, r5);
415    tcg_out32(s, insn);
416}
417
418static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
419                         TCGReg base, intptr_t offset)
420{
421    stack_bounds_check(base, offset);
422    if (offset != sextract32(offset, 0, 16)) {
423        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
424        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
425        base = TCG_REG_TMP;
426        offset = 0;
427    }
428    tcg_out_op_rrs(s, op, val, base, offset);
429}
430
/* Load @val of integer @type from @base + @offset.  Only I32 (and, on
 * 64-bit hosts, I64) is supported. */
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}
447
/* Register-to-register move; always succeeds for this backend. */
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}
453
/*
 * Load immediate @arg into @ret.  Values that fit in a 20-bit signed
 * immediate use a single tci_movi; anything larger goes through the
 * constant pool and a tci_movl, patched via the 20-bit reloc.
 */
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        /* Canonicalize 32-bit values by sign-extension. */
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        /* Place the value in the pool; the reloc fills the displacement. */
        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
480
481static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
482                            TCGReg rs, unsigned pos, unsigned len)
483{
484    TCGOpcode opc = type == TCG_TYPE_I32 ?
485                    INDEX_op_extract_i32 :
486                    INDEX_op_extract_i64;
487    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
488}
489
490static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
491                             TCGReg rs, unsigned pos, unsigned len)
492{
493    TCGOpcode opc = type == TCG_TYPE_I32 ?
494                    INDEX_op_sextract_i32 :
495                    INDEX_op_sextract_i64;
496    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
497}
498
/* Sign-extend the low 8 bits of @rs into @rd. */
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

/* Zero-extend the low 8 bits of @rs into @rd. */
static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

/* Sign-extend the low 16 bits of @rs into @rd. */
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

/* Zero-extend the low 16 bits of @rs into @rd. */
static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

/* Sign-extend the low 32 bits of @rs into @rd (64-bit hosts only). */
static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

/* Zero-extend the low 32 bits of @rs into @rd (64-bit hosts only). */
static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

/* Sign-extend an i32 value into an i64 register. */
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

/* Zero-extend an i32 value into an i64 register. */
static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

/* Extract the low 32 bits of an i64: a plain move suffices here. */
static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

/* Register exchange is not supported; the caller falls back to moves. */
static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}
551
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}
558
559static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
560                         const TCGHelperInfo *info)
561{
562    ffi_cif *cif = info->cif;
563    tcg_insn_unit insn = 0;
564    uint8_t which;
565
566    if (cif->rtype == &ffi_type_void) {
567        which = 0;
568    } else {
569        tcg_debug_assert(cif->rtype->size == 4 ||
570                         cif->rtype->size == 8 ||
571                         cif->rtype->size == 16);
572        which = ctz32(cif->rtype->size) - 1;
573    }
574    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
575    insn = deposit32(insn, 0, 8, INDEX_op_call);
576    insn = deposit32(insn, 8, 4, which);
577    tcg_out32(s, insn);
578}
579
/*
 * CASE_32_64(x) expands to the _i32 case label, plus the _i64 label on
 * 64-bit hosts; CASE_64(x) expands to the _i64 label on 64-bit hosts
 * and to nothing on 32-bit hosts.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
591
/* Exit the translation block, returning @arg to the TB's caller. */
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

/* Chain to translation block @which. */
static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
609
/* Three-register add. */
static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

/* Three-register bitwise and. */
static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

/* Three-register and-with-complement. */
static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};
642
643static void tgen_divs(TCGContext *s, TCGType type,
644                      TCGReg a0, TCGReg a1, TCGReg a2)
645{
646    TCGOpcode opc = (type == TCG_TYPE_I32
647                     ? INDEX_op_tci_divs32
648                     : INDEX_op_divs);
649    tcg_out_op_rrr(s, opc, a0, a1, a2);
650}
651
652static const TCGOutOpBinary outop_divs = {
653    .base.static_constraint = C_O1_I2(r, r, r),
654    .out_rrr = tgen_divs,
655};
656
657static const TCGOutOpDivRem outop_divs2 = {
658    .base.static_constraint = C_NotImplemented,
659};
660
661static void tgen_divu(TCGContext *s, TCGType type,
662                      TCGReg a0, TCGReg a1, TCGReg a2)
663{
664    TCGOpcode opc = (type == TCG_TYPE_I32
665                     ? INDEX_op_tci_divu32
666                     : INDEX_op_divu);
667    tcg_out_op_rrr(s, opc, a0, a1, a2);
668}
669
670static const TCGOutOpBinary outop_divu = {
671    .base.static_constraint = C_O1_I2(r, r, r),
672    .out_rrr = tgen_divu,
673};
674
675static const TCGOutOpDivRem outop_divu2 = {
676    .base.static_constraint = C_NotImplemented,
677};
678
/* Three-register equivalence (xnor). */
static void tgen_eqv(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_eqv, a0, a1, a2);
}

static const TCGOutOpBinary outop_eqv = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_eqv,
};

/* Three-register multiply (low part). */
static void tgen_mul(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_mul, a0, a1, a2);
}

static const TCGOutOpBinary outop_mul = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_mul,
};

/* High-part multiplies are not provided; mul[su]2 are used instead. */
static const TCGOutOpBinary outop_mulsh = {
    .base.static_constraint = C_NotImplemented,
};

static const TCGOutOpBinary outop_muluh = {
    .base.static_constraint = C_NotImplemented,
};
708
/* Three-register nand. */
static void tgen_nand(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nand, a0, a1, a2);
}

static const TCGOutOpBinary outop_nand = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nand,
};

/* Three-register nor. */
static void tgen_nor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_nor, a0, a1, a2);
}

static const TCGOutOpBinary outop_nor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_nor,
};

/* Three-register or. */
static void tgen_or(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_or, a0, a1, a2);
}

static const TCGOutOpBinary outop_or = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_or,
};

/* Three-register or-with-complement. */
static void tgen_orc(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_orc, a0, a1, a2);
}

static const TCGOutOpBinary outop_orc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_orc,
};
752
753static void tgen_rems(TCGContext *s, TCGType type,
754                      TCGReg a0, TCGReg a1, TCGReg a2)
755{
756    TCGOpcode opc = (type == TCG_TYPE_I32
757                     ? INDEX_op_tci_rems32
758                     : INDEX_op_rems);
759    tcg_out_op_rrr(s, opc, a0, a1, a2);
760}
761
762static const TCGOutOpBinary outop_rems = {
763    .base.static_constraint = C_O1_I2(r, r, r),
764    .out_rrr = tgen_rems,
765};
766
767static void tgen_remu(TCGContext *s, TCGType type,
768                      TCGReg a0, TCGReg a1, TCGReg a2)
769{
770    TCGOpcode opc = (type == TCG_TYPE_I32
771                     ? INDEX_op_tci_remu32
772                     : INDEX_op_remu);
773    tcg_out_op_rrr(s, opc, a0, a1, a2);
774}
775
776static const TCGOutOpBinary outop_remu = {
777    .base.static_constraint = C_O1_I2(r, r, r),
778    .out_rrr = tgen_remu,
779};
780
/* Three-register subtract. */
static void tgen_sub(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_sub, a0, a1, a2);
}

static const TCGOutOpSubtract outop_sub = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_sub,
};

/* Three-register xor. */
static void tgen_xor(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_xor, a0, a1, a2);
}

static const TCGOutOpBinary outop_xor = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_xor,
};

/* Two-register negate. */
static void tgen_neg(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_neg, a0, a1);
}

static const TCGOutOpUnary outop_neg = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_neg,
};

/* Two-register bitwise not. */
static void tgen_not(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1)
{
    tcg_out_op_rr(s, INDEX_op_not, a0, a1);
}

static const TCGOutOpUnary outop_not = {
    .base.static_constraint = C_O1_I1(r, r),
    .out_rr = tgen_not,
};
822
823
/*
 * Main opcode dispatcher: encode one TCG op @opc with operands @args
 * into the TCI instruction stream.  Opcodes handled via dedicated
 * emitters (call, exit_tb, goto_tb, the outop_* tables) must not
 * reach this function.
 */
static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        /* Lowered as setcond into TMP, then a conditional branch on TMP. */
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            /* Sign-extend the swapped result when requested. */
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        /* Lowered as setcond2 into TMP, then a conditional branch on TMP. */
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: memop index does not fit; pass it in TMP. */
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            /* Zero-extend the 32-bit guest address before use. */
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}
962
/* Store @val of integer @type to @base + @offset.  Mirrors tcg_out_ld. */
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

/* Storing immediates directly is not supported; caller uses movi + st. */
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}
985
/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    /* Any constant is acceptable; there are no restricted immediates. */
    return ct & TCG_CT_CONST;
}

/* Fill unused code space with zeroed (invalid) insn words. */
static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}
997
/* One-time backend initialization: register sets and the stack frame. */
static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    /* Keep the scratch and stack registers out of the allocator's hands. */
    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
1024
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* The interpreter backend emits no prologue/epilogue code. */
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

/* All memop byte orders are handled directly by the interpreter. */
bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

/* Out-of-line qemu_ld/st slow paths are never generated by this backend. */
static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
1049