/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET    0
#define TCG_TARGET_STACK_ALIGN          8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64        TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128       TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128        TCG_CALL_RET_NORMAL

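/*
 * TCI is an interpreter, so operands carry no encoding restrictions:
 * every constraint set below is built purely from plain "r" register
 * operands, and tcg_target_const_match() accepts any constant.
 */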
static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

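/*
 * The only relocation TCI uses is a signed 20-bit byte displacement,
 * stored in the top 20 bits of an instruction word and measured from
 * the end of the 4-byte instruction being patched (hence code_ptr + 1).
 */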
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

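/*
 * Accesses through TCG_REG_CALL_STACK must stay inside the static frame
 * laid out by tcg_target_init(): call arguments first, temps after.
 */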
static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

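/*
 * Every TCI instruction is one 32-bit word.  All of the tcg_out_op_*
 * emitters below share the same layout: the opcode lives in bits [7:0],
 * and the remaining operands are packed into ascending fields starting
 * at bit 8 -- 4 bits per register or condition, 6 bits per bitfield
 * position/length, and 16 or 20 bits for immediates and displacements.
 *
 * As an illustration (using a made-up opcode value of 0x12), an rrr-form
 * instruction with output R2 and inputs R0 and R1 would encode as
 *
 *     insn = 0x12 | (2 << 8) | (0 << 12) | (1 << 16) = 0x00010212
 *
 * The interpreter side (tci.c) decodes the same fields with matching
 * extract32() calls.
 */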
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

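/*
 * Loads and stores use the 16-bit immediate rrs format when the offset
 * fits; larger offsets are materialized into TCG_REG_TMP and folded
 * into the base address first.
 */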
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

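/*
 * Immediates that fit in 20 signed bits (e.g. 0x12345, but not 0x80000)
 * are emitted inline with tci_movi; anything wider goes into the
 * constant pool and is loaded through a tci_movl whose 20-bit
 * displacement is filled in later by patch_reloc().
 */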
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

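/*
 * Calls are dispatched through libffi.  The instruction encodes only
 * the return-value kind in its 4-bit field: 0 for void, otherwise
 * ctz32(size) - 1, i.e. 1/2/3 for 4/8/16-byte returns.  The helper
 * address and its ffi_cif are stored as a pair in the constant pool.
 */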
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

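/*
 * CASE_32_64(x) expands to both the _i32 and _i64 cases on 64-bit
 * hosts and to the _i32 case only on 32-bit hosts; CASE_64(x) expands
 * to the _i64 case or to nothing.
 */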
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

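/*
 * Per-opcode output helpers: each TCGOutOp structure pairs an opcode's
 * constraint set with the routine that emits it, taking over from the
 * corresponding entries in the big tcg_out_op() switch below.
 */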
static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tgen_and(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_and, a0, a1, a2);
}

static const TCGOutOpBinary outop_and = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_and,
};

static void tgen_andc(TCGContext *s, TCGType type,
                      TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_andc, a0, a1, a2);
}

static const TCGOutOpBinary outop_andc = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_andc,
};

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

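    /*
     * On 32-bit hosts a 64-bit qemu_ld/st has too many operands for one
     * instruction word, so the MemOpIdx travels in TCG_REG_TMP rather
     * than an immediate field; on 64-bit hosts a 32-bit guest address
     * is zero-extended before use.
     */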
    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op.  */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

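/*
 * The TCI interpreter performs the actual guest memory access, so any
 * MemOp, including byte-swapping forms, is handled directly and no
 * out-of-line slow paths are ever generated.
 */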
bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}
911