/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in a 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))
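/*
 * Worked example (illustration only, not used by the code below):
 * BITS(7, 4) == 0x000000f0, i.e. a mask covering bits 7..4.
 */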

static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
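    /*
     * Background note (summary of TCG's generic constraint-set macros, not
     * specific to this backend): C_O<n>_I<m>(...) names a constraint set
     * with <n> output and <m> input operands, and each 'r' stands for
     * "any general-purpose register".
     */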
    switch (op) {
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        return C_O2_I4(r, r, r, r, r, r);
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
    case INDEX_op_mulu2_i32:
        return C_O2_I2(r, r, r, r);
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);
#endif

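    /*
     * Note (inferred from the conditionals below): guest memory accesses may
     * need extra host registers, because a guest address wider than a host
     * register, or a 64-bit value on a 32-bit host, occupies a register pair.
     */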
    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type and addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}
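
/*
 * Encoding sketch (derived from the emitters below): every TCI instruction
 * starts with the opcode byte written above, followed by a length byte that
 * each emitter patches via "old_code_ptr[1] = s->code_ptr - old_code_ptr"
 * once all operands have been written.  For example, an "ld_i32 t0, t1, ofs"
 * emitted by tcg_out_ld() is laid out as:
 *
 *   byte 0       opcode (INDEX_op_ld_i32)
 *   byte 1       total instruction length in bytes (patched afterwards)
 *   byte 2       destination register t0
 *   byte 3       base register t1
 *   bytes 4..7   32-bit offset ofs
 */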

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

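/*
 * Check that a load/store relative to the TCG stack pointer stays within
 * the temporary buffer reserved below "sp" by tcg_target_init().
 */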
static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset < 0);
        tcg_debug_assert(offset >= -(CPU_TEMP_BUF_NLONGS * sizeof(long)));
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    stack_bounds_check(arg1, arg2);
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
    return true;
}

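/*
 * Load a constant into a register.  When the value equals its zero-extended
 * low 32 bits, the shorter tci_movi_i32 encoding is used; otherwise a full
 * 64-bit immediate is emitted (behaviour summarized from the code below).
 */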
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_tci_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_tci_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_i(s, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
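
/*
 * Expansion example (illustration only): on a 64-bit host,
 *     CASE_32_64(add)
 * expands to
 *     case INDEX_op_add_i64:
 *     case INDEX_op_add_i32:
 * while on a 32-bit host only the _i32 case is generated and CASE_64(x)
 * expands to nothing.
 */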

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_i(s, args[0]);
        break;

    case INDEX_op_goto_tb:
        tcg_debug_assert(s->tb_jmp_insn_offset == 0);
        /* indirect jump method. */
        tcg_out_i(s, (uintptr_t)(s->tb_jmp_target_addr + args[0]));
        set_jmp_reset_offset(s, args[0]);
        break;

    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 t0, t1_low, t1_high, t2_low, t2_high, cond */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#endif

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        stack_bounds_check(args[1], args[2]);
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_r(s, args[0]);
            tcg_out_r(s, args[1]);
            tcg_out_r(s, args[2]);
            tcg_out8(s, pos);
            tcg_out8(s, len);
        }
        break;

    CASE_32_64(brcond)
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out8(s, args[2]);           /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    CASE_64(ext_i32)
    CASE_64(extu_i32)
    CASE_32_64(bswap16)  /* Optional (TCG_TARGET_HAS_bswap16_*). */
    CASE_32_64(bswap32)  /* Optional (TCG_TARGET_HAS_bswap32_*). */
    CASE_64(bswap64)     /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out8(s, args[4]);           /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif

    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out32(s, *args++);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out32(s, *args++);
        break;

    case INDEX_op_mb:
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov.  */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    stack_bounds_check(arg1, arg2);
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

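/*
 * Returning false here tells the generic TCG code that this backend cannot
 * store a constant to memory directly; the caller then materializes the
 * value in a register and uses tcg_out_st() instead (a note on the generic
 * backend contract, assumed from common TCG usage rather than stated here).
 */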
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to normalize the result to 0 or 1; zero vs. non-zero is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments.  */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}