/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
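/*
 * Example: BITS(7, 4) evaluates to 0x000000f0 (bits 7..4 set).  Both
 * arguments are expected to be in the range 0..31, with n >= m.
 */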
static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, 0, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        return C_O2_I4(r, r, r, r, r, r);
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
    case INDEX_op_mulu2_i32:
        return C_O2_I2(r, r, r, r);
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);
#endif

    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}
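/*
 * Forward branches use the relocation machinery: tci_out_label() below
 * emits a zero placeholder of native word size and records it with
 * tcg_out_reloc(); patch_reloc() above later overwrites that placeholder
 * with the absolute label address.
 */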
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset < 0);
        tcg_debug_assert(offset >= -(CPU_TEMP_BUF_NLONGS * sizeof(long)));
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    stack_bounds_check(arg1, arg2);
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_tci_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_tci_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_i(s, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
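/*
 * Each bytecode instruction emitted below starts with a one byte opcode
 * followed by a one byte instruction length, which tcg_out_op() back-patches
 * via old_code_ptr[1] once all operands have been written.  For example,
 * "add_i32 t0, t1, t2" occupies five bytes: opcode, length, and three
 * register numbers.
 */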
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;

    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            /* Align for atomic patching and thread safety */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        set_jmp_reset_offset(s, args[0]);
        break;

    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#endif

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        stack_bounds_check(args[1], args[2]);
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

    CASE_32_64(brcond)
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    CASE_64(ext_i32)
    CASE_64(extu_i32)
    CASE_32_64(bswap16)  /* Optional (TCG_TARGET_HAS_bswap16_*). */
    CASE_32_64(bswap32)  /* Optional (TCG_TARGET_HAS_bswap32_*). */
    CASE_64(bswap64)     /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
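    /*
     * Guest memory operands (a sketch of the encoding): first the value
     * register (two registers for a 64-bit value on a 32-bit host), then
     * the address register (two registers when the guest address does not
     * fit in one host register), and finally the TCGMemOpIdx as a
     * native-sized immediate.
     */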
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;

    case INDEX_op_mb:
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    stack_bounds_check(arg1, arg2);
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}
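/*
 * Note: since the TCG interpreter in tci.c executes the bytecode emitted
 * above, there is no host machine code to enter, and the prologue and
 * epilogue below can remain empty.
 */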
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}