/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - (n))) >> (31 - (n) + (m))) << (m))

/* Return the constraint set index for a TCG opcode. */
static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI?  Will it be faster? */
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        return C_O2_I4(r, r, r, r, r, r);
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
    case INDEX_op_mulu2_i32:
        return C_O2_I2(r, r, r, r);
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);
#endif

    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

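/*
 * A sketch of the TCI instruction layout, inferred from the emitters
 * below (not a normative spec): each instruction begins with a
 * one-byte opcode and a one-byte total length.  tcg_out_op_t() writes
 * the opcode plus a zero placeholder for the length; every emitter
 * appends its operands and then backpatches the placeholder with
 * "old_code_ptr[1] = s->code_ptr - old_code_ptr".  For example,
 * tcg_out_op_rrs() yields an 8-byte instruction:
 *
 *     byte 0      opcode
 *     byte 1      total length in bytes (here 8)
 *     byte 2      register r0
 *     byte 3      register r1
 *     bytes 4..7  32-bit offset i2
 */
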
/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    /* Placeholder for the instruction length, backpatched by callers. */
    tcg_out8(s, 0);
}

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset < 0);
        tcg_debug_assert(offset >= -(CPU_TEMP_BUF_NLONGS * sizeof(long)));
    }
}

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tci_out_label(s, l0);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_i(s, (uintptr_t)p0);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out32(s, i1);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

#if TCG_TARGET_REG_BITS == 64
static void tcg_out_op_rI(TCGContext *s, TCGOpcode op,
                          TCGReg r0, uint64_t i1)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out64(s, i1);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
#endif

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out32(s, m2);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_debug_assert(i2 == (int32_t)i2);
    tcg_out32(s, i2);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrcl(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGCond c2, TCGLabel *l3)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out8(s, c2);
    tci_out_label(s, l3);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

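/*
 * An informal sketch of what tcg_out_op_rrcl() above produces for a
 * brcond (inferred from the emitters, not a normative layout): opcode
 * byte, length byte, two register bytes, one TCGCond byte, then a
 * native-pointer-sized label address.  For an unresolved forward
 * label, tci_out_label() reserves the address slot and records a
 * relocation that patch_reloc() fills in once the label is bound.
 */
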
static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);
    tcg_out8(s, c3);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrrm(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGArg m3)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);
    tcg_out32(s, m3);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);
    tcg_out8(s, b3);
    tcg_out8(s, b4);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrrrm(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGArg m4)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);
    tcg_out_r(s, r3);
    tcg_out32(s, m4);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

#if TCG_TARGET_REG_BITS == 32
static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);
    tcg_out_r(s, r3);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrrrcl(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3,
                              TCGCond c4, TCGLabel *l5)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);
    tcg_out_r(s, r3);
    tcg_out8(s, c4);
    tci_out_label(s, l5);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);
    tcg_out_r(s, r3);
    tcg_out_r(s, r4);
    tcg_out8(s, c5);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, op);
    tcg_out_r(s, r0);
    tcg_out_r(s, r1);
    tcg_out_r(s, r2);
    tcg_out_r(s, r3);
    tcg_out_r(s, r4);
    tcg_out_r(s, r5);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
#endif

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    stack_bounds_check(base, offset);
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rrs(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rrs(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

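/*
 * Usage example with hypothetical operand values: reloading a 32-bit
 * temp that was spilled into the stack frame could look like
 *
 *     tcg_out_ld(s, TCG_TYPE_I32, TCG_REG_R2, TCG_REG_CALL_STACK, -16);
 *
 * which emits INDEX_op_ld_i32.  The negative offset satisfies
 * stack_bounds_check() as long as it stays inside the temp buffer
 * installed by tcg_set_frame() in tcg_target_init() below.
 */
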
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_ri(s, INDEX_op_tci_movi_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rI(s, INDEX_op_tci_movi_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_i(s, (uintptr_t)arg);

    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_op_p(s, opc, (void *)args[0]);
        break;

    case INDEX_op_goto_tb:
        tcg_debug_assert(s->tb_jmp_insn_offset == 0);
        /* indirect jump method. */
        tcg_out_op_p(s, opc, s->tb_jmp_target_addr + args[0]);
        set_jmp_reset_offset(s, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;
#endif

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        stack_bounds_check(args[1], args[2]);
        tcg_out_op_rrs(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

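    /*
     * The deposit position and length operands always fit in a byte
     * (both are at most 64), which is why tcg_out_op_rrrbb() below
     * encodes them with tcg_out8() instead of full 32-bit words.
     */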
    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], pos, len);
        }
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrcl(s, opc, args[0], args[1], args[2], arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    CASE_64(ext_i32)
    CASE_64(extu_i32)
    CASE_32_64(bswap16)  /* Optional (TCG_TARGET_HAS_bswap16_*). */
    CASE_32_64(bswap32)  /* Optional (TCG_TARGET_HAS_bswap32_*). */
    CASE_64(bswap64)     /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrcl(s, opc, args[0], args[1], args[2],
                          args[3], args[4], arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;
#endif

    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        }
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        } else {
            tcg_out_op_rrrrm(s, opc, args[0], args[1],
                             args[2], args[3], args[4]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:   /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:      /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    stack_bounds_check(base, offset);
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rrs(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rrs(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

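/*
 * Note (based on the generic TCG backend contract, not stated in this
 * file): returning false from tcg_out_sti() means this backend cannot
 * store constants directly, so the common code first materializes the
 * constant into a register (via tcg_out_movi) and then uses tcg_out_st().
 */
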
/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* The caller only tests for zero vs non-zero, so there is no need
       to normalize the result to 0 or 1. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* Nothing to do: TCI bytecode is interpreted (see tci.c), so no
       native prologue or epilogue needs to be generated. */
}