/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-pool.c.inc"

/*
 * Return the register-constraint set for @op.  The interpreter places no
 * restrictions on which "registers" an operand may live in, so every
 * operand uses the generic "r" constraint; only the counts of output and
 * input operands vary per opcode.
 */
static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    /*
     * For the qemu_ld/st ops, the operand count depends on whether the
     * guest address and/or the data value need a register pair on this
     * host (i.e. whether they exceed TCG_TARGET_REG_BITS).
     */
    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

/* R0/R1 are allocated last: they double as the helper return-value regs. */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R1,
    TCG_REG_R0,
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

/*
 * Resolve a relocation against a now-known target address.
 * Only one relocation type exists here: a 20-bit signed displacement,
 * measured from the end of the 32-bit instruction (code_ptr + 1) and
 * stored in the top 20 bits of the instruction word (bits [12, 32)).
 * Returns false if the displacement does not fit, so the caller can
 * restart with a smaller translation block.
 */
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

/*
 * Debug check: accesses relative to the interpreter's stack pointer must
 * stay within the static call-args + frame area reserved by tcg_set_frame().
 */
static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}

/*
 * The tcg_out_op_* family below emits one 32-bit TCI instruction each.
 * Common field layout, as established by the deposit32 calls:
 *   bits [0,8)   opcode (TCGOpcode fits in a byte; asserted in
 *                tcg_target_init)
 *   bits [8,12)  first register operand, then successive 4-bit register
 *                fields, with immediates/conditions/lengths packed into
 *                the remaining high bits per format.
 * The suffix names the operand formats: r=register, i=immediate, l=label,
 * s=signed offset, b=bit-field length/position, c=condition, m=memop index,
 * p=pointer, v=void (no operands).
 */

/* op with a label operand: displacement filled in later via patch_reloc. */
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

/* op with a code-pointer operand, encoded as a 20-bit relative offset. */
static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

/* op with no operands: only the opcode byte is significant. */
static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

/* op with a register and a 20-bit signed immediate. */
static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

/* op with a register and a label (20-bit relocation, as tcg_out_op_l). */
static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

/* op with two registers and a 12-bit memop/mmu-index operand. */
static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 20, 12, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

/* op with two registers and a 16-bit signed displacement (loads/stores). */
static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

/* op with two registers and two 6-bit bit-field operands (extract). */
static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

/* op with three registers and a condition code (setcond). */
static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

/* op with three registers and a 12-bit memop/mmu-index operand. */
static void tcg_out_op_rrrm(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGArg m3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m3 == extract32(m3, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 12, m3);
    tcg_out32(s, insn);
}

/* op with three registers and two 6-bit bit-field operands (deposit). */
static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrr(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGReg r4)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

/* op with five registers and a condition code (movcond/setcond2). */
static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

/*
 * Emit a load or store of @val at @base + @offset.  Offsets that do not
 * fit the 16-bit displacement field are materialized into TCG_REG_TMP and
 * added to the base first, so the final access uses displacement 0.
 */
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}

/*
 * Load constant @arg into @ret.  Values that fit the 20-bit signed
 * immediate field use tci_movi directly; anything wider is placed in the
 * constant pool and loaded with tci_movl via a 20-bit relocation.
 */
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

/*
 * Emit a call to @func described by @cif.  The function pointer and the
 * ffi_cif are stored as a pair in the constant pool; the instruction
 * itself only carries a return-value size class in the register field:
 * 0 = void, 1 = 4-byte result, 2 = 8-byte result.
 */
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         ffi_cif *cif)
{
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else if (cif->rtype->size == 4) {
        which = 1;
    } else {
        tcg_debug_assert(cif->rtype->size == 8);
        which = 2;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

/*
 * Case-label helpers: CASE_32_64 expands to the _i32 case (plus the _i64
 * case on 64-bit hosts); CASE_64 expands only on 64-bit hosts.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

/* Dispatch one TCG opcode to the matching instruction-format emitter. */
static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    TCGOpcode exts;

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_op_p(s, opc, (void *)args[0]);
        break;

    case INDEX_op_goto_tb:
        tcg_debug_assert(s->tb_jmp_insn_offset == 0);
        /* indirect jump method. */
        tcg_out_op_p(s, opc, s->tb_jmp_target_addr + args[0]);
        set_jmp_reset_offset(s, args[0]);
        break;

    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], pos, len);
        }
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        {
            TCGArg pos = args[2], len = args[3];
            TCGArg max = tcg_op_defs[opc].flags & TCG_OPF_64BIT ? 64 : 32;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrbb(s, opc, args[0], args[1], pos, len);
        }
        break;

    CASE_32_64(brcond)
        /* Compute the condition into TCG_REG_TMP, then branch on it. */
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    CASE_64(ext_i32)
    CASE_64(extu_i32)
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
        exts = INDEX_op_ext16s_i32;
        goto do_bswap;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        exts = INDEX_op_ext16s_i64;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        exts = INDEX_op_ext32s_i64;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            /* Sign-extend the result when the output-signed flag is set. */
            tcg_out_op_rr(s, exts, args[0], args[0]);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        /* As for brcond: setcond2 into TCG_REG_TMP, then branch on it. */
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        }
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        } else {
            /*
             * Five register operands leave no room for the memop index
             * in the insn word; pass it via TCG_REG_TMP instead.
             */
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
            tcg_out_op_rrrrr(s, opc, args[0], args[1],
                             args[2], args[3], TCG_REG_TMP);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

/* Constants cannot be stored directly; callers fall back to tcg_out_st. */
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, TCGType type, int ct)
{
    return ct & TCG_CT_CONST;
}

/* Zero is not a valid opcode, so zero-fill serves as padding. */
static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 64-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 64 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* The interpreter needs no prologue/epilogue; see tcg_qemu_tb_exec. */
}