/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-pool.c.inc"
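/*
 * Note on the C_Ox_Iy() constraint-set macros used below: they come from
 * the generic TCG constraint machinery and describe, per opcode, x output
 * operands followed by y input operands.  For example, C_O1_I2(r, r, r) is
 * one register output with two register inputs.  TCI only ever uses plain
 * register ("r") constraints.
 */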
static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_a32_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_a64_i32:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O1_I2(r, r, r);
    case INDEX_op_qemu_ld_a32_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_ld_a64_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I2(r, r, r, r);
    case INDEX_op_qemu_st_a32_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_a64_i32:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
    case INDEX_op_qemu_st_a32_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
    case INDEX_op_qemu_st_a64_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I4(r, r, r, r);

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}
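/*
 * As the slot limit above implies, helper return values occupy consecutive
 * registers starting at TCG_REG_R0, up to 128 bits in total: two registers
 * on a 64-bit host, four on a 32-bit host.
 */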
#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
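/*
 * Instruction encoding, as implied by the deposit32() calls in the
 * emitters below (one 32-bit insn word per op):
 *
 *   bits  0..7   opcode
 *   bits  8..11  r0 (first register operand, when present)
 *   bits 12..15  r1
 *   bits 16..19  r2
 *   bits 20..23  r3, or a TCGCond
 *   bits 24..27  r4
 *   bits 28..31  r5, or a TCGCond
 *
 * Wider operands overlay the register fields: a 20-bit signed immediate,
 * displacement, or relocation occupies bits 12..31 (hence the sole reloc
 * "type" of 20 checked in patch_reloc above), a 16-bit signed offset
 * occupies bits 16..31, and a 12-bit memory-op index occupies bits
 * 20..31.  Deposit/extract position and length are packed as two 6-bit
 * fields following the registers.
 */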
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 20, 12, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrm(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGArg m3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m3 == extract32(m3, 0, 12));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 12, m3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrr(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGReg r4)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}
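/*
 * Move a constant into a register.  Values representable in the 20-bit
 * immediate field are encoded directly (tci_movi); anything wider is
 * emitted as tci_movl, with the value placed in the TB's constant pool
 * (see tcg-pool.c.inc) and the 20-bit displacement filled in by a
 * relocation when the pool is finalized.
 */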
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
        tcg_out_op_rr(s, INDEX_op_ext8s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i64);
        tcg_out_op_rr(s, INDEX_op_ext8s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i64);
        tcg_out_op_rr(s, INDEX_op_ext8u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i32);
        tcg_out_op_rr(s, INDEX_op_ext8u_i32, rd, rs);
    }
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
        tcg_out_op_rr(s, INDEX_op_ext16s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i64);
        tcg_out_op_rr(s, INDEX_op_ext16s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i64);
        tcg_out_op_rr(s, INDEX_op_ext16u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i32);
        tcg_out_op_rr(s, INDEX_op_ext16u_i32, rd, rs);
    }
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32s_i64);
    tcg_out_op_rr(s, INDEX_op_ext32s_i64, rd, rs);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32u_i64);
    tcg_out_op_rr(s, INDEX_op_ext32u_i64, rd, rs);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}
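/*
 * Emit a call.  The 4-bit "which" field tells the interpreter how to
 * store the libffi return value: 0 for void, and 1/2/3 for 32/64/128-bit
 * results respectively, i.e. ctz32(size) - 1.  The helper address and
 * the ffi_cif pointer travel through the constant pool as a pair
 * (new_pool_l2), referenced by the usual 20-bit displacement.
 */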
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i64):    \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)                             \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
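/*
 * With the indirect method, goto_tb encodes a pointer to the TB's
 * jump-target slot (get_jmp_target_addr) rather than an inline
 * destination.  The interpreter reloads the slot on every execution, so
 * retargeting a chained TB is a plain store done by common code and
 * there is nothing to patch in the generated code above.
 */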
static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    TCGOpcode exts;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], pos, len);
        }
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        {
            TCGArg pos = args[2], len = args[3];
            TCGArg max = tcg_op_defs[opc].flags & TCG_OPF_64BIT ? 64 : 32;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrbb(s, opc, args[0], args[1], pos, len);
        }
        break;
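
    /*
     * TCI has no two-register compare-and-branch: materialize the
     * comparison into TCG_REG_TMP with setcond, then branch on the
     * temporary being non-zero.
     */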
    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
        exts = INDEX_op_ext16s_i32;
        goto do_bswap;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        exts = INDEX_op_ext16s_i64;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        exts = INDEX_op_ext32s_i64;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_op_rr(s, exts, args[0], args[0]);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_a32_i32:
    case INDEX_op_qemu_st_a32_i32:
        tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        break;
    case INDEX_op_qemu_ld_a64_i32:
    case INDEX_op_qemu_st_a64_i32:
    case INDEX_op_qemu_ld_a32_i64:
    case INDEX_op_qemu_st_a32_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
        }
        break;
    case INDEX_op_qemu_ld_a64_i64:
    case INDEX_op_qemu_st_a64_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
            tcg_out_op_rrrrr(s, opc, args[0], args[1],
                             args[2], args[3], TCG_REG_TMP);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb. */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb. */
    case INDEX_op_ext8s_i32:  /* Always emitted via tcg_reg_alloc_op. */
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
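/* TCI defines no extra constant constraints; only the generic TCG_CT_CONST
   bit applies, and any value satisfies it. */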
static bool tcg_target_const_match(int64_t val, TCGType type, int ct)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 64-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 64 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}