/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-pool.c.inc"
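/*
 * Return the constraint set for @op.  The interpreter places no
 * restriction on which register holds which operand, so every operand
 * uses the plain "r" constraint.
 */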
static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_a32_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_a64_i32:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O1_I2(r, r, r);
    case INDEX_op_qemu_ld_a32_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_ld_a64_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I2(r, r, r, r);
    case INDEX_op_qemu_st_a32_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_a64_i32:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
    case INDEX_op_qemu_st_a32_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
    case INDEX_op_qemu_st_a64_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I4(r, r, r, r);

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif
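/*
 * All TCI relocations are 20-bit pc-relative displacements, measured
 * from the end of the instruction word being patched and stored in the
 * top 20 bits of that word.
 */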
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
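/*
 * Emitters for the TCI instruction formats.  Each instruction is a
 * single 32-bit word with the opcode in bits 0..7; the suffix names the
 * operand fields in order: r = 4-bit register, i = 20-bit signed
 * immediate, s = 16-bit signed immediate, m = 16-bit MemOpIdx,
 * b = 6-bit bit-field position or length, c = 4-bit condition,
 * l = label and p = pointer (both as a 20-bit displacement in the
 * top bits of the word).
 */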
static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrr(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGReg r4)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}
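/*
 * Loads and stores encode a 16-bit signed displacement.  Larger
 * offsets are first materialized in TCG_REG_TMP and added to the base.
 */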
static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
        tcg_out_op_rr(s, INDEX_op_ext8s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i64);
        tcg_out_op_rr(s, INDEX_op_ext8s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i64);
        tcg_out_op_rr(s, INDEX_op_ext8u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i32);
        tcg_out_op_rr(s, INDEX_op_ext8u_i32, rd, rs);
    }
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
        tcg_out_op_rr(s, INDEX_op_ext16s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i64);
        tcg_out_op_rr(s, INDEX_op_ext16s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i64);
        tcg_out_op_rr(s, INDEX_op_ext16u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i32);
        tcg_out_op_rr(s, INDEX_op_ext16u_i32, rd, rs);
    }
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32s_i64);
    tcg_out_op_rr(s, INDEX_op_ext32s_i64, rd, rs);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32u_i64);
    tcg_out_op_rr(s, INDEX_op_ext32u_i64, rd, rs);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}
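/*
 * Calls encode a 4-bit code for the size of the return value
 * (0 = void, 1/2/3 = 4/8/16 bytes); the function pointer and its
 * ffi_cif descriptor are placed in the constant pool and found
 * through a 20-bit relocation.
 */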
static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
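/*
 * Emit one TCG op.  Ops without a direct TCI encoding are lowered
 * here: brcond becomes setcond into TCG_REG_TMP followed by a branch
 * on that register, and sign-extending bswaps append an explicit sign
 * extension, because the base tci bswaps zero-extend.
 */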
static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    TCGOpcode exts;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(add)
    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], pos, len);
        }
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        {
            TCGArg pos = args[2], len = args[3];
            TCGArg max = tcg_op_defs[opc].flags & TCG_OPF_64BIT ? 64 : 32;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrbb(s, opc, args[0], args[1], pos, len);
        }
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
        exts = INDEX_op_ext16s_i32;
        goto do_bswap;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        exts = INDEX_op_ext16s_i64;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        exts = INDEX_op_ext32s_i64;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_op_rr(s, exts, args[0], args[0]);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_a32_i32:
    case INDEX_op_qemu_st_a32_i32:
        tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        break;
    case INDEX_op_qemu_ld_a64_i32:
    case INDEX_op_qemu_st_a64_i32:
    case INDEX_op_qemu_ld_a32_i64:
    case INDEX_op_qemu_st_a32_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
        }
        break;
    case INDEX_op_qemu_ld_a64_i64:
    case INDEX_op_qemu_st_a64_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
            tcg_out_op_rrrrr(s, opc, args[0], args[1],
                             args[2], args[3], TCG_REG_TMP);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb. */
    case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb. */
    case INDEX_op_ext8s_i32:  /* Always emitted via tcg_reg_alloc_op. */
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}
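/*
 * TCI has no store-immediate form; returning false lets the caller
 * fall back to loading the constant into a register first.
 */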
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, TCGType type, int ct)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}
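/*
 * The interpreter keeps its register file in its own stack frame, so
 * all sixteen registers are available for both 32-bit and 64-bit
 * operations.
 */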
static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}