/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* Used for function call generation. */
#define TCG_TARGET_CALL_STACK_OFFSET 0
#define TCG_TARGET_STACK_ALIGN 8
#if TCG_TARGET_REG_BITS == 32
# define TCG_TARGET_CALL_ARG_I32 TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I64 TCG_CALL_ARG_EVEN
# define TCG_TARGET_CALL_ARG_I128 TCG_CALL_ARG_EVEN
#else
# define TCG_TARGET_CALL_ARG_I32 TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I64 TCG_CALL_ARG_NORMAL
# define TCG_TARGET_CALL_ARG_I128 TCG_CALL_ARG_NORMAL
#endif
#define TCG_TARGET_CALL_RET_I128 TCG_CALL_RET_NORMAL
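
/*
 * TCI is an interpreter: every operand lives in one of the sixteen
 * general "registers", so each opcode maps to a constraint set built
 * purely from 'r' slots; no opcode needs a fixed register, a register
 * pair, or an immediate operand form here.
 */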

static TCGConstraintSetIndex
tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_st_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);

    default:
        return C_NotImplemented;
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
    /* Either 2 or 4 of these are call clobbered, so use them last. */
    TCG_REG_R3,
    TCG_REG_R2,
    TCG_REG_R1,
    TCG_REG_R0,
};

/* No call arguments via registers. All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif
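
/*
 * The only relocation TCI uses is a 20-bit displacement, measured from
 * the end of the 32-bit insn word being patched and stored in its top
 * bits; 'type' carries the field width (20) rather than an ELF
 * relocation type.
 */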

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
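
/*
 * Emitters for the TCI instruction formats.  The suffix names the
 * operand list: r = register, i/s = immediate, b = bit-field
 * position/length, c = condition, l = label, m = memop index,
 * p = pointer, v = no operands.  Every format places the 8-bit opcode
 * in bits [7:0] and packs 4-bit register numbers upward from bit 8;
 * e.g. the "rrs" format used by loads and stores is laid out as:
 *
 *   31            16 15  12 11   8 7      0
 *  +----------------+------+------+--------+
 *  |   imm16 (s16)  |  r1  |  r0  | opcode |
 *  +----------------+------+------+--------+
 */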

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}
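
/*
 * Loads and stores only have a 16-bit displacement field; a larger
 * offset is materialized into TCG_REG_TMP and folded into the base.
 */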

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, INDEX_op_add, TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
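
/*
 * The ext8/16/32 helpers below all reduce to the generic extract and
 * sextract operations with a starting position of zero.
 */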

static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
                            TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_extract_i32 :
                    INDEX_op_extract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
                             TCGReg rs, unsigned pos, unsigned len)
{
    TCGOpcode opc = type == TCG_TYPE_I32 ?
                    INDEX_op_sextract_i32 :
                    INDEX_op_sextract_i64;
    tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
}

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 8);
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    tcg_out_sextract(s, type, rd, rs, 0, 16);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
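
/*
 * Collapse the _i32/_i64 opcode pairs: a 64-bit host emits both
 * variants the same way, while a 32-bit host only has the _i32 form.
 */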

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i64):    \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)                             \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}
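
/*
 * 'add' is emitted via the newer per-opcode TCGOutOp emitter tables;
 * the remaining opcodes still go through the tcg_out_op switch below.
 */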

static void tgen_add(TCGContext *s, TCGType type,
                     TCGReg a0, TCGReg a1, TCGReg a2)
{
    tcg_out_op_rrr(s, INDEX_op_add, a0, a1, a2);
}

static const TCGOutOpBinary outop_add = {
    .base.static_constraint = C_O1_I2(r, r, r),
    .out_rrr = tgen_add,
};

static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    int width;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    CASE_32_64(ld8u)
    CASE_32_64(ld8s)
    CASE_32_64(ld16u)
    CASE_32_64(ld16s)
    case INDEX_op_ld_i32:
    CASE_64(ld32u)
    CASE_64(ld32s)
    CASE_64(ld)
    CASE_32_64(st8)
    CASE_32_64(st16)
    case INDEX_op_st_i32:
    CASE_64(st32)
    CASE_64(st)
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(sub)
    CASE_32_64(mul)
    CASE_32_64(and)
    CASE_32_64(or)
    CASE_32_64(xor)
    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(shl)
    CASE_32_64(shr)
    CASE_32_64(sar)
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)
        tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], args[3], args[4]);
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        tcg_out_op_rrbb(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        width = 16;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        width = 32;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        if (TCG_TARGET_REG_BITS == 64 && s->addr_type == TCG_TYPE_I32) {
            tcg_out_ext32u(s, TCG_REG_TMP, args[1]);
            tcg_out_op_rrm(s, opc, args[0], TCG_REG_TMP, args[2]);
        } else {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_call:         /* Always emitted via tcg_out_call. */
    case INDEX_op_exit_tb:      /* Always emitted via tcg_out_exit_tb. */
    case INDEX_op_goto_tb:      /* Always emitted via tcg_out_goto_tb. */
    case INDEX_op_ext_i32_i64:  /* Always emitted via tcg_reg_alloc_op. */
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions. However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}
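
/*
 * No host prologue or epilogue is required: generated code is entered
 * through the interpreter loop in tci.c rather than by a direct call,
 * so both hooks below are empty.
 */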

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    g_assert_not_reached();
}