/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "tcg/tcg.h"
#include "tcg/helper-info.h"
#include "tcg/tcg-ldst.h"
#include "disas/dis-asm.h"
#include "tcg-has.h"
#include <ffi.h>


#define ctpop_tr    glue(ctpop, TCG_TARGET_REG_BITS)
#define deposit_tr  glue(deposit, TCG_TARGET_REG_BITS)
#define extract_tr  glue(extract, TCG_TARGET_REG_BITS)
#define sextract_tr glue(sextract, TCG_TARGET_REG_BITS)

/*
 * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster.
 */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)(cond))
#endif

__thread uintptr_t tci_tb_ptr;

static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    regs[low_index] = (uint32_t)value;
    regs[high_index] = value >> 32;
}

/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}

/*
 * Load sets of arguments all at once.  The naming convention is:
 *   tci_args_<arguments>
 * where arguments is a sequence of
 *
 *   b = immediate (bit position)
 *   c = condition (TCGCond)
 *   i = immediate (uint32_t)
 *   I = immediate (tcg_target_ulong)
 *   l = label or pointer
 *   m = immediate (MemOpIdx)
 *   n = immediate (call return length)
 *   r = register
 *   s = signed ldst offset
 */

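/*
 * For example, a three-register ("rrr") instruction packs the opcode into
 * bits [0,8) and the register operands into successive 4-bit fields
 * starting at bit 8, as decoded by tci_args_rrr() below; wider immediates
 * occupy the remaining high bits of the 32-bit instruction word.
 */
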
static void tci_args_l(uint32_t insn, const void *tb_ptr, void **l0)
{
    int diff = sextract32(insn, 12, 20);
    *l0 = diff ? (void *)tb_ptr + diff : NULL;
}

static void tci_args_r(uint32_t insn, TCGReg *r0)
{
    *r0 = extract32(insn, 8, 4);
}

static void tci_args_nl(uint32_t insn, const void *tb_ptr,
                        uint8_t *n0, void **l1)
{
    *n0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}

static void tci_args_rl(uint32_t insn, const void *tb_ptr,
                        TCGReg *r0, void **l1)
{
    *r0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}

static void tci_args_rr(uint32_t insn, TCGReg *r0, TCGReg *r1)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
}

static void tci_args_ri(uint32_t insn, TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = extract32(insn, 8, 4);
    *i1 = sextract32(insn, 12, 20);
}

static void tci_args_rrm(uint32_t insn, TCGReg *r0,
                         TCGReg *r1, MemOpIdx *m2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *m2 = extract32(insn, 16, 16);
}

static void tci_args_rrr(uint32_t insn, TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
}

static void tci_args_rrs(uint32_t insn, TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = sextract32(insn, 16, 16);
}

static void tci_args_rrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                          uint8_t *i2, uint8_t *i3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = extract32(insn, 16, 6);
    *i3 = extract32(insn, 22, 6);
}

static void tci_args_rrrc(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *c3 = extract32(insn, 20, 4);
}

static void tci_args_rrrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, uint8_t *i3, uint8_t *i4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *i3 = extract32(insn, 20, 6);
    *i4 = extract32(insn, 26, 6);
}

static void tci_args_rrrr(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
}

static void tci_args_rrrrrc(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *c5 = extract32(insn, 28, 4);
}

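/*
 * Evaluate a TCGCond for two 32-bit operands.  Signed conditions compare
 * the values as int32_t, unsigned conditions as uint32_t, and the TSTEQ /
 * TSTNE conditions test the bitwise AND of the operands against zero.
 */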
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    case TCG_COND_TSTEQ:
        result = (u0 & u1) == 0;
        break;
    case TCG_COND_TSTNE:
        result = (u0 & u1) != 0;
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    case TCG_COND_TSTEQ:
        result = (u0 & u1) == 0;
        break;
    case TCG_COND_TSTNE:
        result = (u0 & u1) != 0;
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

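/*
 * Guest memory accesses are forwarded to the generic helper_ld*_mmu /
 * helper_st*_mmu routines.  The MemOpIdx carries the MemOp (size, sign and
 * endianness) together with the mmu index, and the current bytecode pointer
 * is passed as the "return address" so the helpers can unwind back to the
 * translation block on a fault.
 */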
static uint64_t tci_qemu_ld(CPUArchState *env, uint64_t taddr,
                            MemOpIdx oi, const void *tb_ptr)
{
    MemOp mop = get_memop(oi);
    uintptr_t ra = (uintptr_t)tb_ptr;

    switch (mop & MO_SSIZE) {
    case MO_UB:
        return helper_ldub_mmu(env, taddr, oi, ra);
    case MO_SB:
        return helper_ldsb_mmu(env, taddr, oi, ra);
    case MO_UW:
        return helper_lduw_mmu(env, taddr, oi, ra);
    case MO_SW:
        return helper_ldsw_mmu(env, taddr, oi, ra);
    case MO_UL:
        return helper_ldul_mmu(env, taddr, oi, ra);
    case MO_SL:
        return helper_ldsl_mmu(env, taddr, oi, ra);
    case MO_UQ:
        return helper_ldq_mmu(env, taddr, oi, ra);
    default:
        g_assert_not_reached();
    }
}

static void tci_qemu_st(CPUArchState *env, uint64_t taddr, uint64_t val,
                        MemOpIdx oi, const void *tb_ptr)
{
    MemOp mop = get_memop(oi);
    uintptr_t ra = (uintptr_t)tb_ptr;

    switch (mop & MO_SIZE) {
    case MO_UB:
        helper_stb_mmu(env, taddr, val, oi, ra);
        break;
    case MO_UW:
        helper_stw_mmu(env, taddr, val, oi, ra);
        break;
    case MO_UL:
        helper_stl_mmu(env, taddr, val, oi, ra);
        break;
    case MO_UQ:
        helper_stq_mmu(env, taddr, val, oi, ra);
        break;
    default:
        g_assert_not_reached();
    }
}

/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint32_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    uint64_t stack[(TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)
                   / sizeof(uint64_t)];
    bool carry = false;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = (uintptr_t)stack;
    tci_assert(tb_ptr);

    for (;;) {
        uint32_t insn;
        TCGOpcode opc;
        TCGReg r0, r1, r2, r3, r4;
        tcg_target_ulong t1;
        TCGCond condition;
        uint8_t pos, len;
        uint32_t tmp32;
        uint64_t tmp64, taddr;
        MemOpIdx oi;
        int32_t ofs;
        void *ptr;

        insn = *tb_ptr++;
        opc = extract32(insn, 0, 8);

        switch (opc) {
        case INDEX_op_call:
            {
                void *call_slots[MAX_CALL_IARGS];
                ffi_cif *cif;
                void *func;
                unsigned i, s, n;

                tci_args_nl(insn, tb_ptr, &len, &ptr);
                func = ((void **)ptr)[0];
                cif = ((void **)ptr)[1];

                n = cif->nargs;
                for (i = s = 0; i < n; ++i) {
                    ffi_type *t = cif->arg_types[i];
                    call_slots[i] = &stack[s];
                    s += DIV_ROUND_UP(t->size, 8);
                }

                /* Helper functions may need to access the "return address" */
                tci_tb_ptr = (uintptr_t)tb_ptr;
                ffi_call(cif, func, stack, call_slots);
            }

            switch (len) {
            case 0: /* void */
                break;
            case 1: /* uint32_t */
                /*
                 * The result winds up "left-aligned" in the stack[0] slot.
                 * Note that libffi has an odd special case in that it will
                 * always widen an integral result to ffi_arg.
                 */
                if (sizeof(ffi_arg) == 8) {
                    regs[TCG_REG_R0] = (uint32_t)stack[0];
                } else {
                    regs[TCG_REG_R0] = *(uint32_t *)stack;
                }
                break;
            case 2: /* uint64_t */
                /*
                 * For TCG_TARGET_REG_BITS == 32, the register pair
                 * must stay in host memory order.
                 */
                memcpy(&regs[TCG_REG_R0], stack, 8);
                break;
            case 3: /* Int128 */
                memcpy(&regs[TCG_REG_R0], stack, 16);
                break;
            default:
                g_assert_not_reached();
            }
            break;

        case INDEX_op_br:
            tci_args_l(insn, tb_ptr, &ptr);
            tb_ptr = ptr;
            continue;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            regs[r0] = tci_compare64(tci_uint64(regs[r2], regs[r1]),
                                     tci_uint64(regs[r4], regs[r3]),
                                     condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond:
            tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
        case INDEX_op_movcond:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            tmp32 = tci_compare64(regs[r1], regs[r2], condition);
            regs[r0] = regs[tmp32 ? r3 : r4];
            break;
#endif
        case INDEX_op_mov:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi:
            tci_args_ri(insn, &r0, &t1);
            regs[r0] = t1;
            break;
        case INDEX_op_tci_movl:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            regs[r0] = *(tcg_target_ulong *)ptr;
            break;
        case INDEX_op_tci_setcarry:
            carry = true;
            break;

        /* Load/store operations (32 bit). */

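        /*
         * These opcodes access host memory directly: the effective address
         * is regs[r1] + ofs, typically a field of CPUArchState or of the
         * TCG stack frame.  Guest memory is only touched through the
         * qemu_ld/qemu_st opcodes further down.
         */
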
        case INDEX_op_ld8u:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        case INDEX_op_ld8s:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        case INDEX_op_ld16u:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        case INDEX_op_ld16s:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(tcg_target_ulong *)ptr;
            break;
        case INDEX_op_st8:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        case INDEX_op_st16:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(tcg_target_ulong *)ptr = regs[r0];
            break;

        /* Arithmetic operations (mixed 32/64 bit). */

        case INDEX_op_add:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        case INDEX_op_sub:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        case INDEX_op_mul:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        case INDEX_op_and:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        case INDEX_op_or:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        case INDEX_op_xor:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;
        case INDEX_op_andc:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] & ~regs[r2];
            break;
        case INDEX_op_orc:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] | ~regs[r2];
            break;
        case INDEX_op_eqv:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] ^ regs[r2]);
            break;
        case INDEX_op_nand:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] & regs[r2]);
            break;
        case INDEX_op_nor:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] | regs[r2]);
            break;
        case INDEX_op_neg:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
        case INDEX_op_not:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
        case INDEX_op_ctpop:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ctpop_tr(regs[r1]);
            break;

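        /*
         * Carry arithmetic: "addco"/"subbo" produce the carry/borrow flag,
         * "addci"/"subbi" consume it, and "addcio"/"subbio" both consume
         * and produce it.  INDEX_op_tci_setcarry above sets the flag
         * unconditionally.
         */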
        case INDEX_op_addco:
            tci_args_rrr(insn, &r0, &r1, &r2);
            t1 = regs[r1] + regs[r2];
            carry = t1 < regs[r1];
            regs[r0] = t1;
            break;
        case INDEX_op_addci:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2] + carry;
            break;
        case INDEX_op_addcio:
            tci_args_rrr(insn, &r0, &r1, &r2);
            if (carry) {
                t1 = regs[r1] + regs[r2] + 1;
                carry = t1 <= regs[r1];
            } else {
                t1 = regs[r1] + regs[r2];
                carry = t1 < regs[r1];
            }
            regs[r0] = t1;
            break;
        case INDEX_op_subbo:
            tci_args_rrr(insn, &r0, &r1, &r2);
            carry = regs[r1] < regs[r2];
            regs[r0] = regs[r1] - regs[r2];
            break;
        case INDEX_op_subbi:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2] - carry;
            break;
        case INDEX_op_subbio:
            tci_args_rrr(insn, &r0, &r1, &r2);
            if (carry) {
                carry = regs[r1] <= regs[r2];
                regs[r0] = regs[r1] - regs[r2] - 1;
            } else {
                carry = regs[r1] < regs[r2];
                regs[r0] = regs[r1] - regs[r2];
            }
            break;
        case INDEX_op_muls2:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
            tci_write_reg64(regs, r1, r0, tmp64);
#else
            muls64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
#endif
            break;
        case INDEX_op_mulu2:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = (uint64_t)(uint32_t)regs[r2] * (uint32_t)regs[r3];
            tci_write_reg64(regs, r1, r0, tmp64);
#else
            mulu64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
#endif
            break;

        /* Arithmetic operations (32 bit). */

        case INDEX_op_tci_divs32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_tci_divu32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_tci_rems32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_tci_remu32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;
        case INDEX_op_tci_clz32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            tmp32 = regs[r1];
            regs[r0] = tmp32 ? clz32(tmp32) : regs[r2];
            break;
        case INDEX_op_tci_ctz32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            tmp32 = regs[r1];
            regs[r0] = tmp32 ? ctz32(tmp32) : regs[r2];
            break;
        case INDEX_op_tci_setcond32:
            tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
        case INDEX_op_tci_movcond32:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            tmp32 = tci_compare32(regs[r1], regs[r2], condition);
            regs[r0] = regs[tmp32 ? r3 : r4];
            break;

        /* Shift/rotate operations. */

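        /*
         * Shift counts are reduced modulo the register width
         * (TCG_TARGET_REG_BITS) and rotate counts are masked to 31 or 63,
         * so oversized counts cannot trigger undefined behaviour in the
         * host shift.
         */
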
        case INDEX_op_shl:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] % TCG_TARGET_REG_BITS);
            break;
        case INDEX_op_shr:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] % TCG_TARGET_REG_BITS);
            break;
        case INDEX_op_sar:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ((tcg_target_long)regs[r1]
                        >> (regs[r2] % TCG_TARGET_REG_BITS));
            break;
        case INDEX_op_tci_rotl32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_tci_rotr32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_deposit:
            tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit_tr(regs[r1], pos, len, regs[r2]);
            break;
        case INDEX_op_extract:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = extract_tr(regs[r1], pos, len);
            break;
        case INDEX_op_sextract:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = sextract_tr(regs[r1], pos, len);
            break;
        case INDEX_op_brcond:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            if (regs[r0]) {
                tb_ptr = ptr;
            }
            break;
        case INDEX_op_bswap16:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
        case INDEX_op_bswap32:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#if TCG_TARGET_REG_BITS == 64
        /* Load/store operations (64 bit). */

        case INDEX_op_ld32u:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        case INDEX_op_ld32s:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_st32:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;

        /* Arithmetic operations (64 bit). */

        case INDEX_op_divs:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rems:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;
        case INDEX_op_clz:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ? clz64(regs[r1]) : regs[r2];
            break;
        case INDEX_op_ctz:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ? ctz64(regs[r1]) : regs[r2];
            break;

        /* Shift/rotate operations (64 bit). */

        case INDEX_op_rotl:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_ext_i32_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_extu_i32_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
        case INDEX_op_bswap64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */

        /* QEMU specific operations. */

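        /*
         * exit_tb returns its operand to the caller of tcg_qemu_tb_exec,
         * goto_tb reads its destination indirectly through a pointer slot
         * (so the slot can be re-patched when TBs are linked or unlinked),
         * and goto_ptr returns 0 when no linked TB is available.
         */
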
        case INDEX_op_exit_tb:
            tci_args_l(insn, tb_ptr, &ptr);
            return (uintptr_t)ptr;

        case INDEX_op_goto_tb:
            tci_args_l(insn, tb_ptr, &ptr);
            tb_ptr = *(void **)ptr;
            break;

        case INDEX_op_goto_ptr:
            tci_args_r(insn, &r0);
            ptr = (void *)regs[r0];
            if (!ptr) {
                return 0;
            }
            tb_ptr = ptr;
            break;

        case INDEX_op_qemu_ld_i32:
            tci_args_rrm(insn, &r0, &r1, &oi);
            taddr = regs[r1];
            regs[r0] = tci_qemu_ld(env, taddr, oi, tb_ptr);
            break;

        case INDEX_op_qemu_ld_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
                taddr = regs[r2];
                oi = regs[r3];
            }
            tmp64 = tci_qemu_ld(env, taddr, oi, tb_ptr);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg64(regs, r1, r0, tmp64);
            } else {
                regs[r0] = tmp64;
            }
            break;

        case INDEX_op_qemu_st_i32:
            tci_args_rrm(insn, &r0, &r1, &oi);
            taddr = regs[r1];
            tci_qemu_st(env, taddr, regs[r0], oi, tb_ptr);
            break;

        case INDEX_op_qemu_st_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                tmp64 = regs[r0];
                taddr = regs[r1];
            } else {
                tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
                tmp64 = tci_uint64(regs[r1], regs[r0]);
                taddr = regs[r2];
                oi = regs[r3];
            }
            tci_qemu_st(env, taddr, tmp64, oi, tb_ptr);
            break;

        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
    }
}

/*
 * Disassembler that matches the interpreter
 */

static const char *str_r(TCGReg r)
{
    static const char regs[TCG_TARGET_NB_REGS][4] = {
        "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
        "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
    };

    QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
    QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);

    assert((unsigned)r < TCG_TARGET_NB_REGS);
    return regs[r];
}

static const char *str_c(TCGCond c)
{
    static const char cond[16][8] = {
        [TCG_COND_NEVER] = "never",
        [TCG_COND_ALWAYS] = "always",
        [TCG_COND_EQ] = "eq",
        [TCG_COND_NE] = "ne",
        [TCG_COND_LT] = "lt",
        [TCG_COND_GE] = "ge",
        [TCG_COND_LE] = "le",
        [TCG_COND_GT] = "gt",
        [TCG_COND_LTU] = "ltu",
        [TCG_COND_GEU] = "geu",
        [TCG_COND_LEU] = "leu",
        [TCG_COND_GTU] = "gtu",
        [TCG_COND_TSTEQ] = "tsteq",
        [TCG_COND_TSTNE] = "tstne",
    };

    assert((unsigned)c < ARRAY_SIZE(cond));
    assert(cond[c][0] != 0);
    return cond[c];
}

/* Disassemble TCI bytecode. */
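/*
 * Each call decodes exactly one 32-bit instruction word and returns its
 * size, so a generic disassembler loop advances through TCI bytecode the
 * same way it would through native code.
 */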
int print_insn_tci(bfd_vma addr, disassemble_info *info)
{
    const uint32_t *tb_ptr = (const void *)(uintptr_t)addr;
    const TCGOpDef *def;
    const char *op_name;
    uint32_t insn;
    TCGOpcode op;
    TCGReg r0, r1, r2, r3, r4;
    tcg_target_ulong i1;
    int32_t s2;
    TCGCond c;
    MemOpIdx oi;
    uint8_t pos, len;
    void *ptr;

    /* TCI is always the host, so we don't need to load indirect. */
    insn = *tb_ptr++;

    info->fprintf_func(info->stream, "%08x ", insn);

    op = extract32(insn, 0, 8);
    def = &tcg_op_defs[op];
    op_name = def->name;

    switch (op) {
    case INDEX_op_br:
    case INDEX_op_exit_tb:
    case INDEX_op_goto_tb:
        tci_args_l(insn, tb_ptr, &ptr);
        info->fprintf_func(info->stream, "%-12s  %p", op_name, ptr);
        break;

    case INDEX_op_goto_ptr:
        tci_args_r(insn, &r0);
        info->fprintf_func(info->stream, "%-12s  %s", op_name, str_r(r0));
        break;

    case INDEX_op_call:
        tci_args_nl(insn, tb_ptr, &len, &ptr);
        info->fprintf_func(info->stream, "%-12s  %d, %p", op_name, len, ptr);
        break;

    case INDEX_op_brcond:
        tci_args_rl(insn, tb_ptr, &r0, &ptr);
        info->fprintf_func(info->stream, "%-12s  %s, 0, ne, %p",
                           op_name, str_r(r0), ptr);
        break;

    case INDEX_op_setcond:
    case INDEX_op_tci_setcond32:
        tci_args_rrrc(insn, &r0, &r1, &r2, &c);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
        break;

    case INDEX_op_tci_movi:
        tci_args_ri(insn, &r0, &i1);
        info->fprintf_func(info->stream, "%-12s  %s, 0x%" TCG_PRIlx,
                           op_name, str_r(r0), i1);
        break;

    case INDEX_op_tci_movl:
        tci_args_rl(insn, tb_ptr, &r0, &ptr);
        info->fprintf_func(info->stream, "%-12s  %s, %p",
                           op_name, str_r(r0), ptr);
        break;

    case INDEX_op_tci_setcarry:
        info->fprintf_func(info->stream, "%-12s", op_name);
        break;

    case INDEX_op_ld8u:
    case INDEX_op_ld8s:
    case INDEX_op_ld16u:
    case INDEX_op_ld16s:
    case INDEX_op_ld32u:
    case INDEX_op_ld:
    case INDEX_op_st8:
    case INDEX_op_st16:
    case INDEX_op_st32:
    case INDEX_op_st:
        tci_args_rrs(insn, &r0, &r1, &s2);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %d",
                           op_name, str_r(r0), str_r(r1), s2);
        break;

    case INDEX_op_bswap16:
    case INDEX_op_bswap32:
    case INDEX_op_ctpop:
    case INDEX_op_mov:
    case INDEX_op_neg:
    case INDEX_op_not:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap64:
        tci_args_rr(insn, &r0, &r1);
        info->fprintf_func(info->stream, "%-12s  %s, %s",
                           op_name, str_r(r0), str_r(r1));
        break;

    case INDEX_op_add:
    case INDEX_op_addci:
    case INDEX_op_addcio:
    case INDEX_op_addco:
    case INDEX_op_and:
    case INDEX_op_andc:
    case INDEX_op_clz:
    case INDEX_op_ctz:
    case INDEX_op_divs:
    case INDEX_op_divu:
    case INDEX_op_eqv:
    case INDEX_op_mul:
    case INDEX_op_nand:
    case INDEX_op_nor:
    case INDEX_op_or:
    case INDEX_op_orc:
    case INDEX_op_rems:
    case INDEX_op_remu:
    case INDEX_op_rotl:
    case INDEX_op_rotr:
    case INDEX_op_sar:
    case INDEX_op_shl:
    case INDEX_op_shr:
    case INDEX_op_sub:
    case INDEX_op_subbi:
    case INDEX_op_subbio:
    case INDEX_op_subbo:
    case INDEX_op_xor:
    case INDEX_op_tci_ctz32:
    case INDEX_op_tci_clz32:
    case INDEX_op_tci_divs32:
    case INDEX_op_tci_divu32:
    case INDEX_op_tci_rems32:
    case INDEX_op_tci_remu32:
    case INDEX_op_tci_rotl32:
    case INDEX_op_tci_rotr32:
        tci_args_rrr(insn, &r0, &r1, &r2);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2));
        break;

    case INDEX_op_deposit:
        tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %d, %d",
                           op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
        break;

    case INDEX_op_extract:
    case INDEX_op_sextract:
        tci_args_rrbb(insn, &r0, &r1, &pos, &len);
        info->fprintf_func(info->stream, "%-12s  %s,%s,%d,%d",
                           op_name, str_r(r0), str_r(r1), pos, len);
        break;

    case INDEX_op_tci_movcond32:
    case INDEX_op_movcond:
    case INDEX_op_setcond2_i32:
        tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &c);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_c(c));
        break;

    case INDEX_op_muls2:
    case INDEX_op_mulu2:
        tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1),
                           str_r(r2), str_r(r3));
        break;

    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
                               op_name, str_r(r0), str_r(r1),
                               str_r(r2), str_r(r3));
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        tci_args_rrm(insn, &r0, &r1, &oi);
        info->fprintf_func(info->stream, "%-12s  %s, %s, %x",
                           op_name, str_r(r0), str_r(r1), oi);
        break;

    case 0:
        /* tcg_out_nop_fill uses zeros */
        if (insn == 0) {
            info->fprintf_func(info->stream, "align");
            break;
        }
        /* fall through */

    default:
        info->fprintf_func(info->stream, "illegal opcode %d", op);
        break;
    }

    return sizeof(insn);
}