/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
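/*
 * Bytecode layout, as consumed by the readers below and by the dispatch
 * loop in tcg_qemu_tb_exec(): every op starts with an opcode byte and a
 * length byte holding the total op size (checked by the tci_assert at the
 * end of each loop iteration), followed by its operands.  Register
 * operands are single index bytes; immediates are stored at native width
 * in host byte order.
 *
 * Illustrative sketch only (the authoritative encoder is the TCI backend,
 * tcg/tci/tcg-target.inc.c): "mov_i32 r0, r1" would occupy four bytes,
 *
 *     tb[0] = INDEX_op_mov_i32;    opcode
 *     tb[1] = 4;                   total op length in bytes
 *     tb[2] = TCG_REG_R0;          output register index
 *     tb[3] = TCG_REG_R1;          input register index
 */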
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}
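/*
 * Sketch of the "register or constant" operand decoded above: the encoder
 * spends one byte on a register index, using the pseudo index TCG_CONST
 * (a value no real register uses) to announce an inline immediate instead:
 *
 *     [reg]                            read that register
 *     [TCG_CONST][native immediate]    read the following constant
 *
 * The 32 and 64 bit variants below follow the same pattern with
 * immediates of the corresponding width.
 */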
/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
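/*
 * Labels are absolute host addresses within the bytecode, stored as
 * native-width immediates; the branch cases in the interpreter simply
 * assign them to tb_ptr.  A value of zero would mean the encoder never
 * resolved the label, hence the assertion above.
 */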
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
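/*
 * Guest memory access comes in two flavours.  With CONFIG_SOFTMMU every
 * access goes through the MMU helpers, which take the TCGMemOpIdx oi and
 * a return address used for exception unwinding.  In user mode the guest
 * address space lives inside the host process, so g2h() plus a plain host
 * load/store suffices.  Both variants expect `taddr` (and, for softmmu,
 * `env`, `oi` and `tb_ptr`) to be in scope at the expansion site below.
 */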
/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;
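            /*
             * The assertion in INDEX_op_st_i32 (and in st_i64 below) guards
             * the interpreter's private stack: sp_value is the top of
             * tcg_temps[] and the stack grows downwards, so stores relative
             * to TCG_REG_CALL_STACK must use a negative offset to stay
             * inside the buffer.
             */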

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

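            /*
             * Shift counts are masked to the operand width (t2 & 31 here,
             * t2 & 63 for the 64 bit ops): shifting a value by its full
             * width or more is undefined behaviour in C, and TCG makes no
             * promises about out-of-range shift counts either.
             */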
        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
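            /*
             * Worked example for the deposit above: with ofs tmp16 = 8 and
             * len tmp8 = 4, the mask is ((1 << 4) - 1) << 8 = 0x00000f00,
             * so bits [11:8] of t1 are replaced by the low 4 bits of t2.
             */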
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
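            /*
             * On 32 bit hosts the block above synthesizes 64 bit arithmetic
             * from register pairs: tci_read_r64() assembles two 32 bit
             * registers into one value and tci_write_reg64() splits the
             * result back into a high/low pair.
             */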
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

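            /*
             * exit_tb hands its 64 bit operand back to the caller of
             * tcg_qemu_tb_exec(); by QEMU convention the low bits of that
             * value carry a TB_EXIT_* code.  goto_tb reads a 32 bit
             * displacement through atomic_read() because another thread
             * may concurrently patch it when translation blocks are
             * chained.
             */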
        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
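            /*
             * INDEX_op_mb consumes no operand: since this case reads
             * nothing and the op size assertion still holds, the encoder
             * must drop the TCGBar argument, and a full host barrier is at
             * least as strong as any barrier kind TCG can request.
             */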
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}