/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"            /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
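    /* The env pointer and the stack pointer live in fixed registers for
     * the whole translation; generated bytecode must never overwrite them. */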
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg16(tcg_target_ulong *regs, TCGReg index, uint16_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
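/*
 * Note (assumption): the direct pointer dereferences below rely on the
 * bytecode generator keeping immediates suitably aligned, or on the host
 * tolerating unaligned loads.
 */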
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(const uint8_t **tb_ptr)
{
    uint64_t value = *(const uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
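/*
 * Register-or-constant operands: a register index equal to TCG_CONST is a
 * sentinel meaning that an immediate constant follows in the bytecode
 * stream instead of naming a register.
 */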
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
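/*
 * Guest memory access macros: with a softmmu they expand to TLB-aware
 * helper calls (which can fault and raise guest exceptions); in user mode
 * they are direct host accesses through g2h().
 */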
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif

/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);
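
    /*
     * Dispatch loop.  Each bytecode instruction starts with a one-byte
     * opcode and a one-byte total size; the operand bytes that follow are
     * consumed by the tci_read_* helpers above.
     */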
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11),
                                          tci_read_reg(regs, TCG_REG_R12));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

        /* Load/store operations (32 bit). */
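        /*
         * For these host loads/stores, t1 is a host base address taken
         * from a register and t2 is a signed 32-bit offset encoded in
         * the bytecode.
         */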
        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
            TODO();
            break;
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (32 bit). */
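        /*
         * TCG leaves shifts with out-of-range counts unspecified; the
         * interpreter masks the count (& 31 here, & 63 for 64 bit) so
         * the host C shift stays well defined.
         */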
        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
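        /*
         * deposit dst, t1, t2, pos, len: insert the low 'len' bits of t2
         * into t1 at bit 'pos'.  Example: pos = 8, len = 4 gives
         * mask = ((1 << 4) - 1) << 8 = 0xf00, so the result is
         * (t1 & ~0xf00) | ((t2 << 8) & 0xf00).
         */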
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

        /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
            TODO();
            break;
        case INDEX_op_ld16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg16(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
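        /*
         * As with st_i32 above: a store through the TCG stack pointer
         * must use a negative offset, because sp_value points one past
         * the end of the tcg_temps buffer; the tci_assert checks this.
         */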
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

        /* QEMU specific operations. */

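        /*
         * exit_tb returns a value to the caller of tcg_qemu_tb_exec().
         * goto_tb chains to the next TB by adding a 32-bit displacement;
         * the displacement is 4-byte aligned and read atomically because
         * TB (un)chaining may patch it concurrently.
         */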
        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = qatomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
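        /*
         * A 64-bit load on a 32-bit host needs two destination registers
         * (low and high halves).  The memop index 'oi' encodes the access
         * size, signedness and endianness; get_memop() recovers them.
         */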
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}