// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

#include <stdio.h>
#include <stdlib.h>

#define unlikely(cond) (cond)
#include <asm/insn.h>
#include "../../../arch/x86/lib/inat.c"
#include "../../../arch/x86/lib/insn.c"

#define CONFIG_64BIT 1
#include <asm/nops.h>

#include <asm/orc_types.h>
#include <objtool/check.h>
#include <objtool/elf.h>
#include <objtool/arch.h>
#include <objtool/warn.h>
#include <objtool/endianness.h>
#include <objtool/builtin.h>
#include <arch/elf.h>

static int is_x86_64(const struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}

bool arch_callee_saved_reg(unsigned char reg)
{
	switch (reg) {
	case CFI_BP:
	case CFI_BX:
	case CFI_R12:
	case CFI_R13:
	case CFI_R14:
	case CFI_R15:
		return true;

	case CFI_AX:
	case CFI_CX:
	case CFI_DX:
	case CFI_SI:
	case CFI_DI:
	case CFI_SP:
	case CFI_R8:
	case CFI_R9:
	case CFI_R10:
	case CFI_R11:
	case CFI_RA:
	default:
		return false;
	}
}

unsigned long arch_dest_reloc_offset(int addend)
{
	return addend + 4;
}

unsigned long arch_jump_destination(struct instruction *insn)
{
	return insn->offset + insn->len + insn->immediate;
}

#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
 * 11 |                   r/m                |
 */

#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

#define is_RIP()	((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
#define have_SIB()	((modrm_rm & 7) == CFI_SP && mod_is_mem())

#define rm_is(reg)	(have_SIB() ? \
			 sib_base == (reg) && sib_index == CFI_SP : \
			 modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
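
/*
 * Worked examples for the helpers above (illustrative only, not part of
 * the decode logic):
 *
 *   "mov %rsp,%rbp" (48 89 e5): modrm 0xe5 -> mod=3, reg=4 (SP),
 *   rm=5 (BP), so mod_is_reg() holds and rm_is_reg(CFI_BP) matches.
 *
 *   "mov %rsp,0x10(%rsp)" (48 89 64 24 10): modrm 0x64 -> mod=1, rm=4,
 *   so have_SIB() holds; the SIB byte 0x24 (base=SP, index=SP meaning
 *   "no index") makes rm_is_mem(CFI_SP) match.
 */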

int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	const struct elf *elf = file->elf;
	struct insn insn;
	int x86_64, ret;
	unsigned char op1, op2, op3,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];
	op3 = insn.opcode.bytes[2];

	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm) + 8*rex_b;
	}

	if (insn.sib.nbytes) {
		sib = insn.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib) + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = insn.immediate.value;
		if (op1 & 2) {		/* sign extend */
			if (op1 & 1) {	/* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else {	/* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			/* fallthrough */
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;
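
	/*
	 * Worked example for the case above (illustrative only):
	 * "sub $0x28,%rsp" (48 83 ec 28) has modrm 0xec -> mod=3,
	 * reg=5 (SUB), rm=4 (SP); it is recorded as an OP_SRC_ADD of
	 * -0x28 onto CFI_SP, i.e. the stack grows by 0x28 bytes.
	 */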

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = insn.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = insn.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (insn.prefixes.nbytes == 1 &&
			    insn.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name,
				     offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		*type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xc7: /* mov imm, r/m */
		if (!noinstr)
			break;

		if (insn.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (immr->addend + 8) / sizeof(void *);

			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, disp->addend);
			if (!func) {
				WARN("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;
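
	/*
	 * Illustrative note: the push recorded above for CALL is the
	 * 8-byte return-address slot that arch_initial_func_cfi_state()
	 * below assumes, i.e. the CFA sitting at %rsp + 8 on function
	 * entry.
	 */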

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}

void arch_initial_func_cfi_state(struct cfi_init_state *state)
{
	int i;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->regs[i].offset = 0;
	}

	/* initial CFA (call frame address) */
	state->cfa.base = CFI_SP;
	state->cfa.offset = 8;

	/* initial RA (return address) */
	state->regs[CFI_RA].base = CFI_CFA;
	state->regs[CFI_RA].offset = -8;
}

const char *arch_nop_insn(int len)
{
	static const char nops[5][5] = {
		{ BYTES_NOP1 },
		{ BYTES_NOP2 },
		{ BYTES_NOP3 },
		{ BYTES_NOP4 },
		{ BYTES_NOP5 },
	};

	if (len < 1 || len > 5) {
		WARN("invalid NOP size: %d\n", len);
		return NULL;
	}

	return nops[len-1];
}

#define BYTE_RET	0xC3

const char *arch_ret_insn(int len)
{
	static const char ret[5][5] = {
		{ BYTE_RET },
		{ BYTE_RET, 0xcc },
		{ BYTE_RET, 0xcc, BYTES_NOP1 },
		{ BYTE_RET, 0xcc, BYTES_NOP2 },
		{ BYTE_RET, 0xcc, BYTES_NOP3 },
	};

	if (len < 1 || len > 5) {
		WARN("invalid RET size: %d\n", len);
		return NULL;
	}

	return ret[len-1];
}

int arch_decode_hint_reg(u8 sp_reg, int *base)
{
	switch (sp_reg) {
	case ORC_REG_UNDEFINED:
		*base = CFI_UNDEFINED;
		break;
	case ORC_REG_SP:
		*base = CFI_SP;
		break;
	case ORC_REG_BP:
		*base = CFI_BP;
		break;
	case ORC_REG_SP_INDIRECT:
		*base = CFI_SP_INDIRECT;
		break;
	case ORC_REG_R10:
		*base = CFI_R10;
		break;
	case ORC_REG_R13:
		*base = CFI_R13;
		break;
	case ORC_REG_DI:
		*base = CFI_DI;
		break;
	case ORC_REG_DX:
		*base = CFI_DX;
		break;
	default:
		return -1;
	}

	return 0;
}

bool arch_is_retpoline(struct symbol *sym)
{
	return !strncmp(sym->name, "__x86_indirect_", 15);
}