/*
 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <http://www.gnu.org/licenses/>.
 */

#include <string.h>
#include <stdlib.h>

#include "check.h"
#include "elf.h"
#include "special.h"
#include "arch.h"
#include "warn.h"

#include <linux/hashtable.h>
#include <linux/kernel.h>

struct alternative {
	struct list_head list;
	struct instruction *insn;
};

const char *objname;
static bool no_fp;
struct cfi_state initial_func_cfi;

struct instruction *find_insn(struct objtool_file *file,
			      struct section *sec, unsigned long offset)
{
	struct instruction *insn;

	hash_for_each_possible(file->insn_hash, insn, hash, offset)
		if (insn->sec == sec && insn->offset == offset)
			return insn;

	return NULL;
}

static struct instruction *next_insn_same_sec(struct objtool_file *file,
					      struct instruction *insn)
{
	struct instruction *next = list_next_entry(insn, list);

	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
		return NULL;

	return next;
}

#define func_for_each_insn(file, func, insn)				\
	for (insn = find_insn(file, func->sec, func->offset);		\
	     insn && &insn->list != &file->insn_list &&			\
		insn->sec == func->sec &&				\
		insn->offset < func->offset + func->len;		\
	     insn = list_next_entry(insn, list))

#define func_for_each_insn_continue_reverse(file, func, insn)		\
	for (insn = list_prev_entry(insn, list);			\
	     &insn->list != &file->insn_list &&				\
		insn->sec == func->sec && insn->offset >= func->offset;	\
	     insn = list_prev_entry(insn, list))

#define sec_for_each_insn_from(file, insn)				\
	for (; insn; insn = next_insn_same_sec(file, insn))

#define sec_for_each_insn_continue(file, insn)				\
	for (insn = next_insn_same_sec(file, insn); insn;		\
	     insn = next_insn_same_sec(file, insn))

/*
 * Check if the function has been manually whitelisted with the
 * STACK_FRAME_NON_STANDARD macro, or if it should be automatically whitelisted
 * due to its use of a context switching instruction.
 */
static bool ignore_func(struct objtool_file *file, struct symbol *func)
{
	struct rela *rela;

	/* check for STACK_FRAME_NON_STANDARD */
	if (file->whitelist && file->whitelist->rela)
		list_for_each_entry(rela, &file->whitelist->rela->rela_list, list) {
			if (rela->sym->type == STT_SECTION &&
			    rela->sym->sec == func->sec &&
			    rela->addend == func->offset)
				return true;
			if (rela->sym->type == STT_FUNC && rela->sym == func)
				return true;
		}

	return false;
}
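/*
 * For reference, the whitelist entries checked above are typically created
 * on the kernel side with something like the following (hypothetical
 * function name, shown only as an illustration):
 *
 *	STACK_FRAME_NON_STANDARD(my_asm_heavy_func);
 *
 * which places a reference to the function in the
 * .discard.func_stack_frame_non_standard section that check() reads in as
 * file->whitelist.
 */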
/*
 * This checks to see if the given function is a "noreturn" function.
 *
 * For global functions which are outside the scope of this object file, we
 * have to keep a manual list of them.
 *
 * For local functions, we have to detect them manually by simply looking for
 * the lack of a return instruction.
 *
 * Returns:
 *  -1: error
 *   0: no dead end
 *   1: dead end
 */
static int __dead_end_function(struct objtool_file *file, struct symbol *func,
			       int recursion)
{
	int i;
	struct instruction *insn;
	bool empty = true;

	/*
	 * Unfortunately these have to be hard coded because the noreturn
	 * attribute isn't provided in ELF data.
	 */
	static const char * const global_noreturns[] = {
		"__stack_chk_fail",
		"panic",
		"do_exit",
		"do_task_dead",
		"__module_put_and_exit",
		"complete_and_exit",
		"kvm_spurious_fault",
		"__reiserfs_panic",
		"lbug_with_loc",
		"fortify_panic",
		"usercopy_abort",
	};

	if (func->bind == STB_WEAK)
		return 0;

	if (func->bind == STB_GLOBAL)
		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
			if (!strcmp(func->name, global_noreturns[i]))
				return 1;

	if (!func->sec)
		return 0;

	func_for_each_insn(file, func, insn) {
		empty = false;

		if (insn->type == INSN_RETURN)
			return 0;
	}

	if (empty)
		return 0;

	/*
	 * A function can have a sibling call instead of a return.  In that
	 * case, the function's dead-end status depends on whether the target
	 * of the sibling call returns.
	 */
	func_for_each_insn(file, func, insn) {
		if (insn->sec != func->sec ||
		    insn->offset >= func->offset + func->len)
			break;

		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
			struct instruction *dest = insn->jump_dest;
			struct symbol *dest_func;

			if (!dest)
				/* sibling call to another file */
				return 0;

			if (dest->sec != func->sec ||
			    dest->offset < func->offset ||
			    dest->offset >= func->offset + func->len) {
				/* local sibling call */
				dest_func = find_symbol_by_offset(dest->sec,
								  dest->offset);
				if (!dest_func)
					continue;

				if (recursion == 5) {
					WARN_FUNC("infinite recursion (objtool bug!)",
						  dest->sec, dest->offset);
					return -1;
				}

				return __dead_end_function(file, dest_func,
							   recursion + 1);
			}
		}

		if (insn->type == INSN_JUMP_DYNAMIC && list_empty(&insn->alts))
			/* sibling call */
			return 0;
	}

	return 1;
}

static int dead_end_function(struct objtool_file *file, struct symbol *func)
{
	return __dead_end_function(file, func, 0);
}

static void clear_insn_state(struct insn_state *state)
{
	int i;

	memset(state, 0, sizeof(*state));
	state->cfa.base = CFI_UNDEFINED;
	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->vals[i].base = CFI_UNDEFINED;
	}
	state->drap_reg = CFI_UNDEFINED;
	state->drap_offset = -1;
}
/*
 * Call the arch-specific instruction decoder for all the instructions and add
 * them to the global instruction list.
 */
static int decode_instructions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	unsigned long offset;
	struct instruction *insn;
	int ret;

	for_each_sec(file, sec) {

		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		if (strcmp(sec->name, ".altinstr_replacement") &&
		    strcmp(sec->name, ".altinstr_aux") &&
		    strncmp(sec->name, ".discard.", 9))
			sec->text = true;

		for (offset = 0; offset < sec->len; offset += insn->len) {
			insn = malloc(sizeof(*insn));
			if (!insn) {
				WARN("malloc failed");
				return -1;
			}
			memset(insn, 0, sizeof(*insn));
			INIT_LIST_HEAD(&insn->alts);
			clear_insn_state(&insn->state);

			insn->sec = sec;
			insn->offset = offset;

			ret = arch_decode_instruction(file->elf, sec, offset,
						      sec->len - offset,
						      &insn->len, &insn->type,
						      &insn->immediate,
						      &insn->stack_op);
			if (ret)
				goto err;

			if (!insn->type || insn->type > INSN_LAST) {
				WARN_FUNC("invalid instruction type %d",
					  insn->sec, insn->offset, insn->type);
				ret = -1;
				goto err;
			}

			hash_add(file->insn_hash, &insn->hash, insn->offset);
			list_add_tail(&insn->list, &file->insn_list);
		}

		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			if (!find_insn(file, sec, func->offset)) {
				WARN("%s(): can't find starting instruction",
				     func->name);
				return -1;
			}

			func_for_each_insn(file, func, insn)
				if (!insn->func)
					insn->func = func;
		}
	}

	return 0;

err:
	free(insn);
	return ret;
}

/*
 * Mark "ud2" instructions and manually annotated dead ends.
 */
static int add_dead_ends(struct objtool_file *file)
{
	struct section *sec;
	struct rela *rela;
	struct instruction *insn;
	bool found;

	/*
	 * By default, "ud2" is a dead end unless otherwise annotated, because
	 * GCC 7 inserts it for certain divide-by-zero cases.
	 */
	for_each_insn(file, insn)
		if (insn->type == INSN_BUG)
			insn->dead_end = true;

	/*
	 * Check for manually annotated dead ends.
	 */
	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
	if (!sec)
		goto reachable;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}
		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (insn)
			insn = list_prev_entry(insn, list);
		else if (rela->addend == rela->sym->sec->len) {
			found = false;
			list_for_each_entry_reverse(insn, &file->insn_list, list) {
				if (insn->sec == rela->sym->sec) {
					found = true;
					break;
				}
			}

			if (!found) {
				WARN("can't find unreachable insn at %s+0x%x",
				     rela->sym->sec->name, rela->addend);
				return -1;
			}
		} else {
			WARN("can't find unreachable insn at %s+0x%x",
			     rela->sym->sec->name, rela->addend);
			return -1;
		}

		insn->dead_end = true;
	}

reachable:
	/*
	 * These manually annotated reachable checks are needed for GCC 4.4,
	 * where the Linux unreachable() macro isn't supported.  In that case
	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
	 * not a dead end.
	 */
	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}
		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (insn)
			insn = list_prev_entry(insn, list);
		else if (rela->addend == rela->sym->sec->len) {
			found = false;
			list_for_each_entry_reverse(insn, &file->insn_list, list) {
				if (insn->sec == rela->sym->sec) {
					found = true;
					break;
				}
			}

			if (!found) {
				WARN("can't find reachable insn at %s+0x%x",
				     rela->sym->sec->name, rela->addend);
				return -1;
			}
		} else {
			WARN("can't find reachable insn at %s+0x%x",
			     rela->sym->sec->name, rela->addend);
			return -1;
		}

		insn->dead_end = false;
	}

	return 0;
}

/*
 * Warnings shouldn't be reported for ignored functions.
 */
static void add_ignores(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	struct symbol *func;

	for_each_sec(file, sec) {
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			if (!ignore_func(file, func))
				continue;

			func_for_each_insn(file, func, insn)
				insn->ignore = true;
		}
	}
}

/*
 * FIXME: For now, just ignore any alternatives which add retpolines.  This is
 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
 * But it at least allows objtool to understand the control flow *around* the
 * retpoline.
 */
static int add_nospec_ignores(struct objtool_file *file)
{
	struct section *sec;
	struct rela *rela;
	struct instruction *insn;

	sec = find_section_by_name(file->elf, ".rela.discard.nospec");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.nospec entry");
			return -1;
		}

		insn->ignore_alts = true;
	}

	return 0;
}
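/*
 * Note: the .discard.nospec entries consumed above are generated by the
 * retpoline alternative annotations on the kernel side (e.g. the
 * ANNOTATE_NOSPEC_ALTERNATIVE asm macro mentioned in the warning below); the
 * exact macro name may vary between kernel versions, so treat this as an
 * illustrative pointer rather than a contract.
 */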
/*
 * Find the destination instructions for all jumps.
 */
static int add_jump_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	struct rela *rela;
	struct section *dest_sec;
	unsigned long dest_off;

	for_each_insn(file, insn) {
		if (insn->type != INSN_JUMP_CONDITIONAL &&
		    insn->type != INSN_JUMP_UNCONDITIONAL)
			continue;

		if (insn->ignore)
			continue;

		rela = find_rela_by_dest_range(insn->sec, insn->offset,
					       insn->len);
		if (!rela) {
			dest_sec = insn->sec;
			dest_off = insn->offset + insn->len + insn->immediate;
		} else if (rela->sym->type == STT_SECTION) {
			dest_sec = rela->sym->sec;
			dest_off = rela->addend + 4;
		} else if (rela->sym->sec->idx) {
			dest_sec = rela->sym->sec;
			dest_off = rela->sym->sym.st_value + rela->addend + 4;
		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
			/*
			 * Retpoline jumps are really dynamic jumps in
			 * disguise, so convert them accordingly.
			 */
			insn->type = INSN_JUMP_DYNAMIC;
			continue;
		} else {
			/* sibling call */
			insn->jump_dest = 0;
			continue;
		}

		insn->jump_dest = find_insn(file, dest_sec, dest_off);
		if (!insn->jump_dest) {

			/*
			 * This is a special case where an alt instruction
			 * jumps past the end of the section.  These are
			 * handled later in handle_group_alt().
			 */
			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
				continue;

			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
				  insn->sec, insn->offset, dest_sec->name,
				  dest_off);
			return -1;
		}
	}

	return 0;
}

/*
 * Find the destination instructions for all calls.
 */
static int add_call_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	unsigned long dest_off;
	struct rela *rela;

	for_each_insn(file, insn) {
		if (insn->type != INSN_CALL)
			continue;

		rela = find_rela_by_dest_range(insn->sec, insn->offset,
					       insn->len);
		if (!rela) {
			dest_off = insn->offset + insn->len + insn->immediate;
			insn->call_dest = find_symbol_by_offset(insn->sec,
								dest_off);

			if (!insn->call_dest && !insn->ignore) {
				WARN_FUNC("unsupported intra-function call",
					  insn->sec, insn->offset);
				WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
				return -1;
			}

		} else if (rela->sym->type == STT_SECTION) {
			insn->call_dest = find_symbol_by_offset(rela->sym->sec,
								rela->addend+4);
			if (!insn->call_dest ||
			    insn->call_dest->type != STT_FUNC) {
				WARN_FUNC("can't find call dest symbol at %s+0x%x",
					  insn->sec, insn->offset,
					  rela->sym->sec->name,
					  rela->addend + 4);
				return -1;
			}
		} else
			insn->call_dest = rela->sym;
	}

	return 0;
}
/*
 * The .alternatives section requires some extra special care, over and above
 * what other special sections require:
 *
 * 1. Because alternatives are patched in-place, we need to insert a fake jump
 *    instruction at the end so that validate_branch() skips all the original
 *    replaced instructions when validating the new instruction path.
 *
 * 2. An added wrinkle is that the new instruction length might be zero.  In
 *    that case the old instructions are replaced with noops.  We simulate that
 *    by creating a fake jump as the only new instruction.
 *
 * 3. In some cases, the alternative section includes an instruction which
 *    conditionally jumps to the _end_ of the entry.  We have to modify these
 *    jumps' destinations to point back to .text rather than the end of the
 *    entry in .altinstr_replacement.
 *
 * 4. It has been requested that we don't validate the !POPCNT feature path
 *    which is a "very very small percentage of machines".
 */
static int handle_group_alt(struct objtool_file *file,
			    struct special_alt *special_alt,
			    struct instruction *orig_insn,
			    struct instruction **new_insn)
{
	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
	unsigned long dest_off;

	last_orig_insn = NULL;
	insn = orig_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
			break;

		if (special_alt->skip_orig)
			insn->type = INSN_NOP;

		insn->alt_group = true;
		last_orig_insn = insn;
	}

	if (next_insn_same_sec(file, last_orig_insn)) {
		fake_jump = malloc(sizeof(*fake_jump));
		if (!fake_jump) {
			WARN("malloc failed");
			return -1;
		}
		memset(fake_jump, 0, sizeof(*fake_jump));
		INIT_LIST_HEAD(&fake_jump->alts);
		clear_insn_state(&fake_jump->state);

		fake_jump->sec = special_alt->new_sec;
		fake_jump->offset = -1;
		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
		fake_jump->ignore = true;
	}

	if (!special_alt->new_len) {
		if (!fake_jump) {
			WARN("%s: empty alternative at end of section",
			     special_alt->orig_sec->name);
			return -1;
		}

		*new_insn = fake_jump;
		return 0;
	}

	last_new_insn = NULL;
	insn = *new_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->new_off + special_alt->new_len)
			break;

		last_new_insn = insn;

		insn->ignore = orig_insn->ignore_alts;

		if (insn->type != INSN_JUMP_CONDITIONAL &&
		    insn->type != INSN_JUMP_UNCONDITIONAL)
			continue;

		if (!insn->immediate)
			continue;

		dest_off = insn->offset + insn->len + insn->immediate;
		if (dest_off == special_alt->new_off + special_alt->new_len) {
			if (!fake_jump) {
				WARN("%s: alternative jump to end of section",
				     special_alt->orig_sec->name);
				return -1;
			}
			insn->jump_dest = fake_jump;
		}

		if (!insn->jump_dest) {
			WARN_FUNC("can't find alternative jump destination",
				  insn->sec, insn->offset);
			return -1;
		}
	}

	if (!last_new_insn) {
		WARN_FUNC("can't find last new alternative instruction",
			  special_alt->new_sec, special_alt->new_off);
		return -1;
	}

	if (fake_jump)
		list_add(&fake_jump->list, &last_new_insn->list);

	return 0;
}

/*
 * A jump table entry can either convert a nop to a jump or a jump to a nop.
 * If the original instruction is a jump, make the alt entry an effective nop
 * by just skipping the original instruction.
 */
static int handle_jump_alt(struct objtool_file *file,
			   struct special_alt *special_alt,
			   struct instruction *orig_insn,
			   struct instruction **new_insn)
{
	if (orig_insn->type == INSN_NOP)
		return 0;

	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
		WARN_FUNC("unsupported instruction at jump label",
			  orig_insn->sec, orig_insn->offset);
		return -1;
	}

	*new_insn = list_next_entry(orig_insn, list);
	return 0;
}
/*
 * Read all the special sections which have alternate instructions which can be
 * patched in or redirected to at runtime.  Each instruction having alternate
 * instruction(s) has them added to its insn->alts list, which will be
 * traversed in validate_branch().
 */
static int add_special_section_alts(struct objtool_file *file)
{
	struct list_head special_alts;
	struct instruction *orig_insn, *new_insn;
	struct special_alt *special_alt, *tmp;
	struct alternative *alt;
	int ret;

	ret = special_get_alts(file->elf, &special_alts);
	if (ret)
		return ret;

	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {

		orig_insn = find_insn(file, special_alt->orig_sec,
				      special_alt->orig_off);
		if (!orig_insn) {
			WARN_FUNC("special: can't find orig instruction",
				  special_alt->orig_sec, special_alt->orig_off);
			ret = -1;
			goto out;
		}

		new_insn = NULL;
		if (!special_alt->group || special_alt->new_len) {
			new_insn = find_insn(file, special_alt->new_sec,
					     special_alt->new_off);
			if (!new_insn) {
				WARN_FUNC("special: can't find new instruction",
					  special_alt->new_sec,
					  special_alt->new_off);
				ret = -1;
				goto out;
			}
		}

		if (special_alt->group) {
			ret = handle_group_alt(file, special_alt, orig_insn,
					       &new_insn);
			if (ret)
				goto out;
		} else if (special_alt->jump_or_nop) {
			ret = handle_jump_alt(file, special_alt, orig_insn,
					      &new_insn);
			if (ret)
				goto out;
		}

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			ret = -1;
			goto out;
		}

		alt->insn = new_insn;
		list_add_tail(&alt->list, &orig_insn->alts);

		list_del(&special_alt->list);
		free(special_alt);
	}

out:
	return ret;
}

static int add_switch_table(struct objtool_file *file, struct symbol *func,
			    struct instruction *insn, struct rela *table,
			    struct rela *next_table)
{
	struct rela *rela = table;
	struct instruction *alt_insn;
	struct alternative *alt;

	list_for_each_entry_from(rela, &file->rodata->rela->rela_list, list) {
		if (rela == next_table)
			break;

		if (rela->sym->sec != insn->sec ||
		    rela->addend <= func->offset ||
		    rela->addend >= func->offset + func->len)
			break;

		alt_insn = find_insn(file, insn->sec, rela->addend);
		if (!alt_insn) {
			WARN("%s: can't find instruction at %s+0x%x",
			     file->rodata->rela->name, insn->sec->name,
			     rela->addend);
			return -1;
		}

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			return -1;
		}

		alt->insn = alt_insn;
		list_add_tail(&alt->list, &insn->alts);
	}

	return 0;
}
/*
 * find_switch_table() - Given a dynamic jump, find the switch jump table in
 * .rodata associated with it.
 *
 * There are 3 basic patterns:
 *
 * 1. jmpq *[rodata addr](,%reg,8)
 *
 *    This is the most common case by far.  It jumps to an address in a simple
 *    jump table which is stored in .rodata.
 *
 * 2. jmpq *[rodata addr](%rip)
 *
 *    This is caused by a rare GCC quirk, currently only seen in three driver
 *    functions in the kernel, only with certain obscure non-distro configs.
 *
 *    As part of an optimization, GCC makes a copy of an existing switch jump
 *    table, modifies it, and then hard-codes the jump (albeit with an indirect
 *    jump) to use a single entry in the table.  The rest of the jump table and
 *    some of its jump targets remain as dead code.
 *
 *    In such a case we can just crudely ignore all unreachable instruction
 *    warnings for the entire object file.  Ideally we would just ignore them
 *    for the function, but that would require redesigning the code quite a
 *    bit.  And honestly that's just not worth doing: unreachable instruction
 *    warnings are of questionable value anyway, and this is such a rare issue.
 *
 * 3. mov [rodata addr],%reg1
 *    ... some instructions ...
 *    jmpq *(%reg1,%reg2,8)
 *
 *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
 *    writing, there are 11 occurrences of it in the allmodconfig kernel.
 *
 *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
 *    ensure the same register is used in the mov and jump instructions.
 */
static struct rela *find_switch_table(struct objtool_file *file,
				      struct symbol *func,
				      struct instruction *insn)
{
	struct rela *text_rela, *rodata_rela;
	struct instruction *orig_insn = insn;

	text_rela = find_rela_by_dest_range(insn->sec, insn->offset, insn->len);
	if (text_rela && text_rela->sym == file->rodata->sym) {
		/* case 1 */
		rodata_rela = find_rela_by_dest(file->rodata,
						text_rela->addend);
		if (rodata_rela)
			return rodata_rela;

		/* case 2 */
		rodata_rela = find_rela_by_dest(file->rodata,
						text_rela->addend + 4);
		if (!rodata_rela)
			return NULL;
		file->ignore_unreachables = true;
		return rodata_rela;
	}

	/* case 3 */
	func_for_each_insn_continue_reverse(file, func, insn) {
		if (insn->type == INSN_JUMP_DYNAMIC)
			break;

		/* allow small jumps within the range */
		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
		    insn->jump_dest &&
		    (insn->jump_dest->offset <= insn->offset ||
		     insn->jump_dest->offset > orig_insn->offset))
			break;

		/* look for a relocation which references .rodata */
		text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
						    insn->len);
		if (!text_rela || text_rela->sym != file->rodata->sym)
			continue;

		/*
		 * Make sure the .rodata address isn't associated with a
		 * symbol.  gcc jump tables are anonymous data.
		 */
		if (find_symbol_containing(file->rodata, text_rela->addend))
			continue;

		return find_rela_by_dest(file->rodata, text_rela->addend);
	}

	return NULL;
}

static int add_func_switch_tables(struct objtool_file *file,
				  struct symbol *func)
{
	struct instruction *insn, *prev_jump = NULL;
	struct rela *rela, *prev_rela = NULL;
	int ret;

	func_for_each_insn(file, func, insn) {
		if (insn->type != INSN_JUMP_DYNAMIC)
			continue;

		rela = find_switch_table(file, func, insn);
		if (!rela)
			continue;

		/*
		 * We found a switch table, but we don't know yet how big it
		 * is.  Don't add it until we reach the end of the function or
		 * the beginning of another switch table in the same function.
		 */
		if (prev_jump) {
			ret = add_switch_table(file, func, prev_jump, prev_rela,
					       rela);
			if (ret)
				return ret;
		}

		prev_jump = insn;
		prev_rela = rela;
	}

	if (prev_jump) {
		ret = add_switch_table(file, func, prev_jump, prev_rela, NULL);
		if (ret)
			return ret;
	}

	return 0;
}
/*
 * For some switch statements, gcc generates a jump table in the .rodata
 * section which contains a list of addresses within the function to jump to.
 * This finds these jump tables and adds them to the insn->alts lists.
 */
static int add_switch_table_alts(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	int ret;

	if (!file->rodata || !file->rodata->rela)
		return 0;

	for_each_sec(file, sec) {
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			ret = add_func_switch_tables(file, func);
			if (ret)
				return ret;
		}
	}

	return 0;
}

static int read_unwind_hints(struct objtool_file *file)
{
	struct section *sec, *relasec;
	struct rela *rela;
	struct unwind_hint *hint;
	struct instruction *insn;
	struct cfi_reg *cfa;
	int i;

	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
	if (!sec)
		return 0;

	relasec = sec->rela;
	if (!relasec) {
		WARN("missing .rela.discard.unwind_hints section");
		return -1;
	}

	if (sec->len % sizeof(struct unwind_hint)) {
		WARN("struct unwind_hint size mismatch");
		return -1;
	}

	file->hints = true;

	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
		hint = (struct unwind_hint *)sec->data->d_buf + i;

		rela = find_rela_by_dest(sec, i * sizeof(*hint));
		if (!rela) {
			WARN("can't find rela for unwind_hints[%d]", i);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("can't find insn for unwind_hints[%d]", i);
			return -1;
		}

		cfa = &insn->state.cfa;

		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
			insn->save = true;
			continue;

		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
			insn->restore = true;
			insn->hint = true;
			continue;
		}

		insn->hint = true;

		switch (hint->sp_reg) {
		case ORC_REG_UNDEFINED:
			cfa->base = CFI_UNDEFINED;
			break;
		case ORC_REG_SP:
			cfa->base = CFI_SP;
			break;
		case ORC_REG_BP:
			cfa->base = CFI_BP;
			break;
		case ORC_REG_SP_INDIRECT:
			cfa->base = CFI_SP_INDIRECT;
			break;
		case ORC_REG_R10:
			cfa->base = CFI_R10;
			break;
		case ORC_REG_R13:
			cfa->base = CFI_R13;
			break;
		case ORC_REG_DI:
			cfa->base = CFI_DI;
			break;
		case ORC_REG_DX:
			cfa->base = CFI_DX;
			break;
		default:
			WARN_FUNC("unsupported unwind_hint sp base reg %d",
				  insn->sec, insn->offset, hint->sp_reg);
			return -1;
		}

		cfa->offset = hint->sp_offset;
		insn->state.type = hint->type;
	}

	return 0;
}

static int decode_sections(struct objtool_file *file)
{
	int ret;

	ret = decode_instructions(file);
	if (ret)
		return ret;

	ret = add_dead_ends(file);
	if (ret)
		return ret;

	add_ignores(file);

	ret = add_nospec_ignores(file);
	if (ret)
		return ret;

	ret = add_jump_destinations(file);
	if (ret)
		return ret;

	ret = add_special_section_alts(file);
	if (ret)
		return ret;

	ret = add_call_destinations(file);
	if (ret)
		return ret;

	ret = add_switch_table_alts(file);
	if (ret)
		return ret;

	ret = read_unwind_hints(file);
	if (ret)
		return ret;

	return 0;
}
static bool is_fentry_call(struct instruction *insn)
{
	if (insn->type == INSN_CALL &&
	    insn->call_dest->type == STT_NOTYPE &&
	    !strcmp(insn->call_dest->name, "__fentry__"))
		return true;

	return false;
}

static bool has_modified_stack_frame(struct insn_state *state)
{
	int i;

	if (state->cfa.base != initial_func_cfi.cfa.base ||
	    state->cfa.offset != initial_func_cfi.cfa.offset ||
	    state->stack_size != initial_func_cfi.cfa.offset ||
	    state->drap)
		return true;

	for (i = 0; i < CFI_NUM_REGS; i++)
		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
			return true;

	return false;
}

static bool has_valid_stack_frame(struct insn_state *state)
{
	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
	    state->regs[CFI_BP].offset == -16)
		return true;

	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
		return true;

	return false;
}

static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
{
	struct cfi_reg *cfa = &state->cfa;
	struct stack_op *op = &insn->stack_op;

	if (cfa->base != CFI_SP)
		return 0;

	/* push */
	if (op->dest.type == OP_DEST_PUSH)
		cfa->offset += 8;

	/* pop */
	if (op->src.type == OP_SRC_POP)
		cfa->offset -= 8;

	/* add immediate to sp */
	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
		cfa->offset -= op->src.offset;

	return 0;
}
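/*
 * A rough illustration of the CFA tracking done by update_insn_state_regs()
 * above for ORC_TYPE_REGS / ORC_TYPE_REGS_IRET states (instruction sequence
 * and offsets are illustrative only):
 *
 *	push %rdi		# cfa->offset += 8
 *	...
 *	pop  %rdi		# cfa->offset -= 8
 *	add  $0x8, %rsp		# cfa->offset -= 8
 */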
static void save_reg(struct insn_state *state, unsigned char reg, int base,
		     int offset)
{
	if (arch_callee_saved_reg(reg) &&
	    state->regs[reg].base == CFI_UNDEFINED) {
		state->regs[reg].base = base;
		state->regs[reg].offset = offset;
	}
}

static void restore_reg(struct insn_state *state, unsigned char reg)
{
	state->regs[reg].base = CFI_UNDEFINED;
	state->regs[reg].offset = 0;
}

/*
 * A note about DRAP stack alignment:
 *
 * GCC has the concept of a DRAP register, which is used to help keep track of
 * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
 * register.  The typical DRAP pattern is:
 *
 *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
 *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
 *   41 ff 72 f8		pushq  -0x8(%r10)
 *   55				push   %rbp
 *   48 89 e5			mov    %rsp,%rbp
 *				(more pushes)
 *   41 52			push   %r10
 *				...
 *   41 5a			pop    %r10
 *				(more pops)
 *   5d				pop    %rbp
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * There are some variations in the epilogues, like:
 *
 *   5b				pop    %rbx
 *   41 5a			pop    %r10
 *   41 5c			pop    %r12
 *   41 5d			pop    %r13
 *   41 5e			pop    %r14
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * and:
 *
 *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
 *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
 *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
 *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * Sometimes r13 is used as the DRAP register, in which case it's saved and
 * restored beforehand:
 *
 *   41 55			push   %r13
 *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
 *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
 *				...
 *   49 8d 65 f0		lea    -0x10(%r13),%rsp
 *   41 5d			pop    %r13
 *   c3				retq
 */
static int update_insn_state(struct instruction *insn, struct insn_state *state)
{
	struct stack_op *op = &insn->stack_op;
	struct cfi_reg *cfa = &state->cfa;
	struct cfi_reg *regs = state->regs;

	/* stack operations don't make sense with an undefined CFA */
	if (cfa->base == CFI_UNDEFINED) {
		if (insn->func) {
			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
			return -1;
		}
		return 0;
	}

	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
		return update_insn_state_regs(insn, state);

	switch (op->dest.type) {

	case OP_DEST_REG:
		switch (op->src.type) {

		case OP_SRC_REG:
			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
			    cfa->base == CFI_SP &&
			    regs[CFI_BP].base == CFI_CFA &&
			    regs[CFI_BP].offset == -cfa->offset) {

				/* mov %rsp, %rbp */
				cfa->base = op->dest.reg;
				state->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP &&
				 op->dest.reg == CFI_BP && state->drap) {

				/* drap: mov %rsp, %rbp */
				regs[CFI_BP].base = CFI_BP;
				regs[CFI_BP].offset = -state->stack_size;
				state->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/*
				 * mov %rsp, %reg
				 *
				 * This is needed for the rare case where GCC
				 * does:
				 *
				 *   mov    %rsp, %rax
				 *   ...
				 *   mov    %rax, %rsp
				 */
				state->vals[op->dest.reg].base = CFI_CFA;
				state->vals[op->dest.reg].offset = -state->stack_size;
			}

			else if (op->dest.reg == cfa->base) {

				/* mov %reg, %rsp */
				if (cfa->base == CFI_SP &&
				    state->vals[op->src.reg].base == CFI_CFA) {

					/*
					 * This is needed for the rare case
					 * where GCC does something dumb like:
					 *
					 *   lea    0x8(%rsp), %rcx
					 *   ...
					 *   mov    %rcx, %rsp
					 */
					cfa->offset = -state->vals[op->src.reg].offset;
					state->stack_size = cfa->offset;

				} else {
					cfa->base = CFI_UNDEFINED;
					cfa->offset = 0;
				}
			}

			break;

		case OP_SRC_ADD:
			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {

				/* add imm, %rsp */
				state->stack_size -= op->src.offset;
				if (cfa->base == CFI_SP)
					cfa->offset -= op->src.offset;
				break;
			}

			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {

				/* lea disp(%rbp), %rsp */
				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
				break;
			}

			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/* drap: lea disp(%rsp), %drap */
				state->drap_reg = op->dest.reg;

				/*
				 * lea disp(%rsp), %reg
				 *
				 * This is needed for the rare case where GCC
				 * does something dumb like:
				 *
				 *   lea    0x8(%rsp), %rcx
				 *   ...
				 *   mov    %rcx, %rsp
				 */
				state->vals[op->dest.reg].base = CFI_CFA;
				state->vals[op->dest.reg].offset = \
					-state->stack_size + op->src.offset;

				break;
			}

			if (state->drap && op->dest.reg == CFI_SP &&
			    op->src.reg == state->drap_reg) {

				/* drap: lea disp(%drap), %rsp */
				cfa->base = CFI_SP;
				cfa->offset = state->stack_size = -op->src.offset;
				state->drap_reg = CFI_UNDEFINED;
				state->drap = false;
				break;
			}

			if (op->dest.reg == state->cfa.base) {
				WARN_FUNC("unsupported stack register modification",
					  insn->sec, insn->offset);
				return -1;
			}

			break;

		case OP_SRC_AND:
			if (op->dest.reg != CFI_SP ||
			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
				WARN_FUNC("unsupported stack pointer realignment",
					  insn->sec, insn->offset);
				return -1;
			}

			if (state->drap_reg != CFI_UNDEFINED) {
				/* drap: and imm, %rsp */
				cfa->base = state->drap_reg;
				cfa->offset = state->stack_size = 0;
				state->drap = true;
			}

			/*
			 * Older versions of GCC (4.8ish) realign the stack
			 * without DRAP, with a frame pointer.
			 */

			break;

		case OP_SRC_POP:
			if (!state->drap && op->dest.type == OP_DEST_REG &&
			    op->dest.reg == cfa->base) {

				/* pop %rbp */
				cfa->base = CFI_SP;
			}

			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
			    op->dest.type == OP_DEST_REG &&
			    op->dest.reg == state->drap_reg &&
			    state->drap_offset == -state->stack_size) {

				/* drap: pop %drap */
				cfa->base = state->drap_reg;
				cfa->offset = 0;
				state->drap_offset = -1;

			} else if (regs[op->dest.reg].offset == -state->stack_size) {

				/* pop %reg */
				restore_reg(state, op->dest.reg);
			}

			state->stack_size -= 8;
			if (cfa->base == CFI_SP)
				cfa->offset -= 8;

			break;

		case OP_SRC_REG_INDIRECT:
			if (state->drap && op->src.reg == CFI_BP &&
			    op->src.offset == state->drap_offset) {

				/* drap: mov disp(%rbp), %drap */
				cfa->base = state->drap_reg;
				cfa->offset = 0;
				state->drap_offset = -1;
			}

			if (state->drap && op->src.reg == CFI_BP &&
			    op->src.offset == regs[op->dest.reg].offset) {

				/* drap: mov disp(%rbp), %reg */
				restore_reg(state, op->dest.reg);

			} else if (op->src.reg == cfa->base &&
				   op->src.offset == regs[op->dest.reg].offset + cfa->offset) {

				/* mov disp(%rbp), %reg */
				/* mov disp(%rsp), %reg */
				restore_reg(state, op->dest.reg);
			}

			break;

		default:
			WARN_FUNC("unknown stack-related instruction",
				  insn->sec, insn->offset);
			return -1;
		}

		break;

	case OP_DEST_PUSH:
		state->stack_size += 8;
		if (cfa->base == CFI_SP)
			cfa->offset += 8;

		if (op->src.type != OP_SRC_REG)
			break;

		if (state->drap) {
			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {

				/* drap: push %drap */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = -state->stack_size;

				/* save drap so we know when to restore it */
				state->drap_offset = -state->stack_size;

			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {

				/* drap: push %rbp */
				state->stack_size = 0;

			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {
				/* drap: push %reg */
				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
			}

		} else {

			/* push %reg */
			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
		}

		/* detect when asm code uses rbp as a scratch register */
		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
		    cfa->base != CFI_BP)
			state->bp_scratch = true;
		break;

	case OP_DEST_REG_INDIRECT:

		if (state->drap) {
			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {

				/* drap: mov %drap, disp(%rbp) */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = op->dest.offset;

				/* save drap offset so we know when to restore it */
				state->drap_offset = op->dest.offset;
			}

			else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: mov reg, disp(%rbp) */
				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
			}

		} else if (op->dest.reg == cfa->base) {

			/* mov reg, disp(%rbp) */
			/* mov reg, disp(%rsp) */
			save_reg(state, op->src.reg, CFI_CFA,
				 op->dest.offset - state->cfa.offset);
		}

		break;

	case OP_DEST_LEAVE:
		if ((!state->drap && cfa->base != CFI_BP) ||
		    (state->drap && cfa->base != state->drap_reg)) {
			WARN_FUNC("leave instruction with modified stack frame",
				  insn->sec, insn->offset);
			return -1;
		}

		/* leave (mov %rbp, %rsp; pop %rbp) */

		state->stack_size = -state->regs[CFI_BP].offset - 8;
		restore_reg(state, CFI_BP);

		if (!state->drap) {
			cfa->base = CFI_SP;
			cfa->offset -= 8;
		}

		break;

	case OP_DEST_MEM:
		if (op->src.type != OP_SRC_POP) {
			WARN_FUNC("unknown stack-related memory operation",
				  insn->sec, insn->offset);
			return -1;
		}

		/* pop mem */
		state->stack_size -= 8;
		if (cfa->base == CFI_SP)
			cfa->offset -= 8;

		break;

	default:
		WARN_FUNC("unknown stack-related instruction",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}

static bool insn_state_match(struct instruction *insn, struct insn_state *state)
{
	struct insn_state *state1 = &insn->state, *state2 = state;
	int i;

	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
			  insn->sec, insn->offset,
			  state1->cfa.base, state1->cfa.offset,
			  state2->cfa.base, state2->cfa.offset);

	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
		for (i = 0; i < CFI_NUM_REGS; i++) {
			if (!memcmp(&state1->regs[i], &state2->regs[i],
				    sizeof(struct cfi_reg)))
				continue;

			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
				  insn->sec, insn->offset,
				  i, state1->regs[i].base, state1->regs[i].offset,
				  i, state2->regs[i].base, state2->regs[i].offset);
			break;
		}

	} else if (state1->type != state2->type) {
		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
			  insn->sec, insn->offset, state1->type, state2->type);

	} else if (state1->drap != state2->drap ||
		   (state1->drap && state1->drap_reg != state2->drap_reg) ||
		   (state1->drap && state1->drap_offset != state2->drap_offset)) {
		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
			  insn->sec, insn->offset,
			  state1->drap, state1->drap_reg, state1->drap_offset,
			  state2->drap, state2->drap_reg, state2->drap_offset);

	} else
		return true;

	return false;
}
/*
 * Follow the branch starting at the given instruction, and recursively follow
 * any other branches (jumps).  Meanwhile, track the frame pointer state at
 * each instruction and validate all the rules described in
 * tools/objtool/Documentation/stack-validation.txt.
 */
static int validate_branch(struct objtool_file *file, struct instruction *first,
			   struct insn_state state)
{
	struct alternative *alt;
	struct instruction *insn, *next_insn;
	struct section *sec;
	struct symbol *func = NULL;
	int ret;

	insn = first;
	sec = insn->sec;

	if (insn->alt_group && list_empty(&insn->alts)) {
		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
			  sec, insn->offset);
		return 1;
	}

	while (1) {
		next_insn = next_insn_same_sec(file, insn);


		if (file->c_file && func && insn->func && func != insn->func) {
			WARN("%s() falls through to next function %s()",
			     func->name, insn->func->name);
			return 1;
		}

		if (insn->func)
			func = insn->func;

		if (func && insn->ignore) {
			WARN_FUNC("BUG: why am I validating an ignored function?",
				  sec, insn->offset);
			return 1;
		}

		if (insn->visited) {
			if (!insn->hint && !insn_state_match(insn, &state))
				return 1;

			return 0;
		}

		if (insn->hint) {
			if (insn->restore) {
				struct instruction *save_insn, *i;

				i = insn;
				save_insn = NULL;
				func_for_each_insn_continue_reverse(file, func, i) {
					if (i->save) {
						save_insn = i;
						break;
					}
				}

				if (!save_insn) {
					WARN_FUNC("no corresponding CFI save for CFI restore",
						  sec, insn->offset);
					return 1;
				}

				if (!save_insn->visited) {
					/*
					 * Oops, no state to copy yet.
					 * Hopefully we can reach this
					 * instruction from another branch
					 * after the save insn has been
					 * visited.
					 */
					if (insn == first)
						return 0;

					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
						  sec, insn->offset);
					return 1;
				}

				insn->state = save_insn->state;
			}

			state = insn->state;

		} else
			insn->state = state;

		insn->visited = true;

		if (!insn->ignore_alts) {
			list_for_each_entry(alt, &insn->alts, list) {
				ret = validate_branch(file, alt->insn, state);
				if (ret)
					return 1;
			}
		}

		switch (insn->type) {

		case INSN_RETURN:
			if (func && has_modified_stack_frame(&state)) {
				WARN_FUNC("return with modified stack frame",
					  sec, insn->offset);
				return 1;
			}

			if (state.bp_scratch) {
				WARN("%s uses BP as a scratch register",
				     insn->func->name);
				return 1;
			}

			return 0;

		case INSN_CALL:
			if (is_fentry_call(insn))
				break;

			ret = dead_end_function(file, insn->call_dest);
			if (ret == 1)
				return 0;
			if (ret == -1)
				return 1;

			/* fallthrough */
		case INSN_CALL_DYNAMIC:
			if (!no_fp && func && !has_valid_stack_frame(&state)) {
				WARN_FUNC("call without frame pointer save/setup",
					  sec, insn->offset);
				return 1;
			}
			break;

		case INSN_JUMP_CONDITIONAL:
		case INSN_JUMP_UNCONDITIONAL:
			if (insn->jump_dest &&
			    (!func || !insn->jump_dest->func ||
			     func == insn->jump_dest->func)) {
				ret = validate_branch(file, insn->jump_dest,
						      state);
				if (ret)
					return 1;

			} else if (func && has_modified_stack_frame(&state)) {
				WARN_FUNC("sibling call from callable instruction with modified stack frame",
					  sec, insn->offset);
				return 1;
			}

			if (insn->type == INSN_JUMP_UNCONDITIONAL)
				return 0;

			break;

		case INSN_JUMP_DYNAMIC:
			if (func && list_empty(&insn->alts) &&
			    has_modified_stack_frame(&state)) {
				WARN_FUNC("sibling call from callable instruction with modified stack frame",
					  sec, insn->offset);
				return 1;
			}

			return 0;

		case INSN_CONTEXT_SWITCH:
			if (func && (!next_insn || !next_insn->hint)) {
				WARN_FUNC("unsupported instruction in callable function",
					  sec, insn->offset);
				return 1;
			}
			return 0;

		case INSN_STACK:
			if (update_insn_state(insn, &state))
				return 1;

			break;

		default:
			break;
		}

		if (insn->dead_end)
			return 0;

		if (!next_insn) {
			if (state.cfa.base == CFI_UNDEFINED)
				return 0;
			WARN("%s: unexpected end of section", sec->name);
			return 1;
		}

		insn = next_insn;
	}

	return 0;
}

static int validate_unwind_hints(struct objtool_file *file)
{
	struct instruction *insn;
	int ret, warnings = 0;
	struct insn_state state;

	if (!file->hints)
		return 0;

	clear_insn_state(&state);

	for_each_insn(file, insn) {
		if (insn->hint && !insn->visited) {
			ret = validate_branch(file, insn, state);
			warnings += ret;
		}
	}

	return warnings;
}
static bool is_kasan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
}

static bool is_ubsan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn->call_dest->name,
			"__ubsan_handle_builtin_unreachable"));
}

static bool ignore_unreachable_insn(struct instruction *insn)
{
	int i;

	if (insn->ignore || insn->type == INSN_NOP)
		return true;

	/*
	 * Ignore any unused exceptions.  This can happen when a whitelisted
	 * function has an exception table entry.
	 *
	 * Also ignore alternative replacement instructions.  This can happen
	 * when a whitelisted function uses one of the ALTERNATIVE macros.
	 */
	if (!strcmp(insn->sec->name, ".fixup") ||
	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
	    !strcmp(insn->sec->name, ".altinstr_aux"))
		return true;

	/*
	 * Check if this (or a subsequent) instruction is related to
	 * CONFIG_UBSAN or CONFIG_KASAN.
	 *
	 * End the search at 5 instructions to avoid going into the weeds.
	 */
	if (!insn->func)
		return false;
	for (i = 0; i < 5; i++) {

		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
			return true;

		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest) {
			insn = insn->jump_dest;
			continue;
		}

		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
			break;
		insn = list_next_entry(insn, list);
	}

	return false;
}

static int validate_functions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	struct instruction *insn;
	struct insn_state state;
	int ret, warnings = 0;

	clear_insn_state(&state);

	state.cfa = initial_func_cfi.cfa;
	memcpy(&state.regs, &initial_func_cfi.regs,
	       CFI_NUM_REGS * sizeof(struct cfi_reg));
	state.stack_size = initial_func_cfi.cfa.offset;

	for_each_sec(file, sec) {
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			insn = find_insn(file, sec, func->offset);
			if (!insn || insn->ignore)
				continue;

			ret = validate_branch(file, insn, state);
			warnings += ret;
		}
	}

	return warnings;
}

static int validate_reachable_instructions(struct objtool_file *file)
{
	struct instruction *insn;

	if (file->ignore_unreachables)
		return 0;

	for_each_insn(file, insn) {
		if (insn->visited || ignore_unreachable_insn(insn))
			continue;

		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
		return 1;
	}

	return 0;
}

static void cleanup(struct objtool_file *file)
{
	struct instruction *insn, *tmpinsn;
	struct alternative *alt, *tmpalt;

	list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
		list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
			list_del(&alt->list);
			free(alt);
		}
		list_del(&insn->list);
		hash_del(&insn->hash);
		free(insn);
	}
	elf_close(file->elf);
}

int check(const char *_objname, bool _no_fp, bool no_unreachable, bool orc)
{
	struct objtool_file file;
	int ret, warnings = 0;

	objname = _objname;
	no_fp = _no_fp;

	file.elf = elf_open(objname, orc ? O_RDWR : O_RDONLY);
	if (!file.elf)
		return 1;

	INIT_LIST_HEAD(&file.insn_list);
	hash_init(file.insn_hash);
	file.whitelist = find_section_by_name(file.elf, ".discard.func_stack_frame_non_standard");
	file.rodata = find_section_by_name(file.elf, ".rodata");
	file.c_file = find_section_by_name(file.elf, ".comment");
	file.ignore_unreachables = no_unreachable;
	file.hints = false;

	arch_initial_func_cfi_state(&initial_func_cfi);

	ret = decode_sections(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	if (list_empty(&file.insn_list))
		goto out;

	ret = validate_functions(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	ret = validate_unwind_hints(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	if (!warnings) {
		ret = validate_reachable_instructions(&file);
		if (ret < 0)
			goto out;
		warnings += ret;
	}

	if (orc) {
		ret = create_orc(&file);
		if (ret < 0)
			goto out;

		ret = create_orc_sections(&file);
		if (ret < 0)
			goto out;

		ret = elf_write(file.elf);
		if (ret < 0)
			goto out;
	}

out:
	cleanup(&file);

	/* ignore warnings for now until we get all the code cleaned up */
	if (ret || warnings)
		return 0;
	return 0;
}
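/*
 * Rough usage sketch: check() is the library entry point, and the objtool
 * subcommands are expected to drive it roughly as follows (argument handling
 * simplified; the actual call sites live in builtin-check.c / builtin-orc.c
 * and may differ):
 *
 *	// "objtool check": validate only, open the object read-only
 *	ret = check(objname, no_fp, no_unreachable, false);
 *
 *	// "objtool orc generate": also write the ORC unwind sections back out
 *	ret = check(objname, no_fp, no_unreachable, true);
 */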