/*
 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <http://www.gnu.org/licenses/>.
 */

#include <string.h>
#include <stdlib.h>

#include "builtin.h"
#include "check.h"
#include "elf.h"
#include "special.h"
#include "arch.h"
#include "warn.h"

#include <linux/hashtable.h>
#include <linux/kernel.h>

#define FAKE_JUMP_OFFSET -1

struct alternative {
	struct list_head list;
	struct instruction *insn;
	bool skip_orig;
};

const char *objname;
struct cfi_state initial_func_cfi;

struct instruction *find_insn(struct objtool_file *file,
			      struct section *sec, unsigned long offset)
{
	struct instruction *insn;

	hash_for_each_possible(file->insn_hash, insn, hash, offset)
		if (insn->sec == sec && insn->offset == offset)
			return insn;

	return NULL;
}

static struct instruction *next_insn_same_sec(struct objtool_file *file,
					      struct instruction *insn)
{
	struct instruction *next = list_next_entry(insn, list);

	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
		return NULL;

	return next;
}

static struct instruction *next_insn_same_func(struct objtool_file *file,
					       struct instruction *insn)
{
	struct instruction *next = list_next_entry(insn, list);
	struct symbol *func = insn->func;

	if (!func)
		return NULL;

	if (&next->list != &file->insn_list && next->func == func)
		return next;

	/* Check if we're already in the subfunction: */
	if (func == func->cfunc)
		return NULL;

	/* Move to the subfunction: */
	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
}

#define func_for_each_insn_all(file, func, insn)			\
	for (insn = find_insn(file, func->sec, func->offset);		\
	     insn;							\
	     insn = next_insn_same_func(file, insn))

#define func_for_each_insn(file, func, insn)				\
	for (insn = find_insn(file, func->sec, func->offset);		\
	     insn && &insn->list != &file->insn_list &&			\
		insn->sec == func->sec &&				\
		insn->offset < func->offset + func->len;		\
	     insn = list_next_entry(insn, list))

#define func_for_each_insn_continue_reverse(file, func, insn)		\
	for (insn = list_prev_entry(insn, list);			\
	     &insn->list != &file->insn_list &&				\
		insn->sec == func->sec && insn->offset >= func->offset;	\
	     insn = list_prev_entry(insn, list))

#define sec_for_each_insn_from(file, insn)				\
	for (; insn; insn = next_insn_same_sec(file, insn))

#define sec_for_each_insn_continue(file, insn)				\
	for (insn = next_insn_same_sec(file, insn); insn;		\
	     insn = next_insn_same_sec(file, insn))

/*
 * This checks to see if the given function is a "noreturn" function.
 *
 * For global functions which are outside the scope of this object file, we
 * have to keep a manual list of them.
 *
 * For local functions, we have to detect them manually by simply looking for
 * the lack of a return instruction.
 *
 * Returns:
 *  -1: error
 *   0: no dead end
 *   1: dead end
 */
static int __dead_end_function(struct objtool_file *file, struct symbol *func,
			       int recursion)
{
	int i;
	struct instruction *insn;
	bool empty = true;

	/*
	 * Unfortunately these have to be hard coded because the noreturn
	 * attribute isn't provided in ELF data.
	 */
	static const char * const global_noreturns[] = {
		"__stack_chk_fail",
		"panic",
		"do_exit",
		"do_task_dead",
		"__module_put_and_exit",
		"complete_and_exit",
		"kvm_spurious_fault",
		"__reiserfs_panic",
		"lbug_with_loc",
		"fortify_panic",
		"usercopy_abort",
		"machine_real_restart",
		"rewind_stack_do_exit",
	};

	if (func->bind == STB_WEAK)
		return 0;

	if (func->bind == STB_GLOBAL)
		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
			if (!strcmp(func->name, global_noreturns[i]))
				return 1;

	if (!func->len)
		return 0;

	insn = find_insn(file, func->sec, func->offset);
	if (!insn->func)
		return 0;

	func_for_each_insn_all(file, func, insn) {
		empty = false;

		if (insn->type == INSN_RETURN)
			return 0;
	}

	if (empty)
		return 0;

	/*
	 * A function can have a sibling call instead of a return. In that
	 * case, the function's dead-end status depends on whether the target
	 * of the sibling call returns.
	 */
	func_for_each_insn_all(file, func, insn) {
		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
			struct instruction *dest = insn->jump_dest;

			if (!dest)
				/* sibling call to another file */
				return 0;

			if (dest->func && dest->func->pfunc != insn->func->pfunc) {

				/* local sibling call */
				if (recursion == 5) {
					/*
					 * Infinite recursion: two functions
					 * have sibling calls to each other.
					 * This is a very rare case. It means
					 * they aren't dead ends.
					 */
					return 0;
				}

				return __dead_end_function(file, dest->func,
							   recursion + 1);
			}
		}

		if (insn->type == INSN_JUMP_DYNAMIC && list_empty(&insn->alts))
			/* sibling call */
			return 0;
	}

	return 1;
}

static int dead_end_function(struct objtool_file *file, struct symbol *func)
{
	return __dead_end_function(file, func, 0);
}

static void clear_insn_state(struct insn_state *state)
{
	int i;

	memset(state, 0, sizeof(*state));
	state->cfa.base = CFI_UNDEFINED;
	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->vals[i].base = CFI_UNDEFINED;
	}
	state->drap_reg = CFI_UNDEFINED;
	state->drap_offset = -1;
}

/*
 * Call the arch-specific instruction decoder for all the instructions and add
 * them to the global instruction list.
 */
static int decode_instructions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	unsigned long offset;
	struct instruction *insn;
	int ret;

	for_each_sec(file, sec) {

		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		if (strcmp(sec->name, ".altinstr_replacement") &&
		    strcmp(sec->name, ".altinstr_aux") &&
		    strncmp(sec->name, ".discard.", 9))
			sec->text = true;

		for (offset = 0; offset < sec->len; offset += insn->len) {
			insn = malloc(sizeof(*insn));
			if (!insn) {
				WARN("malloc failed");
				return -1;
			}
			memset(insn, 0, sizeof(*insn));
			INIT_LIST_HEAD(&insn->alts);
			clear_insn_state(&insn->state);

			insn->sec = sec;
			insn->offset = offset;

			ret = arch_decode_instruction(file->elf, sec, offset,
						      sec->len - offset,
						      &insn->len, &insn->type,
						      &insn->immediate,
						      &insn->stack_op);
			if (ret)
				goto err;

			if (!insn->type || insn->type > INSN_LAST) {
				WARN_FUNC("invalid instruction type %d",
					  insn->sec, insn->offset, insn->type);
				ret = -1;
				goto err;
			}

			hash_add(file->insn_hash, &insn->hash, insn->offset);
			list_add_tail(&insn->list, &file->insn_list);
		}

		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			if (!find_insn(file, sec, func->offset)) {
				WARN("%s(): can't find starting instruction",
				     func->name);
				return -1;
			}

			func_for_each_insn(file, func, insn)
				if (!insn->func)
					insn->func = func;
		}
	}

	return 0;

err:
	free(insn);
	return ret;
}

/*
 * Mark "ud2" instructions and manually annotated dead ends.
 */
static int add_dead_ends(struct objtool_file *file)
{
	struct section *sec;
	struct rela *rela;
	struct instruction *insn;
	bool found;

	/*
	 * By default, "ud2" is a dead end unless otherwise annotated, because
	 * GCC 7 inserts it for certain divide-by-zero cases.
	 */
	for_each_insn(file, insn)
		if (insn->type == INSN_BUG)
			insn->dead_end = true;

	/*
	 * Check for manually annotated dead ends.
	 */
	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
	if (!sec)
		goto reachable;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}
		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (insn)
			insn = list_prev_entry(insn, list);
		else if (rela->addend == rela->sym->sec->len) {
			found = false;
			list_for_each_entry_reverse(insn, &file->insn_list, list) {
				if (insn->sec == rela->sym->sec) {
					found = true;
					break;
				}
			}

			if (!found) {
				WARN("can't find unreachable insn at %s+0x%x",
				     rela->sym->sec->name, rela->addend);
				return -1;
			}
		} else {
			WARN("can't find unreachable insn at %s+0x%x",
			     rela->sym->sec->name, rela->addend);
			return -1;
		}

		insn->dead_end = true;
	}

reachable:
	/*
	 * These manually annotated reachable checks are needed for GCC 4.4,
	 * where the Linux unreachable() macro isn't supported. In that case
	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
	 * not a dead end.
	 */
	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}
		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (insn)
			insn = list_prev_entry(insn, list);
		else if (rela->addend == rela->sym->sec->len) {
			found = false;
			list_for_each_entry_reverse(insn, &file->insn_list, list) {
				if (insn->sec == rela->sym->sec) {
					found = true;
					break;
				}
			}

			if (!found) {
				WARN("can't find reachable insn at %s+0x%x",
				     rela->sym->sec->name, rela->addend);
				return -1;
			}
		} else {
			WARN("can't find reachable insn at %s+0x%x",
			     rela->sym->sec->name, rela->addend);
			return -1;
		}

		insn->dead_end = false;
	}

	return 0;
}

/*
 * Warnings shouldn't be reported for ignored functions.
 */
static void add_ignores(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	struct symbol *func;
	struct rela *rela;

	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
	if (!sec)
		return;

	list_for_each_entry(rela, &sec->rela_list, list) {
		switch (rela->sym->type) {
		case STT_FUNC:
			func = rela->sym;
			break;

		case STT_SECTION:
			func = find_symbol_by_offset(rela->sym->sec, rela->addend);
			if (!func || func->type != STT_FUNC)
				continue;
			break;

		default:
			WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
			continue;
		}

		func_for_each_insn_all(file, func, insn)
			insn->ignore = true;
	}
}

/*
 * This is a whitelist of functions that are allowed to be called with AC set.
 * The list is meant to be minimal and only contains compiler instrumentation
 * ABI and a few functions used to implement *_{to,from}_user() functions.
 *
 * These functions must not directly change AC, but may PUSHF/POPF.
 */
static const char *uaccess_safe_builtin[] = {
	/* KASAN */
	"kasan_report",
	"check_memory_region",
	/* KASAN out-of-line */
	"__asan_loadN_noabort",
	"__asan_load1_noabort",
	"__asan_load2_noabort",
	"__asan_load4_noabort",
	"__asan_load8_noabort",
	"__asan_load16_noabort",
	"__asan_storeN_noabort",
	"__asan_store1_noabort",
	"__asan_store2_noabort",
	"__asan_store4_noabort",
	"__asan_store8_noabort",
	"__asan_store16_noabort",
	/* KASAN in-line */
	"__asan_report_load_n_noabort",
	"__asan_report_load1_noabort",
	"__asan_report_load2_noabort",
	"__asan_report_load4_noabort",
	"__asan_report_load8_noabort",
	"__asan_report_load16_noabort",
	"__asan_report_store_n_noabort",
	"__asan_report_store1_noabort",
	"__asan_report_store2_noabort",
	"__asan_report_store4_noabort",
	"__asan_report_store8_noabort",
	"__asan_report_store16_noabort",
	/* KCOV */
	"write_comp_data",
	"__sanitizer_cov_trace_pc",
	"__sanitizer_cov_trace_const_cmp1",
	"__sanitizer_cov_trace_const_cmp2",
	"__sanitizer_cov_trace_const_cmp4",
	"__sanitizer_cov_trace_const_cmp8",
	"__sanitizer_cov_trace_cmp1",
	"__sanitizer_cov_trace_cmp2",
	"__sanitizer_cov_trace_cmp4",
	"__sanitizer_cov_trace_cmp8",
	/* UBSAN */
	"ubsan_type_mismatch_common",
	"__ubsan_handle_type_mismatch",
	"__ubsan_handle_type_mismatch_v1",
	/* misc */
	"csum_partial_copy_generic",
	"__memcpy_mcsafe",
	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
	NULL
};

static void add_uaccess_safe(struct objtool_file *file)
{
	struct symbol *func;
	const char **name;

	if (!uaccess)
		return;

	for (name = uaccess_safe_builtin; *name; name++) {
		func = find_symbol_by_name(file->elf, *name);
		if (!func)
			continue;

		func->alias->uaccess_safe = true;
	}
}

/*
 * FIXME: For now, just ignore any alternatives which add retpolines. This is
 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
 * But it at least allows objtool to understand the control flow *around* the
 * retpoline.
 */
static int add_ignore_alternatives(struct objtool_file *file)
{
	struct section *sec;
	struct rela *rela;
	struct instruction *insn;

	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.ignore_alts entry");
			return -1;
		}

		insn->ignore_alts = true;
	}

	return 0;
}

/*
 * Find the destination instructions for all jumps.
 */
static int add_jump_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	struct rela *rela;
	struct section *dest_sec;
	unsigned long dest_off;

	for_each_insn(file, insn) {
		if (insn->type != INSN_JUMP_CONDITIONAL &&
		    insn->type != INSN_JUMP_UNCONDITIONAL)
			continue;

		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
			continue;

		rela = find_rela_by_dest_range(insn->sec, insn->offset,
					       insn->len);
		if (!rela) {
			dest_sec = insn->sec;
			dest_off = insn->offset + insn->len + insn->immediate;
		} else if (rela->sym->type == STT_SECTION) {
			dest_sec = rela->sym->sec;
			dest_off = rela->addend + 4;
		} else if (rela->sym->sec->idx) {
			dest_sec = rela->sym->sec;
			dest_off = rela->sym->sym.st_value + rela->addend + 4;
		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
			/*
			 * Retpoline jumps are really dynamic jumps in
			 * disguise, so convert them accordingly.
			 */
			insn->type = INSN_JUMP_DYNAMIC;
			insn->retpoline_safe = true;
			continue;
		} else {
			/* sibling call */
			insn->call_dest = rela->sym;
			insn->jump_dest = NULL;
			continue;
		}

		insn->jump_dest = find_insn(file, dest_sec, dest_off);
		if (!insn->jump_dest) {

			/*
			 * This is a special case where an alt instruction
			 * jumps past the end of the section. These are
			 * handled later in handle_group_alt().
			 */
			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
				continue;

			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
				  insn->sec, insn->offset, dest_sec->name,
				  dest_off);
			return -1;
		}

		/*
		 * Cross-function jump.
		 */
		if (insn->func && insn->jump_dest->func &&
		    insn->func != insn->jump_dest->func) {

			/*
			 * For GCC 8+, create parent/child links for any cold
			 * subfunctions. This is _mostly_ redundant with a
			 * similar initialization in read_symbols().
			 *
			 * If a function has aliases, we want the *first* such
			 * function in the symbol table to be the subfunction's
			 * parent. In that case we overwrite the
			 * initialization done in read_symbols().
			 *
			 * However this code can't completely replace the
			 * read_symbols() code because this doesn't detect the
			 * case where the parent function's only reference to a
			 * subfunction is through a switch table.
			 */
			if (!strstr(insn->func->name, ".cold.") &&
			    strstr(insn->jump_dest->func->name, ".cold.")) {
				insn->func->cfunc = insn->jump_dest->func;
				insn->jump_dest->func->pfunc = insn->func;

			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
				   insn->jump_dest->offset == insn->jump_dest->func->offset) {

				/* sibling call */
				insn->call_dest = insn->jump_dest->func;
				insn->jump_dest = NULL;
			}
		}
	}

	return 0;
}

/*
 * Find the destination instructions for all calls.
 */
static int add_call_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	unsigned long dest_off;
	struct rela *rela;

	for_each_insn(file, insn) {
		if (insn->type != INSN_CALL)
			continue;

		rela = find_rela_by_dest_range(insn->sec, insn->offset,
					       insn->len);
		if (!rela) {
			dest_off = insn->offset + insn->len + insn->immediate;
			insn->call_dest = find_symbol_by_offset(insn->sec,
								dest_off);

			if (!insn->call_dest && !insn->ignore) {
				WARN_FUNC("unsupported intra-function call",
					  insn->sec, insn->offset);
				if (retpoline)
					WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
				return -1;
			}

		} else if (rela->sym->type == STT_SECTION) {
			insn->call_dest = find_symbol_by_offset(rela->sym->sec,
								rela->addend+4);
			if (!insn->call_dest ||
			    insn->call_dest->type != STT_FUNC) {
				WARN_FUNC("can't find call dest symbol at %s+0x%x",
					  insn->sec, insn->offset,
					  rela->sym->sec->name,
					  rela->addend + 4);
				return -1;
			}
		} else
			insn->call_dest = rela->sym;
	}

	return 0;
}

/*
 * The .alternatives section requires some extra special care, over and above
 * what other special sections require:
 *
 * 1. Because alternatives are patched in-place, we need to insert a fake jump
 *    instruction at the end so that validate_branch() skips all the original
 *    replaced instructions when validating the new instruction path.
 *
 * 2. An added wrinkle is that the new instruction length might be zero. In
 *    that case the old instructions are replaced with noops. We simulate that
 *    by creating a fake jump as the only new instruction.
 *
 * 3. In some cases, the alternative section includes an instruction which
 *    conditionally jumps to the _end_ of the entry. We have to modify these
 *    jumps' destinations to point back to .text rather than the end of the
 *    entry in .altinstr_replacement.
 */
static int handle_group_alt(struct objtool_file *file,
			    struct special_alt *special_alt,
			    struct instruction *orig_insn,
			    struct instruction **new_insn)
{
	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
	unsigned long dest_off;

	last_orig_insn = NULL;
	insn = orig_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
			break;

		insn->alt_group = true;
		last_orig_insn = insn;
	}

	if (next_insn_same_sec(file, last_orig_insn)) {
		fake_jump = malloc(sizeof(*fake_jump));
		if (!fake_jump) {
			WARN("malloc failed");
			return -1;
		}
		memset(fake_jump, 0, sizeof(*fake_jump));
		INIT_LIST_HEAD(&fake_jump->alts);
		clear_insn_state(&fake_jump->state);

		fake_jump->sec = special_alt->new_sec;
		fake_jump->offset = FAKE_JUMP_OFFSET;
		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
		fake_jump->func = orig_insn->func;
	}

	if (!special_alt->new_len) {
		if (!fake_jump) {
			WARN("%s: empty alternative at end of section",
			     special_alt->orig_sec->name);
			return -1;
		}

		*new_insn = fake_jump;
		return 0;
	}

	last_new_insn = NULL;
	insn = *new_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->new_off + special_alt->new_len)
			break;

		last_new_insn = insn;

		insn->ignore = orig_insn->ignore_alts;
		insn->func = orig_insn->func;

		if (insn->type != INSN_JUMP_CONDITIONAL &&
		    insn->type != INSN_JUMP_UNCONDITIONAL)
			continue;

		if (!insn->immediate)
			continue;

		dest_off = insn->offset + insn->len + insn->immediate;
		if (dest_off == special_alt->new_off + special_alt->new_len) {
			if (!fake_jump) {
				WARN("%s: alternative jump to end of section",
				     special_alt->orig_sec->name);
				return -1;
			}
			insn->jump_dest = fake_jump;
		}

		if (!insn->jump_dest) {
			WARN_FUNC("can't find alternative jump destination",
				  insn->sec, insn->offset);
			return -1;
		}
	}

	if (!last_new_insn) {
		WARN_FUNC("can't find last new alternative instruction",
			  special_alt->new_sec, special_alt->new_off);
		return -1;
	}

	if (fake_jump)
		list_add(&fake_jump->list, &last_new_insn->list);

	return 0;
}
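
/*
 * Illustration (added for clarity, not part of the original kernel source):
 * assuming a two-instruction original group patched by a one-instruction
 * replacement, handle_group_alt() leaves things looking roughly like:
 *
 *	.text:				.altinstr_replacement:
 *	  insn A  (alt_group)		  new insn X
 *	  insn B  (alt_group)		  fake jump  --> insn C
 *	  insn C  <----------------------------------------+
 *
 * When validate_branch() takes the alternative path it follows X, then the
 * fake jump, and resumes at C, so the original A/B are skipped whenever the
 * alternative has skip_orig set.
 */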

/*
 * A jump table entry can either convert a nop to a jump or a jump to a nop.
 * If the original instruction is a jump, make the alt entry an effective nop
 * by just skipping the original instruction.
 */
static int handle_jump_alt(struct objtool_file *file,
			   struct special_alt *special_alt,
			   struct instruction *orig_insn,
			   struct instruction **new_insn)
{
	if (orig_insn->type == INSN_NOP)
		return 0;

	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
		WARN_FUNC("unsupported instruction at jump label",
			  orig_insn->sec, orig_insn->offset);
		return -1;
	}

	*new_insn = list_next_entry(orig_insn, list);
	return 0;
}

/*
 * Read all the special sections which have alternate instructions which can be
 * patched in or redirected to at runtime. Each instruction having alternate
 * instruction(s) has them added to its insn->alts list, which will be
 * traversed in validate_branch().
 */
static int add_special_section_alts(struct objtool_file *file)
{
	struct list_head special_alts;
	struct instruction *orig_insn, *new_insn;
	struct special_alt *special_alt, *tmp;
	struct alternative *alt;
	int ret;

	ret = special_get_alts(file->elf, &special_alts);
	if (ret)
		return ret;

	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {

		orig_insn = find_insn(file, special_alt->orig_sec,
				      special_alt->orig_off);
		if (!orig_insn) {
			WARN_FUNC("special: can't find orig instruction",
				  special_alt->orig_sec, special_alt->orig_off);
			ret = -1;
			goto out;
		}

		new_insn = NULL;
		if (!special_alt->group || special_alt->new_len) {
			new_insn = find_insn(file, special_alt->new_sec,
					     special_alt->new_off);
			if (!new_insn) {
				WARN_FUNC("special: can't find new instruction",
					  special_alt->new_sec,
					  special_alt->new_off);
				ret = -1;
				goto out;
			}
		}

		if (special_alt->group) {
			ret = handle_group_alt(file, special_alt, orig_insn,
					       &new_insn);
			if (ret)
				goto out;
		} else if (special_alt->jump_or_nop) {
			ret = handle_jump_alt(file, special_alt, orig_insn,
					      &new_insn);
			if (ret)
				goto out;
		}

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			ret = -1;
			goto out;
		}

		alt->insn = new_insn;
		alt->skip_orig = special_alt->skip_orig;
		orig_insn->ignore_alts |= special_alt->skip_alt;
		list_add_tail(&alt->list, &orig_insn->alts);

		list_del(&special_alt->list);
		free(special_alt);
	}

out:
	return ret;
}

static int add_switch_table(struct objtool_file *file, struct instruction *insn,
			    struct rela *table, struct rela *next_table)
{
	struct rela *rela = table;
	struct instruction *alt_insn;
	struct alternative *alt;
	struct symbol *pfunc = insn->func->pfunc;
	unsigned int prev_offset = 0;

	list_for_each_entry_from(rela, &table->rela_sec->rela_list, list) {
		if (rela == next_table)
			break;

		/* Make sure the switch table entries are consecutive: */
		if (prev_offset && rela->offset != prev_offset + 8)
			break;

		/* Detect function pointers from contiguous objects: */
		if (rela->sym->sec == pfunc->sec &&
		    rela->addend == pfunc->offset)
			break;

		alt_insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!alt_insn)
			break;

		/* Make sure the jmp dest is in the function or subfunction: */
		if (alt_insn->func->pfunc != pfunc)
			break;

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			return -1;
		}

		alt->insn = alt_insn;
		list_add_tail(&alt->list, &insn->alts);
		prev_offset = rela->offset;
	}

	if (!prev_offset) {
		WARN_FUNC("can't find switch jump table",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}

/*
 * find_switch_table() - Given a dynamic jump, find the switch jump table in
 * .rodata associated with it.
 *
 * There are 3 basic patterns:
 *
 * 1. jmpq *[rodata addr](,%reg,8)
 *
 *    This is the most common case by far. It jumps to an address in a simple
 *    jump table which is stored in .rodata.
 *
 * 2. jmpq *[rodata addr](%rip)
 *
 *    This is caused by a rare GCC quirk, currently only seen in three driver
 *    functions in the kernel, only with certain obscure non-distro configs.
 *
 *    As part of an optimization, GCC makes a copy of an existing switch jump
 *    table, modifies it, and then hard-codes the jump (albeit with an indirect
 *    jump) to use a single entry in the table. The rest of the jump table and
 *    some of its jump targets remain as dead code.
 *
 *    In such a case we can just crudely ignore all unreachable instruction
 *    warnings for the entire object file. Ideally we would just ignore them
 *    for the function, but that would require redesigning the code quite a
 *    bit. And honestly that's just not worth doing: unreachable instruction
 *    warnings are of questionable value anyway, and this is such a rare issue.
 *
 * 3. mov [rodata addr],%reg1
 *    ... some instructions ...
 *    jmpq *(%reg1,%reg2,8)
 *
 *    This is a fairly uncommon pattern which is new for GCC 6. As of this
 *    writing, there are 11 occurrences of it in the allmodconfig kernel.
 *
 *    As of GCC 7 there are quite a few more of these and the 'in between' code
 *    is significant. Esp. with KASAN enabled some of the code between the mov
 *    and jmpq uses .rodata itself, which can confuse things.
 *
 *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
 *    ensure the same register is used in the mov and jump instructions.
 *
 *    NOTE: RETPOLINE made it harder still to decode dynamic jumps.
 */
static struct rela *find_switch_table(struct objtool_file *file,
				      struct symbol *func,
				      struct instruction *insn)
{
	struct rela *text_rela, *rodata_rela;
	struct instruction *orig_insn = insn;
	struct section *rodata_sec;
	unsigned long table_offset;

	/*
	 * Backward search using the @first_jump_src links; these help avoid
	 * much of the 'in between' code, which keeps us from getting confused
	 * by it.
	 */
	for (;
	     &insn->list != &file->insn_list &&
	     insn->sec == func->sec &&
	     insn->offset >= func->offset;

	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {

		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
			break;

		/* allow small jumps within the range */
		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
		    insn->jump_dest &&
		    (insn->jump_dest->offset <= insn->offset ||
		     insn->jump_dest->offset > orig_insn->offset))
			break;

		/* look for a relocation which references .rodata */
		text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
						    insn->len);
		if (!text_rela || text_rela->sym->type != STT_SECTION ||
		    !text_rela->sym->sec->rodata)
			continue;

		table_offset = text_rela->addend;
		rodata_sec = text_rela->sym->sec;

		if (text_rela->type == R_X86_64_PC32)
			table_offset += 4;

		/*
		 * Make sure the .rodata address isn't associated with a
		 * symbol. gcc jump tables are anonymous data.
		 */
		if (find_symbol_containing(rodata_sec, table_offset))
			continue;

		rodata_rela = find_rela_by_dest(rodata_sec, table_offset);
		if (rodata_rela) {
			/*
			 * Use of RIP-relative switch jumps is quite rare, and
			 * indicates a rare GCC quirk/bug which can leave dead
			 * code behind.
			 */
			if (text_rela->type == R_X86_64_PC32)
				file->ignore_unreachables = true;

			return rodata_rela;
		}
	}

	return NULL;
}


static int add_func_switch_tables(struct objtool_file *file,
				  struct symbol *func)
{
	struct instruction *insn, *last = NULL, *prev_jump = NULL;
	struct rela *rela, *prev_rela = NULL;
	int ret;

	func_for_each_insn_all(file, func, insn) {
		if (!last)
			last = insn;

		/*
		 * Store back-pointers for unconditional forward jumps such
		 * that find_switch_table() can back-track using those and
		 * avoid some potentially confusing code.
		 */
		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
		    insn->offset > last->offset &&
		    insn->jump_dest->offset > insn->offset &&
		    !insn->jump_dest->first_jump_src) {

			insn->jump_dest->first_jump_src = insn;
			last = insn->jump_dest;
		}

		if (insn->type != INSN_JUMP_DYNAMIC)
			continue;

		rela = find_switch_table(file, func, insn);
		if (!rela)
			continue;

		/*
		 * We found a switch table, but we don't know yet how big it
		 * is. Don't add it until we reach the end of the function or
		 * the beginning of another switch table in the same function.
		 */
		if (prev_jump) {
			ret = add_switch_table(file, prev_jump, prev_rela, rela);
			if (ret)
				return ret;
		}

		prev_jump = insn;
		prev_rela = rela;
	}

	if (prev_jump) {
		ret = add_switch_table(file, prev_jump, prev_rela, NULL);
		if (ret)
			return ret;
	}

	return 0;
}
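
/*
 * Note (added for clarity, not part of the original kernel source): a switch
 * table's size isn't recorded anywhere, so add_func_switch_tables() bounds
 * each table by the start of the next one. For a function with two dynamic
 * jumps A and B whose tables sit back to back in .rodata, A's table is
 * processed as [table_A, table_B) and B's as [table_B, end of rela list),
 * which is what the (table, next_table) pair passed to add_switch_table()
 * expresses.
 */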

/*
 * For some switch statements, gcc generates a jump table in the .rodata
 * section which contains a list of addresses within the function to jump to.
 * This finds these jump tables and adds them to the insn->alts lists.
 */
static int add_switch_table_alts(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	int ret;

	if (!file->rodata)
		return 0;

	for_each_sec(file, sec) {
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			ret = add_func_switch_tables(file, func);
			if (ret)
				return ret;
		}
	}

	return 0;
}

static int read_unwind_hints(struct objtool_file *file)
{
	struct section *sec, *relasec;
	struct rela *rela;
	struct unwind_hint *hint;
	struct instruction *insn;
	struct cfi_reg *cfa;
	int i;

	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
	if (!sec)
		return 0;

	relasec = sec->rela;
	if (!relasec) {
		WARN("missing .rela.discard.unwind_hints section");
		return -1;
	}

	if (sec->len % sizeof(struct unwind_hint)) {
		WARN("struct unwind_hint size mismatch");
		return -1;
	}

	file->hints = true;

	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
		hint = (struct unwind_hint *)sec->data->d_buf + i;

		rela = find_rela_by_dest(sec, i * sizeof(*hint));
		if (!rela) {
			WARN("can't find rela for unwind_hints[%d]", i);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("can't find insn for unwind_hints[%d]", i);
			return -1;
		}

		cfa = &insn->state.cfa;

		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
			insn->save = true;
			continue;

		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
			insn->restore = true;
			insn->hint = true;
			continue;
		}

		insn->hint = true;

		switch (hint->sp_reg) {
		case ORC_REG_UNDEFINED:
			cfa->base = CFI_UNDEFINED;
			break;
		case ORC_REG_SP:
			cfa->base = CFI_SP;
			break;
		case ORC_REG_BP:
			cfa->base = CFI_BP;
			break;
		case ORC_REG_SP_INDIRECT:
			cfa->base = CFI_SP_INDIRECT;
			break;
		case ORC_REG_R10:
			cfa->base = CFI_R10;
			break;
		case ORC_REG_R13:
			cfa->base = CFI_R13;
			break;
		case ORC_REG_DI:
			cfa->base = CFI_DI;
			break;
		case ORC_REG_DX:
			cfa->base = CFI_DX;
			break;
		default:
			WARN_FUNC("unsupported unwind_hint sp base reg %d",
				  insn->sec, insn->offset, hint->sp_reg);
			return -1;
		}

		cfa->offset = hint->sp_offset;
		insn->state.type = hint->type;
		insn->state.end = hint->end;
	}

	return 0;
}

static int read_retpoline_hints(struct objtool_file *file)
{
	struct section *sec;
	struct instruction *insn;
	struct rela *rela;

	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.retpoline_safe entry");
			return -1;
		}

		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC) {
			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
				  insn->sec, insn->offset);
			return -1;
		}

		insn->retpoline_safe = true;
	}

	return 0;
}

static void mark_rodata(struct objtool_file *file)
{
	struct section *sec;
	bool found = false;

	/*
	 * This searches for the .rodata section or multiple .rodata.func_name
	 * sections if -fdata-sections is being used. The .str1.1 and .str1.8
	 * rodata sections are ignored as they don't contain jump tables.
	 */
	for_each_sec(file, sec) {
		if (!strncmp(sec->name, ".rodata", 7) &&
		    !strstr(sec->name, ".str1.")) {
			sec->rodata = true;
			found = true;
		}
	}

	file->rodata = found;
}

static int decode_sections(struct objtool_file *file)
{
	int ret;

	mark_rodata(file);

	ret = decode_instructions(file);
	if (ret)
		return ret;

	ret = add_dead_ends(file);
	if (ret)
		return ret;

	add_ignores(file);
	add_uaccess_safe(file);

	ret = add_ignore_alternatives(file);
	if (ret)
		return ret;

	ret = add_jump_destinations(file);
	if (ret)
		return ret;

	ret = add_special_section_alts(file);
	if (ret)
		return ret;

	ret = add_call_destinations(file);
	if (ret)
		return ret;

	ret = add_switch_table_alts(file);
	if (ret)
		return ret;

	ret = read_unwind_hints(file);
	if (ret)
		return ret;

	ret = read_retpoline_hints(file);
	if (ret)
		return ret;

	return 0;
}

static bool is_fentry_call(struct instruction *insn)
{
	if (insn->type == INSN_CALL &&
	    insn->call_dest->type == STT_NOTYPE &&
	    !strcmp(insn->call_dest->name, "__fentry__"))
		return true;

	return false;
}

static bool has_modified_stack_frame(struct insn_state *state)
{
	int i;

	if (state->cfa.base != initial_func_cfi.cfa.base ||
	    state->cfa.offset != initial_func_cfi.cfa.offset ||
	    state->stack_size != initial_func_cfi.cfa.offset ||
	    state->drap)
		return true;

	for (i = 0; i < CFI_NUM_REGS; i++)
		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
			return true;

	return false;
}

static bool has_valid_stack_frame(struct insn_state *state)
{
	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
	    state->regs[CFI_BP].offset == -16)
		return true;

	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
		return true;

	return false;
}

static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
{
	struct cfi_reg *cfa = &state->cfa;
	struct stack_op *op = &insn->stack_op;

	if (cfa->base != CFI_SP)
		return 0;

	/* push */
	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
		cfa->offset += 8;

	/* pop */
	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
		cfa->offset -= 8;

	/* add immediate to sp */
	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
		cfa->offset -= op->src.offset;

	return 0;
}
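
/*
 * Worked example (added for clarity, not part of the original kernel source):
 * with the CFA tracked relative to %rsp, a push raises cfa.offset by 8, the
 * matching pop lowers it by 8, and an "add $imm, %rsp" lowers it by imm
 * (OP_SRC_ADD with src.offset == imm). Only the CFA is tracked here; full
 * callee-saved register tracking is done by update_insn_state() below.
 */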
static void save_reg(struct insn_state *state, unsigned char reg, int base,
		     int offset)
{
	if (arch_callee_saved_reg(reg) &&
	    state->regs[reg].base == CFI_UNDEFINED) {
		state->regs[reg].base = base;
		state->regs[reg].offset = offset;
	}
}

static void restore_reg(struct insn_state *state, unsigned char reg)
{
	state->regs[reg].base = CFI_UNDEFINED;
	state->regs[reg].offset = 0;
}

/*
 * A note about DRAP stack alignment:
 *
 * GCC has the concept of a DRAP register, which is used to help keep track of
 * the stack pointer when aligning the stack. r10 or r13 is used as the DRAP
 * register. The typical DRAP pattern is:
 *
 *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
 *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
 *   41 ff 72 f8		pushq  -0x8(%r10)
 *   55				push   %rbp
 *   48 89 e5			mov    %rsp,%rbp
 *				(more pushes)
 *   41 52			push   %r10
 *				...
 *   41 5a			pop    %r10
 *				(more pops)
 *   5d				pop    %rbp
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * There are some variations in the epilogues, like:
 *
 *   5b				pop    %rbx
 *   41 5a			pop    %r10
 *   41 5c			pop    %r12
 *   41 5d			pop    %r13
 *   41 5e			pop    %r14
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * and:
 *
 *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
 *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
 *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
 *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * Sometimes r13 is used as the DRAP register, in which case it's saved and
 * restored beforehand:
 *
 *   41 55			push   %r13
 *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
 *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
 *				...
 *   49 8d 65 f0		lea    -0x10(%r13),%rsp
 *   41 5d			pop    %r13
 *   c3				retq
 */
static int update_insn_state(struct instruction *insn, struct insn_state *state)
{
	struct stack_op *op = &insn->stack_op;
	struct cfi_reg *cfa = &state->cfa;
	struct cfi_reg *regs = state->regs;

	/* stack operations don't make sense with an undefined CFA */
	if (cfa->base == CFI_UNDEFINED) {
		if (insn->func) {
			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
			return -1;
		}
		return 0;
	}

	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
		return update_insn_state_regs(insn, state);

	switch (op->dest.type) {

	case OP_DEST_REG:
		switch (op->src.type) {

		case OP_SRC_REG:
			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
			    cfa->base == CFI_SP &&
			    regs[CFI_BP].base == CFI_CFA &&
			    regs[CFI_BP].offset == -cfa->offset) {

				/* mov %rsp, %rbp */
				cfa->base = op->dest.reg;
				state->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP &&
				 op->dest.reg == CFI_BP && state->drap) {

				/* drap: mov %rsp, %rbp */
				regs[CFI_BP].base = CFI_BP;
				regs[CFI_BP].offset = -state->stack_size;
				state->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/*
				 * mov %rsp, %reg
				 *
				 * This is needed for the rare case where GCC
				 * does:
				 *
				 *   mov    %rsp, %rax
				 *   ...
				 *   mov    %rax, %rsp
				 */
				state->vals[op->dest.reg].base = CFI_CFA;
				state->vals[op->dest.reg].offset = -state->stack_size;
			}

			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
				 cfa->base == CFI_BP) {

				/*
				 * mov %rbp, %rsp
				 *
				 * Restore the original stack pointer (Clang).
				 */
				state->stack_size = -state->regs[CFI_BP].offset;
			}

			else if (op->dest.reg == cfa->base) {

				/* mov %reg, %rsp */
				if (cfa->base == CFI_SP &&
				    state->vals[op->src.reg].base == CFI_CFA) {

					/*
					 * This is needed for the rare case
					 * where GCC does something dumb like:
					 *
					 *   lea    0x8(%rsp), %rcx
					 *   ...
					 *   mov    %rcx, %rsp
					 */
					cfa->offset = -state->vals[op->src.reg].offset;
					state->stack_size = cfa->offset;

				} else {
					cfa->base = CFI_UNDEFINED;
					cfa->offset = 0;
				}
			}

			break;

		case OP_SRC_ADD:
			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {

				/* add imm, %rsp */
				state->stack_size -= op->src.offset;
				if (cfa->base == CFI_SP)
					cfa->offset -= op->src.offset;
				break;
			}

			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {

				/* lea disp(%rbp), %rsp */
				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
				break;
			}

			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/* drap: lea disp(%rsp), %drap */
				state->drap_reg = op->dest.reg;

				/*
				 * lea disp(%rsp), %reg
				 *
				 * This is needed for the rare case where GCC
				 * does something dumb like:
				 *
				 *   lea    0x8(%rsp), %rcx
				 *   ...
				 *   mov    %rcx, %rsp
				 */
				state->vals[op->dest.reg].base = CFI_CFA;
				state->vals[op->dest.reg].offset = \
					-state->stack_size + op->src.offset;

				break;
			}

			if (state->drap && op->dest.reg == CFI_SP &&
			    op->src.reg == state->drap_reg) {

				/* drap: lea disp(%drap), %rsp */
				cfa->base = CFI_SP;
				cfa->offset = state->stack_size = -op->src.offset;
				state->drap_reg = CFI_UNDEFINED;
				state->drap = false;
				break;
			}

			if (op->dest.reg == state->cfa.base) {
				WARN_FUNC("unsupported stack register modification",
					  insn->sec, insn->offset);
				return -1;
			}

			break;

		case OP_SRC_AND:
			if (op->dest.reg != CFI_SP ||
			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
				WARN_FUNC("unsupported stack pointer realignment",
					  insn->sec, insn->offset);
				return -1;
			}

			if (state->drap_reg != CFI_UNDEFINED) {
				/* drap: and imm, %rsp */
				cfa->base = state->drap_reg;
				cfa->offset = state->stack_size = 0;
				state->drap = true;
			}

			/*
			 * Older versions of GCC (4.8ish) realign the stack
			 * without DRAP, with a frame pointer.
			 */

			break;

		case OP_SRC_POP:
		case OP_SRC_POPF:
			if (!state->drap && op->dest.type == OP_DEST_REG &&
			    op->dest.reg == cfa->base) {

				/* pop %rbp */
				cfa->base = CFI_SP;
			}

			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
			    op->dest.type == OP_DEST_REG &&
			    op->dest.reg == state->drap_reg &&
			    state->drap_offset == -state->stack_size) {

				/* drap: pop %drap */
				cfa->base = state->drap_reg;
				cfa->offset = 0;
				state->drap_offset = -1;

			} else if (regs[op->dest.reg].offset == -state->stack_size) {

				/* pop %reg */
				restore_reg(state, op->dest.reg);
			}

			state->stack_size -= 8;
			if (cfa->base == CFI_SP)
				cfa->offset -= 8;

			break;

		case OP_SRC_REG_INDIRECT:
			if (state->drap && op->src.reg == CFI_BP &&
			    op->src.offset == state->drap_offset) {

				/* drap: mov disp(%rbp), %drap */
				cfa->base = state->drap_reg;
				cfa->offset = 0;
				state->drap_offset = -1;
			}

			if (state->drap && op->src.reg == CFI_BP &&
			    op->src.offset == regs[op->dest.reg].offset) {

				/* drap: mov disp(%rbp), %reg */
				restore_reg(state, op->dest.reg);

			} else if (op->src.reg == cfa->base &&
				   op->src.offset == regs[op->dest.reg].offset + cfa->offset) {

				/* mov disp(%rbp), %reg */
				/* mov disp(%rsp), %reg */
				restore_reg(state, op->dest.reg);
			}

			break;

		default:
			WARN_FUNC("unknown stack-related instruction",
				  insn->sec, insn->offset);
			return -1;
		}

		break;

	case OP_DEST_PUSH:
	case OP_DEST_PUSHF:
		state->stack_size += 8;
		if (cfa->base == CFI_SP)
			cfa->offset += 8;

		if (op->src.type != OP_SRC_REG)
			break;

		if (state->drap) {
			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {

				/* drap: push %drap */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = -state->stack_size;

				/* save drap so we know when to restore it */
				state->drap_offset = -state->stack_size;

			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {

				/* drap: push %rbp */
				state->stack_size = 0;

			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: push %reg */
				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
			}

		} else {

			/* push %reg */
			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
		}

		/* detect when asm code uses rbp as a scratch register */
		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
		    cfa->base != CFI_BP)
			state->bp_scratch = true;
		break;

	case OP_DEST_REG_INDIRECT:

		if (state->drap) {
			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {

				/* drap: mov %drap, disp(%rbp) */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = op->dest.offset;

				/* save drap offset so we know when to restore it */
				state->drap_offset = op->dest.offset;
			}

			else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: mov reg, disp(%rbp) */
				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
			}

		} else if (op->dest.reg == cfa->base) {

			/* mov reg, disp(%rbp) */
			/* mov reg, disp(%rsp) */
			save_reg(state, op->src.reg, CFI_CFA,
				 op->dest.offset - state->cfa.offset);
		}

		break;

	case OP_DEST_LEAVE:
		if ((!state->drap && cfa->base != CFI_BP) ||
		    (state->drap && cfa->base != state->drap_reg)) {
			WARN_FUNC("leave instruction with modified stack frame",
				  insn->sec, insn->offset);
			return -1;
		}

		/* leave (mov %rbp, %rsp; pop %rbp) */

		state->stack_size = -state->regs[CFI_BP].offset - 8;
		restore_reg(state, CFI_BP);

		if (!state->drap) {
			cfa->base = CFI_SP;
			cfa->offset -= 8;
		}

		break;

	case OP_DEST_MEM:
		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
			WARN_FUNC("unknown stack-related memory operation",
				  insn->sec, insn->offset);
			return -1;
		}

		/* pop mem */
		state->stack_size -= 8;
		if (cfa->base == CFI_SP)
			cfa->offset -= 8;

		break;

	default:
		WARN_FUNC("unknown stack-related instruction",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}

static bool insn_state_match(struct instruction *insn, struct insn_state *state)
{
	struct insn_state *state1 = &insn->state, *state2 = state;
	int i;

	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
			  insn->sec, insn->offset,
			  state1->cfa.base, state1->cfa.offset,
			  state2->cfa.base, state2->cfa.offset);

	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
		for (i = 0; i < CFI_NUM_REGS; i++) {
			if (!memcmp(&state1->regs[i], &state2->regs[i],
				    sizeof(struct cfi_reg)))
				continue;

			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
				  insn->sec, insn->offset,
				  i, state1->regs[i].base, state1->regs[i].offset,
				  i, state2->regs[i].base, state2->regs[i].offset);
			break;
		}

	} else if (state1->type != state2->type) {
		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
			  insn->sec, insn->offset, state1->type, state2->type);

	} else if (state1->drap != state2->drap ||
		   (state1->drap && state1->drap_reg != state2->drap_reg) ||
		   (state1->drap && state1->drap_offset != state2->drap_offset)) {
		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
			  insn->sec, insn->offset,
			  state1->drap, state1->drap_reg, state1->drap_offset,
			  state2->drap, state2->drap_reg, state2->drap_offset);

	} else
		return true;

	return false;
}

static inline bool func_uaccess_safe(struct symbol *func)
{
	if (func)
		return func->alias->uaccess_safe;

	return false;
}

static inline const char *insn_dest_name(struct instruction *insn)
{
	if (insn->call_dest)
		return insn->call_dest->name;

	return "{dynamic}";
}

static int validate_call(struct instruction *insn, struct insn_state *state)
{
	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
		WARN_FUNC("call to %s() with UACCESS enabled",
			  insn->sec, insn->offset, insn_dest_name(insn));
		return 1;
	}

	if (state->df) {
		WARN_FUNC("call to %s() with DF set",
			  insn->sec, insn->offset, insn_dest_name(insn));
		return 1;
	}

	return 0;
}

static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
{
	if (has_modified_stack_frame(state)) {
		WARN_FUNC("sibling call from callable instruction with modified stack frame",
			  insn->sec, insn->offset);
		return 1;
	}

	return validate_call(insn, state);
}

/*
 * Follow the branch starting at the given instruction, and recursively follow
 * any other branches (jumps). Meanwhile, track the frame pointer state at
 * each instruction and validate all the rules described in
 * tools/objtool/Documentation/stack-validation.txt.
 */
static int validate_branch(struct objtool_file *file, struct instruction *first,
			   struct insn_state state)
{
	struct alternative *alt;
	struct instruction *insn, *next_insn;
	struct section *sec;
	struct symbol *func = NULL;
	int ret;

	insn = first;
	sec = insn->sec;

	if (insn->alt_group && list_empty(&insn->alts)) {
		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
			  sec, insn->offset);
		return 1;
	}

	while (1) {
		next_insn = next_insn_same_sec(file, insn);

		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
			WARN("%s() falls through to next function %s()",
			     func->name, insn->func->name);
			return 1;
		}

		if (insn->func)
			func = insn->func->pfunc;

		if (func && insn->ignore) {
			WARN_FUNC("BUG: why am I validating an ignored function?",
				  sec, insn->offset);
			return 1;
		}

		if (insn->visited) {
			if (!insn->hint && !insn_state_match(insn, &state))
				return 1;

			/* If we were here with AC=0, but now have AC=1, go again */
			if (insn->state.uaccess || !state.uaccess)
				return 0;
		}

		if (insn->hint) {
			if (insn->restore) {
				struct instruction *save_insn, *i;

				i = insn;
				save_insn = NULL;
				func_for_each_insn_continue_reverse(file, insn->func, i) {
					if (i->save) {
						save_insn = i;
						break;
					}
				}

				if (!save_insn) {
					WARN_FUNC("no corresponding CFI save for CFI restore",
						  sec, insn->offset);
					return 1;
				}

				if (!save_insn->visited) {
					/*
					 * Oops, no state to copy yet.
					 * Hopefully we can reach this
					 * instruction from another branch
					 * after the save insn has been
					 * visited.
					 */
					if (insn == first)
						return 0;

					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
						  sec, insn->offset);
					return 1;
				}

				insn->state = save_insn->state;
			}

			state = insn->state;

		} else
			insn->state = state;

		insn->visited = true;

		if (!insn->ignore_alts) {
			bool skip_orig = false;

			list_for_each_entry(alt, &insn->alts, list) {
				if (alt->skip_orig)
					skip_orig = true;

				ret = validate_branch(file, alt->insn, state);
				if (ret) {
					if (backtrace)
						BT_FUNC("(alt)", insn);
					return ret;
				}
			}

			if (skip_orig)
				return 0;
		}

		switch (insn->type) {

		case INSN_RETURN:
			if (state.uaccess && !func_uaccess_safe(func)) {
				WARN_FUNC("return with UACCESS enabled", sec, insn->offset);
				return 1;
			}

			if (!state.uaccess && func_uaccess_safe(func)) {
				WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function", sec, insn->offset);
				return 1;
			}

			if (state.df) {
				WARN_FUNC("return with DF set", sec, insn->offset);
				return 1;
			}

			if (func && has_modified_stack_frame(&state)) {
				WARN_FUNC("return with modified stack frame",
					  sec, insn->offset);
				return 1;
			}

			if (state.bp_scratch) {
				WARN("%s uses BP as a scratch register",
				     insn->func->name);
				return 1;
			}

			return 0;

		case INSN_CALL:
		case INSN_CALL_DYNAMIC:
			ret = validate_call(insn, &state);
			if (ret)
				return ret;

			if (insn->type == INSN_CALL) {
				if (is_fentry_call(insn))
					break;

				ret = dead_end_function(file, insn->call_dest);
				if (ret == 1)
					return 0;
				if (ret == -1)
					return 1;
			}

			if (!no_fp && func && !has_valid_stack_frame(&state)) {
				WARN_FUNC("call without frame pointer save/setup",
					  sec, insn->offset);
				return 1;
			}
			break;

		case INSN_JUMP_CONDITIONAL:
		case INSN_JUMP_UNCONDITIONAL:
			if (func && !insn->jump_dest) {
				ret = validate_sibling_call(insn, &state);
				if (ret)
					return ret;

			} else if (insn->jump_dest &&
				   (!func || !insn->jump_dest->func ||
				    insn->jump_dest->func->pfunc == func)) {
				ret = validate_branch(file, insn->jump_dest,
						      state);
				if (ret) {
					if (backtrace)
						BT_FUNC("(branch)", insn);
					return ret;
				}
			}

			if (insn->type == INSN_JUMP_UNCONDITIONAL)
				return 0;

			break;

		case INSN_JUMP_DYNAMIC:
			if (func && list_empty(&insn->alts)) {
				ret = validate_sibling_call(insn, &state);
				if (ret)
					return ret;
			}

			return 0;

		case INSN_CONTEXT_SWITCH:
			if (func && (!next_insn || !next_insn->hint)) {
				WARN_FUNC("unsupported instruction in callable function",
					  sec, insn->offset);
				return 1;
			}
			return 0;

		case INSN_STACK:
			if (update_insn_state(insn, &state))
				return 1;

			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
				if (!state.uaccess_stack) {
					state.uaccess_stack = 1;
				} else if (state.uaccess_stack >> 31) {
					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
					return 1;
				}
				state.uaccess_stack <<= 1;
				state.uaccess_stack |= state.uaccess;
			}

			if (insn->stack_op.src.type == OP_SRC_POPF) {
				if (state.uaccess_stack) {
					state.uaccess = state.uaccess_stack & 1;
					state.uaccess_stack >>= 1;
			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
				if (!state.uaccess_stack) {
					state.uaccess_stack = 1;
				} else if (state.uaccess_stack >> 31) {
					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
					return 1;
				}
				state.uaccess_stack <<= 1;
				state.uaccess_stack |= state.uaccess;
			}

			if (insn->stack_op.src.type == OP_SRC_POPF) {
				if (state.uaccess_stack) {
					state.uaccess = state.uaccess_stack & 1;
					state.uaccess_stack >>= 1;
					if (state.uaccess_stack == 1)
						state.uaccess_stack = 0;
				}
			}

			break;

		case INSN_STAC:
			if (state.uaccess) {
				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
				return 1;
			}

			state.uaccess = true;
			break;

		case INSN_CLAC:
			if (!state.uaccess && insn->func) {
				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
				return 1;
			}

			if (func_uaccess_safe(func) && !state.uaccess_stack) {
				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
				return 1;
			}

			state.uaccess = false;
			break;

		case INSN_STD:
			if (state.df)
				WARN_FUNC("recursive STD", sec, insn->offset);

			state.df = true;
			break;

		case INSN_CLD:
			if (!state.df && insn->func)
				WARN_FUNC("redundant CLD", sec, insn->offset);

			state.df = false;
			break;

		default:
			break;
		}

		if (insn->dead_end)
			return 0;

		if (!next_insn) {
			if (state.cfa.base == CFI_UNDEFINED)
				return 0;
			WARN("%s: unexpected end of section", sec->name);
			return 1;
		}

		insn = next_insn;
	}

	return 0;
}

static int validate_unwind_hints(struct objtool_file *file)
{
	struct instruction *insn;
	int ret, warnings = 0;
	struct insn_state state;

	if (!file->hints)
		return 0;

	clear_insn_state(&state);

	for_each_insn(file, insn) {
		if (insn->hint && !insn->visited) {
			ret = validate_branch(file, insn, state);
			if (ret && backtrace)
				BT_FUNC("<=== (hint)", insn);
			warnings += ret;
		}
	}

	return warnings;
}

static int validate_retpoline(struct objtool_file *file)
{
	struct instruction *insn;
	int warnings = 0;

	for_each_insn(file, insn) {
		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC)
			continue;

		if (insn->retpoline_safe)
			continue;

		/*
		 * .init.text code is run before userspace and thus doesn't
		 * strictly need retpolines, except for modules, which are
		 * loaded late and very much do need retpolines in their
		 * .init.text.
		 */
		if (!strcmp(insn->sec->name, ".init.text") && !module)
			continue;

		WARN_FUNC("indirect %s found in RETPOLINE build",
			  insn->sec, insn->offset,
			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");

		warnings++;
	}

	return warnings;
}

static bool is_kasan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
}

static bool is_ubsan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn->call_dest->name,
			"__ubsan_handle_builtin_unreachable"));
}

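/*
 * An instruction that was never visited by validate_branch() isn't
 * necessarily a problem.  This decides whether such an instruction can be
 * ignored: code in fixup/alternative sections, and short instruction
 * sequences that only exist to reach the KASAN/UBSAN "unreachable" handlers,
 * are expected to be unreachable.
 */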
static bool ignore_unreachable_insn(struct instruction *insn)
{
	int i;

	if (insn->ignore || insn->type == INSN_NOP)
		return true;

	/*
	 * Ignore any unused exceptions. This can happen when a whitelisted
	 * function has an exception table entry.
	 *
	 * Also ignore alternative replacement instructions. This can happen
	 * when a whitelisted function uses one of the ALTERNATIVE macros.
	 */
	if (!strcmp(insn->sec->name, ".fixup") ||
	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
	    !strcmp(insn->sec->name, ".altinstr_aux"))
		return true;

	/*
	 * Check if this (or a subsequent) instruction is related to
	 * CONFIG_UBSAN or CONFIG_KASAN.
	 *
	 * End the search at 5 instructions to avoid going into the weeds.
	 */
	if (!insn->func)
		return false;
	for (i = 0; i < 5; i++) {

		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
			return true;

		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
			if (insn->jump_dest &&
			    insn->jump_dest->func == insn->func) {
				insn = insn->jump_dest;
				continue;
			}

			break;
		}

		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
			break;

		insn = list_next_entry(insn, list);
	}

	return false;
}

static int validate_functions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	struct instruction *insn;
	struct insn_state state;
	int ret, warnings = 0;

	clear_insn_state(&state);

	state.cfa = initial_func_cfi.cfa;
	memcpy(&state.regs, &initial_func_cfi.regs,
	       CFI_NUM_REGS * sizeof(struct cfi_reg));
	state.stack_size = initial_func_cfi.cfa.offset;

	for_each_sec(file, sec) {
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC || func->pfunc != func)
				continue;

			insn = find_insn(file, sec, func->offset);
			if (!insn || insn->ignore)
				continue;

			state.uaccess = func->alias->uaccess_safe;

			ret = validate_branch(file, insn, state);
			if (ret && backtrace)
				BT_FUNC("<=== (func)", insn);
			warnings += ret;
		}
	}

	return warnings;
}

static int validate_reachable_instructions(struct objtool_file *file)
{
	struct instruction *insn;

	if (file->ignore_unreachables)
		return 0;

	for_each_insn(file, insn) {
		if (insn->visited || ignore_unreachable_insn(insn))
			continue;

		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
		return 1;
	}

	return 0;
}

static void cleanup(struct objtool_file *file)
{
	struct instruction *insn, *tmpinsn;
	struct alternative *alt, *tmpalt;

	list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
		list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
			list_del(&alt->list);
			free(alt);
		}
		list_del(&insn->list);
		hash_del(&insn->hash);
		free(insn);
	}
	elf_close(file->elf);
}

static struct objtool_file file;

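/*
 * Main pass driver: decode the object file, run the requested validations
 * (retpoline, per-function stack state, unwind hints, reachability) and,
 * for ORC builds, generate and write the ORC unwind sections.
 */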
int check(const char *_objname, bool orc)
{
	int ret, warnings = 0;

	objname = _objname;

	file.elf = elf_open(objname, orc ? O_RDWR : O_RDONLY);
	if (!file.elf)
		return 1;

	INIT_LIST_HEAD(&file.insn_list);
	hash_init(file.insn_hash);
	file.c_file = find_section_by_name(file.elf, ".comment");
	file.ignore_unreachables = no_unreachable;
	file.hints = false;

	arch_initial_func_cfi_state(&initial_func_cfi);

	ret = decode_sections(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	if (list_empty(&file.insn_list))
		goto out;

	if (retpoline) {
		ret = validate_retpoline(&file);
		if (ret < 0)
			return ret;
		warnings += ret;
	}

	ret = validate_functions(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	ret = validate_unwind_hints(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	if (!warnings) {
		ret = validate_reachable_instructions(&file);
		if (ret < 0)
			goto out;
		warnings += ret;
	}

	if (orc) {
		ret = create_orc(&file);
		if (ret < 0)
			goto out;

		ret = create_orc_sections(&file);
		if (ret < 0)
			goto out;

		ret = elf_write(file.elf);
		if (ret < 0)
			goto out;
	}

out:
	cleanup(&file);

	/* ignore warnings for now until we get all the code cleaned up */
	if (ret || warnings)
		return 0;
	return 0;
}