/*
 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <http://www.gnu.org/licenses/>.
 */

#include <string.h>
#include <stdlib.h>

#include "builtin.h"
#include "check.h"
#include "elf.h"
#include "special.h"
#include "arch.h"
#include "warn.h"

#include <linux/hashtable.h>
#include <linux/kernel.h>

struct alternative {
	struct list_head list;
	struct instruction *insn;
	bool skip_orig;
};

const char *objname;
struct cfi_state initial_func_cfi;

struct instruction *find_insn(struct objtool_file *file,
			      struct section *sec, unsigned long offset)
{
	struct instruction *insn;

	hash_for_each_possible(file->insn_hash, insn, hash, offset)
		if (insn->sec == sec && insn->offset == offset)
			return insn;

	return NULL;
}

static struct instruction *next_insn_same_sec(struct objtool_file *file,
					      struct instruction *insn)
{
	struct instruction *next = list_next_entry(insn, list);

	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
		return NULL;

	return next;
}

static struct instruction *next_insn_same_func(struct objtool_file *file,
					       struct instruction *insn)
{
	struct instruction *next = list_next_entry(insn, list);
	struct symbol *func = insn->func;

	if (!func)
		return NULL;

	if (&next->list != &file->insn_list && next->func == func)
		return next;

	/* Check if we're already in the subfunction: */
	if (func == func->cfunc)
		return NULL;

	/* Move to the subfunction: */
	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
}

#define func_for_each_insn_all(file, func, insn)			\
	for (insn = find_insn(file, func->sec, func->offset);		\
	     insn;							\
	     insn = next_insn_same_func(file, insn))

#define func_for_each_insn(file, func, insn)				\
	for (insn = find_insn(file, func->sec, func->offset);		\
	     insn && &insn->list != &file->insn_list &&			\
		insn->sec == func->sec &&				\
		insn->offset < func->offset + func->len;		\
	     insn = list_next_entry(insn, list))

#define func_for_each_insn_continue_reverse(file, func, insn)		\
	for (insn = list_prev_entry(insn, list);			\
	     &insn->list != &file->insn_list &&				\
		insn->sec == func->sec && insn->offset >= func->offset;	\
	     insn = list_prev_entry(insn, list))

#define sec_for_each_insn_from(file, insn)				\
	for (; insn; insn = next_insn_same_sec(file, insn))

#define sec_for_each_insn_continue(file, insn)				\
	for (insn = next_insn_same_sec(file, insn); insn;		\
	     insn = next_insn_same_sec(file, insn))

/*
 * This checks to see if the given function is a "noreturn" function.
 *
 * For global functions which are outside the scope of this object file, we
 * have to keep a manual list of them.
 *
 * For local functions, we have to detect them manually by simply looking for
 * the lack of a return instruction.
 *
 * Returns:
 *  -1: error
 *   0: no dead end
 *   1: dead end
 */
static int __dead_end_function(struct objtool_file *file, struct symbol *func,
			       int recursion)
{
	int i;
	struct instruction *insn;
	bool empty = true;

	/*
	 * Unfortunately these have to be hard coded because the noreturn
	 * attribute isn't provided in ELF data.
	 */
	static const char * const global_noreturns[] = {
		"__stack_chk_fail",
		"panic",
		"do_exit",
		"do_task_dead",
		"__module_put_and_exit",
		"complete_and_exit",
		"kvm_spurious_fault",
		"__reiserfs_panic",
		"lbug_with_loc",
		"fortify_panic",
		"usercopy_abort",
		"machine_real_restart",
		"rewind_stack_do_exit",
	};

	if (func->bind == STB_WEAK)
		return 0;

	if (func->bind == STB_GLOBAL)
		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
			if (!strcmp(func->name, global_noreturns[i]))
				return 1;

	if (!func->len)
		return 0;

	insn = find_insn(file, func->sec, func->offset);
	if (!insn->func)
		return 0;

	func_for_each_insn_all(file, func, insn) {
		empty = false;

		if (insn->type == INSN_RETURN)
			return 0;
	}

	if (empty)
		return 0;

	/*
	 * A function can have a sibling call instead of a return.  In that
	 * case, the function's dead-end status depends on whether the target
	 * of the sibling call returns.
	 */
	func_for_each_insn_all(file, func, insn) {
		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
			struct instruction *dest = insn->jump_dest;

			if (!dest)
				/* sibling call to another file */
				return 0;

			if (dest->func && dest->func->pfunc != insn->func->pfunc) {

				/* local sibling call */
				if (recursion == 5) {
					/*
					 * Infinite recursion: two functions
					 * have sibling calls to each other.
					 * This is a very rare case.  It means
					 * they aren't dead ends.
					 */
					return 0;
				}

				return __dead_end_function(file, dest->func,
							   recursion + 1);
			}
		}

		if (insn->type == INSN_JUMP_DYNAMIC && list_empty(&insn->alts))
			/* sibling call */
			return 0;
	}

	return 1;
}

static int dead_end_function(struct objtool_file *file, struct symbol *func)
{
	return __dead_end_function(file, func, 0);
}

static void clear_insn_state(struct insn_state *state)
{
	int i;

	memset(state, 0, sizeof(*state));
	state->cfa.base = CFI_UNDEFINED;
	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->vals[i].base = CFI_UNDEFINED;
	}
	state->drap_reg = CFI_UNDEFINED;
	state->drap_offset = -1;
}

/*
 * Call the arch-specific instruction decoder for all the instructions and add
 * them to the global instruction list.
 */
static int decode_instructions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	unsigned long offset;
	struct instruction *insn;
	int ret;

	for_each_sec(file, sec) {

		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		if (strcmp(sec->name, ".altinstr_replacement") &&
		    strcmp(sec->name, ".altinstr_aux") &&
		    strncmp(sec->name, ".discard.", 9))
			sec->text = true;

		for (offset = 0; offset < sec->len; offset += insn->len) {
			insn = malloc(sizeof(*insn));
			if (!insn) {
				WARN("malloc failed");
				return -1;
			}
			memset(insn, 0, sizeof(*insn));
			INIT_LIST_HEAD(&insn->alts);
			clear_insn_state(&insn->state);

			insn->sec = sec;
			insn->offset = offset;

			ret = arch_decode_instruction(file->elf, sec, offset,
						      sec->len - offset,
						      &insn->len, &insn->type,
						      &insn->immediate,
						      &insn->stack_op);
			if (ret)
				goto err;

			if (!insn->type || insn->type > INSN_LAST) {
				WARN_FUNC("invalid instruction type %d",
					  insn->sec, insn->offset, insn->type);
				ret = -1;
				goto err;
			}

			hash_add(file->insn_hash, &insn->hash, insn->offset);
			list_add_tail(&insn->list, &file->insn_list);
		}

		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			if (!find_insn(file, sec, func->offset)) {
				WARN("%s(): can't find starting instruction",
				     func->name);
				return -1;
			}

			func_for_each_insn(file, func, insn)
				if (!insn->func)
					insn->func = func;
		}
	}

	return 0;

err:
	free(insn);
	return ret;
}

/*
 * Mark "ud2" instructions and manually annotated dead ends.
 */
static int add_dead_ends(struct objtool_file *file)
{
	struct section *sec;
	struct rela *rela;
	struct instruction *insn;
	bool found;

	/*
	 * By default, "ud2" is a dead end unless otherwise annotated, because
	 * GCC 7 inserts it for certain divide-by-zero cases.
	 */
	for_each_insn(file, insn)
		if (insn->type == INSN_BUG)
			insn->dead_end = true;

	/*
	 * Check for manually annotated dead ends.
	 */
	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
	if (!sec)
		goto reachable;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}
		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (insn)
			insn = list_prev_entry(insn, list);
		else if (rela->addend == rela->sym->sec->len) {
			found = false;
			list_for_each_entry_reverse(insn, &file->insn_list, list) {
				if (insn->sec == rela->sym->sec) {
					found = true;
					break;
				}
			}

			if (!found) {
				WARN("can't find unreachable insn at %s+0x%x",
				     rela->sym->sec->name, rela->addend);
				return -1;
			}
		} else {
			WARN("can't find unreachable insn at %s+0x%x",
			     rela->sym->sec->name, rela->addend);
			return -1;
		}

		insn->dead_end = true;
	}

reachable:
	/*
	 * These manually annotated reachable checks are needed for GCC 4.4,
	 * where the Linux unreachable() macro isn't supported.  In that case
	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
	 * not a dead end.
	 */
	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}
		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (insn)
			insn = list_prev_entry(insn, list);
		else if (rela->addend == rela->sym->sec->len) {
			found = false;
			list_for_each_entry_reverse(insn, &file->insn_list, list) {
				if (insn->sec == rela->sym->sec) {
					found = true;
					break;
				}
			}

			if (!found) {
				WARN("can't find reachable insn at %s+0x%x",
				     rela->sym->sec->name, rela->addend);
				return -1;
			}
		} else {
			WARN("can't find reachable insn at %s+0x%x",
			     rela->sym->sec->name, rela->addend);
			return -1;
		}

		insn->dead_end = false;
	}

	return 0;
}

/*
 * Warnings shouldn't be reported for ignored functions.
 */
static void add_ignores(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	struct symbol *func;
	struct rela *rela;

	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
	if (!sec)
		return;

	list_for_each_entry(rela, &sec->rela_list, list) {
		switch (rela->sym->type) {
		case STT_FUNC:
			func = rela->sym;
			break;

		case STT_SECTION:
			func = find_symbol_by_offset(rela->sym->sec, rela->addend);
			if (!func || func->type != STT_FUNC)
				continue;
			break;

		default:
			WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
			continue;
		}

		func_for_each_insn_all(file, func, insn)
			insn->ignore = true;
	}
}

/*
 * This is a whitelist of functions that are allowed to be called with AC set.
 * The list is meant to be minimal and only contains compiler instrumentation
 * ABI and a few functions used to implement *_{to,from}_user() functions.
 *
 * These functions must not directly change AC, but may PUSHF/POPF.
 */
static const char *uaccess_safe_builtin[] = {
	/* KASAN */
	"kasan_report",
	"check_memory_region",
	/* KASAN out-of-line */
	"__asan_loadN_noabort",
	"__asan_load1_noabort",
	"__asan_load2_noabort",
	"__asan_load4_noabort",
	"__asan_load8_noabort",
	"__asan_load16_noabort",
	"__asan_storeN_noabort",
	"__asan_store1_noabort",
	"__asan_store2_noabort",
	"__asan_store4_noabort",
	"__asan_store8_noabort",
	"__asan_store16_noabort",
	/* KASAN in-line */
	"__asan_report_load_n_noabort",
	"__asan_report_load1_noabort",
	"__asan_report_load2_noabort",
	"__asan_report_load4_noabort",
	"__asan_report_load8_noabort",
	"__asan_report_load16_noabort",
	"__asan_report_store_n_noabort",
	"__asan_report_store1_noabort",
	"__asan_report_store2_noabort",
	"__asan_report_store4_noabort",
	"__asan_report_store8_noabort",
	"__asan_report_store16_noabort",
	/* KCOV */
	"write_comp_data",
	"__sanitizer_cov_trace_pc",
	"__sanitizer_cov_trace_const_cmp1",
	"__sanitizer_cov_trace_const_cmp2",
	"__sanitizer_cov_trace_const_cmp4",
	"__sanitizer_cov_trace_const_cmp8",
	"__sanitizer_cov_trace_cmp1",
	"__sanitizer_cov_trace_cmp2",
	"__sanitizer_cov_trace_cmp4",
	"__sanitizer_cov_trace_cmp8",
	/* UBSAN */
	"ubsan_type_mismatch_common",
	"__ubsan_handle_type_mismatch",
	"__ubsan_handle_type_mismatch_v1",
	/* misc */
	"csum_partial_copy_generic",
	"__memcpy_mcsafe",
	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
	NULL
};

static void add_uaccess_safe(struct objtool_file *file)
{
	struct symbol *func;
	const char **name;

	if (!uaccess)
		return;

	for (name = uaccess_safe_builtin; *name; name++) {
		func = find_symbol_by_name(file->elf, *name);
		if (!func)
			continue;

		func->alias->uaccess_safe = true;
	}
}

/*
 * FIXME: For now, just ignore any alternatives which add retpolines.  This is
 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
 * But it at least allows objtool to understand the control flow *around* the
 * retpoline.
 */
static int add_ignore_alternatives(struct objtool_file *file)
{
	struct section *sec;
	struct rela *rela;
	struct instruction *insn;

	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.ignore_alts entry");
			return -1;
		}

		insn->ignore_alts = true;
	}

	return 0;
}

/*
 * Find the destination instructions for all jumps.
 */
static int add_jump_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	struct rela *rela;
	struct section *dest_sec;
	unsigned long dest_off;

	for_each_insn(file, insn) {
		if (insn->type != INSN_JUMP_CONDITIONAL &&
		    insn->type != INSN_JUMP_UNCONDITIONAL)
			continue;

		if (insn->ignore)
			continue;

		rela = find_rela_by_dest_range(insn->sec, insn->offset,
					       insn->len);
		if (!rela) {
			dest_sec = insn->sec;
			dest_off = insn->offset + insn->len + insn->immediate;
		} else if (rela->sym->type == STT_SECTION) {
			dest_sec = rela->sym->sec;
			dest_off = rela->addend + 4;
		} else if (rela->sym->sec->idx) {
			dest_sec = rela->sym->sec;
			dest_off = rela->sym->sym.st_value + rela->addend + 4;
		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
			/*
			 * Retpoline jumps are really dynamic jumps in
			 * disguise, so convert them accordingly.
			 */
			insn->type = INSN_JUMP_DYNAMIC;
			insn->retpoline_safe = true;
			continue;
		} else {
			/* sibling call */
			insn->call_dest = rela->sym;
			insn->jump_dest = NULL;
			continue;
		}

		insn->jump_dest = find_insn(file, dest_sec, dest_off);
		if (!insn->jump_dest) {

			/*
			 * This is a special case where an alt instruction
			 * jumps past the end of the section.  These are
			 * handled later in handle_group_alt().
			 */
			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
				continue;

			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
				  insn->sec, insn->offset, dest_sec->name,
				  dest_off);
			return -1;
		}

		/*
		 * Cross-function jump.
		 */
		if (insn->func && insn->jump_dest->func &&
		    insn->func != insn->jump_dest->func) {

			/*
			 * For GCC 8+, create parent/child links for any cold
			 * subfunctions.  This is _mostly_ redundant with a
			 * similar initialization in read_symbols().
			 *
			 * If a function has aliases, we want the *first* such
			 * function in the symbol table to be the subfunction's
			 * parent.  In that case we overwrite the
			 * initialization done in read_symbols().
			 *
			 * However this code can't completely replace the
			 * read_symbols() code because this doesn't detect the
			 * case where the parent function's only reference to a
			 * subfunction is through a switch table.
			 */
			if (!strstr(insn->func->name, ".cold.") &&
			    strstr(insn->jump_dest->func->name, ".cold.")) {
				insn->func->cfunc = insn->jump_dest->func;
				insn->jump_dest->func->pfunc = insn->func;

			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
				   insn->jump_dest->offset == insn->jump_dest->func->offset) {

				/* sibling call */
				insn->call_dest = insn->jump_dest->func;
				insn->jump_dest = NULL;
			}
		}
	}

	return 0;
}

/*
 * Find the destination instructions for all calls.
 */
static int add_call_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	unsigned long dest_off;
	struct rela *rela;

	for_each_insn(file, insn) {
		if (insn->type != INSN_CALL)
			continue;

		rela = find_rela_by_dest_range(insn->sec, insn->offset,
					       insn->len);
		if (!rela) {
			dest_off = insn->offset + insn->len + insn->immediate;
			insn->call_dest = find_symbol_by_offset(insn->sec,
								dest_off);

			if (!insn->call_dest && !insn->ignore) {
				WARN_FUNC("unsupported intra-function call",
					  insn->sec, insn->offset);
				if (retpoline)
					WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
				return -1;
			}

		} else if (rela->sym->type == STT_SECTION) {
			insn->call_dest = find_symbol_by_offset(rela->sym->sec,
								rela->addend+4);
			if (!insn->call_dest ||
			    insn->call_dest->type != STT_FUNC) {
				WARN_FUNC("can't find call dest symbol at %s+0x%x",
					  insn->sec, insn->offset,
					  rela->sym->sec->name,
					  rela->addend + 4);
				return -1;
			}
		} else
			insn->call_dest = rela->sym;
	}

	return 0;
}

/*
 * The .alternatives section requires some extra special care, over and above
 * what other special sections require:
 *
 * 1. Because alternatives are patched in-place, we need to insert a fake jump
 *    instruction at the end so that validate_branch() skips all the original
 *    replaced instructions when validating the new instruction path.
 *
 * 2. An added wrinkle is that the new instruction length might be zero.  In
 *    that case the old instructions are replaced with noops.  We simulate that
 *    by creating a fake jump as the only new instruction.
 *
 * 3. In some cases, the alternative section includes an instruction which
 *    conditionally jumps to the _end_ of the entry.  We have to modify these
 *    jumps' destinations to point back to .text rather than the end of the
 *    entry in .altinstr_replacement.
 */
static int handle_group_alt(struct objtool_file *file,
			    struct special_alt *special_alt,
			    struct instruction *orig_insn,
			    struct instruction **new_insn)
{
	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
	unsigned long dest_off;

	last_orig_insn = NULL;
	insn = orig_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
			break;

		insn->alt_group = true;
		last_orig_insn = insn;
	}

	if (next_insn_same_sec(file, last_orig_insn)) {
		fake_jump = malloc(sizeof(*fake_jump));
		if (!fake_jump) {
			WARN("malloc failed");
			return -1;
		}
		memset(fake_jump, 0, sizeof(*fake_jump));
		INIT_LIST_HEAD(&fake_jump->alts);
		clear_insn_state(&fake_jump->state);

		fake_jump->sec = special_alt->new_sec;
		fake_jump->offset = -1;
		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
		fake_jump->ignore = true;
	}

	if (!special_alt->new_len) {
		if (!fake_jump) {
			WARN("%s: empty alternative at end of section",
			     special_alt->orig_sec->name);
			return -1;
		}

		*new_insn = fake_jump;
		return 0;
	}

	last_new_insn = NULL;
	insn = *new_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->new_off + special_alt->new_len)
			break;

		last_new_insn = insn;

		insn->ignore = orig_insn->ignore_alts;
		insn->func = orig_insn->func;

		if (insn->type != INSN_JUMP_CONDITIONAL &&
		    insn->type != INSN_JUMP_UNCONDITIONAL)
			continue;

		if (!insn->immediate)
			continue;

		dest_off = insn->offset + insn->len + insn->immediate;
		if (dest_off == special_alt->new_off + special_alt->new_len) {
			if (!fake_jump) {
				WARN("%s: alternative jump to end of section",
				     special_alt->orig_sec->name);
				return -1;
			}
			insn->jump_dest = fake_jump;
		}

		if (!insn->jump_dest) {
			WARN_FUNC("can't find alternative jump destination",
				  insn->sec, insn->offset);
			return -1;
		}
	}

	if (!last_new_insn) {
		WARN_FUNC("can't find last new alternative instruction",
			  special_alt->new_sec, special_alt->new_off);
		return -1;
	}

	if (fake_jump)
		list_add(&fake_jump->list, &last_new_insn->list);

	return 0;
}

/*
 * A jump table entry can either convert a nop to a jump or a jump to a nop.
 * If the original instruction is a jump, make the alt entry an effective nop
 * by just skipping the original instruction.
 */
static int handle_jump_alt(struct objtool_file *file,
			   struct special_alt *special_alt,
			   struct instruction *orig_insn,
			   struct instruction **new_insn)
{
	if (orig_insn->type == INSN_NOP)
		return 0;

	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
		WARN_FUNC("unsupported instruction at jump label",
			  orig_insn->sec, orig_insn->offset);
		return -1;
	}

	*new_insn = list_next_entry(orig_insn, list);
	return 0;
}

/*
 * Read all the special sections which have alternate instructions which can be
 * patched in or redirected to at runtime.  Each instruction having alternate
 * instruction(s) has them added to its insn->alts list, which will be
 * traversed in validate_branch().
 */
static int add_special_section_alts(struct objtool_file *file)
{
	struct list_head special_alts;
	struct instruction *orig_insn, *new_insn;
	struct special_alt *special_alt, *tmp;
	struct alternative *alt;
	int ret;

	ret = special_get_alts(file->elf, &special_alts);
	if (ret)
		return ret;

	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {

		orig_insn = find_insn(file, special_alt->orig_sec,
				      special_alt->orig_off);
		if (!orig_insn) {
			WARN_FUNC("special: can't find orig instruction",
				  special_alt->orig_sec, special_alt->orig_off);
			ret = -1;
			goto out;
		}

		new_insn = NULL;
		if (!special_alt->group || special_alt->new_len) {
			new_insn = find_insn(file, special_alt->new_sec,
					     special_alt->new_off);
			if (!new_insn) {
				WARN_FUNC("special: can't find new instruction",
					  special_alt->new_sec,
					  special_alt->new_off);
				ret = -1;
				goto out;
			}
		}

		if (special_alt->group) {
			ret = handle_group_alt(file, special_alt, orig_insn,
					       &new_insn);
			if (ret)
				goto out;
		} else if (special_alt->jump_or_nop) {
			ret = handle_jump_alt(file, special_alt, orig_insn,
					      &new_insn);
			if (ret)
				goto out;
		}

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			ret = -1;
			goto out;
		}

		alt->insn = new_insn;
		alt->skip_orig = special_alt->skip_orig;
		orig_insn->ignore_alts |= special_alt->skip_alt;
		list_add_tail(&alt->list, &orig_insn->alts);

		list_del(&special_alt->list);
		free(special_alt);
	}

out:
	return ret;
}

static int add_switch_table(struct objtool_file *file, struct instruction *insn,
			    struct rela *table, struct rela *next_table)
{
	struct rela *rela = table;
	struct instruction *alt_insn;
	struct alternative *alt;
	struct symbol *pfunc = insn->func->pfunc;
	unsigned int prev_offset = 0;

	list_for_each_entry_from(rela, &table->rela_sec->rela_list, list) {
		if (rela == next_table)
			break;

		/* Make sure the switch table entries are consecutive: */
		if (prev_offset && rela->offset != prev_offset + 8)
			break;

		/* Detect function pointers from contiguous objects: */
		if (rela->sym->sec == pfunc->sec &&
		    rela->addend == pfunc->offset)
			break;

		alt_insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!alt_insn)
			break;

		/* Make sure the jmp dest is in the function or subfunction: */
		if (alt_insn->func->pfunc != pfunc)
			break;

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			return -1;
		}

		alt->insn = alt_insn;
		list_add_tail(&alt->list, &insn->alts);
		prev_offset = rela->offset;
	}

	if (!prev_offset) {
		WARN_FUNC("can't find switch jump table",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}

/*
 * find_switch_table() - Given a dynamic jump, find the switch jump table in
 * .rodata associated with it.
 *
 * There are 3 basic patterns:
 *
 * 1. jmpq *[rodata addr](,%reg,8)
 *
 *    This is the most common case by far.  It jumps to an address in a simple
 *    jump table which is stored in .rodata.
 *
 * 2. jmpq *[rodata addr](%rip)
 *
 *    This is caused by a rare GCC quirk, currently only seen in three driver
 *    functions in the kernel, only with certain obscure non-distro configs.
 *
 *    As part of an optimization, GCC makes a copy of an existing switch jump
 *    table, modifies it, and then hard-codes the jump (albeit with an indirect
 *    jump) to use a single entry in the table.  The rest of the jump table and
 *    some of its jump targets remain as dead code.
 *
 *    In such a case we can just crudely ignore all unreachable instruction
 *    warnings for the entire object file.  Ideally we would just ignore them
 *    for the function, but that would require redesigning the code quite a
 *    bit.  And honestly that's just not worth doing: unreachable instruction
 *    warnings are of questionable value anyway, and this is such a rare issue.
 *
 * 3. mov [rodata addr],%reg1
 *    ... some instructions ...
 *    jmpq *(%reg1,%reg2,8)
 *
 *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
 *    writing, there are 11 occurrences of it in the allmodconfig kernel.
 *
 *    As of GCC 7 there are quite a few more of these and the 'in between' code
 *    is significant.  Especially with KASAN enabled, some of the code between
 *    the mov and jmpq uses .rodata itself, which can confuse things.
 *
 * TODO: Once we have DWARF CFI and smarter instruction decoding logic,
 * ensure the same register is used in the mov and jump instructions.
 *
 * NOTE: RETPOLINE made it harder still to decode dynamic jumps.
 */
static struct rela *find_switch_table(struct objtool_file *file,
				      struct symbol *func,
				      struct instruction *insn)
{
	struct rela *text_rela, *rodata_rela;
	struct instruction *orig_insn = insn;
	struct section *rodata_sec;
	unsigned long table_offset;

	/*
	 * Backward search using the @first_jump_src links: these help avoid
	 * much of the 'in between' code, which could otherwise confuse us.
	 */
	for (;
	     &insn->list != &file->insn_list &&
	     insn->sec == func->sec &&
	     insn->offset >= func->offset;

	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {

		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
			break;

		/* allow small jumps within the range */
		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
		    insn->jump_dest &&
		    (insn->jump_dest->offset <= insn->offset ||
		     insn->jump_dest->offset > orig_insn->offset))
			break;

		/* look for a relocation which references .rodata */
		text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
						    insn->len);
		if (!text_rela || text_rela->sym->type != STT_SECTION ||
		    !text_rela->sym->sec->rodata)
			continue;

		table_offset = text_rela->addend;
		rodata_sec = text_rela->sym->sec;

		if (text_rela->type == R_X86_64_PC32)
			table_offset += 4;

		/*
		 * Make sure the .rodata address isn't associated with a
		 * symbol.  gcc jump tables are anonymous data.
		 */
		if (find_symbol_containing(rodata_sec, table_offset))
			continue;

		rodata_rela = find_rela_by_dest(rodata_sec, table_offset);
		if (rodata_rela) {
			/*
			 * Use of RIP-relative switch jumps is quite rare, and
			 * indicates a rare GCC quirk/bug which can leave dead
			 * code behind.
			 */
			if (text_rela->type == R_X86_64_PC32)
				file->ignore_unreachables = true;

			return rodata_rela;
		}
	}

	return NULL;
}


static int add_func_switch_tables(struct objtool_file *file,
				  struct symbol *func)
{
	struct instruction *insn, *last = NULL, *prev_jump = NULL;
	struct rela *rela, *prev_rela = NULL;
	int ret;

	func_for_each_insn_all(file, func, insn) {
		if (!last)
			last = insn;

		/*
		 * Store back-pointers for unconditional forward jumps such
		 * that find_switch_table() can back-track using those and
		 * avoid some potentially confusing code.
		 */
		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
		    insn->offset > last->offset &&
		    insn->jump_dest->offset > insn->offset &&
		    !insn->jump_dest->first_jump_src) {

			insn->jump_dest->first_jump_src = insn;
			last = insn->jump_dest;
		}

		if (insn->type != INSN_JUMP_DYNAMIC)
			continue;

		rela = find_switch_table(file, func, insn);
		if (!rela)
			continue;

		/*
		 * We found a switch table, but we don't know yet how big it
		 * is.  Don't add it until we reach the end of the function or
		 * the beginning of another switch table in the same function.
		 */
		if (prev_jump) {
			ret = add_switch_table(file, prev_jump, prev_rela, rela);
			if (ret)
				return ret;
		}

		prev_jump = insn;
		prev_rela = rela;
	}

	if (prev_jump) {
		ret = add_switch_table(file, prev_jump, prev_rela, NULL);
		if (ret)
			return ret;
	}

	return 0;
}

/*
 * For some switch statements, gcc generates a jump table in the .rodata
 * section which contains a list of addresses within the function to jump to.
 * This finds these jump tables and adds them to the insn->alts lists.
 */
static int add_switch_table_alts(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	int ret;

	if (!file->rodata)
		return 0;

	for_each_sec(file, sec) {
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC)
				continue;

			ret = add_func_switch_tables(file, func);
			if (ret)
				return ret;
		}
	}

	return 0;
}

static int read_unwind_hints(struct objtool_file *file)
{
	struct section *sec, *relasec;
	struct rela *rela;
	struct unwind_hint *hint;
	struct instruction *insn;
	struct cfi_reg *cfa;
	int i;

	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
	if (!sec)
		return 0;

	relasec = sec->rela;
	if (!relasec) {
		WARN("missing .rela.discard.unwind_hints section");
		return -1;
	}

	if (sec->len % sizeof(struct unwind_hint)) {
		WARN("struct unwind_hint size mismatch");
		return -1;
	}

	file->hints = true;

	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
		hint = (struct unwind_hint *)sec->data->d_buf + i;

		rela = find_rela_by_dest(sec, i * sizeof(*hint));
		if (!rela) {
			WARN("can't find rela for unwind_hints[%d]", i);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("can't find insn for unwind_hints[%d]", i);
			return -1;
		}

		cfa = &insn->state.cfa;

		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
			insn->save = true;
			continue;

		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
			insn->restore = true;
			insn->hint = true;
			continue;
		}

		insn->hint = true;

		switch (hint->sp_reg) {
		case ORC_REG_UNDEFINED:
			cfa->base = CFI_UNDEFINED;
			break;
		case ORC_REG_SP:
			cfa->base = CFI_SP;
			break;
		case ORC_REG_BP:
			cfa->base = CFI_BP;
			break;
		case ORC_REG_SP_INDIRECT:
			cfa->base = CFI_SP_INDIRECT;
			break;
		case ORC_REG_R10:
			cfa->base = CFI_R10;
			break;
		case ORC_REG_R13:
			cfa->base = CFI_R13;
			break;
		case ORC_REG_DI:
			cfa->base = CFI_DI;
			break;
		case ORC_REG_DX:
			cfa->base = CFI_DX;
			break;
		default:
			WARN_FUNC("unsupported unwind_hint sp base reg %d",
				  insn->sec, insn->offset, hint->sp_reg);
			return -1;
		}

		cfa->offset = hint->sp_offset;
		insn->state.type = hint->type;
		insn->state.end = hint->end;
	}

	return 0;
}

static int read_retpoline_hints(struct objtool_file *file)
{
	struct section *sec;
	struct instruction *insn;
	struct rela *rela;

	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
	if (!sec)
		return 0;

	list_for_each_entry(rela, &sec->rela_list, list) {
		if (rela->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, rela->sym->sec, rela->addend);
		if (!insn) {
			WARN("bad .discard.retpoline_safe entry");
			return -1;
		}

		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC) {
			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
				  insn->sec, insn->offset);
			return -1;
		}

		insn->retpoline_safe = true;
	}

	return 0;
}

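/*
 * Flag the .rodata sections which may contain GCC switch jump tables, so that
 * add_switch_table_alts() and find_switch_table() know where to look for them.
 */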
static void mark_rodata(struct objtool_file *file)
{
	struct section *sec;
	bool found = false;

	/*
	 * This searches for the .rodata section or multiple .rodata.func_name
	 * sections if -fdata-sections is being used.  The .str1.1 and .str1.8
	 * rodata sections are ignored as they don't contain jump tables.
	 */
	for_each_sec(file, sec) {
		if (!strncmp(sec->name, ".rodata", 7) &&
		    !strstr(sec->name, ".str1.")) {
			sec->rodata = true;
			found = true;
		}
	}

	file->rodata = found;
}

static int decode_sections(struct objtool_file *file)
{
	int ret;

	mark_rodata(file);

	ret = decode_instructions(file);
	if (ret)
		return ret;

	ret = add_dead_ends(file);
	if (ret)
		return ret;

	add_ignores(file);
	add_uaccess_safe(file);

	ret = add_ignore_alternatives(file);
	if (ret)
		return ret;

	ret = add_jump_destinations(file);
	if (ret)
		return ret;

	ret = add_special_section_alts(file);
	if (ret)
		return ret;

	ret = add_call_destinations(file);
	if (ret)
		return ret;

	ret = add_switch_table_alts(file);
	if (ret)
		return ret;

	ret = read_unwind_hints(file);
	if (ret)
		return ret;

	ret = read_retpoline_hints(file);
	if (ret)
		return ret;

	return 0;
}

static bool is_fentry_call(struct instruction *insn)
{
	if (insn->type == INSN_CALL &&
	    insn->call_dest->type == STT_NOTYPE &&
	    !strcmp(insn->call_dest->name, "__fentry__"))
		return true;

	return false;
}

static bool has_modified_stack_frame(struct insn_state *state)
{
	int i;

	if (state->cfa.base != initial_func_cfi.cfa.base ||
	    state->cfa.offset != initial_func_cfi.cfa.offset ||
	    state->stack_size != initial_func_cfi.cfa.offset ||
	    state->drap)
		return true;

	for (i = 0; i < CFI_NUM_REGS; i++)
		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
			return true;

	return false;
}

static bool has_valid_stack_frame(struct insn_state *state)
{
	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
	    state->regs[CFI_BP].offset == -16)
		return true;

	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
		return true;

	return false;
}

static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
{
	struct cfi_reg *cfa = &state->cfa;
	struct stack_op *op = &insn->stack_op;

	if (cfa->base != CFI_SP)
		return 0;

	/* push */
	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
		cfa->offset += 8;

	/* pop */
	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
		cfa->offset -= 8;

	/* add immediate to sp */
	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
		cfa->offset -= op->src.offset;

	return 0;
}

static void save_reg(struct insn_state *state, unsigned char reg, int base,
		     int offset)
{
	if (arch_callee_saved_reg(reg) &&
	    state->regs[reg].base == CFI_UNDEFINED) {
		state->regs[reg].base = base;
		state->regs[reg].offset = offset;
	}
}

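/* Forget a callee-saved register's stored location, e.g. after it's popped. */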
static void restore_reg(struct insn_state *state, unsigned char reg)
{
	state->regs[reg].base = CFI_UNDEFINED;
	state->regs[reg].offset = 0;
}

/*
 * A note about DRAP stack alignment:
 *
 * GCC has the concept of a DRAP register, which is used to help keep track of
 * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
 * register.  The typical DRAP pattern is:
 *
 *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
 *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
 *   41 ff 72 f8		pushq  -0x8(%r10)
 *   55				push   %rbp
 *   48 89 e5			mov    %rsp,%rbp
 *				(more pushes)
 *   41 52			push   %r10
 *				...
 *   41 5a			pop    %r10
 *				(more pops)
 *   5d				pop    %rbp
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * There are some variations in the epilogues, like:
 *
 *   5b				pop    %rbx
 *   41 5a			pop    %r10
 *   41 5c			pop    %r12
 *   41 5d			pop    %r13
 *   41 5e			pop    %r14
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * and:
 *
 *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
 *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
 *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
 *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * Sometimes r13 is used as the DRAP register, in which case it's saved and
 * restored beforehand:
 *
 *   41 55			push   %r13
 *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
 *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
 *				...
 *   49 8d 65 f0		lea    -0x10(%r13),%rsp
 *   41 5d			pop    %r13
 *   c3				retq
 */
static int update_insn_state(struct instruction *insn, struct insn_state *state)
{
	struct stack_op *op = &insn->stack_op;
	struct cfi_reg *cfa = &state->cfa;
	struct cfi_reg *regs = state->regs;

	/* stack operations don't make sense with an undefined CFA */
	if (cfa->base == CFI_UNDEFINED) {
		if (insn->func) {
			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
			return -1;
		}
		return 0;
	}

	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
		return update_insn_state_regs(insn, state);

	switch (op->dest.type) {

	case OP_DEST_REG:
		switch (op->src.type) {

		case OP_SRC_REG:
			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
			    cfa->base == CFI_SP &&
			    regs[CFI_BP].base == CFI_CFA &&
			    regs[CFI_BP].offset == -cfa->offset) {

				/* mov %rsp, %rbp */
				cfa->base = op->dest.reg;
				state->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP &&
				 op->dest.reg == CFI_BP && state->drap) {

				/* drap: mov %rsp, %rbp */
				regs[CFI_BP].base = CFI_BP;
				regs[CFI_BP].offset = -state->stack_size;
				state->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/*
				 * mov %rsp, %reg
				 *
				 * This is needed for the rare case where GCC
				 * does:
				 *
				 *   mov    %rsp, %rax
				 *   ...
				 *   mov    %rax, %rsp
				 */
				state->vals[op->dest.reg].base = CFI_CFA;
				state->vals[op->dest.reg].offset = -state->stack_size;
			}

			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
				 cfa->base == CFI_BP) {

				/*
				 * mov %rbp, %rsp
				 *
				 * Restore the original stack pointer (Clang).
				 */
				state->stack_size = -state->regs[CFI_BP].offset;
			}

			else if (op->dest.reg == cfa->base) {

				/* mov %reg, %rsp */
				if (cfa->base == CFI_SP &&
				    state->vals[op->src.reg].base == CFI_CFA) {

					/*
					 * This is needed for the rare case
					 * where GCC does something dumb like:
					 *
					 *   lea    0x8(%rsp), %rcx
					 *   ...
					 *   mov    %rcx, %rsp
					 */
					cfa->offset = -state->vals[op->src.reg].offset;
					state->stack_size = cfa->offset;

				} else {
					cfa->base = CFI_UNDEFINED;
					cfa->offset = 0;
				}
			}

			break;

		case OP_SRC_ADD:
			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {

				/* add imm, %rsp */
				state->stack_size -= op->src.offset;
				if (cfa->base == CFI_SP)
					cfa->offset -= op->src.offset;
				break;
			}

			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {

				/* lea disp(%rbp), %rsp */
				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
				break;
			}

			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/* drap: lea disp(%rsp), %drap */
				state->drap_reg = op->dest.reg;

				/*
				 * lea disp(%rsp), %reg
				 *
				 * This is needed for the rare case where GCC
				 * does something dumb like:
				 *
				 *   lea    0x8(%rsp), %rcx
				 *   ...
				 *   mov    %rcx, %rsp
				 */
				state->vals[op->dest.reg].base = CFI_CFA;
				state->vals[op->dest.reg].offset = \
					-state->stack_size + op->src.offset;

				break;
			}

			if (state->drap && op->dest.reg == CFI_SP &&
			    op->src.reg == state->drap_reg) {

				/* drap: lea disp(%drap), %rsp */
				cfa->base = CFI_SP;
				cfa->offset = state->stack_size = -op->src.offset;
				state->drap_reg = CFI_UNDEFINED;
				state->drap = false;
				break;
			}

			if (op->dest.reg == state->cfa.base) {
				WARN_FUNC("unsupported stack register modification",
					  insn->sec, insn->offset);
				return -1;
			}

			break;

		case OP_SRC_AND:
			if (op->dest.reg != CFI_SP ||
			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
				WARN_FUNC("unsupported stack pointer realignment",
					  insn->sec, insn->offset);
				return -1;
			}

			if (state->drap_reg != CFI_UNDEFINED) {
				/* drap: and imm, %rsp */
				cfa->base = state->drap_reg;
				cfa->offset = state->stack_size = 0;
				state->drap = true;
			}

			/*
			 * Older versions of GCC (4.8ish) realign the stack
			 * without DRAP, with a frame pointer.
			 */

			break;

		case OP_SRC_POP:
		case OP_SRC_POPF:
			if (!state->drap && op->dest.type == OP_DEST_REG &&
			    op->dest.reg == cfa->base) {

				/* pop %rbp */
				cfa->base = CFI_SP;
			}

			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
			    op->dest.type == OP_DEST_REG &&
			    op->dest.reg == state->drap_reg &&
			    state->drap_offset == -state->stack_size) {

				/* drap: pop %drap */
				cfa->base = state->drap_reg;
				cfa->offset = 0;
				state->drap_offset = -1;

			} else if (regs[op->dest.reg].offset == -state->stack_size) {

				/* pop %reg */
				restore_reg(state, op->dest.reg);
			}

			state->stack_size -= 8;
			if (cfa->base == CFI_SP)
				cfa->offset -= 8;

			break;

		case OP_SRC_REG_INDIRECT:
			if (state->drap && op->src.reg == CFI_BP &&
			    op->src.offset == state->drap_offset) {

				/* drap: mov disp(%rbp), %drap */
				cfa->base = state->drap_reg;
				cfa->offset = 0;
				state->drap_offset = -1;
			}

			if (state->drap && op->src.reg == CFI_BP &&
			    op->src.offset == regs[op->dest.reg].offset) {

				/* drap: mov disp(%rbp), %reg */
				restore_reg(state, op->dest.reg);

			} else if (op->src.reg == cfa->base &&
				   op->src.offset == regs[op->dest.reg].offset + cfa->offset) {

				/* mov disp(%rbp), %reg */
				/* mov disp(%rsp), %reg */
				restore_reg(state, op->dest.reg);
			}

			break;

		default:
			WARN_FUNC("unknown stack-related instruction",
				  insn->sec, insn->offset);
			return -1;
		}

		break;

	case OP_DEST_PUSH:
	case OP_DEST_PUSHF:
		state->stack_size += 8;
		if (cfa->base == CFI_SP)
			cfa->offset += 8;

		if (op->src.type != OP_SRC_REG)
			break;

		if (state->drap) {
			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {

				/* drap: push %drap */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = -state->stack_size;

				/* save drap so we know when to restore it */
				state->drap_offset = -state->stack_size;

			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {

				/* drap: push %rbp */
				state->stack_size = 0;

			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: push %reg */
				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
			}

		} else {

			/* push %reg */
			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
		}

		/* detect when asm code uses rbp as a scratch register */
		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
		    cfa->base != CFI_BP)
			state->bp_scratch = true;
		break;

	case OP_DEST_REG_INDIRECT:

		if (state->drap) {
			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {

				/* drap: mov %drap, disp(%rbp) */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = op->dest.offset;

				/* save drap offset so we know when to restore it */
				state->drap_offset = op->dest.offset;
			}

			else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: mov reg, disp(%rbp) */
				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
			}

		} else if (op->dest.reg == cfa->base) {

			/* mov reg, disp(%rbp) */
			/* mov reg, disp(%rsp) */
			save_reg(state, op->src.reg, CFI_CFA,
				 op->dest.offset - state->cfa.offset);
		}

		break;

	case OP_DEST_LEAVE:
		if ((!state->drap && cfa->base != CFI_BP) ||
		    (state->drap && cfa->base != state->drap_reg)) {
			WARN_FUNC("leave instruction with modified stack frame",
				  insn->sec, insn->offset);
			return -1;
		}

		/* leave (mov %rbp, %rsp; pop %rbp) */

		state->stack_size = -state->regs[CFI_BP].offset - 8;
		restore_reg(state, CFI_BP);

		if (!state->drap) {
			cfa->base = CFI_SP;
			cfa->offset -= 8;
		}

		break;

	case OP_DEST_MEM:
		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
			WARN_FUNC("unknown stack-related memory operation",
				  insn->sec, insn->offset);
			return -1;
		}

		/* pop mem */
		state->stack_size -= 8;
		if (cfa->base == CFI_SP)
			cfa->offset -= 8;

		break;

	default:
		WARN_FUNC("unknown stack-related instruction",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}

static bool insn_state_match(struct instruction *insn, struct insn_state *state)
{
	struct insn_state *state1 = &insn->state, *state2 = state;
	int i;

	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
			  insn->sec, insn->offset,
			  state1->cfa.base, state1->cfa.offset,
			  state2->cfa.base, state2->cfa.offset);

	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
		for (i = 0; i < CFI_NUM_REGS; i++) {
			if (!memcmp(&state1->regs[i], &state2->regs[i],
				    sizeof(struct cfi_reg)))
				continue;

			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
				  insn->sec, insn->offset,
				  i, state1->regs[i].base, state1->regs[i].offset,
				  i, state2->regs[i].base, state2->regs[i].offset);
			break;
		}

	} else if (state1->type != state2->type) {
		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
			  insn->sec, insn->offset, state1->type, state2->type);

	} else if (state1->drap != state2->drap ||
		   (state1->drap && state1->drap_reg != state2->drap_reg) ||
		   (state1->drap && state1->drap_offset != state2->drap_offset)) {
		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
			  insn->sec, insn->offset,
			  state1->drap, state1->drap_reg, state1->drap_offset,
			  state2->drap, state2->drap_reg, state2->drap_offset);

	} else
		return true;

	return false;
}

static inline bool func_uaccess_safe(struct symbol *func)
{
	if (func)
		return func->alias->uaccess_safe;

	return false;
}

static inline const char *insn_dest_name(struct instruction *insn)
{
	if (insn->call_dest)
		return insn->call_dest->name;

	return "{dynamic}";
}

static int validate_call(struct instruction *insn, struct insn_state *state)
{
	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
		WARN_FUNC("call to %s() with UACCESS enabled",
			  insn->sec, insn->offset, insn_dest_name(insn));
		return 1;
	}

	if (state->df) {
		WARN_FUNC("call to %s() with DF set",
			  insn->sec, insn->offset, insn_dest_name(insn));
		return 1;
	}

	return 0;
}

static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
{
	if (has_modified_stack_frame(state)) {
		WARN_FUNC("sibling call from callable instruction with modified stack frame",
			  insn->sec, insn->offset);
		return 1;
	}

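	/*
	 * A sibling call is effectively a tail call, so it's still subject to
	 * the same UACCESS/DF checks as a regular call.
	 */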
	return validate_call(insn, state);
}

/*
 * Follow the branch starting at the given instruction, and recursively follow
 * any other branches (jumps).  Meanwhile, track the frame pointer state at
 * each instruction and validate all the rules described in
 * tools/objtool/Documentation/stack-validation.txt.
 */
static int validate_branch(struct objtool_file *file, struct instruction *first,
			   struct insn_state state)
{
	struct alternative *alt;
	struct instruction *insn, *next_insn;
	struct section *sec;
	struct symbol *func = NULL;
	int ret;

	insn = first;
	sec = insn->sec;

	if (insn->alt_group && list_empty(&insn->alts)) {
		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
			  sec, insn->offset);
		return 1;
	}

	while (1) {
		next_insn = next_insn_same_sec(file, insn);

		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
			WARN("%s() falls through to next function %s()",
			     func->name, insn->func->name);
			return 1;
		}

		func = insn->func ? insn->func->pfunc : NULL;

		if (func && insn->ignore) {
			WARN_FUNC("BUG: why am I validating an ignored function?",
				  sec, insn->offset);
			return 1;
		}

		if (insn->visited) {
			if (!insn->hint && !insn_state_match(insn, &state))
				return 1;

			/* If we were here with AC=0, but now have AC=1, go again */
			if (insn->state.uaccess || !state.uaccess)
				return 0;
		}

		if (insn->hint) {
			if (insn->restore) {
				struct instruction *save_insn, *i;

				i = insn;
				save_insn = NULL;
				func_for_each_insn_continue_reverse(file, insn->func, i) {
					if (i->save) {
						save_insn = i;
						break;
					}
				}

				if (!save_insn) {
					WARN_FUNC("no corresponding CFI save for CFI restore",
						  sec, insn->offset);
					return 1;
				}

				if (!save_insn->visited) {
					/*
					 * Oops, no state to copy yet.
					 * Hopefully we can reach this
					 * instruction from another branch
					 * after the save insn has been
					 * visited.
					 */
					if (insn == first)
						return 0;

					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
						  sec, insn->offset);
					return 1;
				}

				insn->state = save_insn->state;
			}

			state = insn->state;

		} else
			insn->state = state;

		insn->visited = true;

		if (!insn->ignore_alts) {
			bool skip_orig = false;

			list_for_each_entry(alt, &insn->alts, list) {
				if (alt->skip_orig)
					skip_orig = true;

				ret = validate_branch(file, alt->insn, state);
				if (ret) {
					if (backtrace)
						BT_FUNC("(alt)", insn);
					return ret;
				}
			}

			if (skip_orig)
				return 0;
		}

		switch (insn->type) {

		case INSN_RETURN:
			if (state.uaccess && !func_uaccess_safe(func)) {
				WARN_FUNC("return with UACCESS enabled", sec, insn->offset);
				return 1;
			}

			if (!state.uaccess && func_uaccess_safe(func)) {
				WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function", sec, insn->offset);
				return 1;
			}

			if (state.df) {
				WARN_FUNC("return with DF set", sec, insn->offset);
				return 1;
			}

			if (func && has_modified_stack_frame(&state)) {
				WARN_FUNC("return with modified stack frame",
					  sec, insn->offset);
				return 1;
			}

			if (state.bp_scratch) {
				WARN("%s uses BP as a scratch register",
				     insn->func->name);
				return 1;
			}

			return 0;

		case INSN_CALL:
		case INSN_CALL_DYNAMIC:
			ret = validate_call(insn, &state);
			if (ret)
				return ret;

			if (insn->type == INSN_CALL) {
				if (is_fentry_call(insn))
					break;

				ret = dead_end_function(file, insn->call_dest);
				if (ret == 1)
					return 0;
				if (ret == -1)
					return 1;
			}

			if (!no_fp && func && !has_valid_stack_frame(&state)) {
				WARN_FUNC("call without frame pointer save/setup",
					  sec, insn->offset);
				return 1;
			}
			break;

		case INSN_JUMP_CONDITIONAL:
		case INSN_JUMP_UNCONDITIONAL:
			if (func && !insn->jump_dest) {
				ret = validate_sibling_call(insn, &state);
				if (ret)
					return ret;

			} else if (insn->jump_dest &&
				   (!func || !insn->jump_dest->func ||
				    insn->jump_dest->func->pfunc == func)) {
				ret = validate_branch(file, insn->jump_dest,
						      state);
				if (ret) {
					if (backtrace)
						BT_FUNC("(branch)", insn);
					return ret;
				}
			}

			if (insn->type == INSN_JUMP_UNCONDITIONAL)
				return 0;

			break;

		case INSN_JUMP_DYNAMIC:
			if (func && list_empty(&insn->alts)) {
				ret = validate_sibling_call(insn, &state);
				if (ret)
					return ret;
			}

			return 0;

		case INSN_CONTEXT_SWITCH:
			if (func && (!next_insn || !next_insn->hint)) {
				WARN_FUNC("unsupported instruction in callable function",
					  sec, insn->offset);
				return 1;
			}
			return 0;

		case INSN_STACK:
			if (update_insn_state(insn, &state))
				return 1;

			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
				if (!state.uaccess_stack) {
					state.uaccess_stack = 1;
				} else if (state.uaccess_stack >> 31) {
					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
					return 1;
				}
				state.uaccess_stack <<= 1;
				state.uaccess_stack |= state.uaccess;
			}

			if (insn->stack_op.src.type == OP_SRC_POPF) {
				if (state.uaccess_stack) {
					state.uaccess = state.uaccess_stack & 1;
		case INSN_STACK:
			if (update_insn_state(insn, &state))
				return 1;

			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
				if (!state.uaccess_stack) {
					state.uaccess_stack = 1;
				} else if (state.uaccess_stack >> 31) {
					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
					return 1;
				}
				state.uaccess_stack <<= 1;
				state.uaccess_stack |= state.uaccess;
			}

			if (insn->stack_op.src.type == OP_SRC_POPF) {
				if (state.uaccess_stack) {
					state.uaccess = state.uaccess_stack & 1;
					state.uaccess_stack >>= 1;
					if (state.uaccess_stack == 1)
						state.uaccess_stack = 0;
				}
			}

			break;

		case INSN_STAC:
			if (state.uaccess) {
				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
				return 1;
			}

			state.uaccess = true;
			break;

		case INSN_CLAC:
			if (!state.uaccess && insn->func) {
				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
				return 1;
			}

			if (func_uaccess_safe(func) && !state.uaccess_stack) {
				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
				return 1;
			}

			state.uaccess = false;
			break;

		case INSN_STD:
			if (state.df)
				WARN_FUNC("recursive STD", sec, insn->offset);

			state.df = true;
			break;

		case INSN_CLD:
			if (!state.df && insn->func)
				WARN_FUNC("redundant CLD", sec, insn->offset);

			state.df = false;
			break;

		default:
			break;
		}

		if (insn->dead_end)
			return 0;

		if (!next_insn) {
			if (state.cfa.base == CFI_UNDEFINED)
				return 0;
			WARN("%s: unexpected end of section", sec->name);
			return 1;
		}

		insn = next_insn;
	}

	return 0;
}

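/*
 * Note that validate_branch() takes its insn_state by value: each recursive
 * call (alternatives, jump targets) explores its branch with a private copy,
 * while the state saved in insn->state is what later visits are compared
 * against via insn_state_match().
 */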
static int validate_unwind_hints(struct objtool_file *file)
{
	struct instruction *insn;
	int ret, warnings = 0;
	struct insn_state state;

	if (!file->hints)
		return 0;

	clear_insn_state(&state);

	for_each_insn(file, insn) {
		if (insn->hint && !insn->visited) {
			ret = validate_branch(file, insn, state);
			if (ret && backtrace)
				BT_FUNC("<=== (hint)", insn);
			warnings += ret;
		}
	}

	return warnings;
}

static int validate_retpoline(struct objtool_file *file)
{
	struct instruction *insn;
	int warnings = 0;

	for_each_insn(file, insn) {
		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC)
			continue;

		if (insn->retpoline_safe)
			continue;

		/*
		 * .init.text code is run before userspace and thus doesn't
		 * strictly need retpolines, except for modules: their
		 * .init.text is loaded late, so it very much does need
		 * retpolines.
		 */
		if (!strcmp(insn->sec->name, ".init.text") && !module)
			continue;

		WARN_FUNC("indirect %s found in RETPOLINE build",
			  insn->sec, insn->offset,
			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");

		warnings++;
	}

	return warnings;
}

static bool is_kasan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
}

static bool is_ubsan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn->call_dest->name,
			"__ubsan_handle_builtin_unreachable"));
}

static bool ignore_unreachable_insn(struct instruction *insn)
{
	int i;

	if (insn->ignore || insn->type == INSN_NOP)
		return true;

	/*
	 * Ignore any unused exceptions. This can happen when a whitelisted
	 * function has an exception table entry.
	 *
	 * Also ignore alternative replacement instructions. This can happen
	 * when a whitelisted function uses one of the ALTERNATIVE macros.
	 */
	if (!strcmp(insn->sec->name, ".fixup") ||
	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
	    !strcmp(insn->sec->name, ".altinstr_aux"))
		return true;

	/*
	 * Check if this (or a subsequent) instruction is related to
	 * CONFIG_UBSAN or CONFIG_KASAN.
	 *
	 * End the search at 5 instructions to avoid going into the weeds.
	 */
	if (!insn->func)
		return false;
	for (i = 0; i < 5; i++) {

		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
			return true;

		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
			if (insn->jump_dest &&
			    insn->jump_dest->func == insn->func) {
				insn = insn->jump_dest;
				continue;
			}

			break;
		}

		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
			break;

		insn = list_next_entry(insn, list);
	}

	return false;
}

static int validate_functions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	struct instruction *insn;
	struct insn_state state;
	int ret, warnings = 0;

	clear_insn_state(&state);

	state.cfa = initial_func_cfi.cfa;
	memcpy(&state.regs, &initial_func_cfi.regs,
	       CFI_NUM_REGS * sizeof(struct cfi_reg));
	state.stack_size = initial_func_cfi.cfa.offset;

	for_each_sec(file, sec) {
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC || func->pfunc != func)
				continue;

			insn = find_insn(file, sec, func->offset);
			if (!insn || insn->ignore)
				continue;

			state.uaccess = func->alias->uaccess_safe;

			ret = validate_branch(file, insn, state);
			if (ret && backtrace)
				BT_FUNC("<=== (func)", insn);
			warnings += ret;
		}
	}

	return warnings;
}

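/*
 * The entry state used by validate_functions() above comes from
 * arch_initial_func_cfi_state().  On x86-64 this is expected to describe the
 * frame immediately after the call into the function: CFA = %rsp + 8 with the
 * return address at CFA - 8 and every other register undefined (illustrative;
 * the exact values are defined by the arch code, not here).
 */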
static int validate_reachable_instructions(struct objtool_file *file)
{
	struct instruction *insn;

	if (file->ignore_unreachables)
		return 0;

	for_each_insn(file, insn) {
		if (insn->visited || ignore_unreachable_insn(insn))
			continue;

		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
		return 1;
	}

	return 0;
}

static void cleanup(struct objtool_file *file)
{
	struct instruction *insn, *tmpinsn;
	struct alternative *alt, *tmpalt;

	list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
		list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
			list_del(&alt->list);
			free(alt);
		}
		list_del(&insn->list);
		hash_del(&insn->hash);
		free(insn);
	}
	elf_close(file->elf);
}

static struct objtool_file file;

int check(const char *_objname, bool orc)
{
	int ret, warnings = 0;

	objname = _objname;

	file.elf = elf_open(objname, orc ? O_RDWR : O_RDONLY);
	if (!file.elf)
		return 1;

	INIT_LIST_HEAD(&file.insn_list);
	hash_init(file.insn_hash);
	file.c_file = find_section_by_name(file.elf, ".comment");
	file.ignore_unreachables = no_unreachable;
	file.hints = false;

	arch_initial_func_cfi_state(&initial_func_cfi);

	ret = decode_sections(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	if (list_empty(&file.insn_list))
		goto out;

	if (retpoline) {
		ret = validate_retpoline(&file);
		if (ret < 0)
			return ret;
		warnings += ret;
	}

	ret = validate_functions(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	ret = validate_unwind_hints(&file);
	if (ret < 0)
		goto out;
	warnings += ret;

	if (!warnings) {
		ret = validate_reachable_instructions(&file);
		if (ret < 0)
			goto out;
		warnings += ret;
	}

	if (orc) {
		ret = create_orc(&file);
		if (ret < 0)
			goto out;

		ret = create_orc_sections(&file);
		if (ret < 0)
			goto out;

		ret = elf_write(file.elf);
		if (ret < 0)
			goto out;
	}

out:
	cleanup(&file);

	/* ignore warnings for now until we get all the code cleaned up */
	if (ret || warnings)
		return 0;
	return 0;
}
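/*
 * Typical usage (see the builtin-check and builtin-orc command code for the
 * authoritative callers): "objtool check foo.o" is expected to end up calling
 * check(objname, false), which opens the object read-only and only reports
 * warnings, while "objtool orc generate foo.o" is expected to call
 * check(objname, true), which opens the object read-write and writes the
 * generated ORC sections back into it.
 */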