1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com> 4 */ 5 6 #include <string.h> 7 #include <stdlib.h> 8 9 #include "builtin.h" 10 #include "cfi.h" 11 #include "arch.h" 12 #include "check.h" 13 #include "special.h" 14 #include "warn.h" 15 16 #include <linux/hashtable.h> 17 #include <linux/kernel.h> 18 19 #define FAKE_JUMP_OFFSET -1 20 21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table" 22 23 struct alternative { 24 struct list_head list; 25 struct instruction *insn; 26 bool skip_orig; 27 }; 28 29 const char *objname; 30 struct cfi_init_state initial_func_cfi; 31 32 struct instruction *find_insn(struct objtool_file *file, 33 struct section *sec, unsigned long offset) 34 { 35 struct instruction *insn; 36 37 hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) { 38 if (insn->sec == sec && insn->offset == offset) 39 return insn; 40 } 41 42 return NULL; 43 } 44 45 static struct instruction *next_insn_same_sec(struct objtool_file *file, 46 struct instruction *insn) 47 { 48 struct instruction *next = list_next_entry(insn, list); 49 50 if (!next || &next->list == &file->insn_list || next->sec != insn->sec) 51 return NULL; 52 53 return next; 54 } 55 56 static struct instruction *next_insn_same_func(struct objtool_file *file, 57 struct instruction *insn) 58 { 59 struct instruction *next = list_next_entry(insn, list); 60 struct symbol *func = insn->func; 61 62 if (!func) 63 return NULL; 64 65 if (&next->list != &file->insn_list && next->func == func) 66 return next; 67 68 /* Check if we're already in the subfunction: */ 69 if (func == func->cfunc) 70 return NULL; 71 72 /* Move to the subfunction: */ 73 return find_insn(file, func->cfunc->sec, func->cfunc->offset); 74 } 75 76 static struct instruction *prev_insn_same_sym(struct objtool_file *file, 77 struct instruction *insn) 78 { 79 struct instruction *prev = list_prev_entry(insn, list); 80 81 if (&prev->list != &file->insn_list && prev->func == insn->func) 82 return prev; 83 84 return NULL; 85 } 86 87 #define func_for_each_insn(file, func, insn) \ 88 for (insn = find_insn(file, func->sec, func->offset); \ 89 insn; \ 90 insn = next_insn_same_func(file, insn)) 91 92 #define sym_for_each_insn(file, sym, insn) \ 93 for (insn = find_insn(file, sym->sec, sym->offset); \ 94 insn && &insn->list != &file->insn_list && \ 95 insn->sec == sym->sec && \ 96 insn->offset < sym->offset + sym->len; \ 97 insn = list_next_entry(insn, list)) 98 99 #define sym_for_each_insn_continue_reverse(file, sym, insn) \ 100 for (insn = list_prev_entry(insn, list); \ 101 &insn->list != &file->insn_list && \ 102 insn->sec == sym->sec && insn->offset >= sym->offset; \ 103 insn = list_prev_entry(insn, list)) 104 105 #define sec_for_each_insn_from(file, insn) \ 106 for (; insn; insn = next_insn_same_sec(file, insn)) 107 108 #define sec_for_each_insn_continue(file, insn) \ 109 for (insn = next_insn_same_sec(file, insn); insn; \ 110 insn = next_insn_same_sec(file, insn)) 111 112 static bool is_static_jump(struct instruction *insn) 113 { 114 return insn->type == INSN_JUMP_CONDITIONAL || 115 insn->type == INSN_JUMP_UNCONDITIONAL; 116 } 117 118 static bool is_sibling_call(struct instruction *insn) 119 { 120 /* An indirect jump is either a sibling call or a jump to a table. */ 121 if (insn->type == INSN_JUMP_DYNAMIC) 122 return list_empty(&insn->alts); 123 124 if (!is_static_jump(insn)) 125 return false; 126 127 /* add_jump_destinations() sets insn->call_dest for sibling calls. */ 128 return !!insn->call_dest; 129 } 130 131 /* 132 * This checks to see if the given function is a "noreturn" function. 133 * 134 * For global functions which are outside the scope of this object file, we 135 * have to keep a manual list of them. 136 * 137 * For local functions, we have to detect them manually by simply looking for 138 * the lack of a return instruction. 139 */ 140 static bool __dead_end_function(struct objtool_file *file, struct symbol *func, 141 int recursion) 142 { 143 int i; 144 struct instruction *insn; 145 bool empty = true; 146 147 /* 148 * Unfortunately these have to be hard coded because the noreturn 149 * attribute isn't provided in ELF data. 150 */ 151 static const char * const global_noreturns[] = { 152 "__stack_chk_fail", 153 "panic", 154 "do_exit", 155 "do_task_dead", 156 "__module_put_and_exit", 157 "complete_and_exit", 158 "__reiserfs_panic", 159 "lbug_with_loc", 160 "fortify_panic", 161 "usercopy_abort", 162 "machine_real_restart", 163 "rewind_stack_do_exit", 164 "kunit_try_catch_throw", 165 }; 166 167 if (!func) 168 return false; 169 170 if (func->bind == STB_WEAK) 171 return false; 172 173 if (func->bind == STB_GLOBAL) 174 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++) 175 if (!strcmp(func->name, global_noreturns[i])) 176 return true; 177 178 if (!func->len) 179 return false; 180 181 insn = find_insn(file, func->sec, func->offset); 182 if (!insn->func) 183 return false; 184 185 func_for_each_insn(file, func, insn) { 186 empty = false; 187 188 if (insn->type == INSN_RETURN) 189 return false; 190 } 191 192 if (empty) 193 return false; 194 195 /* 196 * A function can have a sibling call instead of a return. In that 197 * case, the function's dead-end status depends on whether the target 198 * of the sibling call returns. 199 */ 200 func_for_each_insn(file, func, insn) { 201 if (is_sibling_call(insn)) { 202 struct instruction *dest = insn->jump_dest; 203 204 if (!dest) 205 /* sibling call to another file */ 206 return false; 207 208 /* local sibling call */ 209 if (recursion == 5) { 210 /* 211 * Infinite recursion: two functions have 212 * sibling calls to each other. This is a very 213 * rare case. It means they aren't dead ends. 214 */ 215 return false; 216 } 217 218 return __dead_end_function(file, dest->func, recursion+1); 219 } 220 } 221 222 return true; 223 } 224 225 static bool dead_end_function(struct objtool_file *file, struct symbol *func) 226 { 227 return __dead_end_function(file, func, 0); 228 } 229 230 static void init_cfi_state(struct cfi_state *cfi) 231 { 232 int i; 233 234 for (i = 0; i < CFI_NUM_REGS; i++) { 235 cfi->regs[i].base = CFI_UNDEFINED; 236 cfi->vals[i].base = CFI_UNDEFINED; 237 } 238 cfi->cfa.base = CFI_UNDEFINED; 239 cfi->drap_reg = CFI_UNDEFINED; 240 cfi->drap_offset = -1; 241 } 242 243 static void init_insn_state(struct insn_state *state, struct section *sec) 244 { 245 memset(state, 0, sizeof(*state)); 246 init_cfi_state(&state->cfi); 247 248 /* 249 * We need the full vmlinux for noinstr validation, otherwise we can 250 * not correctly determine insn->call_dest->sec (external symbols do 251 * not have a section). 252 */ 253 if (vmlinux && sec) 254 state->noinstr = sec->noinstr; 255 } 256 257 /* 258 * Call the arch-specific instruction decoder for all the instructions and add 259 * them to the global instruction list. 260 */ 261 static int decode_instructions(struct objtool_file *file) 262 { 263 struct section *sec; 264 struct symbol *func; 265 unsigned long offset; 266 struct instruction *insn; 267 unsigned long nr_insns = 0; 268 int ret; 269 270 for_each_sec(file, sec) { 271 272 if (!(sec->sh.sh_flags & SHF_EXECINSTR)) 273 continue; 274 275 if (strcmp(sec->name, ".altinstr_replacement") && 276 strcmp(sec->name, ".altinstr_aux") && 277 strncmp(sec->name, ".discard.", 9)) 278 sec->text = true; 279 280 if (!strcmp(sec->name, ".noinstr.text") || 281 !strcmp(sec->name, ".entry.text")) 282 sec->noinstr = true; 283 284 for (offset = 0; offset < sec->len; offset += insn->len) { 285 insn = malloc(sizeof(*insn)); 286 if (!insn) { 287 WARN("malloc failed"); 288 return -1; 289 } 290 memset(insn, 0, sizeof(*insn)); 291 INIT_LIST_HEAD(&insn->alts); 292 INIT_LIST_HEAD(&insn->stack_ops); 293 init_cfi_state(&insn->cfi); 294 295 insn->sec = sec; 296 insn->offset = offset; 297 298 ret = arch_decode_instruction(file->elf, sec, offset, 299 sec->len - offset, 300 &insn->len, &insn->type, 301 &insn->immediate, 302 &insn->stack_ops); 303 if (ret) 304 goto err; 305 306 hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset)); 307 list_add_tail(&insn->list, &file->insn_list); 308 nr_insns++; 309 } 310 311 list_for_each_entry(func, &sec->symbol_list, list) { 312 if (func->type != STT_FUNC || func->alias != func) 313 continue; 314 315 if (!find_insn(file, sec, func->offset)) { 316 WARN("%s(): can't find starting instruction", 317 func->name); 318 return -1; 319 } 320 321 sym_for_each_insn(file, func, insn) 322 insn->func = func; 323 } 324 } 325 326 if (stats) 327 printf("nr_insns: %lu\n", nr_insns); 328 329 return 0; 330 331 err: 332 free(insn); 333 return ret; 334 } 335 336 static struct instruction *find_last_insn(struct objtool_file *file, 337 struct section *sec) 338 { 339 struct instruction *insn = NULL; 340 unsigned int offset; 341 unsigned int end = (sec->len > 10) ? sec->len - 10 : 0; 342 343 for (offset = sec->len - 1; offset >= end && !insn; offset--) 344 insn = find_insn(file, sec, offset); 345 346 return insn; 347 } 348 349 /* 350 * Mark "ud2" instructions and manually annotated dead ends. 351 */ 352 static int add_dead_ends(struct objtool_file *file) 353 { 354 struct section *sec; 355 struct rela *rela; 356 struct instruction *insn; 357 358 /* 359 * By default, "ud2" is a dead end unless otherwise annotated, because 360 * GCC 7 inserts it for certain divide-by-zero cases. 361 */ 362 for_each_insn(file, insn) 363 if (insn->type == INSN_BUG) 364 insn->dead_end = true; 365 366 /* 367 * Check for manually annotated dead ends. 368 */ 369 sec = find_section_by_name(file->elf, ".rela.discard.unreachable"); 370 if (!sec) 371 goto reachable; 372 373 list_for_each_entry(rela, &sec->rela_list, list) { 374 if (rela->sym->type != STT_SECTION) { 375 WARN("unexpected relocation symbol type in %s", sec->name); 376 return -1; 377 } 378 insn = find_insn(file, rela->sym->sec, rela->addend); 379 if (insn) 380 insn = list_prev_entry(insn, list); 381 else if (rela->addend == rela->sym->sec->len) { 382 insn = find_last_insn(file, rela->sym->sec); 383 if (!insn) { 384 WARN("can't find unreachable insn at %s+0x%x", 385 rela->sym->sec->name, rela->addend); 386 return -1; 387 } 388 } else { 389 WARN("can't find unreachable insn at %s+0x%x", 390 rela->sym->sec->name, rela->addend); 391 return -1; 392 } 393 394 insn->dead_end = true; 395 } 396 397 reachable: 398 /* 399 * These manually annotated reachable checks are needed for GCC 4.4, 400 * where the Linux unreachable() macro isn't supported. In that case 401 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's 402 * not a dead end. 403 */ 404 sec = find_section_by_name(file->elf, ".rela.discard.reachable"); 405 if (!sec) 406 return 0; 407 408 list_for_each_entry(rela, &sec->rela_list, list) { 409 if (rela->sym->type != STT_SECTION) { 410 WARN("unexpected relocation symbol type in %s", sec->name); 411 return -1; 412 } 413 insn = find_insn(file, rela->sym->sec, rela->addend); 414 if (insn) 415 insn = list_prev_entry(insn, list); 416 else if (rela->addend == rela->sym->sec->len) { 417 insn = find_last_insn(file, rela->sym->sec); 418 if (!insn) { 419 WARN("can't find reachable insn at %s+0x%x", 420 rela->sym->sec->name, rela->addend); 421 return -1; 422 } 423 } else { 424 WARN("can't find reachable insn at %s+0x%x", 425 rela->sym->sec->name, rela->addend); 426 return -1; 427 } 428 429 insn->dead_end = false; 430 } 431 432 return 0; 433 } 434 435 /* 436 * Warnings shouldn't be reported for ignored functions. 437 */ 438 static void add_ignores(struct objtool_file *file) 439 { 440 struct instruction *insn; 441 struct section *sec; 442 struct symbol *func; 443 struct rela *rela; 444 445 sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard"); 446 if (!sec) 447 return; 448 449 list_for_each_entry(rela, &sec->rela_list, list) { 450 switch (rela->sym->type) { 451 case STT_FUNC: 452 func = rela->sym; 453 break; 454 455 case STT_SECTION: 456 func = find_func_by_offset(rela->sym->sec, rela->addend); 457 if (!func) 458 continue; 459 break; 460 461 default: 462 WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type); 463 continue; 464 } 465 466 func_for_each_insn(file, func, insn) 467 insn->ignore = true; 468 } 469 } 470 471 /* 472 * This is a whitelist of functions that is allowed to be called with AC set. 473 * The list is meant to be minimal and only contains compiler instrumentation 474 * ABI and a few functions used to implement *_{to,from}_user() functions. 475 * 476 * These functions must not directly change AC, but may PUSHF/POPF. 477 */ 478 static const char *uaccess_safe_builtin[] = { 479 /* KASAN */ 480 "kasan_report", 481 "check_memory_region", 482 /* KASAN out-of-line */ 483 "__asan_loadN_noabort", 484 "__asan_load1_noabort", 485 "__asan_load2_noabort", 486 "__asan_load4_noabort", 487 "__asan_load8_noabort", 488 "__asan_load16_noabort", 489 "__asan_storeN_noabort", 490 "__asan_store1_noabort", 491 "__asan_store2_noabort", 492 "__asan_store4_noabort", 493 "__asan_store8_noabort", 494 "__asan_store16_noabort", 495 /* KASAN in-line */ 496 "__asan_report_load_n_noabort", 497 "__asan_report_load1_noabort", 498 "__asan_report_load2_noabort", 499 "__asan_report_load4_noabort", 500 "__asan_report_load8_noabort", 501 "__asan_report_load16_noabort", 502 "__asan_report_store_n_noabort", 503 "__asan_report_store1_noabort", 504 "__asan_report_store2_noabort", 505 "__asan_report_store4_noabort", 506 "__asan_report_store8_noabort", 507 "__asan_report_store16_noabort", 508 /* KCSAN */ 509 "__kcsan_check_access", 510 "kcsan_found_watchpoint", 511 "kcsan_setup_watchpoint", 512 "kcsan_check_scoped_accesses", 513 "kcsan_disable_current", 514 "kcsan_enable_current_nowarn", 515 /* KCSAN/TSAN */ 516 "__tsan_func_entry", 517 "__tsan_func_exit", 518 "__tsan_read_range", 519 "__tsan_write_range", 520 "__tsan_read1", 521 "__tsan_read2", 522 "__tsan_read4", 523 "__tsan_read8", 524 "__tsan_read16", 525 "__tsan_write1", 526 "__tsan_write2", 527 "__tsan_write4", 528 "__tsan_write8", 529 "__tsan_write16", 530 /* KCOV */ 531 "write_comp_data", 532 "check_kcov_mode", 533 "__sanitizer_cov_trace_pc", 534 "__sanitizer_cov_trace_const_cmp1", 535 "__sanitizer_cov_trace_const_cmp2", 536 "__sanitizer_cov_trace_const_cmp4", 537 "__sanitizer_cov_trace_const_cmp8", 538 "__sanitizer_cov_trace_cmp1", 539 "__sanitizer_cov_trace_cmp2", 540 "__sanitizer_cov_trace_cmp4", 541 "__sanitizer_cov_trace_cmp8", 542 "__sanitizer_cov_trace_switch", 543 /* UBSAN */ 544 "ubsan_type_mismatch_common", 545 "__ubsan_handle_type_mismatch", 546 "__ubsan_handle_type_mismatch_v1", 547 "__ubsan_handle_shift_out_of_bounds", 548 /* misc */ 549 "csum_partial_copy_generic", 550 "__memcpy_mcsafe", 551 "mcsafe_handle_tail", 552 "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */ 553 NULL 554 }; 555 556 static void add_uaccess_safe(struct objtool_file *file) 557 { 558 struct symbol *func; 559 const char **name; 560 561 if (!uaccess) 562 return; 563 564 for (name = uaccess_safe_builtin; *name; name++) { 565 func = find_symbol_by_name(file->elf, *name); 566 if (!func) 567 continue; 568 569 func->uaccess_safe = true; 570 } 571 } 572 573 /* 574 * FIXME: For now, just ignore any alternatives which add retpolines. This is 575 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline. 576 * But it at least allows objtool to understand the control flow *around* the 577 * retpoline. 578 */ 579 static int add_ignore_alternatives(struct objtool_file *file) 580 { 581 struct section *sec; 582 struct rela *rela; 583 struct instruction *insn; 584 585 sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts"); 586 if (!sec) 587 return 0; 588 589 list_for_each_entry(rela, &sec->rela_list, list) { 590 if (rela->sym->type != STT_SECTION) { 591 WARN("unexpected relocation symbol type in %s", sec->name); 592 return -1; 593 } 594 595 insn = find_insn(file, rela->sym->sec, rela->addend); 596 if (!insn) { 597 WARN("bad .discard.ignore_alts entry"); 598 return -1; 599 } 600 601 insn->ignore_alts = true; 602 } 603 604 return 0; 605 } 606 607 /* 608 * Find the destination instructions for all jumps. 609 */ 610 static int add_jump_destinations(struct objtool_file *file) 611 { 612 struct instruction *insn; 613 struct rela *rela; 614 struct section *dest_sec; 615 unsigned long dest_off; 616 617 for_each_insn(file, insn) { 618 if (!is_static_jump(insn)) 619 continue; 620 621 if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET) 622 continue; 623 624 rela = find_rela_by_dest_range(file->elf, insn->sec, 625 insn->offset, insn->len); 626 if (!rela) { 627 dest_sec = insn->sec; 628 dest_off = arch_jump_destination(insn); 629 } else if (rela->sym->type == STT_SECTION) { 630 dest_sec = rela->sym->sec; 631 dest_off = arch_dest_rela_offset(rela->addend); 632 } else if (rela->sym->sec->idx) { 633 dest_sec = rela->sym->sec; 634 dest_off = rela->sym->sym.st_value + 635 arch_dest_rela_offset(rela->addend); 636 } else if (strstr(rela->sym->name, "_indirect_thunk_")) { 637 /* 638 * Retpoline jumps are really dynamic jumps in 639 * disguise, so convert them accordingly. 640 */ 641 if (insn->type == INSN_JUMP_UNCONDITIONAL) 642 insn->type = INSN_JUMP_DYNAMIC; 643 else 644 insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL; 645 646 insn->retpoline_safe = true; 647 continue; 648 } else { 649 /* external sibling call */ 650 insn->call_dest = rela->sym; 651 continue; 652 } 653 654 insn->jump_dest = find_insn(file, dest_sec, dest_off); 655 if (!insn->jump_dest) { 656 657 /* 658 * This is a special case where an alt instruction 659 * jumps past the end of the section. These are 660 * handled later in handle_group_alt(). 661 */ 662 if (!strcmp(insn->sec->name, ".altinstr_replacement")) 663 continue; 664 665 WARN_FUNC("can't find jump dest instruction at %s+0x%lx", 666 insn->sec, insn->offset, dest_sec->name, 667 dest_off); 668 return -1; 669 } 670 671 /* 672 * Cross-function jump. 673 */ 674 if (insn->func && insn->jump_dest->func && 675 insn->func != insn->jump_dest->func) { 676 677 /* 678 * For GCC 8+, create parent/child links for any cold 679 * subfunctions. This is _mostly_ redundant with a 680 * similar initialization in read_symbols(). 681 * 682 * If a function has aliases, we want the *first* such 683 * function in the symbol table to be the subfunction's 684 * parent. In that case we overwrite the 685 * initialization done in read_symbols(). 686 * 687 * However this code can't completely replace the 688 * read_symbols() code because this doesn't detect the 689 * case where the parent function's only reference to a 690 * subfunction is through a jump table. 691 */ 692 if (!strstr(insn->func->name, ".cold.") && 693 strstr(insn->jump_dest->func->name, ".cold.")) { 694 insn->func->cfunc = insn->jump_dest->func; 695 insn->jump_dest->func->pfunc = insn->func; 696 697 } else if (insn->jump_dest->func->pfunc != insn->func->pfunc && 698 insn->jump_dest->offset == insn->jump_dest->func->offset) { 699 700 /* internal sibling call */ 701 insn->call_dest = insn->jump_dest->func; 702 } 703 } 704 } 705 706 return 0; 707 } 708 709 static void remove_insn_ops(struct instruction *insn) 710 { 711 struct stack_op *op, *tmp; 712 713 list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) { 714 list_del(&op->list); 715 free(op); 716 } 717 } 718 719 /* 720 * Find the destination instructions for all calls. 721 */ 722 static int add_call_destinations(struct objtool_file *file) 723 { 724 struct instruction *insn; 725 unsigned long dest_off; 726 struct rela *rela; 727 728 for_each_insn(file, insn) { 729 if (insn->type != INSN_CALL) 730 continue; 731 732 rela = find_rela_by_dest_range(file->elf, insn->sec, 733 insn->offset, insn->len); 734 if (!rela) { 735 dest_off = arch_jump_destination(insn); 736 insn->call_dest = find_func_by_offset(insn->sec, dest_off); 737 if (!insn->call_dest) 738 insn->call_dest = find_symbol_by_offset(insn->sec, dest_off); 739 740 if (insn->ignore) 741 continue; 742 743 if (!insn->call_dest) { 744 WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset); 745 return -1; 746 } 747 748 if (insn->func && insn->call_dest->type != STT_FUNC) { 749 WARN_FUNC("unsupported call to non-function", 750 insn->sec, insn->offset); 751 return -1; 752 } 753 754 } else if (rela->sym->type == STT_SECTION) { 755 dest_off = arch_dest_rela_offset(rela->addend); 756 insn->call_dest = find_func_by_offset(rela->sym->sec, 757 dest_off); 758 if (!insn->call_dest) { 759 WARN_FUNC("can't find call dest symbol at %s+0x%lx", 760 insn->sec, insn->offset, 761 rela->sym->sec->name, 762 dest_off); 763 return -1; 764 } 765 } else 766 insn->call_dest = rela->sym; 767 768 /* 769 * Whatever stack impact regular CALLs have, should be undone 770 * by the RETURN of the called function. 771 * 772 * Annotated intra-function calls retain the stack_ops but 773 * are converted to JUMP, see read_intra_function_calls(). 774 */ 775 remove_insn_ops(insn); 776 } 777 778 return 0; 779 } 780 781 /* 782 * The .alternatives section requires some extra special care, over and above 783 * what other special sections require: 784 * 785 * 1. Because alternatives are patched in-place, we need to insert a fake jump 786 * instruction at the end so that validate_branch() skips all the original 787 * replaced instructions when validating the new instruction path. 788 * 789 * 2. An added wrinkle is that the new instruction length might be zero. In 790 * that case the old instructions are replaced with noops. We simulate that 791 * by creating a fake jump as the only new instruction. 792 * 793 * 3. In some cases, the alternative section includes an instruction which 794 * conditionally jumps to the _end_ of the entry. We have to modify these 795 * jumps' destinations to point back to .text rather than the end of the 796 * entry in .altinstr_replacement. 797 */ 798 static int handle_group_alt(struct objtool_file *file, 799 struct special_alt *special_alt, 800 struct instruction *orig_insn, 801 struct instruction **new_insn) 802 { 803 static unsigned int alt_group_next_index = 1; 804 struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL; 805 unsigned int alt_group = alt_group_next_index++; 806 unsigned long dest_off; 807 808 last_orig_insn = NULL; 809 insn = orig_insn; 810 sec_for_each_insn_from(file, insn) { 811 if (insn->offset >= special_alt->orig_off + special_alt->orig_len) 812 break; 813 814 insn->alt_group = alt_group; 815 last_orig_insn = insn; 816 } 817 818 if (next_insn_same_sec(file, last_orig_insn)) { 819 fake_jump = malloc(sizeof(*fake_jump)); 820 if (!fake_jump) { 821 WARN("malloc failed"); 822 return -1; 823 } 824 memset(fake_jump, 0, sizeof(*fake_jump)); 825 INIT_LIST_HEAD(&fake_jump->alts); 826 INIT_LIST_HEAD(&fake_jump->stack_ops); 827 init_cfi_state(&fake_jump->cfi); 828 829 fake_jump->sec = special_alt->new_sec; 830 fake_jump->offset = FAKE_JUMP_OFFSET; 831 fake_jump->type = INSN_JUMP_UNCONDITIONAL; 832 fake_jump->jump_dest = list_next_entry(last_orig_insn, list); 833 fake_jump->func = orig_insn->func; 834 } 835 836 if (!special_alt->new_len) { 837 if (!fake_jump) { 838 WARN("%s: empty alternative at end of section", 839 special_alt->orig_sec->name); 840 return -1; 841 } 842 843 *new_insn = fake_jump; 844 return 0; 845 } 846 847 last_new_insn = NULL; 848 alt_group = alt_group_next_index++; 849 insn = *new_insn; 850 sec_for_each_insn_from(file, insn) { 851 if (insn->offset >= special_alt->new_off + special_alt->new_len) 852 break; 853 854 last_new_insn = insn; 855 856 insn->ignore = orig_insn->ignore_alts; 857 insn->func = orig_insn->func; 858 insn->alt_group = alt_group; 859 860 /* 861 * Since alternative replacement code is copy/pasted by the 862 * kernel after applying relocations, generally such code can't 863 * have relative-address relocation references to outside the 864 * .altinstr_replacement section, unless the arch's 865 * alternatives code can adjust the relative offsets 866 * accordingly. 867 * 868 * The x86 alternatives code adjusts the offsets only when it 869 * encounters a branch instruction at the very beginning of the 870 * replacement group. 871 */ 872 if ((insn->offset != special_alt->new_off || 873 (insn->type != INSN_CALL && !is_static_jump(insn))) && 874 find_rela_by_dest_range(file->elf, insn->sec, insn->offset, insn->len)) { 875 876 WARN_FUNC("unsupported relocation in alternatives section", 877 insn->sec, insn->offset); 878 return -1; 879 } 880 881 if (!is_static_jump(insn)) 882 continue; 883 884 if (!insn->immediate) 885 continue; 886 887 dest_off = arch_jump_destination(insn); 888 if (dest_off == special_alt->new_off + special_alt->new_len) { 889 if (!fake_jump) { 890 WARN("%s: alternative jump to end of section", 891 special_alt->orig_sec->name); 892 return -1; 893 } 894 insn->jump_dest = fake_jump; 895 } 896 897 if (!insn->jump_dest) { 898 WARN_FUNC("can't find alternative jump destination", 899 insn->sec, insn->offset); 900 return -1; 901 } 902 } 903 904 if (!last_new_insn) { 905 WARN_FUNC("can't find last new alternative instruction", 906 special_alt->new_sec, special_alt->new_off); 907 return -1; 908 } 909 910 if (fake_jump) 911 list_add(&fake_jump->list, &last_new_insn->list); 912 913 return 0; 914 } 915 916 /* 917 * A jump table entry can either convert a nop to a jump or a jump to a nop. 918 * If the original instruction is a jump, make the alt entry an effective nop 919 * by just skipping the original instruction. 920 */ 921 static int handle_jump_alt(struct objtool_file *file, 922 struct special_alt *special_alt, 923 struct instruction *orig_insn, 924 struct instruction **new_insn) 925 { 926 if (orig_insn->type == INSN_NOP) 927 return 0; 928 929 if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) { 930 WARN_FUNC("unsupported instruction at jump label", 931 orig_insn->sec, orig_insn->offset); 932 return -1; 933 } 934 935 *new_insn = list_next_entry(orig_insn, list); 936 return 0; 937 } 938 939 /* 940 * Read all the special sections which have alternate instructions which can be 941 * patched in or redirected to at runtime. Each instruction having alternate 942 * instruction(s) has them added to its insn->alts list, which will be 943 * traversed in validate_branch(). 944 */ 945 static int add_special_section_alts(struct objtool_file *file) 946 { 947 struct list_head special_alts; 948 struct instruction *orig_insn, *new_insn; 949 struct special_alt *special_alt, *tmp; 950 struct alternative *alt; 951 int ret; 952 953 ret = special_get_alts(file->elf, &special_alts); 954 if (ret) 955 return ret; 956 957 list_for_each_entry_safe(special_alt, tmp, &special_alts, list) { 958 959 orig_insn = find_insn(file, special_alt->orig_sec, 960 special_alt->orig_off); 961 if (!orig_insn) { 962 WARN_FUNC("special: can't find orig instruction", 963 special_alt->orig_sec, special_alt->orig_off); 964 ret = -1; 965 goto out; 966 } 967 968 new_insn = NULL; 969 if (!special_alt->group || special_alt->new_len) { 970 new_insn = find_insn(file, special_alt->new_sec, 971 special_alt->new_off); 972 if (!new_insn) { 973 WARN_FUNC("special: can't find new instruction", 974 special_alt->new_sec, 975 special_alt->new_off); 976 ret = -1; 977 goto out; 978 } 979 } 980 981 if (special_alt->group) { 982 if (!special_alt->orig_len) { 983 WARN_FUNC("empty alternative entry", 984 orig_insn->sec, orig_insn->offset); 985 continue; 986 } 987 988 ret = handle_group_alt(file, special_alt, orig_insn, 989 &new_insn); 990 if (ret) 991 goto out; 992 } else if (special_alt->jump_or_nop) { 993 ret = handle_jump_alt(file, special_alt, orig_insn, 994 &new_insn); 995 if (ret) 996 goto out; 997 } 998 999 alt = malloc(sizeof(*alt)); 1000 if (!alt) { 1001 WARN("malloc failed"); 1002 ret = -1; 1003 goto out; 1004 } 1005 1006 alt->insn = new_insn; 1007 alt->skip_orig = special_alt->skip_orig; 1008 orig_insn->ignore_alts |= special_alt->skip_alt; 1009 list_add_tail(&alt->list, &orig_insn->alts); 1010 1011 list_del(&special_alt->list); 1012 free(special_alt); 1013 } 1014 1015 out: 1016 return ret; 1017 } 1018 1019 static int add_jump_table(struct objtool_file *file, struct instruction *insn, 1020 struct rela *table) 1021 { 1022 struct rela *rela = table; 1023 struct instruction *dest_insn; 1024 struct alternative *alt; 1025 struct symbol *pfunc = insn->func->pfunc; 1026 unsigned int prev_offset = 0; 1027 1028 /* 1029 * Each @rela is a switch table relocation which points to the target 1030 * instruction. 1031 */ 1032 list_for_each_entry_from(rela, &table->sec->rela_list, list) { 1033 1034 /* Check for the end of the table: */ 1035 if (rela != table && rela->jump_table_start) 1036 break; 1037 1038 /* Make sure the table entries are consecutive: */ 1039 if (prev_offset && rela->offset != prev_offset + 8) 1040 break; 1041 1042 /* Detect function pointers from contiguous objects: */ 1043 if (rela->sym->sec == pfunc->sec && 1044 rela->addend == pfunc->offset) 1045 break; 1046 1047 dest_insn = find_insn(file, rela->sym->sec, rela->addend); 1048 if (!dest_insn) 1049 break; 1050 1051 /* Make sure the destination is in the same function: */ 1052 if (!dest_insn->func || dest_insn->func->pfunc != pfunc) 1053 break; 1054 1055 alt = malloc(sizeof(*alt)); 1056 if (!alt) { 1057 WARN("malloc failed"); 1058 return -1; 1059 } 1060 1061 alt->insn = dest_insn; 1062 list_add_tail(&alt->list, &insn->alts); 1063 prev_offset = rela->offset; 1064 } 1065 1066 if (!prev_offset) { 1067 WARN_FUNC("can't find switch jump table", 1068 insn->sec, insn->offset); 1069 return -1; 1070 } 1071 1072 return 0; 1073 } 1074 1075 /* 1076 * find_jump_table() - Given a dynamic jump, find the switch jump table in 1077 * .rodata associated with it. 1078 * 1079 * There are 3 basic patterns: 1080 * 1081 * 1. jmpq *[rodata addr](,%reg,8) 1082 * 1083 * This is the most common case by far. It jumps to an address in a simple 1084 * jump table which is stored in .rodata. 1085 * 1086 * 2. jmpq *[rodata addr](%rip) 1087 * 1088 * This is caused by a rare GCC quirk, currently only seen in three driver 1089 * functions in the kernel, only with certain obscure non-distro configs. 1090 * 1091 * As part of an optimization, GCC makes a copy of an existing switch jump 1092 * table, modifies it, and then hard-codes the jump (albeit with an indirect 1093 * jump) to use a single entry in the table. The rest of the jump table and 1094 * some of its jump targets remain as dead code. 1095 * 1096 * In such a case we can just crudely ignore all unreachable instruction 1097 * warnings for the entire object file. Ideally we would just ignore them 1098 * for the function, but that would require redesigning the code quite a 1099 * bit. And honestly that's just not worth doing: unreachable instruction 1100 * warnings are of questionable value anyway, and this is such a rare issue. 1101 * 1102 * 3. mov [rodata addr],%reg1 1103 * ... some instructions ... 1104 * jmpq *(%reg1,%reg2,8) 1105 * 1106 * This is a fairly uncommon pattern which is new for GCC 6. As of this 1107 * writing, there are 11 occurrences of it in the allmodconfig kernel. 1108 * 1109 * As of GCC 7 there are quite a few more of these and the 'in between' code 1110 * is significant. Esp. with KASAN enabled some of the code between the mov 1111 * and jmpq uses .rodata itself, which can confuse things. 1112 * 1113 * TODO: Once we have DWARF CFI and smarter instruction decoding logic, 1114 * ensure the same register is used in the mov and jump instructions. 1115 * 1116 * NOTE: RETPOLINE made it harder still to decode dynamic jumps. 1117 */ 1118 static struct rela *find_jump_table(struct objtool_file *file, 1119 struct symbol *func, 1120 struct instruction *insn) 1121 { 1122 struct rela *text_rela, *table_rela; 1123 struct instruction *dest_insn, *orig_insn = insn; 1124 struct section *table_sec; 1125 unsigned long table_offset; 1126 1127 /* 1128 * Backward search using the @first_jump_src links, these help avoid 1129 * much of the 'in between' code. Which avoids us getting confused by 1130 * it. 1131 */ 1132 for (; 1133 insn && insn->func && insn->func->pfunc == func; 1134 insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) { 1135 1136 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC) 1137 break; 1138 1139 /* allow small jumps within the range */ 1140 if (insn->type == INSN_JUMP_UNCONDITIONAL && 1141 insn->jump_dest && 1142 (insn->jump_dest->offset <= insn->offset || 1143 insn->jump_dest->offset > orig_insn->offset)) 1144 break; 1145 1146 /* look for a relocation which references .rodata */ 1147 text_rela = find_rela_by_dest_range(file->elf, insn->sec, 1148 insn->offset, insn->len); 1149 if (!text_rela || text_rela->sym->type != STT_SECTION || 1150 !text_rela->sym->sec->rodata) 1151 continue; 1152 1153 table_offset = text_rela->addend; 1154 table_sec = text_rela->sym->sec; 1155 1156 if (text_rela->type == R_X86_64_PC32) 1157 table_offset += 4; 1158 1159 /* 1160 * Make sure the .rodata address isn't associated with a 1161 * symbol. GCC jump tables are anonymous data. 1162 * 1163 * Also support C jump tables which are in the same format as 1164 * switch jump tables. For objtool to recognize them, they 1165 * need to be placed in the C_JUMP_TABLE_SECTION section. They 1166 * have symbols associated with them. 1167 */ 1168 if (find_symbol_containing(table_sec, table_offset) && 1169 strcmp(table_sec->name, C_JUMP_TABLE_SECTION)) 1170 continue; 1171 1172 /* 1173 * Each table entry has a rela associated with it. The rela 1174 * should reference text in the same function as the original 1175 * instruction. 1176 */ 1177 table_rela = find_rela_by_dest(file->elf, table_sec, table_offset); 1178 if (!table_rela) 1179 continue; 1180 dest_insn = find_insn(file, table_rela->sym->sec, table_rela->addend); 1181 if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func) 1182 continue; 1183 1184 /* 1185 * Use of RIP-relative switch jumps is quite rare, and 1186 * indicates a rare GCC quirk/bug which can leave dead code 1187 * behind. 1188 */ 1189 if (text_rela->type == R_X86_64_PC32) 1190 file->ignore_unreachables = true; 1191 1192 return table_rela; 1193 } 1194 1195 return NULL; 1196 } 1197 1198 /* 1199 * First pass: Mark the head of each jump table so that in the next pass, 1200 * we know when a given jump table ends and the next one starts. 1201 */ 1202 static void mark_func_jump_tables(struct objtool_file *file, 1203 struct symbol *func) 1204 { 1205 struct instruction *insn, *last = NULL; 1206 struct rela *rela; 1207 1208 func_for_each_insn(file, func, insn) { 1209 if (!last) 1210 last = insn; 1211 1212 /* 1213 * Store back-pointers for unconditional forward jumps such 1214 * that find_jump_table() can back-track using those and 1215 * avoid some potentially confusing code. 1216 */ 1217 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest && 1218 insn->offset > last->offset && 1219 insn->jump_dest->offset > insn->offset && 1220 !insn->jump_dest->first_jump_src) { 1221 1222 insn->jump_dest->first_jump_src = insn; 1223 last = insn->jump_dest; 1224 } 1225 1226 if (insn->type != INSN_JUMP_DYNAMIC) 1227 continue; 1228 1229 rela = find_jump_table(file, func, insn); 1230 if (rela) { 1231 rela->jump_table_start = true; 1232 insn->jump_table = rela; 1233 } 1234 } 1235 } 1236 1237 static int add_func_jump_tables(struct objtool_file *file, 1238 struct symbol *func) 1239 { 1240 struct instruction *insn; 1241 int ret; 1242 1243 func_for_each_insn(file, func, insn) { 1244 if (!insn->jump_table) 1245 continue; 1246 1247 ret = add_jump_table(file, insn, insn->jump_table); 1248 if (ret) 1249 return ret; 1250 } 1251 1252 return 0; 1253 } 1254 1255 /* 1256 * For some switch statements, gcc generates a jump table in the .rodata 1257 * section which contains a list of addresses within the function to jump to. 1258 * This finds these jump tables and adds them to the insn->alts lists. 1259 */ 1260 static int add_jump_table_alts(struct objtool_file *file) 1261 { 1262 struct section *sec; 1263 struct symbol *func; 1264 int ret; 1265 1266 if (!file->rodata) 1267 return 0; 1268 1269 for_each_sec(file, sec) { 1270 list_for_each_entry(func, &sec->symbol_list, list) { 1271 if (func->type != STT_FUNC) 1272 continue; 1273 1274 mark_func_jump_tables(file, func); 1275 ret = add_func_jump_tables(file, func); 1276 if (ret) 1277 return ret; 1278 } 1279 } 1280 1281 return 0; 1282 } 1283 1284 static int read_unwind_hints(struct objtool_file *file) 1285 { 1286 struct section *sec, *relasec; 1287 struct rela *rela; 1288 struct unwind_hint *hint; 1289 struct instruction *insn; 1290 struct cfi_reg *cfa; 1291 int i; 1292 1293 sec = find_section_by_name(file->elf, ".discard.unwind_hints"); 1294 if (!sec) 1295 return 0; 1296 1297 relasec = sec->rela; 1298 if (!relasec) { 1299 WARN("missing .rela.discard.unwind_hints section"); 1300 return -1; 1301 } 1302 1303 if (sec->len % sizeof(struct unwind_hint)) { 1304 WARN("struct unwind_hint size mismatch"); 1305 return -1; 1306 } 1307 1308 file->hints = true; 1309 1310 for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) { 1311 hint = (struct unwind_hint *)sec->data->d_buf + i; 1312 1313 rela = find_rela_by_dest(file->elf, sec, i * sizeof(*hint)); 1314 if (!rela) { 1315 WARN("can't find rela for unwind_hints[%d]", i); 1316 return -1; 1317 } 1318 1319 insn = find_insn(file, rela->sym->sec, rela->addend); 1320 if (!insn) { 1321 WARN("can't find insn for unwind_hints[%d]", i); 1322 return -1; 1323 } 1324 1325 cfa = &insn->cfi.cfa; 1326 1327 if (hint->type == UNWIND_HINT_TYPE_RET_OFFSET) { 1328 insn->ret_offset = hint->sp_offset; 1329 continue; 1330 } 1331 1332 insn->hint = true; 1333 1334 switch (hint->sp_reg) { 1335 case ORC_REG_UNDEFINED: 1336 cfa->base = CFI_UNDEFINED; 1337 break; 1338 case ORC_REG_SP: 1339 cfa->base = CFI_SP; 1340 break; 1341 case ORC_REG_BP: 1342 cfa->base = CFI_BP; 1343 break; 1344 case ORC_REG_SP_INDIRECT: 1345 cfa->base = CFI_SP_INDIRECT; 1346 break; 1347 case ORC_REG_R10: 1348 cfa->base = CFI_R10; 1349 break; 1350 case ORC_REG_R13: 1351 cfa->base = CFI_R13; 1352 break; 1353 case ORC_REG_DI: 1354 cfa->base = CFI_DI; 1355 break; 1356 case ORC_REG_DX: 1357 cfa->base = CFI_DX; 1358 break; 1359 default: 1360 WARN_FUNC("unsupported unwind_hint sp base reg %d", 1361 insn->sec, insn->offset, hint->sp_reg); 1362 return -1; 1363 } 1364 1365 cfa->offset = hint->sp_offset; 1366 insn->cfi.type = hint->type; 1367 insn->cfi.end = hint->end; 1368 } 1369 1370 return 0; 1371 } 1372 1373 static int read_retpoline_hints(struct objtool_file *file) 1374 { 1375 struct section *sec; 1376 struct instruction *insn; 1377 struct rela *rela; 1378 1379 sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe"); 1380 if (!sec) 1381 return 0; 1382 1383 list_for_each_entry(rela, &sec->rela_list, list) { 1384 if (rela->sym->type != STT_SECTION) { 1385 WARN("unexpected relocation symbol type in %s", sec->name); 1386 return -1; 1387 } 1388 1389 insn = find_insn(file, rela->sym->sec, rela->addend); 1390 if (!insn) { 1391 WARN("bad .discard.retpoline_safe entry"); 1392 return -1; 1393 } 1394 1395 if (insn->type != INSN_JUMP_DYNAMIC && 1396 insn->type != INSN_CALL_DYNAMIC) { 1397 WARN_FUNC("retpoline_safe hint not an indirect jump/call", 1398 insn->sec, insn->offset); 1399 return -1; 1400 } 1401 1402 insn->retpoline_safe = true; 1403 } 1404 1405 return 0; 1406 } 1407 1408 static int read_instr_hints(struct objtool_file *file) 1409 { 1410 struct section *sec; 1411 struct instruction *insn; 1412 struct rela *rela; 1413 1414 sec = find_section_by_name(file->elf, ".rela.discard.instr_end"); 1415 if (!sec) 1416 return 0; 1417 1418 list_for_each_entry(rela, &sec->rela_list, list) { 1419 if (rela->sym->type != STT_SECTION) { 1420 WARN("unexpected relocation symbol type in %s", sec->name); 1421 return -1; 1422 } 1423 1424 insn = find_insn(file, rela->sym->sec, rela->addend); 1425 if (!insn) { 1426 WARN("bad .discard.instr_end entry"); 1427 return -1; 1428 } 1429 1430 insn->instr--; 1431 } 1432 1433 sec = find_section_by_name(file->elf, ".rela.discard.instr_begin"); 1434 if (!sec) 1435 return 0; 1436 1437 list_for_each_entry(rela, &sec->rela_list, list) { 1438 if (rela->sym->type != STT_SECTION) { 1439 WARN("unexpected relocation symbol type in %s", sec->name); 1440 return -1; 1441 } 1442 1443 insn = find_insn(file, rela->sym->sec, rela->addend); 1444 if (!insn) { 1445 WARN("bad .discard.instr_begin entry"); 1446 return -1; 1447 } 1448 1449 insn->instr++; 1450 } 1451 1452 return 0; 1453 } 1454 1455 static int read_intra_function_calls(struct objtool_file *file) 1456 { 1457 struct instruction *insn; 1458 struct section *sec; 1459 struct rela *rela; 1460 1461 sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls"); 1462 if (!sec) 1463 return 0; 1464 1465 list_for_each_entry(rela, &sec->rela_list, list) { 1466 unsigned long dest_off; 1467 1468 if (rela->sym->type != STT_SECTION) { 1469 WARN("unexpected relocation symbol type in %s", 1470 sec->name); 1471 return -1; 1472 } 1473 1474 insn = find_insn(file, rela->sym->sec, rela->addend); 1475 if (!insn) { 1476 WARN("bad .discard.intra_function_call entry"); 1477 return -1; 1478 } 1479 1480 if (insn->type != INSN_CALL) { 1481 WARN_FUNC("intra_function_call not a direct call", 1482 insn->sec, insn->offset); 1483 return -1; 1484 } 1485 1486 /* 1487 * Treat intra-function CALLs as JMPs, but with a stack_op. 1488 * See add_call_destinations(), which strips stack_ops from 1489 * normal CALLs. 1490 */ 1491 insn->type = INSN_JUMP_UNCONDITIONAL; 1492 1493 dest_off = insn->offset + insn->len + insn->immediate; 1494 insn->jump_dest = find_insn(file, insn->sec, dest_off); 1495 if (!insn->jump_dest) { 1496 WARN_FUNC("can't find call dest at %s+0x%lx", 1497 insn->sec, insn->offset, 1498 insn->sec->name, dest_off); 1499 return -1; 1500 } 1501 } 1502 1503 return 0; 1504 } 1505 1506 static void mark_rodata(struct objtool_file *file) 1507 { 1508 struct section *sec; 1509 bool found = false; 1510 1511 /* 1512 * Search for the following rodata sections, each of which can 1513 * potentially contain jump tables: 1514 * 1515 * - .rodata: can contain GCC switch tables 1516 * - .rodata.<func>: same, if -fdata-sections is being used 1517 * - .rodata..c_jump_table: contains C annotated jump tables 1518 * 1519 * .rodata.str1.* sections are ignored; they don't contain jump tables. 1520 */ 1521 for_each_sec(file, sec) { 1522 if (!strncmp(sec->name, ".rodata", 7) && 1523 !strstr(sec->name, ".str1.")) { 1524 sec->rodata = true; 1525 found = true; 1526 } 1527 } 1528 1529 file->rodata = found; 1530 } 1531 1532 static int decode_sections(struct objtool_file *file) 1533 { 1534 int ret; 1535 1536 mark_rodata(file); 1537 1538 ret = decode_instructions(file); 1539 if (ret) 1540 return ret; 1541 1542 ret = add_dead_ends(file); 1543 if (ret) 1544 return ret; 1545 1546 add_ignores(file); 1547 add_uaccess_safe(file); 1548 1549 ret = add_ignore_alternatives(file); 1550 if (ret) 1551 return ret; 1552 1553 ret = add_jump_destinations(file); 1554 if (ret) 1555 return ret; 1556 1557 ret = add_special_section_alts(file); 1558 if (ret) 1559 return ret; 1560 1561 ret = read_intra_function_calls(file); 1562 if (ret) 1563 return ret; 1564 1565 ret = add_call_destinations(file); 1566 if (ret) 1567 return ret; 1568 1569 ret = add_jump_table_alts(file); 1570 if (ret) 1571 return ret; 1572 1573 ret = read_unwind_hints(file); 1574 if (ret) 1575 return ret; 1576 1577 ret = read_retpoline_hints(file); 1578 if (ret) 1579 return ret; 1580 1581 ret = read_instr_hints(file); 1582 if (ret) 1583 return ret; 1584 1585 return 0; 1586 } 1587 1588 static bool is_fentry_call(struct instruction *insn) 1589 { 1590 if (insn->type == INSN_CALL && insn->call_dest && 1591 insn->call_dest->type == STT_NOTYPE && 1592 !strcmp(insn->call_dest->name, "__fentry__")) 1593 return true; 1594 1595 return false; 1596 } 1597 1598 static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state) 1599 { 1600 u8 ret_offset = insn->ret_offset; 1601 struct cfi_state *cfi = &state->cfi; 1602 int i; 1603 1604 if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap) 1605 return true; 1606 1607 if (cfi->cfa.offset != initial_func_cfi.cfa.offset + ret_offset) 1608 return true; 1609 1610 if (cfi->stack_size != initial_func_cfi.cfa.offset + ret_offset) 1611 return true; 1612 1613 /* 1614 * If there is a ret offset hint then don't check registers 1615 * because a callee-saved register might have been pushed on 1616 * the stack. 1617 */ 1618 if (ret_offset) 1619 return false; 1620 1621 for (i = 0; i < CFI_NUM_REGS; i++) { 1622 if (cfi->regs[i].base != initial_func_cfi.regs[i].base || 1623 cfi->regs[i].offset != initial_func_cfi.regs[i].offset) 1624 return true; 1625 } 1626 1627 return false; 1628 } 1629 1630 static bool has_valid_stack_frame(struct insn_state *state) 1631 { 1632 struct cfi_state *cfi = &state->cfi; 1633 1634 if (cfi->cfa.base == CFI_BP && cfi->regs[CFI_BP].base == CFI_CFA && 1635 cfi->regs[CFI_BP].offset == -16) 1636 return true; 1637 1638 if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP) 1639 return true; 1640 1641 return false; 1642 } 1643 1644 static int update_cfi_state_regs(struct instruction *insn, 1645 struct cfi_state *cfi, 1646 struct stack_op *op) 1647 { 1648 struct cfi_reg *cfa = &cfi->cfa; 1649 1650 if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT) 1651 return 0; 1652 1653 /* push */ 1654 if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF) 1655 cfa->offset += 8; 1656 1657 /* pop */ 1658 if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF) 1659 cfa->offset -= 8; 1660 1661 /* add immediate to sp */ 1662 if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD && 1663 op->dest.reg == CFI_SP && op->src.reg == CFI_SP) 1664 cfa->offset -= op->src.offset; 1665 1666 return 0; 1667 } 1668 1669 static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset) 1670 { 1671 if (arch_callee_saved_reg(reg) && 1672 cfi->regs[reg].base == CFI_UNDEFINED) { 1673 cfi->regs[reg].base = base; 1674 cfi->regs[reg].offset = offset; 1675 } 1676 } 1677 1678 static void restore_reg(struct cfi_state *cfi, unsigned char reg) 1679 { 1680 cfi->regs[reg].base = initial_func_cfi.regs[reg].base; 1681 cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset; 1682 } 1683 1684 /* 1685 * A note about DRAP stack alignment: 1686 * 1687 * GCC has the concept of a DRAP register, which is used to help keep track of 1688 * the stack pointer when aligning the stack. r10 or r13 is used as the DRAP 1689 * register. The typical DRAP pattern is: 1690 * 1691 * 4c 8d 54 24 08 lea 0x8(%rsp),%r10 1692 * 48 83 e4 c0 and $0xffffffffffffffc0,%rsp 1693 * 41 ff 72 f8 pushq -0x8(%r10) 1694 * 55 push %rbp 1695 * 48 89 e5 mov %rsp,%rbp 1696 * (more pushes) 1697 * 41 52 push %r10 1698 * ... 1699 * 41 5a pop %r10 1700 * (more pops) 1701 * 5d pop %rbp 1702 * 49 8d 62 f8 lea -0x8(%r10),%rsp 1703 * c3 retq 1704 * 1705 * There are some variations in the epilogues, like: 1706 * 1707 * 5b pop %rbx 1708 * 41 5a pop %r10 1709 * 41 5c pop %r12 1710 * 41 5d pop %r13 1711 * 41 5e pop %r14 1712 * c9 leaveq 1713 * 49 8d 62 f8 lea -0x8(%r10),%rsp 1714 * c3 retq 1715 * 1716 * and: 1717 * 1718 * 4c 8b 55 e8 mov -0x18(%rbp),%r10 1719 * 48 8b 5d e0 mov -0x20(%rbp),%rbx 1720 * 4c 8b 65 f0 mov -0x10(%rbp),%r12 1721 * 4c 8b 6d f8 mov -0x8(%rbp),%r13 1722 * c9 leaveq 1723 * 49 8d 62 f8 lea -0x8(%r10),%rsp 1724 * c3 retq 1725 * 1726 * Sometimes r13 is used as the DRAP register, in which case it's saved and 1727 * restored beforehand: 1728 * 1729 * 41 55 push %r13 1730 * 4c 8d 6c 24 10 lea 0x10(%rsp),%r13 1731 * 48 83 e4 f0 and $0xfffffffffffffff0,%rsp 1732 * ... 1733 * 49 8d 65 f0 lea -0x10(%r13),%rsp 1734 * 41 5d pop %r13 1735 * c3 retq 1736 */ 1737 static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi, 1738 struct stack_op *op) 1739 { 1740 struct cfi_reg *cfa = &cfi->cfa; 1741 struct cfi_reg *regs = cfi->regs; 1742 1743 /* stack operations don't make sense with an undefined CFA */ 1744 if (cfa->base == CFI_UNDEFINED) { 1745 if (insn->func) { 1746 WARN_FUNC("undefined stack state", insn->sec, insn->offset); 1747 return -1; 1748 } 1749 return 0; 1750 } 1751 1752 if (cfi->type == ORC_TYPE_REGS || cfi->type == ORC_TYPE_REGS_IRET) 1753 return update_cfi_state_regs(insn, cfi, op); 1754 1755 switch (op->dest.type) { 1756 1757 case OP_DEST_REG: 1758 switch (op->src.type) { 1759 1760 case OP_SRC_REG: 1761 if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP && 1762 cfa->base == CFI_SP && 1763 regs[CFI_BP].base == CFI_CFA && 1764 regs[CFI_BP].offset == -cfa->offset) { 1765 1766 /* mov %rsp, %rbp */ 1767 cfa->base = op->dest.reg; 1768 cfi->bp_scratch = false; 1769 } 1770 1771 else if (op->src.reg == CFI_SP && 1772 op->dest.reg == CFI_BP && cfi->drap) { 1773 1774 /* drap: mov %rsp, %rbp */ 1775 regs[CFI_BP].base = CFI_BP; 1776 regs[CFI_BP].offset = -cfi->stack_size; 1777 cfi->bp_scratch = false; 1778 } 1779 1780 else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) { 1781 1782 /* 1783 * mov %rsp, %reg 1784 * 1785 * This is needed for the rare case where GCC 1786 * does: 1787 * 1788 * mov %rsp, %rax 1789 * ... 1790 * mov %rax, %rsp 1791 */ 1792 cfi->vals[op->dest.reg].base = CFI_CFA; 1793 cfi->vals[op->dest.reg].offset = -cfi->stack_size; 1794 } 1795 1796 else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP && 1797 cfa->base == CFI_BP) { 1798 1799 /* 1800 * mov %rbp, %rsp 1801 * 1802 * Restore the original stack pointer (Clang). 1803 */ 1804 cfi->stack_size = -cfi->regs[CFI_BP].offset; 1805 } 1806 1807 else if (op->dest.reg == cfa->base) { 1808 1809 /* mov %reg, %rsp */ 1810 if (cfa->base == CFI_SP && 1811 cfi->vals[op->src.reg].base == CFI_CFA) { 1812 1813 /* 1814 * This is needed for the rare case 1815 * where GCC does something dumb like: 1816 * 1817 * lea 0x8(%rsp), %rcx 1818 * ... 1819 * mov %rcx, %rsp 1820 */ 1821 cfa->offset = -cfi->vals[op->src.reg].offset; 1822 cfi->stack_size = cfa->offset; 1823 1824 } else { 1825 cfa->base = CFI_UNDEFINED; 1826 cfa->offset = 0; 1827 } 1828 } 1829 1830 break; 1831 1832 case OP_SRC_ADD: 1833 if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) { 1834 1835 /* add imm, %rsp */ 1836 cfi->stack_size -= op->src.offset; 1837 if (cfa->base == CFI_SP) 1838 cfa->offset -= op->src.offset; 1839 break; 1840 } 1841 1842 if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) { 1843 1844 /* lea disp(%rbp), %rsp */ 1845 cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset); 1846 break; 1847 } 1848 1849 if (op->src.reg == CFI_SP && cfa->base == CFI_SP) { 1850 1851 /* drap: lea disp(%rsp), %drap */ 1852 cfi->drap_reg = op->dest.reg; 1853 1854 /* 1855 * lea disp(%rsp), %reg 1856 * 1857 * This is needed for the rare case where GCC 1858 * does something dumb like: 1859 * 1860 * lea 0x8(%rsp), %rcx 1861 * ... 1862 * mov %rcx, %rsp 1863 */ 1864 cfi->vals[op->dest.reg].base = CFI_CFA; 1865 cfi->vals[op->dest.reg].offset = \ 1866 -cfi->stack_size + op->src.offset; 1867 1868 break; 1869 } 1870 1871 if (cfi->drap && op->dest.reg == CFI_SP && 1872 op->src.reg == cfi->drap_reg) { 1873 1874 /* drap: lea disp(%drap), %rsp */ 1875 cfa->base = CFI_SP; 1876 cfa->offset = cfi->stack_size = -op->src.offset; 1877 cfi->drap_reg = CFI_UNDEFINED; 1878 cfi->drap = false; 1879 break; 1880 } 1881 1882 if (op->dest.reg == cfi->cfa.base) { 1883 WARN_FUNC("unsupported stack register modification", 1884 insn->sec, insn->offset); 1885 return -1; 1886 } 1887 1888 break; 1889 1890 case OP_SRC_AND: 1891 if (op->dest.reg != CFI_SP || 1892 (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) || 1893 (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) { 1894 WARN_FUNC("unsupported stack pointer realignment", 1895 insn->sec, insn->offset); 1896 return -1; 1897 } 1898 1899 if (cfi->drap_reg != CFI_UNDEFINED) { 1900 /* drap: and imm, %rsp */ 1901 cfa->base = cfi->drap_reg; 1902 cfa->offset = cfi->stack_size = 0; 1903 cfi->drap = true; 1904 } 1905 1906 /* 1907 * Older versions of GCC (4.8ish) realign the stack 1908 * without DRAP, with a frame pointer. 1909 */ 1910 1911 break; 1912 1913 case OP_SRC_POP: 1914 case OP_SRC_POPF: 1915 if (!cfi->drap && op->dest.reg == cfa->base) { 1916 1917 /* pop %rbp */ 1918 cfa->base = CFI_SP; 1919 } 1920 1921 if (cfi->drap && cfa->base == CFI_BP_INDIRECT && 1922 op->dest.reg == cfi->drap_reg && 1923 cfi->drap_offset == -cfi->stack_size) { 1924 1925 /* drap: pop %drap */ 1926 cfa->base = cfi->drap_reg; 1927 cfa->offset = 0; 1928 cfi->drap_offset = -1; 1929 1930 } else if (regs[op->dest.reg].offset == -cfi->stack_size) { 1931 1932 /* pop %reg */ 1933 restore_reg(cfi, op->dest.reg); 1934 } 1935 1936 cfi->stack_size -= 8; 1937 if (cfa->base == CFI_SP) 1938 cfa->offset -= 8; 1939 1940 break; 1941 1942 case OP_SRC_REG_INDIRECT: 1943 if (cfi->drap && op->src.reg == CFI_BP && 1944 op->src.offset == cfi->drap_offset) { 1945 1946 /* drap: mov disp(%rbp), %drap */ 1947 cfa->base = cfi->drap_reg; 1948 cfa->offset = 0; 1949 cfi->drap_offset = -1; 1950 } 1951 1952 if (cfi->drap && op->src.reg == CFI_BP && 1953 op->src.offset == regs[op->dest.reg].offset) { 1954 1955 /* drap: mov disp(%rbp), %reg */ 1956 restore_reg(cfi, op->dest.reg); 1957 1958 } else if (op->src.reg == cfa->base && 1959 op->src.offset == regs[op->dest.reg].offset + cfa->offset) { 1960 1961 /* mov disp(%rbp), %reg */ 1962 /* mov disp(%rsp), %reg */ 1963 restore_reg(cfi, op->dest.reg); 1964 } 1965 1966 break; 1967 1968 default: 1969 WARN_FUNC("unknown stack-related instruction", 1970 insn->sec, insn->offset); 1971 return -1; 1972 } 1973 1974 break; 1975 1976 case OP_DEST_PUSH: 1977 case OP_DEST_PUSHF: 1978 cfi->stack_size += 8; 1979 if (cfa->base == CFI_SP) 1980 cfa->offset += 8; 1981 1982 if (op->src.type != OP_SRC_REG) 1983 break; 1984 1985 if (cfi->drap) { 1986 if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) { 1987 1988 /* drap: push %drap */ 1989 cfa->base = CFI_BP_INDIRECT; 1990 cfa->offset = -cfi->stack_size; 1991 1992 /* save drap so we know when to restore it */ 1993 cfi->drap_offset = -cfi->stack_size; 1994 1995 } else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) { 1996 1997 /* drap: push %rbp */ 1998 cfi->stack_size = 0; 1999 2000 } else if (regs[op->src.reg].base == CFI_UNDEFINED) { 2001 2002 /* drap: push %reg */ 2003 save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size); 2004 } 2005 2006 } else { 2007 2008 /* push %reg */ 2009 save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size); 2010 } 2011 2012 /* detect when asm code uses rbp as a scratch register */ 2013 if (!no_fp && insn->func && op->src.reg == CFI_BP && 2014 cfa->base != CFI_BP) 2015 cfi->bp_scratch = true; 2016 break; 2017 2018 case OP_DEST_REG_INDIRECT: 2019 2020 if (cfi->drap) { 2021 if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) { 2022 2023 /* drap: mov %drap, disp(%rbp) */ 2024 cfa->base = CFI_BP_INDIRECT; 2025 cfa->offset = op->dest.offset; 2026 2027 /* save drap offset so we know when to restore it */ 2028 cfi->drap_offset = op->dest.offset; 2029 } 2030 2031 else if (regs[op->src.reg].base == CFI_UNDEFINED) { 2032 2033 /* drap: mov reg, disp(%rbp) */ 2034 save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset); 2035 } 2036 2037 } else if (op->dest.reg == cfa->base) { 2038 2039 /* mov reg, disp(%rbp) */ 2040 /* mov reg, disp(%rsp) */ 2041 save_reg(cfi, op->src.reg, CFI_CFA, 2042 op->dest.offset - cfi->cfa.offset); 2043 } 2044 2045 break; 2046 2047 case OP_DEST_LEAVE: 2048 if ((!cfi->drap && cfa->base != CFI_BP) || 2049 (cfi->drap && cfa->base != cfi->drap_reg)) { 2050 WARN_FUNC("leave instruction with modified stack frame", 2051 insn->sec, insn->offset); 2052 return -1; 2053 } 2054 2055 /* leave (mov %rbp, %rsp; pop %rbp) */ 2056 2057 cfi->stack_size = -cfi->regs[CFI_BP].offset - 8; 2058 restore_reg(cfi, CFI_BP); 2059 2060 if (!cfi->drap) { 2061 cfa->base = CFI_SP; 2062 cfa->offset -= 8; 2063 } 2064 2065 break; 2066 2067 case OP_DEST_MEM: 2068 if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) { 2069 WARN_FUNC("unknown stack-related memory operation", 2070 insn->sec, insn->offset); 2071 return -1; 2072 } 2073 2074 /* pop mem */ 2075 cfi->stack_size -= 8; 2076 if (cfa->base == CFI_SP) 2077 cfa->offset -= 8; 2078 2079 break; 2080 2081 default: 2082 WARN_FUNC("unknown stack-related instruction", 2083 insn->sec, insn->offset); 2084 return -1; 2085 } 2086 2087 return 0; 2088 } 2089 2090 static int handle_insn_ops(struct instruction *insn, struct insn_state *state) 2091 { 2092 struct stack_op *op; 2093 2094 list_for_each_entry(op, &insn->stack_ops, list) { 2095 struct cfi_state old_cfi = state->cfi; 2096 int res; 2097 2098 res = update_cfi_state(insn, &state->cfi, op); 2099 if (res) 2100 return res; 2101 2102 if (insn->alt_group && memcmp(&state->cfi, &old_cfi, sizeof(struct cfi_state))) { 2103 WARN_FUNC("alternative modifies stack", insn->sec, insn->offset); 2104 return -1; 2105 } 2106 2107 if (op->dest.type == OP_DEST_PUSHF) { 2108 if (!state->uaccess_stack) { 2109 state->uaccess_stack = 1; 2110 } else if (state->uaccess_stack >> 31) { 2111 WARN_FUNC("PUSHF stack exhausted", 2112 insn->sec, insn->offset); 2113 return 1; 2114 } 2115 state->uaccess_stack <<= 1; 2116 state->uaccess_stack |= state->uaccess; 2117 } 2118 2119 if (op->src.type == OP_SRC_POPF) { 2120 if (state->uaccess_stack) { 2121 state->uaccess = state->uaccess_stack & 1; 2122 state->uaccess_stack >>= 1; 2123 if (state->uaccess_stack == 1) 2124 state->uaccess_stack = 0; 2125 } 2126 } 2127 } 2128 2129 return 0; 2130 } 2131 2132 static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2) 2133 { 2134 struct cfi_state *cfi1 = &insn->cfi; 2135 int i; 2136 2137 if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) { 2138 2139 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d", 2140 insn->sec, insn->offset, 2141 cfi1->cfa.base, cfi1->cfa.offset, 2142 cfi2->cfa.base, cfi2->cfa.offset); 2143 2144 } else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) { 2145 for (i = 0; i < CFI_NUM_REGS; i++) { 2146 if (!memcmp(&cfi1->regs[i], &cfi2->regs[i], 2147 sizeof(struct cfi_reg))) 2148 continue; 2149 2150 WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d", 2151 insn->sec, insn->offset, 2152 i, cfi1->regs[i].base, cfi1->regs[i].offset, 2153 i, cfi2->regs[i].base, cfi2->regs[i].offset); 2154 break; 2155 } 2156 2157 } else if (cfi1->type != cfi2->type) { 2158 2159 WARN_FUNC("stack state mismatch: type1=%d type2=%d", 2160 insn->sec, insn->offset, cfi1->type, cfi2->type); 2161 2162 } else if (cfi1->drap != cfi2->drap || 2163 (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) || 2164 (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) { 2165 2166 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)", 2167 insn->sec, insn->offset, 2168 cfi1->drap, cfi1->drap_reg, cfi1->drap_offset, 2169 cfi2->drap, cfi2->drap_reg, cfi2->drap_offset); 2170 2171 } else 2172 return true; 2173 2174 return false; 2175 } 2176 2177 static inline bool func_uaccess_safe(struct symbol *func) 2178 { 2179 if (func) 2180 return func->uaccess_safe; 2181 2182 return false; 2183 } 2184 2185 static inline const char *call_dest_name(struct instruction *insn) 2186 { 2187 if (insn->call_dest) 2188 return insn->call_dest->name; 2189 2190 return "{dynamic}"; 2191 } 2192 2193 static int validate_call(struct instruction *insn, struct insn_state *state) 2194 { 2195 if (state->noinstr && state->instr <= 0 && 2196 (!insn->call_dest || !insn->call_dest->sec->noinstr)) { 2197 WARN_FUNC("call to %s() leaves .noinstr.text section", 2198 insn->sec, insn->offset, call_dest_name(insn)); 2199 return 1; 2200 } 2201 2202 if (state->uaccess && !func_uaccess_safe(insn->call_dest)) { 2203 WARN_FUNC("call to %s() with UACCESS enabled", 2204 insn->sec, insn->offset, call_dest_name(insn)); 2205 return 1; 2206 } 2207 2208 if (state->df) { 2209 WARN_FUNC("call to %s() with DF set", 2210 insn->sec, insn->offset, call_dest_name(insn)); 2211 return 1; 2212 } 2213 2214 return 0; 2215 } 2216 2217 static int validate_sibling_call(struct instruction *insn, struct insn_state *state) 2218 { 2219 if (has_modified_stack_frame(insn, state)) { 2220 WARN_FUNC("sibling call from callable instruction with modified stack frame", 2221 insn->sec, insn->offset); 2222 return 1; 2223 } 2224 2225 return validate_call(insn, state); 2226 } 2227 2228 static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state) 2229 { 2230 if (state->noinstr && state->instr > 0) { 2231 WARN_FUNC("return with instrumentation enabled", 2232 insn->sec, insn->offset); 2233 return 1; 2234 } 2235 2236 if (state->uaccess && !func_uaccess_safe(func)) { 2237 WARN_FUNC("return with UACCESS enabled", 2238 insn->sec, insn->offset); 2239 return 1; 2240 } 2241 2242 if (!state->uaccess && func_uaccess_safe(func)) { 2243 WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function", 2244 insn->sec, insn->offset); 2245 return 1; 2246 } 2247 2248 if (state->df) { 2249 WARN_FUNC("return with DF set", 2250 insn->sec, insn->offset); 2251 return 1; 2252 } 2253 2254 if (func && has_modified_stack_frame(insn, state)) { 2255 WARN_FUNC("return with modified stack frame", 2256 insn->sec, insn->offset); 2257 return 1; 2258 } 2259 2260 if (state->cfi.bp_scratch) { 2261 WARN_FUNC("BP used as a scratch register", 2262 insn->sec, insn->offset); 2263 return 1; 2264 } 2265 2266 return 0; 2267 } 2268 2269 /* 2270 * Alternatives should not contain any ORC entries, this in turn means they 2271 * should not contain any CFI ops, which implies all instructions should have 2272 * the same same CFI state. 2273 * 2274 * It is possible to constuct alternatives that have unreachable holes that go 2275 * unreported (because they're NOPs), such holes would result in CFI_UNDEFINED 2276 * states which then results in ORC entries, which we just said we didn't want. 2277 * 2278 * Avoid them by copying the CFI entry of the first instruction into the whole 2279 * alternative. 2280 */ 2281 static void fill_alternative_cfi(struct objtool_file *file, struct instruction *insn) 2282 { 2283 struct instruction *first_insn = insn; 2284 int alt_group = insn->alt_group; 2285 2286 sec_for_each_insn_continue(file, insn) { 2287 if (insn->alt_group != alt_group) 2288 break; 2289 insn->cfi = first_insn->cfi; 2290 } 2291 } 2292 2293 /* 2294 * Follow the branch starting at the given instruction, and recursively follow 2295 * any other branches (jumps). Meanwhile, track the frame pointer state at 2296 * each instruction and validate all the rules described in 2297 * tools/objtool/Documentation/stack-validation.txt. 2298 */ 2299 static int validate_branch(struct objtool_file *file, struct symbol *func, 2300 struct instruction *insn, struct insn_state state) 2301 { 2302 struct alternative *alt; 2303 struct instruction *next_insn; 2304 struct section *sec; 2305 u8 visited; 2306 int ret; 2307 2308 sec = insn->sec; 2309 2310 while (1) { 2311 next_insn = next_insn_same_sec(file, insn); 2312 2313 if (file->c_file && func && insn->func && func != insn->func->pfunc) { 2314 WARN("%s() falls through to next function %s()", 2315 func->name, insn->func->name); 2316 return 1; 2317 } 2318 2319 if (func && insn->ignore) { 2320 WARN_FUNC("BUG: why am I validating an ignored function?", 2321 sec, insn->offset); 2322 return 1; 2323 } 2324 2325 visited = 1 << state.uaccess; 2326 if (insn->visited) { 2327 if (!insn->hint && !insn_cfi_match(insn, &state.cfi)) 2328 return 1; 2329 2330 if (insn->visited & visited) 2331 return 0; 2332 } 2333 2334 if (state.noinstr) 2335 state.instr += insn->instr; 2336 2337 if (insn->hint) 2338 state.cfi = insn->cfi; 2339 else 2340 insn->cfi = state.cfi; 2341 2342 insn->visited |= visited; 2343 2344 if (!insn->ignore_alts && !list_empty(&insn->alts)) { 2345 bool skip_orig = false; 2346 2347 list_for_each_entry(alt, &insn->alts, list) { 2348 if (alt->skip_orig) 2349 skip_orig = true; 2350 2351 ret = validate_branch(file, func, alt->insn, state); 2352 if (ret) { 2353 if (backtrace) 2354 BT_FUNC("(alt)", insn); 2355 return ret; 2356 } 2357 } 2358 2359 if (insn->alt_group) 2360 fill_alternative_cfi(file, insn); 2361 2362 if (skip_orig) 2363 return 0; 2364 } 2365 2366 if (handle_insn_ops(insn, &state)) 2367 return 1; 2368 2369 switch (insn->type) { 2370 2371 case INSN_RETURN: 2372 return validate_return(func, insn, &state); 2373 2374 case INSN_CALL: 2375 case INSN_CALL_DYNAMIC: 2376 ret = validate_call(insn, &state); 2377 if (ret) 2378 return ret; 2379 2380 if (!no_fp && func && !is_fentry_call(insn) && 2381 !has_valid_stack_frame(&state)) { 2382 WARN_FUNC("call without frame pointer save/setup", 2383 sec, insn->offset); 2384 return 1; 2385 } 2386 2387 if (dead_end_function(file, insn->call_dest)) 2388 return 0; 2389 2390 break; 2391 2392 case INSN_JUMP_CONDITIONAL: 2393 case INSN_JUMP_UNCONDITIONAL: 2394 if (func && is_sibling_call(insn)) { 2395 ret = validate_sibling_call(insn, &state); 2396 if (ret) 2397 return ret; 2398 2399 } else if (insn->jump_dest) { 2400 ret = validate_branch(file, func, 2401 insn->jump_dest, state); 2402 if (ret) { 2403 if (backtrace) 2404 BT_FUNC("(branch)", insn); 2405 return ret; 2406 } 2407 } 2408 2409 if (insn->type == INSN_JUMP_UNCONDITIONAL) 2410 return 0; 2411 2412 break; 2413 2414 case INSN_JUMP_DYNAMIC: 2415 case INSN_JUMP_DYNAMIC_CONDITIONAL: 2416 if (func && is_sibling_call(insn)) { 2417 ret = validate_sibling_call(insn, &state); 2418 if (ret) 2419 return ret; 2420 } 2421 2422 if (insn->type == INSN_JUMP_DYNAMIC) 2423 return 0; 2424 2425 break; 2426 2427 case INSN_CONTEXT_SWITCH: 2428 if (func && (!next_insn || !next_insn->hint)) { 2429 WARN_FUNC("unsupported instruction in callable function", 2430 sec, insn->offset); 2431 return 1; 2432 } 2433 return 0; 2434 2435 case INSN_STAC: 2436 if (state.uaccess) { 2437 WARN_FUNC("recursive UACCESS enable", sec, insn->offset); 2438 return 1; 2439 } 2440 2441 state.uaccess = true; 2442 break; 2443 2444 case INSN_CLAC: 2445 if (!state.uaccess && func) { 2446 WARN_FUNC("redundant UACCESS disable", sec, insn->offset); 2447 return 1; 2448 } 2449 2450 if (func_uaccess_safe(func) && !state.uaccess_stack) { 2451 WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset); 2452 return 1; 2453 } 2454 2455 state.uaccess = false; 2456 break; 2457 2458 case INSN_STD: 2459 if (state.df) 2460 WARN_FUNC("recursive STD", sec, insn->offset); 2461 2462 state.df = true; 2463 break; 2464 2465 case INSN_CLD: 2466 if (!state.df && func) 2467 WARN_FUNC("redundant CLD", sec, insn->offset); 2468 2469 state.df = false; 2470 break; 2471 2472 default: 2473 break; 2474 } 2475 2476 if (insn->dead_end) 2477 return 0; 2478 2479 if (!next_insn) { 2480 if (state.cfi.cfa.base == CFI_UNDEFINED) 2481 return 0; 2482 WARN("%s: unexpected end of section", sec->name); 2483 return 1; 2484 } 2485 2486 insn = next_insn; 2487 } 2488 2489 return 0; 2490 } 2491 2492 static int validate_unwind_hints(struct objtool_file *file, struct section *sec) 2493 { 2494 struct instruction *insn; 2495 struct insn_state state; 2496 int ret, warnings = 0; 2497 2498 if (!file->hints) 2499 return 0; 2500 2501 init_insn_state(&state, sec); 2502 2503 if (sec) { 2504 insn = find_insn(file, sec, 0); 2505 if (!insn) 2506 return 0; 2507 } else { 2508 insn = list_first_entry(&file->insn_list, typeof(*insn), list); 2509 } 2510 2511 while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) { 2512 if (insn->hint && !insn->visited) { 2513 ret = validate_branch(file, insn->func, insn, state); 2514 if (ret && backtrace) 2515 BT_FUNC("<=== (hint)", insn); 2516 warnings += ret; 2517 } 2518 2519 insn = list_next_entry(insn, list); 2520 } 2521 2522 return warnings; 2523 } 2524 2525 static int validate_retpoline(struct objtool_file *file) 2526 { 2527 struct instruction *insn; 2528 int warnings = 0; 2529 2530 for_each_insn(file, insn) { 2531 if (insn->type != INSN_JUMP_DYNAMIC && 2532 insn->type != INSN_CALL_DYNAMIC) 2533 continue; 2534 2535 if (insn->retpoline_safe) 2536 continue; 2537 2538 /* 2539 * .init.text code is ran before userspace and thus doesn't 2540 * strictly need retpolines, except for modules which are 2541 * loaded late, they very much do need retpoline in their 2542 * .init.text 2543 */ 2544 if (!strcmp(insn->sec->name, ".init.text") && !module) 2545 continue; 2546 2547 WARN_FUNC("indirect %s found in RETPOLINE build", 2548 insn->sec, insn->offset, 2549 insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call"); 2550 2551 warnings++; 2552 } 2553 2554 return warnings; 2555 } 2556 2557 static bool is_kasan_insn(struct instruction *insn) 2558 { 2559 return (insn->type == INSN_CALL && 2560 !strcmp(insn->call_dest->name, "__asan_handle_no_return")); 2561 } 2562 2563 static bool is_ubsan_insn(struct instruction *insn) 2564 { 2565 return (insn->type == INSN_CALL && 2566 !strcmp(insn->call_dest->name, 2567 "__ubsan_handle_builtin_unreachable")); 2568 } 2569 2570 static bool ignore_unreachable_insn(struct instruction *insn) 2571 { 2572 int i; 2573 2574 if (insn->ignore || insn->type == INSN_NOP) 2575 return true; 2576 2577 /* 2578 * Ignore any unused exceptions. This can happen when a whitelisted 2579 * function has an exception table entry. 2580 * 2581 * Also ignore alternative replacement instructions. This can happen 2582 * when a whitelisted function uses one of the ALTERNATIVE macros. 2583 */ 2584 if (!strcmp(insn->sec->name, ".fixup") || 2585 !strcmp(insn->sec->name, ".altinstr_replacement") || 2586 !strcmp(insn->sec->name, ".altinstr_aux")) 2587 return true; 2588 2589 if (!insn->func) 2590 return false; 2591 2592 /* 2593 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees 2594 * __builtin_unreachable(). The BUG() macro has an unreachable() after 2595 * the UD2, which causes GCC's undefined trap logic to emit another UD2 2596 * (or occasionally a JMP to UD2). 2597 */ 2598 if (list_prev_entry(insn, list)->dead_end && 2599 (insn->type == INSN_BUG || 2600 (insn->type == INSN_JUMP_UNCONDITIONAL && 2601 insn->jump_dest && insn->jump_dest->type == INSN_BUG))) 2602 return true; 2603 2604 /* 2605 * Check if this (or a subsequent) instruction is related to 2606 * CONFIG_UBSAN or CONFIG_KASAN. 2607 * 2608 * End the search at 5 instructions to avoid going into the weeds. 2609 */ 2610 for (i = 0; i < 5; i++) { 2611 2612 if (is_kasan_insn(insn) || is_ubsan_insn(insn)) 2613 return true; 2614 2615 if (insn->type == INSN_JUMP_UNCONDITIONAL) { 2616 if (insn->jump_dest && 2617 insn->jump_dest->func == insn->func) { 2618 insn = insn->jump_dest; 2619 continue; 2620 } 2621 2622 break; 2623 } 2624 2625 if (insn->offset + insn->len >= insn->func->offset + insn->func->len) 2626 break; 2627 2628 insn = list_next_entry(insn, list); 2629 } 2630 2631 return false; 2632 } 2633 2634 static int validate_symbol(struct objtool_file *file, struct section *sec, 2635 struct symbol *sym, struct insn_state *state) 2636 { 2637 struct instruction *insn; 2638 int ret; 2639 2640 if (!sym->len) { 2641 WARN("%s() is missing an ELF size annotation", sym->name); 2642 return 1; 2643 } 2644 2645 if (sym->pfunc != sym || sym->alias != sym) 2646 return 0; 2647 2648 insn = find_insn(file, sec, sym->offset); 2649 if (!insn || insn->ignore || insn->visited) 2650 return 0; 2651 2652 state->uaccess = sym->uaccess_safe; 2653 2654 ret = validate_branch(file, insn->func, insn, *state); 2655 if (ret && backtrace) 2656 BT_FUNC("<=== (sym)", insn); 2657 return ret; 2658 } 2659 2660 static int validate_section(struct objtool_file *file, struct section *sec) 2661 { 2662 struct insn_state state; 2663 struct symbol *func; 2664 int warnings = 0; 2665 2666 list_for_each_entry(func, &sec->symbol_list, list) { 2667 if (func->type != STT_FUNC) 2668 continue; 2669 2670 init_insn_state(&state, sec); 2671 state.cfi.cfa = initial_func_cfi.cfa; 2672 memcpy(&state.cfi.regs, &initial_func_cfi.regs, 2673 CFI_NUM_REGS * sizeof(struct cfi_reg)); 2674 state.cfi.stack_size = initial_func_cfi.cfa.offset; 2675 2676 warnings += validate_symbol(file, sec, func, &state); 2677 } 2678 2679 return warnings; 2680 } 2681 2682 static int validate_vmlinux_functions(struct objtool_file *file) 2683 { 2684 struct section *sec; 2685 int warnings = 0; 2686 2687 sec = find_section_by_name(file->elf, ".noinstr.text"); 2688 if (sec) { 2689 warnings += validate_section(file, sec); 2690 warnings += validate_unwind_hints(file, sec); 2691 } 2692 2693 sec = find_section_by_name(file->elf, ".entry.text"); 2694 if (sec) { 2695 warnings += validate_section(file, sec); 2696 warnings += validate_unwind_hints(file, sec); 2697 } 2698 2699 return warnings; 2700 } 2701 2702 static int validate_functions(struct objtool_file *file) 2703 { 2704 struct section *sec; 2705 int warnings = 0; 2706 2707 for_each_sec(file, sec) { 2708 if (!(sec->sh.sh_flags & SHF_EXECINSTR)) 2709 continue; 2710 2711 warnings += validate_section(file, sec); 2712 } 2713 2714 return warnings; 2715 } 2716 2717 static int validate_reachable_instructions(struct objtool_file *file) 2718 { 2719 struct instruction *insn; 2720 2721 if (file->ignore_unreachables) 2722 return 0; 2723 2724 for_each_insn(file, insn) { 2725 if (insn->visited || ignore_unreachable_insn(insn)) 2726 continue; 2727 2728 WARN_FUNC("unreachable instruction", insn->sec, insn->offset); 2729 return 1; 2730 } 2731 2732 return 0; 2733 } 2734 2735 static struct objtool_file file; 2736 2737 int check(const char *_objname, bool orc) 2738 { 2739 int ret, warnings = 0; 2740 2741 objname = _objname; 2742 2743 file.elf = elf_open_read(objname, orc ? O_RDWR : O_RDONLY); 2744 if (!file.elf) 2745 return 1; 2746 2747 INIT_LIST_HEAD(&file.insn_list); 2748 hash_init(file.insn_hash); 2749 file.c_file = find_section_by_name(file.elf, ".comment"); 2750 file.ignore_unreachables = no_unreachable; 2751 file.hints = false; 2752 2753 arch_initial_func_cfi_state(&initial_func_cfi); 2754 2755 ret = decode_sections(&file); 2756 if (ret < 0) 2757 goto out; 2758 warnings += ret; 2759 2760 if (list_empty(&file.insn_list)) 2761 goto out; 2762 2763 if (vmlinux && !validate_dup) { 2764 ret = validate_vmlinux_functions(&file); 2765 if (ret < 0) 2766 goto out; 2767 2768 warnings += ret; 2769 goto out; 2770 } 2771 2772 if (retpoline) { 2773 ret = validate_retpoline(&file); 2774 if (ret < 0) 2775 return ret; 2776 warnings += ret; 2777 } 2778 2779 ret = validate_functions(&file); 2780 if (ret < 0) 2781 goto out; 2782 warnings += ret; 2783 2784 ret = validate_unwind_hints(&file, NULL); 2785 if (ret < 0) 2786 goto out; 2787 warnings += ret; 2788 2789 if (!warnings) { 2790 ret = validate_reachable_instructions(&file); 2791 if (ret < 0) 2792 goto out; 2793 warnings += ret; 2794 } 2795 2796 if (orc) { 2797 ret = create_orc(&file); 2798 if (ret < 0) 2799 goto out; 2800 2801 ret = create_orc_sections(&file); 2802 if (ret < 0) 2803 goto out; 2804 2805 ret = elf_write(file.elf); 2806 if (ret < 0) 2807 goto out; 2808 } 2809 2810 out: 2811 if (ret < 0) { 2812 /* 2813 * Fatal error. The binary is corrupt or otherwise broken in 2814 * some way, or objtool itself is broken. Fail the kernel 2815 * build. 2816 */ 2817 return ret; 2818 } 2819 2820 return 0; 2821 } 2822