xref: /openbmc/linux/tools/objtool/check.c (revision adb57164)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5 
6 #include <string.h>
7 #include <stdlib.h>
8 
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15 
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18 
19 #define FAKE_JUMP_OFFSET -1
20 
21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table"
22 
23 struct alternative {
24 	struct list_head list;
25 	struct instruction *insn;
26 	bool skip_orig;
27 };
28 
29 const char *objname;
30 struct cfi_state initial_func_cfi;
31 
32 struct instruction *find_insn(struct objtool_file *file,
33 			      struct section *sec, unsigned long offset)
34 {
35 	struct instruction *insn;
36 
37 	hash_for_each_possible(file->insn_hash, insn, hash, offset)
38 		if (insn->sec == sec && insn->offset == offset)
39 			return insn;
40 
41 	return NULL;
42 }
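
/*
 * Editor's note (illustrative, not part of this file): instructions are hashed
 * by offset alone (see the hash_add() call in decode_instructions()), so a
 * lookup must compare the section as well as the offset, as done above.  A
 * hypothetical caller that only cares about existence could wrap it as:
 *
 *	static bool has_insn_at(struct objtool_file *file,
 *				struct section *sec, unsigned long offset)
 *	{
 *		return find_insn(file, sec, offset) != NULL;
 *	}
 */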
43 
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 					      struct instruction *insn)
46 {
47 	struct instruction *next = list_next_entry(insn, list);
48 
49 	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 		return NULL;
51 
52 	return next;
53 }
54 
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 					       struct instruction *insn)
57 {
58 	struct instruction *next = list_next_entry(insn, list);
59 	struct symbol *func = insn->func;
60 
61 	if (!func)
62 		return NULL;
63 
64 	if (&next->list != &file->insn_list && next->func == func)
65 		return next;
66 
67 	/* Check if we're already in the subfunction: */
68 	if (func == func->cfunc)
69 		return NULL;
70 
71 	/* Move to the subfunction: */
72 	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
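
/*
 * Editor's note (illustrative, names hypothetical): with GCC 8+ a function
 * "foo" may have its unlikely code split out into a cold subfunction
 * "foo.cold.0".  read_symbols() and the ".cold." handling in
 * add_jump_destinations() below link the two via pfunc/cfunc, so
 * next_insn_same_func() hops from the last instruction of "foo" to the first
 * instruction of "foo.cold.0" and func_for_each_insn() covers both halves.
 */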
74 
75 #define func_for_each_insn(file, func, insn)				\
76 	for (insn = find_insn(file, func->sec, func->offset);		\
77 	     insn;							\
78 	     insn = next_insn_same_func(file, insn))
79 
80 #define sym_for_each_insn(file, sym, insn)				\
81 	for (insn = find_insn(file, sym->sec, sym->offset);		\
82 	     insn && &insn->list != &file->insn_list &&			\
83 		insn->sec == sym->sec &&				\
84 		insn->offset < sym->offset + sym->len;			\
85 	     insn = list_next_entry(insn, list))
86 
87 #define sym_for_each_insn_continue_reverse(file, sym, insn)		\
88 	for (insn = list_prev_entry(insn, list);			\
89 	     &insn->list != &file->insn_list &&				\
90 		insn->sec == sym->sec && insn->offset >= sym->offset;	\
91 	     insn = list_prev_entry(insn, list))
92 
93 #define sec_for_each_insn_from(file, insn)				\
94 	for (; insn; insn = next_insn_same_sec(file, insn))
95 
96 #define sec_for_each_insn_continue(file, insn)				\
97 	for (insn = next_insn_same_sec(file, insn); insn;		\
98 	     insn = next_insn_same_sec(file, insn))
99 
100 static bool is_static_jump(struct instruction *insn)
101 {
102 	return insn->type == INSN_JUMP_CONDITIONAL ||
103 	       insn->type == INSN_JUMP_UNCONDITIONAL;
104 }
105 
106 static bool is_sibling_call(struct instruction *insn)
107 {
108 	/* An indirect jump is either a sibling call or a jump to a table. */
109 	if (insn->type == INSN_JUMP_DYNAMIC)
110 		return list_empty(&insn->alts);
111 
112 	if (!is_static_jump(insn))
113 		return false;
114 
115 	/* add_jump_destinations() sets insn->call_dest for sibling calls. */
116 	return !!insn->call_dest;
117 }
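
/*
 * Editor's illustrative examples (not from the source):
 *
 *	jmp	other_func		<- static jump out of the function;
 *					   add_jump_destinations() sets
 *					   insn->call_dest, so it's a sibling
 *					   (tail) call
 *
 *	jmpq	*table(,%rax,8)		<- indirect jump; if a jump table is
 *					   found it gets insn->alts entries and
 *					   is not a sibling call, otherwise it
 *					   is treated as an indirect tail call
 */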
118 
119 /*
120  * This checks to see if the given function is a "noreturn" function.
121  *
122  * For global functions which are outside the scope of this object file, we
123  * have to keep a manual list of them.
124  *
125  * For local functions, we have to detect them manually by simply looking for
126  * the lack of a return instruction.
127  */
128 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
129 				int recursion)
130 {
131 	int i;
132 	struct instruction *insn;
133 	bool empty = true;
134 
135 	/*
136 	 * Unfortunately these have to be hard coded because the noreturn
137 	 * attribute isn't provided in ELF data.
138 	 */
139 	static const char * const global_noreturns[] = {
140 		"__stack_chk_fail",
141 		"panic",
142 		"do_exit",
143 		"do_task_dead",
144 		"__module_put_and_exit",
145 		"complete_and_exit",
146 		"__reiserfs_panic",
147 		"lbug_with_loc",
148 		"fortify_panic",
149 		"usercopy_abort",
150 		"machine_real_restart",
151 		"rewind_stack_do_exit",
152 		"kunit_try_catch_throw",
153 	};
154 
155 	if (!func)
156 		return false;
157 
158 	if (func->bind == STB_WEAK)
159 		return false;
160 
161 	if (func->bind == STB_GLOBAL)
162 		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
163 			if (!strcmp(func->name, global_noreturns[i]))
164 				return true;
165 
166 	if (!func->len)
167 		return false;
168 
169 	insn = find_insn(file, func->sec, func->offset);
170 	if (!insn->func)
171 		return false;
172 
173 	func_for_each_insn(file, func, insn) {
174 		empty = false;
175 
176 		if (insn->type == INSN_RETURN)
177 			return false;
178 	}
179 
180 	if (empty)
181 		return false;
182 
183 	/*
184 	 * A function can have a sibling call instead of a return.  In that
185 	 * case, the function's dead-end status depends on whether the target
186 	 * of the sibling call returns.
187 	 */
188 	func_for_each_insn(file, func, insn) {
189 		if (is_sibling_call(insn)) {
190 			struct instruction *dest = insn->jump_dest;
191 
192 			if (!dest)
193 				/* sibling call to another file */
194 				return false;
195 
196 			/* local sibling call */
197 			if (recursion == 5) {
198 				/*
199 				 * Infinite recursion: two functions have
200 				 * sibling calls to each other.  This is a very
201 				 * rare case.  It means they aren't dead ends.
202 				 */
203 				return false;
204 			}
205 
206 			return __dead_end_function(file, dest->func, recursion+1);
207 		}
208 	}
209 
210 	return true;
211 }
212 
213 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
214 {
215 	return __dead_end_function(file, func, 0);
216 }
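
/*
 * Editor's illustrative example (hypothetical): a local function such as
 *
 *	local_halt:
 *	1:	hlt
 *		jmp	1b
 *
 * has no INSN_RETURN and no sibling call to a returning function, so
 * __dead_end_function() reports it as a dead end.
 */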
217 
218 static void clear_insn_state(struct insn_state *state)
219 {
220 	int i;
221 
222 	memset(state, 0, sizeof(*state));
223 	state->cfa.base = CFI_UNDEFINED;
224 	for (i = 0; i < CFI_NUM_REGS; i++) {
225 		state->regs[i].base = CFI_UNDEFINED;
226 		state->vals[i].base = CFI_UNDEFINED;
227 	}
228 	state->drap_reg = CFI_UNDEFINED;
229 	state->drap_offset = -1;
230 }
231 
232 /*
233  * Call the arch-specific instruction decoder for all the instructions and add
234  * them to the global instruction list.
235  */
236 static int decode_instructions(struct objtool_file *file)
237 {
238 	struct section *sec;
239 	struct symbol *func;
240 	unsigned long offset;
241 	struct instruction *insn;
242 	unsigned long nr_insns = 0;
243 	int ret;
244 
245 	for_each_sec(file, sec) {
246 
247 		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
248 			continue;
249 
250 		if (strcmp(sec->name, ".altinstr_replacement") &&
251 		    strcmp(sec->name, ".altinstr_aux") &&
252 		    strncmp(sec->name, ".discard.", 9))
253 			sec->text = true;
254 
255 		for (offset = 0; offset < sec->len; offset += insn->len) {
256 			insn = malloc(sizeof(*insn));
257 			if (!insn) {
258 				WARN("malloc failed");
259 				return -1;
260 			}
261 			memset(insn, 0, sizeof(*insn));
262 			INIT_LIST_HEAD(&insn->alts);
263 			clear_insn_state(&insn->state);
264 
265 			insn->sec = sec;
266 			insn->offset = offset;
267 
268 			ret = arch_decode_instruction(file->elf, sec, offset,
269 						      sec->len - offset,
270 						      &insn->len, &insn->type,
271 						      &insn->immediate,
272 						      &insn->stack_op);
273 			if (ret)
274 				goto err;
275 
276 			hash_add(file->insn_hash, &insn->hash, insn->offset);
277 			list_add_tail(&insn->list, &file->insn_list);
278 			nr_insns++;
279 		}
280 
281 		list_for_each_entry(func, &sec->symbol_list, list) {
282 			if (func->type != STT_FUNC || func->alias != func)
283 				continue;
284 
285 			if (!find_insn(file, sec, func->offset)) {
286 				WARN("%s(): can't find starting instruction",
287 				     func->name);
288 				return -1;
289 			}
290 
291 			sym_for_each_insn(file, func, insn)
292 				insn->func = func;
293 		}
294 	}
295 
296 	if (stats)
297 		printf("nr_insns: %lu\n", nr_insns);
298 
299 	return 0;
300 
301 err:
302 	free(insn);
303 	return ret;
304 }
305 
306 /*
307  * Mark "ud2" instructions and manually annotated dead ends.
308  */
309 static int add_dead_ends(struct objtool_file *file)
310 {
311 	struct section *sec;
312 	struct rela *rela;
313 	struct instruction *insn;
314 	bool found;
315 
316 	/*
317 	 * By default, "ud2" is a dead end unless otherwise annotated, because
318 	 * GCC 7 inserts it for certain divide-by-zero cases.
319 	 */
320 	for_each_insn(file, insn)
321 		if (insn->type == INSN_BUG)
322 			insn->dead_end = true;
323 
324 	/*
325 	 * Check for manually annotated dead ends.
326 	 */
327 	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
328 	if (!sec)
329 		goto reachable;
330 
331 	list_for_each_entry(rela, &sec->rela_list, list) {
332 		if (rela->sym->type != STT_SECTION) {
333 			WARN("unexpected relocation symbol type in %s", sec->name);
334 			return -1;
335 		}
336 		insn = find_insn(file, rela->sym->sec, rela->addend);
337 		if (insn)
338 			insn = list_prev_entry(insn, list);
339 		else if (rela->addend == rela->sym->sec->len) {
340 			found = false;
341 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
342 				if (insn->sec == rela->sym->sec) {
343 					found = true;
344 					break;
345 				}
346 			}
347 
348 			if (!found) {
349 				WARN("can't find unreachable insn at %s+0x%x",
350 				     rela->sym->sec->name, rela->addend);
351 				return -1;
352 			}
353 		} else {
354 			WARN("can't find unreachable insn at %s+0x%x",
355 			     rela->sym->sec->name, rela->addend);
356 			return -1;
357 		}
358 
359 		insn->dead_end = true;
360 	}
361 
362 reachable:
363 	/*
364 	 * These manually annotated reachable checks are needed for GCC 4.4,
365 	 * where the Linux unreachable() macro isn't supported.  In that case
366 	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
367 	 * not a dead end.
368 	 */
369 	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
370 	if (!sec)
371 		return 0;
372 
373 	list_for_each_entry(rela, &sec->rela_list, list) {
374 		if (rela->sym->type != STT_SECTION) {
375 			WARN("unexpected relocation symbol type in %s", sec->name);
376 			return -1;
377 		}
378 		insn = find_insn(file, rela->sym->sec, rela->addend);
379 		if (insn)
380 			insn = list_prev_entry(insn, list);
381 		else if (rela->addend == rela->sym->sec->len) {
382 			found = false;
383 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
384 				if (insn->sec == rela->sym->sec) {
385 					found = true;
386 					break;
387 				}
388 			}
389 
390 			if (!found) {
391 				WARN("can't find reachable insn at %s+0x%x",
392 				     rela->sym->sec->name, rela->addend);
393 				return -1;
394 			}
395 		} else {
396 			WARN("can't find reachable insn at %s+0x%x",
397 			     rela->sym->sec->name, rela->addend);
398 			return -1;
399 		}
400 
401 		insn->dead_end = false;
402 	}
403 
404 	return 0;
405 }
406 
407 /*
408  * Warnings shouldn't be reported for ignored functions.
409  */
410 static void add_ignores(struct objtool_file *file)
411 {
412 	struct instruction *insn;
413 	struct section *sec;
414 	struct symbol *func;
415 	struct rela *rela;
416 
417 	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
418 	if (!sec)
419 		return;
420 
421 	list_for_each_entry(rela, &sec->rela_list, list) {
422 		switch (rela->sym->type) {
423 		case STT_FUNC:
424 			func = rela->sym;
425 			break;
426 
427 		case STT_SECTION:
428 			func = find_func_by_offset(rela->sym->sec, rela->addend);
429 			if (!func)
430 				continue;
431 			break;
432 
433 		default:
434 			WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
435 			continue;
436 		}
437 
438 		func_for_each_insn(file, func, insn)
439 			insn->ignore = true;
440 	}
441 }
442 
443 /*
444  * This is a whitelist of functions that are allowed to be called with AC set.
445  * The list is meant to be minimal and only contains compiler instrumentation
446  * ABI and a few functions used to implement *_{to,from}_user() functions.
447  *
448  * These functions must not directly change AC, but may PUSHF/POPF.
449  */
450 static const char *uaccess_safe_builtin[] = {
451 	/* KASAN */
452 	"kasan_report",
453 	"check_memory_region",
454 	/* KASAN out-of-line */
455 	"__asan_loadN_noabort",
456 	"__asan_load1_noabort",
457 	"__asan_load2_noabort",
458 	"__asan_load4_noabort",
459 	"__asan_load8_noabort",
460 	"__asan_load16_noabort",
461 	"__asan_storeN_noabort",
462 	"__asan_store1_noabort",
463 	"__asan_store2_noabort",
464 	"__asan_store4_noabort",
465 	"__asan_store8_noabort",
466 	"__asan_store16_noabort",
467 	/* KASAN in-line */
468 	"__asan_report_load_n_noabort",
469 	"__asan_report_load1_noabort",
470 	"__asan_report_load2_noabort",
471 	"__asan_report_load4_noabort",
472 	"__asan_report_load8_noabort",
473 	"__asan_report_load16_noabort",
474 	"__asan_report_store_n_noabort",
475 	"__asan_report_store1_noabort",
476 	"__asan_report_store2_noabort",
477 	"__asan_report_store4_noabort",
478 	"__asan_report_store8_noabort",
479 	"__asan_report_store16_noabort",
480 	/* KCOV */
481 	"write_comp_data",
482 	"__sanitizer_cov_trace_pc",
483 	"__sanitizer_cov_trace_const_cmp1",
484 	"__sanitizer_cov_trace_const_cmp2",
485 	"__sanitizer_cov_trace_const_cmp4",
486 	"__sanitizer_cov_trace_const_cmp8",
487 	"__sanitizer_cov_trace_cmp1",
488 	"__sanitizer_cov_trace_cmp2",
489 	"__sanitizer_cov_trace_cmp4",
490 	"__sanitizer_cov_trace_cmp8",
491 	"__sanitizer_cov_trace_switch",
492 	/* UBSAN */
493 	"ubsan_type_mismatch_common",
494 	"__ubsan_handle_type_mismatch",
495 	"__ubsan_handle_type_mismatch_v1",
496 	"__ubsan_handle_shift_out_of_bounds",
497 	/* misc */
498 	"csum_partial_copy_generic",
499 	"__memcpy_mcsafe",
500 	"mcsafe_handle_tail",
501 	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
502 	NULL
503 };
504 
505 static void add_uaccess_safe(struct objtool_file *file)
506 {
507 	struct symbol *func;
508 	const char **name;
509 
510 	if (!uaccess)
511 		return;
512 
513 	for (name = uaccess_safe_builtin; *name; name++) {
514 		func = find_symbol_by_name(file->elf, *name);
515 		if (!func)
516 			continue;
517 
518 		func->uaccess_safe = true;
519 	}
520 }
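
/*
 * Editor's note (illustrative; STAC/CLAC are the usual x86 instructions that
 * set/clear AC): with the uaccess option enabled, validate_call() below warns
 * about any call made while AC is set unless the callee is on the list above:
 *
 *	stac
 *	call	__sanitizer_cov_trace_pc	<- whitelisted, no warning
 *	call	some_helper			<- warns: "call to some_helper()
 *						   with UACCESS enabled"
 *						   (hypothetical function)
 *	clac
 */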
521 
522 /*
523  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
524  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
525  * But it at least allows objtool to understand the control flow *around* the
526  * retpoline.
527  */
528 static int add_ignore_alternatives(struct objtool_file *file)
529 {
530 	struct section *sec;
531 	struct rela *rela;
532 	struct instruction *insn;
533 
534 	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
535 	if (!sec)
536 		return 0;
537 
538 	list_for_each_entry(rela, &sec->rela_list, list) {
539 		if (rela->sym->type != STT_SECTION) {
540 			WARN("unexpected relocation symbol type in %s", sec->name);
541 			return -1;
542 		}
543 
544 		insn = find_insn(file, rela->sym->sec, rela->addend);
545 		if (!insn) {
546 			WARN("bad .discard.ignore_alts entry");
547 			return -1;
548 		}
549 
550 		insn->ignore_alts = true;
551 	}
552 
553 	return 0;
554 }
555 
556 /*
557  * Find the destination instructions for all jumps.
558  */
559 static int add_jump_destinations(struct objtool_file *file)
560 {
561 	struct instruction *insn;
562 	struct rela *rela;
563 	struct section *dest_sec;
564 	unsigned long dest_off;
565 
566 	for_each_insn(file, insn) {
567 		if (!is_static_jump(insn))
568 			continue;
569 
570 		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
571 			continue;
572 
573 		rela = find_rela_by_dest_range(file->elf, insn->sec,
574 					       insn->offset, insn->len);
575 		if (!rela) {
576 			dest_sec = insn->sec;
577 			dest_off = insn->offset + insn->len + insn->immediate;
578 		} else if (rela->sym->type == STT_SECTION) {
579 			dest_sec = rela->sym->sec;
580 			dest_off = rela->addend + 4;
581 		} else if (rela->sym->sec->idx) {
582 			dest_sec = rela->sym->sec;
583 			dest_off = rela->sym->sym.st_value + rela->addend + 4;
584 		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
585 			/*
586 			 * Retpoline jumps are really dynamic jumps in
587 			 * disguise, so convert them accordingly.
588 			 */
589 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
590 				insn->type = INSN_JUMP_DYNAMIC;
591 			else
592 				insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
593 
594 			insn->retpoline_safe = true;
595 			continue;
596 		} else {
597 			/* external sibling call */
598 			insn->call_dest = rela->sym;
599 			continue;
600 		}
601 
602 		insn->jump_dest = find_insn(file, dest_sec, dest_off);
603 		if (!insn->jump_dest) {
604 
605 			/*
606 			 * This is a special case where an alt instruction
607 			 * jumps past the end of the section.  These are
608 			 * handled later in handle_group_alt().
609 			 */
610 			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
611 				continue;
612 
613 			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
614 				  insn->sec, insn->offset, dest_sec->name,
615 				  dest_off);
616 			return -1;
617 		}
618 
619 		/*
620 		 * Cross-function jump.
621 		 */
622 		if (insn->func && insn->jump_dest->func &&
623 		    insn->func != insn->jump_dest->func) {
624 
625 			/*
626 			 * For GCC 8+, create parent/child links for any cold
627 			 * subfunctions.  This is _mostly_ redundant with a
628 			 * similar initialization in read_symbols().
629 			 *
630 			 * If a function has aliases, we want the *first* such
631 			 * function in the symbol table to be the subfunction's
632 			 * parent.  In that case we overwrite the
633 			 * initialization done in read_symbols().
634 			 *
635 			 * However this code can't completely replace the
636 			 * read_symbols() code because this doesn't detect the
637 			 * case where the parent function's only reference to a
638 			 * subfunction is through a jump table.
639 			 */
640 			if (!strstr(insn->func->name, ".cold.") &&
641 			    strstr(insn->jump_dest->func->name, ".cold.")) {
642 				insn->func->cfunc = insn->jump_dest->func;
643 				insn->jump_dest->func->pfunc = insn->func;
644 
645 			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
646 				   insn->jump_dest->offset == insn->jump_dest->func->offset) {
647 
648 				/* internal sibling call */
649 				insn->call_dest = insn->jump_dest->func;
650 			}
651 		}
652 	}
653 
654 	return 0;
655 }
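
/*
 * Editor's note on the "+ 4" above (also used in add_call_destinations()
 * below): the rel32 relocation is applied at the start of the 4-byte
 * immediate, while the CPU computes the branch target from the end of the
 * instruction, so the addend ends up 4 bytes short of the real destination
 * (typically -4 for a symbol-relative rela).  Adding 4 recovers the
 * destination offset.
 */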
656 
657 /*
658  * Find the destination instructions for all calls.
659  */
660 static int add_call_destinations(struct objtool_file *file)
661 {
662 	struct instruction *insn;
663 	unsigned long dest_off;
664 	struct rela *rela;
665 
666 	for_each_insn(file, insn) {
667 		if (insn->type != INSN_CALL)
668 			continue;
669 
670 		rela = find_rela_by_dest_range(file->elf, insn->sec,
671 					       insn->offset, insn->len);
672 		if (!rela) {
673 			dest_off = insn->offset + insn->len + insn->immediate;
674 			insn->call_dest = find_func_by_offset(insn->sec, dest_off);
675 			if (!insn->call_dest)
676 				insn->call_dest = find_symbol_by_offset(insn->sec, dest_off);
677 
678 			if (insn->ignore)
679 				continue;
680 
681 			if (!insn->call_dest) {
682 				WARN_FUNC("unsupported intra-function call",
683 					  insn->sec, insn->offset);
684 				if (retpoline)
685 					WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
686 				return -1;
687 			}
688 
689 			if (insn->func && insn->call_dest->type != STT_FUNC) {
690 				WARN_FUNC("unsupported call to non-function",
691 					  insn->sec, insn->offset);
692 				return -1;
693 			}
694 
695 		} else if (rela->sym->type == STT_SECTION) {
696 			insn->call_dest = find_func_by_offset(rela->sym->sec,
697 							      rela->addend+4);
698 			if (!insn->call_dest) {
699 				WARN_FUNC("can't find call dest symbol at %s+0x%x",
700 					  insn->sec, insn->offset,
701 					  rela->sym->sec->name,
702 					  rela->addend + 4);
703 				return -1;
704 			}
705 		} else
706 			insn->call_dest = rela->sym;
707 	}
708 
709 	return 0;
710 }
711 
712 /*
713  * The .alternatives section requires some extra special care, over and above
714  * what other special sections require:
715  *
716  * 1. Because alternatives are patched in-place, we need to insert a fake jump
717  *    instruction at the end so that validate_branch() skips all the original
718  *    replaced instructions when validating the new instruction path.
719  *
720  * 2. An added wrinkle is that the new instruction length might be zero.  In
721  *    that case the old instructions are replaced with noops.  We simulate that
722  *    by creating a fake jump as the only new instruction.
723  *
724  * 3. In some cases, the alternative section includes an instruction which
725  *    conditionally jumps to the _end_ of the entry.  We have to modify these
726  *    jumps' destinations to point back to .text rather than the end of the
727  *    entry in .altinstr_replacement.
728  */
729 static int handle_group_alt(struct objtool_file *file,
730 			    struct special_alt *special_alt,
731 			    struct instruction *orig_insn,
732 			    struct instruction **new_insn)
733 {
734 	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
735 	unsigned long dest_off;
736 
737 	last_orig_insn = NULL;
738 	insn = orig_insn;
739 	sec_for_each_insn_from(file, insn) {
740 		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
741 			break;
742 
743 		insn->alt_group = true;
744 		last_orig_insn = insn;
745 	}
746 
747 	if (next_insn_same_sec(file, last_orig_insn)) {
748 		fake_jump = malloc(sizeof(*fake_jump));
749 		if (!fake_jump) {
750 			WARN("malloc failed");
751 			return -1;
752 		}
753 		memset(fake_jump, 0, sizeof(*fake_jump));
754 		INIT_LIST_HEAD(&fake_jump->alts);
755 		clear_insn_state(&fake_jump->state);
756 
757 		fake_jump->sec = special_alt->new_sec;
758 		fake_jump->offset = FAKE_JUMP_OFFSET;
759 		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
760 		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
761 		fake_jump->func = orig_insn->func;
762 	}
763 
764 	if (!special_alt->new_len) {
765 		if (!fake_jump) {
766 			WARN("%s: empty alternative at end of section",
767 			     special_alt->orig_sec->name);
768 			return -1;
769 		}
770 
771 		*new_insn = fake_jump;
772 		return 0;
773 	}
774 
775 	last_new_insn = NULL;
776 	insn = *new_insn;
777 	sec_for_each_insn_from(file, insn) {
778 		if (insn->offset >= special_alt->new_off + special_alt->new_len)
779 			break;
780 
781 		last_new_insn = insn;
782 
783 		insn->ignore = orig_insn->ignore_alts;
784 		insn->func = orig_insn->func;
785 
786 		/*
787 		 * Since alternative replacement code is copy/pasted by the
788 		 * kernel after applying relocations, generally such code can't
789 		 * have relative-address relocation references to outside the
790 		 * .altinstr_replacement section, unless the arch's
791 		 * alternatives code can adjust the relative offsets
792 		 * accordingly.
793 		 *
794 		 * The x86 alternatives code adjusts the offsets only when it
795 		 * encounters a branch instruction at the very beginning of the
796 		 * replacement group.
797 		 */
798 		if ((insn->offset != special_alt->new_off ||
799 		    (insn->type != INSN_CALL && !is_static_jump(insn))) &&
800 		    find_rela_by_dest_range(file->elf, insn->sec, insn->offset, insn->len)) {
801 
802 			WARN_FUNC("unsupported relocation in alternatives section",
803 				  insn->sec, insn->offset);
804 			return -1;
805 		}
806 
807 		if (!is_static_jump(insn))
808 			continue;
809 
810 		if (!insn->immediate)
811 			continue;
812 
813 		dest_off = insn->offset + insn->len + insn->immediate;
814 		if (dest_off == special_alt->new_off + special_alt->new_len) {
815 			if (!fake_jump) {
816 				WARN("%s: alternative jump to end of section",
817 				     special_alt->orig_sec->name);
818 				return -1;
819 			}
820 			insn->jump_dest = fake_jump;
821 		}
822 
823 		if (!insn->jump_dest) {
824 			WARN_FUNC("can't find alternative jump destination",
825 				  insn->sec, insn->offset);
826 			return -1;
827 		}
828 	}
829 
830 	if (!last_new_insn) {
831 		WARN_FUNC("can't find last new alternative instruction",
832 			  special_alt->new_sec, special_alt->new_off);
833 		return -1;
834 	}
835 
836 	if (fake_jump)
837 		list_add(&fake_jump->list, &last_new_insn->list);
838 
839 	return 0;
840 }
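
/*
 * Editor's sketch (illustrative): for an alternative covering the original
 * instructions [orig_off, orig_off + orig_len), the fake jump is a synthetic
 * INSN_JUMP_UNCONDITIONAL at FAKE_JUMP_OFFSET whose jump_dest is the first
 * original instruction after the group.  It is appended after the last
 * replacement instruction, jumps inside the replacement that target the end
 * of the group are redirected to it, and an empty replacement becomes just
 * the fake jump, so validate_branch() resumes in the original section instead
 * of running off the end of .altinstr_replacement.
 */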
841 
842 /*
843  * A jump label entry can either convert a nop to a jump or a jump to a nop.
844  * If the original instruction is a jump, make the alt entry an effective nop
845  * by just skipping the original instruction.
846  */
847 static int handle_jump_alt(struct objtool_file *file,
848 			   struct special_alt *special_alt,
849 			   struct instruction *orig_insn,
850 			   struct instruction **new_insn)
851 {
852 	if (orig_insn->type == INSN_NOP)
853 		return 0;
854 
855 	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
856 		WARN_FUNC("unsupported instruction at jump label",
857 			  orig_insn->sec, orig_insn->offset);
858 		return -1;
859 	}
860 
861 	*new_insn = list_next_entry(orig_insn, list);
862 	return 0;
863 }
864 
865 /*
866  * Read all the special sections which have alternate instructions which can be
867  * patched in or redirected to at runtime.  Each instruction having alternate
868  * instruction(s) has them added to its insn->alts list, which will be
869  * traversed in validate_branch().
870  */
871 static int add_special_section_alts(struct objtool_file *file)
872 {
873 	struct list_head special_alts;
874 	struct instruction *orig_insn, *new_insn;
875 	struct special_alt *special_alt, *tmp;
876 	struct alternative *alt;
877 	int ret;
878 
879 	ret = special_get_alts(file->elf, &special_alts);
880 	if (ret)
881 		return ret;
882 
883 	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
884 
885 		orig_insn = find_insn(file, special_alt->orig_sec,
886 				      special_alt->orig_off);
887 		if (!orig_insn) {
888 			WARN_FUNC("special: can't find orig instruction",
889 				  special_alt->orig_sec, special_alt->orig_off);
890 			ret = -1;
891 			goto out;
892 		}
893 
894 		new_insn = NULL;
895 		if (!special_alt->group || special_alt->new_len) {
896 			new_insn = find_insn(file, special_alt->new_sec,
897 					     special_alt->new_off);
898 			if (!new_insn) {
899 				WARN_FUNC("special: can't find new instruction",
900 					  special_alt->new_sec,
901 					  special_alt->new_off);
902 				ret = -1;
903 				goto out;
904 			}
905 		}
906 
907 		if (special_alt->group) {
908 			ret = handle_group_alt(file, special_alt, orig_insn,
909 					       &new_insn);
910 			if (ret)
911 				goto out;
912 		} else if (special_alt->jump_or_nop) {
913 			ret = handle_jump_alt(file, special_alt, orig_insn,
914 					      &new_insn);
915 			if (ret)
916 				goto out;
917 		}
918 
919 		alt = malloc(sizeof(*alt));
920 		if (!alt) {
921 			WARN("malloc failed");
922 			ret = -1;
923 			goto out;
924 		}
925 
926 		alt->insn = new_insn;
927 		alt->skip_orig = special_alt->skip_orig;
928 		orig_insn->ignore_alts |= special_alt->skip_alt;
929 		list_add_tail(&alt->list, &orig_insn->alts);
930 
931 		list_del(&special_alt->list);
932 		free(special_alt);
933 	}
934 
935 out:
936 	return ret;
937 }
938 
939 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
940 			    struct rela *table)
941 {
942 	struct rela *rela = table;
943 	struct instruction *dest_insn;
944 	struct alternative *alt;
945 	struct symbol *pfunc = insn->func->pfunc;
946 	unsigned int prev_offset = 0;
947 
948 	/*
949 	 * Each @rela is a switch table relocation which points to the target
950 	 * instruction.
951 	 */
952 	list_for_each_entry_from(rela, &table->sec->rela_list, list) {
953 
954 		/* Check for the end of the table: */
955 		if (rela != table && rela->jump_table_start)
956 			break;
957 
958 		/* Make sure the table entries are consecutive: */
959 		if (prev_offset && rela->offset != prev_offset + 8)
960 			break;
961 
962 		/* Detect function pointers from contiguous objects: */
963 		if (rela->sym->sec == pfunc->sec &&
964 		    rela->addend == pfunc->offset)
965 			break;
966 
967 		dest_insn = find_insn(file, rela->sym->sec, rela->addend);
968 		if (!dest_insn)
969 			break;
970 
971 		/* Make sure the destination is in the same function: */
972 		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
973 			break;
974 
975 		alt = malloc(sizeof(*alt));
976 		if (!alt) {
977 			WARN("malloc failed");
978 			return -1;
979 		}
980 
981 		alt->insn = dest_insn;
982 		list_add_tail(&alt->list, &insn->alts);
983 		prev_offset = rela->offset;
984 	}
985 
986 	if (!prev_offset) {
987 		WARN_FUNC("can't find switch jump table",
988 			  insn->sec, insn->offset);
989 		return -1;
990 	}
991 
992 	return 0;
993 }
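
/*
 * Editor's illustrative layout (hypothetical offsets; 8-byte entries as
 * implied by the "prev_offset + 8" check above):
 *
 *	.rela.rodata:
 *		0x40  ->  .text + 0x110		(case 0)
 *		0x48  ->  .text + 0x12a		(case 1)
 *		0x50  ->  .text + 0x151		(case 2)
 *
 * The walk stops at the start of the next table (jump_table_start), at a gap
 * in the entry offsets, at an entry pointing back at the parent function
 * itself (a function pointer in adjacent data), or at a destination outside
 * the function.
 */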
994 
995 /*
996  * find_jump_table() - Given a dynamic jump, find the switch jump table in
997  * .rodata associated with it.
998  *
999  * There are 3 basic patterns:
1000  *
1001  * 1. jmpq *[rodata addr](,%reg,8)
1002  *
1003  *    This is the most common case by far.  It jumps to an address in a simple
1004  *    jump table which is stored in .rodata.
1005  *
1006  * 2. jmpq *[rodata addr](%rip)
1007  *
1008  *    This is caused by a rare GCC quirk, currently only seen in three driver
1009  *    functions in the kernel, only with certain obscure non-distro configs.
1010  *
1011  *    As part of an optimization, GCC makes a copy of an existing switch jump
1012  *    table, modifies it, and then hard-codes the jump (albeit with an indirect
1013  *    jump) to use a single entry in the table.  The rest of the jump table and
1014  *    some of its jump targets remain as dead code.
1015  *
1016  *    In such a case we can just crudely ignore all unreachable instruction
1017  *    warnings for the entire object file.  Ideally we would just ignore them
1018  *    for the function, but that would require redesigning the code quite a
1019  *    bit.  And honestly that's just not worth doing: unreachable instruction
1020  *    warnings are of questionable value anyway, and this is such a rare issue.
1021  *
1022  * 3. mov [rodata addr],%reg1
1023  *    ... some instructions ...
1024  *    jmpq *(%reg1,%reg2,8)
1025  *
1026  *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
1027  *    writing, there are 11 occurrences of it in the allmodconfig kernel.
1028  *
1029  *    As of GCC 7 there are quite a few more of these and the 'in between' code
1030  *    is significant. Esp. with KASAN enabled some of the code between the mov
1031  *    and jmpq uses .rodata itself, which can confuse things.
1032  *
1033  *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
1034  *    ensure the same register is used in the mov and jump instructions.
1035  *
1036  *    NOTE: RETPOLINE made it harder still to decode dynamic jumps.
1037  */
1038 static struct rela *find_jump_table(struct objtool_file *file,
1039 				      struct symbol *func,
1040 				      struct instruction *insn)
1041 {
1042 	struct rela *text_rela, *table_rela;
1043 	struct instruction *dest_insn, *orig_insn = insn;
1044 	struct section *table_sec;
1045 	unsigned long table_offset;
1046 
1047 	/*
1048 	 * Backward search using the @first_jump_src links: these help avoid
1049 	 * much of the 'in between' code, which would otherwise confuse the
1050 	 * search.
1051 	 */
1052 	for (;
1053 	     &insn->list != &file->insn_list && insn->func && insn->func->pfunc == func;
1054 	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1055 
1056 		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1057 			break;
1058 
1059 		/* allow small jumps within the range */
1060 		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1061 		    insn->jump_dest &&
1062 		    (insn->jump_dest->offset <= insn->offset ||
1063 		     insn->jump_dest->offset > orig_insn->offset))
1064 			break;
1065 
1066 		/* look for a relocation which references .rodata */
1067 		text_rela = find_rela_by_dest_range(file->elf, insn->sec,
1068 						    insn->offset, insn->len);
1069 		if (!text_rela || text_rela->sym->type != STT_SECTION ||
1070 		    !text_rela->sym->sec->rodata)
1071 			continue;
1072 
1073 		table_offset = text_rela->addend;
1074 		table_sec = text_rela->sym->sec;
1075 
1076 		if (text_rela->type == R_X86_64_PC32)
1077 			table_offset += 4;
1078 
1079 		/*
1080 		 * Make sure the .rodata address isn't associated with a
1081 		 * symbol.  GCC jump tables are anonymous data.
1082 		 *
1083 		 * Also support C jump tables which are in the same format as
1084 		 * switch jump tables.  For objtool to recognize them, they
1085 		 * need to be placed in the C_JUMP_TABLE_SECTION section.  They
1086 		 * have symbols associated with them.
1087 		 */
1088 		if (find_symbol_containing(table_sec, table_offset) &&
1089 		    strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
1090 			continue;
1091 
1092 		/*
1093 		 * Each table entry has a rela associated with it.  The rela
1094 		 * should reference text in the same function as the original
1095 		 * instruction.
1096 		 */
1097 		table_rela = find_rela_by_dest(file->elf, table_sec, table_offset);
1098 		if (!table_rela)
1099 			continue;
1100 		dest_insn = find_insn(file, table_rela->sym->sec, table_rela->addend);
1101 		if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
1102 			continue;
1103 
1104 		/*
1105 		 * Use of RIP-relative switch jumps is quite rare, and
1106 		 * indicates a rare GCC quirk/bug which can leave dead code
1107 		 * behind.
1108 		 */
1109 		if (text_rela->type == R_X86_64_PC32)
1110 			file->ignore_unreachables = true;
1111 
1112 		return table_rela;
1113 	}
1114 
1115 	return NULL;
1116 }
1117 
1118 /*
1119  * First pass: Mark the head of each jump table so that in the next pass,
1120  * we know when a given jump table ends and the next one starts.
1121  */
1122 static void mark_func_jump_tables(struct objtool_file *file,
1123 				    struct symbol *func)
1124 {
1125 	struct instruction *insn, *last = NULL;
1126 	struct rela *rela;
1127 
1128 	func_for_each_insn(file, func, insn) {
1129 		if (!last)
1130 			last = insn;
1131 
1132 		/*
1133 		 * Store back-pointers for unconditional forward jumps such
1134 		 * that find_jump_table() can back-track using those and
1135 		 * avoid some potentially confusing code.
1136 		 */
1137 		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1138 		    insn->offset > last->offset &&
1139 		    insn->jump_dest->offset > insn->offset &&
1140 		    !insn->jump_dest->first_jump_src) {
1141 
1142 			insn->jump_dest->first_jump_src = insn;
1143 			last = insn->jump_dest;
1144 		}
1145 
1146 		if (insn->type != INSN_JUMP_DYNAMIC)
1147 			continue;
1148 
1149 		rela = find_jump_table(file, func, insn);
1150 		if (rela) {
1151 			rela->jump_table_start = true;
1152 			insn->jump_table = rela;
1153 		}
1154 	}
1155 }
1156 
1157 static int add_func_jump_tables(struct objtool_file *file,
1158 				  struct symbol *func)
1159 {
1160 	struct instruction *insn;
1161 	int ret;
1162 
1163 	func_for_each_insn(file, func, insn) {
1164 		if (!insn->jump_table)
1165 			continue;
1166 
1167 		ret = add_jump_table(file, insn, insn->jump_table);
1168 		if (ret)
1169 			return ret;
1170 	}
1171 
1172 	return 0;
1173 }
1174 
1175 /*
1176  * For some switch statements, gcc generates a jump table in the .rodata
1177  * section which contains a list of addresses within the function to jump to.
1178  * This finds these jump tables and adds them to the insn->alts lists.
1179  */
1180 static int add_jump_table_alts(struct objtool_file *file)
1181 {
1182 	struct section *sec;
1183 	struct symbol *func;
1184 	int ret;
1185 
1186 	if (!file->rodata)
1187 		return 0;
1188 
1189 	for_each_sec(file, sec) {
1190 		list_for_each_entry(func, &sec->symbol_list, list) {
1191 			if (func->type != STT_FUNC)
1192 				continue;
1193 
1194 			mark_func_jump_tables(file, func);
1195 			ret = add_func_jump_tables(file, func);
1196 			if (ret)
1197 				return ret;
1198 		}
1199 	}
1200 
1201 	return 0;
1202 }
1203 
1204 static int read_unwind_hints(struct objtool_file *file)
1205 {
1206 	struct section *sec, *relasec;
1207 	struct rela *rela;
1208 	struct unwind_hint *hint;
1209 	struct instruction *insn;
1210 	struct cfi_reg *cfa;
1211 	int i;
1212 
1213 	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1214 	if (!sec)
1215 		return 0;
1216 
1217 	relasec = sec->rela;
1218 	if (!relasec) {
1219 		WARN("missing .rela.discard.unwind_hints section");
1220 		return -1;
1221 	}
1222 
1223 	if (sec->len % sizeof(struct unwind_hint)) {
1224 		WARN("struct unwind_hint size mismatch");
1225 		return -1;
1226 	}
1227 
1228 	file->hints = true;
1229 
1230 	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1231 		hint = (struct unwind_hint *)sec->data->d_buf + i;
1232 
1233 		rela = find_rela_by_dest(file->elf, sec, i * sizeof(*hint));
1234 		if (!rela) {
1235 			WARN("can't find rela for unwind_hints[%d]", i);
1236 			return -1;
1237 		}
1238 
1239 		insn = find_insn(file, rela->sym->sec, rela->addend);
1240 		if (!insn) {
1241 			WARN("can't find insn for unwind_hints[%d]", i);
1242 			return -1;
1243 		}
1244 
1245 		cfa = &insn->state.cfa;
1246 
1247 		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1248 			insn->save = true;
1249 			continue;
1250 
1251 		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1252 			insn->restore = true;
1253 			insn->hint = true;
1254 			continue;
1255 		}
1256 
1257 		insn->hint = true;
1258 
1259 		switch (hint->sp_reg) {
1260 		case ORC_REG_UNDEFINED:
1261 			cfa->base = CFI_UNDEFINED;
1262 			break;
1263 		case ORC_REG_SP:
1264 			cfa->base = CFI_SP;
1265 			break;
1266 		case ORC_REG_BP:
1267 			cfa->base = CFI_BP;
1268 			break;
1269 		case ORC_REG_SP_INDIRECT:
1270 			cfa->base = CFI_SP_INDIRECT;
1271 			break;
1272 		case ORC_REG_R10:
1273 			cfa->base = CFI_R10;
1274 			break;
1275 		case ORC_REG_R13:
1276 			cfa->base = CFI_R13;
1277 			break;
1278 		case ORC_REG_DI:
1279 			cfa->base = CFI_DI;
1280 			break;
1281 		case ORC_REG_DX:
1282 			cfa->base = CFI_DX;
1283 			break;
1284 		default:
1285 			WARN_FUNC("unsupported unwind_hint sp base reg %d",
1286 				  insn->sec, insn->offset, hint->sp_reg);
1287 			return -1;
1288 		}
1289 
1290 		cfa->offset = hint->sp_offset;
1291 		insn->state.type = hint->type;
1292 		insn->state.end = hint->end;
1293 	}
1294 
1295 	return 0;
1296 }
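
/*
 * Editor's note (illustrative): these records come from the arch's
 * UNWIND_HINT_* asm annotations.  A SAVE hint marks the instruction so the
 * branch validation below can snapshot the unwind state there, a RESTORE hint
 * marks where that snapshot is re-applied, and any other hint seeds the CFA
 * directly from sp_reg/sp_offset as decoded above.
 */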
1297 
1298 static int read_retpoline_hints(struct objtool_file *file)
1299 {
1300 	struct section *sec;
1301 	struct instruction *insn;
1302 	struct rela *rela;
1303 
1304 	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1305 	if (!sec)
1306 		return 0;
1307 
1308 	list_for_each_entry(rela, &sec->rela_list, list) {
1309 		if (rela->sym->type != STT_SECTION) {
1310 			WARN("unexpected relocation symbol type in %s", sec->name);
1311 			return -1;
1312 		}
1313 
1314 		insn = find_insn(file, rela->sym->sec, rela->addend);
1315 		if (!insn) {
1316 			WARN("bad .discard.retpoline_safe entry");
1317 			return -1;
1318 		}
1319 
1320 		if (insn->type != INSN_JUMP_DYNAMIC &&
1321 		    insn->type != INSN_CALL_DYNAMIC) {
1322 			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1323 				  insn->sec, insn->offset);
1324 			return -1;
1325 		}
1326 
1327 		insn->retpoline_safe = true;
1328 	}
1329 
1330 	return 0;
1331 }
1332 
1333 static void mark_rodata(struct objtool_file *file)
1334 {
1335 	struct section *sec;
1336 	bool found = false;
1337 
1338 	/*
1339 	 * Search for the following rodata sections, each of which can
1340 	 * potentially contain jump tables:
1341 	 *
1342 	 * - .rodata: can contain GCC switch tables
1343 	 * - .rodata.<func>: same, if -fdata-sections is being used
1344 	 * - .rodata..c_jump_table: contains C annotated jump tables
1345 	 *
1346 	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1347 	 */
1348 	for_each_sec(file, sec) {
1349 		if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
1350 		    !strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
1351 			sec->rodata = true;
1352 			found = true;
1353 		}
1354 	}
1355 
1356 	file->rodata = found;
1357 }
1358 
1359 static int decode_sections(struct objtool_file *file)
1360 {
1361 	int ret;
1362 
1363 	mark_rodata(file);
1364 
1365 	ret = decode_instructions(file);
1366 	if (ret)
1367 		return ret;
1368 
1369 	ret = add_dead_ends(file);
1370 	if (ret)
1371 		return ret;
1372 
1373 	add_ignores(file);
1374 	add_uaccess_safe(file);
1375 
1376 	ret = add_ignore_alternatives(file);
1377 	if (ret)
1378 		return ret;
1379 
1380 	ret = add_jump_destinations(file);
1381 	if (ret)
1382 		return ret;
1383 
1384 	ret = add_special_section_alts(file);
1385 	if (ret)
1386 		return ret;
1387 
1388 	ret = add_call_destinations(file);
1389 	if (ret)
1390 		return ret;
1391 
1392 	ret = add_jump_table_alts(file);
1393 	if (ret)
1394 		return ret;
1395 
1396 	ret = read_unwind_hints(file);
1397 	if (ret)
1398 		return ret;
1399 
1400 	ret = read_retpoline_hints(file);
1401 	if (ret)
1402 		return ret;
1403 
1404 	return 0;
1405 }
1406 
1407 static bool is_fentry_call(struct instruction *insn)
1408 {
1409 	if (insn->type == INSN_CALL &&
1410 	    insn->call_dest->type == STT_NOTYPE &&
1411 	    !strcmp(insn->call_dest->name, "__fentry__"))
1412 		return true;
1413 
1414 	return false;
1415 }
1416 
1417 static bool has_modified_stack_frame(struct insn_state *state)
1418 {
1419 	int i;
1420 
1421 	if (state->cfa.base != initial_func_cfi.cfa.base ||
1422 	    state->cfa.offset != initial_func_cfi.cfa.offset ||
1423 	    state->stack_size != initial_func_cfi.cfa.offset ||
1424 	    state->drap)
1425 		return true;
1426 
1427 	for (i = 0; i < CFI_NUM_REGS; i++)
1428 		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1429 		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
1430 			return true;
1431 
1432 	return false;
1433 }
1434 
1435 static bool has_valid_stack_frame(struct insn_state *state)
1436 {
1437 	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1438 	    state->regs[CFI_BP].offset == -16)
1439 		return true;
1440 
1441 	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1442 		return true;
1443 
1444 	return false;
1445 }
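
/*
 * Editor's illustrative example (standard x86-64 frame): after
 *
 *	push	%rbp
 *	mov	%rsp, %rbp
 *
 * the CFA is %rbp + 16 and the saved %rbp sits at CFA - 16, which is exactly
 * the (cfa.base == CFI_BP, regs[CFI_BP] == CFI_CFA - 16) pattern accepted
 * above.
 */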
1446 
1447 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1448 {
1449 	struct cfi_reg *cfa = &state->cfa;
1450 	struct stack_op *op = &insn->stack_op;
1451 
1452 	if (cfa->base != CFI_SP)
1453 		return 0;
1454 
1455 	/* push */
1456 	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1457 		cfa->offset += 8;
1458 
1459 	/* pop */
1460 	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1461 		cfa->offset -= 8;
1462 
1463 	/* add immediate to sp */
1464 	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1465 	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1466 		cfa->offset -= op->src.offset;
1467 
1468 	return 0;
1469 }
1470 
1471 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1472 		     int offset)
1473 {
1474 	if (arch_callee_saved_reg(reg) &&
1475 	    state->regs[reg].base == CFI_UNDEFINED) {
1476 		state->regs[reg].base = base;
1477 		state->regs[reg].offset = offset;
1478 	}
1479 }
1480 
1481 static void restore_reg(struct insn_state *state, unsigned char reg)
1482 {
1483 	state->regs[reg].base = CFI_UNDEFINED;
1484 	state->regs[reg].offset = 0;
1485 }
1486 
1487 /*
1488  * A note about DRAP stack alignment:
1489  *
1490  * GCC has the concept of a DRAP register, which is used to help keep track of
1491  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1492  * register.  The typical DRAP pattern is:
1493  *
1494  *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
1495  *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
1496  *   41 ff 72 f8		pushq  -0x8(%r10)
1497  *   55				push   %rbp
1498  *   48 89 e5			mov    %rsp,%rbp
1499  *				(more pushes)
1500  *   41 52			push   %r10
1501  *				...
1502  *   41 5a			pop    %r10
1503  *				(more pops)
1504  *   5d				pop    %rbp
1505  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1506  *   c3				retq
1507  *
1508  * There are some variations in the epilogues, like:
1509  *
1510  *   5b				pop    %rbx
1511  *   41 5a			pop    %r10
1512  *   41 5c			pop    %r12
1513  *   41 5d			pop    %r13
1514  *   41 5e			pop    %r14
1515  *   c9				leaveq
1516  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1517  *   c3				retq
1518  *
1519  * and:
1520  *
1521  *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
1522  *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
1523  *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
1524  *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
1525  *   c9				leaveq
1526  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1527  *   c3				retq
1528  *
1529  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1530  * restored beforehand:
1531  *
1532  *   41 55			push   %r13
1533  *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
1534  *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
1535  *				...
1536  *   49 8d 65 f0		lea    -0x10(%r13),%rsp
1537  *   41 5d			pop    %r13
1538  *   c3				retq
1539  */
1540 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1541 {
1542 	struct stack_op *op = &insn->stack_op;
1543 	struct cfi_reg *cfa = &state->cfa;
1544 	struct cfi_reg *regs = state->regs;
1545 
1546 	/* stack operations don't make sense with an undefined CFA */
1547 	if (cfa->base == CFI_UNDEFINED) {
1548 		if (insn->func) {
1549 			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1550 			return -1;
1551 		}
1552 		return 0;
1553 	}
1554 
1555 	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1556 		return update_insn_state_regs(insn, state);
1557 
1558 	switch (op->dest.type) {
1559 
1560 	case OP_DEST_REG:
1561 		switch (op->src.type) {
1562 
1563 		case OP_SRC_REG:
1564 			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1565 			    cfa->base == CFI_SP &&
1566 			    regs[CFI_BP].base == CFI_CFA &&
1567 			    regs[CFI_BP].offset == -cfa->offset) {
1568 
1569 				/* mov %rsp, %rbp */
1570 				cfa->base = op->dest.reg;
1571 				state->bp_scratch = false;
1572 			}
1573 
1574 			else if (op->src.reg == CFI_SP &&
1575 				 op->dest.reg == CFI_BP && state->drap) {
1576 
1577 				/* drap: mov %rsp, %rbp */
1578 				regs[CFI_BP].base = CFI_BP;
1579 				regs[CFI_BP].offset = -state->stack_size;
1580 				state->bp_scratch = false;
1581 			}
1582 
1583 			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1584 
1585 				/*
1586 				 * mov %rsp, %reg
1587 				 *
1588 				 * This is needed for the rare case where GCC
1589 				 * does:
1590 				 *
1591 				 *   mov    %rsp, %rax
1592 				 *   ...
1593 				 *   mov    %rax, %rsp
1594 				 */
1595 				state->vals[op->dest.reg].base = CFI_CFA;
1596 				state->vals[op->dest.reg].offset = -state->stack_size;
1597 			}
1598 
1599 			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1600 				 cfa->base == CFI_BP) {
1601 
1602 				/*
1603 				 * mov %rbp, %rsp
1604 				 *
1605 				 * Restore the original stack pointer (Clang).
1606 				 */
1607 				state->stack_size = -state->regs[CFI_BP].offset;
1608 			}
1609 
1610 			else if (op->dest.reg == cfa->base) {
1611 
1612 				/* mov %reg, %rsp */
1613 				if (cfa->base == CFI_SP &&
1614 				    state->vals[op->src.reg].base == CFI_CFA) {
1615 
1616 					/*
1617 					 * This is needed for the rare case
1618 					 * where GCC does something dumb like:
1619 					 *
1620 					 *   lea    0x8(%rsp), %rcx
1621 					 *   ...
1622 					 *   mov    %rcx, %rsp
1623 					 */
1624 					cfa->offset = -state->vals[op->src.reg].offset;
1625 					state->stack_size = cfa->offset;
1626 
1627 				} else {
1628 					cfa->base = CFI_UNDEFINED;
1629 					cfa->offset = 0;
1630 				}
1631 			}
1632 
1633 			break;
1634 
1635 		case OP_SRC_ADD:
1636 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1637 
1638 				/* add imm, %rsp */
1639 				state->stack_size -= op->src.offset;
1640 				if (cfa->base == CFI_SP)
1641 					cfa->offset -= op->src.offset;
1642 				break;
1643 			}
1644 
1645 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1646 
1647 				/* lea disp(%rbp), %rsp */
1648 				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1649 				break;
1650 			}
1651 
1652 			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1653 
1654 				/* drap: lea disp(%rsp), %drap */
1655 				state->drap_reg = op->dest.reg;
1656 
1657 				/*
1658 				 * lea disp(%rsp), %reg
1659 				 *
1660 				 * This is needed for the rare case where GCC
1661 				 * does something dumb like:
1662 				 *
1663 				 *   lea    0x8(%rsp), %rcx
1664 				 *   ...
1665 				 *   mov    %rcx, %rsp
1666 				 */
1667 				state->vals[op->dest.reg].base = CFI_CFA;
1668 				state->vals[op->dest.reg].offset = \
1669 					-state->stack_size + op->src.offset;
1670 
1671 				break;
1672 			}
1673 
1674 			if (state->drap && op->dest.reg == CFI_SP &&
1675 			    op->src.reg == state->drap_reg) {
1676 
1677 				/* drap: lea disp(%drap), %rsp */
1678 				cfa->base = CFI_SP;
1679 				cfa->offset = state->stack_size = -op->src.offset;
1680 				state->drap_reg = CFI_UNDEFINED;
1681 				state->drap = false;
1682 				break;
1683 			}
1684 
1685 			if (op->dest.reg == state->cfa.base) {
1686 				WARN_FUNC("unsupported stack register modification",
1687 					  insn->sec, insn->offset);
1688 				return -1;
1689 			}
1690 
1691 			break;
1692 
1693 		case OP_SRC_AND:
1694 			if (op->dest.reg != CFI_SP ||
1695 			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1696 			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1697 				WARN_FUNC("unsupported stack pointer realignment",
1698 					  insn->sec, insn->offset);
1699 				return -1;
1700 			}
1701 
1702 			if (state->drap_reg != CFI_UNDEFINED) {
1703 				/* drap: and imm, %rsp */
1704 				cfa->base = state->drap_reg;
1705 				cfa->offset = state->stack_size = 0;
1706 				state->drap = true;
1707 			}
1708 
1709 			/*
1710 			 * Older versions of GCC (4.8ish) realign the stack
1711 			 * without DRAP, with a frame pointer.
1712 			 */
1713 
1714 			break;
1715 
1716 		case OP_SRC_POP:
1717 		case OP_SRC_POPF:
1718 			if (!state->drap && op->dest.type == OP_DEST_REG &&
1719 			    op->dest.reg == cfa->base) {
1720 
1721 				/* pop %rbp */
1722 				cfa->base = CFI_SP;
1723 			}
1724 
1725 			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1726 			    op->dest.type == OP_DEST_REG &&
1727 			    op->dest.reg == state->drap_reg &&
1728 			    state->drap_offset == -state->stack_size) {
1729 
1730 				/* drap: pop %drap */
1731 				cfa->base = state->drap_reg;
1732 				cfa->offset = 0;
1733 				state->drap_offset = -1;
1734 
1735 			} else if (regs[op->dest.reg].offset == -state->stack_size) {
1736 
1737 				/* pop %reg */
1738 				restore_reg(state, op->dest.reg);
1739 			}
1740 
1741 			state->stack_size -= 8;
1742 			if (cfa->base == CFI_SP)
1743 				cfa->offset -= 8;
1744 
1745 			break;
1746 
1747 		case OP_SRC_REG_INDIRECT:
1748 			if (state->drap && op->src.reg == CFI_BP &&
1749 			    op->src.offset == state->drap_offset) {
1750 
1751 				/* drap: mov disp(%rbp), %drap */
1752 				cfa->base = state->drap_reg;
1753 				cfa->offset = 0;
1754 				state->drap_offset = -1;
1755 			}
1756 
1757 			if (state->drap && op->src.reg == CFI_BP &&
1758 			    op->src.offset == regs[op->dest.reg].offset) {
1759 
1760 				/* drap: mov disp(%rbp), %reg */
1761 				restore_reg(state, op->dest.reg);
1762 
1763 			} else if (op->src.reg == cfa->base &&
1764 			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1765 
1766 				/* mov disp(%rbp), %reg */
1767 				/* mov disp(%rsp), %reg */
1768 				restore_reg(state, op->dest.reg);
1769 			}
1770 
1771 			break;
1772 
1773 		default:
1774 			WARN_FUNC("unknown stack-related instruction",
1775 				  insn->sec, insn->offset);
1776 			return -1;
1777 		}
1778 
1779 		break;
1780 
1781 	case OP_DEST_PUSH:
1782 	case OP_DEST_PUSHF:
1783 		state->stack_size += 8;
1784 		if (cfa->base == CFI_SP)
1785 			cfa->offset += 8;
1786 
1787 		if (op->src.type != OP_SRC_REG)
1788 			break;
1789 
1790 		if (state->drap) {
1791 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1792 
1793 				/* drap: push %drap */
1794 				cfa->base = CFI_BP_INDIRECT;
1795 				cfa->offset = -state->stack_size;
1796 
1797 				/* save drap so we know when to restore it */
1798 				state->drap_offset = -state->stack_size;
1799 
1800 			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1801 
1802 				/* drap: push %rbp */
1803 				state->stack_size = 0;
1804 
1805 			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1806 
1807 				/* drap: push %reg */
1808 				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1809 			}
1810 
1811 		} else {
1812 
1813 			/* push %reg */
1814 			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1815 		}
1816 
1817 		/* detect when asm code uses rbp as a scratch register */
1818 		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1819 		    cfa->base != CFI_BP)
1820 			state->bp_scratch = true;
1821 		break;
1822 
1823 	case OP_DEST_REG_INDIRECT:
1824 
1825 		if (state->drap) {
1826 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1827 
1828 				/* drap: mov %drap, disp(%rbp) */
1829 				cfa->base = CFI_BP_INDIRECT;
1830 				cfa->offset = op->dest.offset;
1831 
1832 				/* save drap offset so we know when to restore it */
1833 				state->drap_offset = op->dest.offset;
1834 			}
1835 
1836 			else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1837 
1838 				/* drap: mov reg, disp(%rbp) */
1839 				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1840 			}
1841 
1842 		} else if (op->dest.reg == cfa->base) {
1843 
1844 			/* mov reg, disp(%rbp) */
1845 			/* mov reg, disp(%rsp) */
1846 			save_reg(state, op->src.reg, CFI_CFA,
1847 				 op->dest.offset - state->cfa.offset);
1848 		}
1849 
1850 		break;
1851 
1852 	case OP_DEST_LEAVE:
1853 		if ((!state->drap && cfa->base != CFI_BP) ||
1854 		    (state->drap && cfa->base != state->drap_reg)) {
1855 			WARN_FUNC("leave instruction with modified stack frame",
1856 				  insn->sec, insn->offset);
1857 			return -1;
1858 		}
1859 
1860 		/* leave (mov %rbp, %rsp; pop %rbp) */
1861 
1862 		state->stack_size = -state->regs[CFI_BP].offset - 8;
1863 		restore_reg(state, CFI_BP);
1864 
1865 		if (!state->drap) {
1866 			cfa->base = CFI_SP;
1867 			cfa->offset -= 8;
1868 		}
1869 
1870 		break;
1871 
1872 	case OP_DEST_MEM:
1873 		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1874 			WARN_FUNC("unknown stack-related memory operation",
1875 				  insn->sec, insn->offset);
1876 			return -1;
1877 		}
1878 
1879 		/* pop mem */
1880 		state->stack_size -= 8;
1881 		if (cfa->base == CFI_SP)
1882 			cfa->offset -= 8;
1883 
1884 		break;
1885 
1886 	default:
1887 		WARN_FUNC("unknown stack-related instruction",
1888 			  insn->sec, insn->offset);
1889 		return -1;
1890 	}
1891 
1892 	return 0;
1893 }
1894 
1895 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1896 {
1897 	struct insn_state *state1 = &insn->state, *state2 = state;
1898 	int i;
1899 
1900 	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1901 		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1902 			  insn->sec, insn->offset,
1903 			  state1->cfa.base, state1->cfa.offset,
1904 			  state2->cfa.base, state2->cfa.offset);
1905 
1906 	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1907 		for (i = 0; i < CFI_NUM_REGS; i++) {
1908 			if (!memcmp(&state1->regs[i], &state2->regs[i],
1909 				    sizeof(struct cfi_reg)))
1910 				continue;
1911 
1912 			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1913 				  insn->sec, insn->offset,
1914 				  i, state1->regs[i].base, state1->regs[i].offset,
1915 				  i, state2->regs[i].base, state2->regs[i].offset);
1916 			break;
1917 		}
1918 
1919 	} else if (state1->type != state2->type) {
1920 		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1921 			  insn->sec, insn->offset, state1->type, state2->type);
1922 
1923 	} else if (state1->drap != state2->drap ||
1924 		 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1925 		 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1926 		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1927 			  insn->sec, insn->offset,
1928 			  state1->drap, state1->drap_reg, state1->drap_offset,
1929 			  state2->drap, state2->drap_reg, state2->drap_offset);
1930 
1931 	} else
1932 		return true;
1933 
1934 	return false;
1935 }
1936 
1937 static inline bool func_uaccess_safe(struct symbol *func)
1938 {
1939 	if (func)
1940 		return func->uaccess_safe;
1941 
1942 	return false;
1943 }
1944 
1945 static inline const char *call_dest_name(struct instruction *insn)
1946 {
1947 	if (insn->call_dest)
1948 		return insn->call_dest->name;
1949 
1950 	return "{dynamic}";
1951 }
1952 
1953 static int validate_call(struct instruction *insn, struct insn_state *state)
1954 {
1955 	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1956 		WARN_FUNC("call to %s() with UACCESS enabled",
1957 				insn->sec, insn->offset, call_dest_name(insn));
1958 		return 1;
1959 	}
1960 
1961 	if (state->df) {
1962 		WARN_FUNC("call to %s() with DF set",
1963 				insn->sec, insn->offset, call_dest_name(insn));
1964 		return 1;
1965 	}
1966 
1967 	return 0;
1968 }
1969 
1970 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1971 {
1972 	if (has_modified_stack_frame(state)) {
1973 		WARN_FUNC("sibling call from callable instruction with modified stack frame",
1974 				insn->sec, insn->offset);
1975 		return 1;
1976 	}
1977 
1978 	return validate_call(insn, state);
1979 }
1980 
1981 static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
1982 {
1983 	if (state->uaccess && !func_uaccess_safe(func)) {
1984 		WARN_FUNC("return with UACCESS enabled",
1985 			  insn->sec, insn->offset);
1986 		return 1;
1987 	}
1988 
1989 	if (!state->uaccess && func_uaccess_safe(func)) {
1990 		WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
1991 			  insn->sec, insn->offset);
1992 		return 1;
1993 	}
1994 
1995 	if (state->df) {
1996 		WARN_FUNC("return with DF set",
1997 			  insn->sec, insn->offset);
1998 		return 1;
1999 	}
2000 
2001 	if (func && has_modified_stack_frame(state)) {
2002 		WARN_FUNC("return with modified stack frame",
2003 			  insn->sec, insn->offset);
2004 		return 1;
2005 	}
2006 
2007 	if (state->bp_scratch) {
2008 		WARN_FUNC("BP used as a scratch register",
2009 			  insn->sec, insn->offset);
2010 		return 1;
2011 	}
2012 
2013 	return 0;
2014 }
2015 
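/*
 * For orientation, a rough sketch (not taken from any particular function) of
 * the frame pointer sequence this pass expects in a non-leaf function when
 * frame pointer validation is enabled (no --no-fp):
 *
 *	push %rbp		# save the caller's frame pointer
 *	mov  %rsp, %rbp		# the CFA base switches from %rsp to %rbp
 *	...
 *	call foo		# has_valid_stack_frame() must hold here
 *	...
 *	leave			# mov %rbp, %rsp; pop %rbp
 *	ret
 */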
2016 /*
2017  * Follow the branch starting at the given instruction, and recursively follow
2018  * any other branches (jumps).  Meanwhile, track the frame pointer state at
2019  * each instruction and validate all the rules described in
2020  * tools/objtool/Documentation/stack-validation.txt.
2021  */
2022 static int validate_branch(struct objtool_file *file, struct symbol *func,
2023 			   struct instruction *first, struct insn_state state)
2024 {
2025 	struct alternative *alt;
2026 	struct instruction *insn, *next_insn;
2027 	struct section *sec;
2028 	u8 visited;
2029 	int ret;
2030 
2031 	insn = first;
2032 	sec = insn->sec;
2033 
2034 	if (insn->alt_group && list_empty(&insn->alts)) {
2035 		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
2036 			  sec, insn->offset);
2037 		return 1;
2038 	}
2039 
2040 	while (1) {
2041 		next_insn = next_insn_same_sec(file, insn);
2042 
2043 		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
2044 			WARN("%s() falls through to next function %s()",
2045 			     func->name, insn->func->name);
2046 			return 1;
2047 		}
2048 
2049 		if (func && insn->ignore) {
2050 			WARN_FUNC("BUG: why am I validating an ignored function?",
2051 				  sec, insn->offset);
2052 			return 1;
2053 		}
2054 
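		/*
		 * insn->visited is a per-uaccess-state bitmask: each
		 * instruction is validated at most once with UACCESS enabled
		 * and once with it disabled.
		 */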
2055 		visited = 1 << state.uaccess;
2056 		if (insn->visited) {
2057 			if (!insn->hint && !insn_state_match(insn, &state))
2058 				return 1;
2059 
2060 			if (insn->visited & visited)
2061 				return 0;
2062 		}
2063 
2064 		if (insn->hint) {
2065 			if (insn->restore) {
2066 				struct instruction *save_insn, *i;
2067 
2068 				i = insn;
2069 				save_insn = NULL;
2070 				sym_for_each_insn_continue_reverse(file, func, i) {
2071 					if (i->save) {
2072 						save_insn = i;
2073 						break;
2074 					}
2075 				}
2076 
2077 				if (!save_insn) {
2078 					WARN_FUNC("no corresponding CFI save for CFI restore",
2079 						  sec, insn->offset);
2080 					return 1;
2081 				}
2082 
2083 				if (!save_insn->visited) {
2084 					/*
2085 					 * Oops, no state to copy yet.
2086 					 * Hopefully we can reach this
2087 					 * instruction from another branch
2088 					 * after the save insn has been
2089 					 * visited.
2090 					 */
2091 					if (insn == first)
2092 						return 0;
2093 
2094 					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
2095 						  sec, insn->offset);
2096 					return 1;
2097 				}
2098 
2099 				insn->state = save_insn->state;
2100 			}
2101 
2102 			state = insn->state;
2103 
2104 		} else
2105 			insn->state = state;
2106 
2107 		insn->visited |= visited;
2108 
2109 		if (!insn->ignore_alts) {
2110 			bool skip_orig = false;
2111 
2112 			list_for_each_entry(alt, &insn->alts, list) {
2113 				if (alt->skip_orig)
2114 					skip_orig = true;
2115 
2116 				ret = validate_branch(file, func, alt->insn, state);
2117 				if (ret) {
2118 					if (backtrace)
2119 						BT_FUNC("(alt)", insn);
2120 					return ret;
2121 				}
2122 			}
2123 
2124 			if (skip_orig)
2125 				return 0;
2126 		}
2127 
2128 		switch (insn->type) {
2129 
2130 		case INSN_RETURN:
2131 			return validate_return(func, insn, &state);
2132 
2133 		case INSN_CALL:
2134 		case INSN_CALL_DYNAMIC:
2135 			ret = validate_call(insn, &state);
2136 			if (ret)
2137 				return ret;
2138 
2139 			if (!no_fp && func && !is_fentry_call(insn) &&
2140 			    !has_valid_stack_frame(&state)) {
2141 				WARN_FUNC("call without frame pointer save/setup",
2142 					  sec, insn->offset);
2143 				return 1;
2144 			}
2145 
2146 			if (dead_end_function(file, insn->call_dest))
2147 				return 0;
2148 
2149 			break;
2150 
2151 		case INSN_JUMP_CONDITIONAL:
2152 		case INSN_JUMP_UNCONDITIONAL:
2153 			if (func && is_sibling_call(insn)) {
2154 				ret = validate_sibling_call(insn, &state);
2155 				if (ret)
2156 					return ret;
2157 
2158 			} else if (insn->jump_dest) {
2159 				ret = validate_branch(file, func,
2160 						      insn->jump_dest, state);
2161 				if (ret) {
2162 					if (backtrace)
2163 						BT_FUNC("(branch)", insn);
2164 					return ret;
2165 				}
2166 			}
2167 
2168 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
2169 				return 0;
2170 
2171 			break;
2172 
2173 		case INSN_JUMP_DYNAMIC:
2174 		case INSN_JUMP_DYNAMIC_CONDITIONAL:
2175 			if (func && is_sibling_call(insn)) {
2176 				ret = validate_sibling_call(insn, &state);
2177 				if (ret)
2178 					return ret;
2179 			}
2180 
2181 			if (insn->type == INSN_JUMP_DYNAMIC)
2182 				return 0;
2183 
2184 			break;
2185 
2186 		case INSN_CONTEXT_SWITCH:
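			/*
			 * Context-switching instructions (e.g. iret, sysret)
			 * are only expected in non-callable entry code, or
			 * when the next instruction carries an unwind hint.
			 */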
2187 			if (func && (!next_insn || !next_insn->hint)) {
2188 				WARN_FUNC("unsupported instruction in callable function",
2189 					  sec, insn->offset);
2190 				return 1;
2191 			}
2192 			return 0;
2193 
2194 		case INSN_STACK:
2195 			if (update_insn_state(insn, &state))
2196 				return 1;
2197 
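			/*
			 * The uaccess state at each PUSHF is tracked in a
			 * small bit stack: a sentinel bit marks the bottom,
			 * every PUSHF shifts the stack left and pushes the
			 * current uaccess bit, and every POPF pops it back
			 * into state.uaccess.
			 */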
2198 			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2199 				if (!state.uaccess_stack) {
2200 					state.uaccess_stack = 1;
2201 				} else if (state.uaccess_stack >> 31) {
2202 					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2203 					return 1;
2204 				}
2205 				state.uaccess_stack <<= 1;
2206 				state.uaccess_stack  |= state.uaccess;
2207 			}
2208 
2209 			if (insn->stack_op.src.type == OP_SRC_POPF) {
2210 				if (state.uaccess_stack) {
2211 					state.uaccess = state.uaccess_stack & 1;
2212 					state.uaccess_stack >>= 1;
2213 					if (state.uaccess_stack == 1)
2214 						state.uaccess_stack = 0;
2215 				}
2216 			}
2217 
2218 			break;
2219 
2220 		case INSN_STAC:
2221 			if (state.uaccess) {
2222 				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2223 				return 1;
2224 			}
2225 
2226 			state.uaccess = true;
2227 			break;
2228 
2229 		case INSN_CLAC:
2230 			if (!state.uaccess && func) {
2231 				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2232 				return 1;
2233 			}
2234 
2235 			if (func_uaccess_safe(func) && !state.uaccess_stack) {
2236 				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2237 				return 1;
2238 			}
2239 
2240 			state.uaccess = false;
2241 			break;
2242 
2243 		case INSN_STD:
2244 			if (state.df)
2245 				WARN_FUNC("recursive STD", sec, insn->offset);
2246 
2247 			state.df = true;
2248 			break;
2249 
2250 		case INSN_CLD:
2251 			if (!state.df && func)
2252 				WARN_FUNC("redundant CLD", sec, insn->offset);
2253 
2254 			state.df = false;
2255 			break;
2256 
2257 		default:
2258 			break;
2259 		}
2260 
2261 		if (insn->dead_end)
2262 			return 0;
2263 
2264 		if (!next_insn) {
2265 			if (state.cfa.base == CFI_UNDEFINED)
2266 				return 0;
2267 			WARN("%s: unexpected end of section", sec->name);
2268 			return 1;
2269 		}
2270 
2271 		insn = next_insn;
2272 	}
2273 
2274 	return 0;
2275 }
2276 
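/*
 * Instructions annotated with UNWIND_HINT_*() in asm code start from the
 * hinted CFI state rather than the default function-entry state.  Validate
 * any hinted instruction that normal function validation didn't reach.
 */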
2277 static int validate_unwind_hints(struct objtool_file *file)
2278 {
2279 	struct instruction *insn;
2280 	int ret, warnings = 0;
2281 	struct insn_state state;
2282 
2283 	if (!file->hints)
2284 		return 0;
2285 
2286 	clear_insn_state(&state);
2287 
2288 	for_each_insn(file, insn) {
2289 		if (insn->hint && !insn->visited) {
2290 			ret = validate_branch(file, insn->func, insn, state);
2291 			if (ret && backtrace)
2292 				BT_FUNC("<=== (hint)", insn);
2293 			warnings += ret;
2294 		}
2295 	}
2296 
2297 	return warnings;
2298 }
2299 
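/*
 * In a CONFIG_RETPOLINE build the compiler converts indirect jumps and calls
 * into retpoline thunks, so any indirect branch left in the object file is
 * flagged unless it has been annotated as retpoline-safe.
 */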
2300 static int validate_retpoline(struct objtool_file *file)
2301 {
2302 	struct instruction *insn;
2303 	int warnings = 0;
2304 
2305 	for_each_insn(file, insn) {
2306 		if (insn->type != INSN_JUMP_DYNAMIC &&
2307 		    insn->type != INSN_CALL_DYNAMIC)
2308 			continue;
2309 
2310 		if (insn->retpoline_safe)
2311 			continue;
2312 
2313 		/*
2314 		 * .init.text code is run before userspace and thus doesn't
2315 		 * strictly need retpolines, except for modules, which are
2316 		 * loaded late and therefore very much do need retpolines in
2317 		 * their .init.text.
2318 		 */
2319 		if (!strcmp(insn->sec->name, ".init.text") && !module)
2320 			continue;
2321 
2322 		WARN_FUNC("indirect %s found in RETPOLINE build",
2323 			  insn->sec, insn->offset,
2324 			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2325 
2326 		warnings++;
2327 	}
2328 
2329 	return warnings;
2330 }
2331 
2332 static bool is_kasan_insn(struct instruction *insn)
2333 {
2334 	return (insn->type == INSN_CALL &&
2335 		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2336 }
2337 
2338 static bool is_ubsan_insn(struct instruction *insn)
2339 {
2340 	return (insn->type == INSN_CALL &&
2341 		!strcmp(insn->call_dest->name,
2342 			"__ubsan_handle_builtin_unreachable"));
2343 }
2344 
2345 static bool ignore_unreachable_insn(struct instruction *insn)
2346 {
2347 	int i;
2348 
2349 	if (insn->ignore || insn->type == INSN_NOP)
2350 		return true;
2351 
2352 	/*
2353 	 * Ignore any unused exceptions.  This can happen when a whitelisted
2354 	 * function has an exception table entry.
2355 	 *
2356 	 * Also ignore alternative replacement instructions.  This can happen
2357 	 * when a whitelisted function uses one of the ALTERNATIVE macros.
2358 	 */
2359 	if (!strcmp(insn->sec->name, ".fixup") ||
2360 	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
2361 	    !strcmp(insn->sec->name, ".altinstr_aux"))
2362 		return true;
2363 
2364 	if (!insn->func)
2365 		return false;
2366 
2367 	/*
2368 	 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
2369 	 * __builtin_unreachable().  The BUG() macro has an unreachable() after
2370 	 * the UD2, which causes GCC's undefined trap logic to emit another UD2
2371 	 * (or occasionally a JMP to UD2).
2372 	 */
2373 	if (list_prev_entry(insn, list)->dead_end &&
2374 	    (insn->type == INSN_BUG ||
2375 	     (insn->type == INSN_JUMP_UNCONDITIONAL &&
2376 	      insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
2377 		return true;
2378 
2379 	/*
2380 	 * Check if this (or a subsequent) instruction is related to
2381 	 * CONFIG_UBSAN or CONFIG_KASAN.
2382 	 *
2383 	 * End the search at 5 instructions to avoid going into the weeds.
2384 	 */
2385 	for (i = 0; i < 5; i++) {
2386 
2387 		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2388 			return true;
2389 
2390 		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2391 			if (insn->jump_dest &&
2392 			    insn->jump_dest->func == insn->func) {
2393 				insn = insn->jump_dest;
2394 				continue;
2395 			}
2396 
2397 			break;
2398 		}
2399 
2400 		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2401 			break;
2402 
2403 		insn = list_next_entry(insn, list);
2404 	}
2405 
2406 	return false;
2407 }
2408 
2409 static int validate_section(struct objtool_file *file, struct section *sec)
2410 {
2411 	struct symbol *func;
2412 	struct instruction *insn;
2413 	struct insn_state state;
2414 	int ret, warnings = 0;
2415 
2416 	clear_insn_state(&state);
2417 
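	/*
	 * Every function starts from the architecture's entry-point CFI
	 * state from arch_initial_func_cfi_state(); on x86-64 the CFA is
	 * %rsp + 8, accounting for the return address pushed by the caller.
	 */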
2418 	state.cfa = initial_func_cfi.cfa;
2419 	memcpy(&state.regs, &initial_func_cfi.regs,
2420 	       CFI_NUM_REGS * sizeof(struct cfi_reg));
2421 	state.stack_size = initial_func_cfi.cfa.offset;
2422 
2423 	list_for_each_entry(func, &sec->symbol_list, list) {
2424 		if (func->type != STT_FUNC)
2425 			continue;
2426 
2427 		if (!func->len) {
2428 			WARN("%s() is missing an ELF size annotation",
2429 			     func->name);
2430 			warnings++;
2431 		}
2432 
2433 		if (func->pfunc != func || func->alias != func)
2434 			continue;
2435 
2436 		insn = find_insn(file, sec, func->offset);
2437 		if (!insn || insn->ignore || insn->visited)
2438 			continue;
2439 
2440 		state.uaccess = func->uaccess_safe;
2441 
2442 		ret = validate_branch(file, func, insn, state);
2443 		if (ret && backtrace)
2444 			BT_FUNC("<=== (func)", insn);
2445 		warnings += ret;
2446 	}
2447 
2448 	return warnings;
2449 }
2450 
2451 static int validate_functions(struct objtool_file *file)
2452 {
2453 	struct section *sec;
2454 	int warnings = 0;
2455 
2456 	for_each_sec(file, sec)
2457 		warnings += validate_section(file, sec);
2458 
2459 	return warnings;
2460 }
2461 
2462 static int validate_reachable_instructions(struct objtool_file *file)
2463 {
2464 	struct instruction *insn;
2465 
2466 	if (file->ignore_unreachables)
2467 		return 0;
2468 
2469 	for_each_insn(file, insn) {
2470 		if (insn->visited || ignore_unreachable_insn(insn))
2471 			continue;
2472 
2473 		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2474 		return 1;
2475 	}
2476 
2477 	return 0;
2478 }
2479 
2480 static struct objtool_file file;
2481 
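/*
 * Top-level entry point: decode the object file, then validate retpolines
 * (if requested), function stack states, unwind hints and reachability, and
 * finally, when @orc is set, generate the ORC unwind sections and write them
 * back to the file.
 */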
2482 int check(const char *_objname, bool orc)
2483 {
2484 	int ret, warnings = 0;
2485 
2486 	objname = _objname;
2487 
2488 	file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
2489 	if (!file.elf)
2490 		return 1;
2491 
2492 	INIT_LIST_HEAD(&file.insn_list);
2493 	hash_init(file.insn_hash);
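	/* objects built from C have a compiler-emitted .comment section */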
2494 	file.c_file = find_section_by_name(file.elf, ".comment");
2495 	file.ignore_unreachables = no_unreachable;
2496 	file.hints = false;
2497 
2498 	arch_initial_func_cfi_state(&initial_func_cfi);
2499 
2500 	ret = decode_sections(&file);
2501 	if (ret < 0)
2502 		goto out;
2503 	warnings += ret;
2504 
2505 	if (list_empty(&file.insn_list))
2506 		goto out;
2507 
2508 	if (retpoline) {
2509 		ret = validate_retpoline(&file);
2510 		if (ret < 0)
2511 			return ret;
2512 		warnings += ret;
2513 	}
2514 
2515 	ret = validate_functions(&file);
2516 	if (ret < 0)
2517 		goto out;
2518 	warnings += ret;
2519 
2520 	ret = validate_unwind_hints(&file);
2521 	if (ret < 0)
2522 		goto out;
2523 	warnings += ret;
2524 
2525 	if (!warnings) {
2526 		ret = validate_reachable_instructions(&file);
2527 		if (ret < 0)
2528 			goto out;
2529 		warnings += ret;
2530 	}
2531 
2532 	if (orc) {
2533 		ret = create_orc(&file);
2534 		if (ret < 0)
2535 			goto out;
2536 
2537 		ret = create_orc_sections(&file);
2538 		if (ret < 0)
2539 			goto out;
2540 
2541 		ret = elf_write(file.elf);
2542 		if (ret < 0)
2543 			goto out;
2544 	}
2545 
2546 out:
2547 	if (ret < 0) {
2548 		/*
2549 		 * Fatal error.  The binary is corrupt or otherwise broken in
2550 		 * some way, or objtool itself is broken.  Fail the kernel
2551 		 * build.
2552 		 */
2553 		return ret;
2554 	}
2555 
2556 	return 0;
2557 }
2558