xref: /openbmc/linux/tools/objtool/check.c (revision 6cc23ed2)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5 
6 #include <string.h>
7 #include <stdlib.h>
8 
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15 
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18 
19 #define FAKE_JUMP_OFFSET -1
20 
21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table"
22 
23 struct alternative {
24 	struct list_head list;
25 	struct instruction *insn;
26 	bool skip_orig;
27 };
28 
29 const char *objname;
30 struct cfi_state initial_func_cfi;
31 
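/*
 * Look up the instruction at @sec + @offset via the per-file instruction hash.
 */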
32 struct instruction *find_insn(struct objtool_file *file,
33 			      struct section *sec, unsigned long offset)
34 {
35 	struct instruction *insn;
36 
37 	hash_for_each_possible(file->insn_hash, insn, hash, offset)
38 		if (insn->sec == sec && insn->offset == offset)
39 			return insn;
40 
41 	return NULL;
42 }
43 
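/*
 * Return the instruction following @insn, or NULL if @insn is the last
 * instruction of its section.
 */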
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 					      struct instruction *insn)
46 {
47 	struct instruction *next = list_next_entry(insn, list);
48 
49 	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 		return NULL;
51 
52 	return next;
53 }
54 
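/*
 * Like next_insn_same_sec(), but stay within the function: when the parent
 * function ends, continue into its .cold. subfunction (func->cfunc) if any.
 */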
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 					       struct instruction *insn)
57 {
58 	struct instruction *next = list_next_entry(insn, list);
59 	struct symbol *func = insn->func;
60 
61 	if (!func)
62 		return NULL;
63 
64 	if (&next->list != &file->insn_list && next->func == func)
65 		return next;
66 
67 	/* Check if we're already in the subfunction: */
68 	if (func == func->cfunc)
69 		return NULL;
70 
71 	/* Move to the subfunction: */
72 	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74 
75 #define func_for_each_insn_all(file, func, insn)			\
76 	for (insn = find_insn(file, func->sec, func->offset);		\
77 	     insn;							\
78 	     insn = next_insn_same_func(file, insn))
79 
80 #define func_for_each_insn(file, func, insn)				\
81 	for (insn = find_insn(file, func->sec, func->offset);		\
82 	     insn && &insn->list != &file->insn_list &&			\
83 		insn->sec == func->sec &&				\
84 		insn->offset < func->offset + func->len;		\
85 	     insn = list_next_entry(insn, list))
86 
87 #define func_for_each_insn_continue_reverse(file, func, insn)		\
88 	for (insn = list_prev_entry(insn, list);			\
89 	     &insn->list != &file->insn_list &&				\
90 		insn->sec == func->sec && insn->offset >= func->offset;	\
91 	     insn = list_prev_entry(insn, list))
92 
93 #define sec_for_each_insn_from(file, insn)				\
94 	for (; insn; insn = next_insn_same_sec(file, insn))
95 
96 #define sec_for_each_insn_continue(file, insn)				\
97 	for (insn = next_insn_same_sec(file, insn); insn;		\
98 	     insn = next_insn_same_sec(file, insn))
99 
100 static bool is_sibling_call(struct instruction *insn)
101 {
102 	/* An indirect jump is either a sibling call or a jump to a table. */
103 	if (insn->type == INSN_JUMP_DYNAMIC)
104 		return list_empty(&insn->alts);
105 
106 	if (insn->type != INSN_JUMP_CONDITIONAL &&
107 	    insn->type != INSN_JUMP_UNCONDITIONAL)
108 		return false;
109 
110 	/* add_jump_destinations() sets insn->call_dest for sibling calls. */
111 	return !!insn->call_dest;
112 }
113 
114 /*
115  * This checks to see if the given function is a "noreturn" function.
116  *
117  * For global functions which are outside the scope of this object file, we
118  * have to keep a manual list of them.
119  *
120  * For local functions, we have to detect them manually by simply looking for
121  * the lack of a return instruction.
122  */
123 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
124 				int recursion)
125 {
126 	int i;
127 	struct instruction *insn;
128 	bool empty = true;
129 
130 	/*
131 	 * Unfortunately these have to be hard coded because the noreturn
132 	 * attribute isn't provided in ELF data.
133 	 */
134 	static const char * const global_noreturns[] = {
135 		"__stack_chk_fail",
136 		"panic",
137 		"do_exit",
138 		"do_task_dead",
139 		"__module_put_and_exit",
140 		"complete_and_exit",
141 		"__reiserfs_panic",
142 		"lbug_with_loc",
143 		"fortify_panic",
144 		"usercopy_abort",
145 		"machine_real_restart",
146 		"rewind_stack_do_exit",
147 	};
148 
149 	if (!func)
150 		return false;
151 
152 	if (func->bind == STB_WEAK)
153 		return false;
154 
155 	if (func->bind == STB_GLOBAL)
156 		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
157 			if (!strcmp(func->name, global_noreturns[i]))
158 				return true;
159 
160 	if (!func->len)
161 		return false;
162 
163 	insn = find_insn(file, func->sec, func->offset);
164 	if (!insn->func)
165 		return false;
166 
167 	func_for_each_insn_all(file, func, insn) {
168 		empty = false;
169 
170 		if (insn->type == INSN_RETURN)
171 			return false;
172 	}
173 
174 	if (empty)
175 		return false;
176 
177 	/*
178 	 * A function can have a sibling call instead of a return.  In that
179 	 * case, the function's dead-end status depends on whether the target
180 	 * of the sibling call returns.
181 	 */
182 	func_for_each_insn_all(file, func, insn) {
183 		if (is_sibling_call(insn)) {
184 			struct instruction *dest = insn->jump_dest;
185 
186 			if (!dest)
187 				/* sibling call to another file */
188 				return false;
189 
190 			/* local sibling call */
191 			if (recursion == 5) {
192 				/*
193 				 * Infinite recursion: two functions have
194 				 * sibling calls to each other.  This is a very
195 				 * rare case.  It means they aren't dead ends.
196 				 */
197 				return false;
198 			}
199 
200 			return __dead_end_function(file, dest->func, recursion+1);
201 		}
202 	}
203 
204 	return true;
205 }
206 
207 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
208 {
209 	return __dead_end_function(file, func, 0);
210 }
211 
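/*
 * Reset an insn_state to "nothing known": undefined CFA, no tracked register
 * locations and no DRAP state.
 */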
212 static void clear_insn_state(struct insn_state *state)
213 {
214 	int i;
215 
216 	memset(state, 0, sizeof(*state));
217 	state->cfa.base = CFI_UNDEFINED;
218 	for (i = 0; i < CFI_NUM_REGS; i++) {
219 		state->regs[i].base = CFI_UNDEFINED;
220 		state->vals[i].base = CFI_UNDEFINED;
221 	}
222 	state->drap_reg = CFI_UNDEFINED;
223 	state->drap_offset = -1;
224 }
225 
226 /*
227  * Call the arch-specific instruction decoder for all the instructions and add
228  * them to the global instruction list.
229  */
230 static int decode_instructions(struct objtool_file *file)
231 {
232 	struct section *sec;
233 	struct symbol *func;
234 	unsigned long offset;
235 	struct instruction *insn;
236 	int ret;
237 
238 	for_each_sec(file, sec) {
239 
240 		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
241 			continue;
242 
243 		if (strcmp(sec->name, ".altinstr_replacement") &&
244 		    strcmp(sec->name, ".altinstr_aux") &&
245 		    strncmp(sec->name, ".discard.", 9))
246 			sec->text = true;
247 
248 		for (offset = 0; offset < sec->len; offset += insn->len) {
249 			insn = malloc(sizeof(*insn));
250 			if (!insn) {
251 				WARN("malloc failed");
252 				return -1;
253 			}
254 			memset(insn, 0, sizeof(*insn));
255 			INIT_LIST_HEAD(&insn->alts);
256 			clear_insn_state(&insn->state);
257 
258 			insn->sec = sec;
259 			insn->offset = offset;
260 
261 			ret = arch_decode_instruction(file->elf, sec, offset,
262 						      sec->len - offset,
263 						      &insn->len, &insn->type,
264 						      &insn->immediate,
265 						      &insn->stack_op);
266 			if (ret)
267 				goto err;
268 
269 			hash_add(file->insn_hash, &insn->hash, insn->offset);
270 			list_add_tail(&insn->list, &file->insn_list);
271 		}
272 
273 		list_for_each_entry(func, &sec->symbol_list, list) {
274 			if (func->type != STT_FUNC || func->alias != func)
275 				continue;
276 
277 			if (!find_insn(file, sec, func->offset)) {
278 				WARN("%s(): can't find starting instruction",
279 				     func->name);
280 				return -1;
281 			}
282 
283 			func_for_each_insn(file, func, insn)
284 				insn->func = func;
285 		}
286 	}
287 
288 	return 0;
289 
290 err:
291 	free(insn);
292 	return ret;
293 }
294 
295 /*
296  * Mark "ud2" instructions and manually annotated dead ends.
297  */
298 static int add_dead_ends(struct objtool_file *file)
299 {
300 	struct section *sec;
301 	struct rela *rela;
302 	struct instruction *insn;
303 	bool found;
304 
305 	/*
306 	 * By default, "ud2" is a dead end unless otherwise annotated, because
307 	 * GCC 7 inserts it for certain divide-by-zero cases.
308 	 */
309 	for_each_insn(file, insn)
310 		if (insn->type == INSN_BUG)
311 			insn->dead_end = true;
312 
313 	/*
314 	 * Check for manually annotated dead ends.
315 	 */
316 	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
317 	if (!sec)
318 		goto reachable;
319 
320 	list_for_each_entry(rela, &sec->rela_list, list) {
321 		if (rela->sym->type != STT_SECTION) {
322 			WARN("unexpected relocation symbol type in %s", sec->name);
323 			return -1;
324 		}
325 		insn = find_insn(file, rela->sym->sec, rela->addend);
326 		if (insn)
327 			insn = list_prev_entry(insn, list);
328 		else if (rela->addend == rela->sym->sec->len) {
329 			found = false;
330 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
331 				if (insn->sec == rela->sym->sec) {
332 					found = true;
333 					break;
334 				}
335 			}
336 
337 			if (!found) {
338 				WARN("can't find unreachable insn at %s+0x%x",
339 				     rela->sym->sec->name, rela->addend);
340 				return -1;
341 			}
342 		} else {
343 			WARN("can't find unreachable insn at %s+0x%x",
344 			     rela->sym->sec->name, rela->addend);
345 			return -1;
346 		}
347 
348 		insn->dead_end = true;
349 	}
350 
351 reachable:
352 	/*
353 	 * These manually annotated reachable checks are needed for GCC 4.4,
354 	 * where the Linux unreachable() macro isn't supported.  In that case
355 	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
356 	 * not a dead end.
357 	 */
358 	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
359 	if (!sec)
360 		return 0;
361 
362 	list_for_each_entry(rela, &sec->rela_list, list) {
363 		if (rela->sym->type != STT_SECTION) {
364 			WARN("unexpected relocation symbol type in %s", sec->name);
365 			return -1;
366 		}
367 		insn = find_insn(file, rela->sym->sec, rela->addend);
368 		if (insn)
369 			insn = list_prev_entry(insn, list);
370 		else if (rela->addend == rela->sym->sec->len) {
371 			found = false;
372 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
373 				if (insn->sec == rela->sym->sec) {
374 					found = true;
375 					break;
376 				}
377 			}
378 
379 			if (!found) {
380 				WARN("can't find reachable insn at %s+0x%x",
381 				     rela->sym->sec->name, rela->addend);
382 				return -1;
383 			}
384 		} else {
385 			WARN("can't find reachable insn at %s+0x%x",
386 			     rela->sym->sec->name, rela->addend);
387 			return -1;
388 		}
389 
390 		insn->dead_end = false;
391 	}
392 
393 	return 0;
394 }
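/*
 * For reference (a sketch, not part of this file): the unreachable/reachable
 * entries above are typically generated by the kernel's annotate_unreachable()
 * and annotate_reachable() macros, which record the annotated address in a
 * .discard.{unreachable,reachable} section, roughly:
 *
 *	asm volatile("1:\n\t"
 *		     ".pushsection .discard.unreachable\n\t"
 *		     ".long 1b - .\n\t"
 *		     ".popsection\n\t");
 */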
395 
396 /*
397  * Warnings shouldn't be reported for ignored functions.
398  */
399 static void add_ignores(struct objtool_file *file)
400 {
401 	struct instruction *insn;
402 	struct section *sec;
403 	struct symbol *func;
404 	struct rela *rela;
405 
406 	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
407 	if (!sec)
408 		return;
409 
410 	list_for_each_entry(rela, &sec->rela_list, list) {
411 		switch (rela->sym->type) {
412 		case STT_FUNC:
413 			func = rela->sym;
414 			break;
415 
416 		case STT_SECTION:
417 			func = find_symbol_by_offset(rela->sym->sec, rela->addend);
418 			if (!func || func->type != STT_FUNC)
419 				continue;
420 			break;
421 
422 		default:
423 			WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
424 			continue;
425 		}
426 
427 		func_for_each_insn_all(file, func, insn)
428 			insn->ignore = true;
429 	}
430 }
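/*
 * For reference (a sketch, not part of this file): functions normally end up
 * in .discard.func_stack_frame_non_standard via the kernel's
 * STACK_FRAME_NON_STANDARD(func) macro, which stores the function's address
 * in that section.
 */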
431 
432 /*
433  * This is a whitelist of functions that are allowed to be called with AC set.
434  * The list is meant to be minimal and only contains compiler instrumentation
435  * ABI and a few functions used to implement *_{to,from}_user() functions.
436  *
437  * These functions must not directly change AC, but may PUSHF/POPF.
438  */
439 static const char *uaccess_safe_builtin[] = {
440 	/* KASAN */
441 	"kasan_report",
442 	"check_memory_region",
443 	/* KASAN out-of-line */
444 	"__asan_loadN_noabort",
445 	"__asan_load1_noabort",
446 	"__asan_load2_noabort",
447 	"__asan_load4_noabort",
448 	"__asan_load8_noabort",
449 	"__asan_load16_noabort",
450 	"__asan_storeN_noabort",
451 	"__asan_store1_noabort",
452 	"__asan_store2_noabort",
453 	"__asan_store4_noabort",
454 	"__asan_store8_noabort",
455 	"__asan_store16_noabort",
456 	/* KASAN in-line */
457 	"__asan_report_load_n_noabort",
458 	"__asan_report_load1_noabort",
459 	"__asan_report_load2_noabort",
460 	"__asan_report_load4_noabort",
461 	"__asan_report_load8_noabort",
462 	"__asan_report_load16_noabort",
463 	"__asan_report_store_n_noabort",
464 	"__asan_report_store1_noabort",
465 	"__asan_report_store2_noabort",
466 	"__asan_report_store4_noabort",
467 	"__asan_report_store8_noabort",
468 	"__asan_report_store16_noabort",
469 	/* KCOV */
470 	"write_comp_data",
471 	"__sanitizer_cov_trace_pc",
472 	"__sanitizer_cov_trace_const_cmp1",
473 	"__sanitizer_cov_trace_const_cmp2",
474 	"__sanitizer_cov_trace_const_cmp4",
475 	"__sanitizer_cov_trace_const_cmp8",
476 	"__sanitizer_cov_trace_cmp1",
477 	"__sanitizer_cov_trace_cmp2",
478 	"__sanitizer_cov_trace_cmp4",
479 	"__sanitizer_cov_trace_cmp8",
480 	/* UBSAN */
481 	"ubsan_type_mismatch_common",
482 	"__ubsan_handle_type_mismatch",
483 	"__ubsan_handle_type_mismatch_v1",
484 	/* misc */
485 	"csum_partial_copy_generic",
486 	"__memcpy_mcsafe",
487 	"mcsafe_handle_tail",
488 	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
489 	NULL
490 };
491 
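/*
 * When uaccess checking is enabled, mark every symbol named in
 * uaccess_safe_builtin[] as safe to call with AC set.
 */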
492 static void add_uaccess_safe(struct objtool_file *file)
493 {
494 	struct symbol *func;
495 	const char **name;
496 
497 	if (!uaccess)
498 		return;
499 
500 	for (name = uaccess_safe_builtin; *name; name++) {
501 		func = find_symbol_by_name(file->elf, *name);
502 		if (!func)
503 			continue;
504 
505 		func->uaccess_safe = true;
506 	}
507 }
508 
509 /*
510  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
511  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
512  * But it at least allows objtool to understand the control flow *around* the
513  * retpoline.
514  */
515 static int add_ignore_alternatives(struct objtool_file *file)
516 {
517 	struct section *sec;
518 	struct rela *rela;
519 	struct instruction *insn;
520 
521 	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
522 	if (!sec)
523 		return 0;
524 
525 	list_for_each_entry(rela, &sec->rela_list, list) {
526 		if (rela->sym->type != STT_SECTION) {
527 			WARN("unexpected relocation symbol type in %s", sec->name);
528 			return -1;
529 		}
530 
531 		insn = find_insn(file, rela->sym->sec, rela->addend);
532 		if (!insn) {
533 			WARN("bad .discard.ignore_alts entry");
534 			return -1;
535 		}
536 
537 		insn->ignore_alts = true;
538 	}
539 
540 	return 0;
541 }
542 
543 /*
544  * Find the destination instructions for all jumps.
545  */
546 static int add_jump_destinations(struct objtool_file *file)
547 {
548 	struct instruction *insn;
549 	struct rela *rela;
550 	struct section *dest_sec;
551 	unsigned long dest_off;
552 
553 	for_each_insn(file, insn) {
554 		if (insn->type != INSN_JUMP_CONDITIONAL &&
555 		    insn->type != INSN_JUMP_UNCONDITIONAL)
556 			continue;
557 
558 		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
559 			continue;
560 
561 		rela = find_rela_by_dest_range(insn->sec, insn->offset,
562 					       insn->len);
563 		if (!rela) {
564 			dest_sec = insn->sec;
565 			dest_off = insn->offset + insn->len + insn->immediate;
566 		} else if (rela->sym->type == STT_SECTION) {
567 			dest_sec = rela->sym->sec;
568 			dest_off = rela->addend + 4;
569 		} else if (rela->sym->sec->idx) {
570 			dest_sec = rela->sym->sec;
571 			dest_off = rela->sym->sym.st_value + rela->addend + 4;
572 		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
573 			/*
574 			 * Retpoline jumps are really dynamic jumps in
575 			 * disguise, so convert them accordingly.
576 			 */
577 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
578 				insn->type = INSN_JUMP_DYNAMIC;
579 			else
580 				insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
581 
582 			insn->retpoline_safe = true;
583 			continue;
584 		} else {
585 			/* external sibling call */
586 			insn->call_dest = rela->sym;
587 			continue;
588 		}
589 
590 		insn->jump_dest = find_insn(file, dest_sec, dest_off);
591 		if (!insn->jump_dest) {
592 
593 			/*
594 			 * This is a special case where an alt instruction
595 			 * jumps past the end of the section.  These are
596 			 * handled later in handle_group_alt().
597 			 */
598 			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
599 				continue;
600 
601 			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
602 				  insn->sec, insn->offset, dest_sec->name,
603 				  dest_off);
604 			return -1;
605 		}
606 
607 		/*
608 		 * Cross-function jump.
609 		 */
610 		if (insn->func && insn->jump_dest->func &&
611 		    insn->func != insn->jump_dest->func) {
612 
613 			/*
614 			 * For GCC 8+, create parent/child links for any cold
615 			 * subfunctions.  This is _mostly_ redundant with a
616 			 * similar initialization in read_symbols().
617 			 *
618 			 * If a function has aliases, we want the *first* such
619 			 * function in the symbol table to be the subfunction's
620 			 * parent.  In that case we overwrite the
621 			 * initialization done in read_symbols().
622 			 *
623 			 * However this code can't completely replace the
624 			 * read_symbols() code because this doesn't detect the
625 			 * case where the parent function's only reference to a
626 			 * subfunction is through a jump table.
627 			 */
628 			if (!strstr(insn->func->name, ".cold.") &&
629 			    strstr(insn->jump_dest->func->name, ".cold.")) {
630 				insn->func->cfunc = insn->jump_dest->func;
631 				insn->jump_dest->func->pfunc = insn->func;
632 
633 			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
634 				   insn->jump_dest->offset == insn->jump_dest->func->offset) {
635 
636 				/* internal sibling call */
637 				insn->call_dest = insn->jump_dest->func;
638 			}
639 		}
640 	}
641 
642 	return 0;
643 }
644 
645 /*
646  * Find the destination instructions for all calls.
647  */
648 static int add_call_destinations(struct objtool_file *file)
649 {
650 	struct instruction *insn;
651 	unsigned long dest_off;
652 	struct rela *rela;
653 
654 	for_each_insn(file, insn) {
655 		if (insn->type != INSN_CALL)
656 			continue;
657 
658 		rela = find_rela_by_dest_range(insn->sec, insn->offset,
659 					       insn->len);
660 		if (!rela) {
661 			dest_off = insn->offset + insn->len + insn->immediate;
662 			insn->call_dest = find_symbol_by_offset(insn->sec,
663 								dest_off);
664 
665 			if (!insn->call_dest && !insn->ignore) {
666 				WARN_FUNC("unsupported intra-function call",
667 					  insn->sec, insn->offset);
668 				if (retpoline)
669 					WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
670 				return -1;
671 			}
672 
673 		} else if (rela->sym->type == STT_SECTION) {
674 			insn->call_dest = find_symbol_by_offset(rela->sym->sec,
675 								rela->addend+4);
676 			if (!insn->call_dest ||
677 			    insn->call_dest->type != STT_FUNC) {
678 				WARN_FUNC("can't find call dest symbol at %s+0x%x",
679 					  insn->sec, insn->offset,
680 					  rela->sym->sec->name,
681 					  rela->addend + 4);
682 				return -1;
683 			}
684 		} else
685 			insn->call_dest = rela->sym;
686 	}
687 
688 	return 0;
689 }
690 
691 /*
692  * The .alternatives section requires some extra special care, over and above
693  * what other special sections require:
694  *
695  * 1. Because alternatives are patched in-place, we need to insert a fake jump
696  *    instruction at the end so that validate_branch() skips all the original
697  *    replaced instructions when validating the new instruction path.
698  *
699  * 2. An added wrinkle is that the new instruction length might be zero.  In
700  *    that case the old instructions are replaced with noops.  We simulate that
701  *    by creating a fake jump as the only new instruction.
702  *
703  * 3. In some cases, the alternative section includes an instruction which
704  *    conditionally jumps to the _end_ of the entry.  We have to modify these
705  *    jumps' destinations to point back to .text rather than the end of the
706  *    entry in .altinstr_replacement.
707  */
708 static int handle_group_alt(struct objtool_file *file,
709 			    struct special_alt *special_alt,
710 			    struct instruction *orig_insn,
711 			    struct instruction **new_insn)
712 {
713 	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
714 	unsigned long dest_off;
715 
716 	last_orig_insn = NULL;
717 	insn = orig_insn;
718 	sec_for_each_insn_from(file, insn) {
719 		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
720 			break;
721 
722 		insn->alt_group = true;
723 		last_orig_insn = insn;
724 	}
725 
726 	if (next_insn_same_sec(file, last_orig_insn)) {
727 		fake_jump = malloc(sizeof(*fake_jump));
728 		if (!fake_jump) {
729 			WARN("malloc failed");
730 			return -1;
731 		}
732 		memset(fake_jump, 0, sizeof(*fake_jump));
733 		INIT_LIST_HEAD(&fake_jump->alts);
734 		clear_insn_state(&fake_jump->state);
735 
736 		fake_jump->sec = special_alt->new_sec;
737 		fake_jump->offset = FAKE_JUMP_OFFSET;
738 		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
739 		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
740 		fake_jump->func = orig_insn->func;
741 	}
742 
743 	if (!special_alt->new_len) {
744 		if (!fake_jump) {
745 			WARN("%s: empty alternative at end of section",
746 			     special_alt->orig_sec->name);
747 			return -1;
748 		}
749 
750 		*new_insn = fake_jump;
751 		return 0;
752 	}
753 
754 	last_new_insn = NULL;
755 	insn = *new_insn;
756 	sec_for_each_insn_from(file, insn) {
757 		if (insn->offset >= special_alt->new_off + special_alt->new_len)
758 			break;
759 
760 		last_new_insn = insn;
761 
762 		insn->ignore = orig_insn->ignore_alts;
763 		insn->func = orig_insn->func;
764 
765 		if (insn->type != INSN_JUMP_CONDITIONAL &&
766 		    insn->type != INSN_JUMP_UNCONDITIONAL)
767 			continue;
768 
769 		if (!insn->immediate)
770 			continue;
771 
772 		dest_off = insn->offset + insn->len + insn->immediate;
773 		if (dest_off == special_alt->new_off + special_alt->new_len) {
774 			if (!fake_jump) {
775 				WARN("%s: alternative jump to end of section",
776 				     special_alt->orig_sec->name);
777 				return -1;
778 			}
779 			insn->jump_dest = fake_jump;
780 		}
781 
782 		if (!insn->jump_dest) {
783 			WARN_FUNC("can't find alternative jump destination",
784 				  insn->sec, insn->offset);
785 			return -1;
786 		}
787 	}
788 
789 	if (!last_new_insn) {
790 		WARN_FUNC("can't find last new alternative instruction",
791 			  special_alt->new_sec, special_alt->new_off);
792 		return -1;
793 	}
794 
795 	if (fake_jump)
796 		list_add(&fake_jump->list, &last_new_insn->list);
797 
798 	return 0;
799 }
800 
801 /*
802  * A jump table entry can either convert a nop to a jump or a jump to a nop.
803  * If the original instruction is a jump, make the alt entry an effective nop
804  * by just skipping the original instruction.
805  */
806 static int handle_jump_alt(struct objtool_file *file,
807 			   struct special_alt *special_alt,
808 			   struct instruction *orig_insn,
809 			   struct instruction **new_insn)
810 {
811 	if (orig_insn->type == INSN_NOP)
812 		return 0;
813 
814 	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
815 		WARN_FUNC("unsupported instruction at jump label",
816 			  orig_insn->sec, orig_insn->offset);
817 		return -1;
818 	}
819 
820 	*new_insn = list_next_entry(orig_insn, list);
821 	return 0;
822 }
823 
824 /*
825  * Read all the special sections with alternate instructions which can be
826  * patched in or redirected to at runtime.  Each instruction having alternate
827  * instruction(s) has them added to its insn->alts list, which will be
828  * traversed in validate_branch().
829  */
830 static int add_special_section_alts(struct objtool_file *file)
831 {
832 	struct list_head special_alts;
833 	struct instruction *orig_insn, *new_insn;
834 	struct special_alt *special_alt, *tmp;
835 	struct alternative *alt;
836 	int ret;
837 
838 	ret = special_get_alts(file->elf, &special_alts);
839 	if (ret)
840 		return ret;
841 
842 	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
843 
844 		orig_insn = find_insn(file, special_alt->orig_sec,
845 				      special_alt->orig_off);
846 		if (!orig_insn) {
847 			WARN_FUNC("special: can't find orig instruction",
848 				  special_alt->orig_sec, special_alt->orig_off);
849 			ret = -1;
850 			goto out;
851 		}
852 
853 		new_insn = NULL;
854 		if (!special_alt->group || special_alt->new_len) {
855 			new_insn = find_insn(file, special_alt->new_sec,
856 					     special_alt->new_off);
857 			if (!new_insn) {
858 				WARN_FUNC("special: can't find new instruction",
859 					  special_alt->new_sec,
860 					  special_alt->new_off);
861 				ret = -1;
862 				goto out;
863 			}
864 		}
865 
866 		if (special_alt->group) {
867 			ret = handle_group_alt(file, special_alt, orig_insn,
868 					       &new_insn);
869 			if (ret)
870 				goto out;
871 		} else if (special_alt->jump_or_nop) {
872 			ret = handle_jump_alt(file, special_alt, orig_insn,
873 					      &new_insn);
874 			if (ret)
875 				goto out;
876 		}
877 
878 		alt = malloc(sizeof(*alt));
879 		if (!alt) {
880 			WARN("malloc failed");
881 			ret = -1;
882 			goto out;
883 		}
884 
885 		alt->insn = new_insn;
886 		alt->skip_orig = special_alt->skip_orig;
887 		orig_insn->ignore_alts |= special_alt->skip_alt;
888 		list_add_tail(&alt->list, &orig_insn->alts);
889 
890 		list_del(&special_alt->list);
891 		free(special_alt);
892 	}
893 
894 out:
895 	return ret;
896 }
897 
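/*
 * Add each entry of the given switch jump table as an alternative branch of
 * @insn, so that validate_branch() follows every possible table destination.
 */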
898 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
899 			    struct rela *table)
900 {
901 	struct rela *rela = table;
902 	struct instruction *dest_insn;
903 	struct alternative *alt;
904 	struct symbol *pfunc = insn->func->pfunc;
905 	unsigned int prev_offset = 0;
906 
907 	/*
908 	 * Each @rela is a switch table relocation which points to the target
909 	 * instruction.
910 	 */
911 	list_for_each_entry_from(rela, &table->sec->rela_list, list) {
912 
913 		/* Check for the end of the table: */
914 		if (rela != table && rela->jump_table_start)
915 			break;
916 
917 		/* Make sure the table entries are consecutive: */
918 		if (prev_offset && rela->offset != prev_offset + 8)
919 			break;
920 
921 		/* Detect function pointers from contiguous objects: */
922 		if (rela->sym->sec == pfunc->sec &&
923 		    rela->addend == pfunc->offset)
924 			break;
925 
926 		dest_insn = find_insn(file, rela->sym->sec, rela->addend);
927 		if (!dest_insn)
928 			break;
929 
930 		/* Make sure the destination is in the same function: */
931 		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
932 			break;
933 
934 		alt = malloc(sizeof(*alt));
935 		if (!alt) {
936 			WARN("malloc failed");
937 			return -1;
938 		}
939 
940 		alt->insn = dest_insn;
941 		list_add_tail(&alt->list, &insn->alts);
942 		prev_offset = rela->offset;
943 	}
944 
945 	if (!prev_offset) {
946 		WARN_FUNC("can't find switch jump table",
947 			  insn->sec, insn->offset);
948 		return -1;
949 	}
950 
951 	return 0;
952 }
953 
954 /*
955  * find_jump_table() - Given a dynamic jump, find the switch jump table in
956  * .rodata associated with it.
957  *
958  * There are 3 basic patterns:
959  *
960  * 1. jmpq *[rodata addr](,%reg,8)
961  *
962  *    This is the most common case by far.  It jumps to an address in a simple
963  *    jump table which is stored in .rodata.
964  *
965  * 2. jmpq *[rodata addr](%rip)
966  *
967  *    This is caused by a rare GCC quirk, currently only seen in three driver
968  *    functions in the kernel, only with certain obscure non-distro configs.
969  *
970  *    As part of an optimization, GCC makes a copy of an existing switch jump
971  *    table, modifies it, and then hard-codes the jump (albeit with an indirect
972  *    jump) to use a single entry in the table.  The rest of the jump table and
973  *    some of its jump targets remain as dead code.
974  *
975  *    In such a case we can just crudely ignore all unreachable instruction
976  *    warnings for the entire object file.  Ideally we would just ignore them
977  *    for the function, but that would require redesigning the code quite a
978  *    bit.  And honestly that's just not worth doing: unreachable instruction
979  *    warnings are of questionable value anyway, and this is such a rare issue.
980  *
981  * 3. mov [rodata addr],%reg1
982  *    ... some instructions ...
983  *    jmpq *(%reg1,%reg2,8)
984  *
985  *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
986  *    writing, there are 11 occurrences of it in the allmodconfig kernel.
987  *
988  *    As of GCC 7 there are quite a few more of these and the 'in between'
989  *    code is significant.  Especially with KASAN enabled, some of the code
990  *    between the mov and jmpq uses .rodata itself, which can confuse things.
991  *
992  *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
993  *    ensure the same register is used in the mov and jump instructions.
994  *
995  *    NOTE: RETPOLINE made it harder still to decode dynamic jumps.
996  */
997 static struct rela *find_jump_table(struct objtool_file *file,
998 				      struct symbol *func,
999 				      struct instruction *insn)
1000 {
1001 	struct rela *text_rela, *table_rela;
1002 	struct instruction *orig_insn = insn;
1003 	struct section *table_sec;
1004 	unsigned long table_offset;
1005 
1006 	/*
1007 	 * Backward search using the @first_jump_src links: these help avoid
1008 	 * much of the 'in between' code, which keeps us from getting confused
1009 	 * by it.
1010 	 */
1011 	for (;
1012 	     &insn->list != &file->insn_list &&
1013 	     insn->sec == func->sec &&
1014 	     insn->offset >= func->offset;
1015 
1016 	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1017 
1018 		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1019 			break;
1020 
1021 		/* allow small jumps within the range */
1022 		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1023 		    insn->jump_dest &&
1024 		    (insn->jump_dest->offset <= insn->offset ||
1025 		     insn->jump_dest->offset > orig_insn->offset))
1026 		    break;
1027 
1028 		/* look for a relocation which references .rodata */
1029 		text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
1030 						    insn->len);
1031 		if (!text_rela || text_rela->sym->type != STT_SECTION ||
1032 		    !text_rela->sym->sec->rodata)
1033 			continue;
1034 
1035 		table_offset = text_rela->addend;
1036 		table_sec = text_rela->sym->sec;
1037 
1038 		if (text_rela->type == R_X86_64_PC32)
1039 			table_offset += 4;
1040 
1041 		/*
1042 		 * Make sure the .rodata address isn't associated with a
1043 		 * symbol.  GCC jump tables are anonymous data.
1044 		 *
1045 		 * Also support C jump tables which are in the same format as
1046 		 * switch jump tables.  For objtool to recognize them, they
1047 		 * need to be placed in the C_JUMP_TABLE_SECTION section.  They
1048 		 * have symbols associated with them.
1049 		 */
1050 		if (find_symbol_containing(table_sec, table_offset) &&
1051 		    strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
1052 			continue;
1053 
1054 		/* Each table entry has a rela associated with it. */
1055 		table_rela = find_rela_by_dest(table_sec, table_offset);
1056 		if (!table_rela)
1057 			continue;
1058 
1059 		/*
1060 		 * Use of RIP-relative switch jumps is quite rare, and
1061 		 * indicates a rare GCC quirk/bug which can leave dead code
1062 		 * behind.
1063 		 */
1064 		if (text_rela->type == R_X86_64_PC32)
1065 			file->ignore_unreachables = true;
1066 
1067 		return table_rela;
1068 	}
1069 
1070 	return NULL;
1071 }
1072 
1073 /*
1074  * First pass: Mark the head of each jump table so that in the next pass,
1075  * we know when a given jump table ends and the next one starts.
1076  */
1077 static void mark_func_jump_tables(struct objtool_file *file,
1078 				    struct symbol *func)
1079 {
1080 	struct instruction *insn, *last = NULL;
1081 	struct rela *rela;
1082 
1083 	func_for_each_insn_all(file, func, insn) {
1084 		if (!last)
1085 			last = insn;
1086 
1087 		/*
1088 		 * Store back-pointers for unconditional forward jumps such
1089 		 * that find_jump_table() can back-track using those and
1090 		 * avoid some potentially confusing code.
1091 		 */
1092 		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1093 		    insn->offset > last->offset &&
1094 		    insn->jump_dest->offset > insn->offset &&
1095 		    !insn->jump_dest->first_jump_src) {
1096 
1097 			insn->jump_dest->first_jump_src = insn;
1098 			last = insn->jump_dest;
1099 		}
1100 
1101 		if (insn->type != INSN_JUMP_DYNAMIC)
1102 			continue;
1103 
1104 		rela = find_jump_table(file, func, insn);
1105 		if (rela) {
1106 			rela->jump_table_start = true;
1107 			insn->jump_table = rela;
1108 		}
1109 	}
1110 }
1111 
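/*
 * Second pass: for each dynamic jump which mark_func_jump_tables() associated
 * with a jump table, add the table's entries as alternatives.
 */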
1112 static int add_func_jump_tables(struct objtool_file *file,
1113 				  struct symbol *func)
1114 {
1115 	struct instruction *insn;
1116 	int ret;
1117 
1118 	func_for_each_insn_all(file, func, insn) {
1119 		if (!insn->jump_table)
1120 			continue;
1121 
1122 		ret = add_jump_table(file, insn, insn->jump_table);
1123 		if (ret)
1124 			return ret;
1125 	}
1126 
1127 	return 0;
1128 }
1129 
1130 /*
1131  * For some switch statements, gcc generates a jump table in the .rodata
1132  * section which contains a list of addresses within the function to jump to.
1133  * This finds these jump tables and adds them to the insn->alts lists.
1134  */
1135 static int add_jump_table_alts(struct objtool_file *file)
1136 {
1137 	struct section *sec;
1138 	struct symbol *func;
1139 	int ret;
1140 
1141 	if (!file->rodata)
1142 		return 0;
1143 
1144 	for_each_sec(file, sec) {
1145 		list_for_each_entry(func, &sec->symbol_list, list) {
1146 			if (func->type != STT_FUNC)
1147 				continue;
1148 
1149 			mark_func_jump_tables(file, func);
1150 			ret = add_func_jump_tables(file, func);
1151 			if (ret)
1152 				return ret;
1153 		}
1154 	}
1155 
1156 	return 0;
1157 }
1158 
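/*
 * Apply the UNWIND_HINT annotations from .discard.unwind_hints: SAVE/RESTORE
 * hints are only flagged for validate_branch(), all other hints seed the
 * instruction's CFA base/offset and unwind type directly.
 */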
1159 static int read_unwind_hints(struct objtool_file *file)
1160 {
1161 	struct section *sec, *relasec;
1162 	struct rela *rela;
1163 	struct unwind_hint *hint;
1164 	struct instruction *insn;
1165 	struct cfi_reg *cfa;
1166 	int i;
1167 
1168 	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1169 	if (!sec)
1170 		return 0;
1171 
1172 	relasec = sec->rela;
1173 	if (!relasec) {
1174 		WARN("missing .rela.discard.unwind_hints section");
1175 		return -1;
1176 	}
1177 
1178 	if (sec->len % sizeof(struct unwind_hint)) {
1179 		WARN("struct unwind_hint size mismatch");
1180 		return -1;
1181 	}
1182 
1183 	file->hints = true;
1184 
1185 	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1186 		hint = (struct unwind_hint *)sec->data->d_buf + i;
1187 
1188 		rela = find_rela_by_dest(sec, i * sizeof(*hint));
1189 		if (!rela) {
1190 			WARN("can't find rela for unwind_hints[%d]", i);
1191 			return -1;
1192 		}
1193 
1194 		insn = find_insn(file, rela->sym->sec, rela->addend);
1195 		if (!insn) {
1196 			WARN("can't find insn for unwind_hints[%d]", i);
1197 			return -1;
1198 		}
1199 
1200 		cfa = &insn->state.cfa;
1201 
1202 		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1203 			insn->save = true;
1204 			continue;
1205 
1206 		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1207 			insn->restore = true;
1208 			insn->hint = true;
1209 			continue;
1210 		}
1211 
1212 		insn->hint = true;
1213 
1214 		switch (hint->sp_reg) {
1215 		case ORC_REG_UNDEFINED:
1216 			cfa->base = CFI_UNDEFINED;
1217 			break;
1218 		case ORC_REG_SP:
1219 			cfa->base = CFI_SP;
1220 			break;
1221 		case ORC_REG_BP:
1222 			cfa->base = CFI_BP;
1223 			break;
1224 		case ORC_REG_SP_INDIRECT:
1225 			cfa->base = CFI_SP_INDIRECT;
1226 			break;
1227 		case ORC_REG_R10:
1228 			cfa->base = CFI_R10;
1229 			break;
1230 		case ORC_REG_R13:
1231 			cfa->base = CFI_R13;
1232 			break;
1233 		case ORC_REG_DI:
1234 			cfa->base = CFI_DI;
1235 			break;
1236 		case ORC_REG_DX:
1237 			cfa->base = CFI_DX;
1238 			break;
1239 		default:
1240 			WARN_FUNC("unsupported unwind_hint sp base reg %d",
1241 				  insn->sec, insn->offset, hint->sp_reg);
1242 			return -1;
1243 		}
1244 
1245 		cfa->offset = hint->sp_offset;
1246 		insn->state.type = hint->type;
1247 		insn->state.end = hint->end;
1248 	}
1249 
1250 	return 0;
1251 }
1252 
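/*
 * Mark the instructions listed in .rela.discard.retpoline_safe (typically via
 * the ANNOTATE_RETPOLINE_SAFE annotation) as deliberate indirect jumps/calls.
 */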
1253 static int read_retpoline_hints(struct objtool_file *file)
1254 {
1255 	struct section *sec;
1256 	struct instruction *insn;
1257 	struct rela *rela;
1258 
1259 	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1260 	if (!sec)
1261 		return 0;
1262 
1263 	list_for_each_entry(rela, &sec->rela_list, list) {
1264 		if (rela->sym->type != STT_SECTION) {
1265 			WARN("unexpected relocation symbol type in %s", sec->name);
1266 			return -1;
1267 		}
1268 
1269 		insn = find_insn(file, rela->sym->sec, rela->addend);
1270 		if (!insn) {
1271 			WARN("bad .discard.retpoline_safe entry");
1272 			return -1;
1273 		}
1274 
1275 		if (insn->type != INSN_JUMP_DYNAMIC &&
1276 		    insn->type != INSN_CALL_DYNAMIC) {
1277 			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1278 				  insn->sec, insn->offset);
1279 			return -1;
1280 		}
1281 
1282 		insn->retpoline_safe = true;
1283 	}
1284 
1285 	return 0;
1286 }
1287 
1288 static void mark_rodata(struct objtool_file *file)
1289 {
1290 	struct section *sec;
1291 	bool found = false;
1292 
1293 	/*
1294 	 * Search for the following rodata sections, each of which can
1295 	 * potentially contain jump tables:
1296 	 *
1297 	 * - .rodata: can contain GCC switch tables
1298 	 * - .rodata.<func>: same, if -fdata-sections is being used
1299 	 * - .rodata..c_jump_table: contains C annotated jump tables
1300 	 *
1301 	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1302 	 */
1303 	for_each_sec(file, sec) {
1304 		if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
1305 		    !strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
1306 			sec->rodata = true;
1307 			found = true;
1308 		}
1309 	}
1310 
1311 	file->rodata = found;
1312 }
1313 
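/*
 * Run all of the above decoding and annotation passes, in dependency order.
 */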
1314 static int decode_sections(struct objtool_file *file)
1315 {
1316 	int ret;
1317 
1318 	mark_rodata(file);
1319 
1320 	ret = decode_instructions(file);
1321 	if (ret)
1322 		return ret;
1323 
1324 	ret = add_dead_ends(file);
1325 	if (ret)
1326 		return ret;
1327 
1328 	add_ignores(file);
1329 	add_uaccess_safe(file);
1330 
1331 	ret = add_ignore_alternatives(file);
1332 	if (ret)
1333 		return ret;
1334 
1335 	ret = add_jump_destinations(file);
1336 	if (ret)
1337 		return ret;
1338 
1339 	ret = add_special_section_alts(file);
1340 	if (ret)
1341 		return ret;
1342 
1343 	ret = add_call_destinations(file);
1344 	if (ret)
1345 		return ret;
1346 
1347 	ret = add_jump_table_alts(file);
1348 	if (ret)
1349 		return ret;
1350 
1351 	ret = read_unwind_hints(file);
1352 	if (ret)
1353 		return ret;
1354 
1355 	ret = read_retpoline_hints(file);
1356 	if (ret)
1357 		return ret;
1358 
1359 	return 0;
1360 }
1361 
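/*
 * Check whether @insn is the compiler-generated call to __fentry__ emitted at
 * function entry for ftrace.
 */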
1362 static bool is_fentry_call(struct instruction *insn)
1363 {
1364 	if (insn->type == INSN_CALL &&
1365 	    insn->call_dest->type == STT_NOTYPE &&
1366 	    !strcmp(insn->call_dest->name, "__fentry__"))
1367 		return true;
1368 
1369 	return false;
1370 }
1371 
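/*
 * Return true if the tracked stack state differs from the initial state at
 * function entry: CFA moved, DRAP in use, or any register save location set.
 */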
1372 static bool has_modified_stack_frame(struct insn_state *state)
1373 {
1374 	int i;
1375 
1376 	if (state->cfa.base != initial_func_cfi.cfa.base ||
1377 	    state->cfa.offset != initial_func_cfi.cfa.offset ||
1378 	    state->stack_size != initial_func_cfi.cfa.offset ||
1379 	    state->drap)
1380 		return true;
1381 
1382 	for (i = 0; i < CFI_NUM_REGS; i++)
1383 		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1384 		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
1385 			return true;
1386 
1387 	return false;
1388 }
1389 
1390 static bool has_valid_stack_frame(struct insn_state *state)
1391 {
1392 	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1393 	    state->regs[CFI_BP].offset == -16)
1394 		return true;
1395 
1396 	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1397 		return true;
1398 
1399 	return false;
1400 }
1401 
1402 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1403 {
1404 	struct cfi_reg *cfa = &state->cfa;
1405 	struct stack_op *op = &insn->stack_op;
1406 
1407 	if (cfa->base != CFI_SP)
1408 		return 0;
1409 
1410 	/* push */
1411 	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1412 		cfa->offset += 8;
1413 
1414 	/* pop */
1415 	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1416 		cfa->offset -= 8;
1417 
1418 	/* add immediate to sp */
1419 	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1420 	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1421 		cfa->offset -= op->src.offset;
1422 
1423 	return 0;
1424 }
1425 
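/*
 * Record where a callee-saved register was stored, but only if its location
 * isn't already known.
 */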
1426 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1427 		     int offset)
1428 {
1429 	if (arch_callee_saved_reg(reg) &&
1430 	    state->regs[reg].base == CFI_UNDEFINED) {
1431 		state->regs[reg].base = base;
1432 		state->regs[reg].offset = offset;
1433 	}
1434 }
1435 
1436 static void restore_reg(struct insn_state *state, unsigned char reg)
1437 {
1438 	state->regs[reg].base = CFI_UNDEFINED;
1439 	state->regs[reg].offset = 0;
1440 }
1441 
1442 /*
1443  * A note about DRAP stack alignment:
1444  *
1445  * GCC has the concept of a DRAP register, which is used to help keep track of
1446  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1447  * register.  The typical DRAP pattern is:
1448  *
1449  *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
1450  *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
1451  *   41 ff 72 f8		pushq  -0x8(%r10)
1452  *   55				push   %rbp
1453  *   48 89 e5			mov    %rsp,%rbp
1454  *				(more pushes)
1455  *   41 52			push   %r10
1456  *				...
1457  *   41 5a			pop    %r10
1458  *				(more pops)
1459  *   5d				pop    %rbp
1460  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1461  *   c3				retq
1462  *
1463  * There are some variations in the epilogues, like:
1464  *
1465  *   5b				pop    %rbx
1466  *   41 5a			pop    %r10
1467  *   41 5c			pop    %r12
1468  *   41 5d			pop    %r13
1469  *   41 5e			pop    %r14
1470  *   c9				leaveq
1471  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1472  *   c3				retq
1473  *
1474  * and:
1475  *
1476  *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
1477  *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
1478  *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
1479  *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
1480  *   c9				leaveq
1481  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1482  *   c3				retq
1483  *
1484  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1485  * restored beforehand:
1486  *
1487  *   41 55			push   %r13
1488  *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
1489  *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
1490  *				...
1491  *   49 8d 65 f0		lea    -0x10(%r13),%rsp
1492  *   41 5d			pop    %r13
1493  *   c3				retq
1494  */
1495 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1496 {
1497 	struct stack_op *op = &insn->stack_op;
1498 	struct cfi_reg *cfa = &state->cfa;
1499 	struct cfi_reg *regs = state->regs;
1500 
1501 	/* stack operations don't make sense with an undefined CFA */
1502 	if (cfa->base == CFI_UNDEFINED) {
1503 		if (insn->func) {
1504 			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1505 			return -1;
1506 		}
1507 		return 0;
1508 	}
1509 
1510 	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1511 		return update_insn_state_regs(insn, state);
1512 
1513 	switch (op->dest.type) {
1514 
1515 	case OP_DEST_REG:
1516 		switch (op->src.type) {
1517 
1518 		case OP_SRC_REG:
1519 			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1520 			    cfa->base == CFI_SP &&
1521 			    regs[CFI_BP].base == CFI_CFA &&
1522 			    regs[CFI_BP].offset == -cfa->offset) {
1523 
1524 				/* mov %rsp, %rbp */
1525 				cfa->base = op->dest.reg;
1526 				state->bp_scratch = false;
1527 			}
1528 
1529 			else if (op->src.reg == CFI_SP &&
1530 				 op->dest.reg == CFI_BP && state->drap) {
1531 
1532 				/* drap: mov %rsp, %rbp */
1533 				regs[CFI_BP].base = CFI_BP;
1534 				regs[CFI_BP].offset = -state->stack_size;
1535 				state->bp_scratch = false;
1536 			}
1537 
1538 			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1539 
1540 				/*
1541 				 * mov %rsp, %reg
1542 				 *
1543 				 * This is needed for the rare case where GCC
1544 				 * does:
1545 				 *
1546 				 *   mov    %rsp, %rax
1547 				 *   ...
1548 				 *   mov    %rax, %rsp
1549 				 */
1550 				state->vals[op->dest.reg].base = CFI_CFA;
1551 				state->vals[op->dest.reg].offset = -state->stack_size;
1552 			}
1553 
1554 			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1555 				 cfa->base == CFI_BP) {
1556 
1557 				/*
1558 				 * mov %rbp, %rsp
1559 				 *
1560 				 * Restore the original stack pointer (Clang).
1561 				 */
1562 				state->stack_size = -state->regs[CFI_BP].offset;
1563 			}
1564 
1565 			else if (op->dest.reg == cfa->base) {
1566 
1567 				/* mov %reg, %rsp */
1568 				if (cfa->base == CFI_SP &&
1569 				    state->vals[op->src.reg].base == CFI_CFA) {
1570 
1571 					/*
1572 					 * This is needed for the rare case
1573 					 * where GCC does something dumb like:
1574 					 *
1575 					 *   lea    0x8(%rsp), %rcx
1576 					 *   ...
1577 					 *   mov    %rcx, %rsp
1578 					 */
1579 					cfa->offset = -state->vals[op->src.reg].offset;
1580 					state->stack_size = cfa->offset;
1581 
1582 				} else {
1583 					cfa->base = CFI_UNDEFINED;
1584 					cfa->offset = 0;
1585 				}
1586 			}
1587 
1588 			break;
1589 
1590 		case OP_SRC_ADD:
1591 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1592 
1593 				/* add imm, %rsp */
1594 				state->stack_size -= op->src.offset;
1595 				if (cfa->base == CFI_SP)
1596 					cfa->offset -= op->src.offset;
1597 				break;
1598 			}
1599 
1600 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1601 
1602 				/* lea disp(%rbp), %rsp */
1603 				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1604 				break;
1605 			}
1606 
1607 			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1608 
1609 				/* drap: lea disp(%rsp), %drap */
1610 				state->drap_reg = op->dest.reg;
1611 
1612 				/*
1613 				 * lea disp(%rsp), %reg
1614 				 *
1615 				 * This is needed for the rare case where GCC
1616 				 * does something dumb like:
1617 				 *
1618 				 *   lea    0x8(%rsp), %rcx
1619 				 *   ...
1620 				 *   mov    %rcx, %rsp
1621 				 */
1622 				state->vals[op->dest.reg].base = CFI_CFA;
1623 				state->vals[op->dest.reg].offset = \
1624 					-state->stack_size + op->src.offset;
1625 
1626 				break;
1627 			}
1628 
1629 			if (state->drap && op->dest.reg == CFI_SP &&
1630 			    op->src.reg == state->drap_reg) {
1631 
1632 				 /* drap: lea disp(%drap), %rsp */
1633 				cfa->base = CFI_SP;
1634 				cfa->offset = state->stack_size = -op->src.offset;
1635 				state->drap_reg = CFI_UNDEFINED;
1636 				state->drap = false;
1637 				break;
1638 			}
1639 
1640 			if (op->dest.reg == state->cfa.base) {
1641 				WARN_FUNC("unsupported stack register modification",
1642 					  insn->sec, insn->offset);
1643 				return -1;
1644 			}
1645 
1646 			break;
1647 
1648 		case OP_SRC_AND:
1649 			if (op->dest.reg != CFI_SP ||
1650 			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1651 			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1652 				WARN_FUNC("unsupported stack pointer realignment",
1653 					  insn->sec, insn->offset);
1654 				return -1;
1655 			}
1656 
1657 			if (state->drap_reg != CFI_UNDEFINED) {
1658 				/* drap: and imm, %rsp */
1659 				cfa->base = state->drap_reg;
1660 				cfa->offset = state->stack_size = 0;
1661 				state->drap = true;
1662 			}
1663 
1664 			/*
1665 			 * Older versions of GCC (4.8ish) realign the stack
1666 			 * without DRAP, with a frame pointer.
1667 			 */
1668 
1669 			break;
1670 
1671 		case OP_SRC_POP:
1672 		case OP_SRC_POPF:
1673 			if (!state->drap && op->dest.type == OP_DEST_REG &&
1674 			    op->dest.reg == cfa->base) {
1675 
1676 				/* pop %rbp */
1677 				cfa->base = CFI_SP;
1678 			}
1679 
1680 			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1681 			    op->dest.type == OP_DEST_REG &&
1682 			    op->dest.reg == state->drap_reg &&
1683 			    state->drap_offset == -state->stack_size) {
1684 
1685 				/* drap: pop %drap */
1686 				cfa->base = state->drap_reg;
1687 				cfa->offset = 0;
1688 				state->drap_offset = -1;
1689 
1690 			} else if (regs[op->dest.reg].offset == -state->stack_size) {
1691 
1692 				/* pop %reg */
1693 				restore_reg(state, op->dest.reg);
1694 			}
1695 
1696 			state->stack_size -= 8;
1697 			if (cfa->base == CFI_SP)
1698 				cfa->offset -= 8;
1699 
1700 			break;
1701 
1702 		case OP_SRC_REG_INDIRECT:
1703 			if (state->drap && op->src.reg == CFI_BP &&
1704 			    op->src.offset == state->drap_offset) {
1705 
1706 				/* drap: mov disp(%rbp), %drap */
1707 				cfa->base = state->drap_reg;
1708 				cfa->offset = 0;
1709 				state->drap_offset = -1;
1710 			}
1711 
1712 			if (state->drap && op->src.reg == CFI_BP &&
1713 			    op->src.offset == regs[op->dest.reg].offset) {
1714 
1715 				/* drap: mov disp(%rbp), %reg */
1716 				restore_reg(state, op->dest.reg);
1717 
1718 			} else if (op->src.reg == cfa->base &&
1719 			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1720 
1721 				/* mov disp(%rbp), %reg */
1722 				/* mov disp(%rsp), %reg */
1723 				restore_reg(state, op->dest.reg);
1724 			}
1725 
1726 			break;
1727 
1728 		default:
1729 			WARN_FUNC("unknown stack-related instruction",
1730 				  insn->sec, insn->offset);
1731 			return -1;
1732 		}
1733 
1734 		break;
1735 
1736 	case OP_DEST_PUSH:
1737 	case OP_DEST_PUSHF:
1738 		state->stack_size += 8;
1739 		if (cfa->base == CFI_SP)
1740 			cfa->offset += 8;
1741 
1742 		if (op->src.type != OP_SRC_REG)
1743 			break;
1744 
1745 		if (state->drap) {
1746 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1747 
1748 				/* drap: push %drap */
1749 				cfa->base = CFI_BP_INDIRECT;
1750 				cfa->offset = -state->stack_size;
1751 
1752 				/* save drap so we know when to restore it */
1753 				state->drap_offset = -state->stack_size;
1754 
1755 			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1756 
1757 				/* drap: push %rbp */
1758 				state->stack_size = 0;
1759 
1760 			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1761 
1762 				/* drap: push %reg */
1763 				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1764 			}
1765 
1766 		} else {
1767 
1768 			/* push %reg */
1769 			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1770 		}
1771 
1772 		/* detect when asm code uses rbp as a scratch register */
1773 		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1774 		    cfa->base != CFI_BP)
1775 			state->bp_scratch = true;
1776 		break;
1777 
1778 	case OP_DEST_REG_INDIRECT:
1779 
1780 		if (state->drap) {
1781 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1782 
1783 				/* drap: mov %drap, disp(%rbp) */
1784 				cfa->base = CFI_BP_INDIRECT;
1785 				cfa->offset = op->dest.offset;
1786 
1787 				/* save drap offset so we know when to restore it */
1788 				state->drap_offset = op->dest.offset;
1789 			}
1790 
1791 			else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1792 
1793 				/* drap: mov reg, disp(%rbp) */
1794 				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1795 			}
1796 
1797 		} else if (op->dest.reg == cfa->base) {
1798 
1799 			/* mov reg, disp(%rbp) */
1800 			/* mov reg, disp(%rsp) */
1801 			save_reg(state, op->src.reg, CFI_CFA,
1802 				 op->dest.offset - state->cfa.offset);
1803 		}
1804 
1805 		break;
1806 
1807 	case OP_DEST_LEAVE:
1808 		if ((!state->drap && cfa->base != CFI_BP) ||
1809 		    (state->drap && cfa->base != state->drap_reg)) {
1810 			WARN_FUNC("leave instruction with modified stack frame",
1811 				  insn->sec, insn->offset);
1812 			return -1;
1813 		}
1814 
1815 		/* leave (mov %rbp, %rsp; pop %rbp) */
1816 
1817 		state->stack_size = -state->regs[CFI_BP].offset - 8;
1818 		restore_reg(state, CFI_BP);
1819 
1820 		if (!state->drap) {
1821 			cfa->base = CFI_SP;
1822 			cfa->offset -= 8;
1823 		}
1824 
1825 		break;
1826 
1827 	case OP_DEST_MEM:
1828 		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1829 			WARN_FUNC("unknown stack-related memory operation",
1830 				  insn->sec, insn->offset);
1831 			return -1;
1832 		}
1833 
1834 		/* pop mem */
1835 		state->stack_size -= 8;
1836 		if (cfa->base == CFI_SP)
1837 			cfa->offset -= 8;
1838 
1839 		break;
1840 
1841 	default:
1842 		WARN_FUNC("unknown stack-related instruction",
1843 			  insn->sec, insn->offset);
1844 		return -1;
1845 	}
1846 
1847 	return 0;
1848 }
1849 
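/*
 * Compare the stack state previously recorded for @insn with the incoming
 * @state; warn about the first difference and return false on any mismatch.
 */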
1850 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1851 {
1852 	struct insn_state *state1 = &insn->state, *state2 = state;
1853 	int i;
1854 
1855 	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1856 		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1857 			  insn->sec, insn->offset,
1858 			  state1->cfa.base, state1->cfa.offset,
1859 			  state2->cfa.base, state2->cfa.offset);
1860 
1861 	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1862 		for (i = 0; i < CFI_NUM_REGS; i++) {
1863 			if (!memcmp(&state1->regs[i], &state2->regs[i],
1864 				    sizeof(struct cfi_reg)))
1865 				continue;
1866 
1867 			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1868 				  insn->sec, insn->offset,
1869 				  i, state1->regs[i].base, state1->regs[i].offset,
1870 				  i, state2->regs[i].base, state2->regs[i].offset);
1871 			break;
1872 		}
1873 
1874 	} else if (state1->type != state2->type) {
1875 		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1876 			  insn->sec, insn->offset, state1->type, state2->type);
1877 
1878 	} else if (state1->drap != state2->drap ||
1879 		 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1880 		 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1881 		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1882 			  insn->sec, insn->offset,
1883 			  state1->drap, state1->drap_reg, state1->drap_offset,
1884 			  state2->drap, state2->drap_reg, state2->drap_offset);
1885 
1886 	} else
1887 		return true;
1888 
1889 	return false;
1890 }
1891 
1892 static inline bool func_uaccess_safe(struct symbol *func)
1893 {
1894 	if (func)
1895 		return func->uaccess_safe;
1896 
1897 	return false;
1898 }
1899 
1900 static inline const char *call_dest_name(struct instruction *insn)
1901 {
1902 	if (insn->call_dest)
1903 		return insn->call_dest->name;
1904 
1905 	return "{dynamic}";
1906 }
1907 
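/*
 * A call is only allowed with AC set if the destination is whitelisted as
 * uaccess-safe, and is never allowed with DF set.
 */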
1908 static int validate_call(struct instruction *insn, struct insn_state *state)
1909 {
1910 	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1911 		WARN_FUNC("call to %s() with UACCESS enabled",
1912 				insn->sec, insn->offset, call_dest_name(insn));
1913 		return 1;
1914 	}
1915 
1916 	if (state->df) {
1917 		WARN_FUNC("call to %s() with DF set",
1918 				insn->sec, insn->offset, call_dest_name(insn));
1919 		return 1;
1920 	}
1921 
1922 	return 0;
1923 }
1924 
1925 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1926 {
1927 	if (has_modified_stack_frame(state)) {
1928 		WARN_FUNC("sibling call from callable instruction with modified stack frame",
1929 				insn->sec, insn->offset);
1930 		return 1;
1931 	}
1932 
1933 	return validate_call(insn, state);
1934 }
1935 
1936 /*
1937  * Follow the branch starting at the given instruction, and recursively follow
1938  * any other branches (jumps).  Meanwhile, track the frame pointer state at
1939  * each instruction and validate all the rules described in
1940  * tools/objtool/Documentation/stack-validation.txt.
1941  */
1942 static int validate_branch(struct objtool_file *file, struct symbol *func,
1943 			   struct instruction *first, struct insn_state state)
1944 {
1945 	struct alternative *alt;
1946 	struct instruction *insn, *next_insn;
1947 	struct section *sec;
1948 	u8 visited;
1949 	int ret;
1950 
1951 	insn = first;
1952 	sec = insn->sec;
1953 
1954 	if (insn->alt_group && list_empty(&insn->alts)) {
1955 		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
1956 			  sec, insn->offset);
1957 		return 1;
1958 	}
1959 
1960 	while (1) {
1961 		next_insn = next_insn_same_sec(file, insn);
1962 
1963 		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
1964 			WARN("%s() falls through to next function %s()",
1965 			     func->name, insn->func->name);
1966 			return 1;
1967 		}
1968 
1969 		if (func && insn->ignore) {
1970 			WARN_FUNC("BUG: why am I validating an ignored function?",
1971 				  sec, insn->offset);
1972 			return 1;
1973 		}
1974 
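		/*
		 * Track visits separately per UACCESS state: bit 0 means
		 * visited with AC clear, bit 1 with AC set, so each path is
		 * validated once for each state.
		 */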
1975 		visited = 1 << state.uaccess;
1976 		if (insn->visited) {
1977 			if (!insn->hint && !insn_state_match(insn, &state))
1978 				return 1;
1979 
1980 			if (insn->visited & visited)
1981 				return 0;
1982 		}
1983 
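		/*
		 * Unwind hint handling: a "restore" hint reuses the state
		 * recorded at the most recent "save" hint earlier in the
		 * function; any other hint overrides the propagated state
		 * with the one annotated at this instruction.
		 */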
1984 		if (insn->hint) {
1985 			if (insn->restore) {
1986 				struct instruction *save_insn, *i;
1987 
1988 				i = insn;
1989 				save_insn = NULL;
1990 				func_for_each_insn_continue_reverse(file, func, i) {
1991 					if (i->save) {
1992 						save_insn = i;
1993 						break;
1994 					}
1995 				}
1996 
1997 				if (!save_insn) {
1998 					WARN_FUNC("no corresponding CFI save for CFI restore",
1999 						  sec, insn->offset);
2000 					return 1;
2001 				}
2002 
2003 				if (!save_insn->visited) {
2004 					/*
2005 					 * Oops, no state to copy yet.
2006 					 * Hopefully we can reach this
2007 					 * instruction from another branch
2008 					 * after the save insn has been
2009 					 * visited.
2010 					 */
2011 					if (insn == first)
2012 						return 0;
2013 
2014 					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
2015 						  sec, insn->offset);
2016 					return 1;
2017 				}
2018 
2019 				insn->state = save_insn->state;
2020 			}
2021 
2022 			state = insn->state;
2023 
2024 		} else
2025 			insn->state = state;
2026 
2027 		insn->visited |= visited;
2028 
2029 		if (!insn->ignore_alts) {
2030 			bool skip_orig = false;
2031 
2032 			list_for_each_entry(alt, &insn->alts, list) {
2033 				if (alt->skip_orig)
2034 					skip_orig = true;
2035 
2036 				ret = validate_branch(file, func, alt->insn, state);
2037 				if (ret) {
2038 					if (backtrace)
2039 						BT_FUNC("(alt)", insn);
2040 					return ret;
2041 				}
2042 			}
2043 
2044 			if (skip_orig)
2045 				return 0;
2046 		}
2047 
2048 		switch (insn->type) {
2049 
2050 		case INSN_RETURN:
2051 			if (state.uaccess && !func_uaccess_safe(func)) {
2052 				WARN_FUNC("return with UACCESS enabled", sec, insn->offset);
2053 				return 1;
2054 			}
2055 
2056 			if (!state.uaccess && func_uaccess_safe(func)) {
2057 				WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function", sec, insn->offset);
2058 				return 1;
2059 			}
2060 
2061 			if (state.df) {
2062 				WARN_FUNC("return with DF set", sec, insn->offset);
2063 				return 1;
2064 			}
2065 
2066 			if (func && has_modified_stack_frame(&state)) {
2067 				WARN_FUNC("return with modified stack frame",
2068 					  sec, insn->offset);
2069 				return 1;
2070 			}
2071 
2072 			if (state.bp_scratch) {
2073 				WARN("%s uses BP as a scratch register",
2074 				     func->name);
2075 				return 1;
2076 			}
2077 
2078 			return 0;
2079 
2080 		case INSN_CALL:
2081 		case INSN_CALL_DYNAMIC:
2082 			ret = validate_call(insn, &state);
2083 			if (ret)
2084 				return ret;
2085 
2086 			if (!no_fp && func && !is_fentry_call(insn) &&
2087 			    !has_valid_stack_frame(&state)) {
2088 				WARN_FUNC("call without frame pointer save/setup",
2089 					  sec, insn->offset);
2090 				return 1;
2091 			}
2092 
2093 			if (dead_end_function(file, insn->call_dest))
2094 				return 0;
2095 
2096 			break;
2097 
2098 		case INSN_JUMP_CONDITIONAL:
2099 		case INSN_JUMP_UNCONDITIONAL:
2100 			if (func && is_sibling_call(insn)) {
2101 				ret = validate_sibling_call(insn, &state);
2102 				if (ret)
2103 					return ret;
2104 
2105 			} else if (insn->jump_dest) {
2106 				ret = validate_branch(file, func,
2107 						      insn->jump_dest, state);
2108 				if (ret) {
2109 					if (backtrace)
2110 						BT_FUNC("(branch)", insn);
2111 					return ret;
2112 				}
2113 			}
2114 
2115 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
2116 				return 0;
2117 
2118 			break;
2119 
2120 		case INSN_JUMP_DYNAMIC:
2121 		case INSN_JUMP_DYNAMIC_CONDITIONAL:
2122 			if (func && is_sibling_call(insn)) {
2123 				ret = validate_sibling_call(insn, &state);
2124 				if (ret)
2125 					return ret;
2126 			}
2127 
2128 			if (insn->type == INSN_JUMP_DYNAMIC)
2129 				return 0;
2130 
2131 			break;
2132 
2133 		case INSN_CONTEXT_SWITCH:
2134 			if (func && (!next_insn || !next_insn->hint)) {
2135 				WARN_FUNC("unsupported instruction in callable function",
2136 					  sec, insn->offset);
2137 				return 1;
2138 			}
2139 			return 0;
2140 
2141 		case INSN_STACK:
2142 			if (update_insn_state(insn, &state))
2143 				return 1;
2144 
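			/*
			 * PUSHF/POPF save and restore the AC flag, so mirror
			 * that here: uaccess_stack is a bit-stack of UACCESS
			 * states, shifted left on PUSHF and right on POPF,
			 * with a sentinel bit marking the bottom.
			 */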
2145 			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2146 				if (!state.uaccess_stack) {
2147 					state.uaccess_stack = 1;
2148 				} else if (state.uaccess_stack >> 31) {
2149 					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2150 					return 1;
2151 				}
2152 				state.uaccess_stack <<= 1;
2153 				state.uaccess_stack  |= state.uaccess;
2154 			}
2155 
2156 			if (insn->stack_op.src.type == OP_SRC_POPF) {
2157 				if (state.uaccess_stack) {
2158 					state.uaccess = state.uaccess_stack & 1;
2159 					state.uaccess_stack >>= 1;
2160 					if (state.uaccess_stack == 1)
2161 						state.uaccess_stack = 0;
2162 				}
2163 			}
2164 
2165 			break;
2166 
2167 		case INSN_STAC:
2168 			if (state.uaccess) {
2169 				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2170 				return 1;
2171 			}
2172 
2173 			state.uaccess = true;
2174 			break;
2175 
2176 		case INSN_CLAC:
2177 			if (!state.uaccess && func) {
2178 				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2179 				return 1;
2180 			}
2181 
2182 			if (func_uaccess_safe(func) && !state.uaccess_stack) {
2183 				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2184 				return 1;
2185 			}
2186 
2187 			state.uaccess = false;
2188 			break;
2189 
2190 		case INSN_STD:
2191 			if (state.df)
2192 				WARN_FUNC("recursive STD", sec, insn->offset);
2193 
2194 			state.df = true;
2195 			break;
2196 
2197 		case INSN_CLD:
2198 			if (!state.df && func)
2199 				WARN_FUNC("redundant CLD", sec, insn->offset);
2200 
2201 			state.df = false;
2202 			break;
2203 
2204 		default:
2205 			break;
2206 		}
2207 
2208 		if (insn->dead_end)
2209 			return 0;
2210 
2211 		if (!next_insn) {
2212 			if (state.cfa.base == CFI_UNDEFINED)
2213 				return 0;
2214 			WARN("%s: unexpected end of section", sec->name);
2215 			return 1;
2216 		}
2217 
2218 		insn = next_insn;
2219 	}
2220 
2221 	return 0;
2222 }
2223 
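/*
 * Start a validation pass at every instruction carrying an unwind hint that
 * the normal per-function validation didn't already visit.
 */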
2224 static int validate_unwind_hints(struct objtool_file *file)
2225 {
2226 	struct instruction *insn;
2227 	int ret, warnings = 0;
2228 	struct insn_state state;
2229 
2230 	if (!file->hints)
2231 		return 0;
2232 
2233 	clear_insn_state(&state);
2234 
2235 	for_each_insn(file, insn) {
2236 		if (insn->hint && !insn->visited) {
2237 			ret = validate_branch(file, insn->func, insn, state);
2238 			if (ret && backtrace)
2239 				BT_FUNC("<=== (hint)", insn);
2240 			warnings += ret;
2241 		}
2242 	}
2243 
2244 	return warnings;
2245 }
2246 
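/*
 * In a retpoline build, indirect jumps and calls are not allowed; warn about
 * any that aren't explicitly annotated as retpoline-safe.
 */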
2247 static int validate_retpoline(struct objtool_file *file)
2248 {
2249 	struct instruction *insn;
2250 	int warnings = 0;
2251 
2252 	for_each_insn(file, insn) {
2253 		if (insn->type != INSN_JUMP_DYNAMIC &&
2254 		    insn->type != INSN_CALL_DYNAMIC)
2255 			continue;
2256 
2257 		if (insn->retpoline_safe)
2258 			continue;
2259 
2260 		/*
2261 		 * .init.text code is run before userspace and thus doesn't
2262 		 * strictly need retpolines, except in modules, which are
2263 		 * loaded late and very much do need retpolines in their
2264 		 * .init.text.
2265 		 */
2266 		if (!strcmp(insn->sec->name, ".init.text") && !module)
2267 			continue;
2268 
2269 		WARN_FUNC("indirect %s found in RETPOLINE build",
2270 			  insn->sec, insn->offset,
2271 			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2272 
2273 		warnings++;
2274 	}
2275 
2276 	return warnings;
2277 }
2278 
2279 static bool is_kasan_insn(struct instruction *insn)
2280 {
2281 	return (insn->type == INSN_CALL &&
2282 		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2283 }
2284 
2285 static bool is_ubsan_insn(struct instruction *insn)
2286 {
2287 	return (insn->type == INSN_CALL &&
2288 		!strcmp(insn->call_dest->name,
2289 			"__ubsan_handle_builtin_unreachable"));
2290 }
2291 
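/*
 * Decide whether an unreachable instruction can be silently ignored:
 * ignored/nop instructions, exception fixup and alternative sections, and
 * compiler-generated KASAN/UBSAN "unreachable" call sequences.
 */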
2292 static bool ignore_unreachable_insn(struct instruction *insn)
2293 {
2294 	int i;
2295 
2296 	if (insn->ignore || insn->type == INSN_NOP)
2297 		return true;
2298 
2299 	/*
2300 	 * Ignore any unused exceptions.  This can happen when a whitelisted
2301 	 * function has an exception table entry.
2302 	 *
2303 	 * Also ignore alternative replacement instructions.  This can happen
2304 	 * when a whitelisted function uses one of the ALTERNATIVE macros.
2305 	 */
2306 	if (!strcmp(insn->sec->name, ".fixup") ||
2307 	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
2308 	    !strcmp(insn->sec->name, ".altinstr_aux"))
2309 		return true;
2310 
2311 	/*
2312 	 * Check if this (or a subsequent) instruction is related to
2313 	 * CONFIG_UBSAN or CONFIG_KASAN.
2314 	 *
2315 	 * End the search at 5 instructions to avoid going into the weeds.
2316 	 */
2317 	if (!insn->func)
2318 		return false;
2319 	for (i = 0; i < 5; i++) {
2320 
2321 		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2322 			return true;
2323 
2324 		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2325 			if (insn->jump_dest &&
2326 			    insn->jump_dest->func == insn->func) {
2327 				insn = insn->jump_dest;
2328 				continue;
2329 			}
2330 
2331 			break;
2332 		}
2333 
2334 		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2335 			break;
2336 
2337 		insn = list_next_entry(insn, list);
2338 	}
2339 
2340 	return false;
2341 }
2342 
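/*
 * Walk every STT_FUNC symbol and validate its branches, starting from the
 * architecture's initial CFI state (CFA and callee-saved register layout at
 * function entry).
 */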
2343 static int validate_functions(struct objtool_file *file)
2344 {
2345 	struct section *sec;
2346 	struct symbol *func;
2347 	struct instruction *insn;
2348 	struct insn_state state;
2349 	int ret, warnings = 0;
2350 
2351 	clear_insn_state(&state);
2352 
2353 	state.cfa = initial_func_cfi.cfa;
2354 	memcpy(&state.regs, &initial_func_cfi.regs,
2355 	       CFI_NUM_REGS * sizeof(struct cfi_reg));
2356 	state.stack_size = initial_func_cfi.cfa.offset;
2357 
2358 	for_each_sec(file, sec) {
2359 		list_for_each_entry(func, &sec->symbol_list, list) {
2360 			if (func->type != STT_FUNC)
2361 				continue;
2362 
2363 			if (!func->len) {
2364 				WARN("%s() is missing an ELF size annotation",
2365 				     func->name);
2366 				warnings++;
2367 			}
2368 
2369 			if (func->pfunc != func || func->alias != func)
2370 				continue;
2371 
2372 			insn = find_insn(file, sec, func->offset);
2373 			if (!insn || insn->ignore || insn->visited)
2374 				continue;
2375 
2376 			state.uaccess = func->uaccess_safe;
2377 
2378 			ret = validate_branch(file, func, insn, state);
2379 			if (ret && backtrace)
2380 				BT_FUNC("<=== (func)", insn);
2381 			warnings += ret;
2382 		}
2383 	}
2384 
2385 	return warnings;
2386 }
2387 
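/*
 * After all branches have been followed, anything still unvisited is either
 * dead code or something objtool failed to follow; warn unless it falls into
 * one of the ignorable categories above.
 */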
2388 static int validate_reachable_instructions(struct objtool_file *file)
2389 {
2390 	struct instruction *insn;
2391 
2392 	if (file->ignore_unreachables)
2393 		return 0;
2394 
2395 	for_each_insn(file, insn) {
2396 		if (insn->visited || ignore_unreachable_insn(insn))
2397 			continue;
2398 
2399 		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2400 		return 1;
2401 	}
2402 
2403 	return 0;
2404 }
2405 
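/* Free all decoded instructions and their alternatives, then close the ELF. */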
2406 static void cleanup(struct objtool_file *file)
2407 {
2408 	struct instruction *insn, *tmpinsn;
2409 	struct alternative *alt, *tmpalt;
2410 
2411 	list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
2412 		list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
2413 			list_del(&alt->list);
2414 			free(alt);
2415 		}
2416 		list_del(&insn->list);
2417 		hash_del(&insn->hash);
2418 		free(insn);
2419 	}
2420 	elf_close(file->elf);
2421 }
2422 
2423 static struct objtool_file file;
2424 
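/*
 * Main entry point: decode the object file, validate retpolines, frame
 * pointers / CFI and unwind hints, check for unreachable code, and optionally
 * generate the ORC sections.
 */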
2425 int check(const char *_objname, bool orc)
2426 {
2427 	int ret, warnings = 0;
2428 
2429 	objname = _objname;
2430 
2431 	file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
2432 	if (!file.elf)
2433 		return 1;
2434 
2435 	INIT_LIST_HEAD(&file.insn_list);
2436 	hash_init(file.insn_hash);
2437 	file.c_file = find_section_by_name(file.elf, ".comment");
2438 	file.ignore_unreachables = no_unreachable;
2439 	file.hints = false;
2440 
2441 	arch_initial_func_cfi_state(&initial_func_cfi);
2442 
2443 	ret = decode_sections(&file);
2444 	if (ret < 0)
2445 		goto out;
2446 	warnings += ret;
2447 
2448 	if (list_empty(&file.insn_list))
2449 		goto out;
2450 
2451 	if (retpoline) {
2452 		ret = validate_retpoline(&file);
2453 		if (ret < 0)
2454 			goto out;
2455 		warnings += ret;
2456 	}
2457 
2458 	ret = validate_functions(&file);
2459 	if (ret < 0)
2460 		goto out;
2461 	warnings += ret;
2462 
2463 	ret = validate_unwind_hints(&file);
2464 	if (ret < 0)
2465 		goto out;
2466 	warnings += ret;
2467 
2468 	if (!warnings) {
2469 		ret = validate_reachable_instructions(&file);
2470 		if (ret < 0)
2471 			goto out;
2472 		warnings += ret;
2473 	}
2474 
2475 	if (orc) {
2476 		ret = create_orc(&file);
2477 		if (ret < 0)
2478 			goto out;
2479 
2480 		ret = create_orc_sections(&file);
2481 		if (ret < 0)
2482 			goto out;
2483 
2484 		ret = elf_write(file.elf);
2485 		if (ret < 0)
2486 			goto out;
2487 	}
2488 
2489 out:
2490 	cleanup(&file);
2491 
2492 	/* ignore warnings for now until we get all the code cleaned up */
2493 	if (ret || warnings)
2494 		return 0;
2495 	return 0;
2496 }
2497