xref: /openbmc/linux/tools/objtool/check.c (revision e6f4c346)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5 
6 #include <string.h>
7 #include <stdlib.h>
8 
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15 
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18 
19 #define FAKE_JUMP_OFFSET -1
20 
21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table"
22 
23 struct alternative {
24 	struct list_head list;
25 	struct instruction *insn;
26 	bool skip_orig;
27 };
28 
29 const char *objname;
30 struct cfi_state initial_func_cfi;
31 
32 struct instruction *find_insn(struct objtool_file *file,
33 			      struct section *sec, unsigned long offset)
34 {
35 	struct instruction *insn;
36 
37 	hash_for_each_possible(file->insn_hash, insn, hash, offset)
38 		if (insn->sec == sec && insn->offset == offset)
39 			return insn;
40 
41 	return NULL;
42 }
43 
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 					      struct instruction *insn)
46 {
47 	struct instruction *next = list_next_entry(insn, list);
48 
49 	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 		return NULL;
51 
52 	return next;
53 }
54 
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 					       struct instruction *insn)
57 {
58 	struct instruction *next = list_next_entry(insn, list);
59 	struct symbol *func = insn->func;
60 
61 	if (!func)
62 		return NULL;
63 
64 	if (&next->list != &file->insn_list && next->func == func)
65 		return next;
66 
67 	/* Check if we're already in the subfunction: */
68 	if (func == func->cfunc)
69 		return NULL;
70 
71 	/* Move to the subfunction: */
72 	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74 
75 #define func_for_each_insn_all(file, func, insn)			\
76 	for (insn = find_insn(file, func->sec, func->offset);		\
77 	     insn;							\
78 	     insn = next_insn_same_func(file, insn))
79 
80 #define func_for_each_insn(file, func, insn)				\
81 	for (insn = find_insn(file, func->sec, func->offset);		\
82 	     insn && &insn->list != &file->insn_list &&			\
83 		insn->sec == func->sec &&				\
84 		insn->offset < func->offset + func->len;		\
85 	     insn = list_next_entry(insn, list))
86 
87 #define func_for_each_insn_continue_reverse(file, func, insn)		\
88 	for (insn = list_prev_entry(insn, list);			\
89 	     &insn->list != &file->insn_list &&				\
90 		insn->sec == func->sec && insn->offset >= func->offset;	\
91 	     insn = list_prev_entry(insn, list))
92 
93 #define sec_for_each_insn_from(file, insn)				\
94 	for (; insn; insn = next_insn_same_sec(file, insn))
95 
96 #define sec_for_each_insn_continue(file, insn)				\
97 	for (insn = next_insn_same_sec(file, insn); insn;		\
98 	     insn = next_insn_same_sec(file, insn))
99 
100 static bool is_sibling_call(struct instruction *insn)
101 {
102 	/* An indirect jump is either a sibling call or a jump to a table. */
103 	if (insn->type == INSN_JUMP_DYNAMIC)
104 		return list_empty(&insn->alts);
105 
106 	if (insn->type != INSN_JUMP_CONDITIONAL &&
107 	    insn->type != INSN_JUMP_UNCONDITIONAL)
108 		return false;
109 
110 	/* add_jump_destinations() sets insn->call_dest for sibling calls. */
111 	return !!insn->call_dest;
112 }
113 
114 /*
115  * This checks to see if the given function is a "noreturn" function.
116  *
117  * For global functions which are outside the scope of this object file, we
118  * have to keep a manual list of them.
119  *
120  * For local functions, we have to detect them manually by simply looking for
121  * the lack of a return instruction.
122  */
123 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
124 				int recursion)
125 {
126 	int i;
127 	struct instruction *insn;
128 	bool empty = true;
129 
130 	/*
131 	 * Unfortunately these have to be hard coded because the noreturn
132 	 * attribute isn't provided in ELF data.
133 	 */
134 	static const char * const global_noreturns[] = {
135 		"__stack_chk_fail",
136 		"panic",
137 		"do_exit",
138 		"do_task_dead",
139 		"__module_put_and_exit",
140 		"complete_and_exit",
141 		"kvm_spurious_fault",
142 		"__reiserfs_panic",
143 		"lbug_with_loc",
144 		"fortify_panic",
145 		"usercopy_abort",
146 		"machine_real_restart",
147 		"rewind_stack_do_exit",
148 	};
149 
150 	if (!func)
151 		return false;
152 
153 	if (func->bind == STB_WEAK)
154 		return false;
155 
156 	if (func->bind == STB_GLOBAL)
157 		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
158 			if (!strcmp(func->name, global_noreturns[i]))
159 				return true;
160 
161 	if (!func->len)
162 		return false;
163 
164 	insn = find_insn(file, func->sec, func->offset);
165 	if (!insn->func)
166 		return false;
167 
168 	func_for_each_insn_all(file, func, insn) {
169 		empty = false;
170 
171 		if (insn->type == INSN_RETURN)
172 			return false;
173 	}
174 
175 	if (empty)
176 		return false;
177 
178 	/*
179 	 * A function can have a sibling call instead of a return.  In that
180 	 * case, the function's dead-end status depends on whether the target
181 	 * of the sibling call returns.
182 	 */
183 	func_for_each_insn_all(file, func, insn) {
184 		if (is_sibling_call(insn)) {
185 			struct instruction *dest = insn->jump_dest;
186 
187 			if (!dest)
188 				/* sibling call to another file */
189 				return false;
190 
191 			/* local sibling call */
192 			if (recursion == 5) {
193 				/*
194 				 * Infinite recursion: two functions have
195 				 * sibling calls to each other.  This is a very
196 				 * rare case.  It means they aren't dead ends.
197 				 */
198 				return false;
199 			}
200 
201 			return __dead_end_function(file, dest->func, recursion+1);
202 		}
203 	}
204 
205 	return true;
206 }
207 
208 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
209 {
210 	return __dead_end_function(file, func, 0);
211 }
212 
213 static void clear_insn_state(struct insn_state *state)
214 {
215 	int i;
216 
217 	memset(state, 0, sizeof(*state));
218 	state->cfa.base = CFI_UNDEFINED;
219 	for (i = 0; i < CFI_NUM_REGS; i++) {
220 		state->regs[i].base = CFI_UNDEFINED;
221 		state->vals[i].base = CFI_UNDEFINED;
222 	}
223 	state->drap_reg = CFI_UNDEFINED;
224 	state->drap_offset = -1;
225 }
226 
227 /*
228  * Call the arch-specific instruction decoder for all the instructions and add
229  * them to the global instruction list.
230  */
231 static int decode_instructions(struct objtool_file *file)
232 {
233 	struct section *sec;
234 	struct symbol *func;
235 	unsigned long offset;
236 	struct instruction *insn;
237 	int ret;
238 
239 	for_each_sec(file, sec) {
240 
241 		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
242 			continue;
243 
244 		if (strcmp(sec->name, ".altinstr_replacement") &&
245 		    strcmp(sec->name, ".altinstr_aux") &&
246 		    strncmp(sec->name, ".discard.", 9))
247 			sec->text = true;
248 
249 		for (offset = 0; offset < sec->len; offset += insn->len) {
250 			insn = malloc(sizeof(*insn));
251 			if (!insn) {
252 				WARN("malloc failed");
253 				return -1;
254 			}
255 			memset(insn, 0, sizeof(*insn));
256 			INIT_LIST_HEAD(&insn->alts);
257 			clear_insn_state(&insn->state);
258 
259 			insn->sec = sec;
260 			insn->offset = offset;
261 
262 			ret = arch_decode_instruction(file->elf, sec, offset,
263 						      sec->len - offset,
264 						      &insn->len, &insn->type,
265 						      &insn->immediate,
266 						      &insn->stack_op);
267 			if (ret)
268 				goto err;
269 
270 			hash_add(file->insn_hash, &insn->hash, insn->offset);
271 			list_add_tail(&insn->list, &file->insn_list);
272 		}
273 
274 		list_for_each_entry(func, &sec->symbol_list, list) {
275 			if (func->type != STT_FUNC || func->alias != func)
276 				continue;
277 
278 			if (!find_insn(file, sec, func->offset)) {
279 				WARN("%s(): can't find starting instruction",
280 				     func->name);
281 				return -1;
282 			}
283 
284 			func_for_each_insn(file, func, insn)
285 				insn->func = func;
286 		}
287 	}
288 
289 	return 0;
290 
291 err:
292 	free(insn);
293 	return ret;
294 }
295 
296 /*
297  * Mark "ud2" instructions and manually annotated dead ends.
298  */
299 static int add_dead_ends(struct objtool_file *file)
300 {
301 	struct section *sec;
302 	struct rela *rela;
303 	struct instruction *insn;
304 	bool found;
305 
306 	/*
307 	 * By default, "ud2" is a dead end unless otherwise annotated, because
308 	 * GCC 7 inserts it for certain divide-by-zero cases.
309 	 */
310 	for_each_insn(file, insn)
311 		if (insn->type == INSN_BUG)
312 			insn->dead_end = true;
313 
314 	/*
315 	 * Check for manually annotated dead ends.
316 	 */
317 	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
318 	if (!sec)
319 		goto reachable;
320 
321 	list_for_each_entry(rela, &sec->rela_list, list) {
322 		if (rela->sym->type != STT_SECTION) {
323 			WARN("unexpected relocation symbol type in %s", sec->name);
324 			return -1;
325 		}
326 		insn = find_insn(file, rela->sym->sec, rela->addend);
327 		if (insn)
328 			insn = list_prev_entry(insn, list);
329 		else if (rela->addend == rela->sym->sec->len) {
330 			found = false;
331 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
332 				if (insn->sec == rela->sym->sec) {
333 					found = true;
334 					break;
335 				}
336 			}
337 
338 			if (!found) {
339 				WARN("can't find unreachable insn at %s+0x%x",
340 				     rela->sym->sec->name, rela->addend);
341 				return -1;
342 			}
343 		} else {
344 			WARN("can't find unreachable insn at %s+0x%x",
345 			     rela->sym->sec->name, rela->addend);
346 			return -1;
347 		}
348 
349 		insn->dead_end = true;
350 	}
351 
352 reachable:
353 	/*
354 	 * These manually annotated reachable checks are needed for GCC 4.4,
355 	 * where the Linux unreachable() macro isn't supported.  In that case
356 	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
357 	 * not a dead end.
358 	 */
359 	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
360 	if (!sec)
361 		return 0;
362 
363 	list_for_each_entry(rela, &sec->rela_list, list) {
364 		if (rela->sym->type != STT_SECTION) {
365 			WARN("unexpected relocation symbol type in %s", sec->name);
366 			return -1;
367 		}
368 		insn = find_insn(file, rela->sym->sec, rela->addend);
369 		if (insn)
370 			insn = list_prev_entry(insn, list);
371 		else if (rela->addend == rela->sym->sec->len) {
372 			found = false;
373 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
374 				if (insn->sec == rela->sym->sec) {
375 					found = true;
376 					break;
377 				}
378 			}
379 
380 			if (!found) {
381 				WARN("can't find reachable insn at %s+0x%x",
382 				     rela->sym->sec->name, rela->addend);
383 				return -1;
384 			}
385 		} else {
386 			WARN("can't find reachable insn at %s+0x%x",
387 			     rela->sym->sec->name, rela->addend);
388 			return -1;
389 		}
390 
391 		insn->dead_end = false;
392 	}
393 
394 	return 0;
395 }
396 
397 /*
398  * Warnings shouldn't be reported for ignored functions.
399  */
400 static void add_ignores(struct objtool_file *file)
401 {
402 	struct instruction *insn;
403 	struct section *sec;
404 	struct symbol *func;
405 	struct rela *rela;
406 
407 	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
408 	if (!sec)
409 		return;
410 
411 	list_for_each_entry(rela, &sec->rela_list, list) {
412 		switch (rela->sym->type) {
413 		case STT_FUNC:
414 			func = rela->sym;
415 			break;
416 
417 		case STT_SECTION:
418 			func = find_symbol_by_offset(rela->sym->sec, rela->addend);
419 			if (!func || func->type != STT_FUNC)
420 				continue;
421 			break;
422 
423 		default:
424 			WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
425 			continue;
426 		}
427 
428 		func_for_each_insn_all(file, func, insn)
429 			insn->ignore = true;
430 	}
431 }
432 
433 /*
434  * This is a whitelist of functions that is allowed to be called with AC set.
435  * The list is meant to be minimal and only contains compiler instrumentation
436  * ABI and a few functions used to implement *_{to,from}_user() functions.
437  *
438  * These functions must not directly change AC, but may PUSHF/POPF.
439  */
440 static const char *uaccess_safe_builtin[] = {
441 	/* KASAN */
442 	"kasan_report",
443 	"check_memory_region",
444 	/* KASAN out-of-line */
445 	"__asan_loadN_noabort",
446 	"__asan_load1_noabort",
447 	"__asan_load2_noabort",
448 	"__asan_load4_noabort",
449 	"__asan_load8_noabort",
450 	"__asan_load16_noabort",
451 	"__asan_storeN_noabort",
452 	"__asan_store1_noabort",
453 	"__asan_store2_noabort",
454 	"__asan_store4_noabort",
455 	"__asan_store8_noabort",
456 	"__asan_store16_noabort",
457 	/* KASAN in-line */
458 	"__asan_report_load_n_noabort",
459 	"__asan_report_load1_noabort",
460 	"__asan_report_load2_noabort",
461 	"__asan_report_load4_noabort",
462 	"__asan_report_load8_noabort",
463 	"__asan_report_load16_noabort",
464 	"__asan_report_store_n_noabort",
465 	"__asan_report_store1_noabort",
466 	"__asan_report_store2_noabort",
467 	"__asan_report_store4_noabort",
468 	"__asan_report_store8_noabort",
469 	"__asan_report_store16_noabort",
470 	/* KCOV */
471 	"write_comp_data",
472 	"__sanitizer_cov_trace_pc",
473 	"__sanitizer_cov_trace_const_cmp1",
474 	"__sanitizer_cov_trace_const_cmp2",
475 	"__sanitizer_cov_trace_const_cmp4",
476 	"__sanitizer_cov_trace_const_cmp8",
477 	"__sanitizer_cov_trace_cmp1",
478 	"__sanitizer_cov_trace_cmp2",
479 	"__sanitizer_cov_trace_cmp4",
480 	"__sanitizer_cov_trace_cmp8",
481 	/* UBSAN */
482 	"ubsan_type_mismatch_common",
483 	"__ubsan_handle_type_mismatch",
484 	"__ubsan_handle_type_mismatch_v1",
485 	/* misc */
486 	"csum_partial_copy_generic",
487 	"__memcpy_mcsafe",
488 	"mcsafe_handle_tail",
489 	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
490 	NULL
491 };
492 
493 static void add_uaccess_safe(struct objtool_file *file)
494 {
495 	struct symbol *func;
496 	const char **name;
497 
498 	if (!uaccess)
499 		return;
500 
501 	for (name = uaccess_safe_builtin; *name; name++) {
502 		func = find_symbol_by_name(file->elf, *name);
503 		if (!func)
504 			continue;
505 
506 		func->uaccess_safe = true;
507 	}
508 }
509 
510 /*
511  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
512  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
513  * But it at least allows objtool to understand the control flow *around* the
514  * retpoline.
515  */
516 static int add_ignore_alternatives(struct objtool_file *file)
517 {
518 	struct section *sec;
519 	struct rela *rela;
520 	struct instruction *insn;
521 
522 	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
523 	if (!sec)
524 		return 0;
525 
526 	list_for_each_entry(rela, &sec->rela_list, list) {
527 		if (rela->sym->type != STT_SECTION) {
528 			WARN("unexpected relocation symbol type in %s", sec->name);
529 			return -1;
530 		}
531 
532 		insn = find_insn(file, rela->sym->sec, rela->addend);
533 		if (!insn) {
534 			WARN("bad .discard.ignore_alts entry");
535 			return -1;
536 		}
537 
538 		insn->ignore_alts = true;
539 	}
540 
541 	return 0;
542 }
543 
544 /*
545  * Find the destination instructions for all jumps.
546  */
547 static int add_jump_destinations(struct objtool_file *file)
548 {
549 	struct instruction *insn;
550 	struct rela *rela;
551 	struct section *dest_sec;
552 	unsigned long dest_off;
553 
554 	for_each_insn(file, insn) {
555 		if (insn->type != INSN_JUMP_CONDITIONAL &&
556 		    insn->type != INSN_JUMP_UNCONDITIONAL)
557 			continue;
558 
559 		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
560 			continue;
561 
562 		rela = find_rela_by_dest_range(insn->sec, insn->offset,
563 					       insn->len);
564 		if (!rela) {
565 			dest_sec = insn->sec;
566 			dest_off = insn->offset + insn->len + insn->immediate;
567 		} else if (rela->sym->type == STT_SECTION) {
568 			dest_sec = rela->sym->sec;
569 			dest_off = rela->addend + 4;
570 		} else if (rela->sym->sec->idx) {
571 			dest_sec = rela->sym->sec;
572 			dest_off = rela->sym->sym.st_value + rela->addend + 4;
573 		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
574 			/*
575 			 * Retpoline jumps are really dynamic jumps in
576 			 * disguise, so convert them accordingly.
577 			 */
578 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
579 				insn->type = INSN_JUMP_DYNAMIC;
580 			else
581 				insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
582 
583 			insn->retpoline_safe = true;
584 			continue;
585 		} else {
586 			/* external sibling call */
587 			insn->call_dest = rela->sym;
588 			continue;
589 		}
590 
591 		insn->jump_dest = find_insn(file, dest_sec, dest_off);
592 		if (!insn->jump_dest) {
593 
594 			/*
595 			 * This is a special case where an alt instruction
596 			 * jumps past the end of the section.  These are
597 			 * handled later in handle_group_alt().
598 			 */
599 			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
600 				continue;
601 
602 			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
603 				  insn->sec, insn->offset, dest_sec->name,
604 				  dest_off);
605 			return -1;
606 		}
607 
608 		/*
609 		 * Cross-function jump.
610 		 */
611 		if (insn->func && insn->jump_dest->func &&
612 		    insn->func != insn->jump_dest->func) {
613 
614 			/*
615 			 * For GCC 8+, create parent/child links for any cold
616 			 * subfunctions.  This is _mostly_ redundant with a
617 			 * similar initialization in read_symbols().
618 			 *
619 			 * If a function has aliases, we want the *first* such
620 			 * function in the symbol table to be the subfunction's
621 			 * parent.  In that case we overwrite the
622 			 * initialization done in read_symbols().
623 			 *
624 			 * However this code can't completely replace the
625 			 * read_symbols() code because this doesn't detect the
626 			 * case where the parent function's only reference to a
627 			 * subfunction is through a jump table.
628 			 */
629 			if (!strstr(insn->func->name, ".cold.") &&
630 			    strstr(insn->jump_dest->func->name, ".cold.")) {
631 				insn->func->cfunc = insn->jump_dest->func;
632 				insn->jump_dest->func->pfunc = insn->func;
633 
634 			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
635 				   insn->jump_dest->offset == insn->jump_dest->func->offset) {
636 
637 				/* internal sibling call */
638 				insn->call_dest = insn->jump_dest->func;
639 			}
640 		}
641 	}
642 
643 	return 0;
644 }
645 
646 /*
647  * Find the destination instructions for all calls.
648  */
649 static int add_call_destinations(struct objtool_file *file)
650 {
651 	struct instruction *insn;
652 	unsigned long dest_off;
653 	struct rela *rela;
654 
655 	for_each_insn(file, insn) {
656 		if (insn->type != INSN_CALL)
657 			continue;
658 
659 		rela = find_rela_by_dest_range(insn->sec, insn->offset,
660 					       insn->len);
661 		if (!rela) {
662 			dest_off = insn->offset + insn->len + insn->immediate;
663 			insn->call_dest = find_symbol_by_offset(insn->sec,
664 								dest_off);
665 
666 			if (!insn->call_dest && !insn->ignore) {
667 				WARN_FUNC("unsupported intra-function call",
668 					  insn->sec, insn->offset);
669 				if (retpoline)
670 					WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
671 				return -1;
672 			}
673 
674 		} else if (rela->sym->type == STT_SECTION) {
675 			insn->call_dest = find_symbol_by_offset(rela->sym->sec,
676 								rela->addend+4);
677 			if (!insn->call_dest ||
678 			    insn->call_dest->type != STT_FUNC) {
679 				WARN_FUNC("can't find call dest symbol at %s+0x%x",
680 					  insn->sec, insn->offset,
681 					  rela->sym->sec->name,
682 					  rela->addend + 4);
683 				return -1;
684 			}
685 		} else
686 			insn->call_dest = rela->sym;
687 	}
688 
689 	return 0;
690 }
691 
692 /*
693  * The .alternatives section requires some extra special care, over and above
694  * what other special sections require:
695  *
696  * 1. Because alternatives are patched in-place, we need to insert a fake jump
697  *    instruction at the end so that validate_branch() skips all the original
698  *    replaced instructions when validating the new instruction path.
699  *
700  * 2. An added wrinkle is that the new instruction length might be zero.  In
701  *    that case the old instructions are replaced with noops.  We simulate that
702  *    by creating a fake jump as the only new instruction.
703  *
704  * 3. In some cases, the alternative section includes an instruction which
705  *    conditionally jumps to the _end_ of the entry.  We have to modify these
706  *    jumps' destinations to point back to .text rather than the end of the
707  *    entry in .altinstr_replacement.
708  */
709 static int handle_group_alt(struct objtool_file *file,
710 			    struct special_alt *special_alt,
711 			    struct instruction *orig_insn,
712 			    struct instruction **new_insn)
713 {
714 	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
715 	unsigned long dest_off;
716 
717 	last_orig_insn = NULL;
718 	insn = orig_insn;
719 	sec_for_each_insn_from(file, insn) {
720 		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
721 			break;
722 
723 		insn->alt_group = true;
724 		last_orig_insn = insn;
725 	}
726 
727 	if (next_insn_same_sec(file, last_orig_insn)) {
728 		fake_jump = malloc(sizeof(*fake_jump));
729 		if (!fake_jump) {
730 			WARN("malloc failed");
731 			return -1;
732 		}
733 		memset(fake_jump, 0, sizeof(*fake_jump));
734 		INIT_LIST_HEAD(&fake_jump->alts);
735 		clear_insn_state(&fake_jump->state);
736 
737 		fake_jump->sec = special_alt->new_sec;
738 		fake_jump->offset = FAKE_JUMP_OFFSET;
739 		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
740 		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
741 		fake_jump->func = orig_insn->func;
742 	}
743 
744 	if (!special_alt->new_len) {
745 		if (!fake_jump) {
746 			WARN("%s: empty alternative at end of section",
747 			     special_alt->orig_sec->name);
748 			return -1;
749 		}
750 
751 		*new_insn = fake_jump;
752 		return 0;
753 	}
754 
755 	last_new_insn = NULL;
756 	insn = *new_insn;
757 	sec_for_each_insn_from(file, insn) {
758 		if (insn->offset >= special_alt->new_off + special_alt->new_len)
759 			break;
760 
761 		last_new_insn = insn;
762 
763 		insn->ignore = orig_insn->ignore_alts;
764 		insn->func = orig_insn->func;
765 
766 		if (insn->type != INSN_JUMP_CONDITIONAL &&
767 		    insn->type != INSN_JUMP_UNCONDITIONAL)
768 			continue;
769 
770 		if (!insn->immediate)
771 			continue;
772 
773 		dest_off = insn->offset + insn->len + insn->immediate;
774 		if (dest_off == special_alt->new_off + special_alt->new_len) {
775 			if (!fake_jump) {
776 				WARN("%s: alternative jump to end of section",
777 				     special_alt->orig_sec->name);
778 				return -1;
779 			}
780 			insn->jump_dest = fake_jump;
781 		}
782 
783 		if (!insn->jump_dest) {
784 			WARN_FUNC("can't find alternative jump destination",
785 				  insn->sec, insn->offset);
786 			return -1;
787 		}
788 	}
789 
790 	if (!last_new_insn) {
791 		WARN_FUNC("can't find last new alternative instruction",
792 			  special_alt->new_sec, special_alt->new_off);
793 		return -1;
794 	}
795 
796 	if (fake_jump)
797 		list_add(&fake_jump->list, &last_new_insn->list);
798 
799 	return 0;
800 }
801 
802 /*
803  * A jump table entry can either convert a nop to a jump or a jump to a nop.
804  * If the original instruction is a jump, make the alt entry an effective nop
805  * by just skipping the original instruction.
806  */
807 static int handle_jump_alt(struct objtool_file *file,
808 			   struct special_alt *special_alt,
809 			   struct instruction *orig_insn,
810 			   struct instruction **new_insn)
811 {
812 	if (orig_insn->type == INSN_NOP)
813 		return 0;
814 
815 	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
816 		WARN_FUNC("unsupported instruction at jump label",
817 			  orig_insn->sec, orig_insn->offset);
818 		return -1;
819 	}
820 
821 	*new_insn = list_next_entry(orig_insn, list);
822 	return 0;
823 }
824 
825 /*
826  * Read all the special sections which have alternate instructions which can be
827  * patched in or redirected to at runtime.  Each instruction having alternate
828  * instruction(s) has them added to its insn->alts list, which will be
829  * traversed in validate_branch().
830  */
831 static int add_special_section_alts(struct objtool_file *file)
832 {
833 	struct list_head special_alts;
834 	struct instruction *orig_insn, *new_insn;
835 	struct special_alt *special_alt, *tmp;
836 	struct alternative *alt;
837 	int ret;
838 
839 	ret = special_get_alts(file->elf, &special_alts);
840 	if (ret)
841 		return ret;
842 
843 	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
844 
845 		orig_insn = find_insn(file, special_alt->orig_sec,
846 				      special_alt->orig_off);
847 		if (!orig_insn) {
848 			WARN_FUNC("special: can't find orig instruction",
849 				  special_alt->orig_sec, special_alt->orig_off);
850 			ret = -1;
851 			goto out;
852 		}
853 
854 		new_insn = NULL;
855 		if (!special_alt->group || special_alt->new_len) {
856 			new_insn = find_insn(file, special_alt->new_sec,
857 					     special_alt->new_off);
858 			if (!new_insn) {
859 				WARN_FUNC("special: can't find new instruction",
860 					  special_alt->new_sec,
861 					  special_alt->new_off);
862 				ret = -1;
863 				goto out;
864 			}
865 		}
866 
867 		if (special_alt->group) {
868 			ret = handle_group_alt(file, special_alt, orig_insn,
869 					       &new_insn);
870 			if (ret)
871 				goto out;
872 		} else if (special_alt->jump_or_nop) {
873 			ret = handle_jump_alt(file, special_alt, orig_insn,
874 					      &new_insn);
875 			if (ret)
876 				goto out;
877 		}
878 
879 		alt = malloc(sizeof(*alt));
880 		if (!alt) {
881 			WARN("malloc failed");
882 			ret = -1;
883 			goto out;
884 		}
885 
886 		alt->insn = new_insn;
887 		alt->skip_orig = special_alt->skip_orig;
888 		orig_insn->ignore_alts |= special_alt->skip_alt;
889 		list_add_tail(&alt->list, &orig_insn->alts);
890 
891 		list_del(&special_alt->list);
892 		free(special_alt);
893 	}
894 
895 out:
896 	return ret;
897 }
898 
899 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
900 			    struct rela *table)
901 {
902 	struct rela *rela = table;
903 	struct instruction *dest_insn;
904 	struct alternative *alt;
905 	struct symbol *pfunc = insn->func->pfunc;
906 	unsigned int prev_offset = 0;
907 
908 	/*
909 	 * Each @rela is a switch table relocation which points to the target
910 	 * instruction.
911 	 */
912 	list_for_each_entry_from(rela, &table->sec->rela_list, list) {
913 
914 		/* Check for the end of the table: */
915 		if (rela != table && rela->jump_table_start)
916 			break;
917 
918 		/* Make sure the table entries are consecutive: */
919 		if (prev_offset && rela->offset != prev_offset + 8)
920 			break;
921 
922 		/* Detect function pointers from contiguous objects: */
923 		if (rela->sym->sec == pfunc->sec &&
924 		    rela->addend == pfunc->offset)
925 			break;
926 
927 		dest_insn = find_insn(file, rela->sym->sec, rela->addend);
928 		if (!dest_insn)
929 			break;
930 
931 		/* Make sure the destination is in the same function: */
932 		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
933 			break;
934 
935 		alt = malloc(sizeof(*alt));
936 		if (!alt) {
937 			WARN("malloc failed");
938 			return -1;
939 		}
940 
941 		alt->insn = dest_insn;
942 		list_add_tail(&alt->list, &insn->alts);
943 		prev_offset = rela->offset;
944 	}
945 
946 	if (!prev_offset) {
947 		WARN_FUNC("can't find switch jump table",
948 			  insn->sec, insn->offset);
949 		return -1;
950 	}
951 
952 	return 0;
953 }
954 
955 /*
956  * find_jump_table() - Given a dynamic jump, find the switch jump table in
957  * .rodata associated with it.
958  *
959  * There are 3 basic patterns:
960  *
961  * 1. jmpq *[rodata addr](,%reg,8)
962  *
963  *    This is the most common case by far.  It jumps to an address in a simple
964  *    jump table which is stored in .rodata.
965  *
966  * 2. jmpq *[rodata addr](%rip)
967  *
968  *    This is caused by a rare GCC quirk, currently only seen in three driver
969  *    functions in the kernel, only with certain obscure non-distro configs.
970  *
971  *    As part of an optimization, GCC makes a copy of an existing switch jump
972  *    table, modifies it, and then hard-codes the jump (albeit with an indirect
973  *    jump) to use a single entry in the table.  The rest of the jump table and
974  *    some of its jump targets remain as dead code.
975  *
976  *    In such a case we can just crudely ignore all unreachable instruction
977  *    warnings for the entire object file.  Ideally we would just ignore them
978  *    for the function, but that would require redesigning the code quite a
979  *    bit.  And honestly that's just not worth doing: unreachable instruction
980  *    warnings are of questionable value anyway, and this is such a rare issue.
981  *
982  * 3. mov [rodata addr],%reg1
983  *    ... some instructions ...
984  *    jmpq *(%reg1,%reg2,8)
985  *
986  *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
987  *    writing, there are 11 occurrences of it in the allmodconfig kernel.
988  *
989  *    As of GCC 7 there are quite a few more of these and the 'in between' code
990  *    is significant. Esp. with KASAN enabled some of the code between the mov
991  *    and jmpq uses .rodata itself, which can confuse things.
992  *
993  *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
994  *    ensure the same register is used in the mov and jump instructions.
995  *
996  *    NOTE: RETPOLINE made it harder still to decode dynamic jumps.
997  */
998 static struct rela *find_jump_table(struct objtool_file *file,
999 				      struct symbol *func,
1000 				      struct instruction *insn)
1001 {
1002 	struct rela *text_rela, *table_rela;
1003 	struct instruction *orig_insn = insn;
1004 	struct section *table_sec;
1005 	unsigned long table_offset;
1006 
1007 	/*
1008 	 * Backward search using the @first_jump_src links, these help avoid
1009 	 * much of the 'in between' code. Which avoids us getting confused by
1010 	 * it.
1011 	 */
1012 	for (;
1013 	     &insn->list != &file->insn_list &&
1014 	     insn->sec == func->sec &&
1015 	     insn->offset >= func->offset;
1016 
1017 	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1018 
1019 		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1020 			break;
1021 
1022 		/* allow small jumps within the range */
1023 		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1024 		    insn->jump_dest &&
1025 		    (insn->jump_dest->offset <= insn->offset ||
1026 		     insn->jump_dest->offset > orig_insn->offset))
1027 		    break;
1028 
1029 		/* look for a relocation which references .rodata */
1030 		text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
1031 						    insn->len);
1032 		if (!text_rela || text_rela->sym->type != STT_SECTION ||
1033 		    !text_rela->sym->sec->rodata)
1034 			continue;
1035 
1036 		table_offset = text_rela->addend;
1037 		table_sec = text_rela->sym->sec;
1038 
1039 		if (text_rela->type == R_X86_64_PC32)
1040 			table_offset += 4;
1041 
1042 		/*
1043 		 * Make sure the .rodata address isn't associated with a
1044 		 * symbol.  GCC jump tables are anonymous data.
1045 		 *
1046 		 * Also support C jump tables which are in the same format as
1047 		 * switch jump tables.  For objtool to recognize them, they
1048 		 * need to be placed in the C_JUMP_TABLE_SECTION section.  They
1049 		 * have symbols associated with them.
1050 		 */
1051 		if (find_symbol_containing(table_sec, table_offset) &&
1052 		    strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
1053 			continue;
1054 
1055 		/* Each table entry has a rela associated with it. */
1056 		table_rela = find_rela_by_dest(table_sec, table_offset);
1057 		if (!table_rela)
1058 			continue;
1059 
1060 		/*
1061 		 * Use of RIP-relative switch jumps is quite rare, and
1062 		 * indicates a rare GCC quirk/bug which can leave dead code
1063 		 * behind.
1064 		 */
1065 		if (text_rela->type == R_X86_64_PC32)
1066 			file->ignore_unreachables = true;
1067 
1068 		return table_rela;
1069 	}
1070 
1071 	return NULL;
1072 }
1073 
1074 /*
1075  * First pass: Mark the head of each jump table so that in the next pass,
1076  * we know when a given jump table ends and the next one starts.
1077  */
1078 static void mark_func_jump_tables(struct objtool_file *file,
1079 				    struct symbol *func)
1080 {
1081 	struct instruction *insn, *last = NULL;
1082 	struct rela *rela;
1083 
1084 	func_for_each_insn_all(file, func, insn) {
1085 		if (!last)
1086 			last = insn;
1087 
1088 		/*
1089 		 * Store back-pointers for unconditional forward jumps such
1090 		 * that find_jump_table() can back-track using those and
1091 		 * avoid some potentially confusing code.
1092 		 */
1093 		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1094 		    insn->offset > last->offset &&
1095 		    insn->jump_dest->offset > insn->offset &&
1096 		    !insn->jump_dest->first_jump_src) {
1097 
1098 			insn->jump_dest->first_jump_src = insn;
1099 			last = insn->jump_dest;
1100 		}
1101 
1102 		if (insn->type != INSN_JUMP_DYNAMIC)
1103 			continue;
1104 
1105 		rela = find_jump_table(file, func, insn);
1106 		if (rela) {
1107 			rela->jump_table_start = true;
1108 			insn->jump_table = rela;
1109 		}
1110 	}
1111 }
1112 
1113 static int add_func_jump_tables(struct objtool_file *file,
1114 				  struct symbol *func)
1115 {
1116 	struct instruction *insn;
1117 	int ret;
1118 
1119 	func_for_each_insn_all(file, func, insn) {
1120 		if (!insn->jump_table)
1121 			continue;
1122 
1123 		ret = add_jump_table(file, insn, insn->jump_table);
1124 		if (ret)
1125 			return ret;
1126 	}
1127 
1128 	return 0;
1129 }
1130 
1131 /*
1132  * For some switch statements, gcc generates a jump table in the .rodata
1133  * section which contains a list of addresses within the function to jump to.
1134  * This finds these jump tables and adds them to the insn->alts lists.
1135  */
1136 static int add_jump_table_alts(struct objtool_file *file)
1137 {
1138 	struct section *sec;
1139 	struct symbol *func;
1140 	int ret;
1141 
1142 	if (!file->rodata)
1143 		return 0;
1144 
1145 	for_each_sec(file, sec) {
1146 		list_for_each_entry(func, &sec->symbol_list, list) {
1147 			if (func->type != STT_FUNC)
1148 				continue;
1149 
1150 			mark_func_jump_tables(file, func);
1151 			ret = add_func_jump_tables(file, func);
1152 			if (ret)
1153 				return ret;
1154 		}
1155 	}
1156 
1157 	return 0;
1158 }
1159 
1160 static int read_unwind_hints(struct objtool_file *file)
1161 {
1162 	struct section *sec, *relasec;
1163 	struct rela *rela;
1164 	struct unwind_hint *hint;
1165 	struct instruction *insn;
1166 	struct cfi_reg *cfa;
1167 	int i;
1168 
1169 	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1170 	if (!sec)
1171 		return 0;
1172 
1173 	relasec = sec->rela;
1174 	if (!relasec) {
1175 		WARN("missing .rela.discard.unwind_hints section");
1176 		return -1;
1177 	}
1178 
1179 	if (sec->len % sizeof(struct unwind_hint)) {
1180 		WARN("struct unwind_hint size mismatch");
1181 		return -1;
1182 	}
1183 
1184 	file->hints = true;
1185 
1186 	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1187 		hint = (struct unwind_hint *)sec->data->d_buf + i;
1188 
1189 		rela = find_rela_by_dest(sec, i * sizeof(*hint));
1190 		if (!rela) {
1191 			WARN("can't find rela for unwind_hints[%d]", i);
1192 			return -1;
1193 		}
1194 
1195 		insn = find_insn(file, rela->sym->sec, rela->addend);
1196 		if (!insn) {
1197 			WARN("can't find insn for unwind_hints[%d]", i);
1198 			return -1;
1199 		}
1200 
1201 		cfa = &insn->state.cfa;
1202 
1203 		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1204 			insn->save = true;
1205 			continue;
1206 
1207 		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1208 			insn->restore = true;
1209 			insn->hint = true;
1210 			continue;
1211 		}
1212 
1213 		insn->hint = true;
1214 
1215 		switch (hint->sp_reg) {
1216 		case ORC_REG_UNDEFINED:
1217 			cfa->base = CFI_UNDEFINED;
1218 			break;
1219 		case ORC_REG_SP:
1220 			cfa->base = CFI_SP;
1221 			break;
1222 		case ORC_REG_BP:
1223 			cfa->base = CFI_BP;
1224 			break;
1225 		case ORC_REG_SP_INDIRECT:
1226 			cfa->base = CFI_SP_INDIRECT;
1227 			break;
1228 		case ORC_REG_R10:
1229 			cfa->base = CFI_R10;
1230 			break;
1231 		case ORC_REG_R13:
1232 			cfa->base = CFI_R13;
1233 			break;
1234 		case ORC_REG_DI:
1235 			cfa->base = CFI_DI;
1236 			break;
1237 		case ORC_REG_DX:
1238 			cfa->base = CFI_DX;
1239 			break;
1240 		default:
1241 			WARN_FUNC("unsupported unwind_hint sp base reg %d",
1242 				  insn->sec, insn->offset, hint->sp_reg);
1243 			return -1;
1244 		}
1245 
1246 		cfa->offset = hint->sp_offset;
1247 		insn->state.type = hint->type;
1248 		insn->state.end = hint->end;
1249 	}
1250 
1251 	return 0;
1252 }
1253 
1254 static int read_retpoline_hints(struct objtool_file *file)
1255 {
1256 	struct section *sec;
1257 	struct instruction *insn;
1258 	struct rela *rela;
1259 
1260 	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1261 	if (!sec)
1262 		return 0;
1263 
1264 	list_for_each_entry(rela, &sec->rela_list, list) {
1265 		if (rela->sym->type != STT_SECTION) {
1266 			WARN("unexpected relocation symbol type in %s", sec->name);
1267 			return -1;
1268 		}
1269 
1270 		insn = find_insn(file, rela->sym->sec, rela->addend);
1271 		if (!insn) {
1272 			WARN("bad .discard.retpoline_safe entry");
1273 			return -1;
1274 		}
1275 
1276 		if (insn->type != INSN_JUMP_DYNAMIC &&
1277 		    insn->type != INSN_CALL_DYNAMIC) {
1278 			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1279 				  insn->sec, insn->offset);
1280 			return -1;
1281 		}
1282 
1283 		insn->retpoline_safe = true;
1284 	}
1285 
1286 	return 0;
1287 }
1288 
1289 static void mark_rodata(struct objtool_file *file)
1290 {
1291 	struct section *sec;
1292 	bool found = false;
1293 
1294 	/*
1295 	 * Search for the following rodata sections, each of which can
1296 	 * potentially contain jump tables:
1297 	 *
1298 	 * - .rodata: can contain GCC switch tables
1299 	 * - .rodata.<func>: same, if -fdata-sections is being used
1300 	 * - .rodata..c_jump_table: contains C annotated jump tables
1301 	 *
1302 	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1303 	 */
1304 	for_each_sec(file, sec) {
1305 		if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
1306 		    !strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
1307 			sec->rodata = true;
1308 			found = true;
1309 		}
1310 	}
1311 
1312 	file->rodata = found;
1313 }
1314 
1315 static int decode_sections(struct objtool_file *file)
1316 {
1317 	int ret;
1318 
1319 	mark_rodata(file);
1320 
1321 	ret = decode_instructions(file);
1322 	if (ret)
1323 		return ret;
1324 
1325 	ret = add_dead_ends(file);
1326 	if (ret)
1327 		return ret;
1328 
1329 	add_ignores(file);
1330 	add_uaccess_safe(file);
1331 
1332 	ret = add_ignore_alternatives(file);
1333 	if (ret)
1334 		return ret;
1335 
1336 	ret = add_jump_destinations(file);
1337 	if (ret)
1338 		return ret;
1339 
1340 	ret = add_special_section_alts(file);
1341 	if (ret)
1342 		return ret;
1343 
1344 	ret = add_call_destinations(file);
1345 	if (ret)
1346 		return ret;
1347 
1348 	ret = add_jump_table_alts(file);
1349 	if (ret)
1350 		return ret;
1351 
1352 	ret = read_unwind_hints(file);
1353 	if (ret)
1354 		return ret;
1355 
1356 	ret = read_retpoline_hints(file);
1357 	if (ret)
1358 		return ret;
1359 
1360 	return 0;
1361 }
1362 
1363 static bool is_fentry_call(struct instruction *insn)
1364 {
1365 	if (insn->type == INSN_CALL &&
1366 	    insn->call_dest->type == STT_NOTYPE &&
1367 	    !strcmp(insn->call_dest->name, "__fentry__"))
1368 		return true;
1369 
1370 	return false;
1371 }
1372 
1373 static bool has_modified_stack_frame(struct insn_state *state)
1374 {
1375 	int i;
1376 
1377 	if (state->cfa.base != initial_func_cfi.cfa.base ||
1378 	    state->cfa.offset != initial_func_cfi.cfa.offset ||
1379 	    state->stack_size != initial_func_cfi.cfa.offset ||
1380 	    state->drap)
1381 		return true;
1382 
1383 	for (i = 0; i < CFI_NUM_REGS; i++)
1384 		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1385 		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
1386 			return true;
1387 
1388 	return false;
1389 }
1390 
1391 static bool has_valid_stack_frame(struct insn_state *state)
1392 {
1393 	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1394 	    state->regs[CFI_BP].offset == -16)
1395 		return true;
1396 
1397 	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1398 		return true;
1399 
1400 	return false;
1401 }
1402 
1403 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1404 {
1405 	struct cfi_reg *cfa = &state->cfa;
1406 	struct stack_op *op = &insn->stack_op;
1407 
1408 	if (cfa->base != CFI_SP)
1409 		return 0;
1410 
1411 	/* push */
1412 	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1413 		cfa->offset += 8;
1414 
1415 	/* pop */
1416 	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1417 		cfa->offset -= 8;
1418 
1419 	/* add immediate to sp */
1420 	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1421 	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1422 		cfa->offset -= op->src.offset;
1423 
1424 	return 0;
1425 }
1426 
1427 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1428 		     int offset)
1429 {
1430 	if (arch_callee_saved_reg(reg) &&
1431 	    state->regs[reg].base == CFI_UNDEFINED) {
1432 		state->regs[reg].base = base;
1433 		state->regs[reg].offset = offset;
1434 	}
1435 }
1436 
1437 static void restore_reg(struct insn_state *state, unsigned char reg)
1438 {
1439 	state->regs[reg].base = CFI_UNDEFINED;
1440 	state->regs[reg].offset = 0;
1441 }
1442 
1443 /*
1444  * A note about DRAP stack alignment:
1445  *
1446  * GCC has the concept of a DRAP register, which is used to help keep track of
1447  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1448  * register.  The typical DRAP pattern is:
1449  *
1450  *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
1451  *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
1452  *   41 ff 72 f8		pushq  -0x8(%r10)
1453  *   55				push   %rbp
1454  *   48 89 e5			mov    %rsp,%rbp
1455  *				(more pushes)
1456  *   41 52			push   %r10
1457  *				...
1458  *   41 5a			pop    %r10
1459  *				(more pops)
1460  *   5d				pop    %rbp
1461  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1462  *   c3				retq
1463  *
1464  * There are some variations in the epilogues, like:
1465  *
1466  *   5b				pop    %rbx
1467  *   41 5a			pop    %r10
1468  *   41 5c			pop    %r12
1469  *   41 5d			pop    %r13
1470  *   41 5e			pop    %r14
1471  *   c9				leaveq
1472  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1473  *   c3				retq
1474  *
1475  * and:
1476  *
1477  *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
1478  *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
1479  *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
1480  *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
1481  *   c9				leaveq
1482  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1483  *   c3				retq
1484  *
1485  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1486  * restored beforehand:
1487  *
1488  *   41 55			push   %r13
1489  *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
1490  *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
1491  *				...
1492  *   49 8d 65 f0		lea    -0x10(%r13),%rsp
1493  *   41 5d			pop    %r13
1494  *   c3				retq
1495  */
1496 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1497 {
1498 	struct stack_op *op = &insn->stack_op;
1499 	struct cfi_reg *cfa = &state->cfa;
1500 	struct cfi_reg *regs = state->regs;
1501 
1502 	/* stack operations don't make sense with an undefined CFA */
1503 	if (cfa->base == CFI_UNDEFINED) {
1504 		if (insn->func) {
1505 			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1506 			return -1;
1507 		}
1508 		return 0;
1509 	}
1510 
1511 	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1512 		return update_insn_state_regs(insn, state);
1513 
1514 	switch (op->dest.type) {
1515 
1516 	case OP_DEST_REG:
1517 		switch (op->src.type) {
1518 
1519 		case OP_SRC_REG:
1520 			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1521 			    cfa->base == CFI_SP &&
1522 			    regs[CFI_BP].base == CFI_CFA &&
1523 			    regs[CFI_BP].offset == -cfa->offset) {
1524 
1525 				/* mov %rsp, %rbp */
1526 				cfa->base = op->dest.reg;
1527 				state->bp_scratch = false;
1528 			}
1529 
1530 			else if (op->src.reg == CFI_SP &&
1531 				 op->dest.reg == CFI_BP && state->drap) {
1532 
1533 				/* drap: mov %rsp, %rbp */
1534 				regs[CFI_BP].base = CFI_BP;
1535 				regs[CFI_BP].offset = -state->stack_size;
1536 				state->bp_scratch = false;
1537 			}
1538 
1539 			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1540 
1541 				/*
1542 				 * mov %rsp, %reg
1543 				 *
1544 				 * This is needed for the rare case where GCC
1545 				 * does:
1546 				 *
1547 				 *   mov    %rsp, %rax
1548 				 *   ...
1549 				 *   mov    %rax, %rsp
1550 				 */
1551 				state->vals[op->dest.reg].base = CFI_CFA;
1552 				state->vals[op->dest.reg].offset = -state->stack_size;
1553 			}
1554 
1555 			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1556 				 cfa->base == CFI_BP) {
1557 
1558 				/*
1559 				 * mov %rbp, %rsp
1560 				 *
1561 				 * Restore the original stack pointer (Clang).
1562 				 */
1563 				state->stack_size = -state->regs[CFI_BP].offset;
1564 			}
1565 
1566 			else if (op->dest.reg == cfa->base) {
1567 
1568 				/* mov %reg, %rsp */
1569 				if (cfa->base == CFI_SP &&
1570 				    state->vals[op->src.reg].base == CFI_CFA) {
1571 
1572 					/*
1573 					 * This is needed for the rare case
1574 					 * where GCC does something dumb like:
1575 					 *
1576 					 *   lea    0x8(%rsp), %rcx
1577 					 *   ...
1578 					 *   mov    %rcx, %rsp
1579 					 */
1580 					cfa->offset = -state->vals[op->src.reg].offset;
1581 					state->stack_size = cfa->offset;
1582 
1583 				} else {
1584 					cfa->base = CFI_UNDEFINED;
1585 					cfa->offset = 0;
1586 				}
1587 			}
1588 
1589 			break;
1590 
1591 		case OP_SRC_ADD:
1592 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1593 
1594 				/* add imm, %rsp */
1595 				state->stack_size -= op->src.offset;
1596 				if (cfa->base == CFI_SP)
1597 					cfa->offset -= op->src.offset;
1598 				break;
1599 			}
1600 
1601 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1602 
1603 				/* lea disp(%rbp), %rsp */
1604 				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1605 				break;
1606 			}
1607 
1608 			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1609 
1610 				/* drap: lea disp(%rsp), %drap */
1611 				state->drap_reg = op->dest.reg;
1612 
1613 				/*
1614 				 * lea disp(%rsp), %reg
1615 				 *
1616 				 * This is needed for the rare case where GCC
1617 				 * does something dumb like:
1618 				 *
1619 				 *   lea    0x8(%rsp), %rcx
1620 				 *   ...
1621 				 *   mov    %rcx, %rsp
1622 				 */
1623 				state->vals[op->dest.reg].base = CFI_CFA;
1624 				state->vals[op->dest.reg].offset = \
1625 					-state->stack_size + op->src.offset;
1626 
1627 				break;
1628 			}
1629 
1630 			if (state->drap && op->dest.reg == CFI_SP &&
1631 			    op->src.reg == state->drap_reg) {
1632 
1633 				 /* drap: lea disp(%drap), %rsp */
1634 				cfa->base = CFI_SP;
1635 				cfa->offset = state->stack_size = -op->src.offset;
1636 				state->drap_reg = CFI_UNDEFINED;
1637 				state->drap = false;
1638 				break;
1639 			}
1640 
1641 			if (op->dest.reg == state->cfa.base) {
1642 				WARN_FUNC("unsupported stack register modification",
1643 					  insn->sec, insn->offset);
1644 				return -1;
1645 			}
1646 
1647 			break;
1648 
1649 		case OP_SRC_AND:
1650 			if (op->dest.reg != CFI_SP ||
1651 			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1652 			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1653 				WARN_FUNC("unsupported stack pointer realignment",
1654 					  insn->sec, insn->offset);
1655 				return -1;
1656 			}
1657 
1658 			if (state->drap_reg != CFI_UNDEFINED) {
1659 				/* drap: and imm, %rsp */
1660 				cfa->base = state->drap_reg;
1661 				cfa->offset = state->stack_size = 0;
1662 				state->drap = true;
1663 			}
1664 
1665 			/*
1666 			 * Older versions of GCC (4.8ish) realign the stack
1667 			 * without DRAP, with a frame pointer.
1668 			 */
1669 
1670 			break;
1671 
1672 		case OP_SRC_POP:
1673 		case OP_SRC_POPF:
1674 			if (!state->drap && op->dest.type == OP_DEST_REG &&
1675 			    op->dest.reg == cfa->base) {
1676 
1677 				/* pop %rbp */
1678 				cfa->base = CFI_SP;
1679 			}
1680 
1681 			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1682 			    op->dest.type == OP_DEST_REG &&
1683 			    op->dest.reg == state->drap_reg &&
1684 			    state->drap_offset == -state->stack_size) {
1685 
1686 				/* drap: pop %drap */
1687 				cfa->base = state->drap_reg;
1688 				cfa->offset = 0;
1689 				state->drap_offset = -1;
1690 
1691 			} else if (regs[op->dest.reg].offset == -state->stack_size) {
1692 
1693 				/* pop %reg */
1694 				restore_reg(state, op->dest.reg);
1695 			}
1696 
1697 			state->stack_size -= 8;
1698 			if (cfa->base == CFI_SP)
1699 				cfa->offset -= 8;
1700 
1701 			break;
1702 
1703 		case OP_SRC_REG_INDIRECT:
1704 			if (state->drap && op->src.reg == CFI_BP &&
1705 			    op->src.offset == state->drap_offset) {
1706 
1707 				/* drap: mov disp(%rbp), %drap */
1708 				cfa->base = state->drap_reg;
1709 				cfa->offset = 0;
1710 				state->drap_offset = -1;
1711 			}
1712 
1713 			if (state->drap && op->src.reg == CFI_BP &&
1714 			    op->src.offset == regs[op->dest.reg].offset) {
1715 
1716 				/* drap: mov disp(%rbp), %reg */
1717 				restore_reg(state, op->dest.reg);
1718 
1719 			} else if (op->src.reg == cfa->base &&
1720 			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1721 
1722 				/* mov disp(%rbp), %reg */
1723 				/* mov disp(%rsp), %reg */
1724 				restore_reg(state, op->dest.reg);
1725 			}
1726 
1727 			break;
1728 
1729 		default:
1730 			WARN_FUNC("unknown stack-related instruction",
1731 				  insn->sec, insn->offset);
1732 			return -1;
1733 		}
1734 
1735 		break;
1736 
1737 	case OP_DEST_PUSH:
1738 	case OP_DEST_PUSHF:
1739 		state->stack_size += 8;
1740 		if (cfa->base == CFI_SP)
1741 			cfa->offset += 8;
1742 
1743 		if (op->src.type != OP_SRC_REG)
1744 			break;
1745 
1746 		if (state->drap) {
1747 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1748 
1749 				/* drap: push %drap */
1750 				cfa->base = CFI_BP_INDIRECT;
1751 				cfa->offset = -state->stack_size;
1752 
1753 				/* save drap so we know when to restore it */
1754 				state->drap_offset = -state->stack_size;
1755 
1756 			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1757 
1758 				/* drap: push %rbp */
1759 				state->stack_size = 0;
1760 
1761 			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1762 
1763 				/* drap: push %reg */
1764 				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1765 			}
1766 
1767 		} else {
1768 
1769 			/* push %reg */
1770 			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1771 		}
1772 
1773 		/* detect when asm code uses rbp as a scratch register */
1774 		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1775 		    cfa->base != CFI_BP)
1776 			state->bp_scratch = true;
1777 		break;
1778 
1779 	case OP_DEST_REG_INDIRECT:
1780 
1781 		if (state->drap) {
1782 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1783 
1784 				/* drap: mov %drap, disp(%rbp) */
1785 				cfa->base = CFI_BP_INDIRECT;
1786 				cfa->offset = op->dest.offset;
1787 
1788 				/* save drap offset so we know when to restore it */
1789 				state->drap_offset = op->dest.offset;
1790 			}
1791 
1792 			else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1793 
1794 				/* drap: mov reg, disp(%rbp) */
1795 				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1796 			}
1797 
1798 		} else if (op->dest.reg == cfa->base) {
1799 
1800 			/* mov reg, disp(%rbp) */
1801 			/* mov reg, disp(%rsp) */
1802 			save_reg(state, op->src.reg, CFI_CFA,
1803 				 op->dest.offset - state->cfa.offset);
1804 		}
1805 
1806 		break;
1807 
1808 	case OP_DEST_LEAVE:
1809 		if ((!state->drap && cfa->base != CFI_BP) ||
1810 		    (state->drap && cfa->base != state->drap_reg)) {
1811 			WARN_FUNC("leave instruction with modified stack frame",
1812 				  insn->sec, insn->offset);
1813 			return -1;
1814 		}
1815 
1816 		/* leave (mov %rbp, %rsp; pop %rbp) */
1817 
1818 		state->stack_size = -state->regs[CFI_BP].offset - 8;
1819 		restore_reg(state, CFI_BP);
1820 
1821 		if (!state->drap) {
1822 			cfa->base = CFI_SP;
1823 			cfa->offset -= 8;
1824 		}
1825 
1826 		break;
1827 
1828 	case OP_DEST_MEM:
1829 		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1830 			WARN_FUNC("unknown stack-related memory operation",
1831 				  insn->sec, insn->offset);
1832 			return -1;
1833 		}
1834 
1835 		/* pop mem */
1836 		state->stack_size -= 8;
1837 		if (cfa->base == CFI_SP)
1838 			cfa->offset -= 8;
1839 
1840 		break;
1841 
1842 	default:
1843 		WARN_FUNC("unknown stack-related instruction",
1844 			  insn->sec, insn->offset);
1845 		return -1;
1846 	}
1847 
1848 	return 0;
1849 }
1850 
1851 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1852 {
1853 	struct insn_state *state1 = &insn->state, *state2 = state;
1854 	int i;
1855 
1856 	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1857 		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1858 			  insn->sec, insn->offset,
1859 			  state1->cfa.base, state1->cfa.offset,
1860 			  state2->cfa.base, state2->cfa.offset);
1861 
1862 	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1863 		for (i = 0; i < CFI_NUM_REGS; i++) {
1864 			if (!memcmp(&state1->regs[i], &state2->regs[i],
1865 				    sizeof(struct cfi_reg)))
1866 				continue;
1867 
1868 			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1869 				  insn->sec, insn->offset,
1870 				  i, state1->regs[i].base, state1->regs[i].offset,
1871 				  i, state2->regs[i].base, state2->regs[i].offset);
1872 			break;
1873 		}
1874 
1875 	} else if (state1->type != state2->type) {
1876 		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1877 			  insn->sec, insn->offset, state1->type, state2->type);
1878 
1879 	} else if (state1->drap != state2->drap ||
1880 		 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1881 		 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1882 		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1883 			  insn->sec, insn->offset,
1884 			  state1->drap, state1->drap_reg, state1->drap_offset,
1885 			  state2->drap, state2->drap_reg, state2->drap_offset);
1886 
1887 	} else
1888 		return true;
1889 
1890 	return false;
1891 }
1892 
1893 static inline bool func_uaccess_safe(struct symbol *func)
1894 {
1895 	if (func)
1896 		return func->uaccess_safe;
1897 
1898 	return false;
1899 }
1900 
1901 static inline const char *call_dest_name(struct instruction *insn)
1902 {
1903 	if (insn->call_dest)
1904 		return insn->call_dest->name;
1905 
1906 	return "{dynamic}";
1907 }
1908 
1909 static int validate_call(struct instruction *insn, struct insn_state *state)
1910 {
1911 	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1912 		WARN_FUNC("call to %s() with UACCESS enabled",
1913 				insn->sec, insn->offset, call_dest_name(insn));
1914 		return 1;
1915 	}
1916 
1917 	if (state->df) {
1918 		WARN_FUNC("call to %s() with DF set",
1919 				insn->sec, insn->offset, call_dest_name(insn));
1920 		return 1;
1921 	}
1922 
1923 	return 0;
1924 }
1925 
1926 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1927 {
1928 	if (has_modified_stack_frame(state)) {
1929 		WARN_FUNC("sibling call from callable instruction with modified stack frame",
1930 				insn->sec, insn->offset);
1931 		return 1;
1932 	}
1933 
1934 	return validate_call(insn, state);
1935 }
1936 
1937 /*
1938  * Follow the branch starting at the given instruction, and recursively follow
1939  * any other branches (jumps).  Meanwhile, track the frame pointer state at
1940  * each instruction and validate all the rules described in
1941  * tools/objtool/Documentation/stack-validation.txt.
1942  */
1943 static int validate_branch(struct objtool_file *file, struct symbol *func,
1944 			   struct instruction *first, struct insn_state state)
1945 {
1946 	struct alternative *alt;
1947 	struct instruction *insn, *next_insn;
1948 	struct section *sec;
1949 	u8 visited;
1950 	int ret;
1951 
1952 	insn = first;
1953 	sec = insn->sec;
1954 
1955 	if (insn->alt_group && list_empty(&insn->alts)) {
1956 		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
1957 			  sec, insn->offset);
1958 		return 1;
1959 	}
1960 
1961 	while (1) {
1962 		next_insn = next_insn_same_sec(file, insn);
1963 
1964 		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
1965 			WARN("%s() falls through to next function %s()",
1966 			     func->name, insn->func->name);
1967 			return 1;
1968 		}
1969 
1970 		if (func && insn->ignore) {
1971 			WARN_FUNC("BUG: why am I validating an ignored function?",
1972 				  sec, insn->offset);
1973 			return 1;
1974 		}
1975 
1976 		visited = 1 << state.uaccess;
1977 		if (insn->visited) {
1978 			if (!insn->hint && !insn_state_match(insn, &state))
1979 				return 1;
1980 
1981 			if (insn->visited & visited)
1982 				return 0;
1983 		}
1984 
1985 		if (insn->hint) {
1986 			if (insn->restore) {
1987 				struct instruction *save_insn, *i;
1988 
1989 				i = insn;
1990 				save_insn = NULL;
1991 				func_for_each_insn_continue_reverse(file, func, i) {
1992 					if (i->save) {
1993 						save_insn = i;
1994 						break;
1995 					}
1996 				}
1997 
1998 				if (!save_insn) {
1999 					WARN_FUNC("no corresponding CFI save for CFI restore",
2000 						  sec, insn->offset);
2001 					return 1;
2002 				}
2003 
2004 				if (!save_insn->visited) {
2005 					/*
2006 					 * Oops, no state to copy yet.
2007 					 * Hopefully we can reach this
2008 					 * instruction from another branch
2009 					 * after the save insn has been
2010 					 * visited.
2011 					 */
2012 					if (insn == first)
2013 						return 0;
2014 
2015 					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
2016 						  sec, insn->offset);
2017 					return 1;
2018 				}
2019 
2020 				insn->state = save_insn->state;
2021 			}
2022 
2023 			state = insn->state;
2024 
2025 		} else
2026 			insn->state = state;
2027 
2028 		insn->visited |= visited;
2029 
2030 		if (!insn->ignore_alts) {
2031 			bool skip_orig = false;
2032 
2033 			list_for_each_entry(alt, &insn->alts, list) {
2034 				if (alt->skip_orig)
2035 					skip_orig = true;
2036 
2037 				ret = validate_branch(file, func, alt->insn, state);
2038 				if (ret) {
2039 					if (backtrace)
2040 						BT_FUNC("(alt)", insn);
2041 					return ret;
2042 				}
2043 			}
2044 
2045 			if (skip_orig)
2046 				return 0;
2047 		}
2048 
2049 		switch (insn->type) {
2050 
2051 		case INSN_RETURN:
2052 			if (state.uaccess && !func_uaccess_safe(func)) {
2053 				WARN_FUNC("return with UACCESS enabled", sec, insn->offset);
2054 				return 1;
2055 			}
2056 
2057 			if (!state.uaccess && func_uaccess_safe(func)) {
2058 				WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function", sec, insn->offset);
2059 				return 1;
2060 			}
2061 
2062 			if (state.df) {
2063 				WARN_FUNC("return with DF set", sec, insn->offset);
2064 				return 1;
2065 			}
2066 
2067 			if (func && has_modified_stack_frame(&state)) {
2068 				WARN_FUNC("return with modified stack frame",
2069 					  sec, insn->offset);
2070 				return 1;
2071 			}
2072 
2073 			if (state.bp_scratch) {
2074 				WARN("%s uses BP as a scratch register",
2075 				     func->name);
2076 				return 1;
2077 			}
2078 
2079 			return 0;
2080 
2081 		case INSN_CALL:
2082 		case INSN_CALL_DYNAMIC:
2083 			ret = validate_call(insn, &state);
2084 			if (ret)
2085 				return ret;
2086 
2087 			if (!no_fp && func && !is_fentry_call(insn) &&
2088 			    !has_valid_stack_frame(&state)) {
2089 				WARN_FUNC("call without frame pointer save/setup",
2090 					  sec, insn->offset);
2091 				return 1;
2092 			}
2093 
2094 			if (dead_end_function(file, insn->call_dest))
2095 				return 0;
2096 
2097 			break;
2098 
2099 		case INSN_JUMP_CONDITIONAL:
2100 		case INSN_JUMP_UNCONDITIONAL:
2101 			if (func && is_sibling_call(insn)) {
2102 				ret = validate_sibling_call(insn, &state);
2103 				if (ret)
2104 					return ret;
2105 
2106 			} else if (insn->jump_dest) {
2107 				ret = validate_branch(file, func,
2108 						      insn->jump_dest, state);
2109 				if (ret) {
2110 					if (backtrace)
2111 						BT_FUNC("(branch)", insn);
2112 					return ret;
2113 				}
2114 			}
2115 
2116 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
2117 				return 0;
2118 
2119 			break;
2120 
2121 		case INSN_JUMP_DYNAMIC:
2122 		case INSN_JUMP_DYNAMIC_CONDITIONAL:
2123 			if (func && is_sibling_call(insn)) {
2124 				ret = validate_sibling_call(insn, &state);
2125 				if (ret)
2126 					return ret;
2127 			}
2128 
2129 			if (insn->type == INSN_JUMP_DYNAMIC)
2130 				return 0;
2131 
2132 			break;
2133 
2134 		case INSN_CONTEXT_SWITCH:
2135 			if (func && (!next_insn || !next_insn->hint)) {
2136 				WARN_FUNC("unsupported instruction in callable function",
2137 					  sec, insn->offset);
2138 				return 1;
2139 			}
2140 			return 0;
2141 
2142 		case INSN_STACK:
2143 			if (update_insn_state(insn, &state))
2144 				return 1;
2145 
2146 			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2147 				if (!state.uaccess_stack) {
2148 					state.uaccess_stack = 1;
2149 				} else if (state.uaccess_stack >> 31) {
2150 					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2151 					return 1;
2152 				}
2153 				state.uaccess_stack <<= 1;
2154 				state.uaccess_stack  |= state.uaccess;
2155 			}
2156 
2157 			if (insn->stack_op.src.type == OP_SRC_POPF) {
2158 				if (state.uaccess_stack) {
2159 					state.uaccess = state.uaccess_stack & 1;
2160 					state.uaccess_stack >>= 1;
2161 					if (state.uaccess_stack == 1)
2162 						state.uaccess_stack = 0;
2163 				}
2164 			}
2165 
2166 			break;
2167 
2168 		case INSN_STAC:
2169 			if (state.uaccess) {
2170 				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2171 				return 1;
2172 			}
2173 
2174 			state.uaccess = true;
2175 			break;
2176 
2177 		case INSN_CLAC:
2178 			if (!state.uaccess && func) {
2179 				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2180 				return 1;
2181 			}
2182 
2183 			if (func_uaccess_safe(func) && !state.uaccess_stack) {
2184 				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2185 				return 1;
2186 			}
2187 
2188 			state.uaccess = false;
2189 			break;
2190 
2191 		case INSN_STD:
2192 			if (state.df)
2193 				WARN_FUNC("recursive STD", sec, insn->offset);
2194 
2195 			state.df = true;
2196 			break;
2197 
2198 		case INSN_CLD:
2199 			if (!state.df && func)
2200 				WARN_FUNC("redundant CLD", sec, insn->offset);
2201 
2202 			state.df = false;
2203 			break;
2204 
2205 		default:
2206 			break;
2207 		}
2208 
2209 		if (insn->dead_end)
2210 			return 0;
2211 
2212 		if (!next_insn) {
2213 			if (state.cfa.base == CFI_UNDEFINED)
2214 				return 0;
2215 			WARN("%s: unexpected end of section", sec->name);
2216 			return 1;
2217 		}
2218 
2219 		insn = next_insn;
2220 	}
2221 
2222 	return 0;
2223 }
2224 
2225 static int validate_unwind_hints(struct objtool_file *file)
2226 {
2227 	struct instruction *insn;
2228 	int ret, warnings = 0;
2229 	struct insn_state state;
2230 
2231 	if (!file->hints)
2232 		return 0;
2233 
2234 	clear_insn_state(&state);
2235 
2236 	for_each_insn(file, insn) {
2237 		if (insn->hint && !insn->visited) {
2238 			ret = validate_branch(file, insn->func, insn, state);
2239 			if (ret && backtrace)
2240 				BT_FUNC("<=== (hint)", insn);
2241 			warnings += ret;
2242 		}
2243 	}
2244 
2245 	return warnings;
2246 }
2247 
2248 static int validate_retpoline(struct objtool_file *file)
2249 {
2250 	struct instruction *insn;
2251 	int warnings = 0;
2252 
2253 	for_each_insn(file, insn) {
2254 		if (insn->type != INSN_JUMP_DYNAMIC &&
2255 		    insn->type != INSN_CALL_DYNAMIC)
2256 			continue;
2257 
2258 		if (insn->retpoline_safe)
2259 			continue;
2260 
2261 		/*
2262 		 * .init.text code is ran before userspace and thus doesn't
2263 		 * strictly need retpolines, except for modules which are
2264 		 * loaded late, they very much do need retpoline in their
2265 		 * .init.text
2266 		 */
2267 		if (!strcmp(insn->sec->name, ".init.text") && !module)
2268 			continue;
2269 
2270 		WARN_FUNC("indirect %s found in RETPOLINE build",
2271 			  insn->sec, insn->offset,
2272 			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2273 
2274 		warnings++;
2275 	}
2276 
2277 	return warnings;
2278 }
2279 
2280 static bool is_kasan_insn(struct instruction *insn)
2281 {
2282 	return (insn->type == INSN_CALL &&
2283 		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2284 }
2285 
2286 static bool is_ubsan_insn(struct instruction *insn)
2287 {
2288 	return (insn->type == INSN_CALL &&
2289 		!strcmp(insn->call_dest->name,
2290 			"__ubsan_handle_builtin_unreachable"));
2291 }
2292 
2293 static bool ignore_unreachable_insn(struct instruction *insn)
2294 {
2295 	int i;
2296 
2297 	if (insn->ignore || insn->type == INSN_NOP)
2298 		return true;
2299 
2300 	/*
2301 	 * Ignore any unused exceptions.  This can happen when a whitelisted
2302 	 * function has an exception table entry.
2303 	 *
2304 	 * Also ignore alternative replacement instructions.  This can happen
2305 	 * when a whitelisted function uses one of the ALTERNATIVE macros.
2306 	 */
2307 	if (!strcmp(insn->sec->name, ".fixup") ||
2308 	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
2309 	    !strcmp(insn->sec->name, ".altinstr_aux"))
2310 		return true;
2311 
2312 	/*
2313 	 * Check if this (or a subsequent) instruction is related to
2314 	 * CONFIG_UBSAN or CONFIG_KASAN.
2315 	 *
2316 	 * End the search at 5 instructions to avoid going into the weeds.
2317 	 */
2318 	if (!insn->func)
2319 		return false;
2320 	for (i = 0; i < 5; i++) {
2321 
2322 		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2323 			return true;
2324 
2325 		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2326 			if (insn->jump_dest &&
2327 			    insn->jump_dest->func == insn->func) {
2328 				insn = insn->jump_dest;
2329 				continue;
2330 			}
2331 
2332 			break;
2333 		}
2334 
2335 		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2336 			break;
2337 
2338 		insn = list_next_entry(insn, list);
2339 	}
2340 
2341 	return false;
2342 }
2343 
2344 static int validate_functions(struct objtool_file *file)
2345 {
2346 	struct section *sec;
2347 	struct symbol *func;
2348 	struct instruction *insn;
2349 	struct insn_state state;
2350 	int ret, warnings = 0;
2351 
2352 	clear_insn_state(&state);
2353 
2354 	state.cfa = initial_func_cfi.cfa;
2355 	memcpy(&state.regs, &initial_func_cfi.regs,
2356 	       CFI_NUM_REGS * sizeof(struct cfi_reg));
2357 	state.stack_size = initial_func_cfi.cfa.offset;
2358 
2359 	for_each_sec(file, sec) {
2360 		list_for_each_entry(func, &sec->symbol_list, list) {
2361 			if (func->type != STT_FUNC)
2362 				continue;
2363 
2364 			if (!func->len) {
2365 				WARN("%s() is missing an ELF size annotation",
2366 				     func->name);
2367 				warnings++;
2368 			}
2369 
2370 			if (func->pfunc != func || func->alias != func)
2371 				continue;
2372 
2373 			insn = find_insn(file, sec, func->offset);
2374 			if (!insn || insn->ignore || insn->visited)
2375 				continue;
2376 
2377 			state.uaccess = func->uaccess_safe;
2378 
2379 			ret = validate_branch(file, func, insn, state);
2380 			if (ret && backtrace)
2381 				BT_FUNC("<=== (func)", insn);
2382 			warnings += ret;
2383 		}
2384 	}
2385 
2386 	return warnings;
2387 }
2388 
2389 static int validate_reachable_instructions(struct objtool_file *file)
2390 {
2391 	struct instruction *insn;
2392 
2393 	if (file->ignore_unreachables)
2394 		return 0;
2395 
2396 	for_each_insn(file, insn) {
2397 		if (insn->visited || ignore_unreachable_insn(insn))
2398 			continue;
2399 
2400 		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2401 		return 1;
2402 	}
2403 
2404 	return 0;
2405 }
2406 
2407 static void cleanup(struct objtool_file *file)
2408 {
2409 	struct instruction *insn, *tmpinsn;
2410 	struct alternative *alt, *tmpalt;
2411 
2412 	list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
2413 		list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
2414 			list_del(&alt->list);
2415 			free(alt);
2416 		}
2417 		list_del(&insn->list);
2418 		hash_del(&insn->hash);
2419 		free(insn);
2420 	}
2421 	elf_close(file->elf);
2422 }
2423 
2424 static struct objtool_file file;
2425 
2426 int check(const char *_objname, bool orc)
2427 {
2428 	int ret, warnings = 0;
2429 
2430 	objname = _objname;
2431 
2432 	file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
2433 	if (!file.elf)
2434 		return 1;
2435 
2436 	INIT_LIST_HEAD(&file.insn_list);
2437 	hash_init(file.insn_hash);
2438 	file.c_file = find_section_by_name(file.elf, ".comment");
2439 	file.ignore_unreachables = no_unreachable;
2440 	file.hints = false;
2441 
2442 	arch_initial_func_cfi_state(&initial_func_cfi);
2443 
2444 	ret = decode_sections(&file);
2445 	if (ret < 0)
2446 		goto out;
2447 	warnings += ret;
2448 
2449 	if (list_empty(&file.insn_list))
2450 		goto out;
2451 
2452 	if (retpoline) {
2453 		ret = validate_retpoline(&file);
2454 		if (ret < 0)
2455 			return ret;
2456 		warnings += ret;
2457 	}
2458 
2459 	ret = validate_functions(&file);
2460 	if (ret < 0)
2461 		goto out;
2462 	warnings += ret;
2463 
2464 	ret = validate_unwind_hints(&file);
2465 	if (ret < 0)
2466 		goto out;
2467 	warnings += ret;
2468 
2469 	if (!warnings) {
2470 		ret = validate_reachable_instructions(&file);
2471 		if (ret < 0)
2472 			goto out;
2473 		warnings += ret;
2474 	}
2475 
2476 	if (orc) {
2477 		ret = create_orc(&file);
2478 		if (ret < 0)
2479 			goto out;
2480 
2481 		ret = create_orc_sections(&file);
2482 		if (ret < 0)
2483 			goto out;
2484 
2485 		ret = elf_write(file.elf);
2486 		if (ret < 0)
2487 			goto out;
2488 	}
2489 
2490 out:
2491 	cleanup(&file);
2492 
2493 	/* ignore warnings for now until we get all the code cleaned up */
2494 	if (ret || warnings)
2495 		return 0;
2496 	return 0;
2497 }
2498