xref: /openbmc/linux/tools/objtool/check.c (revision d871f7b5a6a2a30f4eba577fd56941fa3657e394)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5 
6 #include <string.h>
7 #include <stdlib.h>
8 
9 #include "builtin.h"
10 #include "cfi.h"
11 #include "arch.h"
12 #include "check.h"
13 #include "special.h"
14 #include "warn.h"
15 #include "arch_elf.h"
16 
17 #include <linux/hashtable.h>
18 #include <linux/kernel.h>
19 #include <linux/static_call_types.h>
20 
21 #define FAKE_JUMP_OFFSET -1
22 
/*
 * One alternative code path attached to an instruction via insn->alts.
 * validate_branch() explores each alternative in addition to (or instead
 * of) the original instruction stream.
 */
struct alternative {
	struct list_head list;		/* node in insn->alts */
	struct instruction *insn;	/* first instruction of the alternative */
	bool skip_orig;			/* if set, skip validating the original path */
};
28 
/*
 * CFI state at function entry; presumably filled in by the arch-specific
 * code (initialization is not visible in this file — confirm in arch/).
 */
struct cfi_init_state initial_func_cfi;
30 
31 struct instruction *find_insn(struct objtool_file *file,
32 			      struct section *sec, unsigned long offset)
33 {
34 	struct instruction *insn;
35 
36 	hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) {
37 		if (insn->sec == sec && insn->offset == offset)
38 			return insn;
39 	}
40 
41 	return NULL;
42 }
43 
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 					      struct instruction *insn)
46 {
47 	struct instruction *next = list_next_entry(insn, list);
48 
49 	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 		return NULL;
51 
52 	return next;
53 }
54 
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 					       struct instruction *insn)
57 {
58 	struct instruction *next = list_next_entry(insn, list);
59 	struct symbol *func = insn->func;
60 
61 	if (!func)
62 		return NULL;
63 
64 	if (&next->list != &file->insn_list && next->func == func)
65 		return next;
66 
67 	/* Check if we're already in the subfunction: */
68 	if (func == func->cfunc)
69 		return NULL;
70 
71 	/* Move to the subfunction: */
72 	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74 
75 static struct instruction *prev_insn_same_sym(struct objtool_file *file,
76 					       struct instruction *insn)
77 {
78 	struct instruction *prev = list_prev_entry(insn, list);
79 
80 	if (&prev->list != &file->insn_list && prev->func == insn->func)
81 		return prev;
82 
83 	return NULL;
84 }
85 
/*
 * Iterate over every instruction of @func, following into its
 * compiler-generated child function via next_insn_same_func().
 */
#define func_for_each_insn(file, func, insn)				\
	for (insn = find_insn(file, func->sec, func->offset);		\
	     insn;							\
	     insn = next_insn_same_func(file, insn))

/*
 * Iterate over the instructions covered by symbol @sym only: stops at the
 * end of the global list, a section change, or @sym's end offset.
 */
#define sym_for_each_insn(file, sym, insn)				\
	for (insn = find_insn(file, sym->sec, sym->offset);		\
	     insn && &insn->list != &file->insn_list &&			\
		insn->sec == sym->sec &&				\
		insn->offset < sym->offset + sym->len;			\
	     insn = list_next_entry(insn, list))

/*
 * Continue iterating backwards, starting at the instruction *before*
 * @insn, while still within symbol @sym.
 */
#define sym_for_each_insn_continue_reverse(file, sym, insn)		\
	for (insn = list_prev_entry(insn, list);			\
	     &insn->list != &file->insn_list &&				\
		insn->sec == sym->sec && insn->offset >= sym->offset;	\
	     insn = list_prev_entry(insn, list))

/* Iterate from @insn (inclusive) to the end of its section. */
#define sec_for_each_insn_from(file, insn)				\
	for (; insn; insn = next_insn_same_sec(file, insn))

/* Iterate from the instruction after @insn to the end of its section. */
#define sec_for_each_insn_continue(file, insn)				\
	for (insn = next_insn_same_sec(file, insn); insn;		\
	     insn = next_insn_same_sec(file, insn))
110 
111 static bool is_sibling_call(struct instruction *insn)
112 {
113 	/* An indirect jump is either a sibling call or a jump to a table. */
114 	if (insn->type == INSN_JUMP_DYNAMIC)
115 		return list_empty(&insn->alts);
116 
117 	if (!is_static_jump(insn))
118 		return false;
119 
120 	/* add_jump_destinations() sets insn->call_dest for sibling calls. */
121 	return !!insn->call_dest;
122 }
123 
124 /*
125  * This checks to see if the given function is a "noreturn" function.
126  *
127  * For global functions which are outside the scope of this object file, we
128  * have to keep a manual list of them.
129  *
130  * For local functions, we have to detect them manually by simply looking for
131  * the lack of a return instruction.
132  */
133 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
134 				int recursion)
135 {
136 	int i;
137 	struct instruction *insn;
138 	bool empty = true;
139 
140 	/*
141 	 * Unfortunately these have to be hard coded because the noreturn
142 	 * attribute isn't provided in ELF data.
143 	 */
144 	static const char * const global_noreturns[] = {
145 		"__stack_chk_fail",
146 		"panic",
147 		"do_exit",
148 		"do_task_dead",
149 		"__module_put_and_exit",
150 		"complete_and_exit",
151 		"__reiserfs_panic",
152 		"lbug_with_loc",
153 		"fortify_panic",
154 		"usercopy_abort",
155 		"machine_real_restart",
156 		"rewind_stack_do_exit",
157 		"kunit_try_catch_throw",
158 	};
159 
160 	if (!func)
161 		return false;
162 
163 	if (func->bind == STB_WEAK)
164 		return false;
165 
166 	if (func->bind == STB_GLOBAL)
167 		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
168 			if (!strcmp(func->name, global_noreturns[i]))
169 				return true;
170 
171 	if (!func->len)
172 		return false;
173 
174 	insn = find_insn(file, func->sec, func->offset);
175 	if (!insn->func)
176 		return false;
177 
178 	func_for_each_insn(file, func, insn) {
179 		empty = false;
180 
181 		if (insn->type == INSN_RETURN)
182 			return false;
183 	}
184 
185 	if (empty)
186 		return false;
187 
188 	/*
189 	 * A function can have a sibling call instead of a return.  In that
190 	 * case, the function's dead-end status depends on whether the target
191 	 * of the sibling call returns.
192 	 */
193 	func_for_each_insn(file, func, insn) {
194 		if (is_sibling_call(insn)) {
195 			struct instruction *dest = insn->jump_dest;
196 
197 			if (!dest)
198 				/* sibling call to another file */
199 				return false;
200 
201 			/* local sibling call */
202 			if (recursion == 5) {
203 				/*
204 				 * Infinite recursion: two functions have
205 				 * sibling calls to each other.  This is a very
206 				 * rare case.  It means they aren't dead ends.
207 				 */
208 				return false;
209 			}
210 
211 			return __dead_end_function(file, dest->func, recursion+1);
212 		}
213 	}
214 
215 	return true;
216 }
217 
/* Public entry point: does @func never return?  Starts recursion depth at 0. */
static bool dead_end_function(struct objtool_file *file, struct symbol *func)
{
	return __dead_end_function(file, func, 0);
}
222 
223 static void init_cfi_state(struct cfi_state *cfi)
224 {
225 	int i;
226 
227 	for (i = 0; i < CFI_NUM_REGS; i++) {
228 		cfi->regs[i].base = CFI_UNDEFINED;
229 		cfi->vals[i].base = CFI_UNDEFINED;
230 	}
231 	cfi->cfa.base = CFI_UNDEFINED;
232 	cfi->drap_reg = CFI_UNDEFINED;
233 	cfi->drap_offset = -1;
234 }
235 
/* Zero @state and reset its CFI tracking; set noinstr mode when applicable. */
static void init_insn_state(struct insn_state *state, struct section *sec)
{
	memset(state, 0, sizeof(*state));
	init_cfi_state(&state->cfi);

	/*
	 * We need the full vmlinux for noinstr validation, otherwise we can
	 * not correctly determine insn->call_dest->sec (external symbols do
	 * not have a section).
	 */
	if (vmlinux && sec)
		state->noinstr = sec->noinstr;
}
249 
/*
 * Call the arch-specific instruction decoder for all the instructions and add
 * them to the global instruction list.
 *
 * Returns 0 on success, -1 (or the decoder's error code) on failure.
 */
static int decode_instructions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	unsigned long offset;
	struct instruction *insn;
	unsigned long nr_insns = 0;
	int ret;

	for_each_sec(file, sec) {

		/* Only executable sections contain instructions. */
		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		/*
		 * Everything executable counts as "text" except alternative
		 * replacement code and the .discard.* annotation sections.
		 */
		if (strcmp(sec->name, ".altinstr_replacement") &&
		    strcmp(sec->name, ".altinstr_aux") &&
		    strncmp(sec->name, ".discard.", 9))
			sec->text = true;

		if (!strcmp(sec->name, ".noinstr.text") ||
		    !strcmp(sec->name, ".entry.text"))
			sec->noinstr = true;

		/* Decode one instruction at a time until the section ends. */
		for (offset = 0; offset < sec->len; offset += insn->len) {
			insn = malloc(sizeof(*insn));
			if (!insn) {
				WARN("malloc failed");
				return -1;
			}
			memset(insn, 0, sizeof(*insn));
			INIT_LIST_HEAD(&insn->alts);
			INIT_LIST_HEAD(&insn->stack_ops);
			init_cfi_state(&insn->cfi);

			insn->sec = sec;
			insn->offset = offset;

			/* Fills in insn->len, so the loop can advance. */
			ret = arch_decode_instruction(file->elf, sec, offset,
						      sec->len - offset,
						      &insn->len, &insn->type,
						      &insn->immediate,
						      &insn->stack_ops);
			if (ret)
				goto err;

			hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
			list_add_tail(&insn->list, &file->insn_list);
			nr_insns++;
		}

		/* Associate each decoded instruction with its function symbol. */
		list_for_each_entry(func, &sec->symbol_list, list) {
			if (func->type != STT_FUNC || func->alias != func)
				continue;

			if (!find_insn(file, sec, func->offset)) {
				WARN("%s(): can't find starting instruction",
				     func->name);
				return -1;
			}

			sym_for_each_insn(file, func, insn)
				insn->func = func;
		}
	}

	if (stats)
		printf("nr_insns: %lu\n", nr_insns);

	return 0;

err:
	/* The failing insn was never hashed/listed, so a plain free is safe. */
	free(insn);
	return ret;
}
328 
329 static struct instruction *find_last_insn(struct objtool_file *file,
330 					  struct section *sec)
331 {
332 	struct instruction *insn = NULL;
333 	unsigned int offset;
334 	unsigned int end = (sec->len > 10) ? sec->len - 10 : 0;
335 
336 	for (offset = sec->len - 1; offset >= end && !insn; offset--)
337 		insn = find_insn(file, sec, offset);
338 
339 	return insn;
340 }
341 
342 /*
343  * Mark "ud2" instructions and manually annotated dead ends.
344  */
345 static int add_dead_ends(struct objtool_file *file)
346 {
347 	struct section *sec;
348 	struct reloc *reloc;
349 	struct instruction *insn;
350 
351 	/*
352 	 * By default, "ud2" is a dead end unless otherwise annotated, because
353 	 * GCC 7 inserts it for certain divide-by-zero cases.
354 	 */
355 	for_each_insn(file, insn)
356 		if (insn->type == INSN_BUG)
357 			insn->dead_end = true;
358 
359 	/*
360 	 * Check for manually annotated dead ends.
361 	 */
362 	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
363 	if (!sec)
364 		goto reachable;
365 
366 	list_for_each_entry(reloc, &sec->reloc_list, list) {
367 		if (reloc->sym->type != STT_SECTION) {
368 			WARN("unexpected relocation symbol type in %s", sec->name);
369 			return -1;
370 		}
371 		insn = find_insn(file, reloc->sym->sec, reloc->addend);
372 		if (insn)
373 			insn = list_prev_entry(insn, list);
374 		else if (reloc->addend == reloc->sym->sec->len) {
375 			insn = find_last_insn(file, reloc->sym->sec);
376 			if (!insn) {
377 				WARN("can't find unreachable insn at %s+0x%x",
378 				     reloc->sym->sec->name, reloc->addend);
379 				return -1;
380 			}
381 		} else {
382 			WARN("can't find unreachable insn at %s+0x%x",
383 			     reloc->sym->sec->name, reloc->addend);
384 			return -1;
385 		}
386 
387 		insn->dead_end = true;
388 	}
389 
390 reachable:
391 	/*
392 	 * These manually annotated reachable checks are needed for GCC 4.4,
393 	 * where the Linux unreachable() macro isn't supported.  In that case
394 	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
395 	 * not a dead end.
396 	 */
397 	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
398 	if (!sec)
399 		return 0;
400 
401 	list_for_each_entry(reloc, &sec->reloc_list, list) {
402 		if (reloc->sym->type != STT_SECTION) {
403 			WARN("unexpected relocation symbol type in %s", sec->name);
404 			return -1;
405 		}
406 		insn = find_insn(file, reloc->sym->sec, reloc->addend);
407 		if (insn)
408 			insn = list_prev_entry(insn, list);
409 		else if (reloc->addend == reloc->sym->sec->len) {
410 			insn = find_last_insn(file, reloc->sym->sec);
411 			if (!insn) {
412 				WARN("can't find reachable insn at %s+0x%x",
413 				     reloc->sym->sec->name, reloc->addend);
414 				return -1;
415 			}
416 		} else {
417 			WARN("can't find reachable insn at %s+0x%x",
418 			     reloc->sym->sec->name, reloc->addend);
419 			return -1;
420 		}
421 
422 		insn->dead_end = false;
423 	}
424 
425 	return 0;
426 }
427 
428 static int create_static_call_sections(struct objtool_file *file)
429 {
430 	struct section *sec, *reloc_sec;
431 	struct reloc *reloc;
432 	struct static_call_site *site;
433 	struct instruction *insn;
434 	struct symbol *key_sym;
435 	char *key_name, *tmp;
436 	int idx;
437 
438 	sec = find_section_by_name(file->elf, ".static_call_sites");
439 	if (sec) {
440 		INIT_LIST_HEAD(&file->static_call_list);
441 		WARN("file already has .static_call_sites section, skipping");
442 		return 0;
443 	}
444 
445 	if (list_empty(&file->static_call_list))
446 		return 0;
447 
448 	idx = 0;
449 	list_for_each_entry(insn, &file->static_call_list, static_call_node)
450 		idx++;
451 
452 	sec = elf_create_section(file->elf, ".static_call_sites", SHF_WRITE,
453 				 sizeof(struct static_call_site), idx);
454 	if (!sec)
455 		return -1;
456 
457 	reloc_sec = elf_create_reloc_section(file->elf, sec, SHT_RELA);
458 	if (!reloc_sec)
459 		return -1;
460 
461 	idx = 0;
462 	list_for_each_entry(insn, &file->static_call_list, static_call_node) {
463 
464 		site = (struct static_call_site *)sec->data->d_buf + idx;
465 		memset(site, 0, sizeof(struct static_call_site));
466 
467 		/* populate reloc for 'addr' */
468 		reloc = malloc(sizeof(*reloc));
469 		if (!reloc) {
470 			perror("malloc");
471 			return -1;
472 		}
473 		memset(reloc, 0, sizeof(*reloc));
474 		reloc->sym = insn->sec->sym;
475 		reloc->addend = insn->offset;
476 		reloc->type = R_X86_64_PC32;
477 		reloc->offset = idx * sizeof(struct static_call_site);
478 		reloc->sec = reloc_sec;
479 		elf_add_reloc(file->elf, reloc);
480 
481 		/* find key symbol */
482 		key_name = strdup(insn->call_dest->name);
483 		if (!key_name) {
484 			perror("strdup");
485 			return -1;
486 		}
487 		if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR,
488 			    STATIC_CALL_TRAMP_PREFIX_LEN)) {
489 			WARN("static_call: trampoline name malformed: %s", key_name);
490 			return -1;
491 		}
492 		tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN;
493 		memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN);
494 
495 		key_sym = find_symbol_by_name(file->elf, tmp);
496 		if (!key_sym) {
497 			WARN("static_call: can't find static_call_key symbol: %s", tmp);
498 			return -1;
499 		}
500 		free(key_name);
501 
502 		/* populate reloc for 'key' */
503 		reloc = malloc(sizeof(*reloc));
504 		if (!reloc) {
505 			perror("malloc");
506 			return -1;
507 		}
508 		memset(reloc, 0, sizeof(*reloc));
509 		reloc->sym = key_sym;
510 		reloc->addend = is_sibling_call(insn) ? STATIC_CALL_SITE_TAIL : 0;
511 		reloc->type = R_X86_64_PC32;
512 		reloc->offset = idx * sizeof(struct static_call_site) + 4;
513 		reloc->sec = reloc_sec;
514 		elf_add_reloc(file->elf, reloc);
515 
516 		idx++;
517 	}
518 
519 	if (elf_rebuild_reloc_section(file->elf, reloc_sec))
520 		return -1;
521 
522 	return 0;
523 }
524 
525 /*
526  * Warnings shouldn't be reported for ignored functions.
527  */
528 static void add_ignores(struct objtool_file *file)
529 {
530 	struct instruction *insn;
531 	struct section *sec;
532 	struct symbol *func;
533 	struct reloc *reloc;
534 
535 	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
536 	if (!sec)
537 		return;
538 
539 	list_for_each_entry(reloc, &sec->reloc_list, list) {
540 		switch (reloc->sym->type) {
541 		case STT_FUNC:
542 			func = reloc->sym;
543 			break;
544 
545 		case STT_SECTION:
546 			func = find_func_by_offset(reloc->sym->sec, reloc->addend);
547 			if (!func)
548 				continue;
549 			break;
550 
551 		default:
552 			WARN("unexpected relocation symbol type in %s: %d", sec->name, reloc->sym->type);
553 			continue;
554 		}
555 
556 		func_for_each_insn(file, func, insn)
557 			insn->ignore = true;
558 	}
559 }
560 
/*
 * This is a whitelist of functions that is allowed to be called with AC set.
 * The list is meant to be minimal and only contains compiler instrumentation
 * ABI and a few functions used to implement *_{to,from}_user() functions.
 *
 * These functions must not directly change AC, but may PUSHF/POPF.
 *
 * NULL-terminated; consumed by add_uaccess_safe() below.
 */
static const char *uaccess_safe_builtin[] = {
	/* KASAN */
	"kasan_report",
	"check_memory_region",
	/* KASAN out-of-line */
	"__asan_loadN_noabort",
	"__asan_load1_noabort",
	"__asan_load2_noabort",
	"__asan_load4_noabort",
	"__asan_load8_noabort",
	"__asan_load16_noabort",
	"__asan_storeN_noabort",
	"__asan_store1_noabort",
	"__asan_store2_noabort",
	"__asan_store4_noabort",
	"__asan_store8_noabort",
	"__asan_store16_noabort",
	/* KASAN in-line */
	"__asan_report_load_n_noabort",
	"__asan_report_load1_noabort",
	"__asan_report_load2_noabort",
	"__asan_report_load4_noabort",
	"__asan_report_load8_noabort",
	"__asan_report_load16_noabort",
	"__asan_report_store_n_noabort",
	"__asan_report_store1_noabort",
	"__asan_report_store2_noabort",
	"__asan_report_store4_noabort",
	"__asan_report_store8_noabort",
	"__asan_report_store16_noabort",
	/* KCSAN */
	"__kcsan_check_access",
	"kcsan_found_watchpoint",
	"kcsan_setup_watchpoint",
	"kcsan_check_scoped_accesses",
	"kcsan_disable_current",
	"kcsan_enable_current_nowarn",
	/* KCSAN/TSAN */
	"__tsan_func_entry",
	"__tsan_func_exit",
	"__tsan_read_range",
	"__tsan_write_range",
	"__tsan_read1",
	"__tsan_read2",
	"__tsan_read4",
	"__tsan_read8",
	"__tsan_read16",
	"__tsan_write1",
	"__tsan_write2",
	"__tsan_write4",
	"__tsan_write8",
	"__tsan_write16",
	/* KCOV */
	"write_comp_data",
	"check_kcov_mode",
	"__sanitizer_cov_trace_pc",
	"__sanitizer_cov_trace_const_cmp1",
	"__sanitizer_cov_trace_const_cmp2",
	"__sanitizer_cov_trace_const_cmp4",
	"__sanitizer_cov_trace_const_cmp8",
	"__sanitizer_cov_trace_cmp1",
	"__sanitizer_cov_trace_cmp2",
	"__sanitizer_cov_trace_cmp4",
	"__sanitizer_cov_trace_cmp8",
	"__sanitizer_cov_trace_switch",
	/* UBSAN */
	"ubsan_type_mismatch_common",
	"__ubsan_handle_type_mismatch",
	"__ubsan_handle_type_mismatch_v1",
	"__ubsan_handle_shift_out_of_bounds",
	/* misc */
	"csum_partial_copy_generic",
	"__memcpy_mcsafe",
	"mcsafe_handle_tail",
	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
	NULL	/* sentinel */
};
645 
646 static void add_uaccess_safe(struct objtool_file *file)
647 {
648 	struct symbol *func;
649 	const char **name;
650 
651 	if (!uaccess)
652 		return;
653 
654 	for (name = uaccess_safe_builtin; *name; name++) {
655 		func = find_symbol_by_name(file->elf, *name);
656 		if (!func)
657 			continue;
658 
659 		func->uaccess_safe = true;
660 	}
661 }
662 
/*
 * FIXME: For now, just ignore any alternatives which add retpolines.  This is
 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
 * But it at least allows objtool to understand the control flow *around* the
 * retpoline.
 *
 * Returns 0 on success (including when the annotation section is absent),
 * -1 on a malformed annotation.
 */
static int add_ignore_alternatives(struct objtool_file *file)
{
	struct section *sec;
	struct reloc *reloc;
	struct instruction *insn;

	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
	if (!sec)
		return 0;

	list_for_each_entry(reloc, &sec->reloc_list, list) {
		if (reloc->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s", sec->name);
			return -1;
		}

		insn = find_insn(file, reloc->sym->sec, reloc->addend);
		if (!insn) {
			WARN("bad .discard.ignore_alts entry");
			return -1;
		}

		insn->ignore_alts = true;
	}

	return 0;
}
696 
/*
 * Find the destination instructions for all jumps.
 *
 * For each static jump, classify how the destination is encoded (direct
 * offset, section-relative reloc, symbol reloc, retpoline thunk, or external
 * symbol) and resolve insn->jump_dest — or insn->call_dest for sibling
 * calls.  Also links GCC ".cold." subfunctions to their parents.
 */
static int add_jump_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	struct reloc *reloc;
	struct section *dest_sec;
	unsigned long dest_off;

	for_each_insn(file, insn) {
		if (!is_static_jump(insn))
			continue;

		/* Skip ignored insns and the fake jumps made by handle_group_alt(). */
		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
			continue;

		reloc = find_reloc_by_dest_range(file->elf, insn->sec,
					       insn->offset, insn->len);
		if (!reloc) {
			/* No reloc: destination is a direct offset in this section. */
			dest_sec = insn->sec;
			dest_off = arch_jump_destination(insn);
		} else if (reloc->sym->type == STT_SECTION) {
			dest_sec = reloc->sym->sec;
			dest_off = arch_dest_reloc_offset(reloc->addend);
		} else if (reloc->sym->sec->idx) {
			/* Symbol in a real (non-index-0) section. */
			dest_sec = reloc->sym->sec;
			dest_off = reloc->sym->sym.st_value +
				   arch_dest_reloc_offset(reloc->addend);
		} else if (strstr(reloc->sym->name, "_indirect_thunk_")) {
			/*
			 * Retpoline jumps are really dynamic jumps in
			 * disguise, so convert them accordingly.
			 */
			if (insn->type == INSN_JUMP_UNCONDITIONAL)
				insn->type = INSN_JUMP_DYNAMIC;
			else
				insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;

			insn->retpoline_safe = true;
			continue;
		} else {
			/* external sibling call */
			insn->call_dest = reloc->sym;
			if (insn->call_dest->static_call_tramp) {
				list_add_tail(&insn->static_call_node,
					      &file->static_call_list);
			}
			continue;
		}

		insn->jump_dest = find_insn(file, dest_sec, dest_off);
		if (!insn->jump_dest) {

			/*
			 * This is a special case where an alt instruction
			 * jumps past the end of the section.  These are
			 * handled later in handle_group_alt().
			 */
			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
				continue;

			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
				  insn->sec, insn->offset, dest_sec->name,
				  dest_off);
			return -1;
		}

		/*
		 * Cross-function jump.
		 */
		if (insn->func && insn->jump_dest->func &&
		    insn->func != insn->jump_dest->func) {

			/*
			 * For GCC 8+, create parent/child links for any cold
			 * subfunctions.  This is _mostly_ redundant with a
			 * similar initialization in read_symbols().
			 *
			 * If a function has aliases, we want the *first* such
			 * function in the symbol table to be the subfunction's
			 * parent.  In that case we overwrite the
			 * initialization done in read_symbols().
			 *
			 * However this code can't completely replace the
			 * read_symbols() code because this doesn't detect the
			 * case where the parent function's only reference to a
			 * subfunction is through a jump table.
			 */
			if (!strstr(insn->func->name, ".cold.") &&
			    strstr(insn->jump_dest->func->name, ".cold.")) {
				insn->func->cfunc = insn->jump_dest->func;
				insn->jump_dest->func->pfunc = insn->func;

			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
				   insn->jump_dest->offset == insn->jump_dest->func->offset) {

				/* internal sibling call */
				insn->call_dest = insn->jump_dest->func;
				if (insn->call_dest->static_call_tramp) {
					list_add_tail(&insn->static_call_node,
						      &file->static_call_list);
				}
			}
		}
	}

	return 0;
}
806 
807 static void remove_insn_ops(struct instruction *insn)
808 {
809 	struct stack_op *op, *tmp;
810 
811 	list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
812 		list_del(&op->list);
813 		free(op);
814 	}
815 }
816 
/*
 * Find the destination instructions for all calls.
 *
 * Resolves insn->call_dest for every INSN_CALL, NOPs out KCOV
 * instrumentation calls in noinstr text, and drops the stack_ops of regular
 * calls (their stack effect is undone by the callee's RETURN).
 */
static int add_call_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	unsigned long dest_off;
	struct reloc *reloc;

	for_each_insn(file, insn) {
		if (insn->type != INSN_CALL)
			continue;

		reloc = find_reloc_by_dest_range(file->elf, insn->sec,
					       insn->offset, insn->len);
		if (!reloc) {
			/* No reloc: direct call within this section. */
			dest_off = arch_jump_destination(insn);
			insn->call_dest = find_func_by_offset(insn->sec, dest_off);
			if (!insn->call_dest)
				insn->call_dest = find_symbol_by_offset(insn->sec, dest_off);

			if (insn->ignore)
				continue;

			if (!insn->call_dest) {
				WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset);
				return -1;
			}

			if (insn->func && insn->call_dest->type != STT_FUNC) {
				WARN_FUNC("unsupported call to non-function",
					  insn->sec, insn->offset);
				return -1;
			}

		} else if (reloc->sym->type == STT_SECTION) {
			dest_off = arch_dest_reloc_offset(reloc->addend);
			insn->call_dest = find_func_by_offset(reloc->sym->sec,
							      dest_off);
			if (!insn->call_dest) {
				WARN_FUNC("can't find call dest symbol at %s+0x%lx",
					  insn->sec, insn->offset,
					  reloc->sym->sec->name,
					  dest_off);
				return -1;
			}
		} else
			insn->call_dest = reloc->sym;

		/*
		 * Many compilers cannot disable KCOV with a function attribute
		 * so they need a little help, NOP out any KCOV calls from noinstr
		 * text.
		 */
		if (insn->sec->noinstr &&
		    !strncmp(insn->call_dest->name, "__sanitizer_cov_", 16)) {
			/* Kill the reloc too so the call can't be re-linked. */
			if (reloc) {
				reloc->type = R_NONE;
				elf_write_reloc(file->elf, reloc);
			}

			elf_write_insn(file->elf, insn->sec,
				       insn->offset, insn->len,
				       arch_nop_insn(insn->len));
			insn->type = INSN_NOP;
		}

		/*
		 * Whatever stack impact regular CALLs have, should be undone
		 * by the RETURN of the called function.
		 *
		 * Annotated intra-function calls retain the stack_ops but
		 * are converted to JUMP, see read_intra_function_calls().
		 */
		remove_insn_ops(insn);
	}

	return 0;
}
896 
/*
 * The .alternatives section requires some extra special care, over and above
 * what other special sections require:
 *
 * 1. Because alternatives are patched in-place, we need to insert a fake jump
 *    instruction at the end so that validate_branch() skips all the original
 *    replaced instructions when validating the new instruction path.
 *
 * 2. An added wrinkle is that the new instruction length might be zero.  In
 *    that case the old instructions are replaced with noops.  We simulate that
 *    by creating a fake jump as the only new instruction.
 *
 * 3. In some cases, the alternative section includes an instruction which
 *    conditionally jumps to the _end_ of the entry.  We have to modify these
 *    jumps' destinations to point back to .text rather than the end of the
 *    entry in .altinstr_replacement.
 */
static int handle_group_alt(struct objtool_file *file,
			    struct special_alt *special_alt,
			    struct instruction *orig_insn,
			    struct instruction **new_insn)
{
	/* Each call hands out fresh group ids for the orig and new ranges. */
	static unsigned int alt_group_next_index = 1;
	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
	unsigned int alt_group = alt_group_next_index++;
	unsigned long dest_off;

	/* Tag all original instructions covered by this alternative entry. */
	last_orig_insn = NULL;
	insn = orig_insn;
	sec_for_each_insn_from(file, insn) {
		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
			break;

		insn->alt_group = alt_group;
		last_orig_insn = insn;
	}

	/*
	 * Build the fake jump (see point 1 above) only when something follows
	 * the replaced range; at end-of-section there is nothing to jump to.
	 */
	if (next_insn_same_sec(file, last_orig_insn)) {
		fake_jump = malloc(sizeof(*fake_jump));
		if (!fake_jump) {
			WARN("malloc failed");
			return -1;
		}
		memset(fake_jump, 0, sizeof(*fake_jump));
		INIT_LIST_HEAD(&fake_jump->alts);
		INIT_LIST_HEAD(&fake_jump->stack_ops);
		init_cfi_state(&fake_jump->cfi);

		fake_jump->sec = special_alt->new_sec;
		fake_jump->offset = FAKE_JUMP_OFFSET;
		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
		fake_jump->func = orig_insn->func;
	}

	/* Zero-length replacement (point 2): the fake jump is the whole path. */
	if (!special_alt->new_len) {
		if (!fake_jump) {
			WARN("%s: empty alternative at end of section",
			     special_alt->orig_sec->name);
			return -1;
		}

		*new_insn = fake_jump;
		return 0;
	}

	last_new_insn = NULL;
	alt_group = alt_group_next_index++;
	insn = *new_insn;
	sec_for_each_insn_from(file, insn) {
		struct reloc *alt_reloc;

		if (insn->offset >= special_alt->new_off + special_alt->new_len)
			break;

		last_new_insn = insn;

		insn->ignore = orig_insn->ignore_alts;
		insn->func = orig_insn->func;
		insn->alt_group = alt_group;

		/*
		 * Since alternative replacement code is copy/pasted by the
		 * kernel after applying relocations, generally such code can't
		 * have relative-address relocation references to outside the
		 * .altinstr_replacement section, unless the arch's
		 * alternatives code can adjust the relative offsets
		 * accordingly.
		 */
		alt_reloc = find_reloc_by_dest_range(file->elf, insn->sec,
						   insn->offset, insn->len);
		if (alt_reloc &&
		    !arch_support_alt_relocation(special_alt, insn, alt_reloc)) {

			WARN_FUNC("unsupported relocation in alternatives section",
				  insn->sec, insn->offset);
			return -1;
		}

		if (!is_static_jump(insn))
			continue;

		/* Only direct (immediate) jumps need their dest redirected. */
		if (!insn->immediate)
			continue;

		/* Jump to the end of the entry (point 3): retarget to .text. */
		dest_off = arch_jump_destination(insn);
		if (dest_off == special_alt->new_off + special_alt->new_len) {
			if (!fake_jump) {
				WARN("%s: alternative jump to end of section",
				     special_alt->orig_sec->name);
				return -1;
			}
			insn->jump_dest = fake_jump;
		}

		if (!insn->jump_dest) {
			WARN_FUNC("can't find alternative jump destination",
				  insn->sec, insn->offset);
			return -1;
		}
	}

	if (!last_new_insn) {
		WARN_FUNC("can't find last new alternative instruction",
			  special_alt->new_sec, special_alt->new_off);
		return -1;
	}

	/* Splice the fake jump in right after the last replacement insn. */
	if (fake_jump)
		list_add(&fake_jump->list, &last_new_insn->list);

	return 0;
}
1030 
1031 /*
1032  * A jump table entry can either convert a nop to a jump or a jump to a nop.
1033  * If the original instruction is a jump, make the alt entry an effective nop
1034  * by just skipping the original instruction.
1035  */
1036 static int handle_jump_alt(struct objtool_file *file,
1037 			   struct special_alt *special_alt,
1038 			   struct instruction *orig_insn,
1039 			   struct instruction **new_insn)
1040 {
1041 	if (orig_insn->type == INSN_NOP)
1042 		return 0;
1043 
1044 	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
1045 		WARN_FUNC("unsupported instruction at jump label",
1046 			  orig_insn->sec, orig_insn->offset);
1047 		return -1;
1048 	}
1049 
1050 	*new_insn = list_next_entry(orig_insn, list);
1051 	return 0;
1052 }
1053 
/*
 * Read all the special sections which have alternate instructions which can be
 * patched in or redirected to at runtime.  Each instruction having alternate
 * instruction(s) has them added to its insn->alts list, which will be
 * traversed in validate_branch().
 */
static int add_special_section_alts(struct objtool_file *file)
{
	struct list_head special_alts;
	struct instruction *orig_insn, *new_insn;
	struct special_alt *special_alt, *tmp;
	struct alternative *alt;
	int ret;

	/* Parse the arch's special sections into a list of alt entries. */
	ret = special_get_alts(file->elf, &special_alts);
	if (ret)
		return ret;

	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {

		orig_insn = find_insn(file, special_alt->orig_sec,
				      special_alt->orig_off);
		if (!orig_insn) {
			WARN_FUNC("special: can't find orig instruction",
				  special_alt->orig_sec, special_alt->orig_off);
			ret = -1;
			goto out;
		}

		/*
		 * Grouped entries with a zero-length replacement have no new
		 * instruction; handle_group_alt() synthesizes one instead.
		 */
		new_insn = NULL;
		if (!special_alt->group || special_alt->new_len) {
			new_insn = find_insn(file, special_alt->new_sec,
					     special_alt->new_off);
			if (!new_insn) {
				WARN_FUNC("special: can't find new instruction",
					  special_alt->new_sec,
					  special_alt->new_off);
				ret = -1;
				goto out;
			}
		}

		if (special_alt->group) {
			if (!special_alt->orig_len) {
				WARN_FUNC("empty alternative entry",
					  orig_insn->sec, orig_insn->offset);
				continue;
			}

			ret = handle_group_alt(file, special_alt, orig_insn,
					       &new_insn);
			if (ret)
				goto out;
		} else if (special_alt->jump_or_nop) {
			ret = handle_jump_alt(file, special_alt, orig_insn,
					      &new_insn);
			if (ret)
				goto out;
		}

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			ret = -1;
			goto out;
		}

		alt->insn = new_insn;
		alt->skip_orig = special_alt->skip_orig;
		orig_insn->ignore_alts |= special_alt->skip_alt;
		list_add_tail(&alt->list, &orig_insn->alts);

		/* Entry consumed: unlink and free it. */
		list_del(&special_alt->list);
		free(special_alt);
	}

out:
	return ret;
}
1133 
/*
 * Add each entry of the switch jump table starting at @table as an
 * alternative branch target of the dynamic jump @insn.
 */
static int add_jump_table(struct objtool_file *file, struct instruction *insn,
			    struct reloc *table)
{
	struct reloc *reloc = table;
	struct instruction *dest_insn;
	struct alternative *alt;
	struct symbol *pfunc = insn->func->pfunc;
	unsigned int prev_offset = 0;

	/*
	 * Each @reloc is a switch table relocation which points to the target
	 * instruction.
	 */
	list_for_each_entry_from(reloc, &table->sec->reloc_list, list) {

		/* Check for the end of the table: */
		if (reloc != table && reloc->jump_table_start)
			break;

		/* Make sure the table entries are consecutive: */
		/* NOTE(review): assumes 8-byte table entries -- TODO confirm per-arch. */
		if (prev_offset && reloc->offset != prev_offset + 8)
			break;

		/* Detect function pointers from contiguous objects: */
		if (reloc->sym->sec == pfunc->sec &&
		    reloc->addend == pfunc->offset)
			break;

		dest_insn = find_insn(file, reloc->sym->sec, reloc->addend);
		if (!dest_insn)
			break;

		/* Make sure the destination is in the same function: */
		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
			break;

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			return -1;
		}

		alt->insn = dest_insn;
		list_add_tail(&alt->list, &insn->alts);
		prev_offset = reloc->offset;
	}

	/* prev_offset == 0 means not a single entry was accepted. */
	if (!prev_offset) {
		WARN_FUNC("can't find switch jump table",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}
1189 
1190 /*
1191  * find_jump_table() - Given a dynamic jump, find the switch jump table
1192  * associated with it.
1193  */
static struct reloc *find_jump_table(struct objtool_file *file,
				      struct symbol *func,
				      struct instruction *insn)
{
	struct reloc *table_reloc;
	struct instruction *dest_insn, *orig_insn = insn;

	/*
	 * Backward search using the @first_jump_src links, these help avoid
	 * much of the 'in between' code. Which avoids us getting confused by
	 * it.
	 */
	for (;
	     insn && insn->func && insn->func->pfunc == func;
	     insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {

		/* An earlier dynamic jump owns its own table; stop there. */
		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
			break;

		/* allow small jumps within the range */
		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
		    insn->jump_dest &&
		    (insn->jump_dest->offset <= insn->offset ||
		     insn->jump_dest->offset > orig_insn->offset))
		    break;

		/* Arch-specific pattern match for a table reference. */
		table_reloc = arch_find_switch_table(file, insn);
		if (!table_reloc)
			continue;
		/* The first table entry must land back inside @func. */
		dest_insn = find_insn(file, table_reloc->sym->sec, table_reloc->addend);
		if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
			continue;

		return table_reloc;
	}

	return NULL;
}
1232 
1233 /*
1234  * First pass: Mark the head of each jump table so that in the next pass,
1235  * we know when a given jump table ends and the next one starts.
1236  */
static void mark_func_jump_tables(struct objtool_file *file,
				    struct symbol *func)
{
	struct instruction *insn, *last = NULL;
	struct reloc *reloc;

	func_for_each_insn(file, func, insn) {
		if (!last)
			last = insn;

		/*
		 * Store back-pointers for unconditional forward jumps such
		 * that find_jump_table() can back-track using those and
		 * avoid some potentially confusing code.
		 */
		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
		    insn->offset > last->offset &&
		    insn->jump_dest->offset > insn->offset &&
		    !insn->jump_dest->first_jump_src) {

			insn->jump_dest->first_jump_src = insn;
			last = insn->jump_dest;
		}

		/* Only dynamic jumps can use a jump table. */
		if (insn->type != INSN_JUMP_DYNAMIC)
			continue;

		/* Mark the head entry so the next table's start is known. */
		reloc = find_jump_table(file, func, insn);
		if (reloc) {
			reloc->jump_table_start = true;
			insn->jump_table = reloc;
		}
	}
}
1271 
1272 static int add_func_jump_tables(struct objtool_file *file,
1273 				  struct symbol *func)
1274 {
1275 	struct instruction *insn;
1276 	int ret;
1277 
1278 	func_for_each_insn(file, func, insn) {
1279 		if (!insn->jump_table)
1280 			continue;
1281 
1282 		ret = add_jump_table(file, insn, insn->jump_table);
1283 		if (ret)
1284 			return ret;
1285 	}
1286 
1287 	return 0;
1288 }
1289 
1290 /*
1291  * For some switch statements, gcc generates a jump table in the .rodata
1292  * section which contains a list of addresses within the function to jump to.
1293  * This finds these jump tables and adds them to the insn->alts lists.
1294  */
1295 static int add_jump_table_alts(struct objtool_file *file)
1296 {
1297 	struct section *sec;
1298 	struct symbol *func;
1299 	int ret;
1300 
1301 	if (!file->rodata)
1302 		return 0;
1303 
1304 	for_each_sec(file, sec) {
1305 		list_for_each_entry(func, &sec->symbol_list, list) {
1306 			if (func->type != STT_FUNC)
1307 				continue;
1308 
1309 			mark_func_jump_tables(file, func);
1310 			ret = add_func_jump_tables(file, func);
1311 			if (ret)
1312 				return ret;
1313 		}
1314 	}
1315 
1316 	return 0;
1317 }
1318 
/*
 * Apply the UNWIND_HINT annotations from .discard.unwind_hints: each hint
 * seeds the CFI state (CFA base register, offset, type) of its target
 * instruction, or records a return-address offset for RET_OFFSET hints.
 */
static int read_unwind_hints(struct objtool_file *file)
{
	struct section *sec, *relocsec;
	struct reloc *reloc;
	struct unwind_hint *hint;
	struct instruction *insn;
	struct cfi_reg *cfa;
	int i;

	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
	if (!sec)
		return 0;

	relocsec = sec->reloc;
	if (!relocsec) {
		WARN("missing .rela.discard.unwind_hints section");
		return -1;
	}

	/* The section must be a whole number of unwind_hint records. */
	if (sec->len % sizeof(struct unwind_hint)) {
		WARN("struct unwind_hint size mismatch");
		return -1;
	}

	file->hints = true;

	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
		hint = (struct unwind_hint *)sec->data->d_buf + i;

		/* Each record has a reloc pointing at the hinted insn. */
		reloc = find_reloc_by_dest(file->elf, sec, i * sizeof(*hint));
		if (!reloc) {
			WARN("can't find reloc for unwind_hints[%d]", i);
			return -1;
		}

		insn = find_insn(file, reloc->sym->sec, reloc->addend);
		if (!insn) {
			WARN("can't find insn for unwind_hints[%d]", i);
			return -1;
		}

		cfa = &insn->cfi.cfa;

		/* RET_OFFSET hints only record the offset, no CFI change. */
		if (hint->type == UNWIND_HINT_TYPE_RET_OFFSET) {
			insn->ret_offset = hint->sp_offset;
			continue;
		}

		insn->hint = true;

		/* Map the ORC register encoding onto the CFI base register. */
		switch (hint->sp_reg) {
		case ORC_REG_UNDEFINED:
			cfa->base = CFI_UNDEFINED;
			break;
		case ORC_REG_SP:
			cfa->base = CFI_SP;
			break;
		case ORC_REG_BP:
			cfa->base = CFI_BP;
			break;
		case ORC_REG_SP_INDIRECT:
			cfa->base = CFI_SP_INDIRECT;
			break;
		case ORC_REG_R10:
			cfa->base = CFI_R10;
			break;
		case ORC_REG_R13:
			cfa->base = CFI_R13;
			break;
		case ORC_REG_DI:
			cfa->base = CFI_DI;
			break;
		case ORC_REG_DX:
			cfa->base = CFI_DX;
			break;
		default:
			WARN_FUNC("unsupported unwind_hint sp base reg %d",
				  insn->sec, insn->offset, hint->sp_reg);
			return -1;
		}

		cfa->offset = hint->sp_offset;
		insn->cfi.type = hint->type;
		insn->cfi.end = hint->end;
	}

	return 0;
}
1407 
1408 static int read_retpoline_hints(struct objtool_file *file)
1409 {
1410 	struct section *sec;
1411 	struct instruction *insn;
1412 	struct reloc *reloc;
1413 
1414 	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1415 	if (!sec)
1416 		return 0;
1417 
1418 	list_for_each_entry(reloc, &sec->reloc_list, list) {
1419 		if (reloc->sym->type != STT_SECTION) {
1420 			WARN("unexpected relocation symbol type in %s", sec->name);
1421 			return -1;
1422 		}
1423 
1424 		insn = find_insn(file, reloc->sym->sec, reloc->addend);
1425 		if (!insn) {
1426 			WARN("bad .discard.retpoline_safe entry");
1427 			return -1;
1428 		}
1429 
1430 		if (insn->type != INSN_JUMP_DYNAMIC &&
1431 		    insn->type != INSN_CALL_DYNAMIC) {
1432 			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1433 				  insn->sec, insn->offset);
1434 			return -1;
1435 		}
1436 
1437 		insn->retpoline_safe = true;
1438 	}
1439 
1440 	return 0;
1441 }
1442 
1443 static int read_instr_hints(struct objtool_file *file)
1444 {
1445 	struct section *sec;
1446 	struct instruction *insn;
1447 	struct reloc *reloc;
1448 
1449 	sec = find_section_by_name(file->elf, ".rela.discard.instr_end");
1450 	if (!sec)
1451 		return 0;
1452 
1453 	list_for_each_entry(reloc, &sec->reloc_list, list) {
1454 		if (reloc->sym->type != STT_SECTION) {
1455 			WARN("unexpected relocation symbol type in %s", sec->name);
1456 			return -1;
1457 		}
1458 
1459 		insn = find_insn(file, reloc->sym->sec, reloc->addend);
1460 		if (!insn) {
1461 			WARN("bad .discard.instr_end entry");
1462 			return -1;
1463 		}
1464 
1465 		insn->instr--;
1466 	}
1467 
1468 	sec = find_section_by_name(file->elf, ".rela.discard.instr_begin");
1469 	if (!sec)
1470 		return 0;
1471 
1472 	list_for_each_entry(reloc, &sec->reloc_list, list) {
1473 		if (reloc->sym->type != STT_SECTION) {
1474 			WARN("unexpected relocation symbol type in %s", sec->name);
1475 			return -1;
1476 		}
1477 
1478 		insn = find_insn(file, reloc->sym->sec, reloc->addend);
1479 		if (!insn) {
1480 			WARN("bad .discard.instr_begin entry");
1481 			return -1;
1482 		}
1483 
1484 		insn->instr++;
1485 	}
1486 
1487 	return 0;
1488 }
1489 
/*
 * Process .discard.intra_function_calls: direct CALLs annotated as
 * intra-function calls are converted to unconditional jumps (their push
 * stack_op is kept) and get their jump destination resolved here.
 */
static int read_intra_function_calls(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	struct reloc *reloc;

	sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls");
	if (!sec)
		return 0;

	list_for_each_entry(reloc, &sec->reloc_list, list) {
		unsigned long dest_off;

		if (reloc->sym->type != STT_SECTION) {
			WARN("unexpected relocation symbol type in %s",
			     sec->name);
			return -1;
		}

		insn = find_insn(file, reloc->sym->sec, reloc->addend);
		if (!insn) {
			WARN("bad .discard.intra_function_call entry");
			return -1;
		}

		/* Only direct calls may carry this annotation. */
		if (insn->type != INSN_CALL) {
			WARN_FUNC("intra_function_call not a direct call",
				  insn->sec, insn->offset);
			return -1;
		}

		/*
		 * Treat intra-function CALLs as JMPs, but with a stack_op.
		 * See add_call_destinations(), which strips stack_ops from
		 * normal CALLs.
		 */
		insn->type = INSN_JUMP_UNCONDITIONAL;

		/* Destination is relative to the end of the call insn. */
		dest_off = insn->offset + insn->len + insn->immediate;
		insn->jump_dest = find_insn(file, insn->sec, dest_off);
		if (!insn->jump_dest) {
			WARN_FUNC("can't find call dest at %s+0x%lx",
				  insn->sec, insn->offset,
				  insn->sec->name, dest_off);
			return -1;
		}
	}

	return 0;
}
1540 
1541 static int read_static_call_tramps(struct objtool_file *file)
1542 {
1543 	struct section *sec;
1544 	struct symbol *func;
1545 
1546 	for_each_sec(file, sec) {
1547 		list_for_each_entry(func, &sec->symbol_list, list) {
1548 			if (func->bind == STB_GLOBAL &&
1549 			    !strncmp(func->name, STATIC_CALL_TRAMP_PREFIX_STR,
1550 				     strlen(STATIC_CALL_TRAMP_PREFIX_STR)))
1551 				func->static_call_tramp = true;
1552 		}
1553 	}
1554 
1555 	return 0;
1556 }
1557 
1558 static void mark_rodata(struct objtool_file *file)
1559 {
1560 	struct section *sec;
1561 	bool found = false;
1562 
1563 	/*
1564 	 * Search for the following rodata sections, each of which can
1565 	 * potentially contain jump tables:
1566 	 *
1567 	 * - .rodata: can contain GCC switch tables
1568 	 * - .rodata.<func>: same, if -fdata-sections is being used
1569 	 * - .rodata..c_jump_table: contains C annotated jump tables
1570 	 *
1571 	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1572 	 */
1573 	for_each_sec(file, sec) {
1574 		if (!strncmp(sec->name, ".rodata", 7) &&
1575 		    !strstr(sec->name, ".str1.")) {
1576 			sec->rodata = true;
1577 			found = true;
1578 		}
1579 	}
1580 
1581 	file->rodata = found;
1582 }
1583 
static int decode_sections(struct objtool_file *file)
{
	int ret;

	/* Tag rodata sections first; jump-table detection depends on it. */
	mark_rodata(file);

	/*
	 * The passes below are order-dependent: each one consumes state
	 * (instructions, jump/call destinations, alternatives) produced
	 * by the passes before it.
	 */
	if ((ret = decode_instructions(file)))
		return ret;

	if ((ret = add_dead_ends(file)))
		return ret;

	add_ignores(file);
	add_uaccess_safe(file);

	if ((ret = add_ignore_alternatives(file)))
		return ret;

	if ((ret = read_static_call_tramps(file)))
		return ret;

	if ((ret = add_jump_destinations(file)))
		return ret;

	if ((ret = add_special_section_alts(file)))
		return ret;

	if ((ret = read_intra_function_calls(file)))
		return ret;

	if ((ret = add_call_destinations(file)))
		return ret;

	if ((ret = add_jump_table_alts(file)))
		return ret;

	if ((ret = read_unwind_hints(file)))
		return ret;

	if ((ret = read_retpoline_hints(file)))
		return ret;

	return read_instr_hints(file);
}
1643 
1644 static bool is_fentry_call(struct instruction *insn)
1645 {
1646 	if (insn->type == INSN_CALL && insn->call_dest &&
1647 	    insn->call_dest->type == STT_NOTYPE &&
1648 	    !strcmp(insn->call_dest->name, "__fentry__"))
1649 		return true;
1650 
1651 	return false;
1652 }
1653 
1654 static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
1655 {
1656 	u8 ret_offset = insn->ret_offset;
1657 	struct cfi_state *cfi = &state->cfi;
1658 	int i;
1659 
1660 	if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
1661 		return true;
1662 
1663 	if (cfi->cfa.offset != initial_func_cfi.cfa.offset + ret_offset)
1664 		return true;
1665 
1666 	if (cfi->stack_size != initial_func_cfi.cfa.offset + ret_offset)
1667 		return true;
1668 
1669 	/*
1670 	 * If there is a ret offset hint then don't check registers
1671 	 * because a callee-saved register might have been pushed on
1672 	 * the stack.
1673 	 */
1674 	if (ret_offset)
1675 		return false;
1676 
1677 	for (i = 0; i < CFI_NUM_REGS; i++) {
1678 		if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
1679 		    cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
1680 			return true;
1681 	}
1682 
1683 	return false;
1684 }
1685 
1686 static bool has_valid_stack_frame(struct insn_state *state)
1687 {
1688 	struct cfi_state *cfi = &state->cfi;
1689 
1690 	if (cfi->cfa.base == CFI_BP && cfi->regs[CFI_BP].base == CFI_CFA &&
1691 	    cfi->regs[CFI_BP].offset == -16)
1692 		return true;
1693 
1694 	if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
1695 		return true;
1696 
1697 	return false;
1698 }
1699 
1700 static int update_cfi_state_regs(struct instruction *insn,
1701 				  struct cfi_state *cfi,
1702 				  struct stack_op *op)
1703 {
1704 	struct cfi_reg *cfa = &cfi->cfa;
1705 
1706 	if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
1707 		return 0;
1708 
1709 	/* push */
1710 	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1711 		cfa->offset += 8;
1712 
1713 	/* pop */
1714 	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1715 		cfa->offset -= 8;
1716 
1717 	/* add immediate to sp */
1718 	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1719 	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1720 		cfa->offset -= op->src.offset;
1721 
1722 	return 0;
1723 }
1724 
1725 static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
1726 {
1727 	if (arch_callee_saved_reg(reg) &&
1728 	    cfi->regs[reg].base == CFI_UNDEFINED) {
1729 		cfi->regs[reg].base = base;
1730 		cfi->regs[reg].offset = offset;
1731 	}
1732 }
1733 
1734 static void restore_reg(struct cfi_state *cfi, unsigned char reg)
1735 {
1736 	cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
1737 	cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
1738 }
1739 
1740 /*
1741  * A note about DRAP stack alignment:
1742  *
1743  * GCC has the concept of a DRAP register, which is used to help keep track of
1744  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1745  * register.  The typical DRAP pattern is:
1746  *
1747  *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
1748  *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
1749  *   41 ff 72 f8		pushq  -0x8(%r10)
1750  *   55				push   %rbp
1751  *   48 89 e5			mov    %rsp,%rbp
1752  *				(more pushes)
1753  *   41 52			push   %r10
1754  *				...
1755  *   41 5a			pop    %r10
1756  *				(more pops)
1757  *   5d				pop    %rbp
1758  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1759  *   c3				retq
1760  *
1761  * There are some variations in the epilogues, like:
1762  *
1763  *   5b				pop    %rbx
1764  *   41 5a			pop    %r10
1765  *   41 5c			pop    %r12
1766  *   41 5d			pop    %r13
1767  *   41 5e			pop    %r14
1768  *   c9				leaveq
1769  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1770  *   c3				retq
1771  *
1772  * and:
1773  *
1774  *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
1775  *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
1776  *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
1777  *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
1778  *   c9				leaveq
1779  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1780  *   c3				retq
1781  *
1782  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1783  * restored beforehand:
1784  *
1785  *   41 55			push   %r13
1786  *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
1787  *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
1788  *				...
1789  *   49 8d 65 f0		lea    -0x10(%r13),%rsp
1790  *   41 5d			pop    %r13
1791  *   c3				retq
1792  */
/*
 * Apply the effect of one stack operation @op (from @insn) to the CFI
 * state @cfi: CFA base/offset, stack size, callee-saved register
 * locations and the DRAP bookkeeping described above.
 *
 * Returns 0 on success, -1 on an unsupported or invalid stack
 * modification.
 */
static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
			     struct stack_op *op)
{
	struct cfi_reg *cfa = &cfi->cfa;
	struct cfi_reg *regs = cfi->regs;

	/* stack operations don't make sense with an undefined CFA */
	if (cfa->base == CFI_UNDEFINED) {
		if (insn->func) {
			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
			return -1;
		}
		return 0;
	}

	/* For ORC regs frames, delegate to the simplified tracker. */
	if (cfi->type == ORC_TYPE_REGS || cfi->type == ORC_TYPE_REGS_IRET)
		return update_cfi_state_regs(insn, cfi, op);

	switch (op->dest.type) {

	case OP_DEST_REG:
		switch (op->src.type) {

		case OP_SRC_REG:
			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
			    cfa->base == CFI_SP &&
			    regs[CFI_BP].base == CFI_CFA &&
			    regs[CFI_BP].offset == -cfa->offset) {

				/* mov %rsp, %rbp */
				cfa->base = op->dest.reg;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP &&
				 op->dest.reg == CFI_BP && cfi->drap) {

				/* drap: mov %rsp, %rbp */
				regs[CFI_BP].base = CFI_BP;
				regs[CFI_BP].offset = -cfi->stack_size;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/*
				 * mov %rsp, %reg
				 *
				 * This is needed for the rare case where GCC
				 * does:
				 *
				 *   mov    %rsp, %rax
				 *   ...
				 *   mov    %rax, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = -cfi->stack_size;
			}

			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
				 cfa->base == CFI_BP) {

				/*
				 * mov %rbp, %rsp
				 *
				 * Restore the original stack pointer (Clang).
				 */
				cfi->stack_size = -cfi->regs[CFI_BP].offset;
			}

			else if (op->dest.reg == cfa->base) {

				/* mov %reg, %rsp */
				if (cfa->base == CFI_SP &&
				    cfi->vals[op->src.reg].base == CFI_CFA) {

					/*
					 * This is needed for the rare case
					 * where GCC does something dumb like:
					 *
					 *   lea    0x8(%rsp), %rcx
					 *   ...
					 *   mov    %rcx, %rsp
					 */
					cfa->offset = -cfi->vals[op->src.reg].offset;
					cfi->stack_size = cfa->offset;

				} else {
					/* Lost track of the stack pointer. */
					cfa->base = CFI_UNDEFINED;
					cfa->offset = 0;
				}
			}

			break;

		case OP_SRC_ADD:
			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {

				/* add imm, %rsp */
				cfi->stack_size -= op->src.offset;
				if (cfa->base == CFI_SP)
					cfa->offset -= op->src.offset;
				break;
			}

			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {

				/* lea disp(%rbp), %rsp */
				cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
				break;
			}

			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/* drap: lea disp(%rsp), %drap */
				cfi->drap_reg = op->dest.reg;

				/*
				 * lea disp(%rsp), %reg
				 *
				 * This is needed for the rare case where GCC
				 * does something dumb like:
				 *
				 *   lea    0x8(%rsp), %rcx
				 *   ...
				 *   mov    %rcx, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = \
					-cfi->stack_size + op->src.offset;

				break;
			}

			if (cfi->drap && op->dest.reg == CFI_SP &&
			    op->src.reg == cfi->drap_reg) {

				 /* drap: lea disp(%drap), %rsp */
				cfa->base = CFI_SP;
				cfa->offset = cfi->stack_size = -op->src.offset;
				cfi->drap_reg = CFI_UNDEFINED;
				cfi->drap = false;
				break;
			}

			if (op->dest.reg == cfi->cfa.base) {
				WARN_FUNC("unsupported stack register modification",
					  insn->sec, insn->offset);
				return -1;
			}

			break;

		case OP_SRC_AND:
			if (op->dest.reg != CFI_SP ||
			    (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
			    (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
				WARN_FUNC("unsupported stack pointer realignment",
					  insn->sec, insn->offset);
				return -1;
			}

			if (cfi->drap_reg != CFI_UNDEFINED) {
				/* drap: and imm, %rsp */
				cfa->base = cfi->drap_reg;
				cfa->offset = cfi->stack_size = 0;
				cfi->drap = true;
			}

			/*
			 * Older versions of GCC (4.8ish) realign the stack
			 * without DRAP, with a frame pointer.
			 */

			break;

		case OP_SRC_POP:
		case OP_SRC_POPF:
			if (!cfi->drap && op->dest.reg == cfa->base) {

				/* pop %rbp */
				cfa->base = CFI_SP;
			}

			if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
			    op->dest.reg == cfi->drap_reg &&
			    cfi->drap_offset == -cfi->stack_size) {

				/* drap: pop %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;

			} else if (regs[op->dest.reg].offset == -cfi->stack_size) {

				/* pop %reg */
				restore_reg(cfi, op->dest.reg);
			}

			cfi->stack_size -= 8;
			if (cfa->base == CFI_SP)
				cfa->offset -= 8;

			break;

		case OP_SRC_REG_INDIRECT:
			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == cfi->drap_offset) {

				/* drap: mov disp(%rbp), %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;
			}

			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == regs[op->dest.reg].offset) {

				/* drap: mov disp(%rbp), %reg */
				restore_reg(cfi, op->dest.reg);

			} else if (op->src.reg == cfa->base &&
			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {

				/* mov disp(%rbp), %reg */
				/* mov disp(%rsp), %reg */
				restore_reg(cfi, op->dest.reg);
			}

			break;

		default:
			WARN_FUNC("unknown stack-related instruction",
				  insn->sec, insn->offset);
			return -1;
		}

		break;

	case OP_DEST_PUSH:
	case OP_DEST_PUSHF:
		cfi->stack_size += 8;
		if (cfa->base == CFI_SP)
			cfa->offset += 8;

		if (op->src.type != OP_SRC_REG)
			break;

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: push %drap */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = -cfi->stack_size;

				/* save drap so we know when to restore it */
				cfi->drap_offset = -cfi->stack_size;

			} else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {

				/* drap: push %rbp */
				cfi->stack_size = 0;

			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: push %reg */
				save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
			}

		} else {

			/* push %reg */
			save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
		}

		/* detect when asm code uses rbp as a scratch register */
		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
		    cfa->base != CFI_BP)
			cfi->bp_scratch = true;
		break;

	case OP_DEST_REG_INDIRECT:

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: mov %drap, disp(%rbp) */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = op->dest.offset;

				/* save drap offset so we know when to restore it */
				cfi->drap_offset = op->dest.offset;
			}

			else if (regs[op->src.reg].base == CFI_UNDEFINED) {

				/* drap: mov reg, disp(%rbp) */
				save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
			}

		} else if (op->dest.reg == cfa->base) {

			/* mov reg, disp(%rbp) */
			/* mov reg, disp(%rsp) */
			save_reg(cfi, op->src.reg, CFI_CFA,
				 op->dest.offset - cfi->cfa.offset);
		}

		break;

	case OP_DEST_LEAVE:
		if ((!cfi->drap && cfa->base != CFI_BP) ||
		    (cfi->drap && cfa->base != cfi->drap_reg)) {
			WARN_FUNC("leave instruction with modified stack frame",
				  insn->sec, insn->offset);
			return -1;
		}

		/* leave (mov %rbp, %rsp; pop %rbp) */

		cfi->stack_size = -cfi->regs[CFI_BP].offset - 8;
		restore_reg(cfi, CFI_BP);

		if (!cfi->drap) {
			cfa->base = CFI_SP;
			cfa->offset -= 8;
		}

		break;

	case OP_DEST_MEM:
		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
			WARN_FUNC("unknown stack-related memory operation",
				  insn->sec, insn->offset);
			return -1;
		}

		/* pop mem */
		cfi->stack_size -= 8;
		if (cfa->base == CFI_SP)
			cfa->offset -= 8;

		break;

	default:
		WARN_FUNC("unknown stack-related instruction",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}
2145 
/*
 * Apply all stack_ops of @insn to the CFI state, and maintain the
 * PUSHF/POPF uaccess shadow stack in @state.
 */
static int handle_insn_ops(struct instruction *insn, struct insn_state *state)
{
	struct stack_op *op;

	list_for_each_entry(op, &insn->stack_ops, list) {
		struct cfi_state old_cfi = state->cfi;
		int res;

		res = update_cfi_state(insn, &state->cfi, op);
		if (res)
			return res;

		/* Instructions inside an alternative must not change CFI. */
		if (insn->alt_group && memcmp(&state->cfi, &old_cfi, sizeof(struct cfi_state))) {
			WARN_FUNC("alternative modifies stack", insn->sec, insn->offset);
			return -1;
		}

		if (op->dest.type == OP_DEST_PUSHF) {
			/*
			 * uaccess_stack is a bit-stack of saved uaccess
			 * states; an empty stack is seeded with 1 as a
			 * sentinel.  Overflows after 31 nested pushes.
			 */
			if (!state->uaccess_stack) {
				state->uaccess_stack = 1;
			} else if (state->uaccess_stack >> 31) {
				WARN_FUNC("PUSHF stack exhausted",
					  insn->sec, insn->offset);
				/* NOTE(review): returns 1, not -1 -- confirm callers treat any nonzero as failure. */
				return 1;
			}
			state->uaccess_stack <<= 1;
			state->uaccess_stack  |= state->uaccess;
		}

		if (op->src.type == OP_SRC_POPF) {
			/* Pop one saved state; 1 is the empty-stack sentinel. */
			if (state->uaccess_stack) {
				state->uaccess = state->uaccess_stack & 1;
				state->uaccess_stack >>= 1;
				if (state->uaccess_stack == 1)
					state->uaccess_stack = 0;
			}
		}
	}

	return 0;
}
2187 
/*
 * Compare the CFI state recorded at @insn with @cfi2 (the state arriving
 * via another code path).  Warns about the first difference found (CFA,
 * register locations, type, or DRAP state).  Returns true iff they match.
 */
static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
{
	struct cfi_state *cfi1 = &insn->cfi;
	int i;

	if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {

		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
			  insn->sec, insn->offset,
			  cfi1->cfa.base, cfi1->cfa.offset,
			  cfi2->cfa.base, cfi2->cfa.offset);

	} else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
		/* Find and report the first differing register. */
		for (i = 0; i < CFI_NUM_REGS; i++) {
			if (!memcmp(&cfi1->regs[i], &cfi2->regs[i],
				    sizeof(struct cfi_reg)))
				continue;

			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
				  insn->sec, insn->offset,
				  i, cfi1->regs[i].base, cfi1->regs[i].offset,
				  i, cfi2->regs[i].base, cfi2->regs[i].offset);
			break;
		}

	} else if (cfi1->type != cfi2->type) {

		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
			  insn->sec, insn->offset, cfi1->type, cfi2->type);

	} else if (cfi1->drap != cfi2->drap ||
		   (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
		   (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {

		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
			  insn->sec, insn->offset,
			  cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
			  cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);

	} else
		return true;

	return false;
}
2232 
2233 static inline bool func_uaccess_safe(struct symbol *func)
2234 {
2235 	if (func)
2236 		return func->uaccess_safe;
2237 
2238 	return false;
2239 }
2240 
2241 static inline const char *call_dest_name(struct instruction *insn)
2242 {
2243 	if (insn->call_dest)
2244 		return insn->call_dest->name;
2245 
2246 	return "{dynamic}";
2247 }
2248 
2249 static inline bool noinstr_call_dest(struct symbol *func)
2250 {
2251 	/*
2252 	 * We can't deal with indirect function calls at present;
2253 	 * assume they're instrumented.
2254 	 */
2255 	if (!func)
2256 		return false;
2257 
2258 	/*
2259 	 * If the symbol is from a noinstr section; we good.
2260 	 */
2261 	if (func->sec->noinstr)
2262 		return true;
2263 
2264 	/*
2265 	 * The __ubsan_handle_*() calls are like WARN(), they only happen when
2266 	 * something 'BAD' happened. At the risk of taking the machine down,
2267 	 * let them proceed to get the message out.
2268 	 */
2269 	if (!strncmp(func->name, "__ubsan_handle_", 15))
2270 		return true;
2271 
2272 	return false;
2273 }
2274 
2275 static int validate_call(struct instruction *insn, struct insn_state *state)
2276 {
2277 	if (state->noinstr && state->instr <= 0 &&
2278 	    !noinstr_call_dest(insn->call_dest)) {
2279 		WARN_FUNC("call to %s() leaves .noinstr.text section",
2280 				insn->sec, insn->offset, call_dest_name(insn));
2281 		return 1;
2282 	}
2283 
2284 	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
2285 		WARN_FUNC("call to %s() with UACCESS enabled",
2286 				insn->sec, insn->offset, call_dest_name(insn));
2287 		return 1;
2288 	}
2289 
2290 	if (state->df) {
2291 		WARN_FUNC("call to %s() with DF set",
2292 				insn->sec, insn->offset, call_dest_name(insn));
2293 		return 1;
2294 	}
2295 
2296 	return 0;
2297 }
2298 
2299 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
2300 {
2301 	if (has_modified_stack_frame(insn, state)) {
2302 		WARN_FUNC("sibling call from callable instruction with modified stack frame",
2303 				insn->sec, insn->offset);
2304 		return 1;
2305 	}
2306 
2307 	return validate_call(insn, state);
2308 }
2309 
/*
 * Validate the state at a return instruction: instrumentation, UACCESS and
 * DF must be balanced, and the stack frame must be restored.  Warns and
 * returns non-zero on the first violation found.
 */
static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
{
	/* noinstr code must not return with instrumentation still counted on. */
	if (state->noinstr && state->instr > 0) {
		WARN_FUNC("return with instrumentation enabled",
			  insn->sec, insn->offset);
		return 1;
	}

	/* Returning with AC set would leak UACCESS into the caller. */
	if (state->uaccess && !func_uaccess_safe(func)) {
		WARN_FUNC("return with UACCESS enabled",
			  insn->sec, insn->offset);
		return 1;
	}

	/* A UACCESS-safe function must not clear AC behind its caller's back. */
	if (!state->uaccess && func_uaccess_safe(func)) {
		WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
			  insn->sec, insn->offset);
		return 1;
	}

	if (state->df) {
		WARN_FUNC("return with DF set",
			  insn->sec, insn->offset);
		return 1;
	}

	/* Only meaningful inside a function body (func != NULL). */
	if (func && has_modified_stack_frame(insn, state)) {
		WARN_FUNC("return with modified stack frame",
			  insn->sec, insn->offset);
		return 1;
	}

	if (state->cfi.bp_scratch) {
		WARN_FUNC("BP used as a scratch register",
			  insn->sec, insn->offset);
		return 1;
	}

	return 0;
}
2350 
2351 /*
2352  * Alternatives should not contain any ORC entries, this in turn means they
2353  * should not contain any CFI ops, which implies all instructions should have
2354  * the same same CFI state.
2355  *
2356  * It is possible to constuct alternatives that have unreachable holes that go
2357  * unreported (because they're NOPs), such holes would result in CFI_UNDEFINED
2358  * states which then results in ORC entries, which we just said we didn't want.
2359  *
2360  * Avoid them by copying the CFI entry of the first instruction into the whole
2361  * alternative.
2362  */
2363 static void fill_alternative_cfi(struct objtool_file *file, struct instruction *insn)
2364 {
2365 	struct instruction *first_insn = insn;
2366 	int alt_group = insn->alt_group;
2367 
2368 	sec_for_each_insn_continue(file, insn) {
2369 		if (insn->alt_group != alt_group)
2370 			break;
2371 		insn->cfi = first_insn->cfi;
2372 	}
2373 }
2374 
2375 /*
2376  * Follow the branch starting at the given instruction, and recursively follow
2377  * any other branches (jumps).  Meanwhile, track the frame pointer state at
2378  * each instruction and validate all the rules described in
2379  * tools/objtool/Documentation/stack-validation.txt.
2380  */
2381 static int validate_branch(struct objtool_file *file, struct symbol *func,
2382 			   struct instruction *insn, struct insn_state state)
2383 {
2384 	struct alternative *alt;
2385 	struct instruction *next_insn;
2386 	struct section *sec;
2387 	u8 visited;
2388 	int ret;
2389 
2390 	sec = insn->sec;
2391 
2392 	while (1) {
2393 		next_insn = next_insn_same_sec(file, insn);
2394 
2395 		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
2396 			WARN("%s() falls through to next function %s()",
2397 			     func->name, insn->func->name);
2398 			return 1;
2399 		}
2400 
2401 		if (func && insn->ignore) {
2402 			WARN_FUNC("BUG: why am I validating an ignored function?",
2403 				  sec, insn->offset);
2404 			return 1;
2405 		}
2406 
2407 		visited = 1 << state.uaccess;
2408 		if (insn->visited) {
2409 			if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
2410 				return 1;
2411 
2412 			if (insn->visited & visited)
2413 				return 0;
2414 		}
2415 
2416 		if (state.noinstr)
2417 			state.instr += insn->instr;
2418 
2419 		if (insn->hint)
2420 			state.cfi = insn->cfi;
2421 		else
2422 			insn->cfi = state.cfi;
2423 
2424 		insn->visited |= visited;
2425 
2426 		if (!insn->ignore_alts && !list_empty(&insn->alts)) {
2427 			bool skip_orig = false;
2428 
2429 			list_for_each_entry(alt, &insn->alts, list) {
2430 				if (alt->skip_orig)
2431 					skip_orig = true;
2432 
2433 				ret = validate_branch(file, func, alt->insn, state);
2434 				if (ret) {
2435 					if (backtrace)
2436 						BT_FUNC("(alt)", insn);
2437 					return ret;
2438 				}
2439 			}
2440 
2441 			if (insn->alt_group)
2442 				fill_alternative_cfi(file, insn);
2443 
2444 			if (skip_orig)
2445 				return 0;
2446 		}
2447 
2448 		if (handle_insn_ops(insn, &state))
2449 			return 1;
2450 
2451 		switch (insn->type) {
2452 
2453 		case INSN_RETURN:
2454 			return validate_return(func, insn, &state);
2455 
2456 		case INSN_CALL:
2457 		case INSN_CALL_DYNAMIC:
2458 			ret = validate_call(insn, &state);
2459 			if (ret)
2460 				return ret;
2461 
2462 			if (!no_fp && func && !is_fentry_call(insn) &&
2463 			    !has_valid_stack_frame(&state)) {
2464 				WARN_FUNC("call without frame pointer save/setup",
2465 					  sec, insn->offset);
2466 				return 1;
2467 			}
2468 
2469 			if (dead_end_function(file, insn->call_dest))
2470 				return 0;
2471 
2472 			if (insn->type == INSN_CALL && insn->call_dest->static_call_tramp) {
2473 				list_add_tail(&insn->static_call_node,
2474 					      &file->static_call_list);
2475 			}
2476 
2477 			break;
2478 
2479 		case INSN_JUMP_CONDITIONAL:
2480 		case INSN_JUMP_UNCONDITIONAL:
2481 			if (func && is_sibling_call(insn)) {
2482 				ret = validate_sibling_call(insn, &state);
2483 				if (ret)
2484 					return ret;
2485 
2486 			} else if (insn->jump_dest) {
2487 				ret = validate_branch(file, func,
2488 						      insn->jump_dest, state);
2489 				if (ret) {
2490 					if (backtrace)
2491 						BT_FUNC("(branch)", insn);
2492 					return ret;
2493 				}
2494 			}
2495 
2496 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
2497 				return 0;
2498 
2499 			break;
2500 
2501 		case INSN_JUMP_DYNAMIC:
2502 		case INSN_JUMP_DYNAMIC_CONDITIONAL:
2503 			if (func && is_sibling_call(insn)) {
2504 				ret = validate_sibling_call(insn, &state);
2505 				if (ret)
2506 					return ret;
2507 			}
2508 
2509 			if (insn->type == INSN_JUMP_DYNAMIC)
2510 				return 0;
2511 
2512 			break;
2513 
2514 		case INSN_CONTEXT_SWITCH:
2515 			if (func && (!next_insn || !next_insn->hint)) {
2516 				WARN_FUNC("unsupported instruction in callable function",
2517 					  sec, insn->offset);
2518 				return 1;
2519 			}
2520 			return 0;
2521 
2522 		case INSN_STAC:
2523 			if (state.uaccess) {
2524 				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2525 				return 1;
2526 			}
2527 
2528 			state.uaccess = true;
2529 			break;
2530 
2531 		case INSN_CLAC:
2532 			if (!state.uaccess && func) {
2533 				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2534 				return 1;
2535 			}
2536 
2537 			if (func_uaccess_safe(func) && !state.uaccess_stack) {
2538 				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2539 				return 1;
2540 			}
2541 
2542 			state.uaccess = false;
2543 			break;
2544 
2545 		case INSN_STD:
2546 			if (state.df)
2547 				WARN_FUNC("recursive STD", sec, insn->offset);
2548 
2549 			state.df = true;
2550 			break;
2551 
2552 		case INSN_CLD:
2553 			if (!state.df && func)
2554 				WARN_FUNC("redundant CLD", sec, insn->offset);
2555 
2556 			state.df = false;
2557 			break;
2558 
2559 		default:
2560 			break;
2561 		}
2562 
2563 		if (insn->dead_end)
2564 			return 0;
2565 
2566 		if (!next_insn) {
2567 			if (state.cfi.cfa.base == CFI_UNDEFINED)
2568 				return 0;
2569 			WARN("%s: unexpected end of section", sec->name);
2570 			return 1;
2571 		}
2572 
2573 		insn = next_insn;
2574 	}
2575 
2576 	return 0;
2577 }
2578 
2579 static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
2580 {
2581 	struct instruction *insn;
2582 	struct insn_state state;
2583 	int ret, warnings = 0;
2584 
2585 	if (!file->hints)
2586 		return 0;
2587 
2588 	init_insn_state(&state, sec);
2589 
2590 	if (sec) {
2591 		insn = find_insn(file, sec, 0);
2592 		if (!insn)
2593 			return 0;
2594 	} else {
2595 		insn = list_first_entry(&file->insn_list, typeof(*insn), list);
2596 	}
2597 
2598 	while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) {
2599 		if (insn->hint && !insn->visited) {
2600 			ret = validate_branch(file, insn->func, insn, state);
2601 			if (ret && backtrace)
2602 				BT_FUNC("<=== (hint)", insn);
2603 			warnings += ret;
2604 		}
2605 
2606 		insn = list_next_entry(insn, list);
2607 	}
2608 
2609 	return warnings;
2610 }
2611 
2612 static int validate_retpoline(struct objtool_file *file)
2613 {
2614 	struct instruction *insn;
2615 	int warnings = 0;
2616 
2617 	for_each_insn(file, insn) {
2618 		if (insn->type != INSN_JUMP_DYNAMIC &&
2619 		    insn->type != INSN_CALL_DYNAMIC)
2620 			continue;
2621 
2622 		if (insn->retpoline_safe)
2623 			continue;
2624 
2625 		/*
2626 		 * .init.text code is ran before userspace and thus doesn't
2627 		 * strictly need retpolines, except for modules which are
2628 		 * loaded late, they very much do need retpoline in their
2629 		 * .init.text
2630 		 */
2631 		if (!strcmp(insn->sec->name, ".init.text") && !module)
2632 			continue;
2633 
2634 		WARN_FUNC("indirect %s found in RETPOLINE build",
2635 			  insn->sec, insn->offset,
2636 			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2637 
2638 		warnings++;
2639 	}
2640 
2641 	return warnings;
2642 }
2643 
2644 static bool is_kasan_insn(struct instruction *insn)
2645 {
2646 	return (insn->type == INSN_CALL &&
2647 		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2648 }
2649 
2650 static bool is_ubsan_insn(struct instruction *insn)
2651 {
2652 	return (insn->type == INSN_CALL &&
2653 		!strcmp(insn->call_dest->name,
2654 			"__ubsan_handle_builtin_unreachable"));
2655 }
2656 
/*
 * Decide whether an instruction that was never visited by validate_branch()
 * should be exempt from the "unreachable instruction" warning.
 */
static bool ignore_unreachable_insn(struct instruction *insn)
{
	int i;

	if (insn->ignore || insn->type == INSN_NOP)
		return true;

	/*
	 * Ignore any unused exceptions.  This can happen when a whitelisted
	 * function has an exception table entry.
	 *
	 * Also ignore alternative replacement instructions.  This can happen
	 * when a whitelisted function uses one of the ALTERNATIVE macros.
	 */
	if (!strcmp(insn->sec->name, ".fixup") ||
	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
	    !strcmp(insn->sec->name, ".altinstr_aux"))
		return true;

	/* The remaining heuristics only apply inside a function body. */
	if (!insn->func)
		return false;

	/*
	 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
	 * __builtin_unreachable().  The BUG() macro has an unreachable() after
	 * the UD2, which causes GCC's undefined trap logic to emit another UD2
	 * (or occasionally a JMP to UD2).
	 */
	if (list_prev_entry(insn, list)->dead_end &&
	    (insn->type == INSN_BUG ||
	     (insn->type == INSN_JUMP_UNCONDITIONAL &&
	      insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
		return true;

	/*
	 * Check if this (or a subsequent) instruction is related to
	 * CONFIG_UBSAN or CONFIG_KASAN.
	 *
	 * End the search at 5 instructions to avoid going into the weeds.
	 */
	for (i = 0; i < 5; i++) {

		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
			return true;

		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
			/* Only follow jumps that stay within this function. */
			if (insn->jump_dest &&
			    insn->jump_dest->func == insn->func) {
				insn = insn->jump_dest;
				continue;
			}

			break;
		}

		/* Stop at the end of the function. */
		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
			break;

		insn = list_next_entry(insn, list);
	}

	return false;
}
2720 
2721 static int validate_symbol(struct objtool_file *file, struct section *sec,
2722 			   struct symbol *sym, struct insn_state *state)
2723 {
2724 	struct instruction *insn;
2725 	int ret;
2726 
2727 	if (!sym->len) {
2728 		WARN("%s() is missing an ELF size annotation", sym->name);
2729 		return 1;
2730 	}
2731 
2732 	if (sym->pfunc != sym || sym->alias != sym)
2733 		return 0;
2734 
2735 	insn = find_insn(file, sec, sym->offset);
2736 	if (!insn || insn->ignore || insn->visited)
2737 		return 0;
2738 
2739 	state->uaccess = sym->uaccess_safe;
2740 
2741 	ret = validate_branch(file, insn->func, insn, *state);
2742 	if (ret && backtrace)
2743 		BT_FUNC("<=== (sym)", insn);
2744 	return ret;
2745 }
2746 
2747 static int validate_section(struct objtool_file *file, struct section *sec)
2748 {
2749 	struct insn_state state;
2750 	struct symbol *func;
2751 	int warnings = 0;
2752 
2753 	list_for_each_entry(func, &sec->symbol_list, list) {
2754 		if (func->type != STT_FUNC)
2755 			continue;
2756 
2757 		init_insn_state(&state, sec);
2758 		state.cfi.cfa = initial_func_cfi.cfa;
2759 		memcpy(&state.cfi.regs, &initial_func_cfi.regs,
2760 		       CFI_NUM_REGS * sizeof(struct cfi_reg));
2761 		state.cfi.stack_size = initial_func_cfi.cfa.offset;
2762 
2763 		warnings += validate_symbol(file, sec, func, &state);
2764 	}
2765 
2766 	return warnings;
2767 }
2768 
2769 static int validate_vmlinux_functions(struct objtool_file *file)
2770 {
2771 	struct section *sec;
2772 	int warnings = 0;
2773 
2774 	sec = find_section_by_name(file->elf, ".noinstr.text");
2775 	if (sec) {
2776 		warnings += validate_section(file, sec);
2777 		warnings += validate_unwind_hints(file, sec);
2778 	}
2779 
2780 	sec = find_section_by_name(file->elf, ".entry.text");
2781 	if (sec) {
2782 		warnings += validate_section(file, sec);
2783 		warnings += validate_unwind_hints(file, sec);
2784 	}
2785 
2786 	return warnings;
2787 }
2788 
2789 static int validate_functions(struct objtool_file *file)
2790 {
2791 	struct section *sec;
2792 	int warnings = 0;
2793 
2794 	for_each_sec(file, sec) {
2795 		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
2796 			continue;
2797 
2798 		warnings += validate_section(file, sec);
2799 	}
2800 
2801 	return warnings;
2802 }
2803 
2804 static int validate_reachable_instructions(struct objtool_file *file)
2805 {
2806 	struct instruction *insn;
2807 
2808 	if (file->ignore_unreachables)
2809 		return 0;
2810 
2811 	for_each_insn(file, insn) {
2812 		if (insn->visited || ignore_unreachable_insn(insn))
2813 			continue;
2814 
2815 		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2816 		return 1;
2817 	}
2818 
2819 	return 0;
2820 }
2821 
2822 int check(struct objtool_file *file)
2823 {
2824 	int ret, warnings = 0;
2825 
2826 	arch_initial_func_cfi_state(&initial_func_cfi);
2827 
2828 	ret = decode_sections(file);
2829 	if (ret < 0)
2830 		goto out;
2831 	warnings += ret;
2832 
2833 	if (list_empty(&file->insn_list))
2834 		goto out;
2835 
2836 	if (vmlinux && !validate_dup) {
2837 		ret = validate_vmlinux_functions(file);
2838 		if (ret < 0)
2839 			goto out;
2840 
2841 		warnings += ret;
2842 		goto out;
2843 	}
2844 
2845 	if (retpoline) {
2846 		ret = validate_retpoline(file);
2847 		if (ret < 0)
2848 			return ret;
2849 		warnings += ret;
2850 	}
2851 
2852 	ret = validate_functions(file);
2853 	if (ret < 0)
2854 		goto out;
2855 	warnings += ret;
2856 
2857 	ret = validate_unwind_hints(file, NULL);
2858 	if (ret < 0)
2859 		goto out;
2860 	warnings += ret;
2861 
2862 	if (!warnings) {
2863 		ret = validate_reachable_instructions(file);
2864 		if (ret < 0)
2865 			goto out;
2866 		warnings += ret;
2867 	}
2868 
2869 	ret = create_static_call_sections(file);
2870 	if (ret < 0)
2871 		goto out;
2872 	warnings += ret;
2873 
2874 out:
2875 	if (ret < 0) {
2876 		/*
2877 		 *  Fatal error.  The binary is corrupt or otherwise broken in
2878 		 *  some way, or objtool itself is broken.  Fail the kernel
2879 		 *  build.
2880 		 */
2881 		return ret;
2882 	}
2883 
2884 	return 0;
2885 }
2886