xref: /openbmc/linux/arch/x86/kernel/static_call.c (revision 1ddbddd7)
1 // SPDX-License-Identifier: GPL-2.0
2 #include <linux/static_call.h>
3 #include <linux/memory.h>
4 #include <linux/bug.h>
5 #include <asm/text-patching.h>
6 
/*
 * The four instruction forms a static_call site/trampoline can take.
 * The numeric values are significant: __sc_insn() computes them directly
 * as 2*tail + null, so do not reorder or renumber.
 */
enum insn_type {
	CALL = 0, /* site call */
	NOP = 1,  /* site cond-call */
	JMP = 2,  /* tramp / site tail-call */
	RET = 3,  /* tramp / site cond-tail-call */
};
13 
/*
 * data16 data16 xorq %rax, %rax - a single 5 byte instruction that clears %rax
 * The REX.W cancels the effect of any data16.
 *
 * Being exactly CALL_INSN_SIZE (5) bytes, it can be patched over a call
 * site in place of the actual call instruction.
 */
static const u8 xor5rax[] = { 0x66, 0x66, 0x48, 0x31, 0xc0 };
19 
20 static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };
21 
/*
 * Rewrite the CALL_INSN_SIZE (5) byte instruction at @insn into the form
 * selected by @type, targeting @func for CALL/JMP.
 *
 * A CALL to __static_call_return0 is special-cased: the site gets the
 * xor5rax sequence (clears %rax, i.e. "return 0") instead of a real call,
 * while the generated call is handed to text_poke_bp() as the instruction
 * to emulate during the transition.
 */
static void __ref __static_call_transform(void *insn, enum insn_type type, void *func)
{
	const void *emulate = NULL;
	int size = CALL_INSN_SIZE;
	const void *code;

	switch (type) {
	case CALL:
		code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
		if (func == &__static_call_return0) {
			/* patch in xor5rax, but emulate the call while patching */
			emulate = code;
			code = &xor5rax;
		}

		break;

	case NOP:
		code = x86_nops[5];
		break;

	case JMP:
		code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
		break;

	case RET:
		code = &retinsn;
		break;
	}

	/* Already in the wanted form; nothing to patch. */
	if (memcmp(insn, code, size) == 0)
		return;

	/* Early boot is single-threaded: a plain early poke is sufficient. */
	if (unlikely(system_state == SYSTEM_BOOTING))
		return text_poke_early(insn, code, size);

	text_poke_bp(insn, code, size, emulate);
}
59 
60 static void __static_call_validate(void *insn, bool tail, bool tramp)
61 {
62 	u8 opcode = *(u8 *)insn;
63 
64 	if (tramp && memcmp(insn+5, "SCT", 3)) {
65 		pr_err("trampoline signature fail");
66 		BUG();
67 	}
68 
69 	if (tail) {
70 		if (opcode == JMP32_INSN_OPCODE ||
71 		    opcode == RET_INSN_OPCODE)
72 			return;
73 	} else {
74 		if (opcode == CALL_INSN_OPCODE ||
75 		    !memcmp(insn, x86_nops[5], 5) ||
76 		    !memcmp(insn, xor5rax, 5))
77 			return;
78 	}
79 
80 	/*
81 	 * If we ever trigger this, our text is corrupt, we'll probably not live long.
82 	 */
83 	pr_err("unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
84 	BUG();
85 }
86 
87 static inline enum insn_type __sc_insn(bool null, bool tail)
88 {
89 	/*
90 	 * Encode the following table without branches:
91 	 *
92 	 *	tail	null	insn
93 	 *	-----+-------+------
94 	 *	  0  |   0   |  CALL
95 	 *	  0  |   1   |  NOP
96 	 *	  1  |   0   |  JMP
97 	 *	  1  |   1   |  RET
98 	 */
99 	return 2*tail + null;
100 }
101 
/*
 * Retarget a static_call: patch the out-of-line trampoline @tramp and,
 * when CONFIG_HAVE_STATIC_CALL_INLINE is enabled, the inline call @site
 * to invoke @func.
 *
 * @func == NULL selects the "null" form (NOP at a call site, RET in a
 * trampoline); @tail marks the site as a tail-call. text_mutex is held
 * across validation and patching, as the text-poking code requires.
 */
void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
	mutex_lock(&text_mutex);

	if (tramp) {
		/* a trampoline is always a (tail) JMP or RET */
		__static_call_validate(tramp, true, true);
		__static_call_transform(tramp, __sc_insn(!func, true), func);
	}

	if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
		__static_call_validate(site, tail, false);
		__static_call_transform(site, __sc_insn(!func, tail), func);
	}

	mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);
119