// SPDX-License-Identifier: GPL-2.0
#include <linux/static_call.h>
#include <linux/memory.h>
#include <linux/bug.h>
#include <asm/text-patching.h>

/*
 * The four instruction forms a static_call site or trampoline can be
 * patched to.  The numeric values matter: __sc_insn() computes them
 * directly as 2*tail + null.
 */
enum insn_type {
	CALL = 0, /* site call */
	NOP = 1,  /* site cond-call */
	JMP = 2,  /* tramp / site tail-call */
	RET = 3,  /* tramp / site cond-tail-call */
};

/*
 * ud1 %esp, %ecx - a 3 byte #UD that is unique to trampolines, chosen such
 * that there is no false-positive trampoline identification while also being a
 * speculation stop.
 */
static const u8 tramp_ud[] = { 0x0f, 0xb9, 0xcc };

/*
 * cs cs cs xorl %eax, %eax - a single 5 byte instruction that clears %[er]ax
 */
static const u8 xor5rax[] = { 0x2e, 0x2e, 0x2e, 0x31, 0xc0 };

/* A bare 'ret' padded with int3 to the 5-byte patch size. */
static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };

/*
 * Rewrite the 5 bytes at @insn to the instruction selected by @type,
 * targeting @func where the type takes a target.
 *
 * @insn:    address of the call site or trampoline to patch
 * @type:    which of the four instruction forms to install
 * @func:    branch target for CALL/JMP; may be NULL for NOP/RET
 * @modinit: true when patching not-yet-live module init text, in which
 *           case the cheap text_poke_early() path is safe
 */
static void __ref __static_call_transform(void *insn, enum insn_type type,
					  void *func, bool modinit)
{
	const void *emulate = NULL;
	int size = CALL_INSN_SIZE;
	const void *code;

	switch (type) {
	case CALL:
		code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
		if (func == &__static_call_return0) {
			/*
			 * Install the 5-byte 'xor %eax,%eax' instead of a
			 * call, but have text_poke_bp() emulate the call
			 * insn for anything that hits the INT3 mid-patch.
			 */
			emulate = code;
			code = &xor5rax;
		}

		break;

	case NOP:
		code = x86_nops[5];
		break;

	case JMP:
		code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
		break;

	case RET:
		/* With rethunk, returns must go through the return thunk. */
		if (cpu_feature_enabled(X86_FEATURE_RETHUNK))
			code = text_gen_insn(JMP32_INSN_OPCODE, insn, &__x86_return_thunk);
		else
			code = &retinsn;
		break;
	}

	/* Already in the desired state; avoid a pointless text_poke. */
	if (memcmp(insn, code, size) == 0)
		return;

	/*
	 * Early boot and module init text is not yet executed by other
	 * CPUs, so a plain memcpy-style poke is sufficient; otherwise use
	 * the INT3-based live-patching machinery.
	 */
	if (system_state == SYSTEM_BOOTING || modinit)
		return text_poke_early(insn, code, size);

	text_poke_bp(insn, code, size, emulate);
}

/*
 * Sanity-check the existing text at @insn before patching it.
 *
 * @tail:  true when the site is a tail-call (expects JMP/RET forms),
 *         false for a regular call site (expects CALL/NOP/xor5rax).
 * @tramp: true when @insn is a trampoline, which must carry the
 *         tramp_ud signature immediately after its first instruction.
 *
 * BUGs on any mismatch: unexpected bytes here mean corrupted text.
 */
static void __static_call_validate(void *insn, bool tail, bool tramp)
{
	u8 opcode = *(u8 *)insn;

	if (tramp && memcmp(insn+5, tramp_ud, 3)) {
		pr_err("trampoline signature fail");
		BUG();
	}

	if (tail) {
		if (opcode == JMP32_INSN_OPCODE ||
		    opcode == RET_INSN_OPCODE)
			return;
	} else {
		if (opcode == CALL_INSN_OPCODE ||
		    !memcmp(insn, x86_nops[5], 5) ||
		    !memcmp(insn, xor5rax, 5))
			return;
	}

	/*
	 * If we ever trigger this, our text is corrupt, we'll probably not live long.
	 */
	pr_err("unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
	BUG();
}

static inline enum insn_type __sc_insn(bool null, bool tail)
{
	/*
	 * Encode the following table without branches:
	 *
	 *	tail	null	insn
	 *	-----+-------+------
	 *	  0  |   0   |  CALL
	 *	  0  |   1   |  NOP
	 *	  1  |   0   |  JMP
	 *	  1  |   1   |  RET
	 */
	return 2*tail + null;
}

/*
 * arch hook for the generic static_call update path: patch the
 * trampoline and/or the inline call site to branch to @func (or to the
 * appropriate NOP/RET form when @func is NULL).  Serialized against
 * all other text patching by text_mutex.
 */
void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
	mutex_lock(&text_mutex);

	if (tramp) {
		/* A trampoline is always a tail-call site. */
		__static_call_validate(tramp, true, true);
		__static_call_transform(tramp, __sc_insn(!func, true), func, false);
	}

	if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
		__static_call_validate(site, tail, false);
		__static_call_transform(site, __sc_insn(!func, tail), func, false);
	}

	mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);

#ifdef CONFIG_RETHUNK
/*
 * This is called by apply_returns() to fix up static call trampolines,
 * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
 * having a return trampoline.
 *
 * The problem is that static_call() is available before determining
 * X86_FEATURE_RETHUNK and, by implication, running alternatives.
 *
 * This means that __static_call_transform() above can have overwritten the
 * return trampoline and we now need to fix things up to be consistent.
 *
 * Returns true when @tramp was a static_call trampoline (whether or not
 * it needed re-patching), false when it is somebody else's text.
 */
bool __static_call_fixup(void *tramp, u8 op, void *dest)
{
	if (memcmp(tramp+5, tramp_ud, 3)) {
		/* Not a trampoline site, not our problem. */
		return false;
	}

	mutex_lock(&text_mutex);
	/*
	 * Re-emit the RET form so it goes through the return thunk now
	 * that X86_FEATURE_RETHUNK is settled.  modinit=true: this runs
	 * from alternative patching, where text_poke_early() is safe.
	 */
	if (op == RET_INSN_OPCODE || dest == &__x86_return_thunk)
		__static_call_transform(tramp, RET, NULL, true);
	mutex_unlock(&text_mutex);

	return true;
}
#endif