/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
 *
 * x86-64 ftrace entry trampolines: the mcount/__fentry__ hook compiled
 * into traced functions lands here.  These stubs build a (partial or
 * full) pt_regs frame on the stack and call into the C tracing code.
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>

	.code64
	.section .entry.text, "ax"

#ifdef CC_USING_FENTRY
# define function_hook	__fentry__
EXPORT_SYMBOL(__fentry__)
#else
# define function_hook	mcount
EXPORT_SYMBOL(mcount)
#endif

#ifdef CONFIG_FRAME_POINTER
# ifdef CC_USING_FENTRY
/* Save parent and function stack frames (rip and rbp) */
#  define MCOUNT_FRAME_SIZE	(8+16*2)
# else
/* Save just function stack frame (rip and rbp) */
#  define MCOUNT_FRAME_SIZE	(8+16)
# endif
#else
/* No need to save a stack frame */
# define MCOUNT_FRAME_SIZE	0
#endif /* CONFIG_FRAME_POINTER */

/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE		(SS+8 + MCOUNT_FRAME_SIZE)

/*
 * gcc -pg option adds a call to 'mcount' in most functions.
 * When -mfentry is used, the call is to 'fentry' and not 'mcount'
 * and is done before the function's stack frame is set up.
 * They both require a set of regs to be saved before calling
 * any C code and restored before returning back to the function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * it. For this reason, the size of the pt_regs structure will be
 * allocated on the stack and the required mcount registers will
 * be saved in the locations that pt_regs has them in.
 */

/*
 * save_mcount_regs - build the (frame-pointer chain and) partial pt_regs
 *
 * @added: the amount of stack added before calling this
 *
 * After this is called, the following registers contain:
 *
 *  %rdi - holds the address that called the trampoline
 *  %rsi - holds the parent function (traced function's return address)
 *  %rdx - holds the original %rbp
 */
.macro save_mcount_regs added=0

#ifdef CONFIG_FRAME_POINTER
	/* Save the original rbp */
	pushq %rbp

	/*
	 * Stack traces will stop at the ftrace trampoline if the frame pointer
	 * is not set up properly. If fentry is used, we need to save a frame
	 * pointer for the parent as well as the function traced, because the
	 * fentry is called before the stack frame is set up, whereas mcount
	 * is called afterward.
	 */
#ifdef CC_USING_FENTRY
	/* Save the parent pointer (skip orig rbp and our return address) */
	pushq \added+8*2(%rsp)
	pushq %rbp
	movq %rsp, %rbp
	/* Save the return address (now skip orig rbp, rbp and parent) */
	pushq \added+8*3(%rsp)
#else
	/* Can't assume that rip is before this (unless added was zero) */
	pushq \added+8(%rsp)
#endif
	pushq %rbp
	movq %rsp, %rbp
#endif /* CONFIG_FRAME_POINTER */

	/*
	 * We add enough stack to save all regs: pt_regs minus the frame
	 * already pushed above.
	 */
	subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	/*
	 * Save the original RBP. Even though the mcount ABI does not
	 * require this, it helps out callers.
	 */
#ifdef CONFIG_FRAME_POINTER
	movq MCOUNT_REG_SIZE-8(%rsp), %rdx
#else
	movq %rbp, %rdx
#endif
	movq %rdx, RBP(%rsp)

	/* Copy the parent address into %rsi (second parameter) */
#ifdef CC_USING_FENTRY
	movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
#else
	/* %rdx contains original %rbp */
	movq 8(%rdx), %rsi
#endif

	/* Move RIP to its proper location */
	movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
	movq %rdi, RIP(%rsp)

	/*
	 * Now %rdi (the first parameter) has the return address of
	 * where ftrace_call returns. But the callbacks expect the
	 * address of the call itself.
	 */
	subq $MCOUNT_INSN_SIZE, %rdi
	.endm

/*
 * restore_mcount_regs - undo save_mcount_regs and pop the whole frame.
 * Restores only the registers save_mcount_regs saved (plus %rbp, which
 * a regs-style callback may have modified).
 */
.macro restore_mcount_regs
	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax

	/* ftrace_regs_caller can modify %rbp */
	movq RBP(%rsp), %rbp

	addq $MCOUNT_REG_SIZE, %rsp

	.endm

#ifdef CONFIG_DYNAMIC_FTRACE

/* With dynamic ftrace the compiler-inserted hook itself is a plain return */
ENTRY(function_hook)
	retq
ENDPROC(function_hook)

ENTRY(ftrace_caller)
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

GLOBAL(ftrace_caller_op_ptr)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

GLOBAL(ftrace_call)
	call ftrace_stub

	restore_mcount_regs

	/*
	 * The code up to this label is copied into trampolines so
	 * think twice before adding any new code or changing the
	 * layout here.
	 */
GLOBAL(ftrace_epilogue)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

/*
 * This is weak to keep gas from relaxing the jumps.
 * It is also used to copy the retq for trampolines.
 */
WEAK(ftrace_stub)
	retq
ENDPROC(ftrace_caller)

ENTRY(ftrace_regs_caller)
	/* Save the current flags before any operations that can change them */
	pushfq

	/* added 8 bytes to save flags */
	save_mcount_regs 8
	/* save_mcount_regs fills in first two parameters */

GLOBAL(ftrace_regs_caller_op_ptr)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq MCOUNT_REG_SIZE(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address and flags */
	leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
	movq %rcx, RSP(%rsp)

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

GLOBAL(ftrace_regs_call)
	call ftrace_stub

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE+8(%rsp)

	/*
	 * restore the rest of pt_regs
	 * (%r11 was saved above for the pt_regs snapshot but is not
	 * restored here; it is call-clobbered in the x86-64 ABI)
	 */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBX(%rsp), %rbx

	restore_mcount_regs

	/* Restore flags */
	popfq

	/*
	 * As this jmp to ftrace_epilogue can be a short jump
	 * it must not be copied into the trampoline.
	 * The trampoline will add the code to jump
	 * to the return.
	 */
GLOBAL(ftrace_regs_caller_end)

	jmp ftrace_epilogue

ENDPROC(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

ENTRY(function_hook)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq

trace:
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

	/*
	 * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
	 * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the
	 * ip and parent ip are used and the list function is called when
	 * function tracing is enabled.
	 */
	movq ftrace_trace_function, %r8
	CALL_NOSPEC %r8
	restore_mcount_regs

	jmp fgraph_trace
ENDPROC(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
	/* Saves rbp into %rdx and fills first parameter */
	save_mcount_regs

#ifdef CC_USING_FENTRY
	leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
	movq $0, %rdx	/* No framepointers needed */
#else
	/* Save address of the return address of traced function */
	leaq 8(%rdx), %rsi
	/* ftrace does sanity checks against frame pointers */
	movq (%rdx), %rdx
#endif
	call prepare_ftrace_return

	restore_mcount_regs

	retq
ENDPROC(ftrace_graph_caller)

/*
 * Jumped to (not called) when a graph-traced function returns; hands the
 * saved return values to ftrace_return_to_handler and jumps to the real
 * return address it gives back.
 */
ENTRY(return_to_handler)
	UNWIND_HINT_EMPTY
	/* Room to save the traced function's return values (rax, rdx) */
	subq  $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi

	call ftrace_return_to_handler

	/* %rax now holds the original return address */
	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	JMP_NOSPEC %rdi
END(return_to_handler)
#endif