/* SPDX-License-Identifier: GPL-2.0-only */

#include <asm/assembler.h>
#include <asm/ftrace.h>
#include <asm/unwind.h>

#include "entry-header.S"

/*
 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 * start of every function.  In mcount, apart from the instrumented function's
 * address (derived from lr), we need to get hold of that function's caller's
 * address.
 *
 * Newer GCCs (4.4+) solve this problem by emitting a __gnu_mcount_nc call
 * site of the form:
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * With these compilers, frame pointers are not necessary.
 *
 * mcount can be thought of as a function called in the middle of a subroutine
 * call.  As such, it needs to be transparent to both the caller and the
 * callee: the original lr needs to be restored when leaving mcount, and no
 * registers should be clobbered.
 *
 * When using dynamic ftrace, the call to __gnu_mcount_nc is patched out and
 * replaced with an "add sp, sp, #4" that simply discards the lr pushed at the
 * call site (see arch/arm/kernel/ftrace.c).
 */
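
/*
 * Illustrative sketch of the two call site states under dynamic ftrace (the
 * actual patching lives in arch/arm/kernel/ftrace.c):
 *
 *	push	{lr}
 *	add	sp, sp, #4		@ tracing of this function disabled
 * vs.
 *	push	{lr}
 *	bl	ftrace_caller		@ tracing enabled (ftrace_regs_caller
 *					@ when pt_regs are requested)
 */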
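/*
 * Turn a return address taken inside an instrumented function (\rn points
 * just past the mcount call site and may have the Thumb bit set) into the
 * value passed to the tracers as the instrumented function's address: clear
 * bit 0 and step back over the MCOUNT_INSN_SIZE bytes of the call.
 */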
.macro mcount_adjust_addr rd, rn
	bic	\rd, \rn, #1		@ clear the Thumb bit if present
	sub	\rd, \rd, #MCOUNT_INSN_SIZE
.endm

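/*
 * Body of the non-dynamic mcount path.  After saving state with mcount_enter:
 * if ftrace_trace_function has been set to anything other than ftrace_stub,
 * call it with r0 = instrumented function and r1 = its parent's return
 * address; otherwise, with the graph tracer configured, branch to
 * ftrace_graph_caller if either ftrace_graph_return or ftrace_graph_entry is
 * no longer the default stub.  If nothing is installed, simply mcount_exit.
 */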
.macro __mcount suffix
	mcount_enter
	ldr_va	r2, ftrace_trace_function
	badr	r0, .Lftrace_stub
	cmp	r0, r2
	bne	1f

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	ldr_va	r2, ftrace_graph_return
	cmp	r0, r2
	bne	ftrace_graph_caller\suffix

	ldr_va	r2, ftrace_graph_entry
	mov_l	r0, ftrace_graph_entry_stub
	cmp	r0, r2
	bne	ftrace_graph_caller\suffix
#endif

	mcount_exit

1:	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function
	badr	lr, 2f
	mov	pc, r2
2:	mcount_exit
.endm

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS

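/*
 * Dynamic ftrace entry point used when a tracer asked for register access
 * (FTRACE_OPS_FL_SAVE_REGS): build a struct pt_regs on the stack describing
 * the state of the instrumented function, call the tracer through the
 * patchable ftrace_regs_call site below, then restore the (possibly
 * modified) register state from that pt_regs and return.
 */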
.macro __ftrace_regs_caller

	str	lr, [sp, #-8]!	@ store LR as PC and make space for CPSR/OLD_R0,
				@ OLD_R0 will overwrite previous LR

	ldr	lr, [sp, #8]	@ get previous LR

	str	r0, [sp, #8]	@ write r0 as OLD_R0 over previous LR

	str	lr, [sp, #-4]!	@ store previous LR as LR

	add	lr, sp, #16	@ move in LR the value of SP as it was
				@ before the push {lr} of the mcount mechanism

	push	{r0-r11, ip, lr}

	@ stack content at this point:
	@ 0  4          48   52       56            60   64    68       72
	@ R0 | R1 | ... | IP | SP + 4 | previous LR | LR | PSR | OLD_R0 |

	mov	r3, sp				@ struct pt_regs*

	ldr_va	r2, function_trace_op		@ pointer to the current
						@ function tracing op

	ldr	r1, [sp, #S_LR]			@ lr of instrumented func

	ldr	lr, [sp, #S_PC]			@ get LR

	mcount_adjust_addr	r0, lr		@ instrumented function

	.globl ftrace_regs_call
ftrace_regs_call:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_regs_call
ftrace_graph_regs_call:
ARM(	mov	r0, r0	)
THUMB(	nop.w		)
#endif

	@ pop saved regs
	pop	{r0-r11, ip, lr}		@ restore r0-r12; lr is reloaded below
	ldr	lr, [sp], #4			@ restore LR
	ldr	pc, [sp], #12			@ return via the saved PC,
						@ discarding the PSR and OLD_R0 slots
.endm

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
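/*
 * Graph tracer hook for the pt_regs variant, reached via the patched
 * ftrace_graph_regs_call site above: r0 = location of the instrumented
 * function's saved return address (its parent), r1 = the instrumented
 * function itself, r2/r3 = frame pointer and the stack pointer at the call
 * site.  prepare_ftrace_return() may then redirect the return path through
 * return_to_handler.
 */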
.macro __ftrace_graph_regs_caller

#ifdef CONFIG_UNWINDER_FRAME_POINTER
	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
#else
	add	r0, sp, #S_LR
#endif

	@ called from __ftrace_regs_caller
	ldr	r1, [sp, #S_PC]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1

	mov	r2, fpreg		@ frame pointer
	add	r3, sp, #PT_REGS_SIZE
	bl	prepare_ftrace_return

	@ pop registers saved in ftrace_regs_caller
	pop	{r0-r11, ip, lr}		@ restore r0-r12; lr is reloaded below
	ldr	lr, [sp], #4			@ restore LR
	ldr	pc, [sp], #12			@ return via the saved PC,
						@ discarding the PSR and OLD_R0 slots

.endm
#endif
#endif

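/*
 * Dynamic ftrace entry point for the common case (no pt_regs needed).  The
 * "bl ftrace_stub" at ftrace_call\suffix below is the site that ftrace
 * rewrites at runtime to call the active tracer, and ftrace_graph_call\suffix
 * is a nop that is turned into a branch to ftrace_graph_caller while the
 * graph tracer is in use (see arch/arm/kernel/ftrace.c).
 */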
.macro __ftrace_caller suffix
	mcount_enter

	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
	ldr_va	r2, function_trace_op		@ pointer to the current
						@ function tracing op
	mov	r3, #0				@ regs is NULL
#endif

	.globl ftrace_call\suffix
ftrace_call\suffix:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_call\suffix
ftrace_graph_call\suffix:
ARM(	mov	r0, r0	)
THUMB(	nop.w		)
#endif

	mcount_exit
.endm

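/*
 * Graph tracer hook shared by __mcount and __ftrace_caller: compute the
 * location of the instrumented function's saved return address (r0), the
 * instrumented function itself (r1), the frame pointer (r2) and the stack
 * pointer at the call site (r3), then let prepare_ftrace_return() divert the
 * return path through return_to_handler.  The sp-relative offsets refer to
 * the frame built by mcount_enter below.
 */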
.macro __ftrace_graph_caller
#ifdef CONFIG_UNWINDER_FRAME_POINTER
	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
#else
	add	r0, sp, #20
#endif
#ifdef CONFIG_DYNAMIC_FTRACE
	@ called from __ftrace_caller, saved in mcount_enter
	ldr	r1, [sp, #16]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1
#else
	@ called from __mcount, untouched in lr
	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
#endif
	mov	r2, fpreg		@ frame pointer
	add	r3, sp, #24
	bl	prepare_ftrace_return
	mcount_exit
.endm

/*
 * __gnu_mcount_nc
 */

.macro mcount_enter
/*
 * This pad compensates for the push {lr} at the call site.  Note that we are
 * unable to unwind through a function which does not otherwise save its lr.
 */
 UNWIND(.pad	#4)
	stmdb	sp!, {r0-r3, lr}
 UNWIND(.save	{r0-r3, lr})
.endm
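
/*
 * Stack layout after mcount_enter, with the instrumented function's original
 * lr already pushed by the call site:
 *
 *	sp + 0..12	r0-r3
 *	sp + 16		lr (return address into the instrumented function)
 *	sp + 20		original lr of the instrumented function (its caller)
 *
 * mcount_get_lr and mcount_exit below rely on this layout, as does
 * __ftrace_graph_caller above.
 */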

.macro mcount_get_lr reg
	ldr	\reg, [sp, #20]
.endm

.macro mcount_exit
	ldmia	sp!, {r0-r3}
	ldr	lr, [sp, #4]		@ restore the instrumented function's lr
	ldr	pc, [sp], #8		@ return, popping the saved return
					@ address and the lr pushed at the
					@ call site
.endm

ENTRY(__gnu_mcount_nc)
UNWIND(.fnstart)
#ifdef CONFIG_DYNAMIC_FTRACE
	@ With dynamic ftrace, call sites branch here only until they are
	@ patched; behave as an empty mcount: restore lr and return, dropping
	@ the word pushed at the call site.
	push	{lr}
	ldr	lr, [sp, #4]
	ldr	pc, [sp], #8
#else
	__mcount
#endif
UNWIND(.fnend)
ENDPROC(__gnu_mcount_nc)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller)
UNWIND(.fnstart)
	__ftrace_caller
UNWIND(.fnend)
ENDPROC(ftrace_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_regs_caller)
UNWIND(.fnstart)
	__ftrace_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_regs_caller)
#endif

#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
UNWIND(.fnstart)
	__ftrace_graph_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_graph_regs_caller)
UNWIND(.fnstart)
	__ftrace_graph_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_regs_caller)
#endif
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
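/*
 * Functions whose return address was diverted by prepare_ftrace_return()
 * come back here instead of returning to their caller.
 * ftrace_return_to_handler() records the exit event and hands back the real
 * return address, which we then jump to.
 */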
ENTRY(return_to_handler)
	stmdb	sp!, {r0-r3}
	add	r0, sp, #16		@ sp at exit of instrumented routine
	bl	ftrace_return_to_handler
	mov	lr, r0			@ r0 has real ret addr
	ldmia	sp!, {r0-r3}
	ret	lr
ENDPROC(return_to_handler)
#endif

ENTRY(ftrace_stub)
.Lftrace_stub:
	ret	lr
ENDPROC(ftrace_stub)

#ifdef CONFIG_DYNAMIC_FTRACE

	__INIT

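/*
 * Trampolines placed in .init.text which do nothing but a long jump to the
 * real ftrace entry points via a literal pool load.  The patching code in
 * arch/arm/kernel/ftrace.c points call sites located in init code at these
 * instead of at ftrace_caller/ftrace_regs_caller directly, presumably so the
 * branch target stays within range of .init.text.
 */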
	.macro	init_tramp, dst:req
ENTRY(\dst\()_from_init)
	ldr	pc, =\dst
ENDPROC(\dst\()_from_init)
	.endm

	init_tramp	ftrace_caller
#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
	init_tramp	ftrace_regs_caller
#endif
#endif