/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/assembler.h>
#include <asm/ftrace.h>
#include <asm/unwind.h>

#include "entry-header.S"

/*
 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 * start of every function.  In mcount, apart from the function's address (in
 * lr), we need to get hold of the function's caller's address.
 *
 * Newer GCCs (4.4+) solve this problem by using a version of mcount with call
 * sites like:
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * With these compilers, frame pointers are not necessary.
 *
 * mcount can be thought of as a function called in the middle of a subroutine
 * call.  As such, it needs to be transparent for both the caller and the
 * callee: the original lr needs to be restored when leaving mcount, and no
 * registers should be clobbered.  (In the __gnu_mcount_nc implementation, we
 * clobber the ip register.  This is OK because the ARM calling convention
 * allows it to be clobbered in subroutines and doesn't use it to hold
 * parameters.)
 *
 * When using dynamic ftrace, we patch the call to __gnu_mcount_nc out with a
 * "pop {lr}" instead (see arch/arm/kernel/ftrace.c).
 */
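
/*
 * Purely for illustration, a traced call site therefore toggles between the
 * following two forms when dynamic ftrace enables or disables tracing:
 *
 *	push	{lr}				push	{lr}
 *	bl	__gnu_mcount_nc		<-->	pop	{lr}
 *
 * The "pop {lr}" simply undoes the compiler's "push {lr}", so a disabled
 * call site costs only those two instructions.
 */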

.macro mcount_adjust_addr rd, rn
	bic	\rd, \rn, #1		@ clear the Thumb bit if present
	sub	\rd, \rd, #MCOUNT_INSN_SIZE
.endm
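
/*
 * Illustrative example (MCOUNT_INSN_SIZE is 4 on ARM, per asm/ftrace.h):
 * a Thumb-mode return address of 0xc0008531 becomes 0xc000852c, i.e. the
 * address of the mcount call site, which is the value ftrace uses to
 * identify the instrumented function.
 */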

.macro __mcount suffix
	mcount_enter
	ldr	r0, =ftrace_trace_function
	ldr	r2, [r0]
	adr	r0, .Lftrace_stub
	cmp	r0, r2
	bne	1f

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	ldr     r1, =ftrace_graph_return
	ldr     r2, [r1]
	cmp     r0, r2
	bne     ftrace_graph_caller\suffix

	ldr     r1, =ftrace_graph_entry
	ldr     r2, [r1]
	ldr     r0, =ftrace_graph_entry_stub
	cmp     r0, r2
	bne     ftrace_graph_caller\suffix
#endif

	mcount_exit

1: 	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function
	badr	lr, 2f
	mov	pc, r2
2:	mcount_exit
.endm
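
/*
 * Rough C equivalent of the dispatch above (illustrative only):
 *
 *	if (ftrace_trace_function != ftrace_stub)
 *		ftrace_trace_function(func, parent_lr);
 *	else if (ftrace_graph_return != ftrace_stub ||
 *		 ftrace_graph_entry != ftrace_graph_entry_stub)
 *		ftrace_graph_caller();
 *	else
 *		return;			// nothing is being traced
 */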

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS

.macro __ftrace_regs_caller

	sub	sp, sp, #8	@ space for PC and CPSR; OLD_R0 will
				@ overwrite the previous LR slot below

	add 	ip, sp, #12	@ move in IP the value of SP as it was
				@ before the push {lr} of the mcount mechanism

	str     lr, [sp, #0]    @ store LR instead of PC

	ldr     lr, [sp, #8]    @ get previous LR

	str	r0, [sp, #8]	@ write r0 as OLD_R0 over previous LR

	stmdb   sp!, {ip, lr}
	stmdb   sp!, {r0-r11, lr}

	@ stack content at this point:
	@ 0  4          48   52       56            60   64    68       72
	@ R0 | R1 | ... | LR | SP + 4 | previous LR | LR | PSR | OLD_R0 |
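	@
	@ (For orientation only: this matches the struct pt_regs layout, so
	@ the loads from [sp, #S_LR] (#56) and [sp, #S_PC] (#60) below pick
	@ up the previous LR and the address inside the instrumented func.)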

	mov r3, sp				@ struct pt_regs*

	ldr r2, =function_trace_op
	ldr r2, [r2]				@ pointer to the current
						@ function tracing op

	ldr	r1, [sp, #S_LR]			@ lr of instrumented func

	ldr	lr, [sp, #S_PC]			@ lr at entry (saved PC slot)

	mcount_adjust_addr	r0, lr		@ instrumented function

	.globl ftrace_regs_call
ftrace_regs_call:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_regs_call
ftrace_graph_regs_call:
	mov	r0, r0
#endif
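
	@ Note: the "bl ftrace_stub" at ftrace_regs_call and the "mov r0, r0"
	@ placeholder at ftrace_graph_regs_call above are rewritten at run
	@ time by arch/arm/kernel/ftrace.c when tracing is switched on or off.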

	@ pop saved regs
	ldmia   sp!, {r0-r12}			@ restore r0 through r12
	ldr	ip, [sp, #8]			@ restore PC
	ldr	lr, [sp, #4]			@ restore LR
	ldr	sp, [sp, #0]			@ restore SP
	mov	pc, ip				@ return
.endm
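
/*
 * In the restore sequence above (and the identical one in
 * __ftrace_graph_regs_caller below), the saved PC (via ip), LR and SP are
 * reloaded from the pt_regs frame after popping r0-r12.  Restoring SP last
 * also drops the call site's "push {lr}" word, so the instrumented function
 * resumes with the register state recorded in (and possibly modified
 * through) pt_regs.
 */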

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
.macro __ftrace_graph_regs_caller

	sub     r0, fp, #4              @ &lr of instrumented routine (&parent)

	@ called from __ftrace_regs_caller
	ldr     r1, [sp, #S_PC]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1

	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return

	@ pop registers saved in ftrace_regs_caller
	ldmia   sp!, {r0-r12}			@ restore r0 through r12
	ldr	ip, [sp, #8]			@ restore PC
	ldr	lr, [sp, #4]			@ restore LR
	ldr	sp, [sp, #0]			@ restore SP
	mov	pc, ip				@ return

.endm
#endif
#endif

.macro __ftrace_caller suffix
	mcount_enter

	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
	ldr r2, =function_trace_op
	ldr r2, [r2]				@ pointer to the current
						@ function tracing op
	mov r3, #0				@ regs is NULL
#endif

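	@ The registers now follow the usual ftrace callback convention:
	@   r0 = ip of the instrumented function
	@   r1 = parent_ip (the instrumented function's lr)
	@   r2 = struct ftrace_ops *  (DYNAMIC_FTRACE_WITH_REGS only)
	@   r3 = struct pt_regs *     (NULL here; set up in ftrace_regs_caller)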
	.globl ftrace_call\suffix
ftrace_call\suffix:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_call\suffix
ftrace_graph_call\suffix:
	mov	r0, r0
#endif

	mcount_exit
.endm

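/*
 * prepare_ftrace_return(&parent_lr, func, fp) gets the address of the saved
 * lr so it can replace it with the address of return_to_handler (below);
 * the instrumented function then "returns" into the graph tracer instead of
 * into its real caller.
 */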
.macro __ftrace_graph_caller
	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
#ifdef CONFIG_DYNAMIC_FTRACE
	@ called from __ftrace_caller, saved in mcount_enter
	ldr	r1, [sp, #16]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1
#else
	@ called from __mcount, untouched in lr
	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
#endif
	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return
	mcount_exit
.endm

/*
 * __gnu_mcount_nc
 */

.macro mcount_enter
/*
 * This pad compensates for the push {lr} at the call site.  Note that we are
 * unable to unwind through a function which does not otherwise save its lr.
 */
 UNWIND(.pad	#4)
	stmdb	sp!, {r0-r3, lr}
 UNWIND(.save	{r0-r3, lr})
.endm

.macro mcount_get_lr reg
	ldr	\reg, [sp, #20]
.endm

.macro mcount_exit
	ldmia	sp!, {r0-r3, ip, lr}
	ret	ip
.endm
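
/*
 * Note the asymmetry: mcount_enter pushes five words on top of the one
 * already pushed by the call site, while mcount_exit pops six.  ip receives
 * the saved lr (the return address into the instrumented function) and lr
 * receives the caller's original lr, so "ret ip" resumes the instrumented
 * function with only ip clobbered, as noted at the top of this file.
 */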

ENTRY(__gnu_mcount_nc)
UNWIND(.fnstart)
#ifdef CONFIG_DYNAMIC_FTRACE
	mov	ip, lr
	ldmia	sp!, {lr}
	ret	ip
#else
	__mcount
#endif
UNWIND(.fnend)
ENDPROC(__gnu_mcount_nc)
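
/*
 * With CONFIG_DYNAMIC_FTRACE the default body above simply undoes the call
 * site's "push {lr}" and returns; when tracing is enabled, the patching code
 * in arch/arm/kernel/ftrace.c points call sites at ftrace_caller (or
 * ftrace_regs_caller) below instead.
 */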

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller)
UNWIND(.fnstart)
	__ftrace_caller
UNWIND(.fnend)
ENDPROC(ftrace_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_regs_caller)
UNWIND(.fnstart)
	__ftrace_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_regs_caller)
#endif

#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
UNWIND(.fnstart)
	__ftrace_graph_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_graph_regs_caller)
UNWIND(.fnstart)
	__ftrace_graph_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_regs_caller)
#endif
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
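/*
 * Functions hooked by the graph tracer "return" here: prepare_ftrace_return
 * replaced their saved lr with this address.  ftrace_return_to_handler()
 * hands back the real return address, which we then jump to.
 */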
	.globl return_to_handler
return_to_handler:
	stmdb	sp!, {r0-r3}
	mov	r0, fp			@ frame pointer
	bl	ftrace_return_to_handler
	mov	lr, r0			@ r0 has real ret addr
	ldmia	sp!, {r0-r3}
	ret	lr
#endif

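/*
 * Default no-op tracer: ftrace_trace_function and ftrace_graph_return point
 * here when nothing is being traced, which is what __mcount tests for above.
 */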
ENTRY(ftrace_stub)
.Lftrace_stub:
	ret	lr
ENDPROC(ftrace_stub)
277