/* SPDX-License-Identifier: GPL-2.0 */
/* copy_page.S: UltraSparc optimized copy page.
 *
 * Copyright (C) 1996, 1998, 1999, 2000, 2004 David S. Miller (davem@redhat.com)
 * Copyright (C) 1997 Jakub Jelinek (jakub@redhat.com)
 */

#include <linux/export.h>
#include <asm/visasm.h>
#include <asm/thread_info.h>
#include <asm/page.h>
#include <linux/pgtable.h>
#include <asm/spitfire.h>
#include <asm/head.h>

	/* What we used to do was lock a TLB entry into a specific
	 * TLB slot, clear the page with interrupts disabled, then
	 * restore the original TLB entry.  This was great for
	 * disturbing the TLB as little as possible, but it meant
	 * we had to keep interrupts disabled for a long time.
	 *
	 * Now, we simply use the normal TLB loading mechanism,
	 * and this makes the cpu choose a slot all by itself.
	 * Then we do a normal TLB flush on exit.  We need only
	 * disable preemption during the copy.
	 */

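	/* The destination page is mapped at TLBTEMP_BASE (plus the
	 * vaddr D-cache alias bit) and the source page DCACHE_SIZE
	 * bytes above that; the same offsets are used to demap both
	 * entries once the copy is done.
	 */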
#define	DCACHE_SIZE	(PAGE_SIZE * 2)

#if (PAGE_SHIFT == 13)
#define PAGE_SIZE_REM	0x80
#elif (PAGE_SHIFT == 16)
#define PAGE_SIZE_REM	0x100
#else
#error Wrong PAGE_SHIFT specified
#endif

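	/* TOUCH copies eight double-precision FP registers into the
	 * %f48-%f62 staging block, which is then written out with a
	 * single 64-byte block store.
	 */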
#define TOUCH(reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7)	\
	fsrc2	%reg0, %f48; 	fsrc2	%reg1, %f50;		\
	fsrc2	%reg2, %f52; 	fsrc2	%reg3, %f54;		\
	fsrc2	%reg4, %f56; 	fsrc2	%reg5, %f58;		\
	fsrc2	%reg6, %f60; 	fsrc2	%reg7, %f62;

	.text

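	/* copy_user_page(dest, src, vaddr): translate both kernel
	 * addresses to physical, build locked TTEs for them, load
	 * those TTEs into the D-TLB at TLBTEMP_BASE with interrupts
	 * disabled around the loads, copy the page through the
	 * temporary mappings, then demap them and restore the
	 * preempt count.
	 */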
	.align		32
	.globl		copy_user_page
	.type		copy_user_page,#function
	EXPORT_SYMBOL(copy_user_page)
copy_user_page:		/* %o0=dest, %o1=src, %o2=vaddr */
	lduw		[%g6 + TI_PRE_COUNT], %o4
	sethi		%hi(PAGE_OFFSET), %g2
	sethi		%hi(PAGE_SIZE), %o3

	ldx		[%g2 + %lo(PAGE_OFFSET)], %g2
	sethi		%hi(PAGE_KERNEL_LOCKED), %g3

	ldx		[%g3 + %lo(PAGE_KERNEL_LOCKED)], %g3
	sub		%o0, %g2, %g1		! dest paddr

	sub		%o1, %g2, %g2		! src paddr

	and		%o2, %o3, %o0		! vaddr D-cache alias bit
	or		%g1, %g3, %g1		! dest TTE data

	or		%g2, %g3, %g2		! src TTE data
	sethi		%hi(TLBTEMP_BASE), %o3

	sethi		%hi(DCACHE_SIZE), %o1
	add		%o0, %o3, %o0		! dest TTE vaddr

	add		%o4, 1, %o2
	add		%o0, %o1, %o1		! src TTE vaddr

	/* Disable preemption.  */
	mov		TLB_TAG_ACCESS, %g3
	stw		%o2, [%g6 + TI_PRE_COUNT]

	/* Load TLB entries.  */
	rdpr		%pstate, %o2
	wrpr		%o2, PSTATE_IE, %pstate
	stxa		%o0, [%g3] ASI_DMMU
	stxa		%g1, [%g0] ASI_DTLB_DATA_IN
	membar		#Sync
	stxa		%o1, [%g3] ASI_DMMU
	stxa		%g2, [%g0] ASI_DTLB_DATA_IN
	membar		#Sync
	wrpr		%o2, 0x0, %pstate

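	/* cheetah_patch_copy_page() overwrites the branch below with a
	 * nop at boot on Cheetah class cpus, so they fall through into
	 * the prefetching copy loop at 1f; everyone else branches to
	 * the block load/store version at 9f.
	 */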
cheetah_copy_page_insn:
	ba,pt		%xcc, 9f
	 nop

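	/* Cheetah: software-pipelined copy.  Prefetch the source well
	 * ahead, gather 64 bytes at a time with ldd/fsrc2 into
	 * %f16-%f30, and write each group out with a block store.
	 */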
1:
	VISEntryHalf
	membar		#StoreLoad | #StoreStore | #LoadStore
	sethi		%hi((PAGE_SIZE/64)-2), %o2
	mov		%o0, %g1
	prefetch	[%o1 + 0x000], #one_read
	or		%o2, %lo((PAGE_SIZE/64)-2), %o2
	prefetch	[%o1 + 0x040], #one_read
	prefetch	[%o1 + 0x080], #one_read
	prefetch	[%o1 + 0x0c0], #one_read
	ldd		[%o1 + 0x000], %f0
	prefetch	[%o1 + 0x100], #one_read
	ldd		[%o1 + 0x008], %f2
	prefetch	[%o1 + 0x140], #one_read
	ldd		[%o1 + 0x010], %f4
	prefetch	[%o1 + 0x180], #one_read
	fsrc2		%f0, %f16
	ldd		[%o1 + 0x018], %f6
	fsrc2		%f2, %f18
	ldd		[%o1 + 0x020], %f8
	fsrc2		%f4, %f20
	ldd		[%o1 + 0x028], %f10
	fsrc2		%f6, %f22
	ldd		[%o1 + 0x030], %f12
	fsrc2		%f8, %f24
	ldd		[%o1 + 0x038], %f14
	fsrc2		%f10, %f26
	ldd		[%o1 + 0x040], %f0
1:	ldd		[%o1 + 0x048], %f2
	fsrc2		%f12, %f28
	ldd		[%o1 + 0x050], %f4
	fsrc2		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	ldd		[%o1 + 0x058], %f6
	fsrc2		%f0, %f16
	ldd		[%o1 + 0x060], %f8
	fsrc2		%f2, %f18
	ldd		[%o1 + 0x068], %f10
	fsrc2		%f4, %f20
	ldd		[%o1 + 0x070], %f12
	fsrc2		%f6, %f22
	ldd		[%o1 + 0x078], %f14
	fsrc2		%f8, %f24
	ldd		[%o1 + 0x080], %f0
	prefetch	[%o1 + 0x180], #one_read
	fsrc2		%f10, %f26
	subcc		%o2, 1, %o2
	add		%o0, 0x40, %o0
	bne,pt		%xcc, 1b
	 add		%o1, 0x40, %o1

	ldd		[%o1 + 0x048], %f2
	fsrc2		%f12, %f28
	ldd		[%o1 + 0x050], %f4
	fsrc2		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	ldd		[%o1 + 0x058], %f6
	fsrc2		%f0, %f16
	ldd		[%o1 + 0x060], %f8
	fsrc2		%f2, %f18
	ldd		[%o1 + 0x068], %f10
	fsrc2		%f4, %f20
	ldd		[%o1 + 0x070], %f12
	fsrc2		%f6, %f22
	add		%o0, 0x40, %o0
	ldd		[%o1 + 0x078], %f14
	fsrc2		%f8, %f24
	fsrc2		%f10, %f26
	fsrc2		%f12, %f28
	fsrc2		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	membar		#Sync
	VISExitHalf
	ba,pt		%xcc, 5f
	 nop

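	/* Generic (pre-Cheetah) path: full VIS save, 64-byte block
	 * loads from the source, and block stores through %asi.  If
	 * the thread's fault code byte is set, the stores use
	 * ASI_BLK_COMMIT_P instead of ASI_BLK_P.
	 */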
9:
	VISEntry
	ldub		[%g6 + TI_FAULT_CODE], %g3
	mov		%o0, %g1
	cmp		%g3, 0
	rd		%asi, %g3
	be,a,pt		%icc, 1f
	 wr		%g0, ASI_BLK_P, %asi
	wr		%g0, ASI_BLK_COMMIT_P, %asi
1:	ldda		[%o1] ASI_BLK_P, %f0
	add		%o1, 0x40, %o1
	ldda		[%o1] ASI_BLK_P, %f16
	add		%o1, 0x40, %o1
	sethi		%hi(PAGE_SIZE), %o2
1:	TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
	ldda		[%o1] ASI_BLK_P, %f32
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
	ldda		[%o1] ASI_BLK_P, %f0
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f32, f34, f36, f38, f40, f42, f44, f46)
	ldda		[%o1] ASI_BLK_P, %f16
	stda		%f48, [%o0] %asi
	sub		%o2, 0x40, %o2
	add		%o1, 0x40, %o1
	cmp		%o2, PAGE_SIZE_REM
	bne,pt		%xcc, 1b
	 add		%o0, 0x40, %o0
#if (PAGE_SHIFT == 16)
	TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
	ldda		[%o1] ASI_BLK_P, %f32
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
	ldda		[%o1] ASI_BLK_P, %f0
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	membar		#Sync
	stda		%f32, [%o0] %asi
	add		%o0, 0x40, %o0
	stda		%f0, [%o0] %asi
#else
	membar		#Sync
	stda		%f0, [%o0] %asi
	add		%o0, 0x40, %o0
	stda		%f16, [%o0] %asi
#endif
	membar		#Sync
	wr		%g3, 0x0, %asi
	VISExit

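	/* Done: demap the two temporary TLB entries, restore the
	 * original preempt count, and return.
	 */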
5:
	stxa		%g0, [%g1] ASI_DMMU_DEMAP
	membar		#Sync

	sethi		%hi(DCACHE_SIZE), %g2
	stxa		%g0, [%g1 + %g2] ASI_DMMU_DEMAP
	membar		#Sync

	retl
	 stw		%o4, [%g6 + TI_PRE_COUNT]

	.size		copy_user_page, .-copy_user_page

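	/* Run at boot on Cheetah class cpus: patch the branch at
	 * cheetah_copy_page_insn into a nop (0x01000000 encodes
	 * "sethi 0, %g0") and flush the modified instruction.
	 */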
	.globl		cheetah_patch_copy_page
cheetah_patch_copy_page:
	sethi		%hi(0x01000000), %o1	! NOP
	sethi		%hi(cheetah_copy_page_insn), %o0
	or		%o0, %lo(cheetah_copy_page_insn), %o0
	stw		%o1, [%o0]
	membar		#StoreStore
	flush		%o0
	retl
	 nop