/* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
 *
 * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
 * Copyright (C) 1996 Eddie C. Dost        (ecd@brainaid.de)
 * Copyright (C) 1996 Miguel de Icaza      (miguel@nuclecu.unam.mx)
 * Copyright (C) 1996,98,99 Jakub Jelinek  (jj@sunsite.mff.cuni.cz)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tsb.h>

	.text
	.align		32

kvmap_itlb:
	/* g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_IMMU, %g4

	/* sun4v_itlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_itlb_4v:

	/* Catch kernel NULL pointer calls.  */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_longpath
	 nop

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)

kvmap_itlb_tsb_miss:
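	/* The TSB probe above branches straight to kvmap_itlb_load on a
	 * hit, with the PTE left in %g5.  On a miss, classify the
	 * address: below LOW_OBP_ADDRESS it is a vmalloc/module mapping
	 * resolved via the kernel page tables, the range
	 * [LOW_OBP_ADDRESS, 4GB) is an OBP (firmware) translation, and
	 * anything higher also falls through to the page table walk.
	 */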
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_obp
	 nop

kvmap_itlb_vmalloc_addr:
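	/* Walk the kernel page tables; a failed walk takes the long
	 * path.  On success the PTE is in %g5, so cache it in the
	 * kernel TSB and then load it into the ITLB.
	 */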
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_itlb_load:

661:	stxa		%g5, [%g0] ASI_ITLB_DATA_IN
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v TLB load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6:	TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_itlb_load
	 mov		%g5, %g3

kvmap_itlb_longpath:
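	/* Slow path: leave the MMU globals (sun4v: bump the global
	 * register level instead), note the faulting PC as the fault
	 * address, and hand off to the common fault handler with
	 * FAULT_CODE_ITLB in %g4.
	 */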

661:	rdpr	%pstate, %g5
	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section .sun4v_2insn_patch, "ax"
	.word	661b
	SET_GL(1)
	nop
	.previous

	rdpr	%tpc, %g5
	ba,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_ITLB, %g4

kvmap_itlb_obp:
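	/* The address falls in the OBP range: look it up in the
	 * OpenBoot PROM translations table, cache the resulting PTE in
	 * the kernel TSB and load it.
	 */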
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_itlb_load
	 nop

kvmap_dtlb_obp:
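	/* Same as above, but for data TLB misses on OBP addresses.  */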
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_dtlb_load
	 nop

kvmap_linear_early:
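	/* Early boot, before the kernel page tables cover the linear
	 * mapping: form the PTE directly by XOR-ing the virtual address
	 * with kern_linear_pte_xor.
	 */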
	sethi		%hi(kern_linear_pte_xor), %g7
	ldx		[%g7 + %lo(kern_linear_pte_xor)], %g2
	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
	 xor		%g2, %g4, %g5

	.align		32
kvmap_dtlb_tsb4m_load:
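	/* Insert the PTE (%g5) into the TSB entry found by the
	 * preceding lookup (%g1), then load it into the DTLB.
	 */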
	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)
	ba,pt		%xcc, kvmap_dtlb_load
	 nop

kvmap_dtlb:
	/* %g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g4

	/* sun4v_dtlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_dtlb_4v:
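	/* Kernel linear mapping addresses have bit 63 set; everything
	 * else (vmalloc, modules, OBP, vmemmap) is handled as a
	 * non-linear address below.
	 */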
	brgez,pn	%g4, kvmap_dtlb_nonlinear
	 nop

#ifdef CONFIG_DEBUG_PAGEALLOC
	/* Index through the base page size TSB even for linear
	 * mappings when using page allocation debugging.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
	/* Correct TAG_TARGET is already in %g6, check 4mb TSB.  */
	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
	/* Linear mapping TSB lookup failed.  Fallthrough to kernel
	 * page table based lookup.
	 */
	.globl		kvmap_linear_patch
kvmap_linear_patch:
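	/* Boot-time patch site (see arch/sparc/mm/init_64.c): once the
	 * kernel page tables cover the linear mapping, this branch is
	 * overwritten so that misses fall through to the page table
	 * walk below instead of using kvmap_linear_early.
	 */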
	ba,a,pt		%xcc, kvmap_linear_early

kvmap_dtlb_vmalloc_addr:
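	/* Non-linear kernel address: walk the kernel page tables,
	 * insert the PTE into the TSB and load it into the DTLB.
	 */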
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_dtlb_load:

661:	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v TLB load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6:	TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_dtlb_load
	 mov		%g5, %g3

#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
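	/* vmemmap is mapped through the kernel page tables as well, but
	 * is loaded straight into the TLB without caching the PTE in
	 * the TSB (see the check in kvmap_dtlb_nonlinear below).
	 */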
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
	ba,a,pt		%xcc, kvmap_dtlb_load
#endif

kvmap_dtlb_nonlinear:
	/* Catch kernel NULL pointer derefs.  */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	bleu,pn		%xcc, kvmap_dtlb_longpath
	 nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
	/* Do not use the TSB for vmemmap.  */
	sethi		%hi(VMEMMAP_BASE), %g5
	ldx		[%g5 + %lo(VMEMMAP_BASE)], %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_vmemmap
	 nop
#endif

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)

kvmap_dtlb_tsbmiss:
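	/* The kernel TSB probe above missed.  Only the module/vmalloc
	 * window [MODULES_VADDR, VMALLOC_END) is valid here; anything
	 * outside it takes the long path.
	 */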
	sethi		%hi(MODULES_VADDR), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_longpath
	 sethi		%hi(VMALLOC_END), %g5
	ldx		[%g5 + %lo(VMALLOC_END)], %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_dtlb_longpath
	 nop

kvmap_check_obp:
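	/* Addresses below LOW_OBP_ADDRESS or at/above 4GB go through
	 * the kernel page tables; the window [LOW_OBP_ADDRESS, 4GB)
	 * holds OBP (firmware) translations.
	 */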
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_obp
	 nop
	ba,pt		%xcc, kvmap_dtlb_vmalloc_addr
	 nop

kvmap_dtlb_longpath:
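	/* Slow path: leave the MMU globals (sun4v: bump the global
	 * register level and pick up the HV fault status area pointer),
	 * fetch the fault address, and hand off with FAULT_CODE_DTLB in
	 * %g4.  At TL > 1 the miss came from a window spill/fill
	 * handler, so route it through winfix_trampoline instead of
	 * sparc64_realfault_common.
	 */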

661:	rdpr	%pstate, %g5
	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section .sun4v_2insn_patch, "ax"
	.word	661b
	SET_GL(1)
	ldxa		[%g0] ASI_SCRATCHPAD, %g5
	.previous

	rdpr	%tl, %g3
	cmp	%g3, 1

661:	mov	TLB_TAG_ACCESS, %g4
	ldxa	[%g4] ASI_DMMU, %g5
	.section .sun4v_2insn_patch, "ax"
	.word	661b
	ldx	[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
	nop
	.previous

	be,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_DTLB, %g4
	ba,pt	%xcc, winfix_trampoline
	 nop
258