/*
 * arch/xtensa/kernel/align.S
 *
 * Handle unalignment exceptions in kernel space.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2001 - 2005 Tensilica, Inc.
 * Copyright (C) 2014 Cadence Design Systems Inc.
 *
 * Rewritten by Chris Zankel <chris@zankel.net>
 *
 * Based on work from Joe Taylor <joe@tensilica.com, joetylr@yahoo.com>
 * and Marc Gauthier <marc@tensilica.com, marc@alumni.uwaterloo.ca>
 */

#include <linux/linkage.h>
#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/asmmacro.h>
#include <asm/processor.h>

#if XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION

/*  First-level exception handler for unaligned exceptions.
 *
 *  Note: With UNALIGNED_USER_EXCEPTION defined (as it is below), this
 *        handler also emulates unaligned user accesses; without it,
 *        it works only for kernel exceptions and unaligned user
 *        access should get a seg fault.
 */

/* Big and little endian 16-bit values are located in
 * different halves of a register.  HWORD_START helps to
 * abstract the notion of extracting a 16-bit value from a
 * register.
 * We also have to define new shifting instructions because
 * lsb and msb are on 'opposite' ends in a register for
 * different endian machines.
 *
 * Assume a memory region in ascending address:
 *	0 1 2 3|4 5 6 7
 *
 * When loading one word into a register, the content of that register is:
 *  LE	3 2 1 0, 7 6 5 4
 *  BE	0 1 2 3, 4 5 6 7
 *
 * Masking the bits of the higher/lower address means:
 *  LE	X X 0 0, 0 0 X X
 *  BE	0 0 X X, X X 0 0
 *
 * Shifting to higher/lower addresses, means:
 *  LE	shift left / shift right
 *  BE	shift right / shift left
 *
 * Extracting 16 bits from a 32 bit reg. value to higher/lower address means:
 *  LE	mask 0 0 X X / shift left
 *  BE	shift left / mask 0 0 X X
 */
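
/* For example: if the four bytes at ascending addresses 0..3 hold
 * 0x11 0x22 0x33 0x44, an aligned l32i yields 0x44332211 on a
 * little-endian core and 0x11223344 on a big-endian core.  The macros
 * below hide this difference from the emulation code.
 */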

#define UNALIGNED_USER_EXCEPTION

#if XCHAL_HAVE_BE

#define HWORD_START	16
#define	INSN_OP0	28
#define	INSN_T		24
#define	INSN_OP1	16

.macro __ssa8r	r;		ssa8l	\r;		.endm
.macro __sh	r, s;		srl	\r, \s;		.endm
.macro __sl	r, s;		sll	\r, \s;		.endm
.macro __exth	r, s;		extui	\r, \s, 0, 16;	.endm
.macro __extl	r, s;		slli	\r, \s, 16;	.endm

#else

#define HWORD_START	0
#define	INSN_OP0	0
#define	INSN_T		4
#define	INSN_OP1	12

.macro __ssa8r	r;		ssa8b	\r;		.endm
.macro __sh	r, s;		sll	\r, \s;		.endm
.macro __sl	r, s;		srl	\r, \s;		.endm
.macro __exth	r, s;		slli	\r, \s, 16;	.endm
.macro __extl	r, s;		extui	\r, \s, 0, 16;	.endm

#endif
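
/* A reading of the helpers above: __ssa8r sets SAR for the reversed,
 * byte-granular funnel shift; __sh/__sl shift a value toward the
 * higher/lower memory address; __extl places a 16-bit value at the
 * HWORD_START position; and __exth builds the mask for the opposite
 * register half, independent of endianness.
 */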

/*
 *	xxxx xxxx = imm8 field
 *	     yyyy = imm4 field
 *	     ssss = s field
 *	     tttt = t field
 *
 *	       15		    0
 *		-------------------
 *	L32I.N		  yyyy ssss tttt 1000
 *	S32I.N		  yyyy ssss tttt 1001
 *
 *	       23			    0
 *		-----------------------------
 *	res		  0000           0010
 *	L16UI	xxxx xxxx 0001 ssss tttt 0010
 *	L32I	xxxx xxxx 0010 ssss tttt 0010
 *	XXX		  0011 ssss tttt 0010
 *	XXX		  0100 ssss tttt 0010
 *	S16I	xxxx xxxx 0101 ssss tttt 0010
 *	S32I	xxxx xxxx 0110 ssss tttt 0010
 *	XXX		  0111 ssss tttt 0010
 *	XXX		  1000 ssss tttt 0010
 *	L16SI	xxxx xxxx 1001 ssss tttt 0010
 *	XXX		  1010           0010
 *	**L32AI	xxxx xxxx 1011 ssss tttt 0010 unsupported
 *	XXX		  1100           0010
 *	XXX		  1101           0010
 *	XXX		  1110           0010
 *	**S32RI	xxxx xxxx 1111 ssss tttt 0010 unsupported
 *		-----------------------------
 *                           ^         ^    ^
 *    sub-opcode (NIBBLE_R) -+         |    |
 *       t field (NIBBLE_T) -----------+    |
 *  major opcode (NIBBLE_OP0) --------------+
 */
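
/* Worked example (assuming the standard RRI8 layout drawn above):
 * "l32i a5, a3, 4" has imm8 = 4/4 = 1, r = 0010 (L32I), s = 0011,
 * t = 0101, op0 = 0010, i.e. the 24-bit word 0x012352.  The handler
 * below recovers op0, the t field, and the r sub-opcode with extui
 * at INSN_OP0, INSN_T, and INSN_OP1.
 */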

#define OP0_L32I_N	0x8		/* load immediate narrow */
#define OP0_S32I_N	0x9		/* store immediate narrow */
#define OP1_SI_MASK	0x4		/* OP1 bit set for stores */
#define OP1_SI_BIT	2		/* OP1 bit number for stores */

#define OP1_L32I	0x2
#define OP1_L16UI	0x1
#define OP1_L16SI	0x9
#define OP1_L32AI	0xb

#define OP1_S32I	0x6
#define OP1_S16I	0x5
#define OP1_S32RI	0xf

/*
 * Entry condition:
 *
 *   a0:	trashed, original value saved on stack (PT_AREG0)
 *   a1:	a1
 *   a2:	new stack pointer, original in DEPC
 *   a3:	a3
 *   depc:	a2, original value saved on stack (PT_DEPC)
 *   excsave_1:	dispatch table
 *
 *   PT_DEPC >= VALID_DOUBLE_EXCEPTION_ADDRESS: double exception, DEPC
 *	     <  VALID_DOUBLE_EXCEPTION_ADDRESS: regular exception
 */

	.literal_position
ENTRY(fast_unaligned)

	/* Note: We don't expect the address to be aligned on a word
	 *       boundary.  The processor raised this exception precisely
	 *       because it isn't; an aligned address here would indicate
	 *       a hardware fault.
	 */

	/* Save some working registers */

	s32i	a4, a2, PT_AREG4
	s32i	a5, a2, PT_AREG5
	s32i	a6, a2, PT_AREG6
	s32i	a7, a2, PT_AREG7
	s32i	a8, a2, PT_AREG8

	rsr	a0, depc
	s32i	a0, a2, PT_AREG2
	s32i	a3, a2, PT_AREG3

	rsr	a3, excsave1
	movi	a4, fast_unaligned_fixup
	s32i	a4, a3, EXC_TABLE_FIXUP

	/* Keep value of SAR in a0 */

	rsr	a0, sar
	rsr	a8, excvaddr		# load unaligned memory address

	/* Now, identify one of the following load/store instructions.
	 *
	 * The only possible danger of a double exception on the
	 * following l32i instructions is kernel code in vmalloc
	 * memory. The processor was just executing at the EPC_1
	 * address, and indeed, already fetched the instruction.  That
	 * guarantees a TLB mapping, which hasn't been replaced by
	 * this unaligned exception handler that uses only static TLB
	 * mappings. However, high-level interrupt handlers might
	 * modify TLB entries, so for the generic case, we register a
	 * TABLE_FIXUP handler here, too.
	 */

	/* a3...a8 saved on stack, a2 = SP */

	/* Extract the instruction that caused the unaligned access. */

	rsr	a7, epc1	# load exception address
	movi	a3, ~3
	and	a3, a3, a7	# mask lower bits

	l32i	a4, a3, 0	# load 2 words
	l32i	a5, a3, 4

	__ssa8	a7
	__src_b	a4, a4, a5	# a4 has the instruction
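
	/* Note on the sequence above: __ssa8 sets SAR from the low two
	 * bits of the PC, so the __src_b funnel shift of a4:a5 extracts
	 * the instruction into a4 even when it straddles the word
	 * boundary.
	 */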

	/* Analyze the instruction (load or store?). */

	extui	a5, a4, INSN_OP0, 4	# get insn.op0 nibble

#if XCHAL_HAVE_DENSITY
	_beqi	a5, OP0_L32I_N, .Lload	# L32I.N, jump
	addi	a6, a5, -OP0_S32I_N
	_beqz	a6, .Lstore		# S32I.N, do a store
#endif
	/* 'store indicator bit' not set, jump */
	_bbci.l	a4, OP1_SI_BIT + INSN_OP1, .Lload
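
	/* This single bit test suffices because, in the table above,
	 * bit 2 of the OP1 nibble is set for every store (S16I = 0101,
	 * S32I = 0110, S32RI = 1111) and clear for every load
	 * (L16UI = 0001, L32I = 0010, L16SI = 1001, L32AI = 1011).
	 */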

	/* Store: Jump to table entry to get the value in the source register. */

.Lstore:movi	a5, .Lstore_table	# table
	extui	a6, a4, INSN_T, 4	# get source register
	addx8	a5, a6, a5
	jx	a5			# jump into table
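
	/* Each entry in .Lstore_table below is padded to 8 bytes
	 * (.align 8), so addx8 (a5 = a6 * 8 + a5) computes the address
	 * of the entry for source register a6 directly.
	 */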

	/* Load: Load memory address. */

.Lload: movi	a3, ~3
	and	a3, a3, a8		# align memory address

	__ssa8	a8
#ifdef UNALIGNED_USER_EXCEPTION
	addi	a3, a3, 8
	l32e	a5, a3, -8
	l32e	a6, a3, -4
#else
	l32i	a5, a3, 0
	l32i	a6, a3, 4
#endif
	__src_b	a3, a5, a6		# a3 has the data word
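
	/* Same funnel-shift trick as the instruction fetch above, this
	 * time keyed off the faulting data address.  L32E only takes
	 * small negative offsets, hence the "addi a3, a3, 8" and the
	 * -8/-4 offsets; it also applies user-ring access checks, so a
	 * bad user pointer faults into fast_unaligned_fixup instead of
	 * being read with kernel privileges.
	 */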

#if XCHAL_HAVE_DENSITY
	addi	a7, a7, 2		# increment PC (assume 16-bit insn)

	extui	a5, a4, INSN_OP0, 4
	_beqi	a5, OP0_L32I_N, 1f	# l32i.n: jump

	addi	a7, a7, 1
#else
	addi	a7, a7, 3
#endif
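
	/* a7 now points just past the emulated instruction (narrow
	 * density instructions are 2 bytes, all other forms here are 3).
	 */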

	extui	a5, a4, INSN_OP1, 4
	_beqi	a5, OP1_L32I, 1f	# l32i: jump

	extui	a3, a3, 0, 16		# extract lower 16 bits
	_beqi	a5, OP1_L16UI, 1f
	addi	a5, a5, -OP1_L16SI
	_bnez	a5, .Linvalid_instruction_load

	/* sign extend value */

	slli	a3, a3, 16
	srai	a3, a3, 16
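
	/* (The shift pair replicates bit 15 of the halfword: a loaded
	 *  0x8001 becomes 0xffff8001, while 0x7fff stays 0x00007fff.)
	 */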

	/* Set target register. */

1:
	extui	a4, a4, INSN_T, 4	# extract target register
	movi	a5, .Lload_table
	addx8	a4, a4, a5
	jx	a4			# jump to entry for target register

	.align	8
.Lload_table:
	s32i	a3, a2, PT_AREG0;	_j .Lexit;	.align 8
	mov	a1, a3;			_j .Lexit;	.align 8 # fishy??
	s32i	a3, a2, PT_AREG2;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG3;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG4;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG5;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG6;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG7;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG8;	_j .Lexit;	.align 8
	mov	a9, a3		;	_j .Lexit;	.align 8
	mov	a10, a3		;	_j .Lexit;	.align 8
	mov	a11, a3		;	_j .Lexit;	.align 8
	mov	a12, a3		;	_j .Lexit;	.align 8
	mov	a13, a3		;	_j .Lexit;	.align 8
	mov	a14, a3		;	_j .Lexit;	.align 8
	mov	a15, a3		;	_j .Lexit;	.align 8
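
	/* Entries 0..8 go through the exception frame because those
	 * registers were saved there and are reloaded on exit; entries
	 * 9..15 are still live and can be written directly.  Entry 1
	 * writes the stack pointer itself, hence the "fishy" note.
	 */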

.Lstore_table:
	l32i	a3, a2, PT_AREG0;	_j 1f;	.align 8
	mov	a3, a1;			_j 1f;	.align 8	# fishy??
	l32i	a3, a2, PT_AREG2;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG3;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG4;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG5;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG6;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG7;	_j 1f;	.align 8
	l32i	a3, a2, PT_AREG8;	_j 1f;	.align 8
	mov	a3, a9		;	_j 1f;	.align 8
	mov	a3, a10		;	_j 1f;	.align 8
	mov	a3, a11		;	_j 1f;	.align 8
	mov	a3, a12		;	_j 1f;	.align 8
	mov	a3, a13		;	_j 1f;	.align 8
	mov	a3, a14		;	_j 1f;	.align 8
	mov	a3, a15		;	_j 1f;	.align 8

	/* We cannot handle this exception. */

	.extern _kernel_exception
.Linvalid_instruction_load:
.Linvalid_instruction_store:

	movi	a4, 0
	rsr	a3, excsave1
	s32i	a4, a3, EXC_TABLE_FIXUP

	/* Restore a4...a8 and SAR, set SP, and jump to default exception. */

	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_AREG4
	wsr	a0, sar
	mov	a1, a2

	rsr	a0, ps
	bbsi.l  a0, PS_UM_BIT, 2f     # jump if user mode

	movi	a0, _kernel_exception
	jx	a0

2:	movi	a0, _user_exception
	jx	a0

1:	# a7: instruction pointer, a4: instruction, a3: value

	movi	a6, 0			# mask: ffffffff:00000000

#if XCHAL_HAVE_DENSITY
	addi	a7, a7, 2		# incr. PC, assume 16-bit instruction

	extui	a5, a4, INSN_OP0, 4	# extract OP0
	addi	a5, a5, -OP0_S32I_N
	_beqz	a5, 1f			# s32i.n: jump

	addi	a7, a7, 1		# increment PC, 32-bit instruction
#else
	addi	a7, a7, 3		# increment PC, 32-bit instruction
#endif

	extui	a5, a4, INSN_OP1, 4	# extract OP1
	_beqi	a5, OP1_S32I, 1f	# jump if 32 bit store
	_bnei	a5, OP1_S16I, .Linvalid_instruction_store

	movi	a5, -1
	__extl	a3, a3			# get 16-bit value
	__exth	a6, a5			# get 16-bit mask ffffffff:ffff0000
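
	/* For S16I, restrict the update to a single halfword: __extl
	 * keeps the 16-bit value and __exth turns a6 into the matching
	 * halfword mask.  For S32I/S32I.N, a6 stays 0 and all four
	 * bytes of the value are written.
	 */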

	/* Get memory address */

1:
	movi	a4, ~3
	and	a4, a4, a8		# align memory address

	/* Insert value into memory */

	movi	a5, -1			# mask: ffffffff:XXXX0000
#ifdef UNALIGNED_USER_EXCEPTION
	addi	a4, a4, 8
#endif

	__ssa8r a8
	__src_b	a8, a5, a6		# lo-mask  F..F0..0 (BE) 0..0F..F (LE)
	__src_b	a6, a6, a5		# hi-mask  0..0F..F (BE) F..F0..0 (LE)
#ifdef UNALIGNED_USER_EXCEPTION
	l32e	a5, a4, -8
#else
	l32i	a5, a4, 0		# load lower address word
#endif
	and	a5, a5, a8		# mask
	__sh	a8, a3			# shift value
	or	a5, a5, a8		# or with original value
#ifdef UNALIGNED_USER_EXCEPTION
	s32e	a5, a4, -8
	l32e	a8, a4, -4
#else
	s32i	a5, a4, 0		# store
	l32i	a8, a4, 4		# same for upper address word
#endif
	__sl	a5, a3
	and	a6, a8, a6
	or	a6, a6, a5
#ifdef UNALIGNED_USER_EXCEPTION
	s32e	a6, a4, -4
#else
	s32i	a6, a4, 4
#endif
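
	/* Net effect: a read-modify-write of the two words covering the
	 * unaligned address.  Each word is ANDed with its "keep" mask
	 * and ORed with the portion of the value shifted into place, so
	 * bytes outside the emulated store are preserved.
	 */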

.Lexit:
#if XCHAL_HAVE_LOOPS
	rsr	a4, lend		# check if we reached LEND
	bne	a7, a4, 1f
	rsr	a4, lcount		# and LCOUNT != 0
	beqz	a4, 1f
	addi	a4, a4, -1		# decrement LCOUNT and set
	rsr	a7, lbeg		# set PC to LBEGIN
	wsr	a4, lcount
#endif
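
	/* EPC1 is rewritten by hand below, so the zero-overhead loop
	 * hardware never sees this instruction complete.  If it was the
	 * last instruction of a loop (next PC == LEND and LCOUNT != 0),
	 * replicate the loopback: decrement LCOUNT and resume at LBEG.
	 */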

1:	wsr	a7, epc1		# skip emulated instruction

	/* Update icount if we're single-stepping in userspace. */
	rsr	a4, icountlevel
	beqz	a4, 1f
	bgeui	a4, LOCKLEVEL + 1, 1f
	rsr	a4, icount
	addi	a4, a4, 1
	wsr	a4, icount
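
	/* (The emulated instruction never commits in hardware, so
	 *  ICOUNT must be advanced by hand here for single-stepping to
	 *  account for it.)
	 */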
1:
	movi	a4, 0
	rsr	a3, excsave1
	s32i	a4, a3, EXC_TABLE_FIXUP

	/* Restore working registers */

	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_AREG4
	l32i	a3, a2, PT_AREG3

	/* Restore SAR and return */

	wsr	a0, sar
	l32i	a0, a2, PT_AREG0
	l32i	a2, a2, PT_AREG2
	rfe

ENDPROC(fast_unaligned)

ENTRY(fast_unaligned_fixup)

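	/* Entered via EXC_TABLE_FIXUP when one of the loads/stores in
	 * fast_unaligned itself takes a double exception: restore the
	 * state saved above and dispatch the nested exception cause to
	 * its fast handler through the exception table.
	 */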
	l32i	a2, a3, EXC_TABLE_DOUBLE_SAVE
	wsr	a3, excsave1

	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_AREG4
	l32i	a0, a2, PT_AREG2
	xsr	a0, depc			# restore depc and a0
	wsr	a0, sar

	rsr	a0, exccause
	s32i	a0, a2, PT_DEPC			# mark as a regular exception

	rsr	a0, ps
	bbsi.l  a0, PS_UM_BIT, 1f		# jump if user mode

	rsr	a0, exccause
	addx4	a0, a0, a3			# find entry in table
	l32i	a0, a0, EXC_TABLE_FAST_KERNEL	# load handler
	l32i	a3, a2, PT_AREG3
	jx	a0
1:
	rsr	a0, exccause
	addx4	a0, a0, a3			# find entry in table
	l32i	a0, a0, EXC_TABLE_FAST_USER	# load handler
	l32i	a3, a2, PT_AREG3
	jx	a0

ENDPROC(fast_unaligned_fixup)

#endif /* XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION */