xref: /openbmc/linux/arch/riscv/kernel/traps_misaligned.c (revision 5f8b7d4b2e9604d03ae06f1a2dd5a1f34c33e533)
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2020 Western Digital Corporation or its affiliates.
 */
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/irq.h>
#include <linux/stringify.h>

#include <asm/processor.h>
#include <asm/ptrace.h>
#include <asm/csr.h>

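/*
 * Each INSN_MATCH_<op>/INSN_MASK_<op> pair identifies one load/store
 * encoding via "(insn & mask) == match".  For the 32-bit encodings the
 * mask 0x707f selects the opcode field (bits [6:0]) and funct3
 * (bits [14:12]), which together determine the access type and width.
 */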
#define INSN_MATCH_LB			0x3
#define INSN_MASK_LB			0x707f
#define INSN_MATCH_LH			0x1003
#define INSN_MASK_LH			0x707f
#define INSN_MATCH_LW			0x2003
#define INSN_MASK_LW			0x707f
#define INSN_MATCH_LD			0x3003
#define INSN_MASK_LD			0x707f
#define INSN_MATCH_LBU			0x4003
#define INSN_MASK_LBU			0x707f
#define INSN_MATCH_LHU			0x5003
#define INSN_MASK_LHU			0x707f
#define INSN_MATCH_LWU			0x6003
#define INSN_MASK_LWU			0x707f
#define INSN_MATCH_SB			0x23
#define INSN_MASK_SB			0x707f
#define INSN_MATCH_SH			0x1023
#define INSN_MASK_SH			0x707f
#define INSN_MATCH_SW			0x2023
#define INSN_MASK_SW			0x707f
#define INSN_MATCH_SD			0x3023
#define INSN_MASK_SD			0x707f

#define INSN_MATCH_FLW			0x2007
#define INSN_MASK_FLW			0x707f
#define INSN_MATCH_FLD			0x3007
#define INSN_MASK_FLD			0x707f
#define INSN_MATCH_FLQ			0x4007
#define INSN_MASK_FLQ			0x707f
#define INSN_MATCH_FSW			0x2027
#define INSN_MASK_FSW			0x707f
#define INSN_MATCH_FSD			0x3027
#define INSN_MASK_FSD			0x707f
#define INSN_MATCH_FSQ			0x4027
#define INSN_MASK_FSQ			0x707f

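/*
 * Compressed (C extension) load/store encodings.  Here the mask 0xe003
 * selects the 2-bit opcode (bits [1:0]) and funct3 (bits [15:13]).
 */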
#define INSN_MATCH_C_LD			0x6000
#define INSN_MASK_C_LD			0xe003
#define INSN_MATCH_C_SD			0xe000
#define INSN_MASK_C_SD			0xe003
#define INSN_MATCH_C_LW			0x4000
#define INSN_MASK_C_LW			0xe003
#define INSN_MATCH_C_SW			0xc000
#define INSN_MASK_C_SW			0xe003
#define INSN_MATCH_C_LDSP		0x6002
#define INSN_MASK_C_LDSP		0xe003
#define INSN_MATCH_C_SDSP		0xe002
#define INSN_MASK_C_SDSP		0xe003
#define INSN_MATCH_C_LWSP		0x4002
#define INSN_MASK_C_LWSP		0xe003
#define INSN_MATCH_C_SWSP		0xc002
#define INSN_MASK_C_SWSP		0xe003

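/*
 * Compressed floating-point load/store encodings.  Note that C.FLW,
 * C.FLWSP, C.FSW and C.FSWSP exist only on RV32; on RV64 the same bit
 * patterns encode C.LD, C.LDSP, C.SD and C.SDSP (compare the MATCH
 * values above), which is why the handlers below guard the C.FLW forms
 * with CONFIG_32BIT.
 */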
#define INSN_MATCH_C_FLD		0x2000
#define INSN_MASK_C_FLD			0xe003
#define INSN_MATCH_C_FLW		0x6000
#define INSN_MASK_C_FLW			0xe003
#define INSN_MATCH_C_FSD		0xa000
#define INSN_MASK_C_FSD			0xe003
#define INSN_MATCH_C_FSW		0xe000
#define INSN_MASK_C_FSW			0xe003
#define INSN_MATCH_C_FLDSP		0x2002
#define INSN_MASK_C_FLDSP		0xe003
#define INSN_MATCH_C_FSDSP		0xa002
#define INSN_MASK_C_FSDSP		0xe003
#define INSN_MATCH_C_FLWSP		0x6002
#define INSN_MASK_C_FLWSP		0xe003
#define INSN_MATCH_C_FSWSP		0xe002
#define INSN_MASK_C_FSWSP		0xe003

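/*
 * An instruction is 16 bits (compressed) when its two lowest opcode
 * bits are not both set; every other encoding handled here is 32 bits.
 */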
#define INSN_LEN(insn)			((((insn) & 0x3) < 0x3) ? 2 : 4)

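/*
 * XLEN is the native register width and REGBYTES its size in bytes
 * (LOG_REGBYTES being log2 of that).  XLEN_MINUS_16 is used by
 * get_insn() below to truncate a fetched word to its low 16 bits with
 * a shift-left/shift-right pair.
 */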
#if defined(CONFIG_64BIT)
#define LOG_REGBYTES			3
#define XLEN				64
#else
#define LOG_REGBYTES			2
#define XLEN				32
#endif
#define REGBYTES			(1 << LOG_REGBYTES)
#define XLEN_MINUS_16			((XLEN) - 16)

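/*
 * Bit positions of the register fields: rd, rs1 and rs2 in a 32-bit
 * instruction, and the rs2 field of the compressed encodings (SH_RS2C).
 */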
#define SH_RD				7
#define SH_RS1				15
#define SH_RS2				20
#define SH_RS2C				2

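/*
 * RV_X() extracts an n-bit field starting at bit s.  The RVC_*_IMM()
 * helpers reassemble the scattered immediate bits of the compressed
 * load/store forms into a byte offset, and RVC_RS1S()/RVC_RS2S() map
 * the 3-bit compressed register fields onto x8-x15.
 */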
#define RV_X(x, s, n)			(((x) >> (s)) & ((1 << (n)) - 1))
#define RVC_LW_IMM(x)			((RV_X(x, 6, 1) << 2) | \
					 (RV_X(x, 10, 3) << 3) | \
					 (RV_X(x, 5, 1) << 6))
#define RVC_LD_IMM(x)			((RV_X(x, 10, 3) << 3) | \
					 (RV_X(x, 5, 2) << 6))
#define RVC_LWSP_IMM(x)			((RV_X(x, 4, 3) << 2) | \
					 (RV_X(x, 12, 1) << 5) | \
					 (RV_X(x, 2, 2) << 6))
#define RVC_LDSP_IMM(x)			((RV_X(x, 5, 2) << 3) | \
					 (RV_X(x, 12, 1) << 5) | \
					 (RV_X(x, 2, 3) << 6))
#define RVC_SWSP_IMM(x)			((RV_X(x, 9, 4) << 2) | \
					 (RV_X(x, 7, 2) << 6))
#define RVC_SDSP_IMM(x)			((RV_X(x, 10, 3) << 3) | \
					 (RV_X(x, 7, 3) << 6))
#define RVC_RS1S(insn)			(8 + RV_X(insn, SH_RD, 3))
#define RVC_RS2S(insn)			(8 + RV_X(insn, SH_RS2C, 3))
#define RVC_RS2(insn)			RV_X(insn, SH_RS2C, 5)

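/*
 * The GET_RSx()/SET_RD() accessors turn a register field of the
 * instruction into a pointer to the saved register in struct pt_regs:
 * REG_OFFSET() shifts the 5-bit register number down so that it
 * becomes a byte offset (regno * REGBYTES), relying on pt_regs placing
 * epc in the x0 slot followed by x1..x31.  For example, with rs1 == 10
 * (a0), GET_RS1(insn, regs) reads *(ulong *)((ulong)regs + 10 * REGBYTES).
 * Passing a plain register number with pos == 0, as GET_SP() and the
 * compressed-form accessors do, uses the same machinery, SHIFT_RIGHT()
 * accepting the resulting negative shift amount.
 */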
#define SHIFT_RIGHT(x, y)		\
	((y) < 0 ? ((x) << -(y)) : ((x) >> (y)))

#define REG_MASK			\
	((1 << (5 + LOG_REGBYTES)) - (1 << LOG_REGBYTES))

#define REG_OFFSET(insn, pos)		\
	(SHIFT_RIGHT((insn), (pos) - LOG_REGBYTES) & REG_MASK)

#define REG_PTR(insn, pos, regs)	\
	(ulong *)((ulong)(regs) + REG_OFFSET(insn, pos))

#define GET_RS1(insn, regs)		(*REG_PTR(insn, SH_RS1, regs))
#define GET_RS2(insn, regs)		(*REG_PTR(insn, SH_RS2, regs))
#define GET_RS1S(insn, regs)		(*REG_PTR(RVC_RS1S(insn), 0, regs))
#define GET_RS2S(insn, regs)		(*REG_PTR(RVC_RS2S(insn), 0, regs))
#define GET_RS2C(insn, regs)		(*REG_PTR(insn, SH_RS2C, regs))
#define GET_SP(regs)			(*REG_PTR(2, 0, regs))
#define SET_RD(insn, regs, val)		(*REG_PTR(insn, SH_RD, regs) = (val))
#define IMM_I(insn)			((s32)(insn) >> 20)
#define IMM_S(insn)			(((s32)(insn) >> 25 << 5) | \
					 (s32)(((insn) >> 7) & 0x1f))
#define MASK_FUNCT3			0x7000

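/*
 * Floating-point instruction fields: GET_PRECISION() reads the 2-bit
 * fmt field (bits [26:25]) and GET_RM() the rounding-mode field
 * (bits [14:12]).  They are not referenced by the handlers in this
 * file.
 */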
#define GET_PRECISION(insn) (((insn) >> 25) & 3)
#define GET_RM(insn) (((insn) >> 12) & 7)
#define PRECISION_S 0
#define PRECISION_D 1

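/*
 * Byte-at-a-time memory accessors.  Emitting explicit lbu/sb ensures
 * that the emulation itself only ever performs naturally aligned
 * single-byte accesses, regardless of how the faulting address is
 * aligned.
 */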
static inline u8 load_u8(const u8 *addr)
{
	u8 val;

	asm volatile("lbu %0, %1" : "=&r" (val) : "m" (*addr));

	return val;
}

static inline void store_u8(u8 *addr, u8 val)
{
	asm volatile ("sb %0, %1\n" : : "r" (val), "m" (*addr));
}

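/*
 * Fetch the trapping instruction at mepc.  If the address is 4-byte
 * aligned, a single 32-bit load is used and, when the low opcode bits
 * show a compressed instruction, the XLEN_MINUS_16 shift pair keeps
 * only the low halfword.  If the address is only 2-byte aligned, the
 * instruction is read as one or two halfwords so that a 32-bit
 * instruction straddling a 4-byte boundary is never itself fetched
 * with a misaligned load.
 */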
static inline ulong get_insn(ulong mepc)
{
	register ulong __mepc asm ("a2") = mepc;
	ulong val, rvc_mask = 3, tmp;

	asm ("and %[tmp], %[addr], 2\n"
		"bnez %[tmp], 1f\n"
#if defined(CONFIG_64BIT)
		__stringify(LWU) " %[insn], (%[addr])\n"
#else
		__stringify(LW) " %[insn], (%[addr])\n"
#endif
		"and %[tmp], %[insn], %[rvc_mask]\n"
		"beq %[tmp], %[rvc_mask], 2f\n"
		"sll %[insn], %[insn], %[xlen_minus_16]\n"
		"srl %[insn], %[insn], %[xlen_minus_16]\n"
		"j 2f\n"
		"1:\n"
		"lhu %[insn], (%[addr])\n"
		"and %[tmp], %[insn], %[rvc_mask]\n"
		"bne %[tmp], %[rvc_mask], 2f\n"
		"lhu %[tmp], 2(%[addr])\n"
		"sll %[tmp], %[tmp], 16\n"
		"add %[insn], %[insn], %[tmp]\n"
		"2:"
	: [insn] "=&r" (val), [tmp] "=&r" (tmp)
	: [addr] "r" (__mepc), [rvc_mask] "r" (rvc_mask),
	  [xlen_minus_16] "i" (XLEN_MINUS_16));

	return val;
}

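/*
 * Staging buffer for the emulated access: bytes are copied one at a
 * time through data_bytes[] and then consumed (or produced) as a
 * native word via data_ulong/data_u64, which matches memory order on
 * little-endian RISC-V.
 */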
union reg_data {
	u8 data_bytes[8];
	ulong data_ulong;
	u64 data_u64;
};

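/*
 * Emulate a load that trapped with a load-address-misaligned
 * exception.  The faulting address is read from the mtval CSR, i.e.
 * this path is used when the kernel itself runs in machine mode.  The
 * decode chain recognises the scalar load encodings (byte loads are
 * absent because a single-byte access cannot be misaligned), sets the
 * access width and, for the compressed forms, rewrites "insn" so that
 * SET_RD() picks up the right destination register.  regs->epc is
 * zeroed while emulating and only set to a resume address
 * (epc + instruction length) on success; unrecognised instructions
 * restore the original epc and return -1.  The value is assembled
 * byte by byte, truncated to the access width by the shift pair and
 * written to rd; FP loads are decoded but still rejected here, since
 * this version does not write FP registers.
 */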
int handle_misaligned_load(struct pt_regs *regs)
{
	union reg_data val;
	unsigned long epc = regs->epc;
	unsigned long insn = get_insn(epc);
	unsigned long addr = csr_read(mtval);
	int i, fp = 0, shift = 0, len = 0;

	regs->epc = 0;

	if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_LWU) == INSN_MATCH_LWU) {
		len = 4;
#endif
	} else if ((insn & INSN_MASK_FLD) == INSN_MATCH_FLD) {
		fp = 1;
		len = 8;
	} else if ((insn & INSN_MASK_FLW) == INSN_MATCH_FLW) {
		fp = 1;
		len = 4;
	} else if ((insn & INSN_MASK_LH) == INSN_MATCH_LH) {
		len = 2;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_LHU) == INSN_MATCH_LHU) {
		len = 2;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_C_LD) == INSN_MATCH_C_LD) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LDSP) == INSN_MATCH_C_LDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
#endif
	} else if ((insn & INSN_MASK_C_LW) == INSN_MATCH_C_LW) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LWSP) == INSN_MATCH_C_LWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_C_FLD) == INSN_MATCH_C_FLD) {
		fp = 1;
		len = 8;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLDSP) == INSN_MATCH_C_FLDSP) {
		fp = 1;
		len = 8;
#if defined(CONFIG_32BIT)
	} else if ((insn & INSN_MASK_C_FLW) == INSN_MATCH_C_FLW) {
		fp = 1;
		len = 4;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLWSP) == INSN_MATCH_C_FLWSP) {
		fp = 1;
		len = 4;
#endif
	} else {
		regs->epc = epc;
		return -1;
	}

	val.data_u64 = 0;
	for (i = 0; i < len; i++)
		val.data_bytes[i] = load_u8((void *)(addr + i));

	if (fp)
		return -1;
	SET_RD(insn, regs, val.data_ulong << shift >> shift);

	regs->epc = epc + INSN_LEN(insn);

	return 0;
}

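/*
 * Emulate a store that trapped with a store-address-misaligned
 * exception, mirroring the load path: the source register is read with
 * GET_RS2() (or the compressed-form accessors), the recognised store
 * encodings select the access width, and the bytes are written out one
 * at a time with store_u8().  Floating-point stores and other
 * unrecognised encodings restore the original epc and return -1.
 */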
int handle_misaligned_store(struct pt_regs *regs)
{
	union reg_data val;
	unsigned long epc = regs->epc;
	unsigned long insn = get_insn(epc);
	unsigned long addr = csr_read(mtval);
	int i, len = 0;

	regs->epc = 0;

	val.data_ulong = GET_RS2(insn, regs);

	if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) {
		len = 4;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) {
		len = 8;
#endif
	} else if ((insn & INSN_MASK_SH) == INSN_MATCH_SH) {
		len = 2;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_C_SD) == INSN_MATCH_C_SD) {
		len = 8;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SDSP) == INSN_MATCH_C_SDSP) {
		len = 8;
		val.data_ulong = GET_RS2C(insn, regs);
#endif
	} else if ((insn & INSN_MASK_C_SW) == INSN_MATCH_C_SW) {
		len = 4;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SWSP) == INSN_MATCH_C_SWSP) {
		len = 4;
		val.data_ulong = GET_RS2C(insn, regs);
	} else {
		regs->epc = epc;
		return -1;
	}

	for (i = 0; i < len; i++)
		store_u8((void *)(addr + i), val.data_bytes[i]);

	regs->epc = epc + INSN_LEN(insn);

	return 0;
}