1 /* SPDX-License-Identifier: GPL-2.0-only */
2 /*
3  * Copyright (C) 2021 Sifive.
4  */
5 #ifndef ASM_ERRATA_LIST_H
6 #define ASM_ERRATA_LIST_H
7 
8 #include <asm/alternative.h>
9 #include <asm/csr.h>
10 #include <asm/insn-def.h>
11 #include <asm/hwcap.h>
12 #include <asm/vendorid_list.h>
13 
#ifdef CONFIG_ERRATA_ANDES
/*
 * Andes erratum IDs, numbered 0..n-1; *_NUMBER is the count of errata
 * defined for this vendor.  NO_IOCP: presumably "no I/O Coherence Port"
 * (non-coherent DMA) — TODO confirm against the Andes errata handling code.
 */
#define ERRATA_ANDESTECH_NO_IOCP	0
#define ERRATA_ANDESTECH_NUMBER		1
#endif
18 
#ifdef CONFIG_ERRATA_SIFIVE
/*
 * SiFive erratum IDs, numbered 0..n-1; *_NUMBER is the count of errata
 * defined for this vendor.  CIP-453 is worked around via the trap-handler
 * alternatives below; CIP-1200 via the sfence.vma alternatives below.
 */
#define	ERRATA_SIFIVE_CIP_453 0
#define	ERRATA_SIFIVE_CIP_1200 1
#define	ERRATA_SIFIVE_NUMBER 2
#endif
24 
#ifdef CONFIG_ERRATA_THEAD
/*
 * T-Head erratum IDs, numbered 0..n-1; *_NUMBER is the count of errata
 * defined for this vendor.  PBMT: vendor page-based memory types (see
 * ALT_SVPBMT/ALT_THEAD_PMA below); CMO: vendor cache-maintenance ops
 * (see ALT_CMO_OP); PMU: vendor counter-overflow CSR (see
 * ALT_SBI_PMU_OVERFLOW).
 */
#define	ERRATA_THEAD_PBMT 0
#define	ERRATA_THEAD_CMO 1
#define	ERRATA_THEAD_PMU 2
#define	ERRATA_THEAD_NUMBER 3
#endif
31 
32 #ifdef __ASSEMBLY__
33 
/*
 * Emit a pointer-sized handler entry (assembly only): the default
 * do_trap_insn_fault, patched at runtime with the SiFive CIP-453
 * workaround handler on affected parts.  The 'x' argument is unused;
 * it is kept so call sites look like the other vector-table macros.
 */
#define ALT_INSN_FAULT(x)						\
ALTERNATIVE(__stringify(RISCV_PTR do_trap_insn_fault),			\
	    __stringify(RISCV_PTR sifive_cip_453_insn_fault_trp),	\
	    SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,			\
	    CONFIG_ERRATA_SIFIVE_CIP_453)
39 
/*
 * Same pattern as ALT_INSN_FAULT: emit the default do_page_fault entry,
 * patched with the SiFive CIP-453 page-fault workaround handler on
 * affected parts.  'x' is unused.
 */
#define ALT_PAGE_FAULT(x)						\
ALTERNATIVE(__stringify(RISCV_PTR do_page_fault),			\
	    __stringify(RISCV_PTR sifive_cip_453_page_fault_trp),	\
	    SIFIVE_VENDOR_ID, ERRATA_SIFIVE_CIP_453,			\
	    CONFIG_ERRATA_SIFIVE_CIP_453)
45 #else /* !__ASSEMBLY__ */
46 
/*
 * TLB flush by ASID.  On parts with SiFive erratum CIP-1200 the
 * ASID-selective "sfence.vma x0, asid" is replaced by an unconditional
 * full "sfence.vma" (the selective form is presumably unreliable on
 * those parts — the alternative simply over-flushes).
 */
#define ALT_SFENCE_VMA_ASID(asid)					\
asm(ALTERNATIVE("sfence.vma x0, %0", "sfence.vma", SIFIVE_VENDOR_ID,	\
		ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)	\
		: : "r" (asid) : "memory")
51 
/*
 * TLB flush by virtual address; CIP-1200 parts fall back to a full
 * "sfence.vma" instead of the address-selective form.
 */
#define ALT_SFENCE_VMA_ADDR(addr)					\
asm(ALTERNATIVE("sfence.vma %0", "sfence.vma", SIFIVE_VENDOR_ID,	\
		ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)	\
		: : "r" (addr) : "memory")
56 
/*
 * TLB flush by (virtual address, ASID) pair; CIP-1200 parts fall back
 * to a full "sfence.vma" instead of the selective form.
 */
#define ALT_SFENCE_VMA_ADDR_ASID(addr, asid)				\
asm(ALTERNATIVE("sfence.vma %0, %1", "sfence.vma", SIFIVE_VENDOR_ID,	\
		ERRATA_SIFIVE_CIP_1200, CONFIG_ERRATA_SIFIVE_CIP_1200)	\
		: : "r" (addr), "r" (asid) : "memory")
61 
/*
 * ALT_SVPBMT(_val, prot): load the page-based memory-type bits for the
 * protection class 'prot' into _val.  Three variants, patched at boot:
 *  - default: no PBMT support, _val = 0 (plus a nop to pad to two insns);
 *  - Svpbmt:  _val = prot##_SVPBMT, built as (constant >> 61) << 61;
 *  - T-Head:  _val = prot##_THEAD,  built as (constant >> 59) << 59.
 * The li/slli split is needed because the PBMT bits live in the high
 * bits of the PTE, out of range for an "I" (12-bit) immediate.
 *
 * _val is marked as "will be overwritten", so need to set it to 0
 * in the default case.
 */
#define ALT_SVPBMT_SHIFT 61
#define ALT_THEAD_PBMT_SHIFT 59
#define ALT_SVPBMT(_val, prot)						\
asm(ALTERNATIVE_2("li %0, 0\t\nnop",					\
		  "li %0, %1\t\nslli %0,%0,%3", 0,			\
			RISCV_ISA_EXT_SVPBMT, CONFIG_RISCV_ISA_SVPBMT,	\
		  "li %0, %2\t\nslli %0,%0,%4", THEAD_VENDOR_ID,	\
			ERRATA_THEAD_PBMT, CONFIG_ERRATA_THEAD_PBMT)	\
		: "=r"(_val)						\
		: "I"(prot##_SVPBMT >> ALT_SVPBMT_SHIFT),		\
		  "I"(prot##_THEAD >> ALT_THEAD_PBMT_SHIFT),		\
		  "I"(ALT_SVPBMT_SHIFT),				\
		  "I"(ALT_THEAD_PBMT_SHIFT))
79 
#ifdef CONFIG_ERRATA_THEAD_PBMT
/*
 * IO/NOCACHE memory types are handled together with svpbmt,
 * so on T-Head chips, check if no other memory type is set,
 * and set the non-0 PMA type if applicable.
 *
 * ALT_THEAD_PMA(_val): _val is a PTE value updated in place.  If
 * (_val & _PAGE_MTMASK_THEAD) is zero, OR in _PAGE_PMA_THEAD; otherwise
 * leave _val untouched (branch to local label 2:).  The default
 * (non-T-Head) variant is 7 nops, matching the patched sequence length.
 * Clobbers t3; the mask/type constants are passed pre-shifted right by
 * ALT_THEAD_PBMT_SHIFT so they fit an "I" immediate, then shifted back.
 */
#define ALT_THEAD_PMA(_val)						\
asm volatile(ALTERNATIVE(						\
	__nops(7),							\
	"li      t3, %1\n\t"						\
	"slli    t3, t3, %3\n\t"					\
	"and     t3, %0, t3\n\t"					\
	"bne     t3, zero, 2f\n\t"					\
	"li      t3, %2\n\t"						\
	"slli    t3, t3, %3\n\t"					\
	"or      %0, %0, t3\n\t"					\
	"2:",  THEAD_VENDOR_ID,						\
		ERRATA_THEAD_PBMT, CONFIG_ERRATA_THEAD_PBMT)		\
	: "+r"(_val)							\
	: "I"(_PAGE_MTMASK_THEAD >> ALT_THEAD_PBMT_SHIFT),		\
	  "I"(_PAGE_PMA_THEAD >> ALT_THEAD_PBMT_SHIFT),			\
	  "I"(ALT_THEAD_PBMT_SHIFT)					\
	: "t3")
#else
/* No-op when the T-Head PBMT erratum workaround is not built in. */
#define ALT_THEAD_PMA(_val)
#endif
106 
107 /*
108  * dcache.ipa rs1 (invalidate, physical address)
109  * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
110  *   0000001    01010      rs1       000      00000  0001011
 * dcache.iva rs1 (invalidate, virtual address)
112  *   0000001    00110      rs1       000      00000  0001011
113  *
114  * dcache.cpa rs1 (clean, physical address)
115  * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
116  *   0000001    01001      rs1       000      00000  0001011
117  * dcache.cva rs1 (clean, virtual address)
118  *   0000001    00101      rs1       000      00000  0001011
119  *
120  * dcache.cipa rs1 (clean then invalidate, physical address)
121  * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
122  *   0000001    01011      rs1       000      00000  0001011
 * dcache.civa rs1 (clean then invalidate, virtual address)
124  *   0000001    00111      rs1       000      00000  0001011
125  *
126  * sync.s (make sure all cache operations finished)
127  * | 31 - 25 | 24 - 20 | 19 - 15 | 14 - 12 | 11 - 7 | 6 - 0 |
128  *   0000000    11001     00000      000      00000  0001011
129  */
/*
 * Hand-encoded T-Head vendor cache instructions operating on register
 * a0 (rs1 = x10), per the encoding table above:
 *   inval = dcache.iva, clean = dcache.cva, flush = dcache.civa,
 *   SYNC_S = sync.s (wait for all cache operations to finish).
 * Emitted as raw .long since assemblers don't know these opcodes.
 */
#define THEAD_inval_A0	".long 0x0265000b"
#define THEAD_clean_A0	".long 0x0255000b"
#define THEAD_flush_A0	".long 0x0275000b"
#define THEAD_SYNC_S	".long 0x0190000b"
134 
/*
 * ALT_CMO_OP(_op, _start, _size, _cachesize): perform cache maintenance
 * operation _op (inval/clean/flush) over [_start, _start + _size), one
 * cache line at a time.  Three variants, patched at boot:
 *  - default: 6 nops (no CMO support);
 *  - Zicbom:  loop of CBO_##_op(a0) standard cache-block operations;
 *  - T-Head:  loop of the vendor THEAD_##_op##_A0 instruction, followed
 *             by sync.s to wait for completion.
 * The start address is aligned down to _cachesize before the loop
 * (_cachesize is therefore assumed to be a power of two).  Clobbers a0,
 * which both instruction forms hard-code as their address register.
 */
#define ALT_CMO_OP(_op, _start, _size, _cachesize)			\
asm volatile(ALTERNATIVE_2(						\
	__nops(6),							\
	"mv a0, %1\n\t"							\
	"j 2f\n\t"							\
	"3:\n\t"							\
	CBO_##_op(a0)							\
	"add a0, a0, %0\n\t"						\
	"2:\n\t"							\
	"bltu a0, %2, 3b\n\t"						\
	"nop", 0, RISCV_ISA_EXT_ZICBOM, CONFIG_RISCV_ISA_ZICBOM,	\
	"mv a0, %1\n\t"							\
	"j 2f\n\t"							\
	"3:\n\t"							\
	THEAD_##_op##_A0 "\n\t"						\
	"add a0, a0, %0\n\t"						\
	"2:\n\t"							\
	"bltu a0, %2, 3b\n\t"						\
	THEAD_SYNC_S, THEAD_VENDOR_ID,					\
			ERRATA_THEAD_CMO, CONFIG_ERRATA_THEAD_CMO)	\
	: : "r"(_cachesize),						\
	    "r"((unsigned long)(_start) & ~((_cachesize) - 1UL)),	\
	    "r"((unsigned long)(_start) + (_size))			\
	: "a0")
159 
/* T-Head C9xx PMU: vendor overflow IRQ number and overflow-status CSR. */
#define THEAD_C9XX_RV_IRQ_PMU			17
#define THEAD_C9XX_CSR_SCOUNTEROF		0x5c5

/*
 * ALT_SBI_PMU_OVERFLOW(__ovl): read the counter-overflow status into
 * __ovl — from the standard Sscofpmf CSR (sscountovf) by default, or
 * from the vendor SCOUNTEROF CSR on T-Head C9xx parts with the PMU
 * erratum.  "memory" clobber keeps the read ordered with surrounding
 * accesses.
 */
#define ALT_SBI_PMU_OVERFLOW(__ovl)					\
asm volatile(ALTERNATIVE(						\
	"csrr %0, " __stringify(CSR_SSCOUNTOVF),			\
	"csrr %0, " __stringify(THEAD_C9XX_CSR_SCOUNTEROF),		\
		THEAD_VENDOR_ID, ERRATA_THEAD_PMU,			\
		CONFIG_ERRATA_THEAD_PMU)				\
	: "=r" (__ovl) :						\
	: "memory")
171 
172 #endif /* __ASSEMBLY__ */
173 
174 #endif
175