/* SPDX-License-Identifier: GPL-2.0 */
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#ifndef __ASM_CSKY_ENTRY_H
#define __ASM_CSKY_ENTRY_H

#include <asm/setup.h>
#include <abi/regdef.h>

#define LSAVE_PC        8
#define LSAVE_PSR       12
#define LSAVE_A0        24
#define LSAVE_A1        28
#define LSAVE_A2        32
#define LSAVE_A3        36

#define KSPTOUSP
#define USPTOKSP

#define usp cr<14, 1>

.macro SAVE_ALL epc_inc
        subi    sp, 152
        stw     tls, (sp, 0)
        stw     lr, (sp, 4)

        mfcr    lr, epc
        movi    tls, \epc_inc
        add     lr, tls
        stw     lr, (sp, 8)

        mfcr    lr, epsr
        stw     lr, (sp, 12)
        btsti   lr, 31
        bf      1f
        addi    lr, sp, 152
        br      2f
1:
        mfcr    lr, usp
2:
        stw     lr, (sp, 16)

        stw     a0, (sp, 20)
        stw     a0, (sp, 24)
        stw     a1, (sp, 28)
        stw     a2, (sp, 32)
        stw     a3, (sp, 36)

        addi    sp, 40
        stm     r4-r13, (sp)

        addi    sp, 40
        stm     r16-r30, (sp)
#ifdef CONFIG_CPU_HAS_HILO
        mfhi    lr
        stw     lr, (sp, 60)
        mflo    lr
        stw     lr, (sp, 64)
        mfcr    lr, cr14
        stw     lr, (sp, 68)
#endif
        subi    sp, 80
.endm

.macro RESTORE_ALL
        psrclr  ie
        ldw     tls, (sp, 0)
        ldw     lr, (sp, 4)
        ldw     a0, (sp, 8)
        mtcr    a0, epc
        ldw     a0, (sp, 12)
        mtcr    a0, epsr
        btsti   a0, 31
        ldw     a0, (sp, 16)
        mtcr    a0, usp
        mtcr    a0, ss0

#ifdef CONFIG_CPU_HAS_HILO
        ldw     a0, (sp, 140)
        mthi    a0
        ldw     a0, (sp, 144)
        mtlo    a0
        ldw     a0, (sp, 148)
        mtcr    a0, cr14
#endif

        ldw     a0, (sp, 24)
        ldw     a1, (sp, 28)
        ldw     a2, (sp, 32)
        ldw     a3, (sp, 36)

        addi    sp, 40
        ldm     r4-r13, (sp)
        addi    sp, 40
        ldm     r16-r30, (sp)
        addi    sp, 72
        bf      1f
        mfcr    sp, ss0
1:
        rte
.endm

.macro SAVE_SWITCH_STACK
        subi    sp, 64
        stm     r4-r11, (sp)
        stw     lr, (sp, 32)
        stw     r16, (sp, 36)
        stw     r17, (sp, 40)
        stw     r26, (sp, 44)
        stw     r27, (sp, 48)
        stw     r28, (sp, 52)
        stw     r29, (sp, 56)
        stw     r30, (sp, 60)
#ifdef CONFIG_CPU_HAS_HILO
        subi    sp, 16
        mfhi    lr
        stw     lr, (sp, 0)
        mflo    lr
        stw     lr, (sp, 4)
        mfcr    lr, cr14
        stw     lr, (sp, 8)
#endif
.endm

.macro RESTORE_SWITCH_STACK
#ifdef CONFIG_CPU_HAS_HILO
        ldw     lr, (sp, 0)
        mthi    lr
        ldw     lr, (sp, 4)
        mtlo    lr
        ldw     lr, (sp, 8)
        mtcr    lr, cr14
        addi    sp, 16
#endif
        ldm     r4-r11, (sp)
        ldw     lr, (sp, 32)
        ldw     r16, (sp, 36)
        ldw     r17, (sp, 40)
        ldw     r26, (sp, 44)
        ldw     r27, (sp, 48)
        ldw     r28, (sp, 52)
        ldw     r29, (sp, 56)
        ldw     r30, (sp, 60)
        addi    sp, 64
.endm

/* MMU register operations. */
.macro RD_MIR rx
        mfcr    \rx, cr<0, 15>
.endm

.macro RD_MEH rx
        mfcr    \rx, cr<4, 15>
.endm

.macro RD_MCIR rx
        mfcr    \rx, cr<8, 15>
.endm

.macro RD_PGDR rx
        mfcr    \rx, cr<29, 15>
.endm

.macro RD_PGDR_K rx
        mfcr    \rx, cr<28, 15>
.endm

.macro WR_MEH rx
        mtcr    \rx, cr<4, 15>
.endm

.macro WR_MCIR rx
        mtcr    \rx, cr<8, 15>
.endm

.macro SETUP_MMU
        /* Init psr and enable ee */
        lrw     r6, DEFAULT_PSR_VALUE
        mtcr    r6, psr
        psrset  ee

        /* Invalidate I/Dcache, BTB and BHT */
        movi    r6, 7
        lsli    r6, 16
        addi    r6, (1<<4) | 3
        mtcr    r6, cr17

        /* Invalidate all TLB entries */
        bgeni   r6, 26
        mtcr    r6, cr<8, 15>           /* Set MCIR */

        /* Check MMU on/off */
        mfcr    r6, cr18
        btsti   r6, 0
        bt      1f

        /* MMU off: set up a mapping TLB entry for the current PC */
        movi    r6, 0
        mtcr    r6, cr<6, 15>           /* Set MPR with 4K page size */

        grs     r6, 1f                  /* Get current PA by PC */
        bmaski  r7, (PAGE_SHIFT + 1)    /* r7 = 0x1fff */
        andn    r6, r7
        mtcr    r6, cr<4, 15>           /* Set MEH */

        mov     r8, r6
        movi    r7, 0x00000006
        or      r8, r7
        mtcr    r8, cr<2, 15>           /* Set MEL0 */
        movi    r7, 0x00001006
        or      r8, r7
        mtcr    r8, cr<3, 15>           /* Set MEL1 */

        bgeni   r8, 28
        mtcr    r8, cr<8, 15>           /* Set MCIR to write TLB */

        br      2f
1:
        /*
         * MMU on: use the original MSA value from the bootloader.
         *
         * cr<30/31, 15> MSA register format:
         * 31 - 29 | 28 - 9 | 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0
         *   BA     Reserved  SH  WA  B   SO SEC  C   D   V
         */
        mfcr    r6, cr<30, 15>          /* Get MSA0 */
2:
        lsri    r6, 28
        lsli    r6, 28
        addi    r6, 0x1ce               /* Attributes: SH | WA | B | C | D | V */
        mtcr    r6, cr<30, 15>          /* Set MSA0 */

        lsri    r6, 28
        addi    r6, 2                   /* Next 512MB (0x20000000) region */
        lsli    r6, 28
        addi    r6, 0x1ce
        mtcr    r6, cr<31, 15>          /* Set MSA1 */

        /* Enable MMU */
        mfcr    r6, cr18
        bseti   r6, 0
        mtcr    r6, cr18

        jmpi    3f                      /* Jump to VA */
3:
.endm

.macro ANDI_R3 rx, imm
        lsri    \rx, 3
        andi    \rx, (\imm >> 3)
.endm
#endif /* __ASM_CSKY_ENTRY_H */