/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/cmpxchg.h>

/* Use r21 for fast access */
register unsigned long __my_cpu_offset __asm__("$r21");

static inline void set_my_cpu_offset(unsigned long off)
{
	__my_cpu_offset = off;
	csr_write64(off, PERCPU_BASE_KS);
}
#define __my_cpu_offset __my_cpu_offset

#define PERCPU_OP(op, asm_op, c_op)					\
static inline unsigned long __percpu_##op(void *ptr,			\
			unsigned long val, int size)			\
{									\
	unsigned long ret;						\
									\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__(					\
		"am"#asm_op".w"	" %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u32 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"am"#asm_op".d"	" %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u64 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	default:							\
		ret = 0;						\
		BUILD_BUG();						\
	}								\
									\
	return ret c_op val;						\
}

PERCPU_OP(add, add, +)
PERCPU_OP(and, and, &)
PERCPU_OP(or, or, |)
#undef PERCPU_OP

static inline unsigned long __percpu_read(void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		__asm__ __volatile__ ("ldx.b %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__ ("ldx.h %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__ ("ldx.w %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__ ("ldx.d %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	default:
		ret = 0;
		BUILD_BUG();
	}

	return ret;
}

static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("stx.b %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__("stx.h %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__("stx.w %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__("stx.d %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	default:
		BUILD_BUG();
	}
}

static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
						int size)
{
	switch (size) {
	case 4:
		return __xchg_asm("amswap.w", (volatile u32 *)ptr, (u32)val);

	case 8:
		return __xchg_asm("amswap.d", (volatile u64 *)ptr, (u64)val);

	default:
		BUILD_BUG();
	}

	return 0;
}

/* this_cpu_cmpxchg */
#define _protect_cmpxchg_local(pcp, o, n)			\
({								\
	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
	preempt_disable_notrace();				\
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);	\
	preempt_enable_notrace();				\
	__ret;							\
})

#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	__retval = (typeof(pcp))__percpu_read(&(pcp), sizeof(pcp));	\
	__retval;							\
})

#define _percpu_write(pcp, val)						\
do {									\
	__percpu_write(&(pcp), (unsigned long)(val), sizeof(pcp));	\
} while (0)
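
/*
 * The wrapper below disables preemption around the AMO-based helpers above,
 * so that the raw_cpu_ptr() address passed in and the atomic operation on it
 * both refer to the same CPU's per-CPU data.
 */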
#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable_notrace();				\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable_notrace();				\
	__retval;						\
})

#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) ((typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val)))

#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */
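
/*
 * Illustrative usage sketch (the counter name below is made up for
 * illustration): generic kernel code reaches these definitions through the
 * this_cpu_*() accessors from <linux/percpu-defs.h>, which resolve here to
 * r21-relative loads/stores and AM* atomic operations.
 *
 *	DEFINE_PER_CPU(unsigned long, demo_count);
 *
 *	static void demo_tick(void)
 *	{
 *		this_cpu_add(demo_count, 1);		// this_cpu_add_8 -> amadd.d
 *		pr_debug("%lu\n", this_cpu_read(demo_count));	// this_cpu_read_8 -> ldx.d
 *	}
 */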