#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was performed (@v was not @u),
 * and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */
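
/*
 * Usage sketch: a minimal, hypothetical reference-counted object built on
 * the primitives above, shown only to illustrate the intended semantics of
 * atomic_inc_not_zero() and atomic_dec_and_test(). "struct foo", foo_get(),
 * foo_put() and the kfree() call are illustrative assumptions, not APIs
 * provided by this header.
 *
 *	struct foo {
 *		atomic_t refcount;
 *	};
 *
 *	static inline int foo_get(struct foo *f)
 *	{
 *		// Fails (returns 0) once the count has already dropped to
 *		// zero, so a dying object is never resurrected.
 *		return atomic_inc_not_zero(&f->refcount);
 *	}
 *
 *	static inline void foo_put(struct foo *f)
 *	{
 *		// Only the caller that drops the final reference frees it.
 *		if (atomic_dec_and_test(&f->refcount))
 *			kfree(f);
 *	}
 */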