/*
 * Generic C implementation of atomic counter operations
 * Originally implemented for MN10300.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#ifdef CONFIG_SMP
#error not SMP safe
#endif

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	(*(volatile int *)&(v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) (((v)->counter) = (i))
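
/*
 * Example (minimal sketch): a counter is declared with ATOMIC_INIT() and
 * then inspected or overwritten with atomic_read()/atomic_set().  The
 * name "pkt_count" and the values used are hypothetical.
 *
 *	static atomic_t pkt_count = ATOMIC_INIT(0);
 *
 *	atomic_set(&pkt_count, 10);
 *	pr_info("packets: %d\n", atomic_read(&pkt_count));
 */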

#include <linux/irqflags.h>
#include <asm/system.h>

/**
 * atomic_add_return - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the result
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int temp;

	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
	temp = v->counter;
	temp += i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	return temp;
}

/**
 * atomic_sub_return - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns the result
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int temp;

	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
	temp = v->counter;
	temp -= i;
	v->counter = temp;
	raw_local_irq_restore(flags);

	return temp;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
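
/*
 * Example (minimal sketch): the reference-count pattern these helpers are
 * typically used for.  The struct "foo" and the functions foo_get()/foo_put()
 * are hypothetical.
 *
 *	struct foo {
 *		atomic_t refcnt;
 *	};
 *
 *	static void foo_get(struct foo *f)
 *	{
 *		atomic_inc(&f->refcnt);
 *	}
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			kfree(f);
 *	}
 */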

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))

#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c;
}

/**
 * atomic_clear_mask - clear bits in memory
 * @mask: bits to clear
 * @addr: pointer to the word to modify
 *
 * Atomically clears the bits set in @mask from the word pointed to by @addr.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	mask = ~mask;
	raw_local_irq_save(flags); /* Don't trace it in an irqsoff handler */
	*addr &= mask;
	raw_local_irq_restore(flags);
}

/* Assume that atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic-long.h>

#endif /* __KERNEL__ */
#endif /* __ASM_GENERIC_ATOMIC_H */
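
/*
 * Example (minimal sketch): the generic <linux/atomic.h> layer typically
 * builds atomic_add_unless() and atomic_inc_not_zero() on top of
 * __atomic_add_unless(), roughly as follows:
 *
 *	static inline int atomic_add_unless(atomic_t *v, int a, int u)
 *	{
 *		return __atomic_add_unless(v, a, u) != u;
 *	}
 *
 *	#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 */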