/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not strictly guarantee a memory barrier;
 * use smp_mb__{before,after}_atomic() where ordering is required.
 */

/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does imply a full memory barrier.
 */

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c;							\
}

#else

#include <linux/irqflags.h>

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_SMP */
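
/*
 * Illustrative sketch (not part of the original header): under CONFIG_SMP,
 * ATOMIC_OP_RETURN(add, +) below expands to roughly the following cmpxchg()
 * retry loop.  The loop re-reads the counter and retries until no other CPU
 * has modified it between the read and the cmpxchg().
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = v->counter;
 *		while ((old = cmpxchg(&v->counter, c, c + i)) != c)
 *			c = old;
 *
 *		return c + i;
 *	}
 */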

#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#endif

#ifndef atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#endif

#ifndef atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#endif

#ifndef atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#endif

#ifndef atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)
#endif

#ifndef atomic_and
ATOMIC_OP(and, &)
#endif

#ifndef atomic_or
ATOMIC_OP(or, |)
#endif

#ifndef atomic_xor
ATOMIC_OP(xor, ^)
#endif

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)	READ_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))

#include <linux/irqflags.h>

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))

#endif /* __ASM_GENERIC_ATOMIC_H */
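
/*
 * Usage sketch (illustrative only; not part of this header): a minimal
 * reference-count style pattern built from the operations defined above.
 * The names nr_users, grab_slot() and release_slot() are hypothetical;
 * release_slot() returns non-zero once the last user is gone.  atomic_add()
 * is not guaranteed to imply a memory barrier, so pair it with
 * smp_mb__{before,after}_atomic() where ordering matters, whereas
 * atomic_sub_return() implies a full barrier.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	static void grab_slot(void)
 *	{
 *		atomic_add(1, &nr_users);
 *	}
 *
 *	static int release_slot(void)
 *	{
 *		return atomic_sub_return(1, &nr_users) == 0;
 *	}
 */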