1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
3 #define _ASM_GENERIC_BITOPS_ATOMIC_H_
4 
5 #include <linux/atomic.h>
6 #include <linux/compiler.h>
7 #include <asm/barrier.h>
8 
9 /*
10  * Implementation of atomic bitops using atomic-fetch ops.
11  * See Documentation/atomic_bitops.txt for details.
12  */
13 
14 static __always_inline void
15 arch_set_bit(unsigned int nr, volatile unsigned long *p)
16 {
17 	p += BIT_WORD(nr);
18 	arch_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
19 }
20 
21 static __always_inline void
22 arch_clear_bit(unsigned int nr, volatile unsigned long *p)
23 {
24 	p += BIT_WORD(nr);
25 	arch_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
26 }
27 
28 static __always_inline void
29 arch_change_bit(unsigned int nr, volatile unsigned long *p)
30 {
31 	p += BIT_WORD(nr);
32 	arch_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
33 }
34 
/*
 * arch_test_and_set_bit - Atomically set bit @nr and return its old value.
 *
 * Returns 1 if the bit was already set, 0 if this call set it.
 *
 * Fast path: a plain READ_ONCE() is tried first; if the bit is already
 * set we return 1 without issuing the atomic RMW at all. This is an
 * intentional optimisation — when the operation cannot change the bit,
 * the unordered early return is permitted (see the ordering rules in
 * Documentation/atomic_bitops.txt, referenced at the top of this file).
 * Full ordering is only provided on the slow path that actually
 * performs the fetch_or.
 */
static __always_inline int
arch_test_and_set_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/* Relaxed peek: skip the expensive atomic if the bit is set. */
	if (READ_ONCE(*p) & mask)
		return 1;

	/* Atomic RMW; 'old' holds the word's value before the OR. */
	old = arch_atomic_long_fetch_or(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
48 
/*
 * arch_test_and_clear_bit - Atomically clear bit @nr and return its old value.
 *
 * Returns 1 if the bit was set (and this call cleared it), 0 if it was
 * already clear.
 *
 * Fast path: a plain READ_ONCE() is tried first; if the bit is already
 * clear we return 0 without issuing the atomic RMW. As with
 * arch_test_and_set_bit(), the unordered early return is allowed when
 * the operation cannot change the bit (see
 * Documentation/atomic_bitops.txt). Full ordering is only provided on
 * the slow path that performs the fetch_andnot.
 */
static __always_inline int
arch_test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/* Relaxed peek: skip the expensive atomic if the bit is clear. */
	if (!(READ_ONCE(*p) & mask))
		return 0;

	/* Atomic RMW; 'old' holds the word's value before the AND-NOT. */
	old = arch_atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
62 
63 static __always_inline int
64 arch_test_and_change_bit(unsigned int nr, volatile unsigned long *p)
65 {
66 	long old;
67 	unsigned long mask = BIT_MASK(nr);
68 
69 	p += BIT_WORD(nr);
70 	old = arch_atomic_long_fetch_xor(mask, (atomic_long_t *)p);
71 	return !!(old & mask);
72 }
73 
74 #include <asm-generic/bitops/instrumented-atomic.h>
75 
#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */
77