/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_OP32_H
#define __ASM_SH_BITOPS_OP32_H

/*
 * The bit modifying instructions on SH-2A are only capable of working
 * with a 3-bit immediate, which signifies the shift position for the bit
 * being worked on.
 */
#if defined(__BIG_ENDIAN)
#define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)
#define BYTE_NUMBER(nr)		((nr ^ BITOP_LE_SWIZZLE) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		((nr ^ BITOP_LE_SWIZZLE) % BITS_PER_BYTE)
#else
#define BYTE_NUMBER(nr)		((nr) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		((nr) % BITS_PER_BYTE)
#endif

/**
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __set_bit(int nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		__asm__ __volatile__ (
			"bset.b %1, @(%O2,%0)	! __set_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)), "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p |= mask;
	}
}

/**
 * __clear_bit - Clear a bit in memory
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Unlike clear_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __clear_bit(int nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		__asm__ __volatile__ (
			"bclr.b %1, @(%O2,%0)	! __clear_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)),
			  "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p &= ~mask;
	}
}

/**
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Unlike change_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __change_bit(int nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		__asm__ __volatile__ (
			"bxor.b %1, @(%O2,%0)	! __change_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)),
			  "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p ^= mask;
	}
}

/**
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old | mask;
	return (old & mask) != 0;
}

/**
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old & ~mask;
	return (old & mask) != 0;
}

/**
 * __test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * WARNING: This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_change_bit(int nr,
					volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old ^ mask;
	return (old & mask) != 0;
}

/**
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
static inline int test_bit(int nr, const volatile unsigned long *addr)
{
	return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
}

#endif /* __ASM_SH_BITOPS_OP32_H */