/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_OP32_H
#define __ASM_SH_BITOPS_OP32_H

/*
 * The bit modifying instructions on SH-2A are only capable of working
 * with a 3-bit immediate, which signifies the shift position for the bit
 * being worked on.
 */
#if defined(__BIG_ENDIAN)
#define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)
#define BYTE_NUMBER(nr)		((nr ^ BITOP_LE_SWIZZLE) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		((nr ^ BITOP_LE_SWIZZLE) % BITS_PER_BYTE)
#else
#define BYTE_NUMBER(nr)		((nr) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		((nr) % BITS_PER_BYTE)
#endif

#define IS_IMMEDIATE(nr)	(__builtin_constant_p(nr))

/**
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Unlike set_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __set_bit(int nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		__asm__ __volatile__ (
			"bset.b %1, @(%O2,%0)	! __set_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)), "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p |= mask;
	}
}

/**
 * __clear_bit - Clears a bit in memory
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Unlike clear_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __clear_bit(int nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		__asm__ __volatile__ (
			"bclr.b %1, @(%O2,%0)	! __clear_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)),
			  "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p &= ~mask;
	}
}

/**
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Unlike change_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __change_bit(int nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		__asm__ __volatile__ (
			"bxor.b %1, @(%O2,%0)	! __change_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)),
			  "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p ^= mask;
	}
}

/**
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old | mask;
	return (old & mask) != 0;
}

/**
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old & ~mask;
	return (old & mask) != 0;
}
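/*
 * Note on the non-immediate fall-back paths in this file: BIT_MASK() and
 * BIT_WORD() come from the generic kernel bitops headers and split @nr
 * into an in-word mask and a word index.  As a worked example, on a
 * 32-bit SH build nr = 35 gives BIT_WORD(35) == 1 and
 * BIT_MASK(35) == 1UL << 3, i.e. bit 3 of the second word of the bitmap.
 */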
/* WARNING: non-atomic and it can be reordered! */
static inline int __test_and_change_bit(int nr,
					volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old ^ mask;
	return (old & mask) != 0;
}

/**
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
static inline int test_bit(int nr, const volatile unsigned long *addr)
{
	return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
}

#endif /* __ASM_SH_BITOPS_OP32_H */
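/*
 * Illustrative usage sketch (not part of this header): the helpers above
 * operate on an array of unsigned long, with @nr counted from bit 0 of
 * the first word.  A hypothetical caller serialising access with its own
 * lock might look like the snippet below; "flags", "flags_lock" and
 * set_flag_once() are made-up names for the example only.
 *
 *	static unsigned long flags[BITS_TO_LONGS(64)];
 *	static DEFINE_SPINLOCK(flags_lock);
 *
 *	/\* Returns true if this call was the one that set the bit. *\/
 *	static bool set_flag_once(int nr)
 *	{
 *		bool was_set;
 *
 *		spin_lock(&flags_lock);
 *		was_set = __test_and_set_bit(nr, flags);
 *		spin_unlock(&flags_lock);
 *
 *		return !was_set;
 *	}
 */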