/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H

/*
 * set_bit - atomically set bit @nr in the bitmap at @addr.
 * @nr:   bit index; bits 5+ select the 32-bit word, bits 0-4 the bit.
 * @addr: base of the bitmap (accessed in 32-bit words).
 *
 * Atomicity comes from the SH "gUSA" (g User Space Atomicity) software
 * scheme rather than a hardware primitive: r0 is set to the address of
 * the end label ("1:") and r15 to minus the byte length of the critical
 * sequence.  If the kernel interrupts a task while r15 is negative, it
 * restarts execution at (r0 + r15), i.e. re-runs the whole
 * load/modify/store sequence, making it effectively atomic.
 */
static inline void set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	/* Step to the word containing bit nr, build a single-bit mask. */
	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point of the atomic sequence */
		" mov r15, r1 \n\t" /* r1 = saved sp (r15 is reused as flag) */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = -(size): 3 insns x 2 bytes */
		" mov.l @%1, %0 \n\t" /* load old value */
		" or %2, %0 \n\t" /* set the bit */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp, sequence done */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1");
}
28f15cbe6fSPaul Mundt
/*
 * clear_bit - atomically clear bit @nr in the bitmap at @addr.
 * @nr:   bit index; bits 5+ select the 32-bit word, bits 0-4 the bit.
 * @addr: base of the bitmap (accessed in 32-bit words).
 *
 * Same gUSA restartable-sequence protocol as set_bit(); the only
 * difference is that the mask is pre-inverted in C so the asm body can
 * use a plain "and" to clear the bit.
 */
static inline void clear_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	/* Inverted mask: all bits set except the one to clear. */
	mask = ~(1 << (nr & 0x1f));
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point of the atomic sequence */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = -(size): 3 insns x 2 bytes */
		" mov.l @%1, %0 \n\t" /* load old value */
		" and %2, %0 \n\t" /* clear the bit */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1");
}
51f15cbe6fSPaul Mundt
/*
 * change_bit - atomically toggle bit @nr in the bitmap at @addr.
 * @nr:   bit index; bits 5+ select the 32-bit word, bits 0-4 the bit.
 * @addr: base of the bitmap (accessed in 32-bit words).
 *
 * Same gUSA restartable-sequence protocol as set_bit(), with "xor"
 * as the modify step.
 */
static inline void change_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point of the atomic sequence */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = -(size): 3 insns x 2 bytes */
		" mov.l @%1, %0 \n\t" /* load old value */
		" xor %2, %0 \n\t" /* toggle the bit */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp),
		  "+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1");
}
74f15cbe6fSPaul Mundt
/*
 * test_and_set_bit - atomically set bit @nr and return its old value.
 * @nr:   bit index; bits 5+ select the 32-bit word, bits 0-4 the bit.
 * @addr: base of the bitmap (accessed in 32-bit words).
 *
 * Returns 1 if the bit was previously set, 0 if it was clear.
 *
 * gUSA sequence as in set_bit(), but the critical region is longer
 * (7 insns = 14 bytes, hence "#-14") because the old value is also
 * converted to a 0/1 return:
 *   tst  sets T = ((old & mask) == 0);
 *   mov #-1 / negc computes retval = 0 - (-1) - T = 1 - T,
 * i.e. retval = 1 exactly when the bit was already set.
 */
static inline int test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point of the atomic sequence */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-14, r15 \n\t" /* LOGIN: r15 = -(size): 7 insns x 2 bytes */
		" mov.l @%2, %0 \n\t" /* load old value */
		" mov %0, %1 \n\t" /* copy for the test */
		" tst %1, %3 \n\t" /* T = ((*a & mask) == 0) */
		" mov #-1, %1 \n\t" /* retval = -1 */
		" negc %1, %1 \n\t" /* retval = 1 - T = ((*a & mask) != 0) */
		" or %3, %0 \n\t" /* set the bit */
		" mov.l %0, @%2 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1" ,"t");

	return retval;
}
105f15cbe6fSPaul Mundt
/*
 * test_and_clear_bit - atomically clear bit @nr and return its old value.
 * @nr:   bit index; bits 5+ select the 32-bit word, bits 0-4 the bit.
 * @addr: base of the bitmap (accessed in 32-bit words).
 *
 * Returns 1 if the bit was previously set, 0 if it was clear.
 *
 * Same 14-byte gUSA sequence as test_and_set_bit().  Both the mask
 * (for the tst) and its complement (for the and) are needed, so the
 * complement is computed in C and passed as a second input operand %4.
 */
static inline int test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval,not_mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	not_mask = ~mask;

	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point of the atomic sequence */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-14, r15 \n\t" /* LOGIN: r15 = -(size): 7 insns x 2 bytes */
		" mov.l @%2, %0 \n\t" /* load old value */
		" mov %0, %1 \n\t" /* %1 = *a */
		" tst %1, %3 \n\t" /* T = ((*a & mask) == 0) */
		" mov #-1, %1 \n\t" /* retval = -1 */
		" negc %1, %1 \n\t" /* retval = 1 - T = ((*a & mask) != 0) */
		" and %4, %0 \n\t" /* clear the bit via ~mask */
		" mov.l %0, @%2 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask),
		  "r" (not_mask)
		: "memory" , "r0", "r1", "t");

	return retval;
}
139f15cbe6fSPaul Mundt
/*
 * test_and_change_bit - atomically toggle bit @nr and return its old value.
 * @nr:   bit index; bits 5+ select the 32-bit word, bits 0-4 the bit.
 * @addr: base of the bitmap (accessed in 32-bit words).
 *
 * Returns 1 if the bit was previously set, 0 if it was clear.
 *
 * Same 14-byte gUSA sequence as test_and_set_bit(), with "xor" as the
 * modify step (the same mask serves both the test and the toggle).
 */
static inline int test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point of the atomic sequence */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-14, r15 \n\t" /* LOGIN: r15 = -(size): 7 insns x 2 bytes */
		" mov.l @%2, %0 \n\t" /* load old value */
		" mov %0, %1 \n\t" /* %1 = *a */
		" tst %1, %3 \n\t" /* T = ((*a & mask) == 0) */
		" mov #-1, %1 \n\t" /* retval = -1 */
		" negc %1, %1 \n\t" /* retval = 1 - T = ((*a & mask) != 0) */
		" xor %3, %0 \n\t" /* toggle the bit */
		" mov.l %0, @%2 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r" (a)
		: "r" (mask)
		: "memory" , "r0", "r1", "t");

	return retval;
}

#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_GRB_H */