/* arch/sh/include/asm/bitops-grb.h */
#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H

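/*
 * Bit operations implemented with SH "gUSA" style restartable atomic
 * sequences (the register-bank flavour, hence "-grb").
 *
 * Each read-modify-write below is bracketed by a LOGIN/LOGOUT pair:
 * r0 is pointed at the end label, r1 saves the stack pointer, and r15
 * is loaded with the negative length (in bytes) of the critical
 * section.  A negative r15 tells the exception entry path that an
 * atomic sequence is in progress, so an interrupted sequence is rolled
 * back to its start and re-executed instead of being resumed.  That
 * makes the load/modify/store appear atomic without masking
 * interrupts.  (Descriptive summary only; the rollback itself lives in
 * the SH exception entry code.)
 */
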
static inline void set_bit(int nr, volatile void * addr)
{
	int	mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

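	/*
	 * nr selects a 32-bit word (nr >> 5) and a bit within that word
	 * (nr & 0x1f): e.g. nr == 37 is bit 5 of the second word.
	 */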
	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   or      %2,   %0      \n\t" /* or */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory", "r0", "r1");
}

static inline void clear_bit(int nr, volatile void * addr)
{
	int	mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = ~(1 << (nr & 0x1f));

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   and     %2,   %0      \n\t" /* and */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory", "r0", "r1");
}

static inline void change_bit(int nr, volatile void * addr)
{
	int	mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   xor     %2,   %0      \n\t" /* xor */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory", "r0", "r1");
}

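/*
 * The test_and_*() variants below recover the old bit value with a
 * tst/negc pair: "tst" sets T when (old & mask) == 0, and "negc" of a
 * register holding -1 then yields 0 - (-1) - T = 1 - T, i.e. 1 if the
 * bit was set and 0 if it was clear.
 */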
static inline int test_and_set_bit(int nr, volatile void * addr)
{
	int	mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t"
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   or      %3,   %0      \n\t"
		"   mov.l   %0,  @%2      \n\t" /* store new value */
		"1: mov     r1,  r15      \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

static inline int test_and_clear_bit(int nr, volatile void * addr)
{
	int	mask, retval, not_mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

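	/*
	 * The complement is computed up front so no extra instruction is
	 * needed inside the LOGIN/LOGOUT region.
	 */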
	not_mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t" /* %1 = *a */
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   and     %4,   %0      \n\t"
		"   mov.l   %0,  @%2      \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask),
		  "r"   (not_mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

static inline int test_and_change_bit(int nr, volatile void * addr)
{
	int	mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov   #-14,   r15     \n\t" /* LOGIN */
		"   mov.l  @%2,   %0      \n\t" /* load old value */
		"   mov     %0,   %1      \n\t" /* %1 = *a */
		"   tst     %1,   %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov    #-1,   %1      \n\t" /* retval = -1 */
		"   negc    %1,   %1      \n\t" /* retval = (mask & *a) != 0 */
		"   xor     %3,   %0      \n\t"
		"   mov.l   %0,  @%2      \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask)
		: "memory", "r0", "r1", "t");

	return retval;
}

#include <asm-generic/bitops/non-atomic.h>
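
/*
 * Usage sketch (illustrative only, not part of this header):
 *
 *	static unsigned long flags[BITS_TO_LONGS(64)];
 *
 *	set_bit(3, flags);
 *	if (test_and_clear_bit(3, flags))
 *		...    (bit 3 was set, and has now been cleared)
 *
 * The non-atomic __set_bit()/__test_and_set_bit() variants are pulled
 * in from <asm-generic/bitops/non-atomic.h> above.
 */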

#endif /* __ASM_SH_BITOPS_GRB_H */