/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_LLSC_H
#define __ASM_SH_BITOPS_LLSC_H

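/*
 * Atomic bitops for CPUs with LL/SC (movli.l/movco.l) instructions.
 *
 * movli.l loads a word and opens an atomic sequence; movco.l stores
 * the modified word back only if that sequence was not interrupted,
 * setting the T bit on success. "bf 1b" therefore retries the whole
 * read-modify-write until the conditional store succeeds.
 */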
static inline void set_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! set_bit \n\t"
		"or %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}

static inline void clear_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! clear_bit \n\t"
		"and %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (~mask)
		: "t", "memory"
	);
}

static inline void change_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! change_bit \n\t"
		"xor %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}

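/*
 * The test_and_* variants snapshot the old word ("mov %0, %1") inside
 * the LL/SC sequence, then mask the snapshot with the bit of interest
 * after the loop so the caller gets the bit's previous value.
 */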
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_set_bit \n\t"
		"mov %0, %1 \n\t"
		"or %3, %0 \n\t"
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}

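/*
 * Here both mask and ~mask are passed in: "and %4, %0" clears the bit
 * in the word being stored, while "and %3, %1" extracts the old value
 * of the bit for the return value. "synco" orders the update against
 * subsequent memory accesses.
 */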
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_clear_bit \n\t"
		"mov %0, %1 \n\t"
		"and %4, %0 \n\t"
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"
		"synco \n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask), "r" (~mask)
		: "t", "memory"
	);

	return retval != 0;
}

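/* As above, but "xor %3, %0" toggles the bit instead of clearing it. */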
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_change_bit \n\t"
		"mov %0, %1 \n\t"
		"xor %3, %0 \n\t"
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"
		"synco \n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}

#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_LLSC_H */