1171d809dSGreg Ungerer #ifndef _M68K_BITOPS_H
2171d809dSGreg Ungerer #define _M68K_BITOPS_H
3171d809dSGreg Ungerer /*
4171d809dSGreg Ungerer * Copyright 1992, Linus Torvalds.
5171d809dSGreg Ungerer *
6171d809dSGreg Ungerer * This file is subject to the terms and conditions of the GNU General Public
7171d809dSGreg Ungerer * License. See the file COPYING in the main directory of this archive
8171d809dSGreg Ungerer * for more details.
9171d809dSGreg Ungerer */
10171d809dSGreg Ungerer
11171d809dSGreg Ungerer #ifndef _LINUX_BITOPS_H
12171d809dSGreg Ungerer #error only <linux/bitops.h> can be included directly
1349148020SSam Ravnborg #endif
14171d809dSGreg Ungerer
15171d809dSGreg Ungerer #include <linux/compiler.h>
162db56e86SPeter Zijlstra #include <asm/barrier.h>
17171d809dSGreg Ungerer
18171d809dSGreg Ungerer /*
19171d809dSGreg Ungerer * Bit access functions vary across the ColdFire and 68k families.
20171d809dSGreg Ungerer * So we will break them out here, and then macro in the ones we want.
21171d809dSGreg Ungerer *
22171d809dSGreg Ungerer * ColdFire - supports standard bset/bclr/bchg with register operand only
23171d809dSGreg Ungerer * 68000 - supports standard bset/bclr/bchg with memory operand
24171d809dSGreg Ungerer * >= 68020 - also supports the bfset/bfclr/bfchg instructions
25171d809dSGreg Ungerer *
26171d809dSGreg Ungerer * Although it is possible to use only the bset/bclr/bchg with register
27171d809dSGreg Ungerer * operands on all platforms you end up with larger generated code.
28171d809dSGreg Ungerer * So we use the best form possible on a given platform.
29171d809dSGreg Ungerer */
30171d809dSGreg Ungerer
/*
 * set_bit() using the register-operand form of "bset" (the only form
 * ColdFire supports).  The byte holding bit nr within the big-endian
 * long is at offset (nr ^ 31) / 8, and bset addresses bits 0..7
 * within that byte, hence (nr & 7).
 */
static inline void bset_reg_set_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	/* Address must sit in an address register ("a" constraint). */
	__asm__ __volatile__ ("bset %1,(%0)"
		:
		: "a" (p), "di" (nr & 7)
		: "memory");
}

/*
 * set_bit() using "bset" with a memory operand (68000 and up); same
 * byte/bit derivation as the register-operand form above.
 */
static inline void bset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bset %1,%0"
		: "+m" (*p)
		: "di" (nr & 7));
}

/*
 * set_bit() using the 68020+ "bfset" bitfield instruction on a 1-bit
 * field.  Bitfield offsets count from the most significant bit, so the
 * bit number is complemented with (nr ^ 31).
 */
static inline void bfset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
	__asm__ __volatile__ ("bfset %1{%0:#1}"
		:
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
}

/*
 * Pick the best set_bit() form for the configured CPU (see the comment
 * at the top of this file).  On 68020+ a constant nr still uses the
 * plain bset form, which generates smaller code.
 */
#if defined(CONFIG_COLDFIRE)
#define set_bit(nr, vaddr)	bset_reg_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define set_bit(nr, vaddr)	bset_mem_set_bit(nr, vaddr)
#else
#define set_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
				bset_mem_set_bit(nr, vaddr) : \
				bfset_mem_set_bit(nr, vaddr))
#endif

/* Non-atomic variant: on m68k the same instruction forms are used. */
static __always_inline void
arch___set_bit(unsigned long nr, volatile unsigned long *addr)
{
	set_bit(nr, addr);
}
73171d809dSGreg Ungerer
/*
 * clear_bit() using the register-operand form of "bclr" (ColdFire).
 * Byte offset (nr ^ 31) / 8 selects the byte of the big-endian long
 * holding bit nr; (nr & 7) is the bit number within that byte.
 */
static inline void bclr_reg_clear_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bclr %1,(%0)"
		:
		: "a" (p), "di" (nr & 7)
		: "memory");
}

/* clear_bit() using "bclr" with a memory operand (68000 and up). */
static inline void bclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bclr %1,%0"
		: "+m" (*p)
		: "di" (nr & 7));
}

/*
 * clear_bit() using the 68020+ "bfclr" bitfield instruction on a 1-bit
 * field; bitfield offsets count from the MSB, hence (nr ^ 31).
 */
static inline void bfclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
	__asm__ __volatile__ ("bfclr %1{%0:#1}"
		:
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
}

/* Select the best clear_bit() form, mirroring the set_bit() choice. */
#if defined(CONFIG_COLDFIRE)
#define clear_bit(nr, vaddr)	bclr_reg_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define clear_bit(nr, vaddr)	bclr_mem_clear_bit(nr, vaddr)
#else
#define clear_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
				bclr_mem_clear_bit(nr, vaddr) : \
				bfclr_mem_clear_bit(nr, vaddr))
#endif

/* Non-atomic variant: on m68k the same instruction forms are used. */
static __always_inline void
arch___clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	clear_bit(nr, addr);
}
116171d809dSGreg Ungerer
/*
 * change_bit() using the register-operand form of "bchg" (ColdFire).
 * Same byte/bit derivation as the set_bit()/clear_bit() helpers.
 */
static inline void bchg_reg_change_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bchg %1,(%0)"
		:
		: "a" (p), "di" (nr & 7)
		: "memory");
}

/* change_bit() using "bchg" with a memory operand (68000 and up). */
static inline void bchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;

	__asm__ __volatile__ ("bchg %1,%0"
		: "+m" (*p)
		: "di" (nr & 7));
}

/*
 * change_bit() using the 68020+ "bfchg" bitfield instruction on a
 * 1-bit field; bitfield offsets count from the MSB, hence (nr ^ 31).
 */
static inline void bfchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
	__asm__ __volatile__ ("bfchg %1{%0:#1}"
		:
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
}

/* Select the best change_bit() form, mirroring the set_bit() choice. */
#if defined(CONFIG_COLDFIRE)
#define change_bit(nr, vaddr)	bchg_reg_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define change_bit(nr, vaddr)	bchg_mem_change_bit(nr, vaddr)
#else
#define change_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
				bchg_mem_change_bit(nr, vaddr) : \
				bfchg_mem_change_bit(nr, vaddr))
#endif

/* Non-atomic variant: on m68k the same instruction forms are used. */
static __always_inline void
arch___change_bit(unsigned long nr, volatile unsigned long *addr)
{
	change_bit(nr, addr);
}

/* Reading a bit needs no special instruction; use the generic C code. */
#define arch_test_bit generic_test_bit
#define arch_test_bit_acquire generic_test_bit_acquire
162171d809dSGreg Ungerer
/*
 * test_and_set_bit() using the register-operand "bset" (ColdFire).
 * bset sets the Z condition code from the bit's OLD value; "sne"
 * materializes that into retval (non-zero iff the bit was set).
 */
static inline int bset_reg_test_and_set_bit(int nr,
					    volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bset %2,(%1); sne %0"
		: "=d" (retval)
		: "a" (p), "di" (nr & 7)
		: "memory");
	return retval;
}

/* test_and_set_bit() using "bset" with a memory operand (68000+). */
static inline int bset_mem_test_and_set_bit(int nr,
					    volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bset %2,%1; sne %0"
		: "=d" (retval), "+m" (*p)
		: "di" (nr & 7));
	return retval;
}

/*
 * test_and_set_bit() using the 68020+ "bfset" on a 1-bit field; the
 * old field value is reflected in the condition codes, captured by
 * "sne".  Bitfield offsets count from the MSB, hence (nr ^ 31).
 */
static inline int bfset_mem_test_and_set_bit(int nr,
					     volatile unsigned long *vaddr)
{
	char retval;

	__asm__ __volatile__ ("bfset %2{%1:#1}; sne %0"
		: "=d" (retval)
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
	return retval;
}

/* Select the best test_and_set_bit() form for the configured CPU. */
#if defined(CONFIG_COLDFIRE)
#define test_and_set_bit(nr, vaddr)	bset_reg_test_and_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_set_bit(nr, vaddr)	bset_mem_test_and_set_bit(nr, vaddr)
#else
#define test_and_set_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
					bset_mem_test_and_set_bit(nr, vaddr) : \
					bfset_mem_test_and_set_bit(nr, vaddr))
#endif

/* Non-atomic variant: on m68k the same instruction forms are used. */
static __always_inline bool
arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	return test_and_set_bit(nr, addr);
}
215171d809dSGreg Ungerer
/*
 * test_and_clear_bit() using the register-operand "bclr" (ColdFire).
 * "sne" captures the bit's OLD value from the condition codes.
 */
static inline int bclr_reg_test_and_clear_bit(int nr,
					      volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bclr %2,(%1); sne %0"
		: "=d" (retval)
		: "a" (p), "di" (nr & 7)
		: "memory");
	return retval;
}

/* test_and_clear_bit() using "bclr" with a memory operand (68000+). */
static inline int bclr_mem_test_and_clear_bit(int nr,
					      volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bclr %2,%1; sne %0"
		: "=d" (retval), "+m" (*p)
		: "di" (nr & 7));
	return retval;
}

/*
 * test_and_clear_bit() using the 68020+ "bfclr" on a 1-bit field;
 * bitfield offsets count from the MSB, hence (nr ^ 31).
 */
static inline int bfclr_mem_test_and_clear_bit(int nr,
					       volatile unsigned long *vaddr)
{
	char retval;

	__asm__ __volatile__ ("bfclr %2{%1:#1}; sne %0"
		: "=d" (retval)
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
	return retval;
}

/* Select the best test_and_clear_bit() form for the configured CPU. */
#if defined(CONFIG_COLDFIRE)
#define test_and_clear_bit(nr, vaddr)	bclr_reg_test_and_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_clear_bit(nr, vaddr)	bclr_mem_test_and_clear_bit(nr, vaddr)
#else
#define test_and_clear_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
					bclr_mem_test_and_clear_bit(nr, vaddr) : \
					bfclr_mem_test_and_clear_bit(nr, vaddr))
#endif

/* Non-atomic variant: on m68k the same instruction forms are used. */
static __always_inline bool
arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	return test_and_clear_bit(nr, addr);
}
268171d809dSGreg Ungerer
/*
 * test_and_change_bit() using the register-operand "bchg" (ColdFire).
 * "sne" captures the bit's OLD value from the condition codes.
 */
static inline int bchg_reg_test_and_change_bit(int nr,
					       volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bchg %2,(%1); sne %0"
		: "=d" (retval)
		: "a" (p), "di" (nr & 7)
		: "memory");
	return retval;
}

/* test_and_change_bit() using "bchg" with a memory operand (68000+). */
static inline int bchg_mem_test_and_change_bit(int nr,
					       volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + (nr ^ 31) / 8;
	char retval;

	__asm__ __volatile__ ("bchg %2,%1; sne %0"
		: "=d" (retval), "+m" (*p)
		: "di" (nr & 7));
	return retval;
}

/*
 * test_and_change_bit() using the 68020+ "bfchg" on a 1-bit field;
 * bitfield offsets count from the MSB, hence (nr ^ 31).
 */
static inline int bfchg_mem_test_and_change_bit(int nr,
						volatile unsigned long *vaddr)
{
	char retval;

	__asm__ __volatile__ ("bfchg %2{%1:#1}; sne %0"
		: "=d" (retval)
		: "d" (nr ^ 31), "o" (*vaddr)
		: "memory");
	return retval;
}

/* Select the best test_and_change_bit() form for the configured CPU. */
#if defined(CONFIG_COLDFIRE)
#define test_and_change_bit(nr, vaddr)	bchg_reg_test_and_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_change_bit(nr, vaddr)	bchg_mem_test_and_change_bit(nr, vaddr)
#else
#define test_and_change_bit(nr, vaddr)	(__builtin_constant_p(nr) ? \
					bchg_mem_test_and_change_bit(nr, vaddr) : \
					bfchg_mem_test_and_change_bit(nr, vaddr))
#endif

/* Non-atomic variant: on m68k the same instruction forms are used. */
static __always_inline bool
arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
	return test_and_change_bit(nr, addr);
}
321171d809dSGreg Ungerer
322171d809dSGreg Ungerer /*
323171d809dSGreg Ungerer * The true 68020 and more advanced processors support the "bfffo"
324171d809dSGreg Ungerer * instruction for finding bits. ColdFire and simple 68000 parts
325171d809dSGreg Ungerer * (including CPU32) do not support this. They simply use the generic
326171d809dSGreg Ungerer * functions.
327171d809dSGreg Ungerer */
328171d809dSGreg Ungerer #if defined(CONFIG_CPU_HAS_NO_BITFIELDS)
329171d809dSGreg Ungerer #include <asm-generic/bitops/ffz.h>
330171d809dSGreg Ungerer #else
331171d809dSGreg Ungerer
/*
 * Find the first zero bit in a bitmap of 'size' bits, using the
 * 68020+ "bfffo" (find first one in bitfield) instruction on the
 * complemented word.  Returns 'size' if no zero bit is found, and 0
 * for a zero-length bitmap.
 */
static inline int find_first_zero_bit(const unsigned long *vaddr,
				      unsigned size)
{
	const unsigned long *p = vaddr;
	int res = 32;		/* stays 32 when every word is all-ones */
	unsigned int words;
	unsigned long num;

	if (!size)
		return 0;

	/* Skip all-ones words (their complement is zero). */
	words = (size + 31) >> 5;
	while (!(num = ~*p++)) {
		if (!--words)
			goto out;
	}

	/*
	 * num & -num isolates the lowest set bit of the complement;
	 * bfffo counts bit positions from the MSB, so convert back to
	 * conventional numbering with res ^= 31.
	 */
	__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
			      : "=d" (res) : "d" (num & -num));
	res ^= 31;
out:
	/* p was post-incremented past the examined word, hence the -4. */
	res += ((long)p - (long)vaddr - 4) * 8;
	return res < size ? res : size;
}
#define find_first_zero_bit find_first_zero_bit
357171d809dSGreg Ungerer
/*
 * Find the next zero bit at or after 'offset' in a bitmap of 'size'
 * bits.  Handles the partial first word inline, then defers to
 * find_first_zero_bit() for the remaining whole words.  Returns
 * 'size' if no zero bit is found.
 */
static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
				     int offset)
{
	const unsigned long *p = vaddr + (offset >> 5);
	int bit = offset & 31UL, res;

	if (offset >= size)
		return size;

	if (bit) {
		/* Complement the word and mask off bits below 'offset'. */
		unsigned long num = ~*p++ & (~0UL << bit);
		offset -= bit;	/* rewind to the word boundary */

		/* Look for zero in first longword */
		__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
				      : "=d" (res) : "d" (num & -num));
		if (res < 32) {
			/* bfffo counts from the MSB; res ^ 31 converts back. */
			offset += res ^ 31;
			return offset < size ? offset : size;
		}
		offset += 32;

		if (offset >= size)
			return size;
	}
	/* No zero yet, search remaining full bytes for a zero */
	return offset + find_first_zero_bit(p, size - offset);
}
#define find_next_zero_bit find_next_zero_bit
387171d809dSGreg Ungerer
/*
 * Find the first set bit in a bitmap of 'size' bits, using the 68020+
 * "bfffo" instruction.  Mirrors find_first_zero_bit() but scans for a
 * one instead of a zero.  Returns 'size' if no set bit is found, and
 * 0 for a zero-length bitmap.
 */
static inline int find_first_bit(const unsigned long *vaddr, unsigned size)
{
	const unsigned long *p = vaddr;
	int res = 32;		/* stays 32 when every word is zero */
	unsigned int words;
	unsigned long num;

	if (!size)
		return 0;

	/* Skip all-zero words. */
	words = (size + 31) >> 5;
	while (!(num = *p++)) {
		if (!--words)
			goto out;
	}

	/*
	 * num & -num isolates the lowest set bit; bfffo counts from the
	 * MSB, so convert back with res ^= 31.
	 */
	__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
			      : "=d" (res) : "d" (num & -num));
	res ^= 31;
out:
	/* p was post-incremented past the examined word, hence the -4. */
	res += ((long)p - (long)vaddr - 4) * 8;
	return res < size ? res : size;
}
#define find_first_bit find_first_bit
412171d809dSGreg Ungerer
/*
 * Find the next set bit at or after 'offset' in a bitmap of 'size'
 * bits.  Handles the partial first word inline, then defers to
 * find_first_bit() for the remaining whole words.  Returns 'size' if
 * no set bit is found.
 */
static inline int find_next_bit(const unsigned long *vaddr, int size,
				int offset)
{
	const unsigned long *p = vaddr + (offset >> 5);
	int bit = offset & 31UL, res;

	if (offset >= size)
		return size;

	if (bit) {
		/* Mask off bits below 'offset' in the first word. */
		unsigned long num = *p++ & (~0UL << bit);
		offset -= bit;	/* rewind to the word boundary */

		/* Look for one in first longword */
		__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
				      : "=d" (res) : "d" (num & -num));
		if (res < 32) {
			/* bfffo counts from the MSB; res ^ 31 converts back. */
			offset += res ^ 31;
			return offset < size ? offset : size;
		}
		offset += 32;

		if (offset >= size)
			return size;
	}
	/* No one yet, search remaining full bytes for a one */
	return offset + find_first_bit(p, size - offset);
}
#define find_next_bit find_next_bit
442171d809dSGreg Ungerer
/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first..
 */
static inline unsigned long ffz(unsigned long word)
{
	int res;

	/*
	 * ~word & -~word isolates the lowest zero bit of word; bfffo
	 * counts positions from the MSB, so res ^ 31 converts back to
	 * conventional bit numbering.
	 */
	__asm__ __volatile__ ("bfffo %1{#0,#0},%0"
			      : "=d" (res) : "d" (~word & -~word));
	return res ^ 31;
}
455171d809dSGreg Ungerer
456171d809dSGreg Ungerer #endif
457171d809dSGreg Ungerer
458171d809dSGreg Ungerer #ifdef __KERNEL__
459171d809dSGreg Ungerer
460171d809dSGreg Ungerer #if defined(CONFIG_CPU_HAS_NO_BITFIELDS)
461171d809dSGreg Ungerer
462171d809dSGreg Ungerer /*
463171d809dSGreg Ungerer * The newer ColdFire family members support a "bitrev" instruction
464171d809dSGreg Ungerer * and we can use that to implement a fast ffs. Older Coldfire parts,
465171d809dSGreg Ungerer * and normal 68000 parts don't have anything special, so we use the
466171d809dSGreg Ungerer * generic functions for those.
467171d809dSGreg Ungerer */
468171d809dSGreg Ungerer #if (defined(__mcfisaaplus__) || defined(__mcfisac__)) && \
4696dbe88e9SGeert Uytterhoeven !defined(CONFIG_M68000)
/*
 * __ffs() for newer ColdFire parts: "bitrev" reverses the bit order
 * so that "ff1" (find first one, counting from the MSB) yields the
 * index of the lowest set bit.  Result is undefined for x == 0, per
 * the usual __ffs() contract.
 */
static inline unsigned long __ffs(unsigned long x)
{
	__asm__ __volatile__ ("bitrev %0; ff1 %0"
		: "=d" (x)
		: "0" (x));
	return x;
}
477171d809dSGreg Ungerer
/*
 * ffs() on top of the bitrev/ff1 based __ffs(): libc convention,
 * 1-based result with ffs(0) == 0.
 */
static inline int ffs(int x)
{
	return x ? __ffs(x) + 1 : 0;
}
484171d809dSGreg Ungerer
485171d809dSGreg Ungerer #else
486171d809dSGreg Ungerer #include <asm-generic/bitops/ffs.h>
487171d809dSGreg Ungerer #include <asm-generic/bitops/__ffs.h>
488171d809dSGreg Ungerer #endif
489171d809dSGreg Ungerer
490171d809dSGreg Ungerer #include <asm-generic/bitops/fls.h>
491171d809dSGreg Ungerer #include <asm-generic/bitops/__fls.h>
492171d809dSGreg Ungerer
493171d809dSGreg Ungerer #else
494171d809dSGreg Ungerer
/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int x)
{
	int cnt;

	/*
	 * x & -x isolates the lowest set bit; bfffo returns its
	 * position counted from the MSB (32 when x == 0), so
	 * 32 - cnt gives the 1-based result with ffs(0) == 0.
	 */
	__asm__ ("bfffo %1{#0:#0},%0"
		: "=d" (cnt)
		: "dm" (x & -x));
	return 32 - cnt;
}
509384052e4SMike Rapoport
/* 0-based companion of ffs(); result is undefined for x == 0. */
static inline unsigned long __ffs(unsigned long x)
{
	unsigned long first = ffs(x);

	return first - 1;
}
514171d809dSGreg Ungerer
/*
 * fls: find last bit set.
 */
static inline int fls(unsigned int x)
{
	int cnt;

	/*
	 * bfffo returns the position of the highest set bit counted
	 * from the MSB (32 when x == 0), so 32 - cnt yields the
	 * 1-based result with fls(0) == 0.
	 */
	__asm__ ("bfffo %1{#0,#0},%0"
		: "=d" (cnt)
		: "dm" (x));
	return 32 - cnt;
}
527171d809dSGreg Ungerer
/* 0-based companion of fls(); result is undefined for x == 0. */
static inline unsigned long __fls(unsigned long x)
{
	unsigned long last = fls(x);

	return last - 1;
}
532171d809dSGreg Ungerer
533171d809dSGreg Ungerer #endif
534171d809dSGreg Ungerer
/*
 * Simple test-and-set bit locks: the lock/unlock variants map
 * directly onto the plain atomic bit operations above.
 */
#define test_and_set_bit_lock	test_and_set_bit
#define clear_bit_unlock	clear_bit
#define __clear_bit_unlock	clear_bit_unlock
53984038fd9SWill Deacon
5400e862838SAlexander Lobakin #include <asm-generic/bitops/non-instrumented-non-atomic.h>
541171d809dSGreg Ungerer #include <asm-generic/bitops/ext2-atomic.h>
542171d809dSGreg Ungerer #include <asm-generic/bitops/fls64.h>
543171d809dSGreg Ungerer #include <asm-generic/bitops/sched.h>
544171d809dSGreg Ungerer #include <asm-generic/bitops/hweight.h>
545bb8bc36eSYury Norov #include <asm-generic/bitops/le.h>
546171d809dSGreg Ungerer #endif /* __KERNEL__ */
547171d809dSGreg Ungerer
548171d809dSGreg Ungerer #endif /* _M68K_BITOPS_H */
549