/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/compiler.h>
#include <asm/barrier.h>
#ifndef CONFIG_ARC_HAS_LLSC
#include <asm/smp.h>
#endif

#ifdef CONFIG_ARC_HAS_LLSC

/*
 * Hardware assisted Atomic-R-M-W
 */

#define BIT_OP(op, c_op, asm_op)					\
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned int temp;						\
									\
	m += nr >> 5;							\
									\
	nr &= 0x1f;							\
									\
	__asm__ __volatile__(						\
	"1:	llock       %0, [%1]		\n"			\
	"	" #asm_op " %0, %0, %2	\n"				\
	"	scond       %0, [%1]		\n"			\
	"	bnz         1b			\n"			\
	: "=&r"(temp)	/* Early clobber, to prevent reg reuse */	\
	: "r"(m),	/* Not "m": llock only supports reg direct addr mode */	\
	  "ir"(nr)							\
	: "cc");							\
}

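/*
 * A minimal C-level sketch of what each generated op does; illustrative
 * only, since real atomicity needs the single LLOCK/SCOND retry loop
 * above (store_conditional() is a hypothetical stand-in for SCOND):
 *
 *	void set_bit(unsigned long nr, volatile unsigned long *m)
 *	{
 *		unsigned int temp;
 *		m += nr >> 5;				// word holding the bit
 *		do {
 *			temp = *m;			// llock
 *			temp |= 1UL << (nr & 0x1f);	// bset
 *		} while (!store_conditional(m, temp));	// scond; retry if lost
 *	}
 */
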
/*
 * Semantically:
 *    Test the bit
 *    if clear
 *        set it and return 0 (old value)
 *    else
 *        return 1 (old value).
 *
 * Since ARC lacks an equivalent h/w primitive, the bit is set unconditionally
 * and the old value of the bit is returned
 */
#define TEST_N_BIT_OP(op, c_op, asm_op)					\
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old, temp;					\
									\
	m += nr >> 5;							\
									\
	nr &= 0x1f;							\
									\
	/*								\
	 * Explicit full memory barrier needed before/after as		\
	 * LLOCK/SCOND themselves don't provide any such semantics	\
	 */								\
	smp_mb();							\
									\
	__asm__ __volatile__(						\
	"1:	llock       %0, [%2]	\n"				\
	"	" #asm_op " %1, %0, %3	\n"				\
	"	scond       %1, [%2]	\n"				\
	"	bnz         1b		\n"				\
	: "=&r"(old), "=&r"(temp)					\
	: "r"(m), "ir"(nr)						\
	: "cc");							\
									\
	smp_mb();							\
									\
	return (old & (1 << nr)) != 0;					\
}

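/*
 * Illustrative usage (MY_FLAG, @flags and do_one_time_init() are
 * hypothetical): the test-and-set is one atomic step, so only one CPU
 * can observe the bit as previously clear.
 *
 *	if (!test_and_set_bit(MY_FLAG, &flags))
 *		do_one_time_init();	// old value was 0: we won the race
 */
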
#else /* !CONFIG_ARC_HAS_LLSC */

/*
 * Non hardware assisted Atomic-R-M-W
 * Locking would change to irq-disabling only (UP) and spinlocks (SMP)
 *
 * There's "significant" micro-optimization in writing our own variants of
 * bitops (over generic variants)
 *
 * (1) The generic APIs have "signed" @nr while we have it "unsigned"
 *     This avoids extra code being generated for pointer arithmetic, since
 *     the compiler is "not sure" that the index is NOT -ve
 * (2) Utilize the fact that ARCompact bit fiddling insns (BSET/BCLR/ASL) etc
 *     only consider bottom 5 bits of @nr, so NO need to mask them off.
 *     (GCC Quirk: however for constant @nr we still need to do the masking
 *             at compile time)
 */

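/*
 * Illustrative example of (2), with arbitrary registers: for a variable
 * @nr a single insn such as
 *	bset	r0, r0, r1	; hardware uses only r1[4:0]
 * needs no masking, whereas a constant @nr like 33 must be reduced to
 * (33 & 0x1f) == 1 at compile time to emit "bset r0, r0, 1".
 */
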
#define BIT_OP(op, c_op, asm_op)					\
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long temp, flags;					\
	m += nr >> 5;							\
									\
	/*								\
	 * spin lock/unlock provide the needed smp_mb() before/after	\
	 */								\
	bitops_lock(flags);						\
									\
	temp = *m;							\
	*m = temp c_op (1UL << (nr & 0x1f));				\
									\
	bitops_unlock(flags);						\
}

#define TEST_N_BIT_OP(op, c_op, asm_op)					\
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old, flags;					\
	m += nr >> 5;							\
									\
	bitops_lock(flags);						\
									\
	old = *m;							\
	*m = old c_op (1UL << (nr & 0x1f));				\
									\
	bitops_unlock(flags);						\
									\
	return (old & (1UL << (nr & 0x1f))) != 0;			\
}

#endif /* CONFIG_ARC_HAS_LLSC */

/***************************************
 * Non atomic variants
 **************************************/

#define __BIT_OP(op, c_op, asm_op)					\
static inline void __##op##_bit(unsigned long nr, volatile unsigned long *m)	\
{									\
	unsigned long temp;						\
	m += nr >> 5;							\
									\
	temp = *m;							\
	*m = temp c_op (1UL << (nr & 0x1f));				\
}

#define __TEST_N_BIT_OP(op, c_op, asm_op)				\
static inline int __test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old;						\
	m += nr >> 5;							\
									\
	old = *m;							\
	*m = old c_op (1UL << (nr & 0x1f));				\
									\
	return (old & (1UL << (nr & 0x1f))) != 0;			\
}

#define BIT_OPS(op, c_op, asm_op)					\
									\
	/* set_bit(), clear_bit(), change_bit() */			\
	BIT_OP(op, c_op, asm_op)					\
									\
	/* test_and_set_bit(), test_and_clear_bit(), test_and_change_bit() */\
	TEST_N_BIT_OP(op, c_op, asm_op)					\
									\
	/* __set_bit(), __clear_bit(), __change_bit() */		\
	__BIT_OP(op, c_op, asm_op)					\
									\
	/* __test_and_set_bit(), __test_and_clear_bit(), __test_and_change_bit() */\
	__TEST_N_BIT_OP(op, c_op, asm_op)

BIT_OPS(set, |, bset)
BIT_OPS(clear, & ~, bclr)
BIT_OPS(change, ^, bxor)

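/*
 * Illustrative usage (hypothetical @flags word): the plain variants are
 * atomic and safe across CPUs; the __ variants skip the LLSC loop (or
 * lock) and are only safe when the caller already serializes access.
 *
 *	set_bit(0, &flags);	// atomic RMW
 *	__set_bit(1, &flags);	// non-atomic; caller must serialize
 */
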
/*
 * This routine doesn't need to be atomic.
 */
static inline int
test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	unsigned long mask;

	addr += nr >> 5;

	mask = 1UL << (nr & 0x1f);

	return ((mask & *addr) != 0);
}

#ifdef CONFIG_ISA_ARCOMPACT

/*
 * Count the number of zeros, starting from MSB
 * Helper for fls( ) friends
 * This is a pure count, so (1-32) or (0-31) doesn't apply
 * It could be 0 to 32, based on the number of leading 0's
 * clz(0x8000_0000) = 0, clz(0xFFFF_FFFF) = 0, clz(0) = 32, clz(1) = 31
 */
static inline __attribute__ ((const)) int clz(unsigned int x)
{
	unsigned int res;

	__asm__ __volatile__(
	"	norm.f  %0, %1		\n"	/* norm = clz(x) - 1 for +ve x; sets flags on x */
	"	mov.n   %0, 0		\n"	/* N flag (MSB of x set): clz = 0 */
	"	add.p   %0, %0, 1	\n"	/* otherwise: clz = norm + 1 */
	: "=r"(res)
	: "r"(x)
	: "cc");

	return res;
}

static inline int constant_fls(unsigned int x)
{
	int r = 32;

	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u))
		r -= 1;
	return r;
}

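/*
 * Worked example: constant_fls(0x100) shifts x left by 16, then 4, then 2
 * (r = 32 - 16 - 4 - 2 = 10); the final test sees bit 31 still clear and
 * drops r to 9, matching fls(1 << 8) == 9.
 */
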
/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned int x)
{
	if (__builtin_constant_p(x))
		return constant_fls(x);

	return 32 - clz(x);
}

/*
 * __fls: Similar to fls, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	if (!x)
		return 0;
	else
		return fls(x) - 1;
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
#define ffs(x)	({ unsigned long __t = (x); fls(__t & -__t); })

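/*
 * In ffs() above, __t & -__t isolates the lowest set bit; e.g. for
 * x = 0b101100, x & -x = 0b100 and fls(0b100) = 3 == ffs(x).
 */
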
/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) unsigned long __ffs(unsigned long word)
{
	if (!word)
		return word;

	return ffs(word) - 1;
}

#else	/* CONFIG_ISA_ARCV2 */

/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	int n;

	asm volatile(
	"	fls.f	%0, %1		\n"  /* 0:31; 0(Z) if src 0 */
	"	add.nz	%0, %0, 1	\n"  /* 0:31 -> 1:32 */
	: "=r"(n)	/* Early clobber not needed */
	: "r"(x)
	: "cc");

	return n;
}

/*
 * __fls: Similar to fls, but zero based (0-31). Also 0 if no bit is set
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	/* FLS insn has exactly the same semantics as the API */
	return __builtin_arc_fls(x);
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
static inline __attribute__ ((const)) int ffs(unsigned long x)
{
	int n;

	asm volatile(
	"	ffs.f	%0, %1		\n"  /* 0:31; 31(Z) if src 0 */
	"	add.nz	%0, %0, 1	\n"  /* 0:31 -> 1:32 */
	"	mov.z	%0, 0		\n"  /* 31(Z) -> 0 */
	: "=r"(n)	/* Early clobber not needed */
	: "r"(x)
	: "cc");

	return n;
}

/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) unsigned long __ffs(unsigned long x)
{
	unsigned long n;

	asm volatile(
	"	ffs.f	%0, %1		\n"  /* 0:31; 31(Z) if src 0 */
	"	mov.z	%0, 0		\n"  /* 31(Z) -> 0 */
	: "=r"(n)
	: "r"(x)
	: "cc");

	return n;
}

#endif	/* CONFIG_ISA_ARCOMPACT */

/*
 * ffz = Find First Zero in word.
 * @return: [0-31], 32 if all 1's
 */
#define ffz(x)	__ffs(~(x))

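/* Worked example: ffz(0x000000ff) == __ffs(0xffffff00) == 8 */
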
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_BITOPS_H */