xref: /openbmc/linux/arch/arm/lib/bitops.h (revision 6c870213d6f3a25981c10728f46294a3bed1703f)
1 #include <asm/unwind.h>
2 
3 #if __LINUX_ARM_ARCH__ >= 6
4 	.macro	bitop, name, instr
@
@ bitop \name, \instr -- generate an atomic bit operation (set_bit/
@ clear_bit/change_bit style) for ARMv6+ using exclusive load/store.
@ Entry: r0 = bit number, r1 = base of word (unsigned long) array;
@        r1 must be word-aligned.
@ \instr is the ALU op combining the word with the mask (orr/bic/eor).
@ Exit:  no result; clobbers r0, r2, r3, ip and flags.
@
5 ENTRY(	\name		)
6 UNWIND(	.fnstart	)
7 	ands	ip, r1, #3		@ ip = r1 & 3; Z set when word-aligned
8 	strneb	r1, [ip]		@ assert word-aligned (store to 1..3 faults)
9 	mov	r2, #1
10 	and	r3, r0, #31		@ Get bit offset
11 	mov	r0, r0, lsr #5		@ r0 = word index into array
12 	add	r1, r1, r0, lsl #2	@ Get word offset
13 #if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
14 	.arch_extension	mp
15 	ALT_SMP(W(pldw)	[r1])		@ preload-for-write hint (v7 MP extension)
16 	ALT_UP(W(nop))			@ patched to nop on uniprocessor
17 #endif
18 	mov	r3, r2, lsl r3		@ r3 = mask = 1 << bit offset
19 1:	ldrex	r2, [r1]		@ exclusive load of target word
20 	\instr	r2, r2, r3		@ apply the bit operation
21 	strex	r0, r2, [r1]		@ conditional store; r0 = 0 on success
22 	cmp	r0, #0
23 	bne	1b			@ lost exclusivity -- retry
24 	bx	lr
25 UNWIND(	.fnend		)
26 ENDPROC(\name		)
27 	.endm
28 
29 	.macro	testop, name, instr, store
@
@ testop \name, \instr, \store -- generate an atomic test_and_xxx_bit
@ operation for ARMv6+ using exclusive load/store.
@ Entry: r0 = bit number, r1 = base of word array (word-aligned).
@ Exit:  r0 = old value of the bit (0 or 1); clobbers r2, r3, ip, flags.
@ The smp_dmb before and after the exclusive loop provide the full
@ barrier semantics required of test_and_* bit operations.
@ (\store is unused by this ARMv6+ variant; it exists for the pre-v6
@ implementation's macro signature.)
@
30 ENTRY(	\name		)
31 UNWIND(	.fnstart	)
32 	ands	ip, r1, #3		@ ip = r1 & 3; Z set when word-aligned
33 	strneb	r1, [ip]		@ assert word-aligned (store to 1..3 faults)
34 	mov	r2, #1
35 	and	r3, r0, #31		@ Get bit offset
36 	mov	r0, r0, lsr #5		@ r0 = word index into array
37 	add	r1, r1, r0, lsl #2	@ Get word offset
38 	mov	r3, r2, lsl r3		@ create mask
39 	smp_dmb				@ order prior accesses before the RMW
40 #if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
41 	.arch_extension	mp
42 	ALT_SMP(W(pldw)	[r1])		@ preload-for-write hint (v7 MP extension)
43 	ALT_UP(W(nop))			@ patched to nop on uniprocessor
44 #endif
45 1:	ldrex	r2, [r1]		@ exclusive load of target word
46 	ands	r0, r2, r3		@ save old value of bit
47 	\instr	r2, r2, r3		@ toggle bit
48 	strex	ip, r2, [r1]		@ conditional store; ip = 0 on success
49 	cmp	ip, #0
50 	bne	1b			@ lost exclusivity -- retry
51 	smp_dmb				@ order the RMW before later accesses
52 	cmp	r0, #0
53 	movne	r0, #1			@ normalise old bit value to 0/1
54 2:	bx	lr			@ NB: label 2 is unreferenced here
55 UNWIND(	.fnend		)
56 ENDPROC(\name		)
57 	.endm
58 #else
59 	.macro	bitop, name, instr
@
@ bitop \name, \instr -- pre-ARMv6 (uniprocessor) bit operation.
@ Atomicity against interrupts is provided by masking IRQs around a
@ plain load/modify/store; no exclusive-access instructions exist here.
@ Entry: r0 = bit number, r1 = base of word array (word-aligned).
@ Exit:  no result; clobbers r2, r3, ip and flags.
@
60 ENTRY(	\name		)
61 UNWIND(	.fnstart	)
62 	ands	ip, r1, #3		@ ip = r1 & 3; Z set when word-aligned
63 	strneb	r1, [ip]		@ assert word-aligned (store to 1..3 faults)
64 	and	r2, r0, #31		@ r2 = bit offset within the word
65 	mov	r0, r0, lsr #5		@ r0 = word index into array
66 	mov	r3, #1
67 	mov	r3, r3, lsl r2		@ r3 = mask = 1 << bit offset
68 	save_and_disable_irqs ip	@ ip = saved CPSR; IRQs masked
69 	ldr	r2, [r1, r0, lsl #2]
70 	\instr	r2, r2, r3		@ apply the bit operation
71 	str	r2, [r1, r0, lsl #2]
72 	restore_irqs ip
73 	mov	pc, lr			@ plain return (pre-v6, no bx on ARMv4)
74 UNWIND(	.fnend		)
75 ENDPROC(\name		)
76 	.endm
77 
78 /**
79  * testop - implement a test_and_xxx_bit operation.
 * @name: name of the function to generate
80  * @instr: operational instruction
81  * @store: store instruction
82  *
83  * Note: we can trivially conditionalise the store instruction
84  * to avoid dirtying the data cache.
85  */
86 	.macro	testop, name, instr, store
@
@ testop \name, \instr, \store -- pre-ARMv6 (uniprocessor)
@ test_and_xxx_bit operation, atomic via IRQ masking.
@ Entry: r0 = bit number, r1 = base of word array (word-aligned).
@ Exit:  r0 = old value of the bit (0 or 1); clobbers r2, r3, ip, flags.
@ \store may be a conditional store (e.g. strne/streq, predicated on the
@ Z flag set by the tst below) so an unchanged word need not be written.
@
87 ENTRY(	\name		)
88 UNWIND(	.fnstart	)
89 	ands	ip, r1, #3		@ ip = r1 & 3; Z set when word-aligned
90 	strneb	r1, [ip]		@ assert word-aligned (store to 1..3 faults)
91 	and	r3, r0, #31		@ r3 = bit offset within the word
92 	mov	r0, r0, lsr #5		@ r0 = word index into array
93 	save_and_disable_irqs ip	@ ip = saved CPSR; IRQs masked
94 	ldr	r2, [r1, r0, lsl #2]!	@ load word; writeback leaves r1 -> word
95 	mov	r0, #1
96 	tst	r2, r0, lsl r3		@ Z = old bit clear; flags live until moveq
97 	\instr	r2, r2, r0, lsl r3	@ apply the bit operation
98 	\store	r2, [r1]		@ store (possibly conditional on Z)
99 	moveq	r0, #0			@ r0 = old bit value: 0 if clear, else 1
100 	restore_irqs ip
101 	mov	pc, lr			@ plain return (pre-v6, no bx on ARMv4)
102 UNWIND(	.fnend		)
103 ENDPROC(\name		)
104 	.endm
105 #endif
106