/* openbmc/linux: arch/arm/lib/bitops.h (revision 77d84ff8) */
#include <asm/unwind.h>

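/*
 * bitop/testop expand into the out-of-line bit operations.  Callers pass
 * r0 = bit number and r1 = word-aligned pointer to the bitmap.  bitop
 * applies \instr (typically an orr/bic/eor variant) to update the bit;
 * testop does the same but also returns the bit's old value (0 or 1) in r0.
 */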
#if __LINUX_ARM_ARCH__ >= 6
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
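	@ If r1 is not word-aligned, the strneb below stores to address 1..3 and faults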
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
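	@ On ARMv7 SMP, preload the cache line for write before the exclusive sequence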
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3
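	@ r3 is now the bit mask; the ldrex/strex loop retries until the exclusive store succeeds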
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]
	cmp	r0, #0
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
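	@ Barriers either side of the update give the test_and_* ops full ordering semantics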
	smp_dmb
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b
	smp_dmb
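	@ Normalise the saved old bit value in r0 to 0 or 1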
	cmp	r0, #0
	movne	r0, #1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#else
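/* Pre-ARMv6 cores have no ldrex/strex: make the read-modify-write atomic by disabling IRQs */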
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31
	mov	r0, r0, lsr #5
	mov	r3, #1
	mov	r3, r3, lsl r2
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	mov	pc, lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
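/*
 * A typical instantiation, from one of the per-operation wrapper sources
 * (e.g. testsetbit.S), would be:
 *
 *	testop	_test_and_set_bit, orreq, streq
 *
 * so the word is rewritten only when the bit was previously clear.
 */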
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31
	mov	r0, r0, lsr #5
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!
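	@ The writeback (!) leaves r1 pointing at the word itself, ready for \store below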
	mov	r0, #1
	tst	r2, r0, lsl r3
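	@ The flags from tst let callers pass a conditional \instr/\store, so the write can be skipped when nothing would change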
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	moveq	r0, #0
	restore_irqs ip
	mov	pc, lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#endif