#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/kernel.h>
#include <asm/current.h>

/*
 * Simple spin lock operations.  There are two variants: one clears IRQs
 * on the local processor, one does not.
 *
 * We make no fairness assumptions: fair locks would have a cost.
 */

#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
#define arch_spin_is_locked(x)	((x)->lock != 0)
#define arch_spin_unlock_wait(x) \
		do { cpu_relax(); } while ((x)->lock)

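/* Release: order all prior accesses with mb(), then store 0. */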
static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	mb();
	lock->lock = 0;
}

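/*
 * Acquire via a load-locked/store-conditional (ldl_l/stl_c) loop.
 * While the lock is held, spin with plain loads in an out-of-line
 * subsection; retry the locked sequence once the word reads 0.  The
 * trailing mb provides the acquire barrier on success.
 */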
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	long tmp;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"
	"	bne	%0,2f\n"
	"	lda	%0,1\n"
	"	stl_c	%0,%1\n"
	"	beq	%0,2f\n"
	"	mb\n"
	".subsection 2\n"
	"2:	ldl	%0,%1\n"
	"	bne	%0,2b\n"
	"	br	1b\n"
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}

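/* Try-lock: atomically set bit 0; nonzero return means the bit was clear. */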
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}

/***********************************************************/

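/*
 * rwlock encoding: bit 0 is set while a writer holds the lock; each
 * reader subtracts 2 from the word, so the reader count occupies the
 * remaining bits.  The word is 0 iff the lock is completely free.
 */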
static inline int arch_read_can_lock(arch_rwlock_t *lock)
{
	return (lock->lock & 1) == 0;
}

static inline int arch_write_can_lock(arch_rwlock_t *lock)
{
	return lock->lock == 0;
}

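/*
 * Reader acquire: blbs spins out of line while the writer bit (bit 0)
 * is set; otherwise subtract 2 to register this reader and publish it
 * with stl_c, retrying the sequence on contention.
 */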
static inline void arch_read_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	blbs	%1,6f\n"
	"	subl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	blbs	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

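/*
 * Writer acquire: the whole word must read 0 (no writer, no readers)
 * before bit 0 can be claimed with the ll/sc sequence.
 */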
static inline void arch_write_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	bne	%1,6f\n"
	"	lda	%1,1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	bne	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

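/*
 * Reader try-lock: success is preset to 0 and only replaced by the
 * stl_c result when no writer holds the lock; a spurious stl_c failure
 * (branch to 6:) restarts the whole attempt rather than failing.
 */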
static inline int arch_read_trylock(arch_rwlock_t *lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	blbs	%1,2f\n"
	"	subl	%1,2,%2\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}

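/*
 * Writer try-lock: same pattern; it fails only when the word is
 * nonzero, while a spurious stl_c failure is simply retried.
 */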
static inline int arch_write_trylock(arch_rwlock_t *lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	bne	%1,2f\n"
	"	lda	%2,1\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}

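/*
 * Reader release: mb first so the critical section's accesses are
 * ordered, then atomically add 2 to drop this reader from the count.
 */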
static inline void arch_read_unlock(arch_rwlock_t *lock)
{
	long regx;
	__asm__ __volatile__(
	"	mb\n"
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

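/* Writer release: a barrier followed by a plain store of 0 suffices. */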
static inline void arch_write_unlock(arch_rwlock_t *lock)
{
	mb();
	lock->lock = 0;
}

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#endif /* _ALPHA_SPINLOCK_H */