#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/kernel.h>
#include <asm/current.h>
#include <asm/barrier.h>
#include <asm/processor.h>

/*
 * Simple spin lock operations.  There are two variants: one clears IRQs
 * on the local processor, the other does not.
 *
 * We make no fairness assumptions; fairness has a cost.
 */

#define arch_spin_is_locked(x)	((x)->lock != 0)

static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
{
	return lock.lock == 0;
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	/* Order critical-section accesses before the releasing store. */
	mb();
	lock->lock = 0;
}

/*
 * Acquire with a load-locked/store-conditional (ldl_l/stl_c) retry loop.
 * The contended spin loop sits in .subsection 2 so the uncontended path
 * stays straight-line; the trailing mb provides the acquire ordering,
 * which Alpha does not give for free.
 */
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
	long tmp;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"	/* load-locked the lock word */
	"	bne	%0,2f\n"	/* nonzero: held, spin out of line */
	"	lda	%0,1\n"
	"	stl_c	%0,%1\n"	/* try to store 1; %0 == 0 on failure */
	"	beq	%0,2f\n"
	"	mb\n"
	".subsection 2\n"
	"2:	ldl	%0,%1\n"	/* spin with plain loads until free */
	"	bne	%0,2b\n"
	"	br	1b\n"		/* then retry the locked sequence */
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	/* Succeeds iff bit 0 was previously clear. */
	return !test_and_set_bit(0, &lock->lock);
}

/***********************************************************/

/*
 * rwlock encoding: bit 0 set means a writer holds the lock, and each
 * reader subtracts 2 from the lock word.  A writer therefore needs the
 * whole word to be zero, while readers are blocked only by the low bit.
 */

static inline void arch_read_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	blbs	%1,6f\n"	/* low bit set: a writer is active */
	"	subl	%1,2,%1\n"	/* account for one more reader */
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"	/* stl_c failed: retry out of line */
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	blbs	%1,6b\n"	/* spin while a writer holds it */
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

static inline void arch_write_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	bne	%1,6f\n"	/* readers or a writer present */
	"	lda	%1,1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	bne	%1,6b\n"	/* spin until the word is zero */
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

static inline int arch_read_trylock(arch_rwlock_t *lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"		/* assume failure */
	"	blbs	%1,2f\n"	/* writer active: give up */
	"	subl	%1,2,%2\n"
	"	stl_c	%2,%0\n"	/* %2 == 1 on success, 0 on failure */
	"	beq	%2,6f\n"	/* lost the reservation: retry */
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}

static inline int arch_write_trylock(arch_rwlock_t *lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"		/* assume failure */
	"	bne	%1,2f\n"	/* held in any way: give up */
	"	lda	%2,1\n"
	"	stl_c	%2,%0\n"	/* %2 == 1 on success, 0 on failure */
	"	beq	%2,6f\n"	/* lost the reservation: retry */
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}

static inline void arch_read_unlock(arch_rwlock_t *lock)
{
	long regx;
	__asm__ __volatile__(
	"	mb\n"			/* release ordering before the update */
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"	/* drop this reader's count */
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

static inline void arch_write_unlock(arch_rwlock_t *lock)
{
	/* Order critical-section accesses before the releasing store. */
	mb();
	lock->lock = 0;
}

#endif /* _ALPHA_SPINLOCK_H */
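
#if 0
/*
 * Editor's illustrative sketch, compiled out: the acquire loop of
 * arch_spin_lock() above, re-expressed with GCC's generic __atomic
 * builtins.  The compare-and-swap stands in for the ldl_l/stl_c pair and
 * __ATOMIC_ACQUIRE stands in for the trailing mb.  The function name
 * example_spin_lock is hypothetical and appears nowhere in the kernel.
 */
static inline void example_spin_lock(unsigned int *word)
{
	unsigned int expected;

	do {
		/* Spin with plain loads, like the .subsection 2 loop. */
		while (__atomic_load_n(word, __ATOMIC_RELAXED) != 0)
			;
		expected = 0;
		/* Attempt the 0 -> 1 transition; retry if we lose the race. */
	} while (!__atomic_compare_exchange_n(word, &expected, 1, 0,
					      __ATOMIC_ACQUIRE,
					      __ATOMIC_RELAXED));
}
#endif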
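
#if 0
/*
 * Editor's illustrative sketch, compiled out: the rwlock word encoding
 * used above, written as plain (non-atomic) predicates.  Bit 0 is the
 * writer bit and each reader subtracts 2, so the word is 0 when free,
 * 1 when write-locked, and -2 * nreaders when read-locked.  The helper
 * names are hypothetical.
 */
static inline int example_can_read_lock(int word)
{
	return (word & 1) == 0;		/* only a writer blocks readers */
}

static inline int example_can_write_lock(int word)
{
	return word == 0;		/* any holder blocks a writer */
}
#endif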