/* 32-bit atomic xchg() and cmpxchg() definitions.
 *
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com.au)
 * Copyright (C) 2007 Kyle McMartin (kyle@parisc-linux.org)
 *
 * Additions by Keith M Wesolowski (wesolows@foobazco.org) based
 * on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf <prumpf@tux.org>.
 */

#ifndef __ARCH_SPARC_CMPXCHG__
#define __ARCH_SPARC_CMPXCHG__

#include <asm/btfixup.h>

/* This has special calling conventions */
#ifndef CONFIG_SMP
BTFIXUPDEF_CALL(void, ___xchg32, void)
#endif

static inline unsigned long xchg_u32(__volatile__ unsigned long *m, unsigned long val)
{
#ifdef CONFIG_SMP
	__asm__ __volatile__("swap [%2], %0"
			     : "=&r" (val)
			     : "0" (val), "r" (m)
			     : "memory");
	return val;
#else
	register unsigned long *ptr asm("g1");
	register unsigned long ret asm("g2");

	ptr = (unsigned long *) m;
	ret = val;

	/* Note: this is magic and the nop there is
	   really needed. */
	__asm__ __volatile__(
	"mov	%%o7, %%g4\n\t"
	"call	___f____xchg32\n\t"
	" nop\n\t"
	: "=&r" (ret)
	: "0" (ret), "r" (ptr)
	: "g3", "g4", "g7", "memory", "cc");

	return ret;
#endif
}

extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, int size)
{
	switch (size) {
	case 4:
		return xchg_u32(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

/* Emulate cmpxchg() the same way we emulate atomics,
 * by hashing the object address and indexing into an array
 * of spinlocks to get a bit of performance...
 *
 * See arch/sparc/lib/atomic32.c for implementation.
 *
 * Cribbed from <asm-parisc/atomic.h>
 */
#define __HAVE_ARCH_CMPXCHG	1

/* bug catcher for when unsupported size is used - won't link */
extern void __cmpxchg_called_with_bad_pointer(void);
/* we only need to support cmpxchg of a u32 on sparc */
extern unsigned long __cmpxchg_u32(volatile u32 *m, u32 old, u32 new_);

/* don't worry...optimizer will get rid of most of this */
static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32((u32 *)ptr, (u32)old, (u32)new_);
	default:
		__cmpxchg_called_with_bad_pointer();
		break;
	}
	return old;
}

#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
				       (unsigned long)_n_, sizeof(*(ptr))); \
})

#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

#endif /* __ARCH_SPARC_CMPXCHG__ */
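
/*
 * For reference: a minimal sketch of the hashed-spinlock emulation that the
 * comment above expects arch/sparc/lib/atomic32.c to provide behind
 * __cmpxchg_u32().  The ATOMIC_HASH() macro and __atomic_hash[] array names
 * below are assumptions used purely for illustration; the real definitions
 * live in arch/sparc/lib/atomic32.c.
 *
 *	#define ATOMIC_HASH_SIZE	4
 *	#define ATOMIC_HASH(a)	\
 *		(&__atomic_hash[(((unsigned long)a) >> 8) & (ATOMIC_HASH_SIZE - 1)])
 *
 *	extern spinlock_t __atomic_hash[ATOMIC_HASH_SIZE];
 *
 *	unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new_)
 *	{
 *		unsigned long flags;
 *		u32 prev;
 *
 *		// Pick a spinlock by hashing the object address, then do the
 *		// compare-and-store with interrupts disabled on this CPU.
 *		spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
 *		if ((prev = *ptr) == old)
 *			*ptr = new_;
 *		spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
 *
 *		// Like a native cmpxchg, return the value seen in memory.
 *		return (unsigned long)prev;
 *	}
 */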
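
/*
 * Usage sketch (illustration only, not part of this interface): on sparc32
 * both macros operate on 32-bit objects, and cmpxchg() returns the value
 * that was in memory before the operation.  The 'flag' variable below is a
 * hypothetical example.
 *
 *	static unsigned int flag;
 *
 *	unsigned int old  = xchg(&flag, 1);		// unconditionally store 1
 *	unsigned int seen = cmpxchg(&flag, 1, 2);	// store 2 only if flag == 1
 *	if (seen == 1) {
 *		// the swap happened; otherwise 'seen' holds the current value
 *	}
 */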