#ifndef _ASM_IA64_CMPXCHG_H
#define _ASM_IA64_CMPXCHG_H

/*
 * Compare/Exchange, forked from asm/intrinsics.h
 * which was:
 *
 *	Copyright (C) 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */

#ifndef __ASSEMBLY__

#include <linux/types.h>
/* include compiler specific intrinsics */
#include <asm/ia64regs.h>
#ifdef __INTEL_COMPILER
# include <asm/intel_intrin.h>
#else
# include <asm/gcc_intrin.h>
#endif

/*
 * This function doesn't exist, so you'll get a linker error if
 * something tries to do an invalid xchg().
 */
extern void ia64_xchg_called_with_bad_pointer(void);

#define __xchg(x, ptr, size)						\
({									\
	unsigned long __xchg_result;					\
									\
	switch (size) {							\
	case 1:								\
		__xchg_result = ia64_xchg1((__u8 *)ptr, x);		\
		break;							\
									\
	case 2:								\
		__xchg_result = ia64_xchg2((__u16 *)ptr, x);		\
		break;							\
									\
	case 4:								\
		__xchg_result = ia64_xchg4((__u32 *)ptr, x);		\
		break;							\
									\
	case 8:								\
		__xchg_result = ia64_xchg8((__u64 *)ptr, x);		\
		break;							\
	default:							\
		ia64_xchg_called_with_bad_pointer();			\
	}								\
	__xchg_result;							\
})

#define xchg(ptr, x)							\
((__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr))))

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG 1

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern long ia64_cmpxchg_called_with_bad_pointer(void);

#define ia64_cmpxchg(sem, ptr, old, new, size)				\
({									\
	__u64 _o_, _r_;							\
									\
	switch (size) {							\
	case 1:								\
		_o_ = (__u8) (long) (old);				\
		break;							\
	case 2:								\
		_o_ = (__u16) (long) (old);				\
		break;							\
	case 4:								\
		_o_ = (__u32) (long) (old);				\
		break;							\
	case 8:								\
		_o_ = (__u64) (long) (old);				\
		break;							\
	default:							\
		break;							\
	}								\
	switch (size) {							\
	case 1:								\
		_r_ = ia64_cmpxchg1_##sem((__u8 *) ptr, new, _o_);	\
		break;							\
									\
	case 2:								\
		_r_ = ia64_cmpxchg2_##sem((__u16 *) ptr, new, _o_);	\
		break;							\
									\
	case 4:								\
		_r_ = ia64_cmpxchg4_##sem((__u32 *) ptr, new, _o_);	\
		break;							\
									\
	case 8:								\
		_r_ = ia64_cmpxchg8_##sem((__u64 *) ptr, new, _o_);	\
		break;							\
									\
	default:							\
		_r_ = ia64_cmpxchg_called_with_bad_pointer();		\
		break;							\
	}								\
	(__typeof__(old)) _r_;						\
})

#define cmpxchg_acq(ptr, o, n)						\
	ia64_cmpxchg(acq, (ptr), (o), (n), sizeof(*(ptr)))
#define cmpxchg_rel(ptr, o, n)						\
	ia64_cmpxchg(rel, (ptr), (o), (n), sizeof(*(ptr)))
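/*
 * Usage sketch (illustrative only, not part of this header): cmpxchg
 * returns the value that was in memory *before* the operation, so a
 * caller detects success by comparing the return value against the
 * expected old value and retries on failure.  Assuming a __u32
 * counter "cnt" visible to other CPUs:
 *
 *	__u32 old, seen;
 *
 *	do {
 *		old = cnt;
 *		seen = cmpxchg_acq(&cnt, old, old + 1);
 *	} while (seen != old);
 */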
/*
 * Worse still - early processor implementations actually just ignored
 * the acquire/release semantics and did a full fence all the time.
 * Unfortunately, this meant that a lot of badly written code that used
 * .acq when it really wanted .rel went out into the wild - so when we
 * made a CPU that strictly honored .acq and .rel, all that code started
 * breaking, and we had to back-pedal and keep the "legacy" behavior of
 * a full fence :-(
 */

/* for compatibility with other platforms: */
#define cmpxchg(ptr, o, n)	cmpxchg_acq((ptr), (o), (n))
#define cmpxchg64(ptr, o, n)	cmpxchg_acq((ptr), (o), (n))

#define cmpxchg_local		cmpxchg
#define cmpxchg64_local		cmpxchg64

#ifdef CONFIG_IA64_DEBUG_CMPXCHG
# define CMPXCHG_BUGCHECK_DECL	int _cmpxchg_bugcheck_count = 128;
# define CMPXCHG_BUGCHECK(v)						\
do {									\
	if (_cmpxchg_bugcheck_count-- <= 0) {				\
		void *ip;						\
		extern int printk(const char *fmt, ...);		\
		ip = (void *) ia64_getreg(_IA64_REG_IP);		\
		printk("CMPXCHG_BUGCHECK: stuck at %p on word %p\n",	\
			ip, (v));					\
		break;							\
	}								\
} while (0)
#else /* !CONFIG_IA64_DEBUG_CMPXCHG */
# define CMPXCHG_BUGCHECK_DECL
# define CMPXCHG_BUGCHECK(v)
#endif /* !CONFIG_IA64_DEBUG_CMPXCHG */

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_IA64_CMPXCHG_H */
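/*
 * Sketch of how the CMPXCHG_BUGCHECK pair is typically used (a
 * hypothetical example for reference, not code from this header):
 * the macros wrap a cmpxchg retry loop so that a loop which fails
 * to make progress for 128 iterations logs the stuck instruction
 * pointer and the word being operated on:
 *
 *	CMPXCHG_BUGCHECK_DECL
 *	__u64 old, new;
 *
 *	do {
 *		CMPXCHG_BUGCHECK(p);
 *		old = *p;
 *		new = old | mask;
 *	} while (cmpxchg_acq(p, old, new) != old);
 */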