/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
/*
 * Compiler-dependent intrinsics.
 *
 * Copyright (C) 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#ifndef _UAPI_ASM_IA64_INTRINSICS_H
#define _UAPI_ASM_IA64_INTRINSICS_H


#ifndef __ASSEMBLY__

#include <linux/types.h>
/* include compiler specific intrinsics */
#include <asm/ia64regs.h>
#ifdef __INTEL_COMPILER
# include <asm/intel_intrin.h>
#else
# include <asm/gcc_intrin.h>
#endif
#include <asm/cmpxchg.h>

#define ia64_set_rr0_to_rr4(val0, val1, val2, val3, val4)		\
do {									\
	ia64_set_rr(0x0000000000000000UL, (val0));			\
	ia64_set_rr(0x2000000000000000UL, (val1));			\
	ia64_set_rr(0x4000000000000000UL, (val2));			\
	ia64_set_rr(0x6000000000000000UL, (val3));			\
	ia64_set_rr(0x8000000000000000UL, (val4));			\
} while (0)

/*
 * Force an unresolved reference if someone tries to use
 * ia64_fetch_and_add() with a bad value.
 */
extern unsigned long __bad_size_for_ia64_fetch_and_add (void);
extern unsigned long __bad_increment_for_ia64_fetch_and_add (void);

#define IA64_FETCHADD(tmp,v,n,sz,sem)					\
({									\
	switch (sz) {							\
	case 4:								\
		tmp = ia64_fetchadd4_##sem((unsigned int *) v, n);	\
		break;							\
									\
	case 8:								\
		tmp = ia64_fetchadd8_##sem((unsigned long *) v, n);	\
		break;							\
									\
	default:							\
		__bad_size_for_ia64_fetch_and_add();			\
	}								\
})

#define ia64_fetchadd(i,v,sem)						\
({									\
	__u64 _tmp;							\
	volatile __typeof__(*(v)) *_v = (v);				\
	/* Can't use a switch () here: gcc isn't always smart enough for that... */ \
	if ((i) == -16)							\
		IA64_FETCHADD(_tmp, _v, -16, sizeof(*(v)), sem);	\
	else if ((i) == -8)						\
		IA64_FETCHADD(_tmp, _v, -8, sizeof(*(v)), sem);		\
	else if ((i) == -4)						\
		IA64_FETCHADD(_tmp, _v, -4, sizeof(*(v)), sem);		\
	else if ((i) == -1)						\
		IA64_FETCHADD(_tmp, _v, -1, sizeof(*(v)), sem);		\
	else if ((i) == 1)						\
		IA64_FETCHADD(_tmp, _v, 1, sizeof(*(v)), sem);		\
	else if ((i) == 4)						\
		IA64_FETCHADD(_tmp, _v, 4, sizeof(*(v)), sem);		\
	else if ((i) == 8)						\
		IA64_FETCHADD(_tmp, _v, 8, sizeof(*(v)), sem);		\
	else if ((i) == 16)						\
		IA64_FETCHADD(_tmp, _v, 16, sizeof(*(v)), sem);		\
	else								\
		_tmp = __bad_increment_for_ia64_fetch_and_add();	\
	(__typeof__(*(v))) (_tmp);	/* return old value */		\
})

#define ia64_fetch_and_add(i,v)	(ia64_fetchadd(i, v, rel) + (i)) /* return new value */

#endif

#endif /* _UAPI_ASM_IA64_INTRINSICS_H */
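
/*
 * Usage sketch (illustrative only; the variable and function names below are
 * hypothetical and not part of this header).  It shows that ia64_fetchadd()
 * evaluates to the value the word held *before* the add, while
 * ia64_fetch_and_add() evaluates to the value *after* the add.  The increment
 * must be one of +/-1, 4, 8, or 16 and the operand 4 or 8 bytes wide, since
 * the underlying fetchadd instruction only accepts those immediates; anything
 * else triggers the __bad_*_for_ia64_fetch_and_add() link-time errors above.
 * The "acq"/"rel" argument selects the acquire or release form of fetchadd.
 */
#if 0	/* not compiled; sketch only */
static __u64 example_ctr = 5;

static void example_fetchadd_usage(void)
{
	/* oldval gets the value before the add (5); example_ctr becomes 6 */
	__u64 oldval = ia64_fetchadd(1, &example_ctr, acq);

	/* newval gets the value after the add (10); example_ctr becomes 10 */
	__u64 newval = ia64_fetch_and_add(4, &example_ctr);
}
#endif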