/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#define _ALPHA_CMPXCHG_H

/*
 * Atomic exchange routines.
 */

#define ____xchg(type, args...)		__arch_xchg ## type ## _local(args)
#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
#include <asm/xchg.h>

#define xchg_local(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __arch_xchg_local((ptr), (unsigned long)_x_,\
					       sizeof(*(ptr)));		\
})

#define arch_cmpxchg_local(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,	\
					     (unsigned long)_n_,	\
					     sizeof(*(ptr)));		\
})

#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

#undef ____xchg
#undef ____cmpxchg
#define ____xchg(type, args...)		__arch_xchg ##type(args)
#define ____cmpxchg(type, args...)	__cmpxchg ##type(args)
#include <asm/xchg.h>

/*
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 */
#define arch_xchg(ptr, x)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) _x_ = (x);					\
	smp_mb();							\
	__ret = (__typeof__(*(ptr)))					\
		__arch_xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
	smp_mb();							\
	__ret;								\
})

#define arch_cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	smp_mb();							\
	__ret = (__typeof__(*(ptr))) __cmpxchg((ptr),			\
		(unsigned long)_o_, (unsigned long)_n_, sizeof(*(ptr)));\
	smp_mb();							\
	__ret;								\
})

#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

#undef ____cmpxchg

#endif /* _ALPHA_CMPXCHG_H */
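
/*
 * Usage sketch (an illustration only, not part of this header): generic
 * kernel code normally reaches these primitives through the cmpxchg()/xchg()
 * wrappers rather than calling arch_cmpxchg()/arch_xchg() directly.  The
 * names below (hypothetical_counter, hypothetical_inc) are made up for the
 * example; READ_ONCE() is the usual helper from <linux/compiler.h>.
 */
static unsigned long hypothetical_counter;

static inline void hypothetical_inc(void)
{
	unsigned long old, new;

	/*
	 * Classic compare-and-exchange retry loop: re-read the current
	 * value and retry until no other CPU modified it in between.
	 * arch_cmpxchg() returns the value it found at the location, so
	 * success is indicated by getting back the expected old value.
	 */
	do {
		old = READ_ONCE(hypothetical_counter);
		new = old + 1;
	} while (arch_cmpxchg(&hypothetical_counter, old, new) != old);
}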