xref: /openbmc/linux/arch/arm64/include/asm/spinlock.h (revision f5bfdc8e)
1caab277bSThomas Gleixner /* SPDX-License-Identifier: GPL-2.0-only */
208e875c1SCatalin Marinas /*
308e875c1SCatalin Marinas  * Copyright (C) 2012 ARM Ltd.
408e875c1SCatalin Marinas  */
508e875c1SCatalin Marinas #ifndef __ASM_SPINLOCK_H
608e875c1SCatalin Marinas #define __ASM_SPINLOCK_H
708e875c1SCatalin Marinas 
8087133acSWill Deacon #include <asm/qrwlock.h>
9c1109047SWill Deacon #include <asm/qspinlock.h>
1008e875c1SCatalin Marinas 
11d89e588cSPeter Zijlstra /*
 * smp_mb__after_spinlock(): issued after a spin_lock() when the caller
 * needs a full barrier rather than the lock's acquire ordering alone.
 * On arm64 this expands to a plain smp_mb(); the generic contract is
 * documented in include/linux/spinlock.h.
 */
12d89e588cSPeter Zijlstra #define smp_mb__after_spinlock()	smp_mb()
13872c63fbSWill Deacon 
14f5bfdc8eSWaiman Long /*
 * vcpu_is_preempted(cpu): always reports "not preempted" on arm64
 * (constant false, ignoring @cpu).  NOTE(review): presumably there is
 * no paravirt preemption hint wired up for this architecture — confirm
 * before relying on that rationale.
 *
15f5bfdc8eSWaiman Long  * Changing this will break osq_lock() thanks to the call inside
16f5bfdc8eSWaiman Long  * smp_cond_load_relaxed().
17f5bfdc8eSWaiman Long  *
18f5bfdc8eSWaiman Long  * See:
19f5bfdc8eSWaiman Long  * https://lore.kernel.org/lkml/20200110100612.GC2827@hirez.programming.kicks-ass.net
20f5bfdc8eSWaiman Long  */
21f5bfdc8eSWaiman Long #define vcpu_is_preempted(cpu)	false
22f5bfdc8eSWaiman Long 
2308e875c1SCatalin Marinas #endif /* __ASM_SPINLOCK_H */
24