/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_LSE_H
#define __ASM_LSE_H

#include <asm/atomic_ll_sc.h>

#if defined(CONFIG_AS_LSE) && defined(CONFIG_ARM64_LSE_ATOMICS)

#include <linux/compiler_types.h>
#include <linux/export.h>
#include <linux/jump_label.h>
#include <linux/stringify.h>
#include <asm/alternative.h>
#include <asm/atomic_lse.h>
#include <asm/cpucaps.h>

__asm__(".arch_extension	lse");

extern struct static_key_false cpu_hwcap_keys[ARM64_NCAPS];
extern struct static_key_false arm64_const_caps_ready;

/* True once cpufeature detection has established that all CPUs have LSE. */
static inline bool system_uses_lse_atomics(void)
{
	return (static_branch_likely(&arm64_const_caps_ready)) &&
		static_branch_likely(&cpu_hwcap_keys[ARM64_HAS_LSE_ATOMICS]);
}

/* Dispatch 'op' to its __lse_ or __ll_sc_ implementation via a static branch. */
#define __lse_ll_sc_body(op, ...)					\
({									\
	system_uses_lse_atomics() ?					\
		__lse_##op(__VA_ARGS__) :				\
		__ll_sc_##op(__VA_ARGS__);				\
})

/* In-line patching at runtime */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse)				\
	ALTERNATIVE(llsc, lse, ARM64_HAS_LSE_ATOMICS)

#else	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */

static inline bool system_uses_lse_atomics(void) { return false; }

#define __lse_ll_sc_body(op, ...)	__ll_sc_##op(__VA_ARGS__)

#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc

#endif	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */
#endif	/* __ASM_LSE_H */
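
/*
 * Usage sketch (illustrative, not part of this header): this mirrors how a
 * caller such as arch/arm64/include/asm/atomic.h routes an atomic operation
 * through __lse_ll_sc_body(). The __lse_atomic_add() and __ll_sc_atomic_add()
 * helpers are assumed to be provided by <asm/atomic_lse.h> and
 * <asm/atomic_ll_sc.h> respectively; the wrapper below simply selects
 * between them at runtime based on the static branch above.
 */
static inline void arch_atomic_add(int i, atomic_t *v)
{
	__lse_ll_sc_body(atomic_add, i, v);
}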