/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_LSE_H
#define __ASM_LSE_H

#include <asm/atomic_ll_sc.h>

#ifdef CONFIG_ARM64_LSE_ATOMICS

#define __LSE_PREAMBLE	".arch armv8-a+lse\n"

#include <linux/compiler_types.h>
#include <linux/export.h>
#include <linux/jump_label.h>
#include <linux/stringify.h>
#include <asm/alternative.h>
#include <asm/atomic_lse.h>
#include <asm/cpucaps.h>

extern struct static_key_false cpu_hwcap_keys[ARM64_NCAPS];
extern struct static_key_false arm64_const_caps_ready;

/*
 * True once the cpufeature framework has finalised the system
 * capabilities and all CPUs were found to implement the LSE atomics.
 */
static inline bool system_uses_lse_atomics(void)
{
	return (static_branch_likely(&arm64_const_caps_ready)) &&
		static_branch_likely(&cpu_hwcap_keys[ARM64_HAS_LSE_ATOMICS]);
}

/* Static-key dispatch between the LSE and LL/SC implementations of @op */
#define __lse_ll_sc_body(op, ...)					\
({									\
	system_uses_lse_atomics() ?					\
		__lse_##op(__VA_ARGS__) :				\
		__ll_sc_##op(__VA_ARGS__);				\
})

/* In-line patching at runtime */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse)				\
	ALTERNATIVE(llsc, __LSE_PREAMBLE lse, ARM64_HAS_LSE_ATOMICS)

#else	/* CONFIG_ARM64_LSE_ATOMICS */

static inline bool system_uses_lse_atomics(void) { return false; }

#define __lse_ll_sc_body(op, ...)	__ll_sc_##op(__VA_ARGS__)

#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc

#endif	/* CONFIG_ARM64_LSE_ATOMICS */
#endif	/* __ASM_LSE_H */
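
/*
 * Usage sketch (illustrative, not part of this header): callers such as
 * the arch_atomic_*() wrappers are expected to funnel each operation
 * through __lse_ll_sc_body(), which resolves to the __lse_<op>()
 * implementation from <asm/atomic_lse.h> when system_uses_lse_atomics()
 * is true and to the __ll_sc_<op>() fallback from <asm/atomic_ll_sc.h>
 * otherwise. A minimal sketch, assuming the usual
 * __lse_atomic_add()/__ll_sc_atomic_add() pair is defined:
 *
 *	static inline void arch_atomic_add(int i, atomic_t *v)
 *	{
 *		__lse_ll_sc_body(atomic_add, i, v);
 *	}
 *
 * ARM64_LSE_ATOMIC_INSN(llsc, lse), by contrast, is meant for inline
 * asm: the LL/SC instruction sequence is assembled by default, and
 * ALTERNATIVE() patches the LSE sequence (prefixed with __LSE_PREAMBLE
 * so the assembler accepts the ARMv8.1 encodings) over it at boot once
 * the ARM64_HAS_LSE_ATOMICS capability is detected.
 */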