/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_LSE_H
#define __ASM_LSE_H

#include <asm/atomic_ll_sc.h>

#ifdef CONFIG_ARM64_LSE_ATOMICS

#define __LSE_PREAMBLE	".arch_extension lse\n"

#include <linux/compiler_types.h>
#include <linux/export.h>
#include <linux/jump_label.h>
#include <linux/stringify.h>
#include <asm/alternative.h>
#include <asm/atomic_lse.h>
#include <asm/cpucaps.h>

extern struct static_key_false cpu_hwcap_keys[ARM64_NCAPS];

/* True once the ARM64_HAS_LSE_ATOMICS CPU capability has been detected. */
static __always_inline bool system_uses_lse_atomics(void)
{
	return static_branch_likely(&cpu_hwcap_keys[ARM64_HAS_LSE_ATOMICS]);
}

/* Dispatch to the LSE or LL/SC implementation of an atomic op at runtime. */
#define __lse_ll_sc_body(op, ...)					\
({									\
	system_uses_lse_atomics() ?					\
		__lse_##op(__VA_ARGS__) :				\
		__ll_sc_##op(__VA_ARGS__);				\
})

/* In-line patching at runtime */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse)				\
	ALTERNATIVE(llsc, __LSE_PREAMBLE lse, ARM64_HAS_LSE_ATOMICS)

#else	/* CONFIG_ARM64_LSE_ATOMICS */

/* LSE atomics disabled at build time: always fall back to LL/SC. */
static inline bool system_uses_lse_atomics(void) { return false; }

#define __lse_ll_sc_body(op, ...)		__ll_sc_##op(__VA_ARGS__)

#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc

#endif	/* CONFIG_ARM64_LSE_ATOMICS */
#endif	/* __ASM_LSE_H */
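
/*
 * Usage sketch (illustrative only, not part of this header): a caller such as
 * the arm64 atomics code is expected to build its operations on top of
 * __lse_ll_sc_body(), which selects the __lse_* implementation when the
 * ARM64_HAS_LSE_ATOMICS static key is set and the __ll_sc_* implementation
 * otherwise. The wrapper name example_atomic_add() below is hypothetical and
 * assumes __lse_atomic_add()/__ll_sc_atomic_add() are provided by the
 * included atomic_lse.h/atomic_ll_sc.h headers.
 *
 *	static __always_inline void example_atomic_add(int i, atomic_t *v)
 *	{
 *		__lse_ll_sc_body(atomic_add, i, v);
 *	}
 *
 * ARM64_LSE_ATOMIC_INSN() plays the same role for hand-written inline
 * assembly: the LL/SC sequence is emitted at build time and patched to the
 * LSE sequence by the alternatives framework once the capability is known.
 */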