/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Hint encoding:
 *
 * Bit4: ordering or completion (0: completion, 1: ordering)
 * Bit3: barrier for previous read (0: true, 1: false)
 * Bit2: barrier for previous write (0: true, 1: false)
 * Bit1: barrier for succeeding read (0: true, 1: false)
 * Bit0: barrier for succeeding write (0: true, 1: false)
 *
 * Hint 0x700: barrier for "read after read" from the same address
 */

#define DBAR(hint) __asm__ __volatile__("dbar %0 " : : "I"(hint) : "memory")

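/*
 * The hint mnemonics below spell out which accesses a barrier covers:
 * the leading 'c'/'o' selects completion vs ordering (Bit4), and the
 * next four positions stand for previous read, previous write,
 * succeeding read and succeeding write, with '_' marking an access
 * type the barrier does not order (its bit set to 1).
 *
 * Worked example: or_rw = 0b10100 orders previous reads against
 * succeeding reads and writes (an acquire-style barrier), while
 * orw_w = 0b10010 orders previous reads and writes against succeeding
 * writes (a release-style barrier); hence ldacq_mb() and strel_mb()
 * below.
 */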
#define crwrw		0b00000
#define cr_r_		0b00101
#define c_w_w		0b01010

#define orwrw		0b10000
#define or_r_		0b10101
#define o_w_w		0b11010

#define orw_w		0b10010
#define or_rw		0b10100

#define c_sync()	DBAR(crwrw)
#define c_rsync()	DBAR(cr_r_)
#define c_wsync()	DBAR(c_w_w)

#define o_sync()	DBAR(orwrw)
#define o_rsync()	DBAR(or_r_)
#define o_wsync()	DBAR(o_w_w)

#define ldacq_mb()	DBAR(or_rw)
#define strel_mb()	DBAR(orw_w)

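/*
 * The mandatory barriers mb()/rmb()/wmb() map to the heavier
 * completion-type barriers, while the SMP-only __smp_*() barriers get
 * away with the cheaper ordering-type variants (see the hint encoding
 * above).
 */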
#define mb()		c_sync()
#define rmb()		c_rsync()
#define wmb()		c_wsync()
#define iob()		c_sync()
#define wbflush()	c_sync()

#define __smp_mb()	o_sync()
#define __smp_rmb()	o_rsync()
#define __smp_wmb()	o_wsync()

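/*
 * __WEAK_LLSC_MB is pasted into inline asm elsewhere in this port
 * (e.g. the LL/SC loops in the cmpxchg and futex code): on SMP it
 * emits the hint-0x700 "read after read from the same address"
 * barrier described above, and on !SMP it expands to an empty line.
 */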
#ifdef CONFIG_SMP
#define __WEAK_LLSC_MB		"	dbar 0x700	\n"
#else
#define __WEAK_LLSC_MB		"			\n"
#endif

#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()

/**
 * array_index_mask_nospec() - generate a ~0 mask when index < size, 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Returns:
 *     0 - (@index < @size)
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long index,
						    unsigned long size)
{
	unsigned long mask;

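	/*
	 * "sltu %0, %1, %2" sets mask to 1 when index < size and to 0
	 * otherwise; negating that with sub.w/sub.d turns it into ~0UL
	 * or 0UL without a conditional branch the CPU could speculate
	 * past.  E.g. index = 3, size = 8: sltu yields 1, 0 - 1 gives
	 * ~0UL, so ANDing the index with the mask leaves it unchanged.
	 */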
	__asm__ __volatile__(
		"sltu	%0, %1, %2\n\t"
#if (__SIZEOF_LONG__ == 4)
		"sub.w	%0, $zero, %0\n\t"
#elif (__SIZEOF_LONG__ == 8)
		"sub.d	%0, $zero, %0\n\t"
#endif
		: "=r" (mask)
		: "r" (index), "r" (size)
		:);

	return mask;
}
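/*
 * Minimal usage sketch (variable names are illustrative only): callers
 * normally go through array_index_nospec() from <linux/nospec.h>,
 * which ANDs the index with the mask generated above so a mispredicted
 * bounds check cannot be used to read out of bounds speculatively:
 *
 *	if (idx < ARRAY_SIZE(table)) {
 *		idx = array_index_nospec(idx, ARRAY_SIZE(table));
 *		val = table[idx];
 *	}
 */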

#define __smp_load_acquire(p)				\
({							\
	typeof(*p) ___p1 = READ_ONCE(*p);		\
	compiletime_assert_atomic_type(*p);		\
	ldacq_mb();					\
	___p1;						\
})

#define __smp_store_release(p, v)			\
do {							\
	compiletime_assert_atomic_type(*p);		\
	strel_mb();					\
	WRITE_ONCE(*p, v);				\
} while (0)
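/*
 * Minimal usage sketch (names made up for illustration): these hooks
 * back the generic smp_load_acquire()/smp_store_release() wrappers in
 * <asm-generic/barrier.h>, which are typically paired like this:
 *
 *	// producer
 *	WRITE_ONCE(msg, 42);
 *	smp_store_release(&ready, 1);
 *
 *	// consumer
 *	if (smp_load_acquire(&ready))
 *		r = READ_ONCE(msg);
 *
 * strel_mb() keeps the msg store ordered before the ready store and
 * ldacq_mb() keeps the ready load ordered before the msg load, so a
 * consumer that observes ready == 1 also observes msg == 42.
 */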

#define __smp_store_mb(p, v)							\
do {										\
	union { typeof(p) __val; char __c[1]; } __u =				\
		{ .__val = (__force typeof(p)) (v) };				\
	unsigned long __tmp;							\
	switch (sizeof(p)) {							\
	case 1:									\
		*(volatile __u8 *)&p = *(__u8 *)__u.__c;			\
		__smp_mb();							\
		break;								\
	case 2:									\
		*(volatile __u16 *)&p = *(__u16 *)__u.__c;			\
		__smp_mb();							\
		break;								\
	case 4:									\
		__asm__ __volatile__(						\
		"amswap_db.w %[tmp], %[val], %[mem]	\n"			\
		: [mem] "+ZB" (*(u32 *)&p), [tmp] "=&r" (__tmp)			\
		: [val] "r" (*(__u32 *)__u.__c)					\
		: );								\
		break;								\
	case 8:									\
		__asm__ __volatile__(						\
		"amswap_db.d %[tmp], %[val], %[mem]	\n"			\
		: [mem] "+ZB" (*(u64 *)&p), [tmp] "=&r" (__tmp)			\
		: [val] "r" (*(__u64 *)__u.__c)					\
		: );								\
		break;								\
	}									\
} while (0)
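/*
 * Note on __smp_store_mb() above: for 4- and 8-byte values the store
 * is done with amswap_db.w/amswap_db.d, an atomic swap whose _db form
 * also acts as a full barrier, so store and barrier are a single AMO
 * (the old value swapped into __tmp is simply discarded); the 1- and
 * 2-byte cases fall back to a plain volatile store followed by
 * __smp_mb().
 */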

#include <asm-generic/barrier.h>

#endif /* __ASM_BARRIER_H */