xref: /openbmc/linux/arch/arc/include/asm/futex.h (revision a08971e9)
1d2912cb1SThomas Gleixner /* SPDX-License-Identifier: GPL-2.0-only */
201b812bcSVineet Gupta /*
301b812bcSVineet Gupta  * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
401b812bcSVineet Gupta  *
501b812bcSVineet Gupta  * Vineetg: August 2010: From Android kernel work
601b812bcSVineet Gupta  */
701b812bcSVineet Gupta 
801b812bcSVineet Gupta #ifndef _ASM_FUTEX_H
901b812bcSVineet Gupta #define _ASM_FUTEX_H
1001b812bcSVineet Gupta 
1101b812bcSVineet Gupta #include <linux/futex.h>
1201b812bcSVineet Gupta #include <linux/preempt.h>
1301b812bcSVineet Gupta #include <linux/uaccess.h>
1401b812bcSVineet Gupta #include <asm/errno.h>
1501b812bcSVineet Gupta 
169138d413SVineet Gupta #ifdef CONFIG_ARC_HAS_LLSC
179138d413SVineet Gupta 
/*
 * Atomic read-modify-write of a user-space futex word, LLSC flavour.
 *
 * Retry loop: llock loads *uaddr into %1 (oldval), 'insn' computes the new
 * value into %0 from %1/%3 (oparg), scond attempts the store and the loop
 * restarts on contention (bnz 1b).  On success %0 (ret) is overwritten
 * with 0.  A fault on the user access at label 1 or 2 is routed through
 * the __ex_table entries to the fixup at label 4, which sets ret = -EFAULT
 * (%4) and jumps past the loop.
 *
 * The smp_mb() before and after give the asm full-barrier semantics, as
 * required of futex atomic ops.  "memory" clobber keeps the compiler from
 * caching *uaddr across the sequence.
 */
189138d413SVineet Gupta #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
199138d413SVineet Gupta 							\
2031d30c82SVineet Gupta 	smp_mb();					\
219138d413SVineet Gupta 	__asm__ __volatile__(				\
229138d413SVineet Gupta 	"1:	llock	%1, [%2]		\n"	\
239138d413SVineet Gupta 		insn				"\n"	\
249138d413SVineet Gupta 	"2:	scond	%0, [%2]		\n"	\
259138d413SVineet Gupta 	"	bnz	1b			\n"	\
269138d413SVineet Gupta 	"	mov %0, 0			\n"	\
279138d413SVineet Gupta 	"3:					\n"	\
289138d413SVineet Gupta 	"	.section .fixup,\"ax\"		\n"	\
299138d413SVineet Gupta 	"	.align  4			\n"	\
309138d413SVineet Gupta 	"4:	mov %0, %4			\n"	\
316de6066cSYuriy Kolerov 	"	j   3b				\n"	\
329138d413SVineet Gupta 	"	.previous			\n"	\
339138d413SVineet Gupta 	"	.section __ex_table,\"a\"	\n"	\
349138d413SVineet Gupta 	"	.align  4			\n"	\
359138d413SVineet Gupta 	"	.word   1b, 4b			\n"	\
369138d413SVineet Gupta 	"	.word   2b, 4b			\n"	\
379138d413SVineet Gupta 	"	.previous			\n"	\
389138d413SVineet Gupta 							\
399138d413SVineet Gupta 	: "=&r" (ret), "=&r" (oldval)			\
409138d413SVineet Gupta 	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
4131d30c82SVineet Gupta 	: "cc", "memory");				\
4231d30c82SVineet Gupta 	smp_mb()					\
439138d413SVineet Gupta 
449138d413SVineet Gupta #else	/* !CONFIG_ARC_HAS_LLSC */
459138d413SVineet Gupta 
/*
 * Atomic read-modify-write of a user-space futex word, non-LLSC flavour.
 *
 * Plain ld/st with no hardware atomicity: the caller disables preemption
 * around the op (see arch_futex_atomic_op_inuser) to make the r-m-w
 * atomic on UP.  Same operand contract as the LLSC version: %1 receives
 * the old value, 'insn' computes the new value into %0, and a fault at
 * label 1 or 2 lands in the fixup which sets ret = -EFAULT (%4); the
 * straight-line path ends with ret = 0.
 */
4701b812bcSVineet Gupta #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
4801b812bcSVineet Gupta 							\
4831d30c82SVineet Gupta 	smp_mb();					\
4901b812bcSVineet Gupta 	__asm__ __volatile__(				\
5001b812bcSVineet Gupta 	"1:	ld	%1, [%2]		\n"	\
5101b812bcSVineet Gupta 		insn				"\n"	\
5201b812bcSVineet Gupta 	"2:	st	%0, [%2]		\n"	\
5301b812bcSVineet Gupta 	"	mov %0, 0			\n"	\
5401b812bcSVineet Gupta 	"3:					\n"	\
5501b812bcSVineet Gupta 	"	.section .fixup,\"ax\"		\n"	\
5601b812bcSVineet Gupta 	"	.align  4			\n"	\
5701b812bcSVineet Gupta 	"4:	mov %0, %4			\n"	\
586de6066cSYuriy Kolerov 	"	j   3b				\n"	\
5901b812bcSVineet Gupta 	"	.previous			\n"	\
6001b812bcSVineet Gupta 	"	.section __ex_table,\"a\"	\n"	\
6101b812bcSVineet Gupta 	"	.align  4			\n"	\
6201b812bcSVineet Gupta 	"	.word   1b, 4b			\n"	\
6301b812bcSVineet Gupta 	"	.word   2b, 4b			\n"	\
6401b812bcSVineet Gupta 	"	.previous			\n"	\
6501b812bcSVineet Gupta 							\
6601b812bcSVineet Gupta 	: "=&r" (ret), "=&r" (oldval)			\
6701b812bcSVineet Gupta 	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)	\
6831d30c82SVineet Gupta 	: "cc", "memory");				\
6931d30c82SVineet Gupta 	smp_mb()					\
7001b812bcSVineet Gupta 
719138d413SVineet Gupta #endif
729138d413SVineet Gupta 
/*
 * arch_futex_atomic_op_inuser() - perform futex op @op with @oparg on the
 * user word @uaddr, returning the previous value through @oval.
 *
 * Returns 0 on success, -EFAULT if @uaddr is not a valid/writable user
 * address (or the access faults), -ENOSYS for an unknown @op.  *@oval is
 * written only when the op succeeded.
 *
 * For !LLSC builds preemption is disabled across the ld/st sequence so the
 * read-modify-write is atomic w.r.t. other tasks on this CPU; LLSC builds
 * get atomicity from the llock/scond loop itself.
 */
arch_futex_atomic_op_inuser(int op,int oparg,int * oval,u32 __user * uaddr)7330d6e0a4SJiri Slaby static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
7430d6e0a4SJiri Slaby 		u32 __user *uaddr)
7501b812bcSVineet Gupta {
7601b812bcSVineet Gupta 	int oldval = 0, ret;
7701b812bcSVineet Gupta 
78a08971e9SAl Viro 	if (!access_ok(uaddr, sizeof(u32)))
79a08971e9SAl Viro 		return -EFAULT;
80a08971e9SAl Viro 
81eb2cd8b7SVineet Gupta #ifndef CONFIG_ARC_HAS_LLSC
82eb2cd8b7SVineet Gupta 	preempt_disable();	/* to guarantee atomic r-m-w of futex op */
83eb2cd8b7SVineet Gupta #endif
8401b812bcSVineet Gupta 
8501b812bcSVineet Gupta 	switch (op) {
8601b812bcSVineet Gupta 	case FUTEX_OP_SET:
8701b812bcSVineet Gupta 		__futex_atomic_op("mov %0, %3", ret, oldval, uaddr, oparg);
8801b812bcSVineet Gupta 		break;
8901b812bcSVineet Gupta 	case FUTEX_OP_ADD:
90ed574e2bSVineet Gupta 		/* oldval = *uaddr; *uaddr += oparg ; ret = *uaddr */
9101b812bcSVineet Gupta 		__futex_atomic_op("add %0, %1, %3", ret, oldval, uaddr, oparg);
9201b812bcSVineet Gupta 		break;
9301b812bcSVineet Gupta 	case FUTEX_OP_OR:
9401b812bcSVineet Gupta 		__futex_atomic_op("or  %0, %1, %3", ret, oldval, uaddr, oparg);
9501b812bcSVineet Gupta 		break;
9601b812bcSVineet Gupta 	case FUTEX_OP_ANDN:
		/* bic = "and with complement": *uaddr &= ~oparg */
9701b812bcSVineet Gupta 		__futex_atomic_op("bic %0, %1, %3", ret, oldval, uaddr, oparg);
9801b812bcSVineet Gupta 		break;
9901b812bcSVineet Gupta 	case FUTEX_OP_XOR:
10001b812bcSVineet Gupta 		__futex_atomic_op("xor %0, %1, %3", ret, oldval, uaddr, oparg);
10101b812bcSVineet Gupta 		break;
10201b812bcSVineet Gupta 	default:
10301b812bcSVineet Gupta 		ret = -ENOSYS;
10401b812bcSVineet Gupta 	}
10501b812bcSVineet Gupta 
106eb2cd8b7SVineet Gupta #ifndef CONFIG_ARC_HAS_LLSC
107eb2cd8b7SVineet Gupta 	preempt_enable();
108eb2cd8b7SVineet Gupta #endif
10901b812bcSVineet Gupta 
11030d6e0a4SJiri Slaby 	if (!ret)
11130d6e0a4SJiri Slaby 		*oval = oldval;
11230d6e0a4SJiri Slaby 
11301b812bcSVineet Gupta 	return ret;
11401b812bcSVineet Gupta }
11501b812bcSVineet Gupta 
11631d30c82SVineet Gupta /*
11731d30c82SVineet Gupta  * cmpxchg of futex (pagefaults disabled by caller)
118882a95aeSVineet Gupta  * Return 0 for success, -EFAULT otherwise
11901b812bcSVineet Gupta  */
/*
 * Atomically: if (*uaddr == expval) *uaddr = newval.  The value actually
 * found at @uaddr is always written back through @uval, whether or not
 * the exchange happened — the generic futex code compares it to @expval
 * to tell the two outcomes apart.
 *
 * ret stays 0 on the normal path ("+&r" read-write constraint: the asm
 * only touches %0 in the fault fixup, where it is set to -EFAULT (%5)).
 * The LLSC variant retries on scond failure; the ld/st variant relies on
 * the preempt_disable() bracket below for atomicity on !LLSC configs.
 */
12001b812bcSVineet Gupta static inline int
futex_atomic_cmpxchg_inatomic(u32 * uval,u32 __user * uaddr,u32 expval,u32 newval)121ed574e2bSVineet Gupta futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 expval,
12201b812bcSVineet Gupta 			      u32 newval)
12301b812bcSVineet Gupta {
124882a95aeSVineet Gupta 	int ret = 0;
125ed574e2bSVineet Gupta 	u32 existval;
12601b812bcSVineet Gupta 
12796d4f267SLinus Torvalds 	if (!access_ok(uaddr, sizeof(u32)))
12801b812bcSVineet Gupta 		return -EFAULT;
12901b812bcSVineet Gupta 
130eb2cd8b7SVineet Gupta #ifndef CONFIG_ARC_HAS_LLSC
131eb2cd8b7SVineet Gupta 	preempt_disable();	/* to guarantee atomic r-m-w of futex op */
132eb2cd8b7SVineet Gupta #endif
13331d30c82SVineet Gupta 	smp_mb();
13401b812bcSVineet Gupta 
13501b812bcSVineet Gupta 	__asm__ __volatile__(
1369138d413SVineet Gupta #ifdef CONFIG_ARC_HAS_LLSC
	/* %1 = existval, %2 = expval, %3 = newval, %4 = uaddr */
137882a95aeSVineet Gupta 	"1:	llock	%1, [%4]		\n"
138882a95aeSVineet Gupta 	"	brne	%1, %2, 3f		\n"	/* mismatch: skip store */
139882a95aeSVineet Gupta 	"2:	scond	%3, [%4]		\n"
1409138d413SVineet Gupta 	"	bnz	1b			\n"	/* scond failed: retry */
1419138d413SVineet Gupta #else
142882a95aeSVineet Gupta 	"1:	ld	%1, [%4]		\n"
143882a95aeSVineet Gupta 	"	brne	%1, %2, 3f		\n"	/* mismatch: skip store */
144882a95aeSVineet Gupta 	"2:	st	%3, [%4]		\n"
1459138d413SVineet Gupta #endif
14601b812bcSVineet Gupta 	"3:	\n"
14701b812bcSVineet Gupta 	"	.section .fixup,\"ax\"	\n"
148882a95aeSVineet Gupta 	"4:	mov %0, %5	\n"	/* fault at 1b/2b: ret = -EFAULT */
1496de6066cSYuriy Kolerov 	"	j   3b	\n"
15001b812bcSVineet Gupta 	"	.previous	\n"
15101b812bcSVineet Gupta 	"	.section __ex_table,\"a\"	\n"
15201b812bcSVineet Gupta 	"	.align  4	\n"
15301b812bcSVineet Gupta 	"	.word   1b, 4b	\n"
15401b812bcSVineet Gupta 	"	.word   2b, 4b	\n"
15501b812bcSVineet Gupta 	"	.previous\n"
156882a95aeSVineet Gupta 	: "+&r"(ret), "=&r"(existval)
157ed574e2bSVineet Gupta 	: "r"(expval), "r"(newval), "r"(uaddr), "ir"(-EFAULT)
15801b812bcSVineet Gupta 	: "cc", "memory");
15901b812bcSVineet Gupta 
16031d30c82SVineet Gupta 	smp_mb();
16101b812bcSVineet Gupta 
162eb2cd8b7SVineet Gupta #ifndef CONFIG_ARC_HAS_LLSC
163eb2cd8b7SVineet Gupta 	preempt_enable();
164eb2cd8b7SVineet Gupta #endif
165ed574e2bSVineet Gupta 	*uval = existval;	/* report observed value even on mismatch/fault */
166882a95aeSVineet Gupta 	return ret;
16701b812bcSVineet Gupta }
16801b812bcSVineet Gupta 
16901b812bcSVineet Gupta #endif
170