xref: /openbmc/linux/arch/x86/lib/copy_user_64.S (revision ffcdf473)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/asm.h>
#include <asm/export.h>

/*
 * rep_movs_alternative - memory copy with exception handling.
 * This version is for CPUs that don't have FSRM (Fast Short Rep Movs)
 *
 * Input:
 * rdi destination
 * rsi source
 * rcx count
 *
 * Output:
 * rcx uncopied bytes or 0 if successful.
 *
 * NOTE! The calling convention is very intentionally the same as
 * for 'rep movs', so that we can rewrite the function call with
 * just a plain 'rep movs' on machines that have FSRM.  But to make
 * it simpler for us, we can clobber rsi/rdi and rax/r8-r11 freely.
 */
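/*
 * For context, a rough sketch of the kind of C caller this calling
 * convention is designed for, loosely modeled on copy_user_generic()
 * in arch/x86/include/asm/uaccess_64.h: on FSRM hardware the call is
 * patched into a bare 'rep movsb', otherwise it calls this function.
 * The exact constraints and clobbers below are illustrative and may
 * differ between kernel versions.
 *
 *	static __always_inline __must_check unsigned long
 *	copy_user_generic(void *to, const void *from, unsigned long len)
 *	{
 *		stac();
 *		asm volatile(
 *			"1:\n\t"
 *			ALTERNATIVE("rep movsb",
 *				    "call rep_movs_alternative",
 *				    ALT_NOT(X86_FEATURE_FSRM))
 *			"2:\n"
 *			_ASM_EXTABLE_UA(1b, 2b)
 *			: "+c" (len), "+D" (to), "+S" (from), ASM_CALL_CONSTRAINT
 *			: : "memory", "rax", "r8", "r9", "r10", "r11");
 *		clac();
 *		return len;
 *	}
 */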
SYM_FUNC_START(rep_movs_alternative)
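	/*
	 * Dispatch on the size in %rcx: 64 bytes and up use the unrolled
	 * loop, 8..63 bytes use the word-at-a-time loop, 1..7 bytes fall
	 * through to the byte tail, and a zero count returns immediately.
	 */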
	cmpq $64,%rcx
	jae .Lunrolled

	cmp $8,%ecx
	jae .Lword

	testl %ecx,%ecx
	je .Lexit

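/*
 * Byte-at-a-time tail copy.  A fault on either the load (0:) or the
 * store (1:) lands in .Lexit via the exception table below, with the
 * number of uncopied bytes still in %rcx.
 */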
.Lcopy_user_tail:
0:	movb (%rsi),%al
1:	movb %al,(%rdi)
	inc %rdi
	inc %rsi
	dec %rcx
	jne .Lcopy_user_tail
.Lexit:
	RET

	_ASM_EXTABLE_UA( 0b, .Lexit)
	_ASM_EXTABLE_UA( 1b, .Lexit)

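/*
 * Word-at-a-time copy for counts of 8..63 bytes.  A fault on the
 * load (2:) or store (3:) falls back to the byte tail, which retries
 * the remaining bytes one at a time and reports the exact residual
 * count.
 */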
	.p2align 4
.Lword:
2:	movq (%rsi),%rax
3:	movq %rax,(%rdi)
	addq $8,%rsi
	addq $8,%rdi
	sub $8,%ecx
	je .Lexit
	cmp $8,%ecx
	jae .Lword
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 2b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA( 3b, .Lcopy_user_tail)

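/*
 * Unrolled loop: copy 64 bytes per iteration, using %r8-%r11 as
 * scratch.  %rsi, %rdi and %rcx are only advanced after a full block,
 * so a fault in any of the sixteen accesses can restart the current
 * block from the byte tail.
 */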
	.p2align 4
.Lunrolled:
10:	movq (%rsi),%r8
11:	movq 8(%rsi),%r9
12:	movq 16(%rsi),%r10
13:	movq 24(%rsi),%r11
14:	movq %r8,(%rdi)
15:	movq %r9,8(%rdi)
16:	movq %r10,16(%rdi)
17:	movq %r11,24(%rdi)
20:	movq 32(%rsi),%r8
21:	movq 40(%rsi),%r9
22:	movq 48(%rsi),%r10
23:	movq 56(%rsi),%r11
24:	movq %r8,32(%rdi)
25:	movq %r9,40(%rdi)
26:	movq %r10,48(%rdi)
27:	movq %r11,56(%rdi)
	addq $64,%rsi
	addq $64,%rdi
	subq $64,%rcx
	cmpq $64,%rcx
	jae .Lunrolled
	cmpl $8,%ecx
	jae .Lword
	testl %ecx,%ecx
	jne .Lcopy_user_tail
	RET

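/*
 * Exception table for the unrolled loop: any faulting access resumes
 * at .Lcopy_user_tail, which re-copies the current block byte by
 * byte; when it reaches the faulting address the byte-loop fixup
 * fires and the remaining count is returned in %rcx.
 */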
	_ASM_EXTABLE_UA(10b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(11b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(12b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(13b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(14b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(15b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(16b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(17b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(20b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(21b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(22b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(23b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(24b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(25b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(26b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA(27b, .Lcopy_user_tail)
SYM_FUNC_END(rep_movs_alternative)
EXPORT_SYMBOL(rep_movs_alternative)