xref: /openbmc/linux/arch/x86/lib/copy_mc_64.S (revision 52beb1fc)
/* SPDX-License-Identifier: GPL-2.0-only */
/* Copyright(c) 2016-2020 Intel Corporation. All rights reserved. */

#include <linux/linkage.h>
#include <asm/asm.h>

#ifndef CONFIG_UML

#ifdef CONFIG_X86_MCE

/*
 * copy_mc_fragile - copy memory, indicating whether an exception / fault happened
 *
 * The 'fragile' version is opted into by platform quirks and takes
 * pains to avoid unrecoverable corner cases, like 'fast-string'
 * instruction sequences and consuming poison across a cacheline
 * boundary. The non-fragile version is equivalent to memcpy()
 * regardless of CPU machine-check-recovery capability.
 */
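/*
 * Roughly equivalent C, as an illustrative sketch only (the helper
 * name is invented for this comment and is not a kernel symbol):
 *
 *	unsigned long copy_mc_fragile_sketch(char *dst, const char *src,
 *					     unsigned long len)
 *	{
 *		if (len >= 8) {
 *			while ((unsigned long)src & 7) {
 *				*dst++ = *src++;	// leading bytes
 *				len--;
 *			}
 *			while (len >= 8) {		// whole 8-byte words
 *				*(unsigned long *)dst =
 *					*(const unsigned long *)src;
 *				dst += 8; src += 8; len -= 8;
 *			}
 *		}
 *		while (len--)				// trailing bytes
 *			*dst++ = *src++;
 *		return 0;	// on a fault: bytes not copied instead
 *	}
 *
 * Each load and store below additionally carries an exception table
 * entry, so a fault or machine check lands in the .E_* handlers and
 * is accounted rather than crashing the kernel.
 */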
SYM_FUNC_START(copy_mc_fragile)
	cmpl $8, %edx
	/* Less than 8 bytes? Go to byte copy loop */
	jb .L_no_whole_words

	/* Check for bad alignment of source */
	testl $7, %esi
	/* Already aligned */
	jz .L_8byte_aligned

	/* Copy one byte at a time until source is 8-byte aligned */
	movl %esi, %ecx
	andl $7, %ecx
	subl $8, %ecx
	negl %ecx
	subl %ecx, %edx
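	/*
	 * %ecx now holds 8 - (src & 7), the number of bytes needed to
	 * reach the next 8-byte boundary: e.g. src & 7 == 3 gives
	 * 3 - 8 = -5, negated to 5, and %edx has already been reduced
	 * by that count.
	 */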
.L_read_leading_bytes:
	movb (%rsi), %al
.L_write_leading_bytes:
	movb %al, (%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .L_read_leading_bytes

.L_8byte_aligned:
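	/*
	 * Split what is left: %ecx = whole 8-byte words (len >> 3),
	 * %edx = leftover byte count (len & 7). E.g. 21 remaining
	 * bytes become 2 words plus 5 trailing bytes.
	 */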
	movl %edx, %ecx
	andl $7, %edx
	shrl $3, %ecx
	jz .L_no_whole_words

.L_read_words:
	movq (%rsi), %r8
.L_write_words:
	movq %r8, (%rdi)
	addq $8, %rsi
	addq $8, %rdi
	decl %ecx
	jnz .L_read_words

	/* Any trailing bytes? */
.L_no_whole_words:
	andl %edx, %edx
	jz .L_done_memcpy_trap

	/* Copy trailing bytes */
	movl %edx, %ecx
.L_read_trailing_bytes:
	movb (%rsi), %al
.L_write_trailing_bytes:
	movb %al, (%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .L_read_trailing_bytes

	/* Copy successful. Return zero */
.L_done_memcpy_trap:
	xorl %eax, %eax
.L_done:
	RET

	/*
	 * Return number of bytes not copied for any failure. Note that
	 * there is no "tail" handling since the source buffer is 8-byte
	 * aligned and poison is cacheline aligned.
	 */
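	/*
	 * Illustrative accounting: a read fault with %ecx == 3 words
	 * still to go and %edx == 5 trailing bytes pending returns
	 * 3 * 8 + 5 = 29 bytes not copied.
	 */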
.E_read_words:
	shll	$3, %ecx
.E_leading_bytes:
	addl	%edx, %ecx
.E_trailing_bytes:
	mov	%ecx, %eax
	jmp	.L_done

	/*
	 * For write fault handling, since the destination may be
	 * unaligned, we handle faults on multi-byte writes with a
	 * byte-by-byte copy up to the write-protected page.
	 */
.E_write_words:
	shll	$3, %ecx
	addl	%edx, %ecx
	movl	%ecx, %edx
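	/*
	 * %edx now carries the not-yet-copied byte count, which is the
	 * length argument the C tail handler expects.
	 */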
	jmp copy_mc_fragile_handle_tail

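	/*
	 * The read sites use the MCE-safe extable type so that a
	 * machine check on poisoned source data is recoverable; the
	 * write sites can only fault and need plain handling.
	 */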
	_ASM_EXTABLE_TYPE(.L_read_leading_bytes, .E_leading_bytes, EX_TYPE_DEFAULT_MCE_SAFE)
	_ASM_EXTABLE_TYPE(.L_read_words, .E_read_words, EX_TYPE_DEFAULT_MCE_SAFE)
	_ASM_EXTABLE_TYPE(.L_read_trailing_bytes, .E_trailing_bytes, EX_TYPE_DEFAULT_MCE_SAFE)
	_ASM_EXTABLE(.L_write_leading_bytes, .E_leading_bytes)
	_ASM_EXTABLE(.L_write_words, .E_write_words)
	_ASM_EXTABLE(.L_write_trailing_bytes, .E_trailing_bytes)

SYM_FUNC_END(copy_mc_fragile)
#endif /* CONFIG_X86_MCE */

/*
 * copy_mc_enhanced_fast_string - memory copy with exception handling
 *
 * Fast string copy + fault / exception handling. If the CPU does
 * support machine check exception recovery, but does not support
 * recovering from fast-string exceptions, then this CPU needs to be
 * added to the copy_mc_fragile_key set of quirks. Otherwise, absent
 * any machine check recovery support, this version should be no
 * slower than a standard memcpy.
 */
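/*
 * Register use below: 'rep movsb' copies %rcx bytes from (%rsi) to
 * (%rdi), advancing both pointers and decrementing %rcx as it goes
 * (the kernel runs with the direction flag clear), so on a fault
 * %rcx is exactly the not-yet-copied byte count.
 */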
SYM_FUNC_START(copy_mc_enhanced_fast_string)
	movq %rdi, %rax
	movq %rdx, %rcx
.L_copy:
	rep movsb
	/* Copy successful. Return zero */
	xorl %eax, %eax
	RET

.E_copy:
	/*
	 * On fault, %rcx is updated such that the copy instruction
	 * could optionally be restarted at the fault position, i.e. it
	 * contains 'bytes remaining'. A non-zero return indicates an
	 * error to copy_mc_generic() users, or a short transfer to
	 * user-copy routines.
	 */
	movq %rcx, %rax
	RET


	_ASM_EXTABLE_TYPE(.L_copy, .E_copy, EX_TYPE_DEFAULT_MCE_SAFE)

SYM_FUNC_END(copy_mc_enhanced_fast_string)
#endif /* !CONFIG_UML */
150