/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h>
#include <asm/asm.h>
#include <asm/export.h>

/*
 * rep_movs_alternative - memory copy with exception handling.
 * This version is for CPUs that don't have FSRM (Fast Short Rep Movs).
 *
 * Input:
 * rdi destination
 * rsi source
 * rcx count
 *
 * Output:
 * rcx uncopied bytes or 0 if successful.
 *
 * NOTE! The calling convention is very intentionally the same as
 * for 'rep movs', so that we can rewrite the function call with
 * just a plain 'rep movs' on machines that have FSRM. But to make
 * it simpler for us, we can clobber rsi/rdi and rax freely.
 */
SYM_FUNC_START(rep_movs_alternative)
	cmpq $64,%rcx			/* 64 bytes or more: large-copy path */
	jae .Llarge

	cmp $8,%ecx			/* at least one full qword? */
	jae .Lword

	testl %ecx,%ecx			/* zero-length copy is a no-op */
	je .Lexit

	/* Byte-at-a-time copy; also the mop-up path after a partial fault. */
.Lcopy_user_tail:
0:	movb (%rsi),%al
1:	movb %al,(%rdi)
	inc %rdi
	inc %rsi
	dec %rcx
	jne .Lcopy_user_tail
.Lexit:
	RET

	/* A fault in the byte loop exits with the remaining count in %rcx. */
	_ASM_EXTABLE_UA( 0b, .Lexit)
	_ASM_EXTABLE_UA( 1b, .Lexit)

	.p2align 4
	/* 8..63 bytes: copy a qword at a time, then finish with the byte tail. */
.Lword:
2:	movq (%rsi),%rax
3:	movq %rax,(%rdi)
	addq $8,%rsi
	addq $8,%rdi
	sub $8,%ecx
	je .Lexit
	cmp $8,%ecx
	jae .Lword
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 2b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA( 3b, .Lcopy_user_tail)

	/* 64 bytes or more: 'rep movsb' on ERMS hardware, else a movsq loop. */
.Llarge:
0:	ALTERNATIVE "jmp .Llarge_movsq", "rep movsb", X86_FEATURE_ERMS
1:	RET

	_ASM_EXTABLE_UA( 0b, 1b)

.Llarge_movsq:
	movq %rcx,%rax			/* %rax = byte count */
	shrq $3,%rcx			/* %rcx = number of qwords */
	andl $7,%eax			/* %eax = tail bytes (count % 8) */
0:	rep movsq
	movl %eax,%ecx			/* copy the tail byte by byte */
	testl %ecx,%ecx
	jne .Lcopy_user_tail
	RET

	/* Fault in 'rep movsq': bytes left = tail + 8 * qwords still in %rcx. */
1:	leaq (%rax,%rcx,8),%rcx
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 0b, 1b)
SYM_FUNC_END(rep_movs_alternative)
EXPORT_SYMBOL(rep_movs_alternative)
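
/*
 * Illustration (not assembled into the kernel build): how the caller side
 * exploits the 'rep movs' calling convention documented above. On FSRM
 * hardware the alternatives machinery leaves a bare 'rep movsb' in place;
 * everywhere else the site becomes a call into rep_movs_alternative. The
 * "+c"/"+D"/"+S" constraints and the "rax" clobber mirror the register
 * contract in the header comment. This is a simplified sketch along the
 * lines of copy_user_generic() in arch/x86/include/asm/uaccess_64.h, not
 * a verbatim copy:
 *
 *	static __always_inline unsigned long
 *	copy_user_generic(void *to, const void *from, unsigned long len)
 *	{
 *		stac();					// open user access
 *		asm volatile(
 *			"1:\n\t"
 *			ALTERNATIVE("rep movsb",
 *				    "call rep_movs_alternative",
 *				    ALT_NOT(X86_FEATURE_FSRM))
 *			"2:\n"
 *			_ASM_EXTABLE_UA(1b, 2b)
 *			: "+c" (len), "+D" (to), "+S" (from),
 *			  ASM_CALL_CONSTRAINT
 *			: : "memory", "rax");
 *		clac();					// close user access
 *		return len;				// uncopied bytes, 0 on success
 *	}
 */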
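
/*
 * A plain-C model of the control flow above (illustrative only: no fault
 * handling, no ERMS 'rep movsb' path, and rep_movs_model is a made-up
 * name). It shows the byte/word/large dispatch and the .Llarge_movsq
 * count bookkeeping: because the qword count and the tail are kept
 * separately, a fault inside 'rep movsq' can rebuild the bytes left as
 * tail + 8 * remaining qwords, which is exactly what
 * 'leaq (%rax,%rcx,8),%rcx' computes.
 *
 *	#include <stddef.h>
 *	#include <string.h>
 *
 *	static size_t rep_movs_model(unsigned char *dst,
 *				     const unsigned char *src, size_t count)
 *	{
 *		if (count >= 64) {			// .Llarge_movsq
 *			size_t qwords = count >> 3;	// shrq $3,%rcx
 *			count &= 7;			// andl $7,%eax
 *			while (qwords--) {		// rep movsq
 *				memcpy(dst, src, 8);
 *				dst += 8;
 *				src += 8;
 *			}
 *		} else {
 *			while (count >= 8) {		// .Lword
 *				memcpy(dst, src, 8);
 *				dst += 8;
 *				src += 8;
 *				count -= 8;
 *			}
 *		}
 *		while (count) {				// .Lcopy_user_tail
 *			*dst++ = *src++;
 *			count--;
 *		}
 *		return 0;				// uncopied bytes
 *	}
 */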