/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__
#include <linux/jump_label.h>

/* Written 2002 by Andi Kleen */

/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

/*
 * Tell the generic string code that this architecture provides its own
 * memcpy; under the memory sanitizer, redirect all memcpy calls to the
 * instrumented __msan_memcpy so accesses are checked.
 */
#define __HAVE_ARCH_MEMCPY 1
#if defined(__SANITIZE_MEMORY__)
#undef memcpy
void *__msan_memcpy(void *dst, const void *src, size_t size);
#define memcpy __msan_memcpy
#else
extern void *memcpy(void *to, const void *from, size_t len);
#endif
/* Uninstrumented variant, always available regardless of sanitizers. */
extern void *__memcpy(void *to, const void *from, size_t len);

/* Same arrangement for memset: sanitized builds go through __msan_memset. */
#define __HAVE_ARCH_MEMSET
#if defined(__SANITIZE_MEMORY__)
extern void *__msan_memset(void *s, int c, size_t n);
#undef memset
#define memset __msan_memset
#else
void *memset(void *s, int c, size_t n);
#endif
/* Uninstrumented variant, always available regardless of sanitizers. */
void *__memset(void *s, int c, size_t n);

/*
 * memset16 - fill n 16-bit words starting at s with the value v.
 *
 * Implemented as "rep stosw": AX holds the pattern, RDI the destination,
 * RCX the word count.  d0/d1 are write-only dummies that tell the
 * compiler RCX and RDI are clobbered by the string operation; the
 * "0"/"1" input constraints tie n and s to those same registers.
 * Returns s, matching the memset-style contract.
 */
#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

/* memset32 - as memset16, but fills n 32-bit words via "rep stosl". */
#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

/* memset64 - as memset16, but fills n 64-bit words via "rep stosq". */
#define __HAVE_ARCH_MEMSET64
static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosq"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

/* memmove follows the same sanitizer redirection scheme as memcpy/memset. */
#define __HAVE_ARCH_MEMMOVE
#if defined(__SANITIZE_MEMORY__)
#undef memmove
void *__msan_memmove(void *dest, const void *src, size_t len);
#define memmove __msan_memmove
#else
void *memmove(void *dest, const void *src, size_t count);
#endif
/* Uninstrumented variant, always available regardless of sanitizers. */
void *__memmove(void *dest, const void *src, size_t count);

/* Remaining arch-provided string primitives (out-of-line implementations). */
int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#if (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__))
/*
 * Files that are not instrumented (e.g. mm/slub.c) should use the
 * non-instrumented versions of the mem* functions.
 */

#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#undef memmove
#define memmove(dst, src, len) __memmove(dst, src, len)
#undef memset
#define memset(s, c, n) __memset(s, c, n)

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#endif

#ifdef CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE
#define __HAVE_ARCH_MEMCPY_FLUSHCACHE 1
/* Out-of-line fallback for sizes not special-cased below. */
void __memcpy_flushcache(void *dst, const void *src, size_t cnt);
/*
 * memcpy_flushcache - copy cnt bytes from src to dst using non-temporal
 * stores (movnti), which bypass the cache so the data need not be
 * flushed afterwards.
 *
 * When cnt is a compile-time constant of 4, 8 or 16 bytes the copy is
 * inlined as one or two movnti stores; every other size falls through
 * to the out-of-line __memcpy_flushcache().
 *
 * NOTE(review): the 4/8/16 fast paths load *src with plain u32/u64
 * dereferences — presumably callers guarantee suitable alignment for
 * these sizes; verify against call sites.
 */
static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
{
	if (__builtin_constant_p(cnt)) {
		switch (cnt) {
		case 4:
			asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src));
			return;
		case 8:
			asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
			return;
		case 16:
			/* Two 8-byte non-temporal stores. */
			asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
			asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8)));
			return;
		}
	}
	__memcpy_flushcache(dst, src, cnt);
}
#endif

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */