/* SPDX-License-Identifier: GPL-2.0 */
/*
 *  S390 version
 *    Copyright IBM Corp. 1999
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 */

#ifndef _S390_STRING_H_
#define _S390_STRING_H_

#ifndef _LINUX_TYPES_H
#include <linux/types.h>
#endif

#define __HAVE_ARCH_MEMCHR	/* inline & arch function */
#define __HAVE_ARCH_MEMCMP	/* arch function */
#define __HAVE_ARCH_MEMCPY	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMMOVE	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSCAN	/* inline & arch function */
#define __HAVE_ARCH_MEMSET	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET16	/* arch function */
#define __HAVE_ARCH_MEMSET32	/* arch function */
#define __HAVE_ARCH_MEMSET64	/* arch function */
#define __HAVE_ARCH_STRCAT	/* inline & arch function */
#define __HAVE_ARCH_STRCMP	/* arch function */
#define __HAVE_ARCH_STRCPY	/* inline & arch function */
#define __HAVE_ARCH_STRLCAT	/* arch function */
#define __HAVE_ARCH_STRLCPY	/* arch function */
#define __HAVE_ARCH_STRLEN	/* inline & arch function */
#define __HAVE_ARCH_STRNCAT	/* arch function */
#define __HAVE_ARCH_STRNCPY	/* arch function */
#define __HAVE_ARCH_STRNLEN	/* inline & arch function */
#define __HAVE_ARCH_STRRCHR	/* arch function */
#define __HAVE_ARCH_STRSTR	/* arch function */

/* Prototypes for non-inlined arch string functions. */
int memcmp(const void *s1, const void *s2, size_t n);
void *memcpy(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
void *memmove(void *dest, const void *src, size_t n);
int strcmp(const char *s1, const char *s2);
size_t strlcat(char *dest, const char *src, size_t n);
size_t strlcpy(char *dest, const char *src, size_t size);
char *strncat(char *dest, const char *src, size_t n);
char *strncpy(char *dest, const char *src, size_t n);
char *strrchr(const char *s, int c);
char *strstr(const char *s1, const char *s2);

#undef __HAVE_ARCH_STRCHR
#undef __HAVE_ARCH_STRNCHR
#undef __HAVE_ARCH_STRNCMP
#undef __HAVE_ARCH_STRPBRK
#undef __HAVE_ARCH_STRSEP
#undef __HAVE_ARCH_STRSPN

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

extern void *__memcpy(void *dest, const void *src, size_t n);
extern void *__memset(void *s, int c, size_t n);
extern void *__memmove(void *dest, const void *src, size_t n);

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the non-instrumented versions of the mem* functions.
 */

#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#endif /* defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) */

void *__memset16(uint16_t *s, uint16_t v, size_t count);
void *__memset32(uint32_t *s, uint32_t v, size_t count);
void *__memset64(uint64_t *s, uint64_t v, size_t count);

static inline void *memset16(uint16_t *s, uint16_t v, size_t count)
{
	return __memset16(s, v, count * sizeof(v));
}

static inline void *memset32(uint32_t *s, uint32_t v, size_t count)
{
	return __memset32(s, v, count * sizeof(v));
}

static inline void *memset64(uint64_t *s, uint64_t v, size_t count)
{
	return __memset64(s, v, count * sizeof(v));
}

#if !defined(IN_ARCH_STRING_C) && (!defined(CONFIG_FORTIFY_SOURCE) || defined(__NO_FORTIFY))

static inline void *memchr(const void *s, int c, size_t n)
{
	register int r0 asm("0") = (char) c;
	const void *ret = s + n;

	asm volatile(
		"0:	srst	%0,%1\n"	/* search [s, s + n) for the byte in r0 */
		"	jo	0b\n"		/* cc 3: search incomplete, resume */
		"	jl	1f\n"		/* cc 1: found, ret holds its address */
		"	la	%0,0\n"		/* cc 2: not found, return NULL */
		"1:"
		: "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
	return (void *) ret;
}

static inline void *memscan(void *s, int c, size_t n)
{
	register int r0 asm("0") = (char) c;
	const void *ret = s + n;

	asm volatile(
		"0:	srst	%0,%1\n"	/* like memchr, but return s + n when not found */
		"	jo	0b\n"
		: "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
	return (void *) ret;
}

static inline char *strcat(char *dst, const char *src)
{
	register int r0 asm("0") = 0;
	unsigned long dummy;
	char *ret = dst;

	asm volatile(
		"0:	srst	%0,%1\n"	/* find the terminating NUL of dst */
		"	jo	0b\n"
		"1:	mvst	%0,%2\n"	/* copy src, including its NUL, to that point */
		"	jo	1b"
		: "=&a" (dummy), "+a" (dst), "+a" (src)
		: "d" (r0), "0" (0) : "cc", "memory");
	return ret;
}

static inline char *strcpy(char *dst, const char *src)
{
	register int r0 asm("0") = 0;
	char *ret = dst;

	asm volatile(
		"0:	mvst	%0,%1\n"	/* copy src up to and including the NUL */
		"	jo	0b"
		: "+&a" (dst), "+&a" (src) : "d" (r0)
		: "cc", "memory");
	return ret;
}

static inline size_t strlen(const char *s)
{
	register unsigned long r0 asm("0") = 0;
	const char *tmp = s;

	asm volatile(
		"0:	srst	%0,%1\n"	/* r0 doubles as search byte (NUL) and end address */
		"	jo	0b"
		: "+d" (r0), "+a" (tmp) : : "cc", "memory");
	return r0 - (unsigned long) s;
}

static inline size_t strnlen(const char *s, size_t n)
{
	register int r0 asm("0") = 0;
	const char *tmp = s;
	const char *end = s + n;

	asm volatile(
		"0:	srst	%0,%1\n"	/* end is moved back to the NUL if one is found before s + n */
		"	jo	0b"
		: "+a" (end), "+a" (tmp) : "d" (r0) : "cc", "memory");
	return end - s;
}
#else /* IN_ARCH_STRING_C */
void *memchr(const void *s, int c, size_t n);
void *memscan(void *s, int c, size_t n);
char *strcat(char *dst, const char *src);
char *strcpy(char *dst, const char *src);
size_t strlen(const char *s);
size_t strnlen(const char *s, size_t n);
#endif /* !IN_ARCH_STRING_C */

#endif /* _S390_STRING_H_ */
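/*
 * Minimal usage sketch (illustrative only, not part of the interface above):
 * memset16/32/64 take an element count, not a byte count; the inline wrappers
 * scale the count by the element size before calling the arch
 * __memset{16,32,64} helpers.  The buffer name below is hypothetical.
 *
 *	u16 buf[64];
 *
 *	memset16(buf, 0xabcd, ARRAY_SIZE(buf));		fills all 64 elements (128 bytes)
 */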