xref: /openbmc/linux/arch/x86/include/asm/string_32.h (revision 5d0e4d78)
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out-of-line functions */

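/*
 * Each __HAVE_ARCH_* define below tells <linux/string.h> that this
 * architecture provides its own version of the routine, so the generic
 * implementation in lib/string.c is not used for it.
 */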
#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

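/*
 * Copy n/4 dwords with "rep ; movsl", then pick up the remaining 0-3
 * bytes with "rep ; movsb".  The "0"/"1"/"2" input constraints reuse the
 * dummy outputs d0/d1/d2, so ECX/EDI/ESI are loaded with the dword count,
 * destination and source, and the compiler knows they get clobbered.
 */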
static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

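	/*
	 * For sizes not handled above: copy the whole dwords first, either
	 * with a counted "rep ; movsl" or with up to four unrolled "movsl"
	 * instructions, then let the switch below store the last n%4 bytes.
	 */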
	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

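/*
 * With CONFIG_FORTIFY_SOURCE the macro overrides below are left out, so
 * that the fortified memcpy()/memcmp()/memset() wrappers in
 * <linux/string.h> can do their buffer-size checking instead.
 */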
#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 *	This CPU favours 3DNow strongly (e.g. AMD Athlon)
 */

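/*
 * _mmx_memcpy() has to save and restore the FPU/MMX state around the
 * copy, so it only pays off for larger blocks; anything below 512 bytes
 * keeps using the plain string-instruction versions above.
 */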
static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))

#else

/*
 *	No 3D Now!
 */

#ifndef CONFIG_KMEMCHECK

#if (__GNUC__ >= 4)
#define memcpy(t, f, n) __builtin_memcpy(t, f, n)
#else
#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy((t), (f), (n))	\
	 : __memcpy((t), (f), (n)))
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(t, f, n) __memcpy((t), (f), (n))
#endif

#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

extern int memcmp(const void *, const void *, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memcmp __builtin_memcmp
#endif

#define __HAVE_ARCH_MEMCHR
extern void *memchr(const void *cs, int c, size_t count);

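/*
 * Byte-at-a-time memset: AL holds the fill byte, ECX the count and EDI
 * the destination, so "rep ; stosb" stores count copies of the byte.
 */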
static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
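	/*
	 * 'c' already has the fill byte replicated into all four bytes.
	 * Store count/4 dwords with "rep ; stosl", then test bits 1 and 0
	 * of the original byte count to emit the trailing stosw/stosb.
	 */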
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}

/* Added by Gertjan van Wingerde to make minix and sysv modules work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count are constant..
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}

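/*
 * COMMON() emits a "rep ; stosl" that stores count/4 dwords of the
 * replicated pattern, followed by whatever trailing stosw/stosb sequence
 * the caller appends for the final count%4 bytes.
 */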
#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)	\
		     : "memory")

	{
		int d0, d1;
#if __GNUC__ == 4 && __GNUC_MINOR__ == 0
		/* Workaround for broken gcc 4.0 */
		register unsigned long eax asm("%eax") = pattern;
#else
		unsigned long eax = pattern;
#endif

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}

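/*
 * Two-level dispatch: __constant_c_x_memset() is used when the fill
 * pattern is a compile-time constant; it then picks the fully unrolled
 * variant if the length is constant as well, and the rep-based one
 * otherwise.
 */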
#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#if (__GNUC__ >= 4)
#define memset(s, c, count) __builtin_memset(s, c, count)
#else
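/*
 * Multiplying the (unsigned char) fill value by 0x01010101UL replicates
 * it into all four bytes of the long, which is the pattern that the
 * __constant_c_* helpers above expect.
 */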
#define memset(s, c, count)						\
	(__builtin_constant_p(c)					\
	 ? __constant_c_x_memset((s), (0x01010101UL * (unsigned char)(c)), \
				 (count))				\
	 : __memset((s), (c), (count)))
#endif
#endif /* !CONFIG_FORTIFY_SOURCE */

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */