1 /* SPDX-License-Identifier: GPL-2.0-only */
2 /*
3  * Copyright (C) 2013 ARM Ltd.
4  */
5 #ifndef __ASM_WORD_AT_A_TIME_H
6 #define __ASM_WORD_AT_A_TIME_H
7 
8 #include <linux/uaccess.h>
9 
10 #ifndef __AARCH64EB__
11 
12 #include <linux/kernel.h>
13 
/*
 * Per-call constants for the little-endian "has a zero byte?" trick:
 * one_bits  = 0x0101...01 (REPEAT_BYTE(0x01)) is subtracted from the word,
 * high_bits = 0x8080...80 (REPEAT_BYTE(0x80)) isolates each byte's top bit.
 */
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }
19 
20 static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
21 				     const struct word_at_a_time *c)
22 {
23 	unsigned long mask = ((a - c->one_bits) & ~a) & c->high_bits;
24 	*bits = mask;
25 	return mask;
26 }
27 
28 #define prep_zero_mask(a, bits, c) (bits)
29 
/*
 * Turn the marker word from has_zero() into a byte mask that covers all
 * bytes strictly below the first zero byte: (bits - 1) & ~bits keeps
 * exactly the bits below the lowest set marker bit, and shifting right
 * by 7 aligns the result onto whole bytes (each marker sits at bit 7 of
 * its byte).
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	unsigned long below_first_marker = (bits - 1) & ~bits;

	return below_first_marker >> 7;
}
35 
/*
 * Return the byte index of the first zero byte, given the mask produced
 * by create_zero_mask(): the highest set bit's position (fls64, 1-based)
 * divided by 8 counts exactly the bytes preceding the zero byte. A zero
 * mask (zero byte at position 0) yields 0.
 */
static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long byte_index = fls64(mask) >> 3;

	return byte_index;
}

/* The mask from create_zero_mask() already has byte-mask form. */
#define zero_bytemask(mask) (mask)
42 
43 #else	/* __AARCH64EB__ */
44 #include <asm-generic/word-at-a-time.h>
45 #endif
46 
47 /*
48  * Load an unaligned word from kernel space.
49  *
50  * In the (very unlikely) case of the word being a page-crosser
51  * and the next page not being mapped, take the exception and
52  * return zeroes in the non-existing part.
53  */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret, tmp;

	/*
	 * Suppress async tag-check faults (MTE) around the load: the
	 * deliberate page-crossing access may touch bytes with a
	 * mismatched allocation tag.
	 */
	__uaccess_enable_tco_async();

	/* Load word from unaligned pointer addr */
	asm(
	/* 1: the faulting load; on a fault the extable entry jumps to 3. */
	"1:	ldr	%0, %3\n"
	"2:\n"
	"	.pushsection .fixup,\"ax\"\n"
	"	.align 2\n"
	/* Fixup: redo the load from the aligned-down address (same page), */
	"3:	bic	%1, %2, #0x7\n"
	"	ldr	%0, [%1]\n"
	/* ...then shift by 8 * (addr & 7) so the bytes that fall on the
	 * unmapped page read as zeroes. */
	"	and	%1, %2, #0x7\n"
	"	lsl	%1, %1, #0x3\n"
#ifndef __AARCH64EB__
	/* Little-endian: missing high bytes -> shift right. */
	"	lsr	%0, %0, %1\n"
#else
	/* Big-endian: missing low bytes -> shift left. */
	"	lsl	%0, %0, %1\n"
#endif
	"	b	2b\n"
	"	.popsection\n"
	_ASM_EXTABLE(1b, 3b)
	: "=&r" (ret), "=&r" (tmp)
	: "r" (addr), "Q" (*(unsigned long *)addr));

	__uaccess_disable_tco_async();

	return ret;
}
85 
86 #endif /* __ASM_WORD_AT_A_TIME_H */
87