// SPDX-License-Identifier: GPL-2.0
/*
 * KCSAN short boot-time selftests.
 *
 * Copyright (C) 2019, Google LLC.
 */

#define pr_fmt(fmt) "kcsan: " fmt

#include <linux/atomic.h>
#include <linux/bitops.h>
#include <linux/init.h>
#include <linux/kcsan-checks.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/random.h>
#include <linux/sched.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#include "encoding.h"

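/* Iterations for the randomized encode/decode test below. */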
#define ITERS_PER_TEST 2000

/*
 * Test watchpoint encoding and decoding: check that encoding an access's
 * info and then decoding it preserves that info.
 */
static bool __init test_encode_decode(void)
{
	int i;

	for (i = 0; i < ITERS_PER_TEST; ++i) {
		size_t size = get_random_u32_inclusive(1, MAX_ENCODABLE_SIZE);
		bool is_write = !!get_random_u32_below(2);
		unsigned long verif_masked_addr;
		long encoded_watchpoint;
		bool verif_is_write;
		unsigned long addr;
		size_t verif_size;

		get_random_bytes(&addr, sizeof(addr));
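		/* Addresses in the first page are not encodable; see check_encodable(). */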
		if (addr < PAGE_SIZE)
			addr = PAGE_SIZE;

		if (WARN_ON(!check_encodable(addr, size)))
			return false;

		encoded_watchpoint = encode_watchpoint(addr, size, is_write);

		/* Check that the special watchpoint values never decode successfully. */
		if (WARN_ON(decode_watchpoint(INVALID_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(decode_watchpoint(CONSUMED_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;

		/* Check that decoding the watchpoint returns the same data. */
		if (WARN_ON(!decode_watchpoint(encoded_watchpoint, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(verif_masked_addr != (addr & WATCHPOINT_ADDR_MASK)))
			goto fail;
		if (WARN_ON(verif_size != size))
			goto fail;
		if (WARN_ON(is_write != verif_is_write))
			goto fail;

		continue;
fail:
		pr_err("%s fail: %s %zu bytes @ %lx -> encoded: %lx -> %s %zu bytes @ %lx\n",
		       __func__, is_write ? "write" : "read", size, addr, encoded_watchpoint,
		       verif_is_write ? "write" : "read", verif_size, verif_masked_addr);
		return false;
	}

	return true;
}

/* Test the access matching function. */
static bool __init test_matching_access(void)
{
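	/* matching_access(addr1, size1, addr2, size2): do the two ranges overlap? */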
	if (WARN_ON(!matching_access(10, 1, 10, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 2, 11, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 1, 9, 2)))
		return false;
	if (WARN_ON(matching_access(10, 1, 11, 1)))
		return false;
	if (WARN_ON(matching_access(9, 1, 10, 1)))
		return false;

	/*
	 * An access of size 0 could match another access, as demonstrated here.
	 * Rather than add more comparisons to 'matching_access()', which would
	 * end up in the fast-path for *all* checks, check_access() simply
	 * returns for all accesses of size 0.
	 */
	if (WARN_ON(!matching_access(8, 8, 12, 0)))
		return false;

	return true;
}

/*
 * Correct memory barrier instrumentation is critical to avoiding false
 * positives: a simple boot-time test to check that certain barriers are
 * always properly instrumented. See kcsan_test for a more complete test.
 */
static DEFINE_SPINLOCK(test_spinlock);
static bool __init test_barrier(void)
{
#ifdef CONFIG_KCSAN_WEAK_MEMORY
	struct kcsan_scoped_access *reorder_access = &current->kcsan_ctx.reorder_access;
#else
	struct kcsan_scoped_access *reorder_access = NULL;
#endif
	bool ret = true;
	arch_spinlock_t arch_spinlock = __ARCH_SPIN_LOCK_UNLOCKED;
	atomic_t dummy;
	long test_var;

	if (!reorder_access || !IS_ENABLED(CONFIG_SMP))
		return true;

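	/*
	 * Plant a fake reorder_access of size 1; a properly instrumented
	 * barrier must reset its size to 0, i.e. "flush" the access.
	 */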
#define __KCSAN_CHECK_BARRIER(access_type, barrier, name)					\
	do {											\
		reorder_access->type = (access_type) | KCSAN_ACCESS_SCOPED;			\
		reorder_access->size = 1;							\
		barrier;									\
		if (reorder_access->size != 0) {						\
			pr_err("improperly instrumented type=(" #access_type "): " name "\n");	\
			ret = false;								\
		}										\
	} while (0)
#define KCSAN_CHECK_READ_BARRIER(b)  __KCSAN_CHECK_BARRIER(0, b, #b)
#define KCSAN_CHECK_WRITE_BARRIER(b) __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE, b, #b)
#define KCSAN_CHECK_RW_BARRIER(b)    __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE | KCSAN_ACCESS_COMPOUND, b, #b)

	kcsan_nestable_atomic_begin(); /* No watchpoints in called functions. */

	KCSAN_CHECK_READ_BARRIER(mb());
	KCSAN_CHECK_READ_BARRIER(rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb());
	KCSAN_CHECK_READ_BARRIER(smp_rmb());
	KCSAN_CHECK_READ_BARRIER(dma_rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_READ_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_READ_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(__clear_bit_unlock(0, &test_var));
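	/* Unlocks imply release barriers; take the lock first so the unlock is valid. */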
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_READ_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_WRITE_BARRIER(mb());
	KCSAN_CHECK_WRITE_BARRIER(wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb());
	KCSAN_CHECK_WRITE_BARRIER(smp_wmb());
	KCSAN_CHECK_WRITE_BARRIER(dma_wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_WRITE_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_RW_BARRIER(mb());
	KCSAN_CHECK_RW_BARRIER(wmb());
	KCSAN_CHECK_RW_BARRIER(rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb());
	KCSAN_CHECK_RW_BARRIER(smp_wmb());
	KCSAN_CHECK_RW_BARRIER(smp_rmb());
	KCSAN_CHECK_RW_BARRIER(dma_wmb());
	KCSAN_CHECK_RW_BARRIER(dma_rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_RW_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_RW_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_RW_BARRIER(spin_unlock(&test_spinlock));

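	/* Not all architectures provide clear_bit_unlock_is_negative_byte(). */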
#ifdef clear_bit_unlock_is_negative_byte
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
#endif
	kcsan_nestable_atomic_end();

	return ret;
}

static int __init kcsan_selftest(void)
{
	int passed = 0;
	int total = 0;

#define RUN_TEST(do_test)                                                      \
	do {                                                                   \
		++total;                                                       \
		if (do_test())                                                 \
			++passed;                                              \
		else                                                           \
			pr_err("selftest: " #do_test " failed\n");             \
	} while (0)

	RUN_TEST(test_encode_decode);
	RUN_TEST(test_matching_access);
	RUN_TEST(test_barrier);

	pr_info("selftest: %d/%d tests passed\n", passed, total);
	if (passed != total)
		panic("selftests failed");
	return 0;
}
postcore_initcall(kcsan_selftest);