xref: /openbmc/linux/lib/test_bpf.c (revision 68813605)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Testsuite for BPF interpreter and BPF JIT compiler
4  *
5  * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
6  */
7 
8 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
9 
10 #include <linux/init.h>
11 #include <linux/module.h>
12 #include <linux/filter.h>
13 #include <linux/bpf.h>
14 #include <linux/skbuff.h>
15 #include <linux/netdevice.h>
16 #include <linux/if_vlan.h>
17 #include <linux/random.h>
18 #include <linux/highmem.h>
19 #include <linux/sched.h>
20 
21 /* General test specific settings */
22 #define MAX_SUBTESTS	3
23 #define MAX_TESTRUNS	1000
24 #define MAX_DATA	128
25 #define MAX_INSNS	512
26 #define MAX_K		0xffffFFFF
27 
28 /* A few constants used to init the test 'skb' */
29 #define SKB_TYPE	3
30 #define SKB_MARK	0x1234aaaa
31 #define SKB_HASH	0x1234aaab
32 #define SKB_QUEUE_MAP	123
33 #define SKB_VLAN_TCI	0xffff
34 #define SKB_VLAN_PRESENT	1
35 #define SKB_DEV_IFINDEX	577
36 #define SKB_DEV_TYPE	588
37 
38 /* Redefine REGs to make tests less verbose */
39 #define R0		BPF_REG_0
40 #define R1		BPF_REG_1
41 #define R2		BPF_REG_2
42 #define R3		BPF_REG_3
43 #define R4		BPF_REG_4
44 #define R5		BPF_REG_5
45 #define R6		BPF_REG_6
46 #define R7		BPF_REG_7
47 #define R8		BPF_REG_8
48 #define R9		BPF_REG_9
49 #define R10		BPF_REG_10
50 
51 /* Flags that can be passed to test cases */
52 #define FLAG_NO_DATA		BIT(0)
53 #define FLAG_EXPECTED_FAIL	BIT(1)
54 #define FLAG_SKB_FRAG		BIT(2)
55 #define FLAG_VERIFIER_ZEXT	BIT(3)
56 
57 enum {
58 	CLASSIC  = BIT(6),	/* Old BPF instructions only. */
59 	INTERNAL = BIT(7),	/* Extended instruction set.  */
60 };
61 
62 #define TEST_TYPE_MASK		(CLASSIC | INTERNAL)
63 
64 struct bpf_test {
65 	const char *descr;
66 	union {
67 		struct sock_filter insns[MAX_INSNS];
68 		struct bpf_insn insns_int[MAX_INSNS];
69 		struct {
70 			void *insns;
71 			unsigned int len;
72 		} ptr;
73 	} u;
74 	__u8 aux;
75 	__u8 data[MAX_DATA];
76 	struct {
77 		int data_size;
78 		__u32 result;
79 	} test[MAX_SUBTESTS];
80 	int (*fill_helper)(struct bpf_test *self);
81 	int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
82 	__u8 frag_data[MAX_DATA];
83 	int stack_depth; /* for eBPF only, since tests don't call verifier */
84 	int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
85 };
86 
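/*
 * For reference, a test case entry built on this struct typically looks
 * like the sketch below (the description, program and expected values here
 * are made up purely for illustration):
 *
 *	{
 *		"Hypothetical: LD_IMM + RET_A",
 *		.u.insns = {
 *			BPF_STMT(BPF_LD | BPF_IMM, 1),
 *			BPF_STMT(BPF_RET | BPF_A, 0),
 *		},
 *		CLASSIC | FLAG_NO_DATA,
 *		{ },
 *		{ { 0, 1 } },
 *	},
 */
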
87 /* Large test cases need separate allocation and fill handler. */
88 
89 static int bpf_fill_maxinsns1(struct bpf_test *self)
90 {
91 	unsigned int len = BPF_MAXINSNS;
92 	struct sock_filter *insn;
93 	__u32 k = ~0;
94 	int i;
95 
96 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
97 	if (!insn)
98 		return -ENOMEM;
99 
100 	for (i = 0; i < len; i++, k--)
101 		insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
102 
103 	self->u.ptr.insns = insn;
104 	self->u.ptr.len = len;
105 
106 	return 0;
107 }
108 
109 static int bpf_fill_maxinsns2(struct bpf_test *self)
110 {
111 	unsigned int len = BPF_MAXINSNS;
112 	struct sock_filter *insn;
113 	int i;
114 
115 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
116 	if (!insn)
117 		return -ENOMEM;
118 
119 	for (i = 0; i < len; i++)
120 		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
121 
122 	self->u.ptr.insns = insn;
123 	self->u.ptr.len = len;
124 
125 	return 0;
126 }
127 
128 static int bpf_fill_maxinsns3(struct bpf_test *self)
129 {
130 	unsigned int len = BPF_MAXINSNS;
131 	struct sock_filter *insn;
132 	struct rnd_state rnd;
133 	int i;
134 
135 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
136 	if (!insn)
137 		return -ENOMEM;
138 
139 	prandom_seed_state(&rnd, 3141592653589793238ULL);
140 
141 	for (i = 0; i < len - 1; i++) {
142 		__u32 k = prandom_u32_state(&rnd);
143 
144 		insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
145 	}
146 
147 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
148 
149 	self->u.ptr.insns = insn;
150 	self->u.ptr.len = len;
151 
152 	return 0;
153 }
154 
155 static int bpf_fill_maxinsns4(struct bpf_test *self)
156 {
157 	unsigned int len = BPF_MAXINSNS + 1;
158 	struct sock_filter *insn;
159 	int i;
160 
161 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
162 	if (!insn)
163 		return -ENOMEM;
164 
165 	for (i = 0; i < len; i++)
166 		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
167 
168 	self->u.ptr.insns = insn;
169 	self->u.ptr.len = len;
170 
171 	return 0;
172 }
173 
174 static int bpf_fill_maxinsns5(struct bpf_test *self)
175 {
176 	unsigned int len = BPF_MAXINSNS;
177 	struct sock_filter *insn;
178 	int i;
179 
180 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
181 	if (!insn)
182 		return -ENOMEM;
183 
184 	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
185 
186 	for (i = 1; i < len - 1; i++)
187 		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
188 
189 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
190 
191 	self->u.ptr.insns = insn;
192 	self->u.ptr.len = len;
193 
194 	return 0;
195 }
196 
197 static int bpf_fill_maxinsns6(struct bpf_test *self)
198 {
199 	unsigned int len = BPF_MAXINSNS;
200 	struct sock_filter *insn;
201 	int i;
202 
203 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
204 	if (!insn)
205 		return -ENOMEM;
206 
207 	for (i = 0; i < len - 1; i++)
208 		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
209 				     SKF_AD_VLAN_TAG_PRESENT);
210 
211 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
212 
213 	self->u.ptr.insns = insn;
214 	self->u.ptr.len = len;
215 
216 	return 0;
217 }
218 
219 static int bpf_fill_maxinsns7(struct bpf_test *self)
220 {
221 	unsigned int len = BPF_MAXINSNS;
222 	struct sock_filter *insn;
223 	int i;
224 
225 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
226 	if (!insn)
227 		return -ENOMEM;
228 
229 	for (i = 0; i < len - 4; i++)
230 		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
231 				     SKF_AD_CPU);
232 
233 	insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
234 	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
235 				   SKF_AD_CPU);
236 	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
237 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
238 
239 	self->u.ptr.insns = insn;
240 	self->u.ptr.len = len;
241 
242 	return 0;
243 }
244 
245 static int bpf_fill_maxinsns8(struct bpf_test *self)
246 {
247 	unsigned int len = BPF_MAXINSNS;
248 	struct sock_filter *insn;
249 	int i, jmp_off = len - 3;
250 
251 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
252 	if (!insn)
253 		return -ENOMEM;
254 
255 	insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);
256 
257 	for (i = 1; i < len - 1; i++)
258 		insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);
259 
260 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
261 
262 	self->u.ptr.insns = insn;
263 	self->u.ptr.len = len;
264 
265 	return 0;
266 }
267 
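/*
 * Jump from the head to the very last insn, which jumps all the way back
 * to the return sequence at insns 1-2: exercises maximum-distance forward
 * and backward jumps.
 */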
268 static int bpf_fill_maxinsns9(struct bpf_test *self)
269 {
270 	unsigned int len = BPF_MAXINSNS;
271 	struct bpf_insn *insn;
272 	int i;
273 
274 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
275 	if (!insn)
276 		return -ENOMEM;
277 
278 	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
279 	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
280 	insn[2] = BPF_EXIT_INSN();
281 
282 	for (i = 3; i < len - 2; i++)
283 		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
284 
285 	insn[len - 2] = BPF_EXIT_INSN();
286 	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
287 
288 	self->u.ptr.insns = insn;
289 	self->u.ptr.len = len;
290 
291 	return 0;
292 }
293 
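/*
 * Chain of jumps that ping-pongs between the two ends of the program and
 * converges on the middle, which finally jumps to the return sequence in
 * the last two insns.
 */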
294 static int bpf_fill_maxinsns10(struct bpf_test *self)
295 {
296 	unsigned int len = BPF_MAXINSNS, hlen = len - 2;
297 	struct bpf_insn *insn;
298 	int i;
299 
300 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
301 	if (!insn)
302 		return -ENOMEM;
303 
304 	for (i = 0; i < hlen / 2; i++)
305 		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
306 	for (i = hlen - 1; i > hlen / 2; i--)
307 		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
308 
309 	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
310 	insn[hlen]     = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
311 	insn[hlen + 1] = BPF_EXIT_INSN();
312 
313 	self->u.ptr.insns = insn;
314 	self->u.ptr.len = len;
315 
316 	return 0;
317 }
318 
319 static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
320 			 unsigned int plen)
321 {
322 	struct sock_filter *insn;
323 	unsigned int rlen;
324 	int i, j;
325 
326 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
327 	if (!insn)
328 		return -ENOMEM;
329 
330 	rlen = (len % plen) - 1;
331 
332 	for (i = 0; i + plen < len; i += plen)
333 		for (j = 0; j < plen; j++)
334 			insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
335 						 plen - 1 - j, 0, 0);
336 	for (j = 0; j < rlen; j++)
337 		insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
338 					 0, 0);
339 
340 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
341 
342 	self->u.ptr.insns = insn;
343 	self->u.ptr.len = len;
344 
345 	return 0;
346 }
347 
348 static int bpf_fill_maxinsns11(struct bpf_test *self)
349 {
350 	/* Hits 70 passes on x86_64 and triggers NOP padding. */
351 	return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
352 }
353 
354 static int bpf_fill_maxinsns12(struct bpf_test *self)
355 {
356 	unsigned int len = BPF_MAXINSNS;
357 	struct sock_filter *insn;
358 	int i = 0;
359 
360 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
361 	if (!insn)
362 		return -ENOMEM;
363 
364 	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
365 
366 	for (i = 1; i < len - 1; i++)
367 		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
368 
369 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
370 
371 	self->u.ptr.insns = insn;
372 	self->u.ptr.len = len;
373 
374 	return 0;
375 }
376 
377 static int bpf_fill_maxinsns13(struct bpf_test *self)
378 {
379 	unsigned int len = BPF_MAXINSNS;
380 	struct sock_filter *insn;
381 	int i = 0;
382 
383 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
384 	if (!insn)
385 		return -ENOMEM;
386 
387 	for (i = 0; i < len - 3; i++)
388 		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
389 
390 	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
391 	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
392 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
393 
394 	self->u.ptr.insns = insn;
395 	self->u.ptr.len = len;
396 
397 	return 0;
398 }
399 
400 static int bpf_fill_ja(struct bpf_test *self)
401 {
402 	/* Hits exactly 11 passes on x86_64 JIT. */
403 	return __bpf_fill_ja(self, 12, 9);
404 }
405 
406 static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
407 {
408 	unsigned int len = BPF_MAXINSNS;
409 	struct sock_filter *insn;
410 	int i;
411 
412 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
413 	if (!insn)
414 		return -ENOMEM;
415 
416 	for (i = 0; i < len - 1; i += 2) {
417 		insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
418 		insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
419 					 SKF_AD_OFF + SKF_AD_CPU);
420 	}
421 
422 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);
423 
424 	self->u.ptr.insns = insn;
425 	self->u.ptr.len = len;
426 
427 	return 0;
428 }
429 
430 static int __bpf_fill_stxdw(struct bpf_test *self, int size)
431 {
432 	unsigned int len = BPF_MAXINSNS;
433 	struct bpf_insn *insn;
434 	int i;
435 
436 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
437 	if (!insn)
438 		return -ENOMEM;
439 
440 	insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
441 	insn[1] = BPF_ST_MEM(size, R10, -40, 42);
442 
443 	for (i = 2; i < len - 2; i++)
444 		insn[i] = BPF_STX_XADD(size, R10, R0, -40);
445 
446 	insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
447 	insn[len - 1] = BPF_EXIT_INSN();
448 
449 	self->u.ptr.insns = insn;
450 	self->u.ptr.len = len;
451 	self->stack_depth = 40;
452 
453 	return 0;
454 }
455 
456 static int bpf_fill_stxw(struct bpf_test *self)
457 {
458 	return __bpf_fill_stxdw(self, BPF_W);
459 }
460 
461 static int bpf_fill_stxdw(struct bpf_test *self)
462 {
463 	return __bpf_fill_stxdw(self, BPF_DW);
464 }
465 
466 static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
467 {
468 	struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};
469 
470 	memcpy(insns, tmp, sizeof(tmp));
471 	return 2;
472 }
473 
474 /*
475  * Branch conversion tests. Complex operations can expand to a lot
476  * of instructions when JITed. This in turn may cause jump offsets
477  * to overflow the field size of the native instruction, triggering
478  * a branch conversion mechanism in some JITs.
479  */
480 static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
481 {
482 	struct bpf_insn *insns;
483 	int len = S16_MAX + 5;
484 	int i;
485 
486 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
487 	if (!insns)
488 		return -ENOMEM;
489 
490 	i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
491 	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
492 	insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);
493 	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
494 	insns[i++] = BPF_EXIT_INSN();
495 
496 	while (i < len - 1) {
497 		static const int ops[] = {
498 			BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
499 			BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
500 		};
501 		int op = ops[(i >> 1) % ARRAY_SIZE(ops)];
502 
503 		if (i & 1)
504 			insns[i++] = BPF_ALU32_REG(op, R0, R1);
505 		else
506 			insns[i++] = BPF_ALU64_REG(op, R0, R1);
507 	}
508 
509 	insns[i++] = BPF_EXIT_INSN();
510 	self->u.ptr.insns = insns;
511 	self->u.ptr.len = len;
512 	BUG_ON(i != len);
513 
514 	return 0;
515 }
516 
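/*
 * Illustrative sketch of the conversion this is meant to trigger (generic
 * pseudo-assembly, not tied to any particular JIT): when the distance of
 *
 *	beq	rd, rs, too_far
 *
 * no longer fits the offset field of the native conditional branch, a JIT
 * may emit an inverted branch around an unconditional long-range jump:
 *
 *	bne	rd, rs, skip
 *	jmp	too_far
 * skip:
 */
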
517 /* Branch taken by runtime decision */
518 static int bpf_fill_max_jmp_taken(struct bpf_test *self)
519 {
520 	return __bpf_fill_max_jmp(self, BPF_JEQ, 1);
521 }
522 
523 /* Branch not taken by runtime decision */
524 static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
525 {
526 	return __bpf_fill_max_jmp(self, BPF_JEQ, 0);
527 }
528 
529 /* Branch always taken, known at JIT time */
530 static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
531 {
532 	return __bpf_fill_max_jmp(self, BPF_JGE, 0);
533 }
534 
535 /* Branch never taken, known at JIT time */
536 static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
537 {
538 	return __bpf_fill_max_jmp(self, BPF_JLT, 0);
539 }
540 
541 /* ALU result computation used in tests */
542 static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
543 {
544 	*res = 0;
545 	switch (op) {
546 	case BPF_MOV:
547 		*res = v2;
548 		break;
549 	case BPF_AND:
550 		*res = v1 & v2;
551 		break;
552 	case BPF_OR:
553 		*res = v1 | v2;
554 		break;
555 	case BPF_XOR:
556 		*res = v1 ^ v2;
557 		break;
558 	case BPF_LSH:
559 		*res = v1 << v2;
560 		break;
561 	case BPF_RSH:
562 		*res = v1 >> v2;
563 		break;
564 	case BPF_ARSH:
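		/*
		 * Arithmetic shift emulated on unsigned values: when the
		 * sign bit of v1 is set (v1 > S64_MAX) and the shift count
		 * is non-zero, fill the vacated high bits with ones.
		 */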
565 		*res = v1 >> v2;
566 		if (v2 > 0 && v1 > S64_MAX)
567 			*res |= ~0ULL << (64 - v2);
568 		break;
569 	case BPF_ADD:
570 		*res = v1 + v2;
571 		break;
572 	case BPF_SUB:
573 		*res = v1 - v2;
574 		break;
575 	case BPF_MUL:
576 		*res = v1 * v2;
577 		break;
578 	case BPF_DIV:
579 		if (v2 == 0)
580 			return false;
581 		*res = div64_u64(v1, v2);
582 		break;
583 	case BPF_MOD:
584 		if (v2 == 0)
585 			return false;
586 		div64_u64_rem(v1, v2, res);
587 		break;
588 	}
589 	return true;
590 }
591 
592 /* Test an ALU shift operation for all valid shift values */
593 static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
594 				u8 mode, bool alu32)
595 {
596 	static const s64 regs[] = {
597 		0x0123456789abcdefLL, /* dword > 0, word < 0 */
598 		0xfedcba9876543210LL, /* dword < 0, word > 0 */
599 		0xfedcba0198765432LL, /* dword < 0, word < 0 */
600 		0x0123458967abcdefLL, /* dword > 0, word > 0 */
601 	};
602 	int bits = alu32 ? 32 : 64;
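	/*
	 * Program size: each test register value takes 2 insns to load,
	 * plus 7 insns per shift count (two moves, the shift, a 2-insn
	 * reference load, a compare and a conditional exit), with one head
	 * and two tail instructions on top.
	 */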
603 	int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
604 	struct bpf_insn *insn;
605 	int imm, k;
606 	int i = 0;
607 
608 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
609 	if (!insn)
610 		return -ENOMEM;
611 
612 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
613 
614 	for (k = 0; k < ARRAY_SIZE(regs); k++) {
615 		s64 reg = regs[k];
616 
617 		i += __bpf_ld_imm64(&insn[i], R3, reg);
618 
619 		for (imm = 0; imm < bits; imm++) {
620 			u64 val;
621 
622 			/* Perform operation */
623 			insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
624 			insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
625 			if (alu32) {
626 				if (mode == BPF_K)
627 					insn[i++] = BPF_ALU32_IMM(op, R1, imm);
628 				else
629 					insn[i++] = BPF_ALU32_REG(op, R1, R2);
630 
631 				if (op == BPF_ARSH)
632 					reg = (s32)reg;
633 				else
634 					reg = (u32)reg;
635 				__bpf_alu_result(&val, reg, imm, op);
636 				val = (u32)val;
637 			} else {
638 				if (mode == BPF_K)
639 					insn[i++] = BPF_ALU64_IMM(op, R1, imm);
640 				else
641 					insn[i++] = BPF_ALU64_REG(op, R1, R2);
642 				__bpf_alu_result(&val, reg, imm, op);
643 			}
644 
645 			/*
646 			 * When debugging a JIT that fails this test, one
647 			 * can write the immediate value to R0 here to find
648 			 * out which operand values fail.
649 			 */
650 
651 			/* Load reference and check the result */
652 			i += __bpf_ld_imm64(&insn[i], R4, val);
653 			insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
654 			insn[i++] = BPF_EXIT_INSN();
655 		}
656 	}
657 
658 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
659 	insn[i++] = BPF_EXIT_INSN();
660 
661 	self->u.ptr.insns = insn;
662 	self->u.ptr.len = len;
663 	BUG_ON(i != len);
664 
665 	return 0;
666 }
667 
668 static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
669 {
670 	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
671 }
672 
673 static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
674 {
675 	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
676 }
677 
678 static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
679 {
680 	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
681 }
682 
683 static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
684 {
685 	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
686 }
687 
688 static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
689 {
690 	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
691 }
692 
693 static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
694 {
695 	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
696 }
697 
698 static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
699 {
700 	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
701 }
702 
703 static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
704 {
705 	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
706 }
707 
708 static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
709 {
710 	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
711 }
712 
713 static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
714 {
715 	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
716 }
717 
718 static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
719 {
720 	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
721 }
722 
723 static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
724 {
725 	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
726 }
727 
728 /*
729  * Test an ALU register shift operation for all valid shift values
730  * for the case when the source and destination are the same.
731  */
732 static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
733 					 bool alu32)
734 {
735 	int bits = alu32 ? 32 : 64;
736 	int len = 3 + 6 * bits;
737 	struct bpf_insn *insn;
738 	int i = 0;
739 	u64 val;
740 
741 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
742 	if (!insn)
743 		return -ENOMEM;
744 
745 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
746 
747 	for (val = 0; val < bits; val++) {
748 		u64 res;
749 
750 		/* Perform operation */
751 		insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
752 		if (alu32)
753 			insn[i++] = BPF_ALU32_REG(op, R1, R1);
754 		else
755 			insn[i++] = BPF_ALU64_REG(op, R1, R1);
756 
757 		/* Compute the reference result */
758 		__bpf_alu_result(&res, val, val, op);
759 		if (alu32)
760 			res = (u32)res;
761 		i += __bpf_ld_imm64(&insn[i], R2, res);
762 
763 		/* Check the actual result */
764 		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
765 		insn[i++] = BPF_EXIT_INSN();
766 	}
767 
768 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
769 	insn[i++] = BPF_EXIT_INSN();
770 
771 	self->u.ptr.insns = insn;
772 	self->u.ptr.len = len;
773 	BUG_ON(i != len);
774 
775 	return 0;
776 }
777 
778 static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
779 {
780 	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, false);
781 }
782 
783 static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
784 {
785 	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, false);
786 }
787 
788 static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
789 {
790 	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, false);
791 }
792 
793 static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
794 {
795 	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, true);
796 }
797 
798 static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
799 {
800 	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, true);
801 }
802 
803 static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
804 {
805 	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, true);
806 }
807 
808 /*
809  * Common operand pattern generator for exhaustive power-of-two magnitude
810  * tests. The block size parameters can be adjusted to increase/reduce the
811  * number of combinations tested and thereby execution speed and memory
812  * footprint.
813  */
814 
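/* For example, value(3, 1, -1) = -(1 << 3) + 1 = -7. */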
815 static inline s64 value(int msb, int delta, int sign)
816 {
817 	return sign * (1LL << msb) + delta;
818 }
819 
820 static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
821 			      int dbits, int sbits, int block1, int block2,
822 			      int (*emit)(struct bpf_test*, void*,
823 					  struct bpf_insn*, s64, s64))
824 {
825 	static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
826 	struct bpf_insn *insns;
827 	int di, si, bt, db, sb;
828 	int count, len, k;
829 	int extra = 1 + 2;
830 	int i = 0;
831 
832 	/* Total number of iterations for the two patterns */
833 	count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
834 	count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);
835 
836 	/* Compute the maximum number of insns and allocate the buffer */
837 	len = extra + count * (*emit)(self, arg, NULL, 0, 0);
838 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
839 	if (!insns)
840 		return -ENOMEM;
841 
842 	/* Add head instruction(s) */
843 	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
844 
845 	/*
846 	 * Pattern 1: all combinations of power-of-two magnitudes and sign,
847 	 * and with a block of contiguous values around each magnitude.
848 	 */
849 	for (di = 0; di < dbits - 1; di++)                 /* Dst magnitudes */
850 		for (si = 0; si < sbits - 1; si++)         /* Src magnitudes */
851 			for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
852 				for (db = -(block1 / 2);
853 				     db < (block1 + 1) / 2; db++)
854 					for (sb = -(block1 / 2);
855 					     sb < (block1 + 1) / 2; sb++) {
856 						s64 dst, src;
857 
858 						dst = value(di, db, sgn[k][0]);
859 						src = value(si, sb, sgn[k][1]);
860 						i += (*emit)(self, arg,
861 							     &insns[i],
862 							     dst, src);
863 					}
864 	/*
865 	 * Pattern 2: all combinations for a larger block of values
866 	 * for each power-of-two magnitude and sign, where the magnitude is
867 	 * the same for both operands.
868 	 */
869 	for (bt = 0; bt < max(dbits, sbits) - 1; bt++)        /* Magnitude   */
870 		for (k = 0; k < ARRAY_SIZE(sgn); k++)         /* Sign combos */
871 			for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
872 				for (sb = -(block2 / 2);
873 				     sb < (block2 + 1) / 2; sb++) {
874 					s64 dst, src;
875 
876 					dst = value(bt % dbits, db, sgn[k][0]);
877 					src = value(bt % sbits, sb, sgn[k][1]);
878 					i += (*emit)(self, arg, &insns[i],
879 						     dst, src);
880 				}
881 
882 	/* Append tail instructions */
883 	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
884 	insns[i++] = BPF_EXIT_INSN();
885 	BUG_ON(i > len);
886 
887 	self->u.ptr.insns = insns;
888 	self->u.ptr.len = i;
889 
890 	return 0;
891 }
892 
893 /*
894  * Block size parameters used in pattern tests below. Tune as needed to
895  * increase/reduce the number of combinations tested; see the following examples.
896  *        block   values per operand MSB
897  * ----------------------------------------
898  *           0     none
899  *           1     (1 << MSB)
900  *           2     (1 << MSB) + [-1, 0]
901  *           3     (1 << MSB) + [-1, 0, 1]
902  */
903 #define PATTERN_BLOCK1 1
904 #define PATTERN_BLOCK2 5
905 
906 /* Number of test runs for a pattern test */
907 #define NR_PATTERN_RUNS 1
908 
909 /*
910  * Exhaustive tests of ALU operations for all combinations of power-of-two
911  * magnitudes of the operands, both for positive and negative values. The
912  * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
913  * emit different code depending on the magnitude of the immediate value.
914  */
915 static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
916 				struct bpf_insn *insns, s64 dst, s64 imm)
917 {
918 	int op = *(int *)arg;
919 	int i = 0;
920 	u64 res;
921 
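	/*
	 * A NULL insns pointer only queries the worst-case number of
	 * instructions emitted per case; __bpf_fill_pattern() uses this
	 * to size the instruction buffer.
	 */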
922 	if (!insns)
923 		return 7;
924 
925 	if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
926 		i += __bpf_ld_imm64(&insns[i], R1, dst);
927 		i += __bpf_ld_imm64(&insns[i], R3, res);
928 		insns[i++] = BPF_ALU64_IMM(op, R1, imm);
929 		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
930 		insns[i++] = BPF_EXIT_INSN();
931 	}
932 
933 	return i;
934 }
935 
936 static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
937 				struct bpf_insn *insns, s64 dst, s64 imm)
938 {
939 	int op = *(int *)arg;
940 	int i = 0;
941 	u64 res;
942 
943 	if (!insns)
944 		return 7;
945 
946 	if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
947 		i += __bpf_ld_imm64(&insns[i], R1, dst);
948 		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
949 		insns[i++] = BPF_ALU32_IMM(op, R1, imm);
950 		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
951 		insns[i++] = BPF_EXIT_INSN();
952 	}
953 
954 	return i;
955 }
956 
957 static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
958 				struct bpf_insn *insns, s64 dst, s64 src)
959 {
960 	int op = *(int *)arg;
961 	int i = 0;
962 	u64 res;
963 
964 	if (!insns)
965 		return 9;
966 
967 	if (__bpf_alu_result(&res, dst, src, op)) {
968 		i += __bpf_ld_imm64(&insns[i], R1, dst);
969 		i += __bpf_ld_imm64(&insns[i], R2, src);
970 		i += __bpf_ld_imm64(&insns[i], R3, res);
971 		insns[i++] = BPF_ALU64_REG(op, R1, R2);
972 		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
973 		insns[i++] = BPF_EXIT_INSN();
974 	}
975 
976 	return i;
977 }
978 
979 static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
980 				struct bpf_insn *insns, s64 dst, s64 src)
981 {
982 	int op = *(int *)arg;
983 	int i = 0;
984 	u64 res;
985 
986 	if (!insns)
987 		return 9;
988 
989 	if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
990 		i += __bpf_ld_imm64(&insns[i], R1, dst);
991 		i += __bpf_ld_imm64(&insns[i], R2, src);
992 		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
993 		insns[i++] = BPF_ALU32_REG(op, R1, R2);
994 		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
995 		insns[i++] = BPF_EXIT_INSN();
996 	}
997 
998 	return i;
999 }
1000 
1001 static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
1002 {
1003 	return __bpf_fill_pattern(self, &op, 64, 32,
1004 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1005 				  &__bpf_emit_alu64_imm);
1006 }
1007 
1008 static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
1009 {
1010 	return __bpf_fill_pattern(self, &op, 64, 32,
1011 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1012 				  &__bpf_emit_alu32_imm);
1013 }
1014 
1015 static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
1016 {
1017 	return __bpf_fill_pattern(self, &op, 64, 64,
1018 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1019 				  &__bpf_emit_alu64_reg);
1020 }
1021 
1022 static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
1023 {
1024 	return __bpf_fill_pattern(self, &op, 64, 64,
1025 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1026 				  &__bpf_emit_alu32_reg);
1027 }
1028 
1029 /* ALU64 immediate operations */
1030 static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
1031 {
1032 	return __bpf_fill_alu64_imm(self, BPF_MOV);
1033 }
1034 
1035 static int bpf_fill_alu64_and_imm(struct bpf_test *self)
1036 {
1037 	return __bpf_fill_alu64_imm(self, BPF_AND);
1038 }
1039 
1040 static int bpf_fill_alu64_or_imm(struct bpf_test *self)
1041 {
1042 	return __bpf_fill_alu64_imm(self, BPF_OR);
1043 }
1044 
1045 static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
1046 {
1047 	return __bpf_fill_alu64_imm(self, BPF_XOR);
1048 }
1049 
1050 static int bpf_fill_alu64_add_imm(struct bpf_test *self)
1051 {
1052 	return __bpf_fill_alu64_imm(self, BPF_ADD);
1053 }
1054 
1055 static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
1056 {
1057 	return __bpf_fill_alu64_imm(self, BPF_SUB);
1058 }
1059 
1060 static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
1061 {
1062 	return __bpf_fill_alu64_imm(self, BPF_MUL);
1063 }
1064 
1065 static int bpf_fill_alu64_div_imm(struct bpf_test *self)
1066 {
1067 	return __bpf_fill_alu64_imm(self, BPF_DIV);
1068 }
1069 
1070 static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
1071 {
1072 	return __bpf_fill_alu64_imm(self, BPF_MOD);
1073 }
1074 
1075 /* ALU32 immediate operations */
1076 static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
1077 {
1078 	return __bpf_fill_alu32_imm(self, BPF_MOV);
1079 }
1080 
1081 static int bpf_fill_alu32_and_imm(struct bpf_test *self)
1082 {
1083 	return __bpf_fill_alu32_imm(self, BPF_AND);
1084 }
1085 
1086 static int bpf_fill_alu32_or_imm(struct bpf_test *self)
1087 {
1088 	return __bpf_fill_alu32_imm(self, BPF_OR);
1089 }
1090 
1091 static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
1092 {
1093 	return __bpf_fill_alu32_imm(self, BPF_XOR);
1094 }
1095 
1096 static int bpf_fill_alu32_add_imm(struct bpf_test *self)
1097 {
1098 	return __bpf_fill_alu32_imm(self, BPF_ADD);
1099 }
1100 
1101 static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
1102 {
1103 	return __bpf_fill_alu32_imm(self, BPF_SUB);
1104 }
1105 
1106 static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
1107 {
1108 	return __bpf_fill_alu32_imm(self, BPF_MUL);
1109 }
1110 
1111 static int bpf_fill_alu32_div_imm(struct bpf_test *self)
1112 {
1113 	return __bpf_fill_alu32_imm(self, BPF_DIV);
1114 }
1115 
1116 static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
1117 {
1118 	return __bpf_fill_alu32_imm(self, BPF_MOD);
1119 }
1120 
1121 /* ALU64 register operations */
1122 static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
1123 {
1124 	return __bpf_fill_alu64_reg(self, BPF_MOV);
1125 }
1126 
1127 static int bpf_fill_alu64_and_reg(struct bpf_test *self)
1128 {
1129 	return __bpf_fill_alu64_reg(self, BPF_AND);
1130 }
1131 
1132 static int bpf_fill_alu64_or_reg(struct bpf_test *self)
1133 {
1134 	return __bpf_fill_alu64_reg(self, BPF_OR);
1135 }
1136 
1137 static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
1138 {
1139 	return __bpf_fill_alu64_reg(self, BPF_XOR);
1140 }
1141 
1142 static int bpf_fill_alu64_add_reg(struct bpf_test *self)
1143 {
1144 	return __bpf_fill_alu64_reg(self, BPF_ADD);
1145 }
1146 
1147 static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
1148 {
1149 	return __bpf_fill_alu64_reg(self, BPF_SUB);
1150 }
1151 
1152 static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
1153 {
1154 	return __bpf_fill_alu64_reg(self, BPF_MUL);
1155 }
1156 
1157 static int bpf_fill_alu64_div_reg(struct bpf_test *self)
1158 {
1159 	return __bpf_fill_alu64_reg(self, BPF_DIV);
1160 }
1161 
1162 static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
1163 {
1164 	return __bpf_fill_alu64_reg(self, BPF_MOD);
1165 }
1166 
1167 /* ALU32 register operations */
1168 static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
1169 {
1170 	return __bpf_fill_alu32_reg(self, BPF_MOV);
1171 }
1172 
1173 static int bpf_fill_alu32_and_reg(struct bpf_test *self)
1174 {
1175 	return __bpf_fill_alu32_reg(self, BPF_AND);
1176 }
1177 
1178 static int bpf_fill_alu32_or_reg(struct bpf_test *self)
1179 {
1180 	return __bpf_fill_alu32_reg(self, BPF_OR);
1181 }
1182 
1183 static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
1184 {
1185 	return __bpf_fill_alu32_reg(self, BPF_XOR);
1186 }
1187 
1188 static int bpf_fill_alu32_add_reg(struct bpf_test *self)
1189 {
1190 	return __bpf_fill_alu32_reg(self, BPF_ADD);
1191 }
1192 
1193 static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
1194 {
1195 	return __bpf_fill_alu32_reg(self, BPF_SUB);
1196 }
1197 
1198 static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
1199 {
1200 	return __bpf_fill_alu32_reg(self, BPF_MUL);
1201 }
1202 
1203 static int bpf_fill_alu32_div_reg(struct bpf_test *self)
1204 {
1205 	return __bpf_fill_alu32_reg(self, BPF_DIV);
1206 }
1207 
1208 static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
1209 {
1210 	return __bpf_fill_alu32_reg(self, BPF_MOD);
1211 }
1212 
1213 /*
1214  * Test JITs that implement complex ALU operations as function
1215  * calls, and must re-arrange operands for argument passing.
1216  */
1217 static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
1218 {
1219 	int len = 2 + 10 * 10;
1220 	struct bpf_insn *insns;
1221 	u64 dst, res;
1222 	int i = 0;
1223 	u32 imm;
1224 	int rd;
1225 
1226 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1227 	if (!insns)
1228 		return -ENOMEM;
1229 
1230 	/* Operand and result values according to operation */
1231 	if (alu32)
1232 		dst = 0x76543210U;
1233 	else
1234 		dst = 0x7edcba9876543210ULL;
1235 	imm = 0x01234567U;
1236 
1237 	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1238 		imm &= 31;
1239 
1240 	__bpf_alu_result(&res, dst, imm, op);
1241 
1242 	if (alu32)
1243 		res = (u32)res;
1244 
1245 	/* Check all operand registers */
1246 	for (rd = R0; rd <= R9; rd++) {
1247 		i += __bpf_ld_imm64(&insns[i], rd, dst);
1248 
1249 		if (alu32)
1250 			insns[i++] = BPF_ALU32_IMM(op, rd, imm);
1251 		else
1252 			insns[i++] = BPF_ALU64_IMM(op, rd, imm);
1253 
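		/* Check the low 32 bits, then shift and check the high 32 bits */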
1254 		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
1255 		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1256 		insns[i++] = BPF_EXIT_INSN();
1257 
1258 		insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1259 		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
1260 		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1261 		insns[i++] = BPF_EXIT_INSN();
1262 	}
1263 
1264 	insns[i++] = BPF_MOV64_IMM(R0, 1);
1265 	insns[i++] = BPF_EXIT_INSN();
1266 
1267 	self->u.ptr.insns = insns;
1268 	self->u.ptr.len = len;
1269 	BUG_ON(i != len);
1270 
1271 	return 0;
1272 }
1273 
1274 /* ALU64 K registers */
1275 static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
1276 {
1277 	return __bpf_fill_alu_imm_regs(self, BPF_MOV, false);
1278 }
1279 
1280 static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
1281 {
1282 	return __bpf_fill_alu_imm_regs(self, BPF_AND, false);
1283 }
1284 
1285 static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
1286 {
1287 	return __bpf_fill_alu_imm_regs(self, BPF_OR, false);
1288 }
1289 
1290 static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
1291 {
1292 	return __bpf_fill_alu_imm_regs(self, BPF_XOR, false);
1293 }
1294 
1295 static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
1296 {
1297 	return __bpf_fill_alu_imm_regs(self, BPF_LSH, false);
1298 }
1299 
1300 static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
1301 {
1302 	return __bpf_fill_alu_imm_regs(self, BPF_RSH, false);
1303 }
1304 
1305 static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
1306 {
1307 	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, false);
1308 }
1309 
1310 static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
1311 {
1312 	return __bpf_fill_alu_imm_regs(self, BPF_ADD, false);
1313 }
1314 
1315 static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
1316 {
1317 	return __bpf_fill_alu_imm_regs(self, BPF_SUB, false);
1318 }
1319 
1320 static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
1321 {
1322 	return __bpf_fill_alu_imm_regs(self, BPF_MUL, false);
1323 }
1324 
1325 static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
1326 {
1327 	return __bpf_fill_alu_imm_regs(self, BPF_DIV, false);
1328 }
1329 
1330 static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
1331 {
1332 	return __bpf_fill_alu_imm_regs(self, BPF_MOD, false);
1333 }
1334 
1335 /* ALU32 K registers */
1336 static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
1337 {
1338 	return __bpf_fill_alu_imm_regs(self, BPF_MOV, true);
1339 }
1340 
1341 static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
1342 {
1343 	return __bpf_fill_alu_imm_regs(self, BPF_AND, true);
1344 }
1345 
1346 static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
1347 {
1348 	return __bpf_fill_alu_imm_regs(self, BPF_OR, true);
1349 }
1350 
1351 static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
1352 {
1353 	return __bpf_fill_alu_imm_regs(self, BPF_XOR, true);
1354 }
1355 
1356 static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
1357 {
1358 	return __bpf_fill_alu_imm_regs(self, BPF_LSH, true);
1359 }
1360 
1361 static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
1362 {
1363 	return __bpf_fill_alu_imm_regs(self, BPF_RSH, true);
1364 }
1365 
1366 static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
1367 {
1368 	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, true);
1369 }
1370 
1371 static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
1372 {
1373 	return __bpf_fill_alu_imm_regs(self, BPF_ADD, true);
1374 }
1375 
1376 static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
1377 {
1378 	return __bpf_fill_alu_imm_regs(self, BPF_SUB, true);
1379 }
1380 
1381 static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
1382 {
1383 	return __bpf_fill_alu_imm_regs(self, BPF_MUL, true);
1384 }
1385 
1386 static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
1387 {
1388 	return __bpf_fill_alu_imm_regs(self, BPF_DIV, true);
1389 }
1390 
1391 static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
1392 {
1393 	return __bpf_fill_alu_imm_regs(self, BPF_MOD, true);
1394 }
1395 
1396 /*
1397  * Test JITs that implement complex ALU operations as function
1398  * calls, and must re-arrange operands for argument passing.
1399  */
1400 static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
1401 {
1402 	int len = 2 + 10 * 10 * 12;
1403 	u64 dst, src, res, same;
1404 	struct bpf_insn *insns;
1405 	int rd, rs;
1406 	int i = 0;
1407 
1408 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1409 	if (!insns)
1410 		return -ENOMEM;
1411 
1412 	/* Operand and result values according to operation */
1413 	if (alu32) {
1414 		dst = 0x76543210U;
1415 		src = 0x01234567U;
1416 	} else {
1417 		dst = 0x7edcba9876543210ULL;
1418 		src = 0x0123456789abcdefULL;
1419 	}
1420 
1421 	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1422 		src &= 31;
1423 
1424 	__bpf_alu_result(&res, dst, src, op);
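	/* When rd == rs, the later constant load wins and both operands are src */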
1425 	__bpf_alu_result(&same, src, src, op);
1426 
1427 	if (alu32) {
1428 		res = (u32)res;
1429 		same = (u32)same;
1430 	}
1431 
1432 	/* Check all combinations of operand registers */
1433 	for (rd = R0; rd <= R9; rd++) {
1434 		for (rs = R0; rs <= R9; rs++) {
1435 			u64 val = rd == rs ? same : res;
1436 
1437 			i += __bpf_ld_imm64(&insns[i], rd, dst);
1438 			i += __bpf_ld_imm64(&insns[i], rs, src);
1439 
1440 			if (alu32)
1441 				insns[i++] = BPF_ALU32_REG(op, rd, rs);
1442 			else
1443 				insns[i++] = BPF_ALU64_REG(op, rd, rs);
1444 
1445 			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
1446 			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1447 			insns[i++] = BPF_EXIT_INSN();
1448 
1449 			insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1450 			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
1451 			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1452 			insns[i++] = BPF_EXIT_INSN();
1453 		}
1454 	}
1455 
1456 	insns[i++] = BPF_MOV64_IMM(R0, 1);
1457 	insns[i++] = BPF_EXIT_INSN();
1458 
1459 	self->u.ptr.insns = insns;
1460 	self->u.ptr.len = len;
1461 	BUG_ON(i != len);
1462 
1463 	return 0;
1464 }
1465 
1466 /* ALU64 X register combinations */
1467 static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
1468 {
1469 	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, false);
1470 }
1471 
1472 static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
1473 {
1474 	return __bpf_fill_alu_reg_pairs(self, BPF_AND, false);
1475 }
1476 
1477 static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
1478 {
1479 	return __bpf_fill_alu_reg_pairs(self, BPF_OR, false);
1480 }
1481 
1482 static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
1483 {
1484 	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, false);
1485 }
1486 
1487 static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
1488 {
1489 	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, false);
1490 }
1491 
1492 static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
1493 {
1494 	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, false);
1495 }
1496 
1497 static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
1498 {
1499 	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, false);
1500 }
1501 
1502 static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
1503 {
1504 	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, false);
1505 }
1506 
1507 static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
1508 {
1509 	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, false);
1510 }
1511 
1512 static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
1513 {
1514 	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, false);
1515 }
1516 
1517 static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
1518 {
1519 	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, false);
1520 }
1521 
1522 static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
1523 {
1524 	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, false);
1525 }
1526 
1527 /* ALU32 X register combinations */
1528 static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
1529 {
1530 	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, true);
1531 }
1532 
1533 static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
1534 {
1535 	return __bpf_fill_alu_reg_pairs(self, BPF_AND, true);
1536 }
1537 
1538 static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
1539 {
1540 	return __bpf_fill_alu_reg_pairs(self, BPF_OR, true);
1541 }
1542 
1543 static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
1544 {
1545 	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, true);
1546 }
1547 
1548 static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
1549 {
1550 	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, true);
1551 }
1552 
1553 static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
1554 {
1555 	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, true);
1556 }
1557 
1558 static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
1559 {
1560 	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, true);
1561 }
1562 
1563 static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
1564 {
1565 	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, true);
1566 }
1567 
1568 static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
1569 {
1570 	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, true);
1571 }
1572 
1573 static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
1574 {
1575 	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, true);
1576 }
1577 
1578 static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
1579 {
1580 	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, true);
1581 }
1582 
1583 static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
1584 {
1585 	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, true);
1586 }
1587 
1588 /*
1589  * Exhaustive tests of atomic operations for all power-of-two operand
1590  * magnitudes, both for positive and negative values.
1591  */
1592 
1593 static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
1594 			       struct bpf_insn *insns, s64 dst, s64 src)
1595 {
1596 	int op = *(int *)arg;
1597 	u64 keep, fetch, res;
1598 	int i = 0;
1599 
1600 	if (!insns)
1601 		return 21;
1602 
1603 	switch (op) {
1604 	case BPF_XCHG:
1605 		res = src;
1606 		break;
1607 	default:
1608 		__bpf_alu_result(&res, dst, src, BPF_OP(op));
1609 	}
1610 
1611 	keep = 0x0123456789abcdefULL;
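	/*
	 * R0 and R5 hold a sentinel that must survive the operation. With
	 * BPF_FETCH the source register is expected to end up holding the
	 * old memory value, otherwise it must be left unchanged.
	 */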
1612 	if (op & BPF_FETCH)
1613 		fetch = dst;
1614 	else
1615 		fetch = src;
1616 
1617 	i += __bpf_ld_imm64(&insns[i], R0, keep);
1618 	i += __bpf_ld_imm64(&insns[i], R1, dst);
1619 	i += __bpf_ld_imm64(&insns[i], R2, src);
1620 	i += __bpf_ld_imm64(&insns[i], R3, res);
1621 	i += __bpf_ld_imm64(&insns[i], R4, fetch);
1622 	i += __bpf_ld_imm64(&insns[i], R5, keep);
1623 
1624 	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1625 	insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
1626 	insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);
1627 
1628 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1629 	insns[i++] = BPF_EXIT_INSN();
1630 
1631 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1632 	insns[i++] = BPF_EXIT_INSN();
1633 
1634 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1635 	insns[i++] = BPF_EXIT_INSN();
1636 
1637 	return i;
1638 }
1639 
1640 static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
1641 			       struct bpf_insn *insns, s64 dst, s64 src)
1642 {
1643 	int op = *(int *)arg;
1644 	u64 keep, fetch, res;
1645 	int i = 0;
1646 
1647 	if (!insns)
1648 		return 21;
1649 
1650 	switch (op) {
1651 	case BPF_XCHG:
1652 		res = src;
1653 		break;
1654 	default:
1655 		__bpf_alu_result(&res, (u32)dst, (u32)src, BPF_OP(op));
1656 	}
1657 
1658 	keep = 0x0123456789abcdefULL;
1659 	if (op & BPF_FETCH)
1660 		fetch = (u32)dst;
1661 	else
1662 		fetch = src;
1663 
1664 	i += __bpf_ld_imm64(&insns[i], R0, keep);
1665 	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
1666 	i += __bpf_ld_imm64(&insns[i], R2, src);
1667 	i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
1668 	i += __bpf_ld_imm64(&insns[i], R4, fetch);
1669 	i += __bpf_ld_imm64(&insns[i], R5, keep);
1670 
1671 	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1672 	insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
1673 	insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);
1674 
1675 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1676 	insns[i++] = BPF_EXIT_INSN();
1677 
1678 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1679 	insns[i++] = BPF_EXIT_INSN();
1680 
1681 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1682 	insns[i++] = BPF_EXIT_INSN();
1683 
1684 	return i;
1685 }
1686 
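/*
 * BPF_CMPXCHG compares R0 with the value in memory; on a match the memory
 * word is replaced by the source register. R0 always receives the old
 * memory value. The first attempt below is set up to fail, the second one
 * to succeed.
 */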
1687 static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
1688 				struct bpf_insn *insns, s64 dst, s64 src)
1689 {
1690 	int i = 0;
1691 
1692 	if (!insns)
1693 		return 23;
1694 
1695 	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
1696 	i += __bpf_ld_imm64(&insns[i], R1, dst);
1697 	i += __bpf_ld_imm64(&insns[i], R2, src);
1698 
1699 	/* Result unsuccessful */
1700 	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1701 	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1702 	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
1703 
1704 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
1705 	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1706 	insns[i++] = BPF_EXIT_INSN();
1707 
1708 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1709 	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1710 	insns[i++] = BPF_EXIT_INSN();
1711 
1712 	/* Result successful */
1713 	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1714 	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
1715 
1716 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
1717 	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1718 	insns[i++] = BPF_EXIT_INSN();
1719 
1720 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1721 	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1722 	insns[i++] = BPF_EXIT_INSN();
1723 
1724 	return i;
1725 }
1726 
1727 static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
1728 				struct bpf_insn *insns, s64 dst, s64 src)
1729 {
1730 	int i = 0;
1731 
1732 	if (!insns)
1733 		return 27;
1734 
1735 	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
1736 	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
1737 	i += __bpf_ld_imm64(&insns[i], R2, src);
1738 
1739 	/* Result unsuccessful */
1740 	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1741 	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1742 	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
1743 	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1744 
1745 	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
1746 	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1747 	insns[i++] = BPF_EXIT_INSN();
1748 
1749 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1750 	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1751 	insns[i++] = BPF_EXIT_INSN();
1752 
1753 	/* Result successful */
1754 	i += __bpf_ld_imm64(&insns[i], R0, dst);
1755 	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1756 	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
1757 	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1758 
1759 	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
1760 	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1761 	insns[i++] = BPF_EXIT_INSN();
1762 
1763 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1764 	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1765 	insns[i++] = BPF_EXIT_INSN();
1766 
1767 	return i;
1768 }
1769 
1770 static int __bpf_fill_atomic64(struct bpf_test *self, int op)
1771 {
1772 	return __bpf_fill_pattern(self, &op, 64, 64,
1773 				  0, PATTERN_BLOCK2,
1774 				  &__bpf_emit_atomic64);
1775 }
1776 
1777 static int __bpf_fill_atomic32(struct bpf_test *self, int op)
1778 {
1779 	return __bpf_fill_pattern(self, &op, 64, 64,
1780 				  0, PATTERN_BLOCK2,
1781 				  &__bpf_emit_atomic32);
1782 }
1783 
1784 /* 64-bit atomic operations */
1785 static int bpf_fill_atomic64_add(struct bpf_test *self)
1786 {
1787 	return __bpf_fill_atomic64(self, BPF_ADD);
1788 }
1789 
1790 static int bpf_fill_atomic64_and(struct bpf_test *self)
1791 {
1792 	return __bpf_fill_atomic64(self, BPF_AND);
1793 }
1794 
1795 static int bpf_fill_atomic64_or(struct bpf_test *self)
1796 {
1797 	return __bpf_fill_atomic64(self, BPF_OR);
1798 }
1799 
1800 static int bpf_fill_atomic64_xor(struct bpf_test *self)
1801 {
1802 	return __bpf_fill_atomic64(self, BPF_XOR);
1803 }
1804 
1805 static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
1806 {
1807 	return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
1808 }
1809 
1810 static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
1811 {
1812 	return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
1813 }
1814 
1815 static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
1816 {
1817 	return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
1818 }
1819 
1820 static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
1821 {
1822 	return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
1823 }
1824 
1825 static int bpf_fill_atomic64_xchg(struct bpf_test *self)
1826 {
1827 	return __bpf_fill_atomic64(self, BPF_XCHG);
1828 }
1829 
1830 static int bpf_fill_cmpxchg64(struct bpf_test *self)
1831 {
1832 	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
1833 				  &__bpf_emit_cmpxchg64);
1834 }
1835 
1836 /* 32-bit atomic operations */
1837 static int bpf_fill_atomic32_add(struct bpf_test *self)
1838 {
1839 	return __bpf_fill_atomic32(self, BPF_ADD);
1840 }
1841 
1842 static int bpf_fill_atomic32_and(struct bpf_test *self)
1843 {
1844 	return __bpf_fill_atomic32(self, BPF_AND);
1845 }
1846 
1847 static int bpf_fill_atomic32_or(struct bpf_test *self)
1848 {
1849 	return __bpf_fill_atomic32(self, BPF_OR);
1850 }
1851 
1852 static int bpf_fill_atomic32_xor(struct bpf_test *self)
1853 {
1854 	return __bpf_fill_atomic32(self, BPF_XOR);
1855 }
1856 
1857 static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
1858 {
1859 	return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
1860 }
1861 
1862 static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
1863 {
1864 	return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
1865 }
1866 
1867 static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
1868 {
1869 	return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
1870 }
1871 
1872 static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
1873 {
1874 	return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
1875 }
1876 
1877 static int bpf_fill_atomic32_xchg(struct bpf_test *self)
1878 {
1879 	return __bpf_fill_atomic32(self, BPF_XCHG);
1880 }
1881 
1882 static int bpf_fill_cmpxchg32(struct bpf_test *self)
1883 {
1884 	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
1885 				  &__bpf_emit_cmpxchg32);
1886 }
1887 
1888 /*
1889  * Test JITs that implement ATOMIC operations as function calls or
1890  * other primitives, and must re-arrange operands for argument passing.
1891  */
1892 static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
1893 {
1894 	struct bpf_insn *insn;
1895 	int len = 2 + 34 * 10 * 10;
1896 	u64 mem, upd, res;
1897 	int rd, rs, i = 0;
1898 
1899 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
1900 	if (!insn)
1901 		return -ENOMEM;
1902 
1903 	/* Operand and memory values */
1904 	if (width == BPF_DW) {
1905 		mem = 0x0123456789abcdefULL;
1906 		upd = 0xfedcba9876543210ULL;
1907 	} else { /* BPF_W */
1908 		mem = 0x01234567U;
1909 		upd = 0x76543210U;
1910 	}
1911 
1912 	/* Memory updated according to operation */
1913 	switch (op) {
1914 	case BPF_XCHG:
1915 		res = upd;
1916 		break;
1917 	case BPF_CMPXCHG:
1918 		res = mem;
1919 		break;
1920 	default:
1921 		__bpf_alu_result(&res, mem, upd, BPF_OP(op));
1922 	}
1923 
1924 	/* Test all operand registers */
1925 	for (rd = R0; rd <= R9; rd++) {
1926 		for (rs = R0; rs <= R9; rs++) {
1927 			u64 cmp, src;
1928 
1929 			/* Initialize value in memory */
1930 			i += __bpf_ld_imm64(&insn[i], R0, mem);
1931 			insn[i++] = BPF_STX_MEM(width, R10, R0, -8);
1932 
1933 			/* Initialize registers in order */
1934 			i += __bpf_ld_imm64(&insn[i], R0, ~mem);
1935 			i += __bpf_ld_imm64(&insn[i], rs, upd);
1936 			insn[i++] = BPF_MOV64_REG(rd, R10);
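			/*
			 * rd now holds the frame pointer, so the atomic op
			 * below addresses the stack slot at -8 through
			 * whichever destination register is under test.
			 */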
1937 
1938 			/* Perform atomic operation */
1939 			insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
1940 			if (op == BPF_CMPXCHG && width == BPF_W)
1941 				insn[i++] = BPF_ZEXT_REG(R0);
1942 
1943 			/* Check R0 register value */
1944 			if (op == BPF_CMPXCHG)
1945 				cmp = mem;  /* Expect value from memory */
1946 			else if (R0 == rd || R0 == rs)
1947 				cmp = 0;    /* Aliased, checked below */
1948 			else
1949 				cmp = ~mem; /* Expect value to be preserved */
1950 			if (cmp) {
1951 				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1952 							   (u32)cmp, 2);
1953 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1954 				insn[i++] = BPF_EXIT_INSN();
1955 				insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
1956 				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1957 							   cmp >> 32, 2);
1958 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1959 				insn[i++] = BPF_EXIT_INSN();
1960 			}
1961 
1962 			/* Check source register value */
1963 			if (rs == R0 && op == BPF_CMPXCHG)
1964 				src = 0;   /* Aliased with R0, checked above */
1965 			else if (rs == rd && (op == BPF_CMPXCHG ||
1966 					      !(op & BPF_FETCH)))
1967 				src = 0;   /* Aliased with rd, checked below */
1968 			else if (op == BPF_CMPXCHG)
1969 				src = upd; /* Expect value to be preserved */
1970 			else if (op & BPF_FETCH)
1971 				src = mem; /* Expect fetched value from mem */
1972 			else /* no fetch */
1973 				src = upd; /* Expect value to be preserved */
1974 			if (src) {
1975 				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1976 							   (u32)src, 2);
1977 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1978 				insn[i++] = BPF_EXIT_INSN();
1979 				insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
1980 				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1981 							   src >> 32, 2);
1982 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1983 				insn[i++] = BPF_EXIT_INSN();
1984 			}
1985 
1986 			/* Check destination register value */
1987 			if (!(rd == R0 && op == BPF_CMPXCHG) &&
1988 			    !(rd == rs && (op & BPF_FETCH))) {
1989 				insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
1990 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1991 				insn[i++] = BPF_EXIT_INSN();
1992 			}
1993 
1994 			/* Check value in memory */
1995 			if (rs != rd) {                  /* No aliasing */
1996 				i += __bpf_ld_imm64(&insn[i], R1, res);
1997 			} else if (op == BPF_XCHG) {     /* Aliased, XCHG */
1998 				insn[i++] = BPF_MOV64_REG(R1, R10);
1999 			} else if (op == BPF_CMPXCHG) {  /* Aliased, CMPXCHG */
2000 				i += __bpf_ld_imm64(&insn[i], R1, mem);
2001 			} else {                        /* Aliased, ALU oper */
2002 				i += __bpf_ld_imm64(&insn[i], R1, mem);
2003 				insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
2004 			}
2005 
2006 			insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
2007 			if (width == BPF_DW)
2008 				insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
2009 			else /* width == BPF_W */
2010 				insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
2011 			insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
2012 			insn[i++] = BPF_EXIT_INSN();
2013 		}
2014 	}
2015 
2016 	insn[i++] = BPF_MOV64_IMM(R0, 1);
2017 	insn[i++] = BPF_EXIT_INSN();
2018 
2019 	self->u.ptr.insns = insn;
2020 	self->u.ptr.len = i;
2021 	BUG_ON(i > len);
2022 
2023 	return 0;
2024 }
2025 
2026 /* 64-bit atomic register tests */
2027 static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
2028 {
2029 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
2030 }
2031 
2032 static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
2033 {
2034 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
2035 }
2036 
2037 static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
2038 {
2039 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
2040 }
2041 
2042 static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
2043 {
2044 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
2045 }
2046 
2047 static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
2048 {
2049 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
2050 }
2051 
2052 static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
2053 {
2054 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
2055 }
2056 
2057 static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
2058 {
2059 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
2060 }
2061 
2062 static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
2063 {
2064 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
2065 }
2066 
2067 static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
2068 {
2069 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
2070 }
2071 
2072 static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
2073 {
2074 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
2075 }
2076 
2077 /* 32-bit atomic register tests */
2078 static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
2079 {
2080 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
2081 }
2082 
2083 static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
2084 {
2085 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
2086 }
2087 
2088 static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
2089 {
2090 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
2091 }
2092 
2093 static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
2094 {
2095 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
2096 }
2097 
2098 static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
2099 {
2100 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
2101 }
2102 
2103 static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
2104 {
2105 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
2106 }
2107 
2108 static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
2109 {
2110 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
2111 }
2112 
2113 static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
2114 {
2115 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
2116 }
2117 
2118 static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
2119 {
2120 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
2121 }
2122 
2123 static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
2124 {
2125 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
2126 }
2127 
2128 /*
2129  * Test the two-instruction 64-bit immediate load operation for all
2130  * power-of-two magnitudes of the immediate operand. For each MSB, a block
2131  * of immediate values centered around the power-of-two MSB is tested,
2132  * for both positive and negative values. The test is designed to verify
2133  * the operation for JITs that emit different code depending on the magnitude
2134  * of the immediate value. This is often the case if the native instruction
2135  * immediate field width is narrower than 32 bits.
2136  */
2137 static int bpf_fill_ld_imm64(struct bpf_test *self)
2138 {
2139 	int block = 64; /* Increase for more tests per MSB position */
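	/* 3 fixed insns plus 8 insns per immediate: 63 MSB positions times
	 * block adjacent values times 2 signs.
	 */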
2140 	int len = 3 + 8 * 63 * block * 2;
2141 	struct bpf_insn *insn;
2142 	int bit, adj, sign;
2143 	int i = 0;
2144 
2145 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2146 	if (!insn)
2147 		return -ENOMEM;
2148 
2149 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2150 
2151 	for (bit = 0; bit <= 62; bit++) {
2152 		for (adj = -block / 2; adj < block / 2; adj++) {
2153 			for (sign = -1; sign <= 1; sign += 2) {
2154 				s64 imm = sign * ((1LL << bit) + adj);
2155 
2156 				/* Perform operation */
2157 				i += __bpf_ld_imm64(&insn[i], R1, imm);
2158 
2159 				/* Load reference */
2160 				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2161 				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
2162 							  (u32)(imm >> 32));
2163 				insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2164 				insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
2165 
2166 				/* Check result */
2167 				insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2168 				insn[i++] = BPF_EXIT_INSN();
2169 			}
2170 		}
2171 	}
2172 
2173 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2174 	insn[i++] = BPF_EXIT_INSN();
2175 
2176 	self->u.ptr.insns = insn;
2177 	self->u.ptr.len = len;
2178 	BUG_ON(i != len);
2179 
2180 	return 0;
2181 }
2182 
2183 /*
2184  * Exhaustive tests of JMP operations for all combinations of power-of-two
2185  * magnitudes of the operands, both for positive and negative values. The
2186  * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
2187  * emit different code depending on the magnitude of the immediate value.
2188  */
2189 
2190 static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
2191 {
2192 	switch (op) {
2193 	case BPF_JSET:
2194 		return !!(v1 & v2);
2195 	case BPF_JEQ:
2196 		return v1 == v2;
2197 	case BPF_JNE:
2198 		return v1 != v2;
2199 	case BPF_JGT:
2200 		return (u64)v1 > (u64)v2;
2201 	case BPF_JGE:
2202 		return (u64)v1 >= (u64)v2;
2203 	case BPF_JLT:
2204 		return (u64)v1 < (u64)v2;
2205 	case BPF_JLE:
2206 		return (u64)v1 <= (u64)v2;
2207 	case BPF_JSGT:
2208 		return v1 > v2;
2209 	case BPF_JSGE:
2210 		return v1 >= v2;
2211 	case BPF_JSLT:
2212 		return v1 < v2;
2213 	case BPF_JSLE:
2214 		return v1 <= v2;
2215 	}
2216 	return false;
2217 }
2218 
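/*
 * Emitter helpers used by __bpf_fill_pattern() below: when called with
 * insns == NULL they only report the worst-case number of instructions
 * per test block, otherwise they emit the block and return its length.
 */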
2219 static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
2220 			      struct bpf_insn *insns, s64 dst, s64 imm)
2221 {
2222 	int op = *(int *)arg;
2223 
2224 	if (insns) {
2225 		bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
2226 		int i = 0;
2227 
2228 		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);
2229 
2230 		i += __bpf_ld_imm64(&insns[i], R1, dst);
2231 		insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
2232 		if (!match)
2233 			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2234 		insns[i++] = BPF_EXIT_INSN();
2235 
2236 		return i;
2237 	}
2238 
2239 	return 5 + 1;
2240 }
2241 
2242 static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
2243 				struct bpf_insn *insns, s64 dst, s64 imm)
2244 {
2245 	int op = *(int *)arg;
2246 
2247 	if (insns) {
2248 		bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
2249 		int i = 0;
2250 
2251 		i += __bpf_ld_imm64(&insns[i], R1, dst);
2252 		insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
2253 		if (!match)
2254 			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2255 		insns[i++] = BPF_EXIT_INSN();
2256 
2257 		return i;
2258 	}
2259 
2260 	return 5;
2261 }
2262 
2263 static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
2264 			      struct bpf_insn *insns, s64 dst, s64 src)
2265 {
2266 	int op = *(int *)arg;
2267 
2268 	if (insns) {
2269 		bool match = __bpf_match_jmp_cond(dst, src, op);
2270 		int i = 0;
2271 
2272 		i += __bpf_ld_imm64(&insns[i], R1, dst);
2273 		i += __bpf_ld_imm64(&insns[i], R2, src);
2274 		insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
2275 		if (!match)
2276 			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2277 		insns[i++] = BPF_EXIT_INSN();
2278 
2279 		return i;
2280 	}
2281 
2282 	return 7;
2283 }
2284 
2285 static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
2286 				struct bpf_insn *insns, s64 dst, s64 src)
2287 {
2288 	int op = *(int *)arg;
2289 
2290 	if (insns) {
2291 		bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
2292 		int i = 0;
2293 
2294 		i += __bpf_ld_imm64(&insns[i], R1, dst);
2295 		i += __bpf_ld_imm64(&insns[i], R2, src);
2296 		insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
2297 		if (!match)
2298 			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2299 		insns[i++] = BPF_EXIT_INSN();
2300 
2301 		return i;
2302 	}
2303 
2304 	return 7;
2305 }
2306 
2307 static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
2308 {
2309 	return __bpf_fill_pattern(self, &op, 64, 32,
2310 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2311 				  &__bpf_emit_jmp_imm);
2312 }
2313 
2314 static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
2315 {
2316 	return __bpf_fill_pattern(self, &op, 64, 32,
2317 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2318 				  &__bpf_emit_jmp32_imm);
2319 }
2320 
2321 static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
2322 {
2323 	return __bpf_fill_pattern(self, &op, 64, 64,
2324 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2325 				  &__bpf_emit_jmp_reg);
2326 }
2327 
2328 static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
2329 {
2330 	return __bpf_fill_pattern(self, &op, 64, 64,
2331 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2332 				  &__bpf_emit_jmp32_reg);
2333 }
2334 
2335 /* JMP immediate tests */
2336 static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
2337 {
2338 	return __bpf_fill_jmp_imm(self, BPF_JSET);
2339 }
2340 
2341 static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
2342 {
2343 	return __bpf_fill_jmp_imm(self, BPF_JEQ);
2344 }
2345 
2346 static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
2347 {
2348 	return __bpf_fill_jmp_imm(self, BPF_JNE);
2349 }
2350 
2351 static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
2352 {
2353 	return __bpf_fill_jmp_imm(self, BPF_JGT);
2354 }
2355 
2356 static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
2357 {
2358 	return __bpf_fill_jmp_imm(self, BPF_JGE);
2359 }
2360 
2361 static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
2362 {
2363 	return __bpf_fill_jmp_imm(self, BPF_JLT);
2364 }
2365 
2366 static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
2367 {
2368 	return __bpf_fill_jmp_imm(self, BPF_JLE);
2369 }
2370 
2371 static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
2372 {
2373 	return __bpf_fill_jmp_imm(self, BPF_JSGT);
2374 }
2375 
2376 static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
2377 {
2378 	return __bpf_fill_jmp_imm(self, BPF_JSGE);
2379 }
2380 
2381 static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
2382 {
2383 	return __bpf_fill_jmp_imm(self, BPF_JSLT);
2384 }
2385 
2386 static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
2387 {
2388 	return __bpf_fill_jmp_imm(self, BPF_JSLE);
2389 }
2390 
2391 /* JMP32 immediate tests */
2392 static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
2393 {
2394 	return __bpf_fill_jmp32_imm(self, BPF_JSET);
2395 }
2396 
2397 static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
2398 {
2399 	return __bpf_fill_jmp32_imm(self, BPF_JEQ);
2400 }
2401 
2402 static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
2403 {
2404 	return __bpf_fill_jmp32_imm(self, BPF_JNE);
2405 }
2406 
2407 static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
2408 {
2409 	return __bpf_fill_jmp32_imm(self, BPF_JGT);
2410 }
2411 
2412 static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
2413 {
2414 	return __bpf_fill_jmp32_imm(self, BPF_JGE);
2415 }
2416 
2417 static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
2418 {
2419 	return __bpf_fill_jmp32_imm(self, BPF_JLT);
2420 }
2421 
2422 static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
2423 {
2424 	return __bpf_fill_jmp32_imm(self, BPF_JLE);
2425 }
2426 
2427 static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
2428 {
2429 	return __bpf_fill_jmp32_imm(self, BPF_JSGT);
2430 }
2431 
2432 static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
2433 {
2434 	return __bpf_fill_jmp32_imm(self, BPF_JSGE);
2435 }
2436 
2437 static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
2438 {
2439 	return __bpf_fill_jmp32_imm(self, BPF_JSLT);
2440 }
2441 
2442 static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
2443 {
2444 	return __bpf_fill_jmp32_imm(self, BPF_JSLE);
2445 }
2446 
2447 /* JMP register tests */
2448 static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
2449 {
2450 	return __bpf_fill_jmp_reg(self, BPF_JSET);
2451 }
2452 
2453 static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
2454 {
2455 	return __bpf_fill_jmp_reg(self, BPF_JEQ);
2456 }
2457 
2458 static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
2459 {
2460 	return __bpf_fill_jmp_reg(self, BPF_JNE);
2461 }
2462 
2463 static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
2464 {
2465 	return __bpf_fill_jmp_reg(self, BPF_JGT);
2466 }
2467 
2468 static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
2469 {
2470 	return __bpf_fill_jmp_reg(self, BPF_JGE);
2471 }
2472 
2473 static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
2474 {
2475 	return __bpf_fill_jmp_reg(self, BPF_JLT);
2476 }
2477 
2478 static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
2479 {
2480 	return __bpf_fill_jmp_reg(self, BPF_JLE);
2481 }
2482 
2483 static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
2484 {
2485 	return __bpf_fill_jmp_reg(self, BPF_JSGT);
2486 }
2487 
2488 static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
2489 {
2490 	return __bpf_fill_jmp_reg(self, BPF_JSGE);
2491 }
2492 
2493 static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
2494 {
2495 	return __bpf_fill_jmp_reg(self, BPF_JSLT);
2496 }
2497 
2498 static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
2499 {
2500 	return __bpf_fill_jmp_reg(self, BPF_JSLE);
2501 }
2502 
2503 /* JMP32 register tests */
2504 static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
2505 {
2506 	return __bpf_fill_jmp32_reg(self, BPF_JSET);
2507 }
2508 
2509 static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
2510 {
2511 	return __bpf_fill_jmp32_reg(self, BPF_JEQ);
2512 }
2513 
2514 static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
2515 {
2516 	return __bpf_fill_jmp32_reg(self, BPF_JNE);
2517 }
2518 
2519 static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
2520 {
2521 	return __bpf_fill_jmp32_reg(self, BPF_JGT);
2522 }
2523 
2524 static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
2525 {
2526 	return __bpf_fill_jmp32_reg(self, BPF_JGE);
2527 }
2528 
2529 static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
2530 {
2531 	return __bpf_fill_jmp32_reg(self, BPF_JLT);
2532 }
2533 
2534 static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
2535 {
2536 	return __bpf_fill_jmp32_reg(self, BPF_JLE);
2537 }
2538 
2539 static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
2540 {
2541 	return __bpf_fill_jmp32_reg(self, BPF_JSGT);
2542 }
2543 
2544 static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
2545 {
2546 	return __bpf_fill_jmp32_reg(self, BPF_JSGE);
2547 }
2548 
2549 static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
2550 {
2551 	return __bpf_fill_jmp32_reg(self, BPF_JSLT);
2552 }
2553 
2554 static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
2555 {
2556 	return __bpf_fill_jmp32_reg(self, BPF_JSLE);
2557 }
2558 
2559 /*
2560  * Set up a sequence of staggered jumps, forwards and backwards with
2561  * increasing offset. This tests the conversion of relative jumps to
2562  * JITed native jumps. On some architectures, for example MIPS, a large
2563  * PC-relative jump offset may overflow the immediate field of the native
2564  * conditional branch instruction, triggering a conversion to use an
2565  * absolute jump instead. Since this changes the jump offsets, another
2566  * offset computation pass is necessary, and that may in turn trigger
2567  * another branch conversion. This jump sequence is particularly nasty
2568  * in that regard.
2569  *
2570  * The sequence generation is parameterized by size and jump type.
2571  * The size must be even, and the expected result is always size + 1.
2572  * Below is an example with size=8 and result=9.
2573  *
2574  *                     ________________________Start
2575  *                     R0 = 0
2576  *                     R1 = r1
2577  *                     R2 = r2
2578  *            ,------- JMP +4 * 3______________Preamble: 4 insns
2579  * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
2580  * |          |        R0 = 8                                        |
2581  * |          |        JMP +7 * 3               ------------------------.
2582  * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------.     |  |
2583  * | |        |        R0 = 6                                  |     |  |
2584  * | |        |        JMP +5 * 3               ------------------.  |  |
2585  * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------.     |  |  |  |
2586  * | | |      |        R0 = 4                            |     |  |  |  |
2587  * | | |      |        JMP +3 * 3               ------------.  |  |  |  |
2588  * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--.     |  |  |  |  |  |
2589  * | | | |    |        R0 = 2                      |     |  |  |  |  |  |
2590  * | | | |    |        JMP +1 * 3               ------.  |  |  |  |  |  |
2591  * | | | | ,--------4> if R0 != 0 JMP 4 * 3 + 1    1  2  3  4  5  6  7  8 loc
2592  * | | | | |           R0 = 1                     -1 +2 -3 +4 -5 +6 -7 +8 off
2593  * | | | | |           JMP -2 * 3               ---'  |  |  |  |  |  |  |
2594  * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----'  |  |  |  |  |  |
2595  * | | | | | |         R0 = 3                            |  |  |  |  |  |
2596  * | | | | | |         JMP -4 * 3               ---------'  |  |  |  |  |
2597  * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------'  |  |  |  |
2598  * | | | | | | |       R0 = 5                                  |  |  |  |
2599  * | | | | | | |       JMP -6 * 3               ---------------'  |  |  |
2600  * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------'  |  |
2601  * | | | | | | | |     R0 = 7                                        |  |
2602  * | | Error | | |     JMP -8 * 3               ---------------------'  |
2603  * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
2604  * | | | | | | | | |   R0 = 9__________________Sequence: 3 * (size + 1) - 1 insns
2605  * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
2606  *
2607  */
2608 
2609 /* The maximum size parameter */
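/* Limited by the signed 16-bit jump offset, with 3 insns per sequence
 * entry; the size must also be even.
 */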
2610 #define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
2611 
2612 /* We use a reduced number of iterations to get a reasonable execution time */
2613 #define NR_STAGGERED_JMP_RUNS 10
2614 
2615 static int __bpf_fill_staggered_jumps(struct bpf_test *self,
2616 				      const struct bpf_insn *jmp,
2617 				      u64 r1, u64 r2)
2618 {
2619 	int size = self->test[0].result - 1;
2620 	int len = 4 + 3 * (size + 1);
2621 	struct bpf_insn *insns;
2622 	int off, ind;
2623 
2624 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
2625 	if (!insns)
2626 		return -ENOMEM;
2627 
2628 	/* Preamble */
2629 	insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2630 	insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
2631 	insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
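	/* Enter the sequence at its middle entry (index size / 2) */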
2632 	insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);
2633 
2634 	/* Sequence */
2635 	for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
2636 		struct bpf_insn *ins = &insns[4 + 3 * ind];
2637 		int loc;
2638 
2639 		if (off == 0)
2640 			off--;
2641 
2642 		loc = abs(off);
2643 		ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
2644 				     3 * (size - ind) + 1);
2645 		ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
2646 		ins[2] = *jmp;
2647 		ins[2].off = 3 * (off - 1);
2648 	}
2649 
2650 	/* Return */
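	/* insns[len - 1] is the unused jump slot of the last sequence entry */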
2651 	insns[len - 1] = BPF_EXIT_INSN();
2652 
2653 	self->u.ptr.insns = insns;
2654 	self->u.ptr.len = len;
2655 
2656 	return 0;
2657 }
2658 
2659 /* 64-bit unconditional jump */
2660 static int bpf_fill_staggered_ja(struct bpf_test *self)
2661 {
2662 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);
2663 
2664 	return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
2665 }
2666 
2667 /* 64-bit immediate jumps */
2668 static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
2669 {
2670 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
2671 
2672 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2673 }
2674 
2675 static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
2676 {
2677 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
2678 
2679 	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2680 }
2681 
2682 static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
2683 {
2684 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
2685 
2686 	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2687 }
2688 
2689 static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
2690 {
2691 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
2692 
2693 	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2694 }
2695 
2696 static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
2697 {
2698 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
2699 
2700 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2701 }
2702 
2703 static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
2704 {
2705 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
2706 
2707 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2708 }
2709 
2710 static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
2711 {
2712 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
2713 
2714 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2715 }
2716 
2717 static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
2718 {
2719 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
2720 
2721 	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2722 }
2723 
2724 static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
2725 {
2726 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
2727 
2728 	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2729 }
2730 
2731 static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
2732 {
2733 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
2734 
2735 	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2736 }
2737 
2738 static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
2739 {
2740 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
2741 
2742 	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2743 }
2744 
2745 /* 64-bit register jumps */
2746 static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
2747 {
2748 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
2749 
2750 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2751 }
2752 
2753 static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
2754 {
2755 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
2756 
2757 	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2758 }
2759 
2760 static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
2761 {
2762 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
2763 
2764 	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
2765 }
2766 
2767 static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
2768 {
2769 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
2770 
2771 	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
2772 }
2773 
2774 static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
2775 {
2776 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
2777 
2778 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2779 }
2780 
2781 static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
2782 {
2783 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
2784 
2785 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
2786 }
2787 
2788 static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
2789 {
2790 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
2791 
2792 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2793 }
2794 
2795 static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
2796 {
2797 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
2798 
2799 	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
2800 }
2801 
2802 static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
2803 {
2804 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
2805 
2806 	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
2807 }
2808 
2809 static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
2810 {
2811 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
2812 
2813 	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
2814 }
2815 
2816 static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
2817 {
2818 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
2819 
2820 	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
2821 }
2822 
2823 /* 32-bit immediate jumps */
2824 static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
2825 {
2826 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
2827 
2828 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2829 }
2830 
2831 static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
2832 {
2833 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
2834 
2835 	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2836 }
2837 
2838 static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
2839 {
2840 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
2841 
2842 	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2843 }
2844 
2845 static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
2846 {
2847 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
2848 
2849 	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2850 }
2851 
2852 static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
2853 {
2854 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
2855 
2856 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2857 }
2858 
2859 static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
2860 {
2861 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
2862 
2863 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2864 }
2865 
2866 static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
2867 {
2868 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
2869 
2870 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2871 }
2872 
2873 static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
2874 {
2875 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
2876 
2877 	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2878 }
2879 
2880 static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
2881 {
2882 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
2883 
2884 	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2885 }
2886 
2887 static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
2888 {
2889 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
2890 
2891 	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2892 }
2893 
2894 static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
2895 {
2896 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
2897 
2898 	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2899 }
2900 
2901 /* 32-bit register jumps */
2902 static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
2903 {
2904 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
2905 
2906 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2907 }
2908 
2909 static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
2910 {
2911 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
2912 
2913 	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2914 }
2915 
2916 static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
2917 {
2918 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
2919 
2920 	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
2921 }
2922 
2923 static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
2924 {
2925 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
2926 
2927 	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
2928 }
2929 
2930 static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
2931 {
2932 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
2933 
2934 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2935 }
2936 
2937 static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
2938 {
2939 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
2940 
2941 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
2942 }
2943 
2944 static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
2945 {
2946 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
2947 
2948 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2949 }
2950 
2951 static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
2952 {
2953 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
2954 
2955 	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
2956 }
2957 
2958 static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
2959 {
2960 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
2961 
2962 	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
2963 }
2964 
2965 static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
2966 {
2967 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
2968 
2969 	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
2970 }
2971 
2972 static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
2973 {
2974 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
2975 
2976 	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
2977 }
2978 
2979 
2980 static struct bpf_test tests[] = {
2981 	{
2982 		"TAX",
2983 		.u.insns = {
2984 			BPF_STMT(BPF_LD | BPF_IMM, 1),
2985 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
2986 			BPF_STMT(BPF_LD | BPF_IMM, 2),
2987 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
2988 			BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
2989 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
2990 			BPF_STMT(BPF_LD | BPF_LEN, 0),
2991 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
2992 			BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
2993 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
2994 			BPF_STMT(BPF_RET | BPF_A, 0)
2995 		},
2996 		CLASSIC,
2997 		{ 10, 20, 30, 40, 50 },
2998 		{ { 2, 10 }, { 3, 20 }, { 4, 30 } },
2999 	},
3000 	{
3001 		"TXA",
3002 		.u.insns = {
3003 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3004 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3005 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3006 			BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
3007 		},
3008 		CLASSIC,
3009 		{ 10, 20, 30, 40, 50 },
3010 		{ { 1, 2 }, { 3, 6 }, { 4, 8 } },
3011 	},
3012 	{
3013 		"ADD_SUB_MUL_K",
3014 		.u.insns = {
3015 			BPF_STMT(BPF_LD | BPF_IMM, 1),
3016 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
3017 			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3018 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3019 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
3020 			BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
3021 			BPF_STMT(BPF_RET | BPF_A, 0)
3022 		},
3023 		CLASSIC | FLAG_NO_DATA,
3024 		{ },
3025 		{ { 0, 0xfffffffd } }
3026 	},
3027 	{
3028 		"DIV_MOD_KX",
3029 		.u.insns = {
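			/*
			 * Walk-through: A = 8 / 2 = 4, X = 4;
			 * A = 0xffffffff / 4 = 0x3fffffff, X = 0x3fffffff;
			 * A = 0xffffffff / 0x70000000 = 2, X = 2;
			 * A = 0xffffffff % 2 = 1, X = 1;
			 * A = 0xffffffff % 0x70000000 = 0x1fffffff;
			 * A += X gives the expected 0x20000000.
			 */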
3030 			BPF_STMT(BPF_LD | BPF_IMM, 8),
3031 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
3032 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3033 			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3034 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
3035 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3036 			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3037 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
3038 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3039 			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3040 			BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
3041 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3042 			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3043 			BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
3044 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3045 			BPF_STMT(BPF_RET | BPF_A, 0)
3046 		},
3047 		CLASSIC | FLAG_NO_DATA,
3048 		{ },
3049 		{ { 0, 0x20000000 } }
3050 	},
3051 	{
3052 		"AND_OR_LSH_K",
3053 		.u.insns = {
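			/*
			 * Walk-through: A = 0xff & 0xf0 = 0xf0; A <<= 27 truncates
			 * to 0x80000000, X = 0x80000000; A = 0xf | 0xf0 = 0xff;
			 * A += X gives the expected 0x800000ff.
			 */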
3054 			BPF_STMT(BPF_LD | BPF_IMM, 0xff),
3055 			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3056 			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
3057 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3058 			BPF_STMT(BPF_LD | BPF_IMM, 0xf),
3059 			BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
3060 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3061 			BPF_STMT(BPF_RET | BPF_A, 0)
3062 		},
3063 		CLASSIC | FLAG_NO_DATA,
3064 		{ },
3065 		{ { 0, 0x800000ff }, { 1, 0x800000ff } },
3066 	},
3067 	{
3068 		"LD_IMM_0",
3069 		.u.insns = {
3070 			BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
3071 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
3072 			BPF_STMT(BPF_RET | BPF_K, 0),
3073 			BPF_STMT(BPF_RET | BPF_K, 1),
3074 		},
3075 		CLASSIC,
3076 		{ },
3077 		{ { 1, 1 } },
3078 	},
3079 	{
3080 		"LD_IND",
3081 		.u.insns = {
3082 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3083 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
3084 			BPF_STMT(BPF_RET | BPF_K, 1)
3085 		},
3086 		CLASSIC,
3087 		{ },
3088 		{ { 1, 0 }, { 10, 0 }, { 60, 0 } },
3089 	},
3090 	{
3091 		"LD_ABS",
3092 		.u.insns = {
3093 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
3094 			BPF_STMT(BPF_RET | BPF_K, 1)
3095 		},
3096 		CLASSIC,
3097 		{ },
3098 		{ { 1, 0 }, { 10, 0 }, { 60, 0 } },
3099 	},
3100 	{
3101 		"LD_ABS_LL",
3102 		.u.insns = {
3103 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
3104 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3105 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
3106 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3107 			BPF_STMT(BPF_RET | BPF_A, 0)
3108 		},
3109 		CLASSIC,
3110 		{ 1, 2, 3 },
3111 		{ { 1, 0 }, { 2, 3 } },
3112 	},
3113 	{
3114 		"LD_IND_LL",
3115 		.u.insns = {
3116 			BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
3117 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3118 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3119 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3120 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3121 			BPF_STMT(BPF_RET | BPF_A, 0)
3122 		},
3123 		CLASSIC,
3124 		{ 1, 2, 3, 0xff },
3125 		{ { 1, 1 }, { 3, 3 }, { 4, 0xff } },
3126 	},
3127 	{
3128 		"LD_ABS_NET",
3129 		.u.insns = {
3130 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
3131 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3132 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
3133 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3134 			BPF_STMT(BPF_RET | BPF_A, 0)
3135 		},
3136 		CLASSIC,
3137 		{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3138 		{ { 15, 0 }, { 16, 3 } },
3139 	},
3140 	{
3141 		"LD_IND_NET",
3142 		.u.insns = {
3143 			BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
3144 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3145 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3146 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3147 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3148 			BPF_STMT(BPF_RET | BPF_A, 0)
3149 		},
3150 		CLASSIC,
3151 		{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3152 		{ { 14, 0 }, { 15, 1 }, { 17, 3 } },
3153 	},
3154 	{
3155 		"LD_PKTTYPE",
3156 		.u.insns = {
3157 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3158 				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3159 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3160 			BPF_STMT(BPF_RET | BPF_K, 1),
3161 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3162 				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3163 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3164 			BPF_STMT(BPF_RET | BPF_K, 1),
3165 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3166 				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3167 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3168 			BPF_STMT(BPF_RET | BPF_K, 1),
3169 			BPF_STMT(BPF_RET | BPF_A, 0)
3170 		},
3171 		CLASSIC,
3172 		{ },
3173 		{ { 1, 3 }, { 10, 3 } },
3174 	},
3175 	{
3176 		"LD_MARK",
3177 		.u.insns = {
3178 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3179 				 SKF_AD_OFF + SKF_AD_MARK),
3180 			BPF_STMT(BPF_RET | BPF_A, 0)
3181 		},
3182 		CLASSIC,
3183 		{ },
3184 		{ { 1, SKB_MARK}, { 10, SKB_MARK} },
3185 	},
3186 	{
3187 		"LD_RXHASH",
3188 		.u.insns = {
3189 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3190 				 SKF_AD_OFF + SKF_AD_RXHASH),
3191 			BPF_STMT(BPF_RET | BPF_A, 0)
3192 		},
3193 		CLASSIC,
3194 		{ },
3195 		{ { 1, SKB_HASH}, { 10, SKB_HASH} },
3196 	},
3197 	{
3198 		"LD_QUEUE",
3199 		.u.insns = {
3200 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3201 				 SKF_AD_OFF + SKF_AD_QUEUE),
3202 			BPF_STMT(BPF_RET | BPF_A, 0)
3203 		},
3204 		CLASSIC,
3205 		{ },
3206 		{ { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
3207 	},
3208 	{
3209 		"LD_PROTOCOL",
3210 		.u.insns = {
3211 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
3212 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
3213 			BPF_STMT(BPF_RET | BPF_K, 0),
3214 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3215 				 SKF_AD_OFF + SKF_AD_PROTOCOL),
3216 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3217 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3218 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
3219 			BPF_STMT(BPF_RET | BPF_K, 0),
3220 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3221 			BPF_STMT(BPF_RET | BPF_A, 0)
3222 		},
3223 		CLASSIC,
3224 		{ 10, 20, 30 },
3225 		{ { 10, ETH_P_IP }, { 100, ETH_P_IP } },
3226 	},
3227 	{
3228 		"LD_VLAN_TAG",
3229 		.u.insns = {
3230 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3231 				 SKF_AD_OFF + SKF_AD_VLAN_TAG),
3232 			BPF_STMT(BPF_RET | BPF_A, 0)
3233 		},
3234 		CLASSIC,
3235 		{ },
3236 		{
3237 			{ 1, SKB_VLAN_TCI },
3238 			{ 10, SKB_VLAN_TCI }
3239 		},
3240 	},
3241 	{
3242 		"LD_VLAN_TAG_PRESENT",
3243 		.u.insns = {
3244 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3245 				 SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
3246 			BPF_STMT(BPF_RET | BPF_A, 0)
3247 		},
3248 		CLASSIC,
3249 		{ },
3250 		{
3251 			{ 1, SKB_VLAN_PRESENT },
3252 			{ 10, SKB_VLAN_PRESENT }
3253 		},
3254 	},
3255 	{
3256 		"LD_IFINDEX",
3257 		.u.insns = {
3258 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3259 				 SKF_AD_OFF + SKF_AD_IFINDEX),
3260 			BPF_STMT(BPF_RET | BPF_A, 0)
3261 		},
3262 		CLASSIC,
3263 		{ },
3264 		{ { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
3265 	},
3266 	{
3267 		"LD_HATYPE",
3268 		.u.insns = {
3269 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3270 				 SKF_AD_OFF + SKF_AD_HATYPE),
3271 			BPF_STMT(BPF_RET | BPF_A, 0)
3272 		},
3273 		CLASSIC,
3274 		{ },
3275 		{ { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
3276 	},
3277 	{
3278 		"LD_CPU",
3279 		.u.insns = {
3280 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3281 				 SKF_AD_OFF + SKF_AD_CPU),
3282 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3283 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3284 				 SKF_AD_OFF + SKF_AD_CPU),
3285 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3286 			BPF_STMT(BPF_RET | BPF_A, 0)
3287 		},
3288 		CLASSIC,
3289 		{ },
3290 		{ { 1, 0 }, { 10, 0 } },
3291 	},
3292 	{
3293 		"LD_NLATTR",
3294 		.u.insns = {
3295 			BPF_STMT(BPF_LDX | BPF_IMM, 2),
3296 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3297 			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3298 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3299 				 SKF_AD_OFF + SKF_AD_NLATTR),
3300 			BPF_STMT(BPF_RET | BPF_A, 0)
3301 		},
3302 		CLASSIC,
3303 #ifdef __BIG_ENDIAN
3304 		{ 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
3305 #else
3306 		{ 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
3307 #endif
3308 		{ { 4, 0 }, { 20, 6 } },
3309 	},
3310 	{
3311 		"LD_NLATTR_NEST",
3312 		.u.insns = {
3313 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3314 			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3315 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3316 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3317 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3318 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3319 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3320 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3321 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3322 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3323 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3324 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3325 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3326 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3327 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3328 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3329 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3330 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3331 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3332 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3333 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3334 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3335 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3336 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3337 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3338 			BPF_STMT(BPF_RET | BPF_A, 0)
3339 		},
3340 		CLASSIC,
3341 #ifdef __BIG_ENDIAN
3342 		{ 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
3343 #else
3344 		{ 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
3345 #endif
3346 		{ { 4, 0 }, { 20, 10 } },
3347 	},
3348 	{
3349 		"LD_PAYLOAD_OFF",
3350 		.u.insns = {
3351 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3352 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3353 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3354 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3355 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3356 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3357 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3358 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3359 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3360 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3361 			BPF_STMT(BPF_RET | BPF_A, 0)
3362 		},
3363 		CLASSIC,
3364 		/* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
3365 		 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
3366 		 * id 9737, seq 1, length 64
3367 		 */
3368 		{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3369 		  0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3370 		  0x08, 0x00,
3371 		  0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
3372 		  0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
3373 		{ { 30, 0 }, { 100, 42 } },
3374 	},
3375 	{
3376 		"LD_ANC_XOR",
3377 		.u.insns = {
3378 			BPF_STMT(BPF_LD | BPF_IMM, 10),
3379 			BPF_STMT(BPF_LDX | BPF_IMM, 300),
3380 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3381 				 SKF_AD_OFF + SKF_AD_ALU_XOR_X),
3382 			BPF_STMT(BPF_RET | BPF_A, 0)
3383 		},
3384 		CLASSIC,
3385 		{ },
3386 		{ { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
3387 	},
3388 	{
3389 		"SPILL_FILL",
3390 		.u.insns = {
3391 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3392 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3393 			BPF_STMT(BPF_ALU | BPF_RSH, 1),
3394 			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3395 			BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
3396 			BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
3397 			BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
3398 			BPF_STMT(BPF_STX, 15), /* M3 = len */
3399 			BPF_STMT(BPF_LDX | BPF_MEM, 1),
3400 			BPF_STMT(BPF_LD | BPF_MEM, 2),
3401 			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3402 			BPF_STMT(BPF_LDX | BPF_MEM, 15),
3403 			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3404 			BPF_STMT(BPF_RET | BPF_A, 0)
3405 		},
3406 		CLASSIC,
3407 		{ },
3408 		{ { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
3409 	},
3410 	{
3411 		"JEQ",
3412 		.u.insns = {
3413 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3414 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3415 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
3416 			BPF_STMT(BPF_RET | BPF_K, 1),
3417 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3418 		},
3419 		CLASSIC,
3420 		{ 3, 3, 3, 3, 3 },
3421 		{ { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
3422 	},
3423 	{
3424 		"JGT",
3425 		.u.insns = {
3426 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3427 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3428 			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
3429 			BPF_STMT(BPF_RET | BPF_K, 1),
3430 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3431 		},
3432 		CLASSIC,
3433 		{ 4, 4, 4, 3, 3 },
3434 		{ { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
3435 	},
3436 	{
3437 		"JGE (jt 0), test 1",
3438 		.u.insns = {
3439 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3440 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3441 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3442 			BPF_STMT(BPF_RET | BPF_K, 1),
3443 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3444 		},
3445 		CLASSIC,
3446 		{ 4, 4, 4, 3, 3 },
3447 		{ { 2, 0 }, { 3, 1 }, { 4, 1 } },
3448 	},
3449 	{
3450 		"JGE (jt 0), test 2",
3451 		.u.insns = {
3452 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3453 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3454 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3455 			BPF_STMT(BPF_RET | BPF_K, 1),
3456 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3457 		},
3458 		CLASSIC,
3459 		{ 4, 4, 5, 3, 3 },
3460 		{ { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
3461 	},
3462 	{
3463 		"JGE",
3464 		.u.insns = {
3465 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3466 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
3467 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
3468 			BPF_STMT(BPF_RET | BPF_K, 10),
3469 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
3470 			BPF_STMT(BPF_RET | BPF_K, 20),
3471 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
3472 			BPF_STMT(BPF_RET | BPF_K, 30),
3473 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
3474 			BPF_STMT(BPF_RET | BPF_K, 40),
3475 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3476 		},
3477 		CLASSIC,
3478 		{ 1, 2, 3, 4, 5 },
3479 		{ { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
3480 	},
3481 	{
3482 		"JSET",
3483 		.u.insns = {
3484 			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3485 			BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
3486 			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3487 			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3488 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3489 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3490 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
3491 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3492 			BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
3493 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
3494 			BPF_STMT(BPF_RET | BPF_K, 10),
3495 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
3496 			BPF_STMT(BPF_RET | BPF_K, 20),
3497 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3498 			BPF_STMT(BPF_RET | BPF_K, 30),
3499 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3500 			BPF_STMT(BPF_RET | BPF_K, 30),
3501 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3502 			BPF_STMT(BPF_RET | BPF_K, 30),
3503 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3504 			BPF_STMT(BPF_RET | BPF_K, 30),
3505 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3506 			BPF_STMT(BPF_RET | BPF_K, 30),
3507 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3508 		},
3509 		CLASSIC,
3510 		{ 0, 0xAA, 0x55, 1 },
3511 		{ { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
3512 	},
3513 	{
3514 		"tcpdump port 22",
3515 		.u.insns = {
3516 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3517 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
3518 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
3519 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3520 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3521 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
3522 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
3523 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
3524 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
3525 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
3526 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
3527 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3528 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3529 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3530 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
3531 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3532 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
3533 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3534 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3535 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3536 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3537 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
3538 			BPF_STMT(BPF_RET | BPF_K, 0xffff),
3539 			BPF_STMT(BPF_RET | BPF_K, 0),
3540 		},
3541 		CLASSIC,
3542 		/* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800)
3543 		 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
3544 		 * seq 1305692979:1305693027, ack 3650467037, win 65535,
3545 		 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
3546 		 */
3547 		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3548 		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3549 		  0x08, 0x00,
3550 		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3551 		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3552 		  0x0a, 0x01, 0x01, 0x95, /* ip src */
3553 		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3554 		  0xc2, 0x24,
3555 		  0x00, 0x16 /* dst port */ },
3556 		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3557 	},
3558 	{
3559 		"tcpdump complex",
3560 		.u.insns = {
3561 			/* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
3562 			 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
3563 			 * (len > 115 or len < 30000000000)' -d
3564 			 */
3565 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3566 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
3567 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
3568 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3569 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
3570 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3571 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
3572 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3573 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3574 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3575 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3576 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
3577 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
3578 			BPF_STMT(BPF_ST, 1),
3579 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
3580 			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
3581 			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
3582 			BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
3583 			BPF_STMT(BPF_LD | BPF_MEM, 1),
3584 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3585 			BPF_STMT(BPF_ST, 5),
3586 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3587 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
3588 			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3589 			BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
3590 			BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
3591 			BPF_STMT(BPF_LD | BPF_MEM, 5),
3592 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
3593 			BPF_STMT(BPF_LD | BPF_LEN, 0),
3594 			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
3595 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
3596 			BPF_STMT(BPF_RET | BPF_K, 0xffff),
3597 			BPF_STMT(BPF_RET | BPF_K, 0),
3598 		},
3599 		CLASSIC,
3600 		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3601 		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3602 		  0x08, 0x00,
3603 		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3604 		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3605 		  0x0a, 0x01, 0x01, 0x95, /* ip src */
3606 		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3607 		  0xc2, 0x24,
3608 		  0x00, 0x16 /* dst port */ },
3609 		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3610 	},
3611 	{
3612 		"RET_A",
3613 		.u.insns = {
3614 			/* check that uninitialized X and A contain zeros */
3615 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3616 			BPF_STMT(BPF_RET | BPF_A, 0)
3617 		},
3618 		CLASSIC,
3619 		{ },
3620 		{ {1, 0}, {2, 0} },
3621 	},
3622 	{
3623 		"INT: ADD trivial",
3624 		.u.insns_int = {
3625 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
3626 			BPF_ALU64_IMM(BPF_ADD, R1, 2),
3627 			BPF_ALU64_IMM(BPF_MOV, R2, 3),
3628 			BPF_ALU64_REG(BPF_SUB, R1, R2),
3629 			BPF_ALU64_IMM(BPF_ADD, R1, -1),
3630 			BPF_ALU64_IMM(BPF_MUL, R1, 3),
3631 			BPF_ALU64_REG(BPF_MOV, R0, R1),
3632 			BPF_EXIT_INSN(),
3633 		},
3634 		INTERNAL,
3635 		{ },
3636 		{ { 0, 0xfffffffd } }
3637 	},
3638 	{
3639 		"INT: MUL_X",
3640 		.u.insns_int = {
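			/*
			 * R1 = -1, R2 = 3: 64-bit MUL gives 0xfffffffffffffffd; the
			 * JEQ immediate 0xfffffffd is sign-extended, so it matches.
			 */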
3641 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
3642 			BPF_ALU64_IMM(BPF_MOV, R1, -1),
3643 			BPF_ALU64_IMM(BPF_MOV, R2, 3),
3644 			BPF_ALU64_REG(BPF_MUL, R1, R2),
3645 			BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
3646 			BPF_EXIT_INSN(),
3647 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
3648 			BPF_EXIT_INSN(),
3649 		},
3650 		INTERNAL,
3651 		{ },
3652 		{ { 0, 1 } }
3653 	},
3654 	{
3655 		"INT: MUL_X2",
3656 		.u.insns_int = {
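			/*
			 * 32-bit MOV of -1 zero-extends, so R1 = 0xffffffff; 64-bit
			 * MUL by 3 gives 0x2fffffffd, and >> 8 leaves 0x2ffffff.
			 */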
3657 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
3658 			BPF_ALU32_IMM(BPF_MOV, R1, -1),
3659 			BPF_ALU32_IMM(BPF_MOV, R2, 3),
3660 			BPF_ALU64_REG(BPF_MUL, R1, R2),
3661 			BPF_ALU64_IMM(BPF_RSH, R1, 8),
3662 			BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
3663 			BPF_EXIT_INSN(),
3664 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
3665 			BPF_EXIT_INSN(),
3666 		},
3667 		INTERNAL,
3668 		{ },
3669 		{ { 0, 1 } }
3670 	},
3671 	{
3672 		"INT: MUL32_X",
3673 		.u.insns_int = {
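			/*
			 * R1 = -1 (64-bit); 32-bit MUL by 3 keeps only the low word,
			 * 0xfffffffd, zero-extended, and >> 8 leaves 0xffffff.
			 */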
3674 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
3675 			BPF_ALU64_IMM(BPF_MOV, R1, -1),
3676 			BPF_ALU32_IMM(BPF_MOV, R2, 3),
3677 			BPF_ALU32_REG(BPF_MUL, R1, R2),
3678 			BPF_ALU64_IMM(BPF_RSH, R1, 8),
3679 			BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
3680 			BPF_EXIT_INSN(),
3681 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
3682 			BPF_EXIT_INSN(),
3683 		},
3684 		INTERNAL,
3685 		{ },
3686 		{ { 0, 1 } }
3687 	},
3688 	{
3689 		/* Have to test all register combinations, since
3690 		 * JITing of different registers will produce
3691 		 * different asm code.
3692 		 */
3693 		"INT: ADD 64-bit",
3694 		.u.insns_int = {
3695 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
3696 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
3697 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
3698 			BPF_ALU64_IMM(BPF_MOV, R3, 3),
3699 			BPF_ALU64_IMM(BPF_MOV, R4, 4),
3700 			BPF_ALU64_IMM(BPF_MOV, R5, 5),
3701 			BPF_ALU64_IMM(BPF_MOV, R6, 6),
3702 			BPF_ALU64_IMM(BPF_MOV, R7, 7),
3703 			BPF_ALU64_IMM(BPF_MOV, R8, 8),
3704 			BPF_ALU64_IMM(BPF_MOV, R9, 9),
3705 			BPF_ALU64_IMM(BPF_ADD, R0, 20),
3706 			BPF_ALU64_IMM(BPF_ADD, R1, 20),
3707 			BPF_ALU64_IMM(BPF_ADD, R2, 20),
3708 			BPF_ALU64_IMM(BPF_ADD, R3, 20),
3709 			BPF_ALU64_IMM(BPF_ADD, R4, 20),
3710 			BPF_ALU64_IMM(BPF_ADD, R5, 20),
3711 			BPF_ALU64_IMM(BPF_ADD, R6, 20),
3712 			BPF_ALU64_IMM(BPF_ADD, R7, 20),
3713 			BPF_ALU64_IMM(BPF_ADD, R8, 20),
3714 			BPF_ALU64_IMM(BPF_ADD, R9, 20),
3715 			BPF_ALU64_IMM(BPF_SUB, R0, 10),
3716 			BPF_ALU64_IMM(BPF_SUB, R1, 10),
3717 			BPF_ALU64_IMM(BPF_SUB, R2, 10),
3718 			BPF_ALU64_IMM(BPF_SUB, R3, 10),
3719 			BPF_ALU64_IMM(BPF_SUB, R4, 10),
3720 			BPF_ALU64_IMM(BPF_SUB, R5, 10),
3721 			BPF_ALU64_IMM(BPF_SUB, R6, 10),
3722 			BPF_ALU64_IMM(BPF_SUB, R7, 10),
3723 			BPF_ALU64_IMM(BPF_SUB, R8, 10),
3724 			BPF_ALU64_IMM(BPF_SUB, R9, 10),
3725 			BPF_ALU64_REG(BPF_ADD, R0, R0),
3726 			BPF_ALU64_REG(BPF_ADD, R0, R1),
3727 			BPF_ALU64_REG(BPF_ADD, R0, R2),
3728 			BPF_ALU64_REG(BPF_ADD, R0, R3),
3729 			BPF_ALU64_REG(BPF_ADD, R0, R4),
3730 			BPF_ALU64_REG(BPF_ADD, R0, R5),
3731 			BPF_ALU64_REG(BPF_ADD, R0, R6),
3732 			BPF_ALU64_REG(BPF_ADD, R0, R7),
3733 			BPF_ALU64_REG(BPF_ADD, R0, R8),
3734 			BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3735 			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3736 			BPF_EXIT_INSN(),
3737 			BPF_ALU64_REG(BPF_ADD, R1, R0),
3738 			BPF_ALU64_REG(BPF_ADD, R1, R1),
3739 			BPF_ALU64_REG(BPF_ADD, R1, R2),
3740 			BPF_ALU64_REG(BPF_ADD, R1, R3),
3741 			BPF_ALU64_REG(BPF_ADD, R1, R4),
3742 			BPF_ALU64_REG(BPF_ADD, R1, R5),
3743 			BPF_ALU64_REG(BPF_ADD, R1, R6),
3744 			BPF_ALU64_REG(BPF_ADD, R1, R7),
3745 			BPF_ALU64_REG(BPF_ADD, R1, R8),
3746 			BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3747 			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3748 			BPF_EXIT_INSN(),
3749 			BPF_ALU64_REG(BPF_ADD, R2, R0),
3750 			BPF_ALU64_REG(BPF_ADD, R2, R1),
3751 			BPF_ALU64_REG(BPF_ADD, R2, R2),
3752 			BPF_ALU64_REG(BPF_ADD, R2, R3),
3753 			BPF_ALU64_REG(BPF_ADD, R2, R4),
3754 			BPF_ALU64_REG(BPF_ADD, R2, R5),
3755 			BPF_ALU64_REG(BPF_ADD, R2, R6),
3756 			BPF_ALU64_REG(BPF_ADD, R2, R7),
3757 			BPF_ALU64_REG(BPF_ADD, R2, R8),
3758 			BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3759 			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3760 			BPF_EXIT_INSN(),
3761 			BPF_ALU64_REG(BPF_ADD, R3, R0),
3762 			BPF_ALU64_REG(BPF_ADD, R3, R1),
3763 			BPF_ALU64_REG(BPF_ADD, R3, R2),
3764 			BPF_ALU64_REG(BPF_ADD, R3, R3),
3765 			BPF_ALU64_REG(BPF_ADD, R3, R4),
3766 			BPF_ALU64_REG(BPF_ADD, R3, R5),
3767 			BPF_ALU64_REG(BPF_ADD, R3, R6),
3768 			BPF_ALU64_REG(BPF_ADD, R3, R7),
3769 			BPF_ALU64_REG(BPF_ADD, R3, R8),
3770 			BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3771 			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
3772 			BPF_EXIT_INSN(),
3773 			BPF_ALU64_REG(BPF_ADD, R4, R0),
3774 			BPF_ALU64_REG(BPF_ADD, R4, R1),
3775 			BPF_ALU64_REG(BPF_ADD, R4, R2),
3776 			BPF_ALU64_REG(BPF_ADD, R4, R3),
3777 			BPF_ALU64_REG(BPF_ADD, R4, R4),
3778 			BPF_ALU64_REG(BPF_ADD, R4, R5),
3779 			BPF_ALU64_REG(BPF_ADD, R4, R6),
3780 			BPF_ALU64_REG(BPF_ADD, R4, R7),
3781 			BPF_ALU64_REG(BPF_ADD, R4, R8),
3782 			BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
3783 			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
3784 			BPF_EXIT_INSN(),
3785 			BPF_ALU64_REG(BPF_ADD, R5, R0),
3786 			BPF_ALU64_REG(BPF_ADD, R5, R1),
3787 			BPF_ALU64_REG(BPF_ADD, R5, R2),
3788 			BPF_ALU64_REG(BPF_ADD, R5, R3),
3789 			BPF_ALU64_REG(BPF_ADD, R5, R4),
3790 			BPF_ALU64_REG(BPF_ADD, R5, R5),
3791 			BPF_ALU64_REG(BPF_ADD, R5, R6),
3792 			BPF_ALU64_REG(BPF_ADD, R5, R7),
3793 			BPF_ALU64_REG(BPF_ADD, R5, R8),
3794 			BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
3795 			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
3796 			BPF_EXIT_INSN(),
3797 			BPF_ALU64_REG(BPF_ADD, R6, R0),
3798 			BPF_ALU64_REG(BPF_ADD, R6, R1),
3799 			BPF_ALU64_REG(BPF_ADD, R6, R2),
3800 			BPF_ALU64_REG(BPF_ADD, R6, R3),
3801 			BPF_ALU64_REG(BPF_ADD, R6, R4),
3802 			BPF_ALU64_REG(BPF_ADD, R6, R5),
3803 			BPF_ALU64_REG(BPF_ADD, R6, R6),
3804 			BPF_ALU64_REG(BPF_ADD, R6, R7),
3805 			BPF_ALU64_REG(BPF_ADD, R6, R8),
3806 			BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
3807 			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
3808 			BPF_EXIT_INSN(),
3809 			BPF_ALU64_REG(BPF_ADD, R7, R0),
3810 			BPF_ALU64_REG(BPF_ADD, R7, R1),
3811 			BPF_ALU64_REG(BPF_ADD, R7, R2),
3812 			BPF_ALU64_REG(BPF_ADD, R7, R3),
3813 			BPF_ALU64_REG(BPF_ADD, R7, R4),
3814 			BPF_ALU64_REG(BPF_ADD, R7, R5),
3815 			BPF_ALU64_REG(BPF_ADD, R7, R6),
3816 			BPF_ALU64_REG(BPF_ADD, R7, R7),
3817 			BPF_ALU64_REG(BPF_ADD, R7, R8),
3818 			BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
3819 			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
3820 			BPF_EXIT_INSN(),
3821 			BPF_ALU64_REG(BPF_ADD, R8, R0),
3822 			BPF_ALU64_REG(BPF_ADD, R8, R1),
3823 			BPF_ALU64_REG(BPF_ADD, R8, R2),
3824 			BPF_ALU64_REG(BPF_ADD, R8, R3),
3825 			BPF_ALU64_REG(BPF_ADD, R8, R4),
3826 			BPF_ALU64_REG(BPF_ADD, R8, R5),
3827 			BPF_ALU64_REG(BPF_ADD, R8, R6),
3828 			BPF_ALU64_REG(BPF_ADD, R8, R7),
3829 			BPF_ALU64_REG(BPF_ADD, R8, R8),
3830 			BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
3831 			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
3832 			BPF_EXIT_INSN(),
3833 			BPF_ALU64_REG(BPF_ADD, R9, R0),
3834 			BPF_ALU64_REG(BPF_ADD, R9, R1),
3835 			BPF_ALU64_REG(BPF_ADD, R9, R2),
3836 			BPF_ALU64_REG(BPF_ADD, R9, R3),
3837 			BPF_ALU64_REG(BPF_ADD, R9, R4),
3838 			BPF_ALU64_REG(BPF_ADD, R9, R5),
3839 			BPF_ALU64_REG(BPF_ADD, R9, R6),
3840 			BPF_ALU64_REG(BPF_ADD, R9, R7),
3841 			BPF_ALU64_REG(BPF_ADD, R9, R8),
3842 			BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
3843 			BPF_ALU64_REG(BPF_MOV, R0, R9),
3844 			BPF_EXIT_INSN(),
3845 		},
3846 		INTERNAL,
3847 		{ },
3848 		{ { 0, 2957380 } }
3849 	},
3850 	{
3851 		"INT: ADD 32-bit",
3852 		.u.insns_int = {
3853 			BPF_ALU32_IMM(BPF_MOV, R0, 20),
3854 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
3855 			BPF_ALU32_IMM(BPF_MOV, R2, 2),
3856 			BPF_ALU32_IMM(BPF_MOV, R3, 3),
3857 			BPF_ALU32_IMM(BPF_MOV, R4, 4),
3858 			BPF_ALU32_IMM(BPF_MOV, R5, 5),
3859 			BPF_ALU32_IMM(BPF_MOV, R6, 6),
3860 			BPF_ALU32_IMM(BPF_MOV, R7, 7),
3861 			BPF_ALU32_IMM(BPF_MOV, R8, 8),
3862 			BPF_ALU32_IMM(BPF_MOV, R9, 9),
3863 			BPF_ALU64_IMM(BPF_ADD, R1, 10),
3864 			BPF_ALU64_IMM(BPF_ADD, R2, 10),
3865 			BPF_ALU64_IMM(BPF_ADD, R3, 10),
3866 			BPF_ALU64_IMM(BPF_ADD, R4, 10),
3867 			BPF_ALU64_IMM(BPF_ADD, R5, 10),
3868 			BPF_ALU64_IMM(BPF_ADD, R6, 10),
3869 			BPF_ALU64_IMM(BPF_ADD, R7, 10),
3870 			BPF_ALU64_IMM(BPF_ADD, R8, 10),
3871 			BPF_ALU64_IMM(BPF_ADD, R9, 10),
3872 			BPF_ALU32_REG(BPF_ADD, R0, R1),
3873 			BPF_ALU32_REG(BPF_ADD, R0, R2),
3874 			BPF_ALU32_REG(BPF_ADD, R0, R3),
3875 			BPF_ALU32_REG(BPF_ADD, R0, R4),
3876 			BPF_ALU32_REG(BPF_ADD, R0, R5),
3877 			BPF_ALU32_REG(BPF_ADD, R0, R6),
3878 			BPF_ALU32_REG(BPF_ADD, R0, R7),
3879 			BPF_ALU32_REG(BPF_ADD, R0, R8),
3880 			BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3881 			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3882 			BPF_EXIT_INSN(),
3883 			BPF_ALU32_REG(BPF_ADD, R1, R0),
3884 			BPF_ALU32_REG(BPF_ADD, R1, R1),
3885 			BPF_ALU32_REG(BPF_ADD, R1, R2),
3886 			BPF_ALU32_REG(BPF_ADD, R1, R3),
3887 			BPF_ALU32_REG(BPF_ADD, R1, R4),
3888 			BPF_ALU32_REG(BPF_ADD, R1, R5),
3889 			BPF_ALU32_REG(BPF_ADD, R1, R6),
3890 			BPF_ALU32_REG(BPF_ADD, R1, R7),
3891 			BPF_ALU32_REG(BPF_ADD, R1, R8),
3892 			BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3893 			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3894 			BPF_EXIT_INSN(),
3895 			BPF_ALU32_REG(BPF_ADD, R2, R0),
3896 			BPF_ALU32_REG(BPF_ADD, R2, R1),
3897 			BPF_ALU32_REG(BPF_ADD, R2, R2),
3898 			BPF_ALU32_REG(BPF_ADD, R2, R3),
3899 			BPF_ALU32_REG(BPF_ADD, R2, R4),
3900 			BPF_ALU32_REG(BPF_ADD, R2, R5),
3901 			BPF_ALU32_REG(BPF_ADD, R2, R6),
3902 			BPF_ALU32_REG(BPF_ADD, R2, R7),
3903 			BPF_ALU32_REG(BPF_ADD, R2, R8),
3904 			BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3905 			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3906 			BPF_EXIT_INSN(),
3907 			BPF_ALU32_REG(BPF_ADD, R3, R0),
3908 			BPF_ALU32_REG(BPF_ADD, R3, R1),
3909 			BPF_ALU32_REG(BPF_ADD, R3, R2),
3910 			BPF_ALU32_REG(BPF_ADD, R3, R3),
3911 			BPF_ALU32_REG(BPF_ADD, R3, R4),
3912 			BPF_ALU32_REG(BPF_ADD, R3, R5),
3913 			BPF_ALU32_REG(BPF_ADD, R3, R6),
3914 			BPF_ALU32_REG(BPF_ADD, R3, R7),
3915 			BPF_ALU32_REG(BPF_ADD, R3, R8),
3916 			BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3917 			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
3918 			BPF_EXIT_INSN(),
3919 			BPF_ALU32_REG(BPF_ADD, R4, R0),
3920 			BPF_ALU32_REG(BPF_ADD, R4, R1),
3921 			BPF_ALU32_REG(BPF_ADD, R4, R2),
3922 			BPF_ALU32_REG(BPF_ADD, R4, R3),
3923 			BPF_ALU32_REG(BPF_ADD, R4, R4),
3924 			BPF_ALU32_REG(BPF_ADD, R4, R5),
3925 			BPF_ALU32_REG(BPF_ADD, R4, R6),
3926 			BPF_ALU32_REG(BPF_ADD, R4, R7),
3927 			BPF_ALU32_REG(BPF_ADD, R4, R8),
3928 			BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
3929 			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
3930 			BPF_EXIT_INSN(),
3931 			BPF_ALU32_REG(BPF_ADD, R5, R0),
3932 			BPF_ALU32_REG(BPF_ADD, R5, R1),
3933 			BPF_ALU32_REG(BPF_ADD, R5, R2),
3934 			BPF_ALU32_REG(BPF_ADD, R5, R3),
3935 			BPF_ALU32_REG(BPF_ADD, R5, R4),
3936 			BPF_ALU32_REG(BPF_ADD, R5, R5),
3937 			BPF_ALU32_REG(BPF_ADD, R5, R6),
3938 			BPF_ALU32_REG(BPF_ADD, R5, R7),
3939 			BPF_ALU32_REG(BPF_ADD, R5, R8),
3940 			BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
3941 			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
3942 			BPF_EXIT_INSN(),
3943 			BPF_ALU32_REG(BPF_ADD, R6, R0),
3944 			BPF_ALU32_REG(BPF_ADD, R6, R1),
3945 			BPF_ALU32_REG(BPF_ADD, R6, R2),
3946 			BPF_ALU32_REG(BPF_ADD, R6, R3),
3947 			BPF_ALU32_REG(BPF_ADD, R6, R4),
3948 			BPF_ALU32_REG(BPF_ADD, R6, R5),
3949 			BPF_ALU32_REG(BPF_ADD, R6, R6),
3950 			BPF_ALU32_REG(BPF_ADD, R6, R7),
3951 			BPF_ALU32_REG(BPF_ADD, R6, R8),
3952 			BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
3953 			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
3954 			BPF_EXIT_INSN(),
3955 			BPF_ALU32_REG(BPF_ADD, R7, R0),
3956 			BPF_ALU32_REG(BPF_ADD, R7, R1),
3957 			BPF_ALU32_REG(BPF_ADD, R7, R2),
3958 			BPF_ALU32_REG(BPF_ADD, R7, R3),
3959 			BPF_ALU32_REG(BPF_ADD, R7, R4),
3960 			BPF_ALU32_REG(BPF_ADD, R7, R5),
3961 			BPF_ALU32_REG(BPF_ADD, R7, R6),
3962 			BPF_ALU32_REG(BPF_ADD, R7, R7),
3963 			BPF_ALU32_REG(BPF_ADD, R7, R8),
3964 			BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
3965 			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
3966 			BPF_EXIT_INSN(),
3967 			BPF_ALU32_REG(BPF_ADD, R8, R0),
3968 			BPF_ALU32_REG(BPF_ADD, R8, R1),
3969 			BPF_ALU32_REG(BPF_ADD, R8, R2),
3970 			BPF_ALU32_REG(BPF_ADD, R8, R3),
3971 			BPF_ALU32_REG(BPF_ADD, R8, R4),
3972 			BPF_ALU32_REG(BPF_ADD, R8, R5),
3973 			BPF_ALU32_REG(BPF_ADD, R8, R6),
3974 			BPF_ALU32_REG(BPF_ADD, R8, R7),
3975 			BPF_ALU32_REG(BPF_ADD, R8, R8),
3976 			BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
3977 			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
3978 			BPF_EXIT_INSN(),
3979 			BPF_ALU32_REG(BPF_ADD, R9, R0),
3980 			BPF_ALU32_REG(BPF_ADD, R9, R1),
3981 			BPF_ALU32_REG(BPF_ADD, R9, R2),
3982 			BPF_ALU32_REG(BPF_ADD, R9, R3),
3983 			BPF_ALU32_REG(BPF_ADD, R9, R4),
3984 			BPF_ALU32_REG(BPF_ADD, R9, R5),
3985 			BPF_ALU32_REG(BPF_ADD, R9, R6),
3986 			BPF_ALU32_REG(BPF_ADD, R9, R7),
3987 			BPF_ALU32_REG(BPF_ADD, R9, R8),
3988 			BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
3989 			BPF_ALU32_REG(BPF_MOV, R0, R9),
3990 			BPF_EXIT_INSN(),
3991 		},
3992 		INTERNAL,
3993 		{ },
3994 		{ { 0, 2957380 } }
3995 	},
3996 	{	/* Mainly checking JIT here. */
3997 		"INT: SUB",
3998 		.u.insns_int = {
3999 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4000 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4001 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4002 			BPF_ALU64_IMM(BPF_MOV, R3, 3),
4003 			BPF_ALU64_IMM(BPF_MOV, R4, 4),
4004 			BPF_ALU64_IMM(BPF_MOV, R5, 5),
4005 			BPF_ALU64_IMM(BPF_MOV, R6, 6),
4006 			BPF_ALU64_IMM(BPF_MOV, R7, 7),
4007 			BPF_ALU64_IMM(BPF_MOV, R8, 8),
4008 			BPF_ALU64_IMM(BPF_MOV, R9, 9),
4009 			BPF_ALU64_REG(BPF_SUB, R0, R0),
4010 			BPF_ALU64_REG(BPF_SUB, R0, R1),
4011 			BPF_ALU64_REG(BPF_SUB, R0, R2),
4012 			BPF_ALU64_REG(BPF_SUB, R0, R3),
4013 			BPF_ALU64_REG(BPF_SUB, R0, R4),
4014 			BPF_ALU64_REG(BPF_SUB, R0, R5),
4015 			BPF_ALU64_REG(BPF_SUB, R0, R6),
4016 			BPF_ALU64_REG(BPF_SUB, R0, R7),
4017 			BPF_ALU64_REG(BPF_SUB, R0, R8),
4018 			BPF_ALU64_REG(BPF_SUB, R0, R9),
4019 			BPF_ALU64_IMM(BPF_SUB, R0, 10),
4020 			BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
4021 			BPF_EXIT_INSN(),
4022 			BPF_ALU64_REG(BPF_SUB, R1, R0),
4023 			BPF_ALU64_REG(BPF_SUB, R1, R2),
4024 			BPF_ALU64_REG(BPF_SUB, R1, R3),
4025 			BPF_ALU64_REG(BPF_SUB, R1, R4),
4026 			BPF_ALU64_REG(BPF_SUB, R1, R5),
4027 			BPF_ALU64_REG(BPF_SUB, R1, R6),
4028 			BPF_ALU64_REG(BPF_SUB, R1, R7),
4029 			BPF_ALU64_REG(BPF_SUB, R1, R8),
4030 			BPF_ALU64_REG(BPF_SUB, R1, R9),
4031 			BPF_ALU64_IMM(BPF_SUB, R1, 10),
4032 			BPF_ALU64_REG(BPF_SUB, R2, R0),
4033 			BPF_ALU64_REG(BPF_SUB, R2, R1),
4034 			BPF_ALU64_REG(BPF_SUB, R2, R3),
4035 			BPF_ALU64_REG(BPF_SUB, R2, R4),
4036 			BPF_ALU64_REG(BPF_SUB, R2, R5),
4037 			BPF_ALU64_REG(BPF_SUB, R2, R6),
4038 			BPF_ALU64_REG(BPF_SUB, R2, R7),
4039 			BPF_ALU64_REG(BPF_SUB, R2, R8),
4040 			BPF_ALU64_REG(BPF_SUB, R2, R9),
4041 			BPF_ALU64_IMM(BPF_SUB, R2, 10),
4042 			BPF_ALU64_REG(BPF_SUB, R3, R0),
4043 			BPF_ALU64_REG(BPF_SUB, R3, R1),
4044 			BPF_ALU64_REG(BPF_SUB, R3, R2),
4045 			BPF_ALU64_REG(BPF_SUB, R3, R4),
4046 			BPF_ALU64_REG(BPF_SUB, R3, R5),
4047 			BPF_ALU64_REG(BPF_SUB, R3, R6),
4048 			BPF_ALU64_REG(BPF_SUB, R3, R7),
4049 			BPF_ALU64_REG(BPF_SUB, R3, R8),
4050 			BPF_ALU64_REG(BPF_SUB, R3, R9),
4051 			BPF_ALU64_IMM(BPF_SUB, R3, 10),
4052 			BPF_ALU64_REG(BPF_SUB, R4, R0),
4053 			BPF_ALU64_REG(BPF_SUB, R4, R1),
4054 			BPF_ALU64_REG(BPF_SUB, R4, R2),
4055 			BPF_ALU64_REG(BPF_SUB, R4, R3),
4056 			BPF_ALU64_REG(BPF_SUB, R4, R5),
4057 			BPF_ALU64_REG(BPF_SUB, R4, R6),
4058 			BPF_ALU64_REG(BPF_SUB, R4, R7),
4059 			BPF_ALU64_REG(BPF_SUB, R4, R8),
4060 			BPF_ALU64_REG(BPF_SUB, R4, R9),
4061 			BPF_ALU64_IMM(BPF_SUB, R4, 10),
4062 			BPF_ALU64_REG(BPF_SUB, R5, R0),
4063 			BPF_ALU64_REG(BPF_SUB, R5, R1),
4064 			BPF_ALU64_REG(BPF_SUB, R5, R2),
4065 			BPF_ALU64_REG(BPF_SUB, R5, R3),
4066 			BPF_ALU64_REG(BPF_SUB, R5, R4),
4067 			BPF_ALU64_REG(BPF_SUB, R5, R6),
4068 			BPF_ALU64_REG(BPF_SUB, R5, R7),
4069 			BPF_ALU64_REG(BPF_SUB, R5, R8),
4070 			BPF_ALU64_REG(BPF_SUB, R5, R9),
4071 			BPF_ALU64_IMM(BPF_SUB, R5, 10),
4072 			BPF_ALU64_REG(BPF_SUB, R6, R0),
4073 			BPF_ALU64_REG(BPF_SUB, R6, R1),
4074 			BPF_ALU64_REG(BPF_SUB, R6, R2),
4075 			BPF_ALU64_REG(BPF_SUB, R6, R3),
4076 			BPF_ALU64_REG(BPF_SUB, R6, R4),
4077 			BPF_ALU64_REG(BPF_SUB, R6, R5),
4078 			BPF_ALU64_REG(BPF_SUB, R6, R7),
4079 			BPF_ALU64_REG(BPF_SUB, R6, R8),
4080 			BPF_ALU64_REG(BPF_SUB, R6, R9),
4081 			BPF_ALU64_IMM(BPF_SUB, R6, 10),
4082 			BPF_ALU64_REG(BPF_SUB, R7, R0),
4083 			BPF_ALU64_REG(BPF_SUB, R7, R1),
4084 			BPF_ALU64_REG(BPF_SUB, R7, R2),
4085 			BPF_ALU64_REG(BPF_SUB, R7, R3),
4086 			BPF_ALU64_REG(BPF_SUB, R7, R4),
4087 			BPF_ALU64_REG(BPF_SUB, R7, R5),
4088 			BPF_ALU64_REG(BPF_SUB, R7, R6),
4089 			BPF_ALU64_REG(BPF_SUB, R7, R8),
4090 			BPF_ALU64_REG(BPF_SUB, R7, R9),
4091 			BPF_ALU64_IMM(BPF_SUB, R7, 10),
4092 			BPF_ALU64_REG(BPF_SUB, R8, R0),
4093 			BPF_ALU64_REG(BPF_SUB, R8, R1),
4094 			BPF_ALU64_REG(BPF_SUB, R8, R2),
4095 			BPF_ALU64_REG(BPF_SUB, R8, R3),
4096 			BPF_ALU64_REG(BPF_SUB, R8, R4),
4097 			BPF_ALU64_REG(BPF_SUB, R8, R5),
4098 			BPF_ALU64_REG(BPF_SUB, R8, R6),
4099 			BPF_ALU64_REG(BPF_SUB, R8, R7),
4100 			BPF_ALU64_REG(BPF_SUB, R8, R9),
4101 			BPF_ALU64_IMM(BPF_SUB, R8, 10),
4102 			BPF_ALU64_REG(BPF_SUB, R9, R0),
4103 			BPF_ALU64_REG(BPF_SUB, R9, R1),
4104 			BPF_ALU64_REG(BPF_SUB, R9, R2),
4105 			BPF_ALU64_REG(BPF_SUB, R9, R3),
4106 			BPF_ALU64_REG(BPF_SUB, R9, R4),
4107 			BPF_ALU64_REG(BPF_SUB, R9, R5),
4108 			BPF_ALU64_REG(BPF_SUB, R9, R6),
4109 			BPF_ALU64_REG(BPF_SUB, R9, R7),
4110 			BPF_ALU64_REG(BPF_SUB, R9, R8),
4111 			BPF_ALU64_IMM(BPF_SUB, R9, 10),
4112 			BPF_ALU64_IMM(BPF_SUB, R0, 10),
4113 			BPF_ALU64_IMM(BPF_NEG, R0, 0),
4114 			BPF_ALU64_REG(BPF_SUB, R0, R1),
4115 			BPF_ALU64_REG(BPF_SUB, R0, R2),
4116 			BPF_ALU64_REG(BPF_SUB, R0, R3),
4117 			BPF_ALU64_REG(BPF_SUB, R0, R4),
4118 			BPF_ALU64_REG(BPF_SUB, R0, R5),
4119 			BPF_ALU64_REG(BPF_SUB, R0, R6),
4120 			BPF_ALU64_REG(BPF_SUB, R0, R7),
4121 			BPF_ALU64_REG(BPF_SUB, R0, R8),
4122 			BPF_ALU64_REG(BPF_SUB, R0, R9),
4123 			BPF_EXIT_INSN(),
4124 		},
4125 		INTERNAL,
4126 		{ },
4127 		{ { 0, 11 } }
4128 	},
4129 	{	/* Mainly checking JIT here. */
4130 		"INT: XOR",
4131 		.u.insns_int = {
4132 			BPF_ALU64_REG(BPF_SUB, R0, R0),
4133 			BPF_ALU64_REG(BPF_XOR, R1, R1),
4134 			BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
4135 			BPF_EXIT_INSN(),
4136 			BPF_ALU64_IMM(BPF_MOV, R0, 10),
4137 			BPF_ALU64_IMM(BPF_MOV, R1, -1),
4138 			BPF_ALU64_REG(BPF_SUB, R1, R1),
4139 			BPF_ALU64_REG(BPF_XOR, R2, R2),
4140 			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
4141 			BPF_EXIT_INSN(),
4142 			BPF_ALU64_REG(BPF_SUB, R2, R2),
4143 			BPF_ALU64_REG(BPF_XOR, R3, R3),
4144 			BPF_ALU64_IMM(BPF_MOV, R0, 10),
4145 			BPF_ALU64_IMM(BPF_MOV, R1, -1),
4146 			BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
4147 			BPF_EXIT_INSN(),
4148 			BPF_ALU64_REG(BPF_SUB, R3, R3),
4149 			BPF_ALU64_REG(BPF_XOR, R4, R4),
4150 			BPF_ALU64_IMM(BPF_MOV, R2, 1),
4151 			BPF_ALU64_IMM(BPF_MOV, R5, -1),
4152 			BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
4153 			BPF_EXIT_INSN(),
4154 			BPF_ALU64_REG(BPF_SUB, R4, R4),
4155 			BPF_ALU64_REG(BPF_XOR, R5, R5),
4156 			BPF_ALU64_IMM(BPF_MOV, R3, 1),
4157 			BPF_ALU64_IMM(BPF_MOV, R7, -1),
4158 			BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
4159 			BPF_EXIT_INSN(),
4160 			BPF_ALU64_IMM(BPF_MOV, R5, 1),
4161 			BPF_ALU64_REG(BPF_SUB, R5, R5),
4162 			BPF_ALU64_REG(BPF_XOR, R6, R6),
4163 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4164 			BPF_ALU64_IMM(BPF_MOV, R8, -1),
4165 			BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
4166 			BPF_EXIT_INSN(),
4167 			BPF_ALU64_REG(BPF_SUB, R6, R6),
4168 			BPF_ALU64_REG(BPF_XOR, R7, R7),
4169 			BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
4170 			BPF_EXIT_INSN(),
4171 			BPF_ALU64_REG(BPF_SUB, R7, R7),
4172 			BPF_ALU64_REG(BPF_XOR, R8, R8),
4173 			BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
4174 			BPF_EXIT_INSN(),
4175 			BPF_ALU64_REG(BPF_SUB, R8, R8),
4176 			BPF_ALU64_REG(BPF_XOR, R9, R9),
4177 			BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
4178 			BPF_EXIT_INSN(),
4179 			BPF_ALU64_REG(BPF_SUB, R9, R9),
4180 			BPF_ALU64_REG(BPF_XOR, R0, R0),
4181 			BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
4182 			BPF_EXIT_INSN(),
4183 			BPF_ALU64_REG(BPF_SUB, R1, R1),
4184 			BPF_ALU64_REG(BPF_XOR, R0, R0),
4185 			BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
4186 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4187 			BPF_EXIT_INSN(),
4188 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
4189 			BPF_EXIT_INSN(),
4190 		},
4191 		INTERNAL,
4192 		{ },
4193 		{ { 0, 1 } }
4194 	},
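	/*
	 * For the MUL test below, the first checkpoint works out as
	 * 11 * 11 * 9! * 10 = 439084800; the later checks split the 64-bit
	 * products into high and low words (RSH, LSH + ARSH) so that
	 * multiplication overflowing 32 bits is verified as well.
	 */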
4195 	{	/* Mainly checking JIT here. */
4196 		"INT: MUL",
4197 		.u.insns_int = {
4198 			BPF_ALU64_IMM(BPF_MOV, R0, 11),
4199 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4200 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4201 			BPF_ALU64_IMM(BPF_MOV, R3, 3),
4202 			BPF_ALU64_IMM(BPF_MOV, R4, 4),
4203 			BPF_ALU64_IMM(BPF_MOV, R5, 5),
4204 			BPF_ALU64_IMM(BPF_MOV, R6, 6),
4205 			BPF_ALU64_IMM(BPF_MOV, R7, 7),
4206 			BPF_ALU64_IMM(BPF_MOV, R8, 8),
4207 			BPF_ALU64_IMM(BPF_MOV, R9, 9),
4208 			BPF_ALU64_REG(BPF_MUL, R0, R0),
4209 			BPF_ALU64_REG(BPF_MUL, R0, R1),
4210 			BPF_ALU64_REG(BPF_MUL, R0, R2),
4211 			BPF_ALU64_REG(BPF_MUL, R0, R3),
4212 			BPF_ALU64_REG(BPF_MUL, R0, R4),
4213 			BPF_ALU64_REG(BPF_MUL, R0, R5),
4214 			BPF_ALU64_REG(BPF_MUL, R0, R6),
4215 			BPF_ALU64_REG(BPF_MUL, R0, R7),
4216 			BPF_ALU64_REG(BPF_MUL, R0, R8),
4217 			BPF_ALU64_REG(BPF_MUL, R0, R9),
4218 			BPF_ALU64_IMM(BPF_MUL, R0, 10),
4219 			BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
4220 			BPF_EXIT_INSN(),
4221 			BPF_ALU64_REG(BPF_MUL, R1, R0),
4222 			BPF_ALU64_REG(BPF_MUL, R1, R2),
4223 			BPF_ALU64_REG(BPF_MUL, R1, R3),
4224 			BPF_ALU64_REG(BPF_MUL, R1, R4),
4225 			BPF_ALU64_REG(BPF_MUL, R1, R5),
4226 			BPF_ALU64_REG(BPF_MUL, R1, R6),
4227 			BPF_ALU64_REG(BPF_MUL, R1, R7),
4228 			BPF_ALU64_REG(BPF_MUL, R1, R8),
4229 			BPF_ALU64_REG(BPF_MUL, R1, R9),
4230 			BPF_ALU64_IMM(BPF_MUL, R1, 10),
4231 			BPF_ALU64_REG(BPF_MOV, R2, R1),
4232 			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4233 			BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
4234 			BPF_EXIT_INSN(),
4235 			BPF_ALU64_IMM(BPF_LSH, R1, 32),
4236 			BPF_ALU64_IMM(BPF_ARSH, R1, 32),
4237 			BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
4238 			BPF_EXIT_INSN(),
4239 			BPF_ALU64_REG(BPF_MUL, R2, R0),
4240 			BPF_ALU64_REG(BPF_MUL, R2, R1),
4241 			BPF_ALU64_REG(BPF_MUL, R2, R3),
4242 			BPF_ALU64_REG(BPF_MUL, R2, R4),
4243 			BPF_ALU64_REG(BPF_MUL, R2, R5),
4244 			BPF_ALU64_REG(BPF_MUL, R2, R6),
4245 			BPF_ALU64_REG(BPF_MUL, R2, R7),
4246 			BPF_ALU64_REG(BPF_MUL, R2, R8),
4247 			BPF_ALU64_REG(BPF_MUL, R2, R9),
4248 			BPF_ALU64_IMM(BPF_MUL, R2, 10),
4249 			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4250 			BPF_ALU64_REG(BPF_MOV, R0, R2),
4251 			BPF_EXIT_INSN(),
4252 		},
4253 		INTERNAL,
4254 		{ },
4255 		{ { 0, 0x35d97ef2 } }
4256 	},
4257 	{	/* Mainly checking JIT here. */
4258 		"MOV REG64",
4259 		.u.insns_int = {
4260 			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4261 			BPF_MOV64_REG(R1, R0),
4262 			BPF_MOV64_REG(R2, R1),
4263 			BPF_MOV64_REG(R3, R2),
4264 			BPF_MOV64_REG(R4, R3),
4265 			BPF_MOV64_REG(R5, R4),
4266 			BPF_MOV64_REG(R6, R5),
4267 			BPF_MOV64_REG(R7, R6),
4268 			BPF_MOV64_REG(R8, R7),
4269 			BPF_MOV64_REG(R9, R8),
4270 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4271 			BPF_ALU64_IMM(BPF_MOV, R1, 0),
4272 			BPF_ALU64_IMM(BPF_MOV, R2, 0),
4273 			BPF_ALU64_IMM(BPF_MOV, R3, 0),
4274 			BPF_ALU64_IMM(BPF_MOV, R4, 0),
4275 			BPF_ALU64_IMM(BPF_MOV, R5, 0),
4276 			BPF_ALU64_IMM(BPF_MOV, R6, 0),
4277 			BPF_ALU64_IMM(BPF_MOV, R7, 0),
4278 			BPF_ALU64_IMM(BPF_MOV, R8, 0),
4279 			BPF_ALU64_IMM(BPF_MOV, R9, 0),
4280 			BPF_ALU64_REG(BPF_ADD, R0, R0),
4281 			BPF_ALU64_REG(BPF_ADD, R0, R1),
4282 			BPF_ALU64_REG(BPF_ADD, R0, R2),
4283 			BPF_ALU64_REG(BPF_ADD, R0, R3),
4284 			BPF_ALU64_REG(BPF_ADD, R0, R4),
4285 			BPF_ALU64_REG(BPF_ADD, R0, R5),
4286 			BPF_ALU64_REG(BPF_ADD, R0, R6),
4287 			BPF_ALU64_REG(BPF_ADD, R0, R7),
4288 			BPF_ALU64_REG(BPF_ADD, R0, R8),
4289 			BPF_ALU64_REG(BPF_ADD, R0, R9),
4290 			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4291 			BPF_EXIT_INSN(),
4292 		},
4293 		INTERNAL,
4294 		{ },
4295 		{ { 0, 0xfefe } }
4296 	},
4297 	{	/* Mainly checking JIT here. */
4298 		"MOV REG32",
4299 		.u.insns_int = {
4300 			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4301 			BPF_MOV64_REG(R1, R0),
4302 			BPF_MOV64_REG(R2, R1),
4303 			BPF_MOV64_REG(R3, R2),
4304 			BPF_MOV64_REG(R4, R3),
4305 			BPF_MOV64_REG(R5, R4),
4306 			BPF_MOV64_REG(R6, R5),
4307 			BPF_MOV64_REG(R7, R6),
4308 			BPF_MOV64_REG(R8, R7),
4309 			BPF_MOV64_REG(R9, R8),
4310 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
4311 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
4312 			BPF_ALU32_IMM(BPF_MOV, R2, 0),
4313 			BPF_ALU32_IMM(BPF_MOV, R3, 0),
4314 			BPF_ALU32_IMM(BPF_MOV, R4, 0),
4315 			BPF_ALU32_IMM(BPF_MOV, R5, 0),
4316 			BPF_ALU32_IMM(BPF_MOV, R6, 0),
4317 			BPF_ALU32_IMM(BPF_MOV, R7, 0),
4318 			BPF_ALU32_IMM(BPF_MOV, R8, 0),
4319 			BPF_ALU32_IMM(BPF_MOV, R9, 0),
4320 			BPF_ALU64_REG(BPF_ADD, R0, R0),
4321 			BPF_ALU64_REG(BPF_ADD, R0, R1),
4322 			BPF_ALU64_REG(BPF_ADD, R0, R2),
4323 			BPF_ALU64_REG(BPF_ADD, R0, R3),
4324 			BPF_ALU64_REG(BPF_ADD, R0, R4),
4325 			BPF_ALU64_REG(BPF_ADD, R0, R5),
4326 			BPF_ALU64_REG(BPF_ADD, R0, R6),
4327 			BPF_ALU64_REG(BPF_ADD, R0, R7),
4328 			BPF_ALU64_REG(BPF_ADD, R0, R8),
4329 			BPF_ALU64_REG(BPF_ADD, R0, R9),
4330 			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4331 			BPF_EXIT_INSN(),
4332 		},
4333 		INTERNAL,
4334 		{ },
4335 		{ { 0, 0xfefe } }
4336 	},
4337 	{	/* Mainly checking JIT here. */
4338 		"LD IMM64",
4339 		.u.insns_int = {
4340 			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4341 			BPF_MOV64_REG(R1, R0),
4342 			BPF_MOV64_REG(R2, R1),
4343 			BPF_MOV64_REG(R3, R2),
4344 			BPF_MOV64_REG(R4, R3),
4345 			BPF_MOV64_REG(R5, R4),
4346 			BPF_MOV64_REG(R6, R5),
4347 			BPF_MOV64_REG(R7, R6),
4348 			BPF_MOV64_REG(R8, R7),
4349 			BPF_MOV64_REG(R9, R8),
4350 			BPF_LD_IMM64(R0, 0x0LL),
4351 			BPF_LD_IMM64(R1, 0x0LL),
4352 			BPF_LD_IMM64(R2, 0x0LL),
4353 			BPF_LD_IMM64(R3, 0x0LL),
4354 			BPF_LD_IMM64(R4, 0x0LL),
4355 			BPF_LD_IMM64(R5, 0x0LL),
4356 			BPF_LD_IMM64(R6, 0x0LL),
4357 			BPF_LD_IMM64(R7, 0x0LL),
4358 			BPF_LD_IMM64(R8, 0x0LL),
4359 			BPF_LD_IMM64(R9, 0x0LL),
4360 			BPF_ALU64_REG(BPF_ADD, R0, R0),
4361 			BPF_ALU64_REG(BPF_ADD, R0, R1),
4362 			BPF_ALU64_REG(BPF_ADD, R0, R2),
4363 			BPF_ALU64_REG(BPF_ADD, R0, R3),
4364 			BPF_ALU64_REG(BPF_ADD, R0, R4),
4365 			BPF_ALU64_REG(BPF_ADD, R0, R5),
4366 			BPF_ALU64_REG(BPF_ADD, R0, R6),
4367 			BPF_ALU64_REG(BPF_ADD, R0, R7),
4368 			BPF_ALU64_REG(BPF_ADD, R0, R8),
4369 			BPF_ALU64_REG(BPF_ADD, R0, R9),
4370 			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4371 			BPF_EXIT_INSN(),
4372 		},
4373 		INTERNAL,
4374 		{ },
4375 		{ { 0, 0xfefe } }
4376 	},
4377 	{
4378 		"INT: ALU MIX",
4379 		.u.insns_int = {
4380 			BPF_ALU64_IMM(BPF_MOV, R0, 11),
4381 			BPF_ALU64_IMM(BPF_ADD, R0, -1),
4382 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4383 			BPF_ALU64_IMM(BPF_XOR, R2, 3),
4384 			BPF_ALU64_REG(BPF_DIV, R0, R2),
4385 			BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
4386 			BPF_EXIT_INSN(),
4387 			BPF_ALU64_IMM(BPF_MOD, R0, 3),
4388 			BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
4389 			BPF_EXIT_INSN(),
4390 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
4391 			BPF_EXIT_INSN(),
4392 		},
4393 		INTERNAL,
4394 		{ },
4395 		{ { 0, -1 } }
4396 	},
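	/*
	 * "shifts by register" takes every shift count from a register
	 * instead of an immediate: -1234 is 0xfffffb2e as a u32, the 32-bit
	 * logical right shift by one gives 0x7ffffd97, and masking with 63
	 * yields the count 46 used for the 64-bit LSH/ARSH round trip that
	 * must end at -617.
	 */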
4397 	{
4398 		"INT: shifts by register",
4399 		.u.insns_int = {
4400 			BPF_MOV64_IMM(R0, -1234),
4401 			BPF_MOV64_IMM(R1, 1),
4402 			BPF_ALU32_REG(BPF_RSH, R0, R1),
4403 			BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
4404 			BPF_EXIT_INSN(),
4405 			BPF_MOV64_IMM(R2, 1),
4406 			BPF_ALU64_REG(BPF_LSH, R0, R2),
4407 			BPF_MOV32_IMM(R4, -1234),
4408 			BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
4409 			BPF_EXIT_INSN(),
4410 			BPF_ALU64_IMM(BPF_AND, R4, 63),
4411 			BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <<= 46 */
4412 			BPF_MOV64_IMM(R3, 47),
4413 			BPF_ALU64_REG(BPF_ARSH, R0, R3),
4414 			BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
4415 			BPF_EXIT_INSN(),
4416 			BPF_MOV64_IMM(R2, 1),
4417 			BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
4418 			BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
4419 			BPF_EXIT_INSN(),
4420 			BPF_MOV64_IMM(R4, 4),
4421 			BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
4422 			BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
4423 			BPF_EXIT_INSN(),
4424 			BPF_MOV64_IMM(R4, 5),
4425 			BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
4426 			BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
4427 			BPF_EXIT_INSN(),
4428 			BPF_MOV64_IMM(R0, -1),
4429 			BPF_EXIT_INSN(),
4430 		},
4431 		INTERNAL,
4432 		{ },
4433 		{ { 0, -1 } }
4434 	},
4435 #ifdef CONFIG_32BIT
4436 	{
4437 		"INT: 32-bit context pointer word order and zero-extension",
4438 		.u.insns_int = {
4439 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
4440 			BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
4441 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
4442 			BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
4443 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
4444 			BPF_EXIT_INSN(),
4445 		},
4446 		INTERNAL,
4447 		{ },
4448 		{ { 0, 1 } }
4449 	},
4450 #endif
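	/*
	 * The "check: ..." entries below feed deliberately malformed classic
	 * BPF programs to the checker; FLAG_EXPECTED_FAIL means the program
	 * must be rejected with -EINVAL instead of being run.
	 */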
4451 	{
4452 		"check: missing ret",
4453 		.u.insns = {
4454 			BPF_STMT(BPF_LD | BPF_IMM, 1),
4455 		},
4456 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4457 		{ },
4458 		{ },
4459 		.fill_helper = NULL,
4460 		.expected_errcode = -EINVAL,
4461 	},
4462 	{
4463 		"check: div_k_0",
4464 		.u.insns = {
4465 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
4466 			BPF_STMT(BPF_RET | BPF_K, 0)
4467 		},
4468 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4469 		{ },
4470 		{ },
4471 		.fill_helper = NULL,
4472 		.expected_errcode = -EINVAL,
4473 	},
4474 	{
4475 		"check: unknown insn",
4476 		.u.insns = {
4477 			/* seccomp insn, rejected in socket filter */
4478 			BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
4479 			BPF_STMT(BPF_RET | BPF_K, 0)
4480 		},
4481 		CLASSIC | FLAG_EXPECTED_FAIL,
4482 		{ },
4483 		{ },
4484 		.fill_helper = NULL,
4485 		.expected_errcode = -EINVAL,
4486 	},
4487 	{
4488 		"check: out of range spill/fill",
4489 		.u.insns = {
4490 			BPF_STMT(BPF_STX, 16),
4491 			BPF_STMT(BPF_RET | BPF_K, 0)
4492 		},
4493 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4494 		{ },
4495 		{ },
4496 		.fill_helper = NULL,
4497 		.expected_errcode = -EINVAL,
4498 	},
4499 	{
4500 		"JUMPS + HOLES",
4501 		.u.insns = {
4502 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4503 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
4504 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4505 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4506 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4507 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4508 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4509 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4510 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4511 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4512 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4513 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4514 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4515 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4516 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4517 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
4518 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4519 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
4520 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4521 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4522 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4523 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4524 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4525 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4526 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4527 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4528 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4529 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4530 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4531 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4532 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4533 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4534 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4535 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4536 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
4537 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
4538 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4539 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4540 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4541 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4542 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4543 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4544 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4545 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4546 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4547 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4548 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4549 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4550 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4551 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4552 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4553 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4554 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
4555 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
4556 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4557 			BPF_STMT(BPF_RET | BPF_A, 0),
4558 			BPF_STMT(BPF_RET | BPF_A, 0),
4559 		},
4560 		CLASSIC,
4561 		{ 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
4562 		  0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
4563 		  0x08, 0x00,
4564 		  0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
4565 		  0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
4566 		  0xc0, 0xa8, 0x33, 0x01,
4567 		  0xc0, 0xa8, 0x33, 0x02,
4568 		  0xbb, 0xb6,
4569 		  0xa9, 0xfa,
4570 		  0x00, 0x14, 0x00, 0x00,
4571 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4572 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4573 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4574 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4575 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4576 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4577 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4578 		  0xcc, 0xcc, 0xcc, 0xcc },
4579 		{ { 88, 0x001b } }
4580 	},
4581 	{
4582 		"check: RET X",
4583 		.u.insns = {
4584 			BPF_STMT(BPF_RET | BPF_X, 0),
4585 		},
4586 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4587 		{ },
4588 		{ },
4589 		.fill_helper = NULL,
4590 		.expected_errcode = -EINVAL,
4591 	},
4592 	{
4593 		"check: LDX + RET X",
4594 		.u.insns = {
4595 			BPF_STMT(BPF_LDX | BPF_IMM, 42),
4596 			BPF_STMT(BPF_RET | BPF_X, 0),
4597 		},
4598 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4599 		{ },
4600 		{ },
4601 		.fill_helper = NULL,
4602 		.expected_errcode = -EINVAL,
4603 	},
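	/*
	 * The two M[] tests below walk all 16 classic BPF scratch memory
	 * slots: the first bumps a counter as it spills and reloads each
	 * slot (100 + 16 = 116), the second stores 16 distinct constants
	 * and sums them back up.
	 */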
4604 	{	/* Mainly checking JIT here. */
4605 		"M[]: alt STX + LDX",
4606 		.u.insns = {
4607 			BPF_STMT(BPF_LDX | BPF_IMM, 100),
4608 			BPF_STMT(BPF_STX, 0),
4609 			BPF_STMT(BPF_LDX | BPF_MEM, 0),
4610 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4611 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4612 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4613 			BPF_STMT(BPF_STX, 1),
4614 			BPF_STMT(BPF_LDX | BPF_MEM, 1),
4615 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4616 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4617 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4618 			BPF_STMT(BPF_STX, 2),
4619 			BPF_STMT(BPF_LDX | BPF_MEM, 2),
4620 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4621 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4622 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4623 			BPF_STMT(BPF_STX, 3),
4624 			BPF_STMT(BPF_LDX | BPF_MEM, 3),
4625 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4626 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4627 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4628 			BPF_STMT(BPF_STX, 4),
4629 			BPF_STMT(BPF_LDX | BPF_MEM, 4),
4630 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4631 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4632 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4633 			BPF_STMT(BPF_STX, 5),
4634 			BPF_STMT(BPF_LDX | BPF_MEM, 5),
4635 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4636 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4637 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4638 			BPF_STMT(BPF_STX, 6),
4639 			BPF_STMT(BPF_LDX | BPF_MEM, 6),
4640 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4641 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4642 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4643 			BPF_STMT(BPF_STX, 7),
4644 			BPF_STMT(BPF_LDX | BPF_MEM, 7),
4645 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4646 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4647 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4648 			BPF_STMT(BPF_STX, 8),
4649 			BPF_STMT(BPF_LDX | BPF_MEM, 8),
4650 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4651 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4652 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4653 			BPF_STMT(BPF_STX, 9),
4654 			BPF_STMT(BPF_LDX | BPF_MEM, 9),
4655 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4656 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4657 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4658 			BPF_STMT(BPF_STX, 10),
4659 			BPF_STMT(BPF_LDX | BPF_MEM, 10),
4660 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4661 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4662 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4663 			BPF_STMT(BPF_STX, 11),
4664 			BPF_STMT(BPF_LDX | BPF_MEM, 11),
4665 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4666 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4667 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4668 			BPF_STMT(BPF_STX, 12),
4669 			BPF_STMT(BPF_LDX | BPF_MEM, 12),
4670 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4671 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4672 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4673 			BPF_STMT(BPF_STX, 13),
4674 			BPF_STMT(BPF_LDX | BPF_MEM, 13),
4675 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4676 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4677 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4678 			BPF_STMT(BPF_STX, 14),
4679 			BPF_STMT(BPF_LDX | BPF_MEM, 14),
4680 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4681 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4682 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4683 			BPF_STMT(BPF_STX, 15),
4684 			BPF_STMT(BPF_LDX | BPF_MEM, 15),
4685 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4686 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4687 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4688 			BPF_STMT(BPF_RET | BPF_A, 0),
4689 		},
4690 		CLASSIC | FLAG_NO_DATA,
4691 		{ },
4692 		{ { 0, 116 } },
4693 	},
4694 	{	/* Mainly checking JIT here. */
4695 		"M[]: full STX + full LDX",
4696 		.u.insns = {
4697 			BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
4698 			BPF_STMT(BPF_STX, 0),
4699 			BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
4700 			BPF_STMT(BPF_STX, 1),
4701 			BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
4702 			BPF_STMT(BPF_STX, 2),
4703 			BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
4704 			BPF_STMT(BPF_STX, 3),
4705 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
4706 			BPF_STMT(BPF_STX, 4),
4707 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
4708 			BPF_STMT(BPF_STX, 5),
4709 			BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
4710 			BPF_STMT(BPF_STX, 6),
4711 			BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
4712 			BPF_STMT(BPF_STX, 7),
4713 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
4714 			BPF_STMT(BPF_STX, 8),
4715 			BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
4716 			BPF_STMT(BPF_STX, 9),
4717 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
4718 			BPF_STMT(BPF_STX, 10),
4719 			BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
4720 			BPF_STMT(BPF_STX, 11),
4721 			BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
4722 			BPF_STMT(BPF_STX, 12),
4723 			BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
4724 			BPF_STMT(BPF_STX, 13),
4725 			BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
4726 			BPF_STMT(BPF_STX, 14),
4727 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
4728 			BPF_STMT(BPF_STX, 15),
4729 			BPF_STMT(BPF_LDX | BPF_MEM, 0),
4730 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4731 			BPF_STMT(BPF_LDX | BPF_MEM, 1),
4732 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4733 			BPF_STMT(BPF_LDX | BPF_MEM, 2),
4734 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4735 			BPF_STMT(BPF_LDX | BPF_MEM, 3),
4736 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4737 			BPF_STMT(BPF_LDX | BPF_MEM, 4),
4738 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4739 			BPF_STMT(BPF_LDX | BPF_MEM, 5),
4740 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4741 			BPF_STMT(BPF_LDX | BPF_MEM, 6),
4742 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4743 			BPF_STMT(BPF_LDX | BPF_MEM, 7),
4744 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4745 			BPF_STMT(BPF_LDX | BPF_MEM, 8),
4746 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4747 			BPF_STMT(BPF_LDX | BPF_MEM, 9),
4748 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4749 			BPF_STMT(BPF_LDX | BPF_MEM, 10),
4750 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4751 			BPF_STMT(BPF_LDX | BPF_MEM, 11),
4752 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4753 			BPF_STMT(BPF_LDX | BPF_MEM, 12),
4754 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4755 			BPF_STMT(BPF_LDX | BPF_MEM, 13),
4756 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4757 			BPF_STMT(BPF_LDX | BPF_MEM, 14),
4758 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4759 			BPF_STMT(BPF_LDX | BPF_MEM, 15),
4760 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4761 			BPF_STMT(BPF_RET | BPF_A, 0),
4762 		},
4763 		CLASSIC | FLAG_NO_DATA,
4764 		{ },
4765 		{ { 0, 0x2a5a5e5 } },
4766 	},
4767 	{
4768 		"check: SKF_AD_MAX",
4769 		.u.insns = {
4770 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4771 				 SKF_AD_OFF + SKF_AD_MAX),
4772 			BPF_STMT(BPF_RET | BPF_A, 0),
4773 		},
4774 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4775 		{ },
4776 		{ },
4777 		.fill_helper = NULL,
4778 		.expected_errcode = -EINVAL,
4779 	},
4780 	{	/* Passes checker but fails during runtime. */
4781 		"LD [SKF_AD_OFF-1]",
4782 		.u.insns = {
4783 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4784 				 SKF_AD_OFF - 1),
4785 			BPF_STMT(BPF_RET | BPF_K, 1),
4786 		},
4787 		CLASSIC,
4788 		{ },
4789 		{ { 1, 0 } },
4790 	},
4791 	{
4792 		"load 64-bit immediate",
4793 		.u.insns_int = {
4794 			BPF_LD_IMM64(R1, 0x567800001234LL),
4795 			BPF_MOV64_REG(R2, R1),
4796 			BPF_MOV64_REG(R3, R2),
4797 			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4798 			BPF_ALU64_IMM(BPF_LSH, R3, 32),
4799 			BPF_ALU64_IMM(BPF_RSH, R3, 32),
4800 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4801 			BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
4802 			BPF_EXIT_INSN(),
4803 			BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
4804 			BPF_EXIT_INSN(),
4805 			BPF_LD_IMM64(R0, 0x1ffffffffLL),
4806 			BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
4807 			BPF_EXIT_INSN(),
4808 		},
4809 		INTERNAL,
4810 		{ },
4811 		{ { 0, 1 } }
4812 	},
4813 	/* BPF_ALU | BPF_MOV | BPF_X */
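	/*
	 * For the MOV tests that follow: a 32-bit (BPF_ALU) move writes a
	 * zero-extended result into the 64-bit destination, whereas the
	 * 32-bit immediate of a BPF_ALU64 move is sign-extended. The
	 * "zero extension" and "sign extension" cases check this by
	 * shifting the result right by 32 before returning it.
	 */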
4814 	{
4815 		"ALU_MOV_X: dst = 2",
4816 		.u.insns_int = {
4817 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
4818 			BPF_ALU32_REG(BPF_MOV, R0, R1),
4819 			BPF_EXIT_INSN(),
4820 		},
4821 		INTERNAL,
4822 		{ },
4823 		{ { 0, 2 } },
4824 	},
4825 	{
4826 		"ALU_MOV_X: dst = 4294967295",
4827 		.u.insns_int = {
4828 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4829 			BPF_ALU32_REG(BPF_MOV, R0, R1),
4830 			BPF_EXIT_INSN(),
4831 		},
4832 		INTERNAL,
4833 		{ },
4834 		{ { 0, 4294967295U } },
4835 	},
4836 	{
4837 		"ALU64_MOV_X: dst = 2",
4838 		.u.insns_int = {
4839 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
4840 			BPF_ALU64_REG(BPF_MOV, R0, R1),
4841 			BPF_EXIT_INSN(),
4842 		},
4843 		INTERNAL,
4844 		{ },
4845 		{ { 0, 2 } },
4846 	},
4847 	{
4848 		"ALU64_MOV_X: dst = 4294967295",
4849 		.u.insns_int = {
4850 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4851 			BPF_ALU64_REG(BPF_MOV, R0, R1),
4852 			BPF_EXIT_INSN(),
4853 		},
4854 		INTERNAL,
4855 		{ },
4856 		{ { 0, 4294967295U } },
4857 	},
4858 	/* BPF_ALU | BPF_MOV | BPF_K */
4859 	{
4860 		"ALU_MOV_K: dst = 2",
4861 		.u.insns_int = {
4862 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
4863 			BPF_EXIT_INSN(),
4864 		},
4865 		INTERNAL,
4866 		{ },
4867 		{ { 0, 2 } },
4868 	},
4869 	{
4870 		"ALU_MOV_K: dst = 4294967295",
4871 		.u.insns_int = {
4872 			BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
4873 			BPF_EXIT_INSN(),
4874 		},
4875 		INTERNAL,
4876 		{ },
4877 		{ { 0, 4294967295U } },
4878 	},
4879 	{
4880 		"ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
4881 		.u.insns_int = {
4882 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4883 			BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
4884 			BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
4885 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4886 			BPF_MOV32_IMM(R0, 2),
4887 			BPF_EXIT_INSN(),
4888 			BPF_MOV32_IMM(R0, 1),
4889 			BPF_EXIT_INSN(),
4890 		},
4891 		INTERNAL,
4892 		{ },
4893 		{ { 0, 0x1 } },
4894 	},
4895 	{
4896 		"ALU_MOV_K: small negative",
4897 		.u.insns_int = {
4898 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
4899 			BPF_EXIT_INSN(),
4900 		},
4901 		INTERNAL,
4902 		{ },
4903 		{ { 0, -123 } }
4904 	},
4905 	{
4906 		"ALU_MOV_K: small negative zero extension",
4907 		.u.insns_int = {
4908 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
4909 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
4910 			BPF_EXIT_INSN(),
4911 		},
4912 		INTERNAL,
4913 		{ },
4914 		{ { 0, 0 } }
4915 	},
4916 	{
4917 		"ALU_MOV_K: large negative",
4918 		.u.insns_int = {
4919 			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
4920 			BPF_EXIT_INSN(),
4921 		},
4922 		INTERNAL,
4923 		{ },
4924 		{ { 0, -123456789 } }
4925 	},
4926 	{
4927 		"ALU_MOV_K: large negative zero extension",
4928 		.u.insns_int = {
4929 			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
4930 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
4931 			BPF_EXIT_INSN(),
4932 		},
4933 		INTERNAL,
4934 		{ },
4935 		{ { 0, 0 } }
4936 	},
4937 	{
4938 		"ALU64_MOV_K: dst = 2",
4939 		.u.insns_int = {
4940 			BPF_ALU64_IMM(BPF_MOV, R0, 2),
4941 			BPF_EXIT_INSN(),
4942 		},
4943 		INTERNAL,
4944 		{ },
4945 		{ { 0, 2 } },
4946 	},
4947 	{
4948 		"ALU64_MOV_K: dst = 2147483647",
4949 		.u.insns_int = {
4950 			BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
4951 			BPF_EXIT_INSN(),
4952 		},
4953 		INTERNAL,
4954 		{ },
4955 		{ { 0, 2147483647 } },
4956 	},
4957 	{
4958 		"ALU64_MOV_K: dst = 0x0",
4959 		.u.insns_int = {
4960 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4961 			BPF_LD_IMM64(R3, 0x0),
4962 			BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
4963 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4964 			BPF_MOV32_IMM(R0, 2),
4965 			BPF_EXIT_INSN(),
4966 			BPF_MOV32_IMM(R0, 1),
4967 			BPF_EXIT_INSN(),
4968 		},
4969 		INTERNAL,
4970 		{ },
4971 		{ { 0, 0x1 } },
4972 	},
4973 	{
4974 		"ALU64_MOV_K: dst = -1",
4975 		.u.insns_int = {
4976 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4977 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
4978 			BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
4979 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4980 			BPF_MOV32_IMM(R0, 2),
4981 			BPF_EXIT_INSN(),
4982 			BPF_MOV32_IMM(R0, 1),
4983 			BPF_EXIT_INSN(),
4984 		},
4985 		INTERNAL,
4986 		{ },
4987 		{ { 0, 0x1 } },
4988 	},
4989 	{
4990 		"ALU64_MOV_K: small negative",
4991 		.u.insns_int = {
4992 			BPF_ALU64_IMM(BPF_MOV, R0, -123),
4993 			BPF_EXIT_INSN(),
4994 		},
4995 		INTERNAL,
4996 		{ },
4997 		{ { 0, -123 } }
4998 	},
4999 	{
5000 		"ALU64_MOV_K: small negative sign extension",
5001 		.u.insns_int = {
5002 			BPF_ALU64_IMM(BPF_MOV, R0, -123),
5003 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5004 			BPF_EXIT_INSN(),
5005 		},
5006 		INTERNAL,
5007 		{ },
5008 		{ { 0, 0xffffffff } }
5009 	},
5010 	{
5011 		"ALU64_MOV_K: large negative",
5012 		.u.insns_int = {
5013 			BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5014 			BPF_EXIT_INSN(),
5015 		},
5016 		INTERNAL,
5017 		{ },
5018 		{ { 0, -123456789 } }
5019 	},
5020 	{
5021 		"ALU64_MOV_K: large negative sign extension",
5022 		.u.insns_int = {
5023 			BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5024 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5025 			BPF_EXIT_INSN(),
5026 		},
5027 		INTERNAL,
5028 		{ },
5029 		{ { 0, 0xffffffff } }
5030 	},
5031 	/* BPF_ALU | BPF_ADD | BPF_X */
5032 	{
5033 		"ALU_ADD_X: 1 + 2 = 3",
5034 		.u.insns_int = {
5035 			BPF_LD_IMM64(R0, 1),
5036 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5037 			BPF_ALU32_REG(BPF_ADD, R0, R1),
5038 			BPF_EXIT_INSN(),
5039 		},
5040 		INTERNAL,
5041 		{ },
5042 		{ { 0, 3 } },
5043 	},
5044 	{
5045 		"ALU_ADD_X: 1 + 4294967294 = 4294967295",
5046 		.u.insns_int = {
5047 			BPF_LD_IMM64(R0, 1),
5048 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5049 			BPF_ALU32_REG(BPF_ADD, R0, R1),
5050 			BPF_EXIT_INSN(),
5051 		},
5052 		INTERNAL,
5053 		{ },
5054 		{ { 0, 4294967295U } },
5055 	},
5056 	{
5057 		"ALU_ADD_X: 2 + 4294967294 = 0",
5058 		.u.insns_int = {
5059 			BPF_LD_IMM64(R0, 2),
5060 			BPF_LD_IMM64(R1, 4294967294U),
5061 			BPF_ALU32_REG(BPF_ADD, R0, R1),
5062 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5063 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5064 			BPF_EXIT_INSN(),
5065 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5066 			BPF_EXIT_INSN(),
5067 		},
5068 		INTERNAL,
5069 		{ },
5070 		{ { 0, 1 } },
5071 	},
5072 	{
5073 		"ALU64_ADD_X: 1 + 2 = 3",
5074 		.u.insns_int = {
5075 			BPF_LD_IMM64(R0, 1),
5076 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5077 			BPF_ALU64_REG(BPF_ADD, R0, R1),
5078 			BPF_EXIT_INSN(),
5079 		},
5080 		INTERNAL,
5081 		{ },
5082 		{ { 0, 3 } },
5083 	},
5084 	{
5085 		"ALU64_ADD_X: 1 + 4294967294 = 4294967295",
5086 		.u.insns_int = {
5087 			BPF_LD_IMM64(R0, 1),
5088 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5089 			BPF_ALU64_REG(BPF_ADD, R0, R1),
5090 			BPF_EXIT_INSN(),
5091 		},
5092 		INTERNAL,
5093 		{ },
5094 		{ { 0, 4294967295U } },
5095 	},
5096 	{
5097 		"ALU64_ADD_X: 2 + 4294967294 = 4294967296",
5098 		.u.insns_int = {
5099 			BPF_LD_IMM64(R0, 2),
5100 			BPF_LD_IMM64(R1, 4294967294U),
5101 			BPF_LD_IMM64(R2, 4294967296ULL),
5102 			BPF_ALU64_REG(BPF_ADD, R0, R1),
5103 			BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
5104 			BPF_MOV32_IMM(R0, 0),
5105 			BPF_EXIT_INSN(),
5106 			BPF_MOV32_IMM(R0, 1),
5107 			BPF_EXIT_INSN(),
5108 		},
5109 		INTERNAL,
5110 		{ },
5111 		{ { 0, 1 } },
5112 	},
5113 	/* BPF_ALU | BPF_ADD | BPF_K */
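	/*
	 * As with MOV, the immediate of a 64-bit ADD is sign-extended, so
	 * 0 + 0x80000000 is expected to give 0x80000000 in the 32-bit case
	 * but 0xffffffff80000000 in the ALU64 case below.
	 */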
5114 	{
5115 		"ALU_ADD_K: 1 + 2 = 3",
5116 		.u.insns_int = {
5117 			BPF_LD_IMM64(R0, 1),
5118 			BPF_ALU32_IMM(BPF_ADD, R0, 2),
5119 			BPF_EXIT_INSN(),
5120 		},
5121 		INTERNAL,
5122 		{ },
5123 		{ { 0, 3 } },
5124 	},
5125 	{
5126 		"ALU_ADD_K: 3 + 0 = 3",
5127 		.u.insns_int = {
5128 			BPF_LD_IMM64(R0, 3),
5129 			BPF_ALU32_IMM(BPF_ADD, R0, 0),
5130 			BPF_EXIT_INSN(),
5131 		},
5132 		INTERNAL,
5133 		{ },
5134 		{ { 0, 3 } },
5135 	},
5136 	{
5137 		"ALU_ADD_K: 1 + 4294967294 = 4294967295",
5138 		.u.insns_int = {
5139 			BPF_LD_IMM64(R0, 1),
5140 			BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
5141 			BPF_EXIT_INSN(),
5142 		},
5143 		INTERNAL,
5144 		{ },
5145 		{ { 0, 4294967295U } },
5146 	},
5147 	{
5148 		"ALU_ADD_K: 4294967294 + 2 = 0",
5149 		.u.insns_int = {
5150 			BPF_LD_IMM64(R0, 4294967294U),
5151 			BPF_ALU32_IMM(BPF_ADD, R0, 2),
5152 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5153 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5154 			BPF_EXIT_INSN(),
5155 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5156 			BPF_EXIT_INSN(),
5157 		},
5158 		INTERNAL,
5159 		{ },
5160 		{ { 0, 1 } },
5161 	},
5162 	{
5163 		"ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
5164 		.u.insns_int = {
5165 			BPF_LD_IMM64(R2, 0x0),
5166 			BPF_LD_IMM64(R3, 0x00000000ffffffff),
5167 			BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
5168 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5169 			BPF_MOV32_IMM(R0, 2),
5170 			BPF_EXIT_INSN(),
5171 			BPF_MOV32_IMM(R0, 1),
5172 			BPF_EXIT_INSN(),
5173 		},
5174 		INTERNAL,
5175 		{ },
5176 		{ { 0, 0x1 } },
5177 	},
5178 	{
5179 		"ALU_ADD_K: 0 + 0xffff = 0xffff",
5180 		.u.insns_int = {
5181 			BPF_LD_IMM64(R2, 0x0),
5182 			BPF_LD_IMM64(R3, 0xffff),
5183 			BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
5184 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5185 			BPF_MOV32_IMM(R0, 2),
5186 			BPF_EXIT_INSN(),
5187 			BPF_MOV32_IMM(R0, 1),
5188 			BPF_EXIT_INSN(),
5189 		},
5190 		INTERNAL,
5191 		{ },
5192 		{ { 0, 0x1 } },
5193 	},
5194 	{
5195 		"ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5196 		.u.insns_int = {
5197 			BPF_LD_IMM64(R2, 0x0),
5198 			BPF_LD_IMM64(R3, 0x7fffffff),
5199 			BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
5200 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5201 			BPF_MOV32_IMM(R0, 2),
5202 			BPF_EXIT_INSN(),
5203 			BPF_MOV32_IMM(R0, 1),
5204 			BPF_EXIT_INSN(),
5205 		},
5206 		INTERNAL,
5207 		{ },
5208 		{ { 0, 0x1 } },
5209 	},
5210 	{
5211 		"ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
5212 		.u.insns_int = {
5213 			BPF_LD_IMM64(R2, 0x0),
5214 			BPF_LD_IMM64(R3, 0x80000000),
5215 			BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
5216 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5217 			BPF_MOV32_IMM(R0, 2),
5218 			BPF_EXIT_INSN(),
5219 			BPF_MOV32_IMM(R0, 1),
5220 			BPF_EXIT_INSN(),
5221 		},
5222 		INTERNAL,
5223 		{ },
5224 		{ { 0, 0x1 } },
5225 	},
5226 	{
5227 		"ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
5228 		.u.insns_int = {
5229 			BPF_LD_IMM64(R2, 0x0),
5230 			BPF_LD_IMM64(R3, 0x80008000),
5231 			BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
5232 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5233 			BPF_MOV32_IMM(R0, 2),
5234 			BPF_EXIT_INSN(),
5235 			BPF_MOV32_IMM(R0, 1),
5236 			BPF_EXIT_INSN(),
5237 		},
5238 		INTERNAL,
5239 		{ },
5240 		{ { 0, 0x1 } },
5241 	},
5242 	{
5243 		"ALU64_ADD_K: 1 + 2 = 3",
5244 		.u.insns_int = {
5245 			BPF_LD_IMM64(R0, 1),
5246 			BPF_ALU64_IMM(BPF_ADD, R0, 2),
5247 			BPF_EXIT_INSN(),
5248 		},
5249 		INTERNAL,
5250 		{ },
5251 		{ { 0, 3 } },
5252 	},
5253 	{
5254 		"ALU64_ADD_K: 3 + 0 = 3",
5255 		.u.insns_int = {
5256 			BPF_LD_IMM64(R0, 3),
5257 			BPF_ALU64_IMM(BPF_ADD, R0, 0),
5258 			BPF_EXIT_INSN(),
5259 		},
5260 		INTERNAL,
5261 		{ },
5262 		{ { 0, 3 } },
5263 	},
5264 	{
5265 		"ALU64_ADD_K: 1 + 2147483646 = 2147483647",
5266 		.u.insns_int = {
5267 			BPF_LD_IMM64(R0, 1),
5268 			BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
5269 			BPF_EXIT_INSN(),
5270 		},
5271 		INTERNAL,
5272 		{ },
5273 		{ { 0, 2147483647 } },
5274 	},
5275 	{
5276 		"ALU64_ADD_K: 4294967294 + 2 = 4294967296",
5277 		.u.insns_int = {
5278 			BPF_LD_IMM64(R0, 4294967294U),
5279 			BPF_LD_IMM64(R1, 4294967296ULL),
5280 			BPF_ALU64_IMM(BPF_ADD, R0, 2),
5281 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5282 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5283 			BPF_EXIT_INSN(),
5284 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5285 			BPF_EXIT_INSN(),
5286 		},
5287 		INTERNAL,
5288 		{ },
5289 		{ { 0, 1 } },
5290 	},
5291 	{
5292 		"ALU64_ADD_K: 2147483646 + -2147483647 = -1",
5293 		.u.insns_int = {
5294 			BPF_LD_IMM64(R0, 2147483646),
5295 			BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
5296 			BPF_EXIT_INSN(),
5297 		},
5298 		INTERNAL,
5299 		{ },
5300 		{ { 0, -1 } },
5301 	},
5302 	{
5303 		"ALU64_ADD_K: 1 + 0 = 1",
5304 		.u.insns_int = {
5305 			BPF_LD_IMM64(R2, 0x1),
5306 			BPF_LD_IMM64(R3, 0x1),
5307 			BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
5308 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5309 			BPF_MOV32_IMM(R0, 2),
5310 			BPF_EXIT_INSN(),
5311 			BPF_MOV32_IMM(R0, 1),
5312 			BPF_EXIT_INSN(),
5313 		},
5314 		INTERNAL,
5315 		{ },
5316 		{ { 0, 0x1 } },
5317 	},
5318 	{
5319 		"ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
5320 		.u.insns_int = {
5321 			BPF_LD_IMM64(R2, 0x0),
5322 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5323 			BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
5324 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5325 			BPF_MOV32_IMM(R0, 2),
5326 			BPF_EXIT_INSN(),
5327 			BPF_MOV32_IMM(R0, 1),
5328 			BPF_EXIT_INSN(),
5329 		},
5330 		INTERNAL,
5331 		{ },
5332 		{ { 0, 0x1 } },
5333 	},
5334 	{
5335 		"ALU64_ADD_K: 0 + 0xffff = 0xffff",
5336 		.u.insns_int = {
5337 			BPF_LD_IMM64(R2, 0x0),
5338 			BPF_LD_IMM64(R3, 0xffff),
5339 			BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
5340 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5341 			BPF_MOV32_IMM(R0, 2),
5342 			BPF_EXIT_INSN(),
5343 			BPF_MOV32_IMM(R0, 1),
5344 			BPF_EXIT_INSN(),
5345 		},
5346 		INTERNAL,
5347 		{ },
5348 		{ { 0, 0x1 } },
5349 	},
5350 	{
5351 		"ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5352 		.u.insns_int = {
5353 			BPF_LD_IMM64(R2, 0x0),
5354 			BPF_LD_IMM64(R3, 0x7fffffff),
5355 			BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
5356 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5357 			BPF_MOV32_IMM(R0, 2),
5358 			BPF_EXIT_INSN(),
5359 			BPF_MOV32_IMM(R0, 1),
5360 			BPF_EXIT_INSN(),
5361 		},
5362 		INTERNAL,
5363 		{ },
5364 		{ { 0, 0x1 } },
5365 	},
5366 	{
5367 		"ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
5368 		.u.insns_int = {
5369 			BPF_LD_IMM64(R2, 0x0),
5370 			BPF_LD_IMM64(R3, 0xffffffff80000000LL),
5371 			BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
5372 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5373 			BPF_MOV32_IMM(R0, 2),
5374 			BPF_EXIT_INSN(),
5375 			BPF_MOV32_IMM(R0, 1),
5376 			BPF_EXIT_INSN(),
5377 		},
5378 		INTERNAL,
5379 		{ },
5380 		{ { 0, 0x1 } },
5381 	},
5382 	{
5383 		"ALU64_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
5384 		.u.insns_int = {
5385 			BPF_LD_IMM64(R2, 0x0),
5386 			BPF_LD_IMM64(R3, 0xffffffff80008000LL),
5387 			BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
5388 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5389 			BPF_MOV32_IMM(R0, 2),
5390 			BPF_EXIT_INSN(),
5391 			BPF_MOV32_IMM(R0, 1),
5392 			BPF_EXIT_INSN(),
5393 		},
5394 		INTERNAL,
5395 		{ },
5396 		{ { 0, 0x1 } },
5397 	},
5398 	/* BPF_ALU | BPF_SUB | BPF_X */
5399 	{
5400 		"ALU_SUB_X: 3 - 1 = 2",
5401 		.u.insns_int = {
5402 			BPF_LD_IMM64(R0, 3),
5403 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
5404 			BPF_ALU32_REG(BPF_SUB, R0, R1),
5405 			BPF_EXIT_INSN(),
5406 		},
5407 		INTERNAL,
5408 		{ },
5409 		{ { 0, 2 } },
5410 	},
5411 	{
5412 		"ALU_SUB_X: 4294967295 - 4294967294 = 1",
5413 		.u.insns_int = {
5414 			BPF_LD_IMM64(R0, 4294967295U),
5415 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5416 			BPF_ALU32_REG(BPF_SUB, R0, R1),
5417 			BPF_EXIT_INSN(),
5418 		},
5419 		INTERNAL,
5420 		{ },
5421 		{ { 0, 1 } },
5422 	},
5423 	{
5424 		"ALU64_SUB_X: 3 - 1 = 2",
5425 		.u.insns_int = {
5426 			BPF_LD_IMM64(R0, 3),
5427 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
5428 			BPF_ALU64_REG(BPF_SUB, R0, R1),
5429 			BPF_EXIT_INSN(),
5430 		},
5431 		INTERNAL,
5432 		{ },
5433 		{ { 0, 2 } },
5434 	},
5435 	{
5436 		"ALU64_SUB_X: 4294967295 - 4294967294 = 1",
5437 		.u.insns_int = {
5438 			BPF_LD_IMM64(R0, 4294967295U),
5439 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5440 			BPF_ALU64_REG(BPF_SUB, R0, R1),
5441 			BPF_EXIT_INSN(),
5442 		},
5443 		INTERNAL,
5444 		{ },
5445 		{ { 0, 1 } },
5446 	},
5447 	/* BPF_ALU | BPF_SUB | BPF_K */
5448 	{
5449 		"ALU_SUB_K: 3 - 1 = 2",
5450 		.u.insns_int = {
5451 			BPF_LD_IMM64(R0, 3),
5452 			BPF_ALU32_IMM(BPF_SUB, R0, 1),
5453 			BPF_EXIT_INSN(),
5454 		},
5455 		INTERNAL,
5456 		{ },
5457 		{ { 0, 2 } },
5458 	},
5459 	{
5460 		"ALU_SUB_K: 3 - 0 = 3",
5461 		.u.insns_int = {
5462 			BPF_LD_IMM64(R0, 3),
5463 			BPF_ALU32_IMM(BPF_SUB, R0, 0),
5464 			BPF_EXIT_INSN(),
5465 		},
5466 		INTERNAL,
5467 		{ },
5468 		{ { 0, 3 } },
5469 	},
5470 	{
5471 		"ALU_SUB_K: 4294967295 - 4294967294 = 1",
5472 		.u.insns_int = {
5473 			BPF_LD_IMM64(R0, 4294967295U),
5474 			BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
5475 			BPF_EXIT_INSN(),
5476 		},
5477 		INTERNAL,
5478 		{ },
5479 		{ { 0, 1 } },
5480 	},
5481 	{
5482 		"ALU64_SUB_K: 3 - 1 = 2",
5483 		.u.insns_int = {
5484 			BPF_LD_IMM64(R0, 3),
5485 			BPF_ALU64_IMM(BPF_SUB, R0, 1),
5486 			BPF_EXIT_INSN(),
5487 		},
5488 		INTERNAL,
5489 		{ },
5490 		{ { 0, 2 } },
5491 	},
5492 	{
5493 		"ALU64_SUB_K: 3 - 0 = 3",
5494 		.u.insns_int = {
5495 			BPF_LD_IMM64(R0, 3),
5496 			BPF_ALU64_IMM(BPF_SUB, R0, 0),
5497 			BPF_EXIT_INSN(),
5498 		},
5499 		INTERNAL,
5500 		{ },
5501 		{ { 0, 3 } },
5502 	},
5503 	{
5504 		"ALU64_SUB_K: 4294967294 - 4294967295 = -1",
5505 		.u.insns_int = {
5506 			BPF_LD_IMM64(R0, 4294967294U),
5507 			BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
5508 			BPF_EXIT_INSN(),
5509 		},
5510 		INTERNAL,
5511 		{ },
5512 		{ { 0, -1 } },
5513 	},
5514 	{
5515 		"ALU64_SUB_K: 2147483646 - 2147483647 = -1",
5516 		.u.insns_int = {
5517 			BPF_LD_IMM64(R0, 2147483646),
5518 			BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
5519 			BPF_EXIT_INSN(),
5520 		},
5521 		INTERNAL,
5522 		{ },
5523 		{ { 0, -1 } },
5524 	},
5525 	/* BPF_ALU | BPF_MUL | BPF_X */
5526 	{
5527 		"ALU_MUL_X: 2 * 3 = 6",
5528 		.u.insns_int = {
5529 			BPF_LD_IMM64(R0, 2),
5530 			BPF_ALU32_IMM(BPF_MOV, R1, 3),
5531 			BPF_ALU32_REG(BPF_MUL, R0, R1),
5532 			BPF_EXIT_INSN(),
5533 		},
5534 		INTERNAL,
5535 		{ },
5536 		{ { 0, 6 } },
5537 	},
5538 	{
5539 		"ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5540 		.u.insns_int = {
5541 			BPF_LD_IMM64(R0, 2),
5542 			BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
5543 			BPF_ALU32_REG(BPF_MUL, R0, R1),
5544 			BPF_EXIT_INSN(),
5545 		},
5546 		INTERNAL,
5547 		{ },
5548 		{ { 0, 0xFFFFFFF0 } },
5549 	},
5550 	{
5551 		"ALU_MUL_X: -1 * -1 = 1",
5552 		.u.insns_int = {
5553 			BPF_LD_IMM64(R0, -1),
5554 			BPF_ALU32_IMM(BPF_MOV, R1, -1),
5555 			BPF_ALU32_REG(BPF_MUL, R0, R1),
5556 			BPF_EXIT_INSN(),
5557 		},
5558 		INTERNAL,
5559 		{ },
5560 		{ { 0, 1 } },
5561 	},
5562 	{
5563 		"ALU64_MUL_X: 2 * 3 = 6",
5564 		.u.insns_int = {
5565 			BPF_LD_IMM64(R0, 2),
5566 			BPF_ALU32_IMM(BPF_MOV, R1, 3),
5567 			BPF_ALU64_REG(BPF_MUL, R0, R1),
5568 			BPF_EXIT_INSN(),
5569 		},
5570 		INTERNAL,
5571 		{ },
5572 		{ { 0, 6 } },
5573 	},
5574 	{
5575 		"ALU64_MUL_X: 1 * 2147483647 = 2147483647",
5576 		.u.insns_int = {
5577 			BPF_LD_IMM64(R0, 1),
5578 			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5579 			BPF_ALU64_REG(BPF_MUL, R0, R1),
5580 			BPF_EXIT_INSN(),
5581 		},
5582 		INTERNAL,
5583 		{ },
5584 		{ { 0, 2147483647 } },
5585 	},
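	/*
	 * The expected value a test can check here is only 32 bits wide, so
	 * the wide multiplies below come in pairs: the "low word" test
	 * returns the bottom half of the product directly and the
	 * "high word" test shifts the product right by 32 first.
	 */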
5586 	{
5587 		"ALU64_MUL_X: 64x64 multiply, low word",
5588 		.u.insns_int = {
5589 			BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5590 			BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5591 			BPF_ALU64_REG(BPF_MUL, R0, R1),
5592 			BPF_EXIT_INSN(),
5593 		},
5594 		INTERNAL,
5595 		{ },
5596 		{ { 0, 0xe5618cf0 } }
5597 	},
5598 	{
5599 		"ALU64_MUL_X: 64x64 multiply, high word",
5600 		.u.insns_int = {
5601 			BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5602 			BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5603 			BPF_ALU64_REG(BPF_MUL, R0, R1),
5604 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5605 			BPF_EXIT_INSN(),
5606 		},
5607 		INTERNAL,
5608 		{ },
5609 		{ { 0, 0x2236d88f } }
5610 	},
5611 	/* BPF_ALU | BPF_MUL | BPF_K */
5612 	{
5613 		"ALU_MUL_K: 2 * 3 = 6",
5614 		.u.insns_int = {
5615 			BPF_LD_IMM64(R0, 2),
5616 			BPF_ALU32_IMM(BPF_MUL, R0, 3),
5617 			BPF_EXIT_INSN(),
5618 		},
5619 		INTERNAL,
5620 		{ },
5621 		{ { 0, 6 } },
5622 	},
5623 	{
5624 		"ALU_MUL_K: 3 * 1 = 3",
5625 		.u.insns_int = {
5626 			BPF_LD_IMM64(R0, 3),
5627 			BPF_ALU32_IMM(BPF_MUL, R0, 1),
5628 			BPF_EXIT_INSN(),
5629 		},
5630 		INTERNAL,
5631 		{ },
5632 		{ { 0, 3 } },
5633 	},
5634 	{
5635 		"ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5636 		.u.insns_int = {
5637 			BPF_LD_IMM64(R0, 2),
5638 			BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
5639 			BPF_EXIT_INSN(),
5640 		},
5641 		INTERNAL,
5642 		{ },
5643 		{ { 0, 0xFFFFFFF0 } },
5644 	},
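	/*
	 * Tests whose expected value does not fit in the returned 32 bits
	 * compare the full 64-bit result against a reference register and
	 * return 1 on a match, 2 on a mismatch.
	 */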
5645 	{
5646 		"ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
5647 		.u.insns_int = {
5648 			BPF_LD_IMM64(R2, 0x1),
5649 			BPF_LD_IMM64(R3, 0x00000000ffffffff),
5650 			BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
5651 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5652 			BPF_MOV32_IMM(R0, 2),
5653 			BPF_EXIT_INSN(),
5654 			BPF_MOV32_IMM(R0, 1),
5655 			BPF_EXIT_INSN(),
5656 		},
5657 		INTERNAL,
5658 		{ },
5659 		{ { 0, 0x1 } },
5660 	},
5661 	{
5662 		"ALU64_MUL_K: 2 * 3 = 6",
5663 		.u.insns_int = {
5664 			BPF_LD_IMM64(R0, 2),
5665 			BPF_ALU64_IMM(BPF_MUL, R0, 3),
5666 			BPF_EXIT_INSN(),
5667 		},
5668 		INTERNAL,
5669 		{ },
5670 		{ { 0, 6 } },
5671 	},
5672 	{
5673 		"ALU64_MUL_K: 3 * 1 = 3",
5674 		.u.insns_int = {
5675 			BPF_LD_IMM64(R0, 3),
5676 			BPF_ALU64_IMM(BPF_MUL, R0, 1),
5677 			BPF_EXIT_INSN(),
5678 		},
5679 		INTERNAL,
5680 		{ },
5681 		{ { 0, 3 } },
5682 	},
5683 	{
5684 		"ALU64_MUL_K: 1 * 2147483647 = 2147483647",
5685 		.u.insns_int = {
5686 			BPF_LD_IMM64(R0, 1),
5687 			BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
5688 			BPF_EXIT_INSN(),
5689 		},
5690 		INTERNAL,
5691 		{ },
5692 		{ { 0, 2147483647 } },
5693 	},
5694 	{
5695 		"ALU64_MUL_K: 1 * -2147483647 = -2147483647",
5696 		.u.insns_int = {
5697 			BPF_LD_IMM64(R0, 1),
5698 			BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
5699 			BPF_EXIT_INSN(),
5700 		},
5701 		INTERNAL,
5702 		{ },
5703 		{ { 0, -2147483647 } },
5704 	},
5705 	{
5706 		"ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
5707 		.u.insns_int = {
5708 			BPF_LD_IMM64(R2, 0x1),
5709 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5710 			BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
5711 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5712 			BPF_MOV32_IMM(R0, 2),
5713 			BPF_EXIT_INSN(),
5714 			BPF_MOV32_IMM(R0, 1),
5715 			BPF_EXIT_INSN(),
5716 		},
5717 		INTERNAL,
5718 		{ },
5719 		{ { 0, 0x1 } },
5720 	},
5721 	{
5722 		"ALU64_MUL_K: 64x32 multiply, low word",
5723 		.u.insns_int = {
5724 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5725 			BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5726 			BPF_EXIT_INSN(),
5727 		},
5728 		INTERNAL,
5729 		{ },
5730 		{ { 0, 0xe242d208 } }
5731 	},
5732 	{
5733 		"ALU64_MUL_K: 64x32 multiply, high word",
5734 		.u.insns_int = {
5735 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5736 			BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5737 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5738 			BPF_EXIT_INSN(),
5739 		},
5740 		INTERNAL,
5741 		{ },
5742 		{ { 0, 0xc28f5c28 } }
5743 	},
5744 	/* BPF_ALU | BPF_DIV | BPF_X */
5745 	{
5746 		"ALU_DIV_X: 6 / 2 = 3",
5747 		.u.insns_int = {
5748 			BPF_LD_IMM64(R0, 6),
5749 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5750 			BPF_ALU32_REG(BPF_DIV, R0, R1),
5751 			BPF_EXIT_INSN(),
5752 		},
5753 		INTERNAL,
5754 		{ },
5755 		{ { 0, 3 } },
5756 	},
5757 	{
5758 		"ALU_DIV_X: 4294967295 / 4294967295 = 1",
5759 		.u.insns_int = {
5760 			BPF_LD_IMM64(R0, 4294967295U),
5761 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
5762 			BPF_ALU32_REG(BPF_DIV, R0, R1),
5763 			BPF_EXIT_INSN(),
5764 		},
5765 		INTERNAL,
5766 		{ },
5767 		{ { 0, 1 } },
5768 	},
5769 	{
5770 		"ALU64_DIV_X: 6 / 2 = 3",
5771 		.u.insns_int = {
5772 			BPF_LD_IMM64(R0, 6),
5773 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5774 			BPF_ALU64_REG(BPF_DIV, R0, R1),
5775 			BPF_EXIT_INSN(),
5776 		},
5777 		INTERNAL,
5778 		{ },
5779 		{ { 0, 3 } },
5780 	},
5781 	{
5782 		"ALU64_DIV_X: 2147483647 / 2147483647 = 1",
5783 		.u.insns_int = {
5784 			BPF_LD_IMM64(R0, 2147483647),
5785 			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5786 			BPF_ALU64_REG(BPF_DIV, R0, R1),
5787 			BPF_EXIT_INSN(),
5788 		},
5789 		INTERNAL,
5790 		{ },
5791 		{ { 0, 1 } },
5792 	},
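	/*
	 * BPF_DIV and BPF_MOD are unsigned operations. In the tests below,
	 * a "-1" divisor is therefore the all-ones value: 0xffffffff for
	 * 32-bit operations and, after sign extension of the immediate,
	 * 0xffffffffffffffff for 64-bit operations.
	 */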
5793 	{
5794 		"ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5795 		.u.insns_int = {
5796 			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5797 			BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
5798 			BPF_LD_IMM64(R3, 0x0000000000000001LL),
5799 			BPF_ALU64_REG(BPF_DIV, R2, R4),
5800 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5801 			BPF_MOV32_IMM(R0, 2),
5802 			BPF_EXIT_INSN(),
5803 			BPF_MOV32_IMM(R0, 1),
5804 			BPF_EXIT_INSN(),
5805 		},
5806 		INTERNAL,
5807 		{ },
5808 		{ { 0, 0x1 } },
5809 	},
5810 	/* BPF_ALU | BPF_DIV | BPF_K */
5811 	{
5812 		"ALU_DIV_K: 6 / 2 = 3",
5813 		.u.insns_int = {
5814 			BPF_LD_IMM64(R0, 6),
5815 			BPF_ALU32_IMM(BPF_DIV, R0, 2),
5816 			BPF_EXIT_INSN(),
5817 		},
5818 		INTERNAL,
5819 		{ },
5820 		{ { 0, 3 } },
5821 	},
5822 	{
5823 		"ALU_DIV_K: 3 / 1 = 3",
5824 		.u.insns_int = {
5825 			BPF_LD_IMM64(R0, 3),
5826 			BPF_ALU32_IMM(BPF_DIV, R0, 1),
5827 			BPF_EXIT_INSN(),
5828 		},
5829 		INTERNAL,
5830 		{ },
5831 		{ { 0, 3 } },
5832 	},
5833 	{
5834 		"ALU_DIV_K: 4294967295 / 4294967295 = 1",
5835 		.u.insns_int = {
5836 			BPF_LD_IMM64(R0, 4294967295U),
5837 			BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
5838 			BPF_EXIT_INSN(),
5839 		},
5840 		INTERNAL,
5841 		{ },
5842 		{ { 0, 1 } },
5843 	},
5844 	{
5845 		"ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
5846 		.u.insns_int = {
5847 			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5848 			BPF_LD_IMM64(R3, 0x1UL),
5849 			BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
5850 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5851 			BPF_MOV32_IMM(R0, 2),
5852 			BPF_EXIT_INSN(),
5853 			BPF_MOV32_IMM(R0, 1),
5854 			BPF_EXIT_INSN(),
5855 		},
5856 		INTERNAL,
5857 		{ },
5858 		{ { 0, 0x1 } },
5859 	},
5860 	{
5861 		"ALU64_DIV_K: 6 / 2 = 3",
5862 		.u.insns_int = {
5863 			BPF_LD_IMM64(R0, 6),
5864 			BPF_ALU64_IMM(BPF_DIV, R0, 2),
5865 			BPF_EXIT_INSN(),
5866 		},
5867 		INTERNAL,
5868 		{ },
5869 		{ { 0, 3 } },
5870 	},
5871 	{
5872 		"ALU64_DIV_K: 3 / 1 = 3",
5873 		.u.insns_int = {
5874 			BPF_LD_IMM64(R0, 3),
5875 			BPF_ALU64_IMM(BPF_DIV, R0, 1),
5876 			BPF_EXIT_INSN(),
5877 		},
5878 		INTERNAL,
5879 		{ },
5880 		{ { 0, 3 } },
5881 	},
5882 	{
5883 		"ALU64_DIV_K: 2147483647 / 2147483647 = 1",
5884 		.u.insns_int = {
5885 			BPF_LD_IMM64(R0, 2147483647),
5886 			BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
5887 			BPF_EXIT_INSN(),
5888 		},
5889 		INTERNAL,
5890 		{ },
5891 		{ { 0, 1 } },
5892 	},
5893 	{
5894 		"ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5895 		.u.insns_int = {
5896 			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5897 			BPF_LD_IMM64(R3, 0x0000000000000001LL),
5898 			BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
5899 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5900 			BPF_MOV32_IMM(R0, 2),
5901 			BPF_EXIT_INSN(),
5902 			BPF_MOV32_IMM(R0, 1),
5903 			BPF_EXIT_INSN(),
5904 		},
5905 		INTERNAL,
5906 		{ },
5907 		{ { 0, 0x1 } },
5908 	},
5909 	/* BPF_ALU | BPF_MOD | BPF_X */
5910 	{
5911 		"ALU_MOD_X: 3 % 2 = 1",
5912 		.u.insns_int = {
5913 			BPF_LD_IMM64(R0, 3),
5914 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5915 			BPF_ALU32_REG(BPF_MOD, R0, R1),
5916 			BPF_EXIT_INSN(),
5917 		},
5918 		INTERNAL,
5919 		{ },
5920 		{ { 0, 1 } },
5921 	},
5922 	{
5923 		"ALU_MOD_X: 4294967295 % 4294967293 = 2",
5924 		.u.insns_int = {
5925 			BPF_LD_IMM64(R0, 4294967295U),
5926 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
5927 			BPF_ALU32_REG(BPF_MOD, R0, R1),
5928 			BPF_EXIT_INSN(),
5929 		},
5930 		INTERNAL,
5931 		{ },
5932 		{ { 0, 2 } },
5933 	},
5934 	{
5935 		"ALU64_MOD_X: 3 % 2 = 1",
5936 		.u.insns_int = {
5937 			BPF_LD_IMM64(R0, 3),
5938 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5939 			BPF_ALU64_REG(BPF_MOD, R0, R1),
5940 			BPF_EXIT_INSN(),
5941 		},
5942 		INTERNAL,
5943 		{ },
5944 		{ { 0, 1 } },
5945 	},
5946 	{
5947 		"ALU64_MOD_X: 2147483647 % 2147483645 = 2",
5948 		.u.insns_int = {
5949 			BPF_LD_IMM64(R0, 2147483647),
5950 			BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
5951 			BPF_ALU64_REG(BPF_MOD, R0, R1),
5952 			BPF_EXIT_INSN(),
5953 		},
5954 		INTERNAL,
5955 		{ },
5956 		{ { 0, 2 } },
5957 	},
5958 	/* BPF_ALU | BPF_MOD | BPF_K */
5959 	{
5960 		"ALU_MOD_K: 3 % 2 = 1",
5961 		.u.insns_int = {
5962 			BPF_LD_IMM64(R0, 3),
5963 			BPF_ALU32_IMM(BPF_MOD, R0, 2),
5964 			BPF_EXIT_INSN(),
5965 		},
5966 		INTERNAL,
5967 		{ },
5968 		{ { 0, 1 } },
5969 	},
5970 	{
5971 		"ALU_MOD_K: 3 % 1 = 0",
5972 		.u.insns_int = {
5973 			BPF_LD_IMM64(R0, 3),
5974 			BPF_ALU32_IMM(BPF_MOD, R0, 1),
5975 			BPF_EXIT_INSN(),
5976 		},
5977 		INTERNAL,
5978 		{ },
5979 		{ { 0, 0 } },
5980 	},
5981 	{
5982 		"ALU_MOD_K: 4294967295 % 4294967293 = 2",
5983 		.u.insns_int = {
5984 			BPF_LD_IMM64(R0, 4294967295U),
5985 			BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
5986 			BPF_EXIT_INSN(),
5987 		},
5988 		INTERNAL,
5989 		{ },
5990 		{ { 0, 2 } },
5991 	},
5992 	{
5993 		"ALU64_MOD_K: 3 % 2 = 1",
5994 		.u.insns_int = {
5995 			BPF_LD_IMM64(R0, 3),
5996 			BPF_ALU64_IMM(BPF_MOD, R0, 2),
5997 			BPF_EXIT_INSN(),
5998 		},
5999 		INTERNAL,
6000 		{ },
6001 		{ { 0, 1 } },
6002 	},
6003 	{
6004 		"ALU64_MOD_K: 3 % 1 = 0",
6005 		.u.insns_int = {
6006 			BPF_LD_IMM64(R0, 3),
6007 			BPF_ALU64_IMM(BPF_MOD, R0, 1),
6008 			BPF_EXIT_INSN(),
6009 		},
6010 		INTERNAL,
6011 		{ },
6012 		{ { 0, 0 } },
6013 	},
6014 	{
6015 		"ALU64_MOD_K: 2147483647 % 2147483645 = 2",
6016 		.u.insns_int = {
6017 			BPF_LD_IMM64(R0, 2147483647),
6018 			BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
6019 			BPF_EXIT_INSN(),
6020 		},
6021 		INTERNAL,
6022 		{ },
6023 		{ { 0, 2 } },
6024 	},
6025 	/* BPF_ALU | BPF_AND | BPF_X */
6026 	{
6027 		"ALU_AND_X: 3 & 2 = 2",
6028 		.u.insns_int = {
6029 			BPF_LD_IMM64(R0, 3),
6030 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6031 			BPF_ALU32_REG(BPF_AND, R0, R1),
6032 			BPF_EXIT_INSN(),
6033 		},
6034 		INTERNAL,
6035 		{ },
6036 		{ { 0, 2 } },
6037 	},
6038 	{
6039 		"ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6040 		.u.insns_int = {
6041 			BPF_LD_IMM64(R0, 0xffffffff),
6042 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6043 			BPF_ALU32_REG(BPF_AND, R0, R1),
6044 			BPF_EXIT_INSN(),
6045 		},
6046 		INTERNAL,
6047 		{ },
6048 		{ { 0, 0xffffffff } },
6049 	},
6050 	{
6051 		"ALU64_AND_X: 3 & 2 = 2",
6052 		.u.insns_int = {
6053 			BPF_LD_IMM64(R0, 3),
6054 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6055 			BPF_ALU64_REG(BPF_AND, R0, R1),
6056 			BPF_EXIT_INSN(),
6057 		},
6058 		INTERNAL,
6059 		{ },
6060 		{ { 0, 2 } },
6061 	},
6062 	{
6063 		"ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6064 		.u.insns_int = {
6065 			BPF_LD_IMM64(R0, 0xffffffff),
6066 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6067 			BPF_ALU64_REG(BPF_AND, R0, R1),
6068 			BPF_EXIT_INSN(),
6069 		},
6070 		INTERNAL,
6071 		{ },
6072 		{ { 0, 0xffffffff } },
6073 	},
6074 	/* BPF_ALU | BPF_AND | BPF_K */
6075 	{
6076 		"ALU_AND_K: 3 & 2 = 2",
6077 		.u.insns_int = {
6078 			BPF_LD_IMM64(R0, 3),
6079 			BPF_ALU32_IMM(BPF_AND, R0, 2),
6080 			BPF_EXIT_INSN(),
6081 		},
6082 		INTERNAL,
6083 		{ },
6084 		{ { 0, 2 } },
6085 	},
6086 	{
6087 		"ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6088 		.u.insns_int = {
6089 			BPF_LD_IMM64(R0, 0xffffffff),
6090 			BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
6091 			BPF_EXIT_INSN(),
6092 		},
6093 		INTERNAL,
6094 		{ },
6095 		{ { 0, 0xffffffff } },
6096 	},
6097 	{
6098 		"ALU_AND_K: Small immediate",
6099 		.u.insns_int = {
6100 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6101 			BPF_ALU32_IMM(BPF_AND, R0, 15),
6102 			BPF_EXIT_INSN(),
6103 		},
6104 		INTERNAL,
6105 		{ },
6106 		{ { 0, 4 } }
6107 	},
6108 	{
6109 		"ALU_AND_K: Large immediate",
6110 		.u.insns_int = {
6111 			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6112 			BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
6113 			BPF_EXIT_INSN(),
6114 		},
6115 		INTERNAL,
6116 		{ },
6117 		{ { 0, 0xa1b2c3d4 } }
6118 	},
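	/*
	 * 32-bit ALU operations zero the upper 32 bits of the destination
	 * register; the "Zero extension" tests verify this by comparing the
	 * full 64-bit result against a zero-extended reference value.
	 */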
6119 	{
6120 		"ALU_AND_K: Zero extension",
6121 		.u.insns_int = {
6122 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6123 			BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
6124 			BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
6125 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6126 			BPF_MOV32_IMM(R0, 2),
6127 			BPF_EXIT_INSN(),
6128 			BPF_MOV32_IMM(R0, 1),
6129 			BPF_EXIT_INSN(),
6130 		},
6131 		INTERNAL,
6132 		{ },
6133 		{ { 0, 1 } }
6134 	},
6135 	{
6136 		"ALU64_AND_K: 3 & 2 = 2",
6137 		.u.insns_int = {
6138 			BPF_LD_IMM64(R0, 3),
6139 			BPF_ALU64_IMM(BPF_AND, R0, 2),
6140 			BPF_EXIT_INSN(),
6141 		},
6142 		INTERNAL,
6143 		{ },
6144 		{ { 0, 2 } },
6145 	},
6146 	{
6147 		"ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6148 		.u.insns_int = {
6149 			BPF_LD_IMM64(R0, 0xffffffff),
6150 			BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
6151 			BPF_EXIT_INSN(),
6152 		},
6153 		INTERNAL,
6154 		{ },
6155 		{ { 0, 0xffffffff } },
6156 	},
6157 	{
6158 		"ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
6159 		.u.insns_int = {
6160 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6161 			BPF_LD_IMM64(R3, 0x0000000000000000LL),
6162 			BPF_ALU64_IMM(BPF_AND, R2, 0x0),
6163 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6164 			BPF_MOV32_IMM(R0, 2),
6165 			BPF_EXIT_INSN(),
6166 			BPF_MOV32_IMM(R0, 1),
6167 			BPF_EXIT_INSN(),
6168 		},
6169 		INTERNAL,
6170 		{ },
6171 		{ { 0, 0x1 } },
6172 	},
6173 	{
6174 		"ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
6175 		.u.insns_int = {
6176 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6177 			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6178 			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6179 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6180 			BPF_MOV32_IMM(R0, 2),
6181 			BPF_EXIT_INSN(),
6182 			BPF_MOV32_IMM(R0, 1),
6183 			BPF_EXIT_INSN(),
6184 		},
6185 		INTERNAL,
6186 		{ },
6187 		{ { 0, 0x1 } },
6188 	},
6189 	{
6190 		"ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
6191 		.u.insns_int = {
6192 			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6193 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6194 			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6195 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6196 			BPF_MOV32_IMM(R0, 2),
6197 			BPF_EXIT_INSN(),
6198 			BPF_MOV32_IMM(R0, 1),
6199 			BPF_EXIT_INSN(),
6200 		},
6201 		INTERNAL,
6202 		{ },
6203 		{ { 0, 0x1 } },
6204 	},
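	/*
	 * For 64-bit ALU operations the 32-bit immediate is sign-extended,
	 * so 0x0f0f0f0f stays 0x000000000f0f0f0f while 0xf0f0f0f0 becomes
	 * 0xfffffffff0f0f0f0. The "Sign extension" tests cover both cases.
	 */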
6205 	{
6206 		"ALU64_AND_K: Sign extension 1",
6207 		.u.insns_int = {
6208 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6209 			BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
6210 			BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
6211 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6212 			BPF_MOV32_IMM(R0, 2),
6213 			BPF_EXIT_INSN(),
6214 			BPF_MOV32_IMM(R0, 1),
6215 			BPF_EXIT_INSN(),
6216 		},
6217 		INTERNAL,
6218 		{ },
6219 		{ { 0, 1 } }
6220 	},
6221 	{
6222 		"ALU64_AND_K: Sign extension 2",
6223 		.u.insns_int = {
6224 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6225 			BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
6226 			BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
6227 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6228 			BPF_MOV32_IMM(R0, 2),
6229 			BPF_EXIT_INSN(),
6230 			BPF_MOV32_IMM(R0, 1),
6231 			BPF_EXIT_INSN(),
6232 		},
6233 		INTERNAL,
6234 		{ },
6235 		{ { 0, 1 } }
6236 	},
6237 	/* BPF_ALU | BPF_OR | BPF_X */
6238 	{
6239 		"ALU_OR_X: 1 | 2 = 3",
6240 		.u.insns_int = {
6241 			BPF_LD_IMM64(R0, 1),
6242 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6243 			BPF_ALU32_REG(BPF_OR, R0, R1),
6244 			BPF_EXIT_INSN(),
6245 		},
6246 		INTERNAL,
6247 		{ },
6248 		{ { 0, 3 } },
6249 	},
6250 	{
6251 		"ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
6252 		.u.insns_int = {
6253 			BPF_LD_IMM64(R0, 0),
6254 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6255 			BPF_ALU32_REG(BPF_OR, R0, R1),
6256 			BPF_EXIT_INSN(),
6257 		},
6258 		INTERNAL,
6259 		{ },
6260 		{ { 0, 0xffffffff } },
6261 	},
6262 	{
6263 		"ALU64_OR_X: 1 | 2 = 3",
6264 		.u.insns_int = {
6265 			BPF_LD_IMM64(R0, 1),
6266 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6267 			BPF_ALU64_REG(BPF_OR, R0, R1),
6268 			BPF_EXIT_INSN(),
6269 		},
6270 		INTERNAL,
6271 		{ },
6272 		{ { 0, 3 } },
6273 	},
6274 	{
6275 		"ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
6276 		.u.insns_int = {
6277 			BPF_LD_IMM64(R0, 0),
6278 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6279 			BPF_ALU64_REG(BPF_OR, R0, R1),
6280 			BPF_EXIT_INSN(),
6281 		},
6282 		INTERNAL,
6283 		{ },
6284 		{ { 0, 0xffffffff } },
6285 	},
6286 	/* BPF_ALU | BPF_OR | BPF_K */
6287 	{
6288 		"ALU_OR_K: 1 | 2 = 3",
6289 		.u.insns_int = {
6290 			BPF_LD_IMM64(R0, 1),
6291 			BPF_ALU32_IMM(BPF_OR, R0, 2),
6292 			BPF_EXIT_INSN(),
6293 		},
6294 		INTERNAL,
6295 		{ },
6296 		{ { 0, 3 } },
6297 	},
6298 	{
6299 		"ALU_OR_K: 0 & 0xffffffff = 0xffffffff",
6300 		.u.insns_int = {
6301 			BPF_LD_IMM64(R0, 0),
6302 			BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
6303 			BPF_EXIT_INSN(),
6304 		},
6305 		INTERNAL,
6306 		{ },
6307 		{ { 0, 0xffffffff } },
6308 	},
6309 	{
6310 		"ALU_OR_K: Small immediate",
6311 		.u.insns_int = {
6312 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6313 			BPF_ALU32_IMM(BPF_OR, R0, 1),
6314 			BPF_EXIT_INSN(),
6315 		},
6316 		INTERNAL,
6317 		{ },
6318 		{ { 0, 0x01020305 } }
6319 	},
6320 	{
6321 		"ALU_OR_K: Large immediate",
6322 		.u.insns_int = {
6323 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6324 			BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
6325 			BPF_EXIT_INSN(),
6326 		},
6327 		INTERNAL,
6328 		{ },
6329 		{ { 0, 0xa1b2c3d4 } }
6330 	},
6331 	{
6332 		"ALU_OR_K: Zero extension",
6333 		.u.insns_int = {
6334 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6335 			BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
6336 			BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
6337 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6338 			BPF_MOV32_IMM(R0, 2),
6339 			BPF_EXIT_INSN(),
6340 			BPF_MOV32_IMM(R0, 1),
6341 			BPF_EXIT_INSN(),
6342 		},
6343 		INTERNAL,
6344 		{ },
6345 		{ { 0, 1 } }
6346 	},
6347 	{
6348 		"ALU64_OR_K: 1 | 2 = 3",
6349 		.u.insns_int = {
6350 			BPF_LD_IMM64(R0, 1),
6351 			BPF_ALU64_IMM(BPF_OR, R0, 2),
6352 			BPF_EXIT_INSN(),
6353 		},
6354 		INTERNAL,
6355 		{ },
6356 		{ { 0, 3 } },
6357 	},
6358 	{
6359 		"ALU64_OR_K: 0 & 0xffffffff = 0xffffffff",
6360 		.u.insns_int = {
6361 			BPF_LD_IMM64(R0, 0),
6362 			BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
6363 			BPF_EXIT_INSN(),
6364 		},
6365 		INTERNAL,
6366 		{ },
6367 		{ { 0, 0xffffffff } },
6368 	},
6369 	{
6370 		"ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
6371 		.u.insns_int = {
6372 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6373 			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6374 			BPF_ALU64_IMM(BPF_OR, R2, 0x0),
6375 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6376 			BPF_MOV32_IMM(R0, 2),
6377 			BPF_EXIT_INSN(),
6378 			BPF_MOV32_IMM(R0, 1),
6379 			BPF_EXIT_INSN(),
6380 		},
6381 		INTERNAL,
6382 		{ },
6383 		{ { 0, 0x1 } },
6384 	},
6385 	{
6386 		"ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
6387 		.u.insns_int = {
6388 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6389 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6390 			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6391 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6392 			BPF_MOV32_IMM(R0, 2),
6393 			BPF_EXIT_INSN(),
6394 			BPF_MOV32_IMM(R0, 1),
6395 			BPF_EXIT_INSN(),
6396 		},
6397 		INTERNAL,
6398 		{ },
6399 		{ { 0, 0x1 } },
6400 	},
6401 	{
6402 		"ALU64_OR_K: 0x000000000000000 | -1 = 0xffffffffffffffff",
6403 		.u.insns_int = {
6404 			BPF_LD_IMM64(R2, 0x0000000000000000LL),
6405 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6406 			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6407 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6408 			BPF_MOV32_IMM(R0, 2),
6409 			BPF_EXIT_INSN(),
6410 			BPF_MOV32_IMM(R0, 1),
6411 			BPF_EXIT_INSN(),
6412 		},
6413 		INTERNAL,
6414 		{ },
6415 		{ { 0, 0x1 } },
6416 	},
6417 	{
6418 		"ALU64_OR_K: Sign extension 1",
6419 		.u.insns_int = {
6420 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6421 			BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
6422 			BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
6423 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6424 			BPF_MOV32_IMM(R0, 2),
6425 			BPF_EXIT_INSN(),
6426 			BPF_MOV32_IMM(R0, 1),
6427 			BPF_EXIT_INSN(),
6428 		},
6429 		INTERNAL,
6430 		{ },
6431 		{ { 0, 1 } }
6432 	},
6433 	{
6434 		"ALU64_OR_K: Sign extension 2",
6435 		.u.insns_int = {
6436 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6437 			BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
6438 			BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
6439 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6440 			BPF_MOV32_IMM(R0, 2),
6441 			BPF_EXIT_INSN(),
6442 			BPF_MOV32_IMM(R0, 1),
6443 			BPF_EXIT_INSN(),
6444 		},
6445 		INTERNAL,
6446 		{ },
6447 		{ { 0, 1 } }
6448 	},
6449 	/* BPF_ALU | BPF_XOR | BPF_X */
6450 	{
6451 		"ALU_XOR_X: 5 ^ 6 = 3",
6452 		.u.insns_int = {
6453 			BPF_LD_IMM64(R0, 5),
6454 			BPF_ALU32_IMM(BPF_MOV, R1, 6),
6455 			BPF_ALU32_REG(BPF_XOR, R0, R1),
6456 			BPF_EXIT_INSN(),
6457 		},
6458 		INTERNAL,
6459 		{ },
6460 		{ { 0, 3 } },
6461 	},
6462 	{
6463 		"ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
6464 		.u.insns_int = {
6465 			BPF_LD_IMM64(R0, 1),
6466 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6467 			BPF_ALU32_REG(BPF_XOR, R0, R1),
6468 			BPF_EXIT_INSN(),
6469 		},
6470 		INTERNAL,
6471 		{ },
6472 		{ { 0, 0xfffffffe } },
6473 	},
6474 	{
6475 		"ALU64_XOR_X: 5 ^ 6 = 3",
6476 		.u.insns_int = {
6477 			BPF_LD_IMM64(R0, 5),
6478 			BPF_ALU32_IMM(BPF_MOV, R1, 6),
6479 			BPF_ALU64_REG(BPF_XOR, R0, R1),
6480 			BPF_EXIT_INSN(),
6481 		},
6482 		INTERNAL,
6483 		{ },
6484 		{ { 0, 3 } },
6485 	},
6486 	{
6487 		"ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
6488 		.u.insns_int = {
6489 			BPF_LD_IMM64(R0, 1),
6490 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6491 			BPF_ALU64_REG(BPF_XOR, R0, R1),
6492 			BPF_EXIT_INSN(),
6493 		},
6494 		INTERNAL,
6495 		{ },
6496 		{ { 0, 0xfffffffe } },
6497 	},
6498 	/* BPF_ALU | BPF_XOR | BPF_K */
6499 	{
6500 		"ALU_XOR_K: 5 ^ 6 = 3",
6501 		.u.insns_int = {
6502 			BPF_LD_IMM64(R0, 5),
6503 			BPF_ALU32_IMM(BPF_XOR, R0, 6),
6504 			BPF_EXIT_INSN(),
6505 		},
6506 		INTERNAL,
6507 		{ },
6508 		{ { 0, 3 } },
6509 	},
6510 	{
6511 		"ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6512 		.u.insns_int = {
6513 			BPF_LD_IMM64(R0, 1),
6514 			BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
6515 			BPF_EXIT_INSN(),
6516 		},
6517 		INTERNAL,
6518 		{ },
6519 		{ { 0, 0xfffffffe } },
6520 	},
6521 	{
6522 		"ALU_XOR_K: Small immediate",
6523 		.u.insns_int = {
6524 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6525 			BPF_ALU32_IMM(BPF_XOR, R0, 15),
6526 			BPF_EXIT_INSN(),
6527 		},
6528 		INTERNAL,
6529 		{ },
6530 		{ { 0, 0x0102030b } }
6531 	},
6532 	{
6533 		"ALU_XOR_K: Large immediate",
6534 		.u.insns_int = {
6535 			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6536 			BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
6537 			BPF_EXIT_INSN(),
6538 		},
6539 		INTERNAL,
6540 		{ },
6541 		{ { 0, 0x5e4d3c2b } }
6542 	},
6543 	{
6544 		"ALU_XOR_K: Zero extension",
6545 		.u.insns_int = {
6546 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6547 			BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
6548 			BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6549 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6550 			BPF_MOV32_IMM(R0, 2),
6551 			BPF_EXIT_INSN(),
6552 			BPF_MOV32_IMM(R0, 1),
6553 			BPF_EXIT_INSN(),
6554 		},
6555 		INTERNAL,
6556 		{ },
6557 		{ { 0, 1 } }
6558 	},
6559 	{
6560 		"ALU64_XOR_K: 5 ^ 6 = 3",
6561 		.u.insns_int = {
6562 			BPF_LD_IMM64(R0, 5),
6563 			BPF_ALU64_IMM(BPF_XOR, R0, 6),
6564 			BPF_EXIT_INSN(),
6565 		},
6566 		INTERNAL,
6567 		{ },
6568 		{ { 0, 3 } },
6569 	},
6570 	{
6571 		"ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6572 		.u.insns_int = {
6573 			BPF_LD_IMM64(R0, 1),
6574 			BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
6575 			BPF_EXIT_INSN(),
6576 		},
6577 		INTERNAL,
6578 		{ },
6579 		{ { 0, 0xfffffffe } },
6580 	},
6581 	{
6582 		"ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
6583 		.u.insns_int = {
6584 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6585 			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6586 			BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
6587 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6588 			BPF_MOV32_IMM(R0, 2),
6589 			BPF_EXIT_INSN(),
6590 			BPF_MOV32_IMM(R0, 1),
6591 			BPF_EXIT_INSN(),
6592 		},
6593 		INTERNAL,
6594 		{ },
6595 		{ { 0, 0x1 } },
6596 	},
6597 	{
6598 		"ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
6599 		.u.insns_int = {
6600 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6601 			BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
6602 			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6603 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6604 			BPF_MOV32_IMM(R0, 2),
6605 			BPF_EXIT_INSN(),
6606 			BPF_MOV32_IMM(R0, 1),
6607 			BPF_EXIT_INSN(),
6608 		},
6609 		INTERNAL,
6610 		{ },
6611 		{ { 0, 0x1 } },
6612 	},
6613 	{
6614 		"ALU64_XOR_K: 0x000000000000000 ^ -1 = 0xffffffffffffffff",
6615 		.u.insns_int = {
6616 			BPF_LD_IMM64(R2, 0x0000000000000000LL),
6617 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6618 			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6619 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6620 			BPF_MOV32_IMM(R0, 2),
6621 			BPF_EXIT_INSN(),
6622 			BPF_MOV32_IMM(R0, 1),
6623 			BPF_EXIT_INSN(),
6624 		},
6625 		INTERNAL,
6626 		{ },
6627 		{ { 0, 0x1 } },
6628 	},
6629 	{
6630 		"ALU64_XOR_K: Sign extension 1",
6631 		.u.insns_int = {
6632 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6633 			BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
6634 			BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
6635 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6636 			BPF_MOV32_IMM(R0, 2),
6637 			BPF_EXIT_INSN(),
6638 			BPF_MOV32_IMM(R0, 1),
6639 			BPF_EXIT_INSN(),
6640 		},
6641 		INTERNAL,
6642 		{ },
6643 		{ { 0, 1 } }
6644 	},
6645 	{
6646 		"ALU64_XOR_K: Sign extension 2",
6647 		.u.insns_int = {
6648 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6649 			BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
6650 			BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6651 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6652 			BPF_MOV32_IMM(R0, 2),
6653 			BPF_EXIT_INSN(),
6654 			BPF_MOV32_IMM(R0, 1),
6655 			BPF_EXIT_INSN(),
6656 		},
6657 		INTERNAL,
6658 		{ },
6659 		{ { 0, 1 } }
6660 	},
6661 	/* BPF_ALU | BPF_LSH | BPF_X */
6662 	{
6663 		"ALU_LSH_X: 1 << 1 = 2",
6664 		.u.insns_int = {
6665 			BPF_LD_IMM64(R0, 1),
6666 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6667 			BPF_ALU32_REG(BPF_LSH, R0, R1),
6668 			BPF_EXIT_INSN(),
6669 		},
6670 		INTERNAL,
6671 		{ },
6672 		{ { 0, 2 } },
6673 	},
6674 	{
6675 		"ALU_LSH_X: 1 << 31 = 0x80000000",
6676 		.u.insns_int = {
6677 			BPF_LD_IMM64(R0, 1),
6678 			BPF_ALU32_IMM(BPF_MOV, R1, 31),
6679 			BPF_ALU32_REG(BPF_LSH, R0, R1),
6680 			BPF_EXIT_INSN(),
6681 		},
6682 		INTERNAL,
6683 		{ },
6684 		{ { 0, 0x80000000 } },
6685 	},
6686 	{
6687 		"ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
6688 		.u.insns_int = {
6689 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6690 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6691 			BPF_ALU32_REG(BPF_LSH, R0, R1),
6692 			BPF_EXIT_INSN(),
6693 		},
6694 		INTERNAL,
6695 		{ },
6696 		{ { 0, 0x45678000 } }
6697 	},
6698 	{
6699 		"ALU64_LSH_X: 1 << 1 = 2",
6700 		.u.insns_int = {
6701 			BPF_LD_IMM64(R0, 1),
6702 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6703 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6704 			BPF_EXIT_INSN(),
6705 		},
6706 		INTERNAL,
6707 		{ },
6708 		{ { 0, 2 } },
6709 	},
6710 	{
6711 		"ALU64_LSH_X: 1 << 31 = 0x80000000",
6712 		.u.insns_int = {
6713 			BPF_LD_IMM64(R0, 1),
6714 			BPF_ALU32_IMM(BPF_MOV, R1, 31),
6715 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6716 			BPF_EXIT_INSN(),
6717 		},
6718 		INTERNAL,
6719 		{ },
6720 		{ { 0, 0x80000000 } },
6721 	},
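	/*
	 * The 64-bit shift tests read the result back in two halves: the
	 * "low word" variants return R0 directly, while the "high word"
	 * variants append a logical right shift by 32 so the upper half of
	 * the 64-bit result ends up in the returned 32-bit value.
	 */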
6722 	{
6723 		"ALU64_LSH_X: Shift < 32, low word",
6724 		.u.insns_int = {
6725 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6726 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6727 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6728 			BPF_EXIT_INSN(),
6729 		},
6730 		INTERNAL,
6731 		{ },
6732 		{ { 0, 0xbcdef000 } }
6733 	},
6734 	{
6735 		"ALU64_LSH_X: Shift < 32, high word",
6736 		.u.insns_int = {
6737 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6738 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6739 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6740 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6741 			BPF_EXIT_INSN(),
6742 		},
6743 		INTERNAL,
6744 		{ },
6745 		{ { 0, 0x3456789a } }
6746 	},
6747 	{
6748 		"ALU64_LSH_X: Shift > 32, low word",
6749 		.u.insns_int = {
6750 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6751 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
6752 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6753 			BPF_EXIT_INSN(),
6754 		},
6755 		INTERNAL,
6756 		{ },
6757 		{ { 0, 0 } }
6758 	},
6759 	{
6760 		"ALU64_LSH_X: Shift > 32, high word",
6761 		.u.insns_int = {
6762 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6763 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
6764 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6765 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6766 			BPF_EXIT_INSN(),
6767 		},
6768 		INTERNAL,
6769 		{ },
6770 		{ { 0, 0x9abcdef0 } }
6771 	},
6772 	{
6773 		"ALU64_LSH_X: Shift == 32, low word",
6774 		.u.insns_int = {
6775 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6776 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
6777 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6778 			BPF_EXIT_INSN(),
6779 		},
6780 		INTERNAL,
6781 		{ },
6782 		{ { 0, 0 } }
6783 	},
6784 	{
6785 		"ALU64_LSH_X: Shift == 32, high word",
6786 		.u.insns_int = {
6787 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6788 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
6789 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6790 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6791 			BPF_EXIT_INSN(),
6792 		},
6793 		INTERNAL,
6794 		{ },
6795 		{ { 0, 0x89abcdef } }
6796 	},
6797 	{
6798 		"ALU64_LSH_X: Zero shift, low word",
6799 		.u.insns_int = {
6800 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6801 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
6802 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6803 			BPF_EXIT_INSN(),
6804 		},
6805 		INTERNAL,
6806 		{ },
6807 		{ { 0, 0x89abcdef } }
6808 	},
6809 	{
6810 		"ALU64_LSH_X: Zero shift, high word",
6811 		.u.insns_int = {
6812 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6813 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
6814 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6815 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6816 			BPF_EXIT_INSN(),
6817 		},
6818 		INTERNAL,
6819 		{ },
6820 		{ { 0, 0x01234567 } }
6821 	},
6822 	/* BPF_ALU | BPF_LSH | BPF_K */
6823 	{
6824 		"ALU_LSH_K: 1 << 1 = 2",
6825 		.u.insns_int = {
6826 			BPF_LD_IMM64(R0, 1),
6827 			BPF_ALU32_IMM(BPF_LSH, R0, 1),
6828 			BPF_EXIT_INSN(),
6829 		},
6830 		INTERNAL,
6831 		{ },
6832 		{ { 0, 2 } },
6833 	},
6834 	{
6835 		"ALU_LSH_K: 1 << 31 = 0x80000000",
6836 		.u.insns_int = {
6837 			BPF_LD_IMM64(R0, 1),
6838 			BPF_ALU32_IMM(BPF_LSH, R0, 31),
6839 			BPF_EXIT_INSN(),
6840 		},
6841 		INTERNAL,
6842 		{ },
6843 		{ { 0, 0x80000000 } },
6844 	},
6845 	{
6846 		"ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
6847 		.u.insns_int = {
6848 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6849 			BPF_ALU32_IMM(BPF_LSH, R0, 12),
6850 			BPF_EXIT_INSN(),
6851 		},
6852 		INTERNAL,
6853 		{ },
6854 		{ { 0, 0x45678000 } }
6855 	},
6856 	{
6857 		"ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
6858 		.u.insns_int = {
6859 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6860 			BPF_ALU32_IMM(BPF_LSH, R0, 0),
6861 			BPF_EXIT_INSN(),
6862 		},
6863 		INTERNAL,
6864 		{ },
6865 		{ { 0, 0x12345678 } }
6866 	},
6867 	{
6868 		"ALU64_LSH_K: 1 << 1 = 2",
6869 		.u.insns_int = {
6870 			BPF_LD_IMM64(R0, 1),
6871 			BPF_ALU64_IMM(BPF_LSH, R0, 1),
6872 			BPF_EXIT_INSN(),
6873 		},
6874 		INTERNAL,
6875 		{ },
6876 		{ { 0, 2 } },
6877 	},
6878 	{
6879 		"ALU64_LSH_K: 1 << 31 = 0x80000000",
6880 		.u.insns_int = {
6881 			BPF_LD_IMM64(R0, 1),
6882 			BPF_ALU64_IMM(BPF_LSH, R0, 31),
6883 			BPF_EXIT_INSN(),
6884 		},
6885 		INTERNAL,
6886 		{ },
6887 		{ { 0, 0x80000000 } },
6888 	},
6889 	{
6890 		"ALU64_LSH_K: Shift < 32, low word",
6891 		.u.insns_int = {
6892 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6893 			BPF_ALU64_IMM(BPF_LSH, R0, 12),
6894 			BPF_EXIT_INSN(),
6895 		},
6896 		INTERNAL,
6897 		{ },
6898 		{ { 0, 0xbcdef000 } }
6899 	},
6900 	{
6901 		"ALU64_LSH_K: Shift < 32, high word",
6902 		.u.insns_int = {
6903 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6904 			BPF_ALU64_IMM(BPF_LSH, R0, 12),
6905 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6906 			BPF_EXIT_INSN(),
6907 		},
6908 		INTERNAL,
6909 		{ },
6910 		{ { 0, 0x3456789a } }
6911 	},
6912 	{
6913 		"ALU64_LSH_K: Shift > 32, low word",
6914 		.u.insns_int = {
6915 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6916 			BPF_ALU64_IMM(BPF_LSH, R0, 36),
6917 			BPF_EXIT_INSN(),
6918 		},
6919 		INTERNAL,
6920 		{ },
6921 		{ { 0, 0 } }
6922 	},
6923 	{
6924 		"ALU64_LSH_K: Shift > 32, high word",
6925 		.u.insns_int = {
6926 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6927 			BPF_ALU64_IMM(BPF_LSH, R0, 36),
6928 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6929 			BPF_EXIT_INSN(),
6930 		},
6931 		INTERNAL,
6932 		{ },
6933 		{ { 0, 0x9abcdef0 } }
6934 	},
6935 	{
6936 		"ALU64_LSH_K: Shift == 32, low word",
6937 		.u.insns_int = {
6938 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6939 			BPF_ALU64_IMM(BPF_LSH, R0, 32),
6940 			BPF_EXIT_INSN(),
6941 		},
6942 		INTERNAL,
6943 		{ },
6944 		{ { 0, 0 } }
6945 	},
6946 	{
6947 		"ALU64_LSH_K: Shift == 32, high word",
6948 		.u.insns_int = {
6949 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6950 			BPF_ALU64_IMM(BPF_LSH, R0, 32),
6951 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
6952 			BPF_EXIT_INSN(),
6953 		},
6954 		INTERNAL,
6955 		{ },
6956 		{ { 0, 0x89abcdef } }
6957 	},
6958 	{
6959 		"ALU64_LSH_K: Zero shift",
6960 		.u.insns_int = {
6961 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6962 			BPF_ALU64_IMM(BPF_LSH, R0, 0),
6963 			BPF_EXIT_INSN(),
6964 		},
6965 		INTERNAL,
6966 		{ },
6967 		{ { 0, 0x89abcdef } }
6968 	},
6969 	/* BPF_ALU | BPF_RSH | BPF_X */
6970 	{
6971 		"ALU_RSH_X: 2 >> 1 = 1",
6972 		.u.insns_int = {
6973 			BPF_LD_IMM64(R0, 2),
6974 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6975 			BPF_ALU32_REG(BPF_RSH, R0, R1),
6976 			BPF_EXIT_INSN(),
6977 		},
6978 		INTERNAL,
6979 		{ },
6980 		{ { 0, 1 } },
6981 	},
6982 	{
6983 		"ALU_RSH_X: 0x80000000 >> 31 = 1",
6984 		.u.insns_int = {
6985 			BPF_LD_IMM64(R0, 0x80000000),
6986 			BPF_ALU32_IMM(BPF_MOV, R1, 31),
6987 			BPF_ALU32_REG(BPF_RSH, R0, R1),
6988 			BPF_EXIT_INSN(),
6989 		},
6990 		INTERNAL,
6991 		{ },
6992 		{ { 0, 1 } },
6993 	},
6994 	{
6995 		"ALU_RSH_X: 0x12345678 >> 20 = 0x123",
6996 		.u.insns_int = {
6997 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6998 			BPF_ALU32_IMM(BPF_MOV, R1, 20),
6999 			BPF_ALU32_REG(BPF_RSH, R0, R1),
7000 			BPF_EXIT_INSN(),
7001 		},
7002 		INTERNAL,
7003 		{ },
7004 		{ { 0, 0x123 } }
7005 	},
7006 	{
7007 		"ALU64_RSH_X: 2 >> 1 = 1",
7008 		.u.insns_int = {
7009 			BPF_LD_IMM64(R0, 2),
7010 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
7011 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7012 			BPF_EXIT_INSN(),
7013 		},
7014 		INTERNAL,
7015 		{ },
7016 		{ { 0, 1 } },
7017 	},
7018 	{
7019 		"ALU64_RSH_X: 0x80000000 >> 31 = 1",
7020 		.u.insns_int = {
7021 			BPF_LD_IMM64(R0, 0x80000000),
7022 			BPF_ALU32_IMM(BPF_MOV, R1, 31),
7023 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7024 			BPF_EXIT_INSN(),
7025 		},
7026 		INTERNAL,
7027 		{ },
7028 		{ { 0, 1 } },
7029 	},
7030 	{
7031 		"ALU64_RSH_X: Shift < 32, low word",
7032 		.u.insns_int = {
7033 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7034 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7035 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7036 			BPF_EXIT_INSN(),
7037 		},
7038 		INTERNAL,
7039 		{ },
7040 		{ { 0, 0x56789abc } }
7041 	},
7042 	{
7043 		"ALU64_RSH_X: Shift < 32, high word",
7044 		.u.insns_int = {
7045 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7046 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7047 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7048 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7049 			BPF_EXIT_INSN(),
7050 		},
7051 		INTERNAL,
7052 		{ },
7053 		{ { 0, 0x00081234 } }
7054 	},
7055 	{
7056 		"ALU64_RSH_X: Shift > 32, low word",
7057 		.u.insns_int = {
7058 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7059 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7060 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7061 			BPF_EXIT_INSN(),
7062 		},
7063 		INTERNAL,
7064 		{ },
7065 		{ { 0, 0x08123456 } }
7066 	},
7067 	{
7068 		"ALU64_RSH_X: Shift > 32, high word",
7069 		.u.insns_int = {
7070 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7071 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7072 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7073 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7074 			BPF_EXIT_INSN(),
7075 		},
7076 		INTERNAL,
7077 		{ },
7078 		{ { 0, 0 } }
7079 	},
7080 	{
7081 		"ALU64_RSH_X: Shift == 32, low word",
7082 		.u.insns_int = {
7083 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7084 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7085 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7086 			BPF_EXIT_INSN(),
7087 		},
7088 		INTERNAL,
7089 		{ },
7090 		{ { 0, 0x81234567 } }
7091 	},
7092 	{
7093 		"ALU64_RSH_X: Shift == 32, high word",
7094 		.u.insns_int = {
7095 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7096 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7097 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7098 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7099 			BPF_EXIT_INSN(),
7100 		},
7101 		INTERNAL,
7102 		{ },
7103 		{ { 0, 0 } }
7104 	},
7105 	{
7106 		"ALU64_RSH_X: Zero shift, low word",
7107 		.u.insns_int = {
7108 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7109 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7110 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7111 			BPF_EXIT_INSN(),
7112 		},
7113 		INTERNAL,
7114 		{ },
7115 		{ { 0, 0x89abcdef } }
7116 	},
7117 	{
7118 		"ALU64_RSH_X: Zero shift, high word",
7119 		.u.insns_int = {
7120 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7121 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7122 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7123 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7124 			BPF_EXIT_INSN(),
7125 		},
7126 		INTERNAL,
7127 		{ },
7128 		{ { 0, 0x81234567 } }
7129 	},
7130 	/* BPF_ALU | BPF_RSH | BPF_K */
7131 	{
7132 		"ALU_RSH_K: 2 >> 1 = 1",
7133 		.u.insns_int = {
7134 			BPF_LD_IMM64(R0, 2),
7135 			BPF_ALU32_IMM(BPF_RSH, R0, 1),
7136 			BPF_EXIT_INSN(),
7137 		},
7138 		INTERNAL,
7139 		{ },
7140 		{ { 0, 1 } },
7141 	},
7142 	{
7143 		"ALU_RSH_K: 0x80000000 >> 31 = 1",
7144 		.u.insns_int = {
7145 			BPF_LD_IMM64(R0, 0x80000000),
7146 			BPF_ALU32_IMM(BPF_RSH, R0, 31),
7147 			BPF_EXIT_INSN(),
7148 		},
7149 		INTERNAL,
7150 		{ },
7151 		{ { 0, 1 } },
7152 	},
7153 	{
7154 		"ALU_RSH_K: 0x12345678 >> 20 = 0x123",
7155 		.u.insns_int = {
7156 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7157 			BPF_ALU32_IMM(BPF_RSH, R0, 20),
7158 			BPF_EXIT_INSN(),
7159 		},
7160 		INTERNAL,
7161 		{ },
7162 		{ { 0, 0x123 } }
7163 	},
7164 	{
7165 		"ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
7166 		.u.insns_int = {
7167 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7168 			BPF_ALU32_IMM(BPF_RSH, R0, 0),
7169 			BPF_EXIT_INSN(),
7170 		},
7171 		INTERNAL,
7172 		{ },
7173 		{ { 0, 0x12345678 } }
7174 	},
7175 	{
7176 		"ALU64_RSH_K: 2 >> 1 = 1",
7177 		.u.insns_int = {
7178 			BPF_LD_IMM64(R0, 2),
7179 			BPF_ALU64_IMM(BPF_RSH, R0, 1),
7180 			BPF_EXIT_INSN(),
7181 		},
7182 		INTERNAL,
7183 		{ },
7184 		{ { 0, 1 } },
7185 	},
7186 	{
7187 		"ALU64_RSH_K: 0x80000000 >> 31 = 1",
7188 		.u.insns_int = {
7189 			BPF_LD_IMM64(R0, 0x80000000),
7190 			BPF_ALU64_IMM(BPF_RSH, R0, 31),
7191 			BPF_EXIT_INSN(),
7192 		},
7193 		INTERNAL,
7194 		{ },
7195 		{ { 0, 1 } },
7196 	},
7197 	{
7198 		"ALU64_RSH_K: Shift < 32, low word",
7199 		.u.insns_int = {
7200 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7201 			BPF_ALU64_IMM(BPF_RSH, R0, 12),
7202 			BPF_EXIT_INSN(),
7203 		},
7204 		INTERNAL,
7205 		{ },
7206 		{ { 0, 0x56789abc } }
7207 	},
7208 	{
7209 		"ALU64_RSH_K: Shift < 32, high word",
7210 		.u.insns_int = {
7211 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7212 			BPF_ALU64_IMM(BPF_RSH, R0, 12),
7213 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7214 			BPF_EXIT_INSN(),
7215 		},
7216 		INTERNAL,
7217 		{ },
7218 		{ { 0, 0x00081234 } }
7219 	},
7220 	{
7221 		"ALU64_RSH_K: Shift > 32, low word",
7222 		.u.insns_int = {
7223 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7224 			BPF_ALU64_IMM(BPF_RSH, R0, 36),
7225 			BPF_EXIT_INSN(),
7226 		},
7227 		INTERNAL,
7228 		{ },
7229 		{ { 0, 0x08123456 } }
7230 	},
7231 	{
7232 		"ALU64_RSH_K: Shift > 32, high word",
7233 		.u.insns_int = {
7234 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7235 			BPF_ALU64_IMM(BPF_RSH, R0, 36),
7236 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7237 			BPF_EXIT_INSN(),
7238 		},
7239 		INTERNAL,
7240 		{ },
7241 		{ { 0, 0 } }
7242 	},
7243 	{
7244 		"ALU64_RSH_K: Shift == 32, low word",
7245 		.u.insns_int = {
7246 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7247 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7248 			BPF_EXIT_INSN(),
7249 		},
7250 		INTERNAL,
7251 		{ },
7252 		{ { 0, 0x81234567 } }
7253 	},
7254 	{
7255 		"ALU64_RSH_K: Shift == 32, high word",
7256 		.u.insns_int = {
7257 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7258 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7259 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7260 			BPF_EXIT_INSN(),
7261 		},
7262 		INTERNAL,
7263 		{ },
7264 		{ { 0, 0 } }
7265 	},
7266 	{
7267 		"ALU64_RSH_K: Zero shift",
7268 		.u.insns_int = {
7269 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7270 			BPF_ALU64_IMM(BPF_RSH, R0, 0),
7271 			BPF_EXIT_INSN(),
7272 		},
7273 		INTERNAL,
7274 		{ },
7275 		{ { 0, 0x89abcdef } }
7276 	},
7277 	/* BPF_ALU | BPF_ARSH | BPF_X */
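	/*
	 * BPF_ARSH is an arithmetic right shift: the sign bit is replicated
	 * into the vacated positions, so -1234 >> 7 rounds toward negative
	 * infinity and yields -10, unlike the logical BPF_RSH.
	 */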
7278 	{
7279 		"ALU32_ARSH_X: -1234 >> 7 = -10",
7280 		.u.insns_int = {
7281 			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7282 			BPF_ALU32_IMM(BPF_MOV, R1, 7),
7283 			BPF_ALU32_REG(BPF_ARSH, R0, R1),
7284 			BPF_EXIT_INSN(),
7285 		},
7286 		INTERNAL,
7287 		{ },
7288 		{ { 0, -10 } }
7289 	},
7290 	{
7291 		"ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7292 		.u.insns_int = {
7293 			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7294 			BPF_ALU32_IMM(BPF_MOV, R1, 40),
7295 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7296 			BPF_EXIT_INSN(),
7297 		},
7298 		INTERNAL,
7299 		{ },
7300 		{ { 0, 0xffff00ff } },
7301 	},
7302 	{
7303 		"ALU64_ARSH_X: Shift < 32, low word",
7304 		.u.insns_int = {
7305 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7306 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7307 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7308 			BPF_EXIT_INSN(),
7309 		},
7310 		INTERNAL,
7311 		{ },
7312 		{ { 0, 0x56789abc } }
7313 	},
7314 	{
7315 		"ALU64_ARSH_X: Shift < 32, high word",
7316 		.u.insns_int = {
7317 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7318 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7319 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7320 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7321 			BPF_EXIT_INSN(),
7322 		},
7323 		INTERNAL,
7324 		{ },
7325 		{ { 0, 0xfff81234 } }
7326 	},
7327 	{
7328 		"ALU64_ARSH_X: Shift > 32, low word",
7329 		.u.insns_int = {
7330 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7331 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7332 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7333 			BPF_EXIT_INSN(),
7334 		},
7335 		INTERNAL,
7336 		{ },
7337 		{ { 0, 0xf8123456 } }
7338 	},
7339 	{
7340 		"ALU64_ARSH_X: Shift > 32, high word",
7341 		.u.insns_int = {
7342 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7343 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7344 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7345 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7346 			BPF_EXIT_INSN(),
7347 		},
7348 		INTERNAL,
7349 		{ },
7350 		{ { 0, -1 } }
7351 	},
7352 	{
7353 		"ALU64_ARSH_X: Shift == 32, low word",
7354 		.u.insns_int = {
7355 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7356 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7357 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7358 			BPF_EXIT_INSN(),
7359 		},
7360 		INTERNAL,
7361 		{ },
7362 		{ { 0, 0x81234567 } }
7363 	},
7364 	{
7365 		"ALU64_ARSH_X: Shift == 32, high word",
7366 		.u.insns_int = {
7367 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7368 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7369 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7370 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7371 			BPF_EXIT_INSN(),
7372 		},
7373 		INTERNAL,
7374 		{ },
7375 		{ { 0, -1 } }
7376 	},
7377 	{
7378 		"ALU64_ARSH_X: Zero shift, low word",
7379 		.u.insns_int = {
7380 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7381 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7382 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7383 			BPF_EXIT_INSN(),
7384 		},
7385 		INTERNAL,
7386 		{ },
7387 		{ { 0, 0x89abcdef } }
7388 	},
7389 	{
7390 		"ALU64_ARSH_X: Zero shift, high word",
7391 		.u.insns_int = {
7392 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7393 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7394 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7395 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7396 			BPF_EXIT_INSN(),
7397 		},
7398 		INTERNAL,
7399 		{ },
7400 		{ { 0, 0x81234567 } }
7401 	},
7402 	/* BPF_ALU | BPF_ARSH | BPF_K */
7403 	{
7404 		"ALU32_ARSH_K: -1234 >> 7 = -10",
7405 		.u.insns_int = {
7406 			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7407 			BPF_ALU32_IMM(BPF_ARSH, R0, 7),
7408 			BPF_EXIT_INSN(),
7409 		},
7410 		INTERNAL,
7411 		{ },
7412 		{ { 0, -10 } }
7413 	},
7414 	{
7415 		"ALU32_ARSH_K: -1234 >> 0 = -1234",
7416 		.u.insns_int = {
7417 			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7418 			BPF_ALU32_IMM(BPF_ARSH, R0, 0),
7419 			BPF_EXIT_INSN(),
7420 		},
7421 		INTERNAL,
7422 		{ },
7423 		{ { 0, -1234 } }
7424 	},
7425 	{
7426 		"ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7427 		.u.insns_int = {
7428 			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7429 			BPF_ALU64_IMM(BPF_ARSH, R0, 40),
7430 			BPF_EXIT_INSN(),
7431 		},
7432 		INTERNAL,
7433 		{ },
7434 		{ { 0, 0xffff00ff } },
7435 	},
7436 	{
7437 		"ALU64_ARSH_K: Shift < 32, low word",
7438 		.u.insns_int = {
7439 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7440 			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7441 			BPF_EXIT_INSN(),
7442 		},
7443 		INTERNAL,
7444 		{ },
7445 		{ { 0, 0x56789abc } }
7446 	},
7447 	{
7448 		"ALU64_ARSH_K: Shift < 32, high word",
7449 		.u.insns_int = {
7450 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7451 			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7452 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7453 			BPF_EXIT_INSN(),
7454 		},
7455 		INTERNAL,
7456 		{ },
7457 		{ { 0, 0xfff81234 } }
7458 	},
7459 	{
7460 		"ALU64_ARSH_K: Shift > 32, low word",
7461 		.u.insns_int = {
7462 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7463 			BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7464 			BPF_EXIT_INSN(),
7465 		},
7466 		INTERNAL,
7467 		{ },
7468 		{ { 0, 0xf8123456 } }
7469 	},
7470 	{
7471 		"ALU64_ARSH_K: Shift > 32, high word",
7472 		.u.insns_int = {
7473 			BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
7474 			BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7475 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7476 			BPF_EXIT_INSN(),
7477 		},
7478 		INTERNAL,
7479 		{ },
7480 		{ { 0, -1 } }
7481 	},
7482 	{
7483 		"ALU64_ARSH_K: Shift == 32, low word",
7484 		.u.insns_int = {
7485 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7486 			BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7487 			BPF_EXIT_INSN(),
7488 		},
7489 		INTERNAL,
7490 		{ },
7491 		{ { 0, 0x81234567 } }
7492 	},
7493 	{
7494 		"ALU64_ARSH_K: Shift == 32, high word",
7495 		.u.insns_int = {
7496 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7497 			BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7498 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7499 			BPF_EXIT_INSN(),
7500 		},
7501 		INTERNAL,
7502 		{ },
7503 		{ { 0, -1 } }
7504 	},
7505 	{
7506 		"ALU64_ARSH_K: Zero shift",
7507 		.u.insns_int = {
7508 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7509 			BPF_ALU64_IMM(BPF_ARSH, R0, 0),
7510 			BPF_EXIT_INSN(),
7511 		},
7512 		INTERNAL,
7513 		{ },
7514 		{ { 0, 0x89abcdef } }
7515 	},
7516 	/* BPF_ALU | BPF_NEG */
7517 	{
7518 		"ALU_NEG: -(3) = -3",
7519 		.u.insns_int = {
7520 			BPF_ALU32_IMM(BPF_MOV, R0, 3),
7521 			BPF_ALU32_IMM(BPF_NEG, R0, 0),
7522 			BPF_EXIT_INSN(),
7523 		},
7524 		INTERNAL,
7525 		{ },
7526 		{ { 0, -3 } },
7527 	},
7528 	{
7529 		"ALU_NEG: -(-3) = 3",
7530 		.u.insns_int = {
7531 			BPF_ALU32_IMM(BPF_MOV, R0, -3),
7532 			BPF_ALU32_IMM(BPF_NEG, R0, 0),
7533 			BPF_EXIT_INSN(),
7534 		},
7535 		INTERNAL,
7536 		{ },
7537 		{ { 0, 3 } },
7538 	},
7539 	{
7540 		"ALU64_NEG: -(3) = -3",
7541 		.u.insns_int = {
7542 			BPF_LD_IMM64(R0, 3),
7543 			BPF_ALU64_IMM(BPF_NEG, R0, 0),
7544 			BPF_EXIT_INSN(),
7545 		},
7546 		INTERNAL,
7547 		{ },
7548 		{ { 0, -3 } },
7549 	},
7550 	{
7551 		"ALU64_NEG: -(-3) = 3",
7552 		.u.insns_int = {
7553 			BPF_LD_IMM64(R0, -3),
7554 			BPF_ALU64_IMM(BPF_NEG, R0, 0),
7555 			BPF_EXIT_INSN(),
7556 		},
7557 		INTERNAL,
7558 		{ },
7559 		{ { 0, 3 } },
7560 	},
7561 	/* BPF_ALU | BPF_END | BPF_FROM_BE */
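	/*
	 * BPF_ENDIAN converts between host byte order and the named order.
	 * The expected values are wrapped in cpu_to_be*()/cpu_to_le*() so
	 * that each test passes on both big- and little-endian hosts; for
	 * the 16- and 32-bit widths the upper bits of R0 are cleared.
	 */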
7562 	{
7563 		"ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
7564 		.u.insns_int = {
7565 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7566 			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7567 			BPF_EXIT_INSN(),
7568 		},
7569 		INTERNAL,
7570 		{ },
7571 		{ { 0, cpu_to_be16(0xcdef) } },
7572 	},
7573 	{
7574 		"ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
7575 		.u.insns_int = {
7576 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7577 			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7578 			BPF_ALU64_REG(BPF_MOV, R1, R0),
7579 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7580 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7581 			BPF_EXIT_INSN(),
7582 		},
7583 		INTERNAL,
7584 		{ },
7585 		{ { 0, cpu_to_be32(0x89abcdef) } },
7586 	},
7587 	{
7588 		"ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
7589 		.u.insns_int = {
7590 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7591 			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7592 			BPF_EXIT_INSN(),
7593 		},
7594 		INTERNAL,
7595 		{ },
7596 		{ { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
7597 	},
7598 	{
7599 		"ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
7600 		.u.insns_int = {
7601 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7602 			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7603 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7604 			BPF_EXIT_INSN(),
7605 		},
7606 		INTERNAL,
7607 		{ },
7608 		{ { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
7609 	},
7610 	/* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
7611 	{
7612 		"ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
7613 		.u.insns_int = {
7614 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7615 			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7616 			BPF_EXIT_INSN(),
7617 		},
7618 		INTERNAL,
7619 		{ },
7620 		{ { 0, cpu_to_be16(0x3210) } },
7621 	},
7622 	{
7623 		"ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
7624 		.u.insns_int = {
7625 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7626 			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7627 			BPF_ALU64_REG(BPF_MOV, R1, R0),
7628 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7629 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7630 			BPF_EXIT_INSN(),
7631 		},
7632 		INTERNAL,
7633 		{ },
7634 		{ { 0, cpu_to_be32(0x76543210) } },
7635 	},
7636 	{
7637 		"ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
7638 		.u.insns_int = {
7639 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7640 			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7641 			BPF_EXIT_INSN(),
7642 		},
7643 		INTERNAL,
7644 		{ },
7645 		{ { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
7646 	},
7647 	{
7648 		"ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
7649 		.u.insns_int = {
7650 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7651 			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7652 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7653 			BPF_EXIT_INSN(),
7654 		},
7655 		INTERNAL,
7656 		{ },
7657 		{ { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
7658 	},
7659 	/* BPF_ALU | BPF_END | BPF_FROM_LE */
7660 	{
7661 		"ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
7662 		.u.insns_int = {
7663 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7664 			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7665 			BPF_EXIT_INSN(),
7666 		},
7667 		INTERNAL,
7668 		{ },
7669 		{ { 0, cpu_to_le16(0xcdef) } },
7670 	},
7671 	{
7672 		"ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
7673 		.u.insns_int = {
7674 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7675 			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7676 			BPF_ALU64_REG(BPF_MOV, R1, R0),
7677 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7678 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7679 			BPF_EXIT_INSN(),
7680 		},
7681 		INTERNAL,
7682 		{ },
7683 		{ { 0, cpu_to_le32(0x89abcdef) } },
7684 	},
7685 	{
7686 		"ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
7687 		.u.insns_int = {
7688 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7689 			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7690 			BPF_EXIT_INSN(),
7691 		},
7692 		INTERNAL,
7693 		{ },
7694 		{ { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
7695 	},
7696 	{
7697 		"ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
7698 		.u.insns_int = {
7699 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7700 			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7701 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7702 			BPF_EXIT_INSN(),
7703 		},
7704 		INTERNAL,
7705 		{ },
7706 		{ { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
7707 	},
7708 	/* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
7709 	{
7710 		"ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
7711 		.u.insns_int = {
7712 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7713 			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7714 			BPF_EXIT_INSN(),
7715 		},
7716 		INTERNAL,
7717 		{ },
7718 		{ { 0, cpu_to_le16(0x3210) } },
7719 	},
7720 	{
7721 		"ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
7722 		.u.insns_int = {
7723 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7724 			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7725 			BPF_ALU64_REG(BPF_MOV, R1, R0),
7726 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7727 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7728 			BPF_EXIT_INSN(),
7729 		},
7730 		INTERNAL,
7731 		{ },
7732 		{ { 0, cpu_to_le32(0x76543210) } },
7733 	},
7734 	{
7735 		"ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
7736 		.u.insns_int = {
7737 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7738 			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7739 			BPF_EXIT_INSN(),
7740 		},
7741 		INTERNAL,
7742 		{ },
7743 		{ { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
7744 	},
7745 	{
7746 		"ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
7747 		.u.insns_int = {
7748 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7749 			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7750 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7751 			BPF_EXIT_INSN(),
7752 		},
7753 		INTERNAL,
7754 		{ },
7755 		{ { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
7756 	},
7757 	/* BPF_LDX_MEM B/H/W/DW */
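	/*
	 * The narrow load tests store a doubleword at R10 - 8 and then load
	 * a smaller unit from it. The #ifdef __BIG_ENDIAN offset selects the
	 * least significant bytes of that doubleword on either host byte
	 * order, so the expected value in R2 is endian-independent.
	 */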
7758 	{
7759 		"BPF_LDX_MEM | BPF_B",
7760 		.u.insns_int = {
7761 			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7762 			BPF_LD_IMM64(R2, 0x0000000000000008ULL),
7763 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7764 #ifdef __BIG_ENDIAN
7765 			BPF_LDX_MEM(BPF_B, R0, R10, -1),
7766 #else
7767 			BPF_LDX_MEM(BPF_B, R0, R10, -8),
7768 #endif
7769 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7770 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7771 			BPF_EXIT_INSN(),
7772 		},
7773 		INTERNAL,
7774 		{ },
7775 		{ { 0, 0 } },
7776 		.stack_depth = 8,
7777 	},
7778 	{
7779 		"BPF_LDX_MEM | BPF_B, MSB set",
7780 		.u.insns_int = {
7781 			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7782 			BPF_LD_IMM64(R2, 0x0000000000000088ULL),
7783 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7784 #ifdef __BIG_ENDIAN
7785 			BPF_LDX_MEM(BPF_B, R0, R10, -1),
7786 #else
7787 			BPF_LDX_MEM(BPF_B, R0, R10, -8),
7788 #endif
7789 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7790 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7791 			BPF_EXIT_INSN(),
7792 		},
7793 		INTERNAL,
7794 		{ },
7795 		{ { 0, 0 } },
7796 		.stack_depth = 8,
7797 	},
7798 	{
7799 		"BPF_LDX_MEM | BPF_H",
7800 		.u.insns_int = {
7801 			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7802 			BPF_LD_IMM64(R2, 0x0000000000000708ULL),
7803 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7804 #ifdef __BIG_ENDIAN
7805 			BPF_LDX_MEM(BPF_H, R0, R10, -2),
7806 #else
7807 			BPF_LDX_MEM(BPF_H, R0, R10, -8),
7808 #endif
7809 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7810 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7811 			BPF_EXIT_INSN(),
7812 		},
7813 		INTERNAL,
7814 		{ },
7815 		{ { 0, 0 } },
7816 		.stack_depth = 8,
7817 	},
7818 	{
7819 		"BPF_LDX_MEM | BPF_H, MSB set",
7820 		.u.insns_int = {
7821 			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7822 			BPF_LD_IMM64(R2, 0x0000000000008788ULL),
7823 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7824 #ifdef __BIG_ENDIAN
7825 			BPF_LDX_MEM(BPF_H, R0, R10, -2),
7826 #else
7827 			BPF_LDX_MEM(BPF_H, R0, R10, -8),
7828 #endif
7829 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7830 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7831 			BPF_EXIT_INSN(),
7832 		},
7833 		INTERNAL,
7834 		{ },
7835 		{ { 0, 0 } },
7836 		.stack_depth = 8,
7837 	},
7838 	{
7839 		"BPF_LDX_MEM | BPF_W",
7840 		.u.insns_int = {
7841 			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
7842 			BPF_LD_IMM64(R2, 0x0000000005060708ULL),
7843 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7844 #ifdef __BIG_ENDIAN
7845 			BPF_LDX_MEM(BPF_W, R0, R10, -4),
7846 #else
7847 			BPF_LDX_MEM(BPF_W, R0, R10, -8),
7848 #endif
7849 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7850 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7851 			BPF_EXIT_INSN(),
7852 		},
7853 		INTERNAL,
7854 		{ },
7855 		{ { 0, 0 } },
7856 		.stack_depth = 8,
7857 	},
7858 	{
7859 		"BPF_LDX_MEM | BPF_W, MSB set",
7860 		.u.insns_int = {
7861 			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
7862 			BPF_LD_IMM64(R2, 0x0000000085868788ULL),
7863 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7864 #ifdef __BIG_ENDIAN
7865 			BPF_LDX_MEM(BPF_W, R0, R10, -4),
7866 #else
7867 			BPF_LDX_MEM(BPF_W, R0, R10, -8),
7868 #endif
7869 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7870 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7871 			BPF_EXIT_INSN(),
7872 		},
7873 		INTERNAL,
7874 		{ },
7875 		{ { 0, 0 } },
7876 		.stack_depth = 8,
7877 	},
7878 	/* BPF_STX_MEM B/H/W/DW */
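	/*
	 * Mirror image of the BPF_LDX_MEM tests above: a doubleword is
	 * stored at R10-8, a narrower store overwrites its least
	 * significant bytes (offset chosen per endianness), and the whole
	 * doubleword is loaded back to verify that the neighbouring
	 * bytes were left untouched.
	 */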
7879 	{
7880 		"BPF_STX_MEM | BPF_B",
7881 		.u.insns_int = {
7882 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7883 			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
7884 			BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
7885 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7886 #ifdef __BIG_ENDIAN
7887 			BPF_STX_MEM(BPF_B, R10, R2, -1),
7888 #else
7889 			BPF_STX_MEM(BPF_B, R10, R2, -8),
7890 #endif
7891 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7892 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7893 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7894 			BPF_EXIT_INSN(),
7895 		},
7896 		INTERNAL,
7897 		{ },
7898 		{ { 0, 0 } },
7899 		.stack_depth = 8,
7900 	},
7901 	{
7902 		"BPF_STX_MEM | BPF_B, MSB set",
7903 		.u.insns_int = {
7904 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7905 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
7906 			BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
7907 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7908 #ifdef __BIG_ENDIAN
7909 			BPF_STX_MEM(BPF_B, R10, R2, -1),
7910 #else
7911 			BPF_STX_MEM(BPF_B, R10, R2, -8),
7912 #endif
7913 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7914 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7915 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7916 			BPF_EXIT_INSN(),
7917 		},
7918 		INTERNAL,
7919 		{ },
7920 		{ { 0, 0 } },
7921 		.stack_depth = 8,
7922 	},
7923 	{
7924 		"BPF_STX_MEM | BPF_H",
7925 		.u.insns_int = {
7926 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7927 			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
7928 			BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
7929 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7930 #ifdef __BIG_ENDIAN
7931 			BPF_STX_MEM(BPF_H, R10, R2, -2),
7932 #else
7933 			BPF_STX_MEM(BPF_H, R10, R2, -8),
7934 #endif
7935 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7936 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7937 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7938 			BPF_EXIT_INSN(),
7939 		},
7940 		INTERNAL,
7941 		{ },
7942 		{ { 0, 0 } },
7943 		.stack_depth = 8,
7944 	},
7945 	{
7946 		"BPF_STX_MEM | BPF_H, MSB set",
7947 		.u.insns_int = {
7948 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7949 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
7950 			BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
7951 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7952 #ifdef __BIG_ENDIAN
7953 			BPF_STX_MEM(BPF_H, R10, R2, -2),
7954 #else
7955 			BPF_STX_MEM(BPF_H, R10, R2, -8),
7956 #endif
7957 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7958 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7959 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7960 			BPF_EXIT_INSN(),
7961 		},
7962 		INTERNAL,
7963 		{ },
7964 		{ { 0, 0 } },
7965 		.stack_depth = 8,
7966 	},
7967 	{
7968 		"BPF_STX_MEM | BPF_W",
7969 		.u.insns_int = {
7970 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7971 			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
7972 			BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
7973 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7974 #ifdef __BIG_ENDIAN
7975 			BPF_STX_MEM(BPF_W, R10, R2, -4),
7976 #else
7977 			BPF_STX_MEM(BPF_W, R10, R2, -8),
7978 #endif
7979 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
7980 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
7981 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
7982 			BPF_EXIT_INSN(),
7983 		},
7984 		INTERNAL,
7985 		{ },
7986 		{ { 0, 0 } },
7987 		.stack_depth = 8,
7988 	},
7989 	{
7990 		"BPF_STX_MEM | BPF_W, MSB set",
7991 		.u.insns_int = {
7992 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
7993 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
7994 			BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
7995 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
7996 #ifdef __BIG_ENDIAN
7997 			BPF_STX_MEM(BPF_W, R10, R2, -4),
7998 #else
7999 			BPF_STX_MEM(BPF_W, R10, R2, -8),
8000 #endif
8001 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8002 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8003 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8004 			BPF_EXIT_INSN(),
8005 		},
8006 		INTERNAL,
8007 		{ },
8008 		{ { 0, 0 } },
8009 		.stack_depth = 8,
8010 	},
8011 	/* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
8012 	{
8013 		"ST_MEM_B: Store/Load byte: max negative",
8014 		.u.insns_int = {
8015 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8016 			BPF_ST_MEM(BPF_B, R10, -40, 0xff),
8017 			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8018 			BPF_EXIT_INSN(),
8019 		},
8020 		INTERNAL,
8021 		{ },
8022 		{ { 0, 0xff } },
8023 		.stack_depth = 40,
8024 	},
8025 	{
8026 		"ST_MEM_B: Store/Load byte: max positive",
8027 		.u.insns_int = {
8028 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8029 			BPF_ST_MEM(BPF_B, R10, -40, 0x7f),
8030 			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8031 			BPF_EXIT_INSN(),
8032 		},
8033 		INTERNAL,
8034 		{ },
8035 		{ { 0, 0x7f } },
8036 		.stack_depth = 40,
8037 	},
8038 	{
8039 		"STX_MEM_B: Store/Load byte: max negative",
8040 		.u.insns_int = {
8041 			BPF_LD_IMM64(R0, 0),
8042 			BPF_LD_IMM64(R1, 0xffLL),
8043 			BPF_STX_MEM(BPF_B, R10, R1, -40),
8044 			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8045 			BPF_EXIT_INSN(),
8046 		},
8047 		INTERNAL,
8048 		{ },
8049 		{ { 0, 0xff } },
8050 		.stack_depth = 40,
8051 	},
8052 	{
8053 		"ST_MEM_H: Store/Load half word: max negative",
8054 		.u.insns_int = {
8055 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8056 			BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
8057 			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8058 			BPF_EXIT_INSN(),
8059 		},
8060 		INTERNAL,
8061 		{ },
8062 		{ { 0, 0xffff } },
8063 		.stack_depth = 40,
8064 	},
8065 	{
8066 		"ST_MEM_H: Store/Load half word: max positive",
8067 		.u.insns_int = {
8068 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8069 			BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
8070 			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8071 			BPF_EXIT_INSN(),
8072 		},
8073 		INTERNAL,
8074 		{ },
8075 		{ { 0, 0x7fff } },
8076 		.stack_depth = 40,
8077 	},
8078 	{
8079 		"STX_MEM_H: Store/Load half word: max negative",
8080 		.u.insns_int = {
8081 			BPF_LD_IMM64(R0, 0),
8082 			BPF_LD_IMM64(R1, 0xffffLL),
8083 			BPF_STX_MEM(BPF_H, R10, R1, -40),
8084 			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8085 			BPF_EXIT_INSN(),
8086 		},
8087 		INTERNAL,
8088 		{ },
8089 		{ { 0, 0xffff } },
8090 		.stack_depth = 40,
8091 	},
8092 	{
8093 		"ST_MEM_W: Store/Load word: max negative",
8094 		.u.insns_int = {
8095 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8096 			BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
8097 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8098 			BPF_EXIT_INSN(),
8099 		},
8100 		INTERNAL,
8101 		{ },
8102 		{ { 0, 0xffffffff } },
8103 		.stack_depth = 40,
8104 	},
8105 	{
8106 		"ST_MEM_W: Store/Load word: max positive",
8107 		.u.insns_int = {
8108 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8109 			BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
8110 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8111 			BPF_EXIT_INSN(),
8112 		},
8113 		INTERNAL,
8114 		{ },
8115 		{ { 0, 0x7fffffff } },
8116 		.stack_depth = 40,
8117 	},
8118 	{
8119 		"STX_MEM_W: Store/Load word: max negative",
8120 		.u.insns_int = {
8121 			BPF_LD_IMM64(R0, 0),
8122 			BPF_LD_IMM64(R1, 0xffffffffLL),
8123 			BPF_STX_MEM(BPF_W, R10, R1, -40),
8124 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8125 			BPF_EXIT_INSN(),
8126 		},
8127 		INTERNAL,
8128 		{ },
8129 		{ { 0, 0xffffffff } },
8130 		.stack_depth = 40,
8131 	},
8132 	{
8133 		"ST_MEM_DW: Store/Load double word: max negative",
8134 		.u.insns_int = {
8135 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8136 			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8137 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8138 			BPF_EXIT_INSN(),
8139 		},
8140 		INTERNAL,
8141 		{ },
8142 		{ { 0, 0xffffffff } },
8143 		.stack_depth = 40,
8144 	},
8145 	{
8146 		"ST_MEM_DW: Store/Load double word: max negative 2",
8147 		.u.insns_int = {
8148 			BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
8149 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
8150 			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8151 			BPF_LDX_MEM(BPF_DW, R2, R10, -40),
8152 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
8153 			BPF_MOV32_IMM(R0, 2),
8154 			BPF_EXIT_INSN(),
8155 			BPF_MOV32_IMM(R0, 1),
8156 			BPF_EXIT_INSN(),
8157 		},
8158 		INTERNAL,
8159 		{ },
8160 		{ { 0, 0x1 } },
8161 		.stack_depth = 40,
8162 	},
8163 	{
8164 		"ST_MEM_DW: Store/Load double word: max positive",
8165 		.u.insns_int = {
8166 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8167 			BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
8168 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8169 			BPF_EXIT_INSN(),
8170 		},
8171 		INTERNAL,
8172 		{ },
8173 		{ { 0, 0x7fffffff } },
8174 		.stack_depth = 40,
8175 	},
8176 	{
8177 		"STX_MEM_DW: Store/Load double word: max negative",
8178 		.u.insns_int = {
8179 			BPF_LD_IMM64(R0, 0),
8180 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
8181 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8182 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8183 			BPF_EXIT_INSN(),
8184 		},
8185 		INTERNAL,
8186 		{ },
8187 		{ { 0, 0xffffffff } },
8188 		.stack_depth = 40,
8189 	},
8190 	{
8191 		"STX_MEM_DW: Store double word: first word in memory",
8192 		.u.insns_int = {
8193 			BPF_LD_IMM64(R0, 0),
8194 			BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8195 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8196 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8197 			BPF_EXIT_INSN(),
8198 		},
8199 		INTERNAL,
8200 		{ },
8201 #ifdef __BIG_ENDIAN
8202 		{ { 0, 0x01234567 } },
8203 #else
8204 		{ { 0, 0x89abcdef } },
8205 #endif
8206 		.stack_depth = 40,
8207 	},
8208 	{
8209 		"STX_MEM_DW: Store double word: second word in memory",
8210 		.u.insns_int = {
8211 			BPF_LD_IMM64(R0, 0),
8212 			BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8213 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8214 			BPF_LDX_MEM(BPF_W, R0, R10, -36),
8215 			BPF_EXIT_INSN(),
8216 		},
8217 		INTERNAL,
8218 		{ },
8219 #ifdef __BIG_ENDIAN
8220 		{ { 0, 0x89abcdef } },
8221 #else
8222 		{ { 0, 0x01234567 } },
8223 #endif
8224 		.stack_depth = 40,
8225 	},
8226 	/* BPF_STX | BPF_ATOMIC | BPF_W/DW */
8227 	{
8228 		"STX_XADD_W: X + 1 + 1 + 1 + ...",
8229 		{ },
8230 		INTERNAL,
8231 		{ },
8232 		{ { 0, 4134 } },
8233 		.fill_helper = bpf_fill_stxw,
8234 	},
8235 	{
8236 		"STX_XADD_DW: X + 1 + 1 + 1 + ...",
8237 		{ },
8238 		INTERNAL,
8239 		{ },
8240 		{ { 0, 4134 } },
8241 		.fill_helper = bpf_fill_stxdw,
8242 	},
8243 	/*
8244 	 * Exhaustive tests of atomic operation variants.
8245 	 * Individual tests are expanded from template macros for all
8246 	 * combinations of ALU operation, word size and fetching.
8247 	 */
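/*
 * For BPF_W tests, BPF_ATOMIC_POISON() sets the upper 32 bits of the
 * source operand to a marker value, so that the 32-bit atomic operations
 * are exercised with a "dirty" upper half in the source register.
 */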
8248 #define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
8249 
8250 #define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result)	\
8251 {									\
8252 	"BPF_ATOMIC | " #width ", " #op ": Test: "			\
8253 		#old " " #logic " " #update " = " #result,		\
8254 	.u.insns_int = {						\
8255 		BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)),	\
8256 		BPF_ST_MEM(width, R10, -40, old),			\
8257 		BPF_ATOMIC_OP(width, op, R10, R5, -40),			\
8258 		BPF_LDX_MEM(width, R0, R10, -40),			\
8259 		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8260 		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8261 		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8262 		BPF_EXIT_INSN(),					\
8263 	},								\
8264 	INTERNAL,							\
8265 	{ },								\
8266 	{ { 0, result } },						\
8267 	.stack_depth = 40,						\
8268 }
8269 #define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result)	\
8270 {									\
8271 	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: "	\
8272 		#old " " #logic " " #update " = " #result,		\
8273 	.u.insns_int = {						\
8274 		BPF_ALU64_REG(BPF_MOV, R1, R10),			\
8275 		BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)),	\
8276 		BPF_ST_MEM(width, R10, -40, old),			\
8277 		BPF_ATOMIC_OP(width, op, R10, R0, -40),			\
8278 		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
8279 		BPF_ALU64_REG(BPF_SUB, R0, R1),				\
8280 		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8281 		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8282 		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8283 		BPF_EXIT_INSN(),					\
8284 	},								\
8285 	INTERNAL,							\
8286 	{ },								\
8287 	{ { 0, 0 } },							\
8288 	.stack_depth = 40,						\
8289 }
8290 #define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result)	\
8291 {									\
8292 	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: "	\
8293 		#old " " #logic " " #update " = " #result,		\
8294 	.u.insns_int = {						\
8295 		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
8296 		BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)),	\
8297 		BPF_ST_MEM(width, R10, -40, old),			\
8298 		BPF_ATOMIC_OP(width, op, R10, R1, -40),			\
8299 		BPF_ALU64_REG(BPF_SUB, R0, R10),			\
8300 		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8301 		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8302 		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8303 		BPF_EXIT_INSN(),					\
8304 	},								\
8305 	INTERNAL,                                                       \
8306 	{ },                                                            \
8307 	{ { 0, 0 } },                                                   \
8308 	.stack_depth = 40,                                              \
8309 }
8310 #define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result)	\
8311 {									\
8312 	"BPF_ATOMIC | " #width ", " #op ": Test fetch: "		\
8313 		#old " " #logic " " #update " = " #result,		\
8314 	.u.insns_int = {						\
8315 		BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)),	\
8316 		BPF_ST_MEM(width, R10, -40, old),			\
8317 		BPF_ATOMIC_OP(width, op, R10, R3, -40),			\
8318 		BPF_ALU32_REG(BPF_MOV, R0, R3),                         \
8319 		BPF_EXIT_INSN(),					\
8320 	},								\
8321 	INTERNAL,                                                       \
8322 	{ },                                                            \
8323 	{ { 0, (op) & BPF_FETCH ? old : update } },			\
8324 	.stack_depth = 40,                                              \
8325 }
8326 	/* BPF_ATOMIC | BPF_W: BPF_ADD */
8327 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8328 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8329 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8330 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
8331 	/* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
8332 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8333 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8334 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8335 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8336 	/* BPF_ATOMIC | BPF_DW: BPF_ADD */
8337 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8338 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8339 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8340 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
8341 	/* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
8342 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8343 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8344 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8345 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
8346 	/* BPF_ATOMIC | BPF_W: BPF_AND */
8347 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8348 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8349 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8350 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
8351 	/* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
8352 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8353 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8354 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8355 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8356 	/* BPF_ATOMIC | BPF_DW: BPF_AND */
8357 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8358 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8359 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8360 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
8361 	/* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
8362 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8363 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8364 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8365 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
8366 	/* BPF_ATOMIC | BPF_W: BPF_OR */
8367 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8368 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8369 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8370 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
8371 	/* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
8372 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8373 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8374 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8375 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8376 	/* BPF_ATOMIC | BPF_DW: BPF_OR */
8377 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8378 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8379 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8380 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
8381 	/* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
8382 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8383 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8384 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8385 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
8386 	/* BPF_ATOMIC | BPF_W: BPF_XOR */
8387 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8388 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8389 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8390 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8391 	/* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
8392 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8393 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8394 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8395 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8396 	/* BPF_ATOMIC | BPF_DW: BPF_XOR */
8397 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8398 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8399 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8400 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
8401 	/* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
8402 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8403 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8404 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8405 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
8406 	/* BPF_ATOMIC | BPF_W: BPF_XCHG */
8407 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8408 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8409 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8410 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8411 	/* BPF_ATOMIC | BPF_DW: BPF_XCHG */
8412 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8413 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8414 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8415 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
8416 #undef BPF_ATOMIC_POISON
8417 #undef BPF_ATOMIC_OP_TEST1
8418 #undef BPF_ATOMIC_OP_TEST2
8419 #undef BPF_ATOMIC_OP_TEST3
8420 #undef BPF_ATOMIC_OP_TEST4
8421 	/* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
8422 	{
8423 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
8424 		.u.insns_int = {
8425 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8426 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8427 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8428 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8429 			BPF_EXIT_INSN(),
8430 		},
8431 		INTERNAL,
8432 		{ },
8433 		{ { 0, 0x01234567 } },
8434 		.stack_depth = 40,
8435 	},
8436 	{
8437 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
8438 		.u.insns_int = {
8439 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8440 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8441 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8442 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8443 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8444 			BPF_EXIT_INSN(),
8445 		},
8446 		INTERNAL,
8447 		{ },
8448 		{ { 0, 0x89abcdef } },
8449 		.stack_depth = 40,
8450 	},
8451 	{
8452 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
8453 		.u.insns_int = {
8454 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8455 			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
8456 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8457 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8458 			BPF_EXIT_INSN(),
8459 		},
8460 		INTERNAL,
8461 		{ },
8462 		{ { 0, 0x01234567 } },
8463 		.stack_depth = 40,
8464 	},
8465 	{
8466 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
8467 		.u.insns_int = {
8468 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8469 			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
8470 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8471 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8472 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8473 			BPF_EXIT_INSN(),
8474 		},
8475 		INTERNAL,
8476 		{ },
8477 		{ { 0, 0x01234567 } },
8478 		.stack_depth = 40,
8479 	},
8480 	{
8481 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
8482 		.u.insns_int = {
8483 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
8484 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
8485 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
8486 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8487 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
8488 			BPF_ALU32_REG(BPF_MOV, R0, R3),
8489 			BPF_EXIT_INSN(),
8490 		},
8491 		INTERNAL,
8492 		{ },
8493 		{ { 0, 0x89abcdef } },
8494 		.stack_depth = 40,
8495 	},
8496 	/* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
8497 	{
8498 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
8499 		.u.insns_int = {
8500 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8501 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8502 			BPF_ALU64_REG(BPF_MOV, R0, R1),
8503 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8504 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8505 			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8506 			BPF_ALU64_REG(BPF_SUB, R0, R1),
8507 			BPF_EXIT_INSN(),
8508 		},
8509 		INTERNAL,
8510 		{ },
8511 		{ { 0, 0 } },
8512 		.stack_depth = 40,
8513 	},
8514 	{
8515 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
8516 		.u.insns_int = {
8517 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8518 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8519 			BPF_ALU64_REG(BPF_MOV, R0, R1),
8520 			BPF_STX_MEM(BPF_DW, R10, R0, -40),
8521 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8522 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8523 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8524 			BPF_ALU64_REG(BPF_SUB, R0, R2),
8525 			BPF_EXIT_INSN(),
8526 		},
8527 		INTERNAL,
8528 		{ },
8529 		{ { 0, 0 } },
8530 		.stack_depth = 40,
8531 	},
8532 	{
8533 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
8534 		.u.insns_int = {
8535 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8536 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8537 			BPF_ALU64_REG(BPF_MOV, R0, R1),
8538 			BPF_ALU64_IMM(BPF_ADD, R0, 1),
8539 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8540 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8541 			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8542 			BPF_ALU64_REG(BPF_SUB, R0, R1),
8543 			BPF_EXIT_INSN(),
8544 		},
8545 		INTERNAL,
8546 		{ },
8547 		{ { 0, 0 } },
8548 		.stack_depth = 40,
8549 	},
8550 	{
8551 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
8552 		.u.insns_int = {
8553 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8554 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8555 			BPF_ALU64_REG(BPF_MOV, R0, R1),
8556 			BPF_ALU64_IMM(BPF_ADD, R0, 1),
8557 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8558 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8559 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8560 			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8561 			BPF_ALU64_REG(BPF_SUB, R0, R1),
8562 			BPF_EXIT_INSN(),
8563 		},
8564 		INTERNAL,
8565 		{ },
8566 		{ { 0, 0 } },
8567 		.stack_depth = 40,
8568 	},
8569 	{
8570 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
8571 		.u.insns_int = {
8572 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
8573 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
8574 			BPF_ALU64_REG(BPF_MOV, R0, R1),
8575 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8576 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
8577 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8578 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8579 			BPF_ALU64_REG(BPF_SUB, R0, R2),
8580 			BPF_EXIT_INSN(),
8581 		},
8582 		INTERNAL,
8583 		{ },
8584 		{ { 0, 0 } },
8585 		.stack_depth = 40,
8586 	},
8587 	/* BPF_JMP32 | BPF_JEQ | BPF_K */
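	/*
	 * Most of the two-branch JMP32 tests below follow the same pattern:
	 * the first conditional jump must not be taken and the second one
	 * must be, otherwise execution falls through to the MOV that clears
	 * R0 and the test returns 0 instead of the expected value.
	 */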
8588 	{
8589 		"JMP32_JEQ_K: Small immediate",
8590 		.u.insns_int = {
8591 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8592 			BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
8593 			BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
8594 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8595 			BPF_EXIT_INSN(),
8596 		},
8597 		INTERNAL,
8598 		{ },
8599 		{ { 0, 123 } }
8600 	},
8601 	{
8602 		"JMP32_JEQ_K: Large immediate",
8603 		.u.insns_int = {
8604 			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
8605 			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
8606 			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
8607 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8608 			BPF_EXIT_INSN(),
8609 		},
8610 		INTERNAL,
8611 		{ },
8612 		{ { 0, 12345678 } }
8613 	},
8614 	{
8615 		"JMP32_JEQ_K: negative immediate",
8616 		.u.insns_int = {
8617 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
8618 			BPF_JMP32_IMM(BPF_JEQ, R0,  123, 1),
8619 			BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
8620 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8621 			BPF_EXIT_INSN(),
8622 		},
8623 		INTERNAL,
8624 		{ },
8625 		{ { 0, -123 } }
8626 	},
8627 	/* BPF_JMP32 | BPF_JEQ | BPF_X */
8628 	{
8629 		"JMP32_JEQ_X",
8630 		.u.insns_int = {
8631 			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
8632 			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
8633 			BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
8634 			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
8635 			BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
8636 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8637 			BPF_EXIT_INSN(),
8638 		},
8639 		INTERNAL,
8640 		{ },
8641 		{ { 0, 1234 } }
8642 	},
8643 	/* BPF_JMP32 | BPF_JNE | BPF_K */
8644 	{
8645 		"JMP32_JNE_K: Small immediate",
8646 		.u.insns_int = {
8647 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8648 			BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
8649 			BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
8650 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8651 			BPF_EXIT_INSN(),
8652 		},
8653 		INTERNAL,
8654 		{ },
8655 		{ { 0, 123 } }
8656 	},
8657 	{
8658 		"JMP32_JNE_K: Large immediate",
8659 		.u.insns_int = {
8660 			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
8661 			BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
8662 			BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
8663 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8664 			BPF_EXIT_INSN(),
8665 		},
8666 		INTERNAL,
8667 		{ },
8668 		{ { 0, 12345678 } }
8669 	},
8670 	{
8671 		"JMP32_JNE_K: negative immediate",
8672 		.u.insns_int = {
8673 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
8674 			BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
8675 			BPF_JMP32_IMM(BPF_JNE, R0,  123, 1),
8676 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8677 			BPF_EXIT_INSN(),
8678 		},
8679 		INTERNAL,
8680 		{ },
8681 		{ { 0, -123 } }
8682 	},
8683 	/* BPF_JMP32 | BPF_JNE | BPF_X */
8684 	{
8685 		"JMP32_JNE_X",
8686 		.u.insns_int = {
8687 			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
8688 			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
8689 			BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
8690 			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
8691 			BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
8692 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8693 			BPF_EXIT_INSN(),
8694 		},
8695 		INTERNAL,
8696 		{ },
8697 		{ { 0, 1234 } }
8698 	},
8699 	/* BPF_JMP32 | BPF_JSET | BPF_K */
8700 	{
8701 		"JMP32_JSET_K: Small immediate",
8702 		.u.insns_int = {
8703 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8704 			BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
8705 			BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
8706 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8707 			BPF_EXIT_INSN(),
8708 		},
8709 		INTERNAL,
8710 		{ },
8711 		{ { 0, 1 } }
8712 	},
8713 	{
8714 		"JMP32_JSET_K: Large immediate",
8715 		.u.insns_int = {
8716 			BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
8717 			BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
8718 			BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
8719 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8720 			BPF_EXIT_INSN(),
8721 		},
8722 		INTERNAL,
8723 		{ },
8724 		{ { 0, 0x40000000 } }
8725 	},
8726 	{
8727 		"JMP32_JSET_K: negative immediate",
8728 		.u.insns_int = {
8729 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
8730 			BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
8731 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8732 			BPF_EXIT_INSN(),
8733 		},
8734 		INTERNAL,
8735 		{ },
8736 		{ { 0, -123 } }
8737 	},
8738 	/* BPF_JMP32 | BPF_JSET | BPF_X */
8739 	{
8740 		"JMP32_JSET_X",
8741 		.u.insns_int = {
8742 			BPF_ALU32_IMM(BPF_MOV, R0, 8),
8743 			BPF_ALU32_IMM(BPF_MOV, R1, 7),
8744 			BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
8745 			BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
8746 			BPF_JMP32_REG(BPF_JSET, R0, R1, 1),
8747 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8748 			BPF_EXIT_INSN(),
8749 		},
8750 		INTERNAL,
8751 		{ },
8752 		{ { 0, 8 } }
8753 	},
8754 	/* BPF_JMP32 | BPF_JGT | BPF_K */
8755 	{
8756 		"JMP32_JGT_K: Small immediate",
8757 		.u.insns_int = {
8758 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8759 			BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
8760 			BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
8761 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8762 			BPF_EXIT_INSN(),
8763 		},
8764 		INTERNAL,
8765 		{ },
8766 		{ { 0, 123 } }
8767 	},
8768 	{
8769 		"JMP32_JGT_K: Large immediate",
8770 		.u.insns_int = {
8771 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8772 			BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
8773 			BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
8774 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8775 			BPF_EXIT_INSN(),
8776 		},
8777 		INTERNAL,
8778 		{ },
8779 		{ { 0, 0xfffffffe } }
8780 	},
8781 	/* BPF_JMP32 | BPF_JGT | BPF_X */
8782 	{
8783 		"JMP32_JGT_X",
8784 		.u.insns_int = {
8785 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8786 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8787 			BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
8788 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8789 			BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
8790 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8791 			BPF_EXIT_INSN(),
8792 		},
8793 		INTERNAL,
8794 		{ },
8795 		{ { 0, 0xfffffffe } }
8796 	},
8797 	/* BPF_JMP32 | BPF_JGE | BPF_K */
8798 	{
8799 		"JMP32_JGE_K: Small immediate",
8800 		.u.insns_int = {
8801 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8802 			BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
8803 			BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
8804 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8805 			BPF_EXIT_INSN(),
8806 		},
8807 		INTERNAL,
8808 		{ },
8809 		{ { 0, 123 } }
8810 	},
8811 	{
8812 		"JMP32_JGE_K: Large immediate",
8813 		.u.insns_int = {
8814 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8815 			BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
8816 			BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
8817 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8818 			BPF_EXIT_INSN(),
8819 		},
8820 		INTERNAL,
8821 		{ },
8822 		{ { 0, 0xfffffffe } }
8823 	},
8824 	/* BPF_JMP32 | BPF_JGE | BPF_X */
8825 	{
8826 		"JMP32_JGE_X",
8827 		.u.insns_int = {
8828 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8829 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8830 			BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
8831 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
8832 			BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
8833 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8834 			BPF_EXIT_INSN(),
8835 		},
8836 		INTERNAL,
8837 		{ },
8838 		{ { 0, 0xfffffffe } }
8839 	},
8840 	/* BPF_JMP32 | BPF_JLT | BPF_K */
8841 	{
8842 		"JMP32_JLT_K: Small immediate",
8843 		.u.insns_int = {
8844 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8845 			BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
8846 			BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
8847 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8848 			BPF_EXIT_INSN(),
8849 		},
8850 		INTERNAL,
8851 		{ },
8852 		{ { 0, 123 } }
8853 	},
8854 	{
8855 		"JMP32_JLT_K: Large immediate",
8856 		.u.insns_int = {
8857 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8858 			BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
8859 			BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
8860 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8861 			BPF_EXIT_INSN(),
8862 		},
8863 		INTERNAL,
8864 		{ },
8865 		{ { 0, 0xfffffffe } }
8866 	},
8867 	/* BPF_JMP32 | BPF_JLT | BPF_X */
8868 	{
8869 		"JMP32_JLT_X",
8870 		.u.insns_int = {
8871 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8872 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8873 			BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
8874 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
8875 			BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
8876 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8877 			BPF_EXIT_INSN(),
8878 		},
8879 		INTERNAL,
8880 		{ },
8881 		{ { 0, 0xfffffffe } }
8882 	},
8883 	/* BPF_JMP32 | BPF_JLE | BPF_K */
8884 	{
8885 		"JMP32_JLE_K: Small immediate",
8886 		.u.insns_int = {
8887 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
8888 			BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
8889 			BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
8890 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8891 			BPF_EXIT_INSN(),
8892 		},
8893 		INTERNAL,
8894 		{ },
8895 		{ { 0, 123 } }
8896 	},
8897 	{
8898 		"JMP32_JLE_K: Large immediate",
8899 		.u.insns_int = {
8900 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8901 			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
8902 			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
8903 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8904 			BPF_EXIT_INSN(),
8905 		},
8906 		INTERNAL,
8907 		{ },
8908 		{ { 0, 0xfffffffe } }
8909 	},
8910 	/* BPF_JMP32 | BPF_JLE | BPF_X */
8911 	{
8912 		"JMP32_JLE_X",
8913 		.u.insns_int = {
8914 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
8915 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
8916 			BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
8917 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
8918 			BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
8919 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8920 			BPF_EXIT_INSN(),
8921 		},
8922 		INTERNAL,
8923 		{ },
8924 		{ { 0, 0xfffffffe } }
8925 	},
8926 	/* BPF_JMP32 | BPF_JSGT | BPF_K */
8927 	{
8928 		"JMP32_JSGT_K: Small immediate",
8929 		.u.insns_int = {
8930 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
8931 			BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
8932 			BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
8933 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8934 			BPF_EXIT_INSN(),
8935 		},
8936 		INTERNAL,
8937 		{ },
8938 		{ { 0, -123 } }
8939 	},
8940 	{
8941 		"JMP32_JSGT_K: Large immediate",
8942 		.u.insns_int = {
8943 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
8944 			BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
8945 			BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
8946 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8947 			BPF_EXIT_INSN(),
8948 		},
8949 		INTERNAL,
8950 		{ },
8951 		{ { 0, -12345678 } }
8952 	},
8953 	/* BPF_JMP32 | BPF_JSGT | BPF_X */
8954 	{
8955 		"JMP32_JSGT_X",
8956 		.u.insns_int = {
8957 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
8958 			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
8959 			BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
8960 			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
8961 			BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
8962 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8963 			BPF_EXIT_INSN(),
8964 		},
8965 		INTERNAL,
8966 		{ },
8967 		{ { 0, -12345678 } }
8968 	},
8969 	/* BPF_JMP32 | BPF_JSGE | BPF_K */
8970 	{
8971 		"JMP32_JSGE_K: Small immediate",
8972 		.u.insns_int = {
8973 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
8974 			BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
8975 			BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
8976 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8977 			BPF_EXIT_INSN(),
8978 		},
8979 		INTERNAL,
8980 		{ },
8981 		{ { 0, -123 } }
8982 	},
8983 	{
8984 		"JMP32_JSGE_K: Large immediate",
8985 		.u.insns_int = {
8986 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
8987 			BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
8988 			BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
8989 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
8990 			BPF_EXIT_INSN(),
8991 		},
8992 		INTERNAL,
8993 		{ },
8994 		{ { 0, -12345678 } }
8995 	},
8996 	/* BPF_JMP32 | BPF_JSGE | BPF_X */
8997 	{
8998 		"JMP32_JSGE_X",
8999 		.u.insns_int = {
9000 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9001 			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9002 			BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
9003 			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9004 			BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
9005 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9006 			BPF_EXIT_INSN(),
9007 		},
9008 		INTERNAL,
9009 		{ },
9010 		{ { 0, -12345678 } }
9011 	},
9012 	/* BPF_JMP32 | BPF_JSLT | BPF_K */
9013 	{
9014 		"JMP32_JSLT_K: Small immediate",
9015 		.u.insns_int = {
9016 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9017 			BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
9018 			BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
9019 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9020 			BPF_EXIT_INSN(),
9021 		},
9022 		INTERNAL,
9023 		{ },
9024 		{ { 0, -123 } }
9025 	},
9026 	{
9027 		"JMP32_JSLT_K: Large immediate",
9028 		.u.insns_int = {
9029 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9030 			BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
9031 			BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
9032 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9033 			BPF_EXIT_INSN(),
9034 		},
9035 		INTERNAL,
9036 		{ },
9037 		{ { 0, -12345678 } }
9038 	},
9039 	/* BPF_JMP32 | BPF_JSLT | BPF_X */
9040 	{
9041 		"JMP32_JSLT_X",
9042 		.u.insns_int = {
9043 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9044 			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9045 			BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
9046 			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9047 			BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
9048 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9049 			BPF_EXIT_INSN(),
9050 		},
9051 		INTERNAL,
9052 		{ },
9053 		{ { 0, -12345678 } }
9054 	},
9055 	/* BPF_JMP32 | BPF_JSLE | BPF_K */
9056 	{
9057 		"JMP32_JSLE_K: Small immediate",
9058 		.u.insns_int = {
9059 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9060 			BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
9061 			BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
9062 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9063 			BPF_EXIT_INSN(),
9064 		},
9065 		INTERNAL,
9066 		{ },
9067 		{ { 0, -123 } }
9068 	},
9069 	{
9070 		"JMP32_JSLE_K: Large immediate",
9071 		.u.insns_int = {
9072 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9073 			BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
9074 			BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
9075 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9076 			BPF_EXIT_INSN(),
9077 		},
9078 		INTERNAL,
9079 		{ },
9080 		{ { 0, -12345678 } }
9081 	},
9082 	/* BPF_JMP32 | BPF_JSLE | BPF_X */
9083 	{
9084 		"JMP32_JSLE_X",
9085 		.u.insns_int = {
9086 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9087 			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9088 			BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
9089 			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9090 			BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
9091 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9092 			BPF_EXIT_INSN(),
9093 		},
9094 		INTERNAL,
9095 		{ },
9096 		{ { 0, -12345678 } }
9097 	},
9098 	/* BPF_JMP | BPF_EXIT */
9099 	{
9100 		"JMP_EXIT",
9101 		.u.insns_int = {
9102 			BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
9103 			BPF_EXIT_INSN(),
9104 			BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
9105 		},
9106 		INTERNAL,
9107 		{ },
9108 		{ { 0, 0x4711 } },
9109 	},
9110 	/* BPF_JMP | BPF_JA */
9111 	{
9112 		"JMP_JA: Unconditional jump: if (true) return 1",
9113 		.u.insns_int = {
9114 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9115 			BPF_JMP_IMM(BPF_JA, 0, 0, 1),
9116 			BPF_EXIT_INSN(),
9117 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9118 			BPF_EXIT_INSN(),
9119 		},
9120 		INTERNAL,
9121 		{ },
9122 		{ { 0, 1 } },
9123 	},
9124 	/* BPF_JMP | BPF_JSLT | BPF_K */
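	/*
	 * The 64-bit conditional jump tests load their register operands
	 * with BPF_LD_IMM64 so that negative values are genuine 64-bit
	 * constants. In most of them a correct branch decision, taken or
	 * not, leaves R0 == 1 at exit.
	 */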
9125 	{
9126 		"JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
9127 		.u.insns_int = {
9128 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9129 			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9130 			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9131 			BPF_EXIT_INSN(),
9132 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9133 			BPF_EXIT_INSN(),
9134 		},
9135 		INTERNAL,
9136 		{ },
9137 		{ { 0, 1 } },
9138 	},
9139 	{
9140 		"JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
9141 		.u.insns_int = {
9142 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9143 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9144 			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9145 			BPF_EXIT_INSN(),
9146 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9147 			BPF_EXIT_INSN(),
9148 		},
9149 		INTERNAL,
9150 		{ },
9151 		{ { 0, 1 } },
9152 	},
9153 	/* BPF_JMP | BPF_JSGT | BPF_K */
9154 	{
9155 		"JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
9156 		.u.insns_int = {
9157 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9158 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9159 			BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
9160 			BPF_EXIT_INSN(),
9161 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9162 			BPF_EXIT_INSN(),
9163 		},
9164 		INTERNAL,
9165 		{ },
9166 		{ { 0, 1 } },
9167 	},
9168 	{
9169 		"JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
9170 		.u.insns_int = {
9171 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9172 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9173 			BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
9174 			BPF_EXIT_INSN(),
9175 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9176 			BPF_EXIT_INSN(),
9177 		},
9178 		INTERNAL,
9179 		{ },
9180 		{ { 0, 1 } },
9181 	},
9182 	/* BPF_JMP | BPF_JSLE | BPF_K */
9183 	{
9184 		"JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
9185 		.u.insns_int = {
9186 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9187 			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9188 			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9189 			BPF_EXIT_INSN(),
9190 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9191 			BPF_EXIT_INSN(),
9192 		},
9193 		INTERNAL,
9194 		{ },
9195 		{ { 0, 1 } },
9196 	},
9197 	{
9198 		"JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
9199 		.u.insns_int = {
9200 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9201 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9202 			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9203 			BPF_EXIT_INSN(),
9204 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9205 			BPF_EXIT_INSN(),
9206 		},
9207 		INTERNAL,
9208 		{ },
9209 		{ { 0, 1 } },
9210 	},
9211 	{
9212 		"JMP_JSLE_K: Signed jump: value walk 1",
9213 		.u.insns_int = {
9214 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9215 			BPF_LD_IMM64(R1, 3),
9216 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
9217 			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9218 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9219 			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9220 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9221 			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9222 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9223 			BPF_EXIT_INSN(),		/* bad exit */
9224 			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9225 			BPF_EXIT_INSN(),
9226 		},
9227 		INTERNAL,
9228 		{ },
9229 		{ { 0, 1 } },
9230 	},
9231 	{
9232 		"JMP_JSLE_K: Signed jump: value walk 2",
9233 		.u.insns_int = {
9234 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9235 			BPF_LD_IMM64(R1, 3),
9236 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9237 			BPF_ALU64_IMM(BPF_SUB, R1, 2),
9238 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9239 			BPF_ALU64_IMM(BPF_SUB, R1, 2),
9240 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9241 			BPF_EXIT_INSN(),		/* bad exit */
9242 			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9243 			BPF_EXIT_INSN(),
9244 		},
9245 		INTERNAL,
9246 		{ },
9247 		{ { 0, 1 } },
9248 	},
9249 	/* BPF_JMP | BPF_JSGE | BPF_K */
9250 	{
9251 		"JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
9252 		.u.insns_int = {
9253 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9254 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9255 			BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
9256 			BPF_EXIT_INSN(),
9257 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9258 			BPF_EXIT_INSN(),
9259 		},
9260 		INTERNAL,
9261 		{ },
9262 		{ { 0, 1 } },
9263 	},
9264 	{
9265 		"JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
9266 		.u.insns_int = {
9267 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9268 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9269 			BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
9270 			BPF_EXIT_INSN(),
9271 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9272 			BPF_EXIT_INSN(),
9273 		},
9274 		INTERNAL,
9275 		{ },
9276 		{ { 0, 1 } },
9277 	},
9278 	{
9279 		"JMP_JSGE_K: Signed jump: value walk 1",
9280 		.u.insns_int = {
9281 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9282 			BPF_LD_IMM64(R1, -3),
9283 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
9284 			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9285 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
9286 			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9287 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
9288 			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9289 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
9290 			BPF_EXIT_INSN(),		/* bad exit */
9291 			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9292 			BPF_EXIT_INSN(),
9293 		},
9294 		INTERNAL,
9295 		{ },
9296 		{ { 0, 1 } },
9297 	},
9298 	{
9299 		"JMP_JSGE_K: Signed jump: value walk 2",
9300 		.u.insns_int = {
9301 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9302 			BPF_LD_IMM64(R1, -3),
9303 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
9304 			BPF_ALU64_IMM(BPF_ADD, R1, 2),
9305 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
9306 			BPF_ALU64_IMM(BPF_ADD, R1, 2),
9307 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
9308 			BPF_EXIT_INSN(),		/* bad exit */
9309 			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9310 			BPF_EXIT_INSN(),
9311 		},
9312 		INTERNAL,
9313 		{ },
9314 		{ { 0, 1 } },
9315 	},
9316 	/* BPF_JMP | BPF_JGT | BPF_K */
9317 	{
9318 		"JMP_JGT_K: if (3 > 2) return 1",
9319 		.u.insns_int = {
9320 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9321 			BPF_LD_IMM64(R1, 3),
9322 			BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
9323 			BPF_EXIT_INSN(),
9324 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9325 			BPF_EXIT_INSN(),
9326 		},
9327 		INTERNAL,
9328 		{ },
9329 		{ { 0, 1 } },
9330 	},
9331 	{
9332 		"JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
9333 		.u.insns_int = {
9334 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9335 			BPF_LD_IMM64(R1, -1),
9336 			BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
9337 			BPF_EXIT_INSN(),
9338 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9339 			BPF_EXIT_INSN(),
9340 		},
9341 		INTERNAL,
9342 		{ },
9343 		{ { 0, 1 } },
9344 	},
9345 	/* BPF_JMP | BPF_JLT | BPF_K */
9346 	{
9347 		"JMP_JLT_K: if (2 < 3) return 1",
9348 		.u.insns_int = {
9349 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9350 			BPF_LD_IMM64(R1, 2),
9351 			BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
9352 			BPF_EXIT_INSN(),
9353 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9354 			BPF_EXIT_INSN(),
9355 		},
9356 		INTERNAL,
9357 		{ },
9358 		{ { 0, 1 } },
9359 	},
9360 	{
9361 		"JMP_JLT_K: Unsigned jump: if (1 < -1) return 1",
9362 		.u.insns_int = {
9363 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9364 			BPF_LD_IMM64(R1, 1),
9365 			BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
9366 			BPF_EXIT_INSN(),
9367 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9368 			BPF_EXIT_INSN(),
9369 		},
9370 		INTERNAL,
9371 		{ },
9372 		{ { 0, 1 } },
9373 	},
9374 	/* BPF_JMP | BPF_JGE | BPF_K */
9375 	{
9376 		"JMP_JGE_K: if (3 >= 2) return 1",
9377 		.u.insns_int = {
9378 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9379 			BPF_LD_IMM64(R1, 3),
9380 			BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
9381 			BPF_EXIT_INSN(),
9382 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9383 			BPF_EXIT_INSN(),
9384 		},
9385 		INTERNAL,
9386 		{ },
9387 		{ { 0, 1 } },
9388 	},
9389 	/* BPF_JMP | BPF_JLE | BPF_K */
9390 	{
9391 		"JMP_JLE_K: if (2 <= 3) return 1",
9392 		.u.insns_int = {
9393 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9394 			BPF_LD_IMM64(R1, 2),
9395 			BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
9396 			BPF_EXIT_INSN(),
9397 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9398 			BPF_EXIT_INSN(),
9399 		},
9400 		INTERNAL,
9401 		{ },
9402 		{ { 0, 1 } },
9403 	},
9404 	/* BPF_JMP | BPF_JGT | BPF_K jump backwards */
9405 	{
9406 		"JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
9407 		.u.insns_int = {
9408 			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
9409 			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
9410 			BPF_EXIT_INSN(),
9411 			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
9412 			BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
9413 			BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
9414 			BPF_EXIT_INSN(),
9415 		},
9416 		INTERNAL,
9417 		{ },
9418 		{ { 0, 1 } },
9419 	},
9420 	{
9421 		"JMP_JGE_K: if (3 >= 3) return 1",
9422 		.u.insns_int = {
9423 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9424 			BPF_LD_IMM64(R1, 3),
9425 			BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
9426 			BPF_EXIT_INSN(),
9427 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9428 			BPF_EXIT_INSN(),
9429 		},
9430 		INTERNAL,
9431 		{ },
9432 		{ { 0, 1 } },
9433 	},
9434 	/* BPF_JMP | BPF_JLT | BPF_K jump backwards */
9435 	{
9436 		"JMP_JLT_K: if (2 < 3) return 1 (jump backwards)",
9437 		.u.insns_int = {
9438 			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
9439 			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
9440 			BPF_EXIT_INSN(),
9441 			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
9442 			BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
9443 			BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
9444 			BPF_EXIT_INSN(),
9445 		},
9446 		INTERNAL,
9447 		{ },
9448 		{ { 0, 1 } },
9449 	},
9450 	{
9451 		"JMP_JLE_K: if (3 <= 3) return 1",
9452 		.u.insns_int = {
9453 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9454 			BPF_LD_IMM64(R1, 3),
9455 			BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
9456 			BPF_EXIT_INSN(),
9457 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9458 			BPF_EXIT_INSN(),
9459 		},
9460 		INTERNAL,
9461 		{ },
9462 		{ { 0, 1 } },
9463 	},
9464 	/* BPF_JMP | BPF_JNE | BPF_K */
9465 	{
9466 		"JMP_JNE_K: if (3 != 2) return 1",
9467 		.u.insns_int = {
9468 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9469 			BPF_LD_IMM64(R1, 3),
9470 			BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
9471 			BPF_EXIT_INSN(),
9472 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9473 			BPF_EXIT_INSN(),
9474 		},
9475 		INTERNAL,
9476 		{ },
9477 		{ { 0, 1 } },
9478 	},
9479 	/* BPF_JMP | BPF_JEQ | BPF_K */
9480 	{
9481 		"JMP_JEQ_K: if (3 == 3) return 1",
9482 		.u.insns_int = {
9483 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9484 			BPF_LD_IMM64(R1, 3),
9485 			BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
9486 			BPF_EXIT_INSN(),
9487 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9488 			BPF_EXIT_INSN(),
9489 		},
9490 		INTERNAL,
9491 		{ },
9492 		{ { 0, 1 } },
9493 	},
9494 	/* BPF_JMP | BPF_JSET | BPF_K */
9495 	{
9496 		"JMP_JSET_K: if (0x3 & 0x2) return 1",
9497 		.u.insns_int = {
9498 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9499 			BPF_LD_IMM64(R1, 3),
9500 			BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
9501 			BPF_EXIT_INSN(),
9502 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9503 			BPF_EXIT_INSN(),
9504 		},
9505 		INTERNAL,
9506 		{ },
9507 		{ { 0, 1 } },
9508 	},
9509 	{
9510 		"JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
9511 		.u.insns_int = {
9512 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9513 			BPF_LD_IMM64(R1, 3),
9514 			BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
9515 			BPF_EXIT_INSN(),
9516 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9517 			BPF_EXIT_INSN(),
9518 		},
9519 		INTERNAL,
9520 		{ },
9521 		{ { 0, 1 } },
9522 	},
9523 	/* BPF_JMP | BPF_JSGT | BPF_X */
9524 	{
9525 		"JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
9526 		.u.insns_int = {
9527 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9528 			BPF_LD_IMM64(R1, -1),
9529 			BPF_LD_IMM64(R2, -2),
9530 			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
9531 			BPF_EXIT_INSN(),
9532 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9533 			BPF_EXIT_INSN(),
9534 		},
9535 		INTERNAL,
9536 		{ },
9537 		{ { 0, 1 } },
9538 	},
9539 	{
9540 		"JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
9541 		.u.insns_int = {
9542 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9543 			BPF_LD_IMM64(R1, -1),
9544 			BPF_LD_IMM64(R2, -1),
9545 			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
9546 			BPF_EXIT_INSN(),
9547 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9548 			BPF_EXIT_INSN(),
9549 		},
9550 		INTERNAL,
9551 		{ },
9552 		{ { 0, 1 } },
9553 	},
9554 	/* BPF_JMP | BPF_JSLT | BPF_X */
9555 	{
9556 		"JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
9557 		.u.insns_int = {
9558 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9559 			BPF_LD_IMM64(R1, -1),
9560 			BPF_LD_IMM64(R2, -2),
9561 			BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
9562 			BPF_EXIT_INSN(),
9563 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9564 			BPF_EXIT_INSN(),
9565 		},
9566 		INTERNAL,
9567 		{ },
9568 		{ { 0, 1 } },
9569 	},
9570 	{
9571 		"JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
9572 		.u.insns_int = {
9573 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9574 			BPF_LD_IMM64(R1, -1),
9575 			BPF_LD_IMM64(R2, -1),
9576 			BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
9577 			BPF_EXIT_INSN(),
9578 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9579 			BPF_EXIT_INSN(),
9580 		},
9581 		INTERNAL,
9582 		{ },
9583 		{ { 0, 1 } },
9584 	},
9585 	/* BPF_JMP | BPF_JSGE | BPF_X */
9586 	{
9587 		"JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
9588 		.u.insns_int = {
9589 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9590 			BPF_LD_IMM64(R1, -1),
9591 			BPF_LD_IMM64(R2, -2),
9592 			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
9593 			BPF_EXIT_INSN(),
9594 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9595 			BPF_EXIT_INSN(),
9596 		},
9597 		INTERNAL,
9598 		{ },
9599 		{ { 0, 1 } },
9600 	},
9601 	{
9602 		"JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
9603 		.u.insns_int = {
9604 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9605 			BPF_LD_IMM64(R1, -1),
9606 			BPF_LD_IMM64(R2, -1),
9607 			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
9608 			BPF_EXIT_INSN(),
9609 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9610 			BPF_EXIT_INSN(),
9611 		},
9612 		INTERNAL,
9613 		{ },
9614 		{ { 0, 1 } },
9615 	},
9616 	/* BPF_JMP | BPF_JSLE | BPF_X */
9617 	{
9618 		"JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
9619 		.u.insns_int = {
9620 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9621 			BPF_LD_IMM64(R1, -1),
9622 			BPF_LD_IMM64(R2, -2),
9623 			BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
9624 			BPF_EXIT_INSN(),
9625 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9626 			BPF_EXIT_INSN(),
9627 		},
9628 		INTERNAL,
9629 		{ },
9630 		{ { 0, 1 } },
9631 	},
9632 	{
9633 		"JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
9634 		.u.insns_int = {
9635 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9636 			BPF_LD_IMM64(R1, -1),
9637 			BPF_LD_IMM64(R2, -1),
9638 			BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
9639 			BPF_EXIT_INSN(),
9640 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9641 			BPF_EXIT_INSN(),
9642 		},
9643 		INTERNAL,
9644 		{ },
9645 		{ { 0, 1 } },
9646 	},
9647 	/* BPF_JMP | BPF_JGT | BPF_X */
9648 	{
9649 		"JMP_JGT_X: if (3 > 2) return 1",
9650 		.u.insns_int = {
9651 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9652 			BPF_LD_IMM64(R1, 3),
9653 			BPF_LD_IMM64(R2, 2),
9654 			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
9655 			BPF_EXIT_INSN(),
9656 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9657 			BPF_EXIT_INSN(),
9658 		},
9659 		INTERNAL,
9660 		{ },
9661 		{ { 0, 1 } },
9662 	},
9663 	{
9664 		"JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
9665 		.u.insns_int = {
9666 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9667 			BPF_LD_IMM64(R1, -1),
9668 			BPF_LD_IMM64(R2, 1),
9669 			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
9670 			BPF_EXIT_INSN(),
9671 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9672 			BPF_EXIT_INSN(),
9673 		},
9674 		INTERNAL,
9675 		{ },
9676 		{ { 0, 1 } },
9677 	},
9678 	/* BPF_JMP | BPF_JLT | BPF_X */
9679 	{
9680 		"JMP_JLT_X: if (2 < 3) return 1",
9681 		.u.insns_int = {
9682 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9683 			BPF_LD_IMM64(R1, 3),
9684 			BPF_LD_IMM64(R2, 2),
9685 			BPF_JMP_REG(BPF_JLT, R2, R1, 1),
9686 			BPF_EXIT_INSN(),
9687 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9688 			BPF_EXIT_INSN(),
9689 		},
9690 		INTERNAL,
9691 		{ },
9692 		{ { 0, 1 } },
9693 	},
9694 	{
9695 		"JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
9696 		.u.insns_int = {
9697 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9698 			BPF_LD_IMM64(R1, -1),
9699 			BPF_LD_IMM64(R2, 1),
9700 			BPF_JMP_REG(BPF_JLT, R2, R1, 1),
9701 			BPF_EXIT_INSN(),
9702 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9703 			BPF_EXIT_INSN(),
9704 		},
9705 		INTERNAL,
9706 		{ },
9707 		{ { 0, 1 } },
9708 	},
9709 	/* BPF_JMP | BPF_JGE | BPF_X */
9710 	{
9711 		"JMP_JGE_X: if (3 >= 2) return 1",
9712 		.u.insns_int = {
9713 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9714 			BPF_LD_IMM64(R1, 3),
9715 			BPF_LD_IMM64(R2, 2),
9716 			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
9717 			BPF_EXIT_INSN(),
9718 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9719 			BPF_EXIT_INSN(),
9720 		},
9721 		INTERNAL,
9722 		{ },
9723 		{ { 0, 1 } },
9724 	},
9725 	{
9726 		"JMP_JGE_X: if (3 >= 3) return 1",
9727 		.u.insns_int = {
9728 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9729 			BPF_LD_IMM64(R1, 3),
9730 			BPF_LD_IMM64(R2, 3),
9731 			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
9732 			BPF_EXIT_INSN(),
9733 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9734 			BPF_EXIT_INSN(),
9735 		},
9736 		INTERNAL,
9737 		{ },
9738 		{ { 0, 1 } },
9739 	},
9740 	/* BPF_JMP | BPF_JLE | BPF_X */
9741 	{
9742 		"JMP_JLE_X: if (2 <= 3) return 1",
9743 		.u.insns_int = {
9744 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9745 			BPF_LD_IMM64(R1, 3),
9746 			BPF_LD_IMM64(R2, 2),
9747 			BPF_JMP_REG(BPF_JLE, R2, R1, 1),
9748 			BPF_EXIT_INSN(),
9749 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9750 			BPF_EXIT_INSN(),
9751 		},
9752 		INTERNAL,
9753 		{ },
9754 		{ { 0, 1 } },
9755 	},
9756 	{
9757 		"JMP_JLE_X: if (3 <= 3) return 1",
9758 		.u.insns_int = {
9759 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9760 			BPF_LD_IMM64(R1, 3),
9761 			BPF_LD_IMM64(R2, 3),
9762 			BPF_JMP_REG(BPF_JLE, R1, R2, 1),
9763 			BPF_EXIT_INSN(),
9764 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9765 			BPF_EXIT_INSN(),
9766 		},
9767 		INTERNAL,
9768 		{ },
9769 		{ { 0, 1 } },
9770 	},
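	/*
	 * Note: the harness compares only the lower 32 bits of the value left
	 * in R0, which is why the ldimm64 tests below expect the truncated
	 * results 0xeeeeeeee and 0xffffffff.
	 */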
9771 	{
9772 		/* Mainly testing JIT + imm64 here. */
9773 		"JMP_JGE_X: ldimm64 test 1",
9774 		.u.insns_int = {
9775 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9776 			BPF_LD_IMM64(R1, 3),
9777 			BPF_LD_IMM64(R2, 2),
9778 			BPF_JMP_REG(BPF_JGE, R1, R2, 2),
9779 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9780 			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9781 			BPF_EXIT_INSN(),
9782 		},
9783 		INTERNAL,
9784 		{ },
9785 		{ { 0, 0xeeeeeeeeU } },
9786 	},
9787 	{
9788 		"JMP_JGE_X: ldimm64 test 2",
9789 		.u.insns_int = {
9790 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9791 			BPF_LD_IMM64(R1, 3),
9792 			BPF_LD_IMM64(R2, 2),
9793 			BPF_JMP_REG(BPF_JGE, R1, R2, 0),
9794 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9795 			BPF_EXIT_INSN(),
9796 		},
9797 		INTERNAL,
9798 		{ },
9799 		{ { 0, 0xffffffffU } },
9800 	},
9801 	{
9802 		"JMP_JGE_X: ldimm64 test 3",
9803 		.u.insns_int = {
9804 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9805 			BPF_LD_IMM64(R1, 3),
9806 			BPF_LD_IMM64(R2, 2),
9807 			BPF_JMP_REG(BPF_JGE, R1, R2, 4),
9808 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9809 			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9810 			BPF_EXIT_INSN(),
9811 		},
9812 		INTERNAL,
9813 		{ },
9814 		{ { 0, 1 } },
9815 	},
9816 	{
9817 		"JMP_JLE_X: ldimm64 test 1",
9818 		.u.insns_int = {
9819 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9820 			BPF_LD_IMM64(R1, 3),
9821 			BPF_LD_IMM64(R2, 2),
9822 			BPF_JMP_REG(BPF_JLE, R2, R1, 2),
9823 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9824 			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9825 			BPF_EXIT_INSN(),
9826 		},
9827 		INTERNAL,
9828 		{ },
9829 		{ { 0, 0xeeeeeeeeU } },
9830 	},
9831 	{
9832 		"JMP_JLE_X: ldimm64 test 2",
9833 		.u.insns_int = {
9834 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9835 			BPF_LD_IMM64(R1, 3),
9836 			BPF_LD_IMM64(R2, 2),
9837 			BPF_JMP_REG(BPF_JLE, R2, R1, 0),
9838 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9839 			BPF_EXIT_INSN(),
9840 		},
9841 		INTERNAL,
9842 		{ },
9843 		{ { 0, 0xffffffffU } },
9844 	},
9845 	{
9846 		"JMP_JLE_X: ldimm64 test 3",
9847 		.u.insns_int = {
9848 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9849 			BPF_LD_IMM64(R1, 3),
9850 			BPF_LD_IMM64(R2, 2),
9851 			BPF_JMP_REG(BPF_JLE, R2, R1, 4),
9852 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
9853 			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
9854 			BPF_EXIT_INSN(),
9855 		},
9856 		INTERNAL,
9857 		{ },
9858 		{ { 0, 1 } },
9859 	},
9860 	/* BPF_JMP | BPF_JNE | BPF_X */
9861 	{
9862 		"JMP_JNE_X: if (3 != 2) return 1",
9863 		.u.insns_int = {
9864 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9865 			BPF_LD_IMM64(R1, 3),
9866 			BPF_LD_IMM64(R2, 2),
9867 			BPF_JMP_REG(BPF_JNE, R1, R2, 1),
9868 			BPF_EXIT_INSN(),
9869 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9870 			BPF_EXIT_INSN(),
9871 		},
9872 		INTERNAL,
9873 		{ },
9874 		{ { 0, 1 } },
9875 	},
9876 	/* BPF_JMP | BPF_JEQ | BPF_X */
9877 	{
9878 		"JMP_JEQ_X: if (3 == 3) return 1",
9879 		.u.insns_int = {
9880 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9881 			BPF_LD_IMM64(R1, 3),
9882 			BPF_LD_IMM64(R2, 3),
9883 			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
9884 			BPF_EXIT_INSN(),
9885 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9886 			BPF_EXIT_INSN(),
9887 		},
9888 		INTERNAL,
9889 		{ },
9890 		{ { 0, 1 } },
9891 	},
9892 	/* BPF_JMP | BPF_JSET | BPF_X */
9893 	{
9894 		"JMP_JSET_X: if (0x3 & 0x2) return 1",
9895 		.u.insns_int = {
9896 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9897 			BPF_LD_IMM64(R1, 3),
9898 			BPF_LD_IMM64(R2, 2),
9899 			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
9900 			BPF_EXIT_INSN(),
9901 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9902 			BPF_EXIT_INSN(),
9903 		},
9904 		INTERNAL,
9905 		{ },
9906 		{ { 0, 1 } },
9907 	},
9908 	{
9909 		"JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
9910 		.u.insns_int = {
9911 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9912 			BPF_LD_IMM64(R1, 3),
9913 			BPF_LD_IMM64(R2, 0xffffffff),
9914 			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
9915 			BPF_EXIT_INSN(),
9916 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9917 			BPF_EXIT_INSN(),
9918 		},
9919 		INTERNAL,
9920 		{ },
9921 		{ { 0, 1 } },
9922 	},
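	/*
	 * The following entries carry no static instruction list; the program
	 * is generated at load time by the .fill_helper callback (the
	 * bpf_fill_*() routines earlier in this file).
	 */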
9923 	{
9924 		"JMP_JA: Jump, gap, jump, ...",
9925 		{ },
9926 		CLASSIC | FLAG_NO_DATA,
9927 		{ },
9928 		{ { 0, 0xababcbac } },
9929 		.fill_helper = bpf_fill_ja,
9930 	},
9931 	{	/* Mainly checking JIT here. */
9932 		"BPF_MAXINSNS: Maximum possible literals",
9933 		{ },
9934 		CLASSIC | FLAG_NO_DATA,
9935 		{ },
9936 		{ { 0, 0xffffffff } },
9937 		.fill_helper = bpf_fill_maxinsns1,
9938 	},
9939 	{	/* Mainly checking JIT here. */
9940 		"BPF_MAXINSNS: Single literal",
9941 		{ },
9942 		CLASSIC | FLAG_NO_DATA,
9943 		{ },
9944 		{ { 0, 0xfefefefe } },
9945 		.fill_helper = bpf_fill_maxinsns2,
9946 	},
9947 	{	/* Mainly checking JIT here. */
9948 		"BPF_MAXINSNS: Run/add until end",
9949 		{ },
9950 		CLASSIC | FLAG_NO_DATA,
9951 		{ },
9952 		{ { 0, 0x947bf368 } },
9953 		.fill_helper = bpf_fill_maxinsns3,
9954 	},
9955 	{
9956 		"BPF_MAXINSNS: Too many instructions",
9957 		{ },
9958 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
9959 		{ },
9960 		{ },
9961 		.fill_helper = bpf_fill_maxinsns4,
9962 		.expected_errcode = -EINVAL,
9963 	},
9964 	{	/* Mainly checking JIT here. */
9965 		"BPF_MAXINSNS: Very long jump",
9966 		{ },
9967 		CLASSIC | FLAG_NO_DATA,
9968 		{ },
9969 		{ { 0, 0xabababab } },
9970 		.fill_helper = bpf_fill_maxinsns5,
9971 	},
9972 	{	/* Mainly checking JIT here. */
9973 		"BPF_MAXINSNS: Ctx heavy transformations",
9974 		{ },
9975 		CLASSIC,
9976 		{ },
9977 		{
9978 			{  1, SKB_VLAN_PRESENT },
9979 			{ 10, SKB_VLAN_PRESENT }
9980 		},
9981 		.fill_helper = bpf_fill_maxinsns6,
9982 	},
9983 	{	/* Mainly checking JIT here. */
9984 		"BPF_MAXINSNS: Call heavy transformations",
9985 		{ },
9986 		CLASSIC | FLAG_NO_DATA,
9987 		{ },
9988 		{ { 1, 0 }, { 10, 0 } },
9989 		.fill_helper = bpf_fill_maxinsns7,
9990 	},
9991 	{	/* Mainly checking JIT here. */
9992 		"BPF_MAXINSNS: Jump heavy test",
9993 		{ },
9994 		CLASSIC | FLAG_NO_DATA,
9995 		{ },
9996 		{ { 0, 0xffffffff } },
9997 		.fill_helper = bpf_fill_maxinsns8,
9998 	},
9999 	{	/* Mainly checking JIT here. */
10000 		"BPF_MAXINSNS: Very long jump backwards",
10001 		{ },
10002 		INTERNAL | FLAG_NO_DATA,
10003 		{ },
10004 		{ { 0, 0xcbababab } },
10005 		.fill_helper = bpf_fill_maxinsns9,
10006 	},
10007 	{	/* Mainly checking JIT here. */
10008 		"BPF_MAXINSNS: Edge hopping nuthouse",
10009 		{ },
10010 		INTERNAL | FLAG_NO_DATA,
10011 		{ },
10012 		{ { 0, 0xabababac } },
10013 		.fill_helper = bpf_fill_maxinsns10,
10014 	},
10015 	{
10016 		"BPF_MAXINSNS: Jump, gap, jump, ...",
10017 		{ },
10018 		CLASSIC | FLAG_NO_DATA,
10019 		{ },
10020 		{ { 0, 0xababcbac } },
10021 		.fill_helper = bpf_fill_maxinsns11,
10022 	},
10023 	{
10024 		"BPF_MAXINSNS: jump over MSH",
10025 		{ },
10026 		CLASSIC | FLAG_EXPECTED_FAIL,
10027 		{ 0xfa, 0xfb, 0xfc, 0xfd, },
10028 		{ { 4, 0xabababab } },
10029 		.fill_helper = bpf_fill_maxinsns12,
10030 		.expected_errcode = -EINVAL,
10031 	},
10032 	{
10033 		"BPF_MAXINSNS: exec all MSH",
10034 		{ },
10035 		CLASSIC,
10036 		{ 0xfa, 0xfb, 0xfc, 0xfd, },
10037 		{ { 4, 0xababab83 } },
10038 		.fill_helper = bpf_fill_maxinsns13,
10039 	},
10040 	{
10041 		"BPF_MAXINSNS: ld_abs+get_processor_id",
10042 		{ },
10043 		CLASSIC,
10044 		{ },
10045 		{ { 1, 0xbee } },
10046 		.fill_helper = bpf_fill_ld_abs_get_processor_id,
10047 	},
10048 	/*
10049 	 * LD_IND / LD_ABS on fragmented SKBs
10050 	 */
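	/*
	 * In the subtests below the first value of each { len, result } pair
	 * is the length of the linear skb data (0x40); with FLAG_SKB_FRAG the
	 * runner attaches frag_data behind it as a paged fragment, so loads at
	 * offset 0x40 and beyond must be satisfied from the fragment.
	 */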
10051 	{
10052 		"LD_IND byte frag",
10053 		.u.insns = {
10054 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10055 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
10056 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10057 		},
10058 		CLASSIC | FLAG_SKB_FRAG,
10059 		{ },
10060 		{ {0x40, 0x42} },
10061 		.frag_data = {
10062 			0x42, 0x00, 0x00, 0x00,
10063 			0x43, 0x44, 0x00, 0x00,
10064 			0x21, 0x07, 0x19, 0x83,
10065 		},
10066 	},
10067 	{
10068 		"LD_IND halfword frag",
10069 		.u.insns = {
10070 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10071 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
10072 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10073 		},
10074 		CLASSIC | FLAG_SKB_FRAG,
10075 		{ },
10076 		{ {0x40, 0x4344} },
10077 		.frag_data = {
10078 			0x42, 0x00, 0x00, 0x00,
10079 			0x43, 0x44, 0x00, 0x00,
10080 			0x21, 0x07, 0x19, 0x83,
10081 		},
10082 	},
10083 	{
10084 		"LD_IND word frag",
10085 		.u.insns = {
10086 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10087 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
10088 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10089 		},
10090 		CLASSIC | FLAG_SKB_FRAG,
10091 		{ },
10092 		{ {0x40, 0x21071983} },
10093 		.frag_data = {
10094 			0x42, 0x00, 0x00, 0x00,
10095 			0x43, 0x44, 0x00, 0x00,
10096 			0x21, 0x07, 0x19, 0x83,
10097 		},
10098 	},
10099 	{
10100 		"LD_IND halfword mixed head/frag",
10101 		.u.insns = {
10102 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10103 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10104 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10105 		},
10106 		CLASSIC | FLAG_SKB_FRAG,
10107 		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10108 		{ {0x40, 0x0519} },
10109 		.frag_data = { 0x19, 0x82 },
10110 	},
10111 	{
10112 		"LD_IND word mixed head/frag",
10113 		.u.insns = {
10114 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10115 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10116 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10117 		},
10118 		CLASSIC | FLAG_SKB_FRAG,
10119 		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10120 		{ {0x40, 0x25051982} },
10121 		.frag_data = { 0x19, 0x82 },
10122 	},
10123 	{
10124 		"LD_ABS byte frag",
10125 		.u.insns = {
10126 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
10127 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10128 		},
10129 		CLASSIC | FLAG_SKB_FRAG,
10130 		{ },
10131 		{ {0x40, 0x42} },
10132 		.frag_data = {
10133 			0x42, 0x00, 0x00, 0x00,
10134 			0x43, 0x44, 0x00, 0x00,
10135 			0x21, 0x07, 0x19, 0x83,
10136 		},
10137 	},
10138 	{
10139 		"LD_ABS halfword frag",
10140 		.u.insns = {
10141 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
10142 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10143 		},
10144 		CLASSIC | FLAG_SKB_FRAG,
10145 		{ },
10146 		{ {0x40, 0x4344} },
10147 		.frag_data = {
10148 			0x42, 0x00, 0x00, 0x00,
10149 			0x43, 0x44, 0x00, 0x00,
10150 			0x21, 0x07, 0x19, 0x83,
10151 		},
10152 	},
10153 	{
10154 		"LD_ABS word frag",
10155 		.u.insns = {
10156 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
10157 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10158 		},
10159 		CLASSIC | FLAG_SKB_FRAG,
10160 		{ },
10161 		{ {0x40, 0x21071983} },
10162 		.frag_data = {
10163 			0x42, 0x00, 0x00, 0x00,
10164 			0x43, 0x44, 0x00, 0x00,
10165 			0x21, 0x07, 0x19, 0x83,
10166 		},
10167 	},
10168 	{
10169 		"LD_ABS halfword mixed head/frag",
10170 		.u.insns = {
10171 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10172 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10173 		},
10174 		CLASSIC | FLAG_SKB_FRAG,
10175 		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10176 		{ {0x40, 0x0519} },
10177 		.frag_data = { 0x19, 0x82 },
10178 	},
10179 	{
10180 		"LD_ABS word mixed head/frag",
10181 		.u.insns = {
10182 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
10183 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10184 		},
10185 		CLASSIC | FLAG_SKB_FRAG,
10186 		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10187 		{ {0x40, 0x25051982} },
10188 		.frag_data = { 0x19, 0x82 },
10189 	},
10190 	/*
10191 	 * LD_IND / LD_ABS on non-fragmented SKBs
10192 	 */
10193 	{
10194 		/*
10195 		 * this tests that the JIT/interpreter correctly resets X
10196 		 * before using it in an LD_IND instruction.
10197 		 */
10198 		"LD_IND byte default X",
10199 		.u.insns = {
10200 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10201 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10202 		},
10203 		CLASSIC,
10204 		{ [0x1] = 0x42 },
10205 		{ {0x40, 0x42 } },
10206 	},
10207 	{
10208 		"LD_IND byte positive offset",
10209 		.u.insns = {
10210 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10211 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10212 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10213 		},
10214 		CLASSIC,
10215 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10216 		{ {0x40, 0x82 } },
10217 	},
10218 	{
10219 		"LD_IND byte negative offset",
10220 		.u.insns = {
10221 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10222 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
10223 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10224 		},
10225 		CLASSIC,
10226 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10227 		{ {0x40, 0x05 } },
10228 	},
10229 	{
10230 		"LD_IND byte positive offset, all ff",
10231 		.u.insns = {
10232 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10233 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10234 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10235 		},
10236 		CLASSIC,
10237 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10238 		{ {0x40, 0xff } },
10239 	},
10240 	{
10241 		"LD_IND byte positive offset, out of bounds",
10242 		.u.insns = {
10243 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10244 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10245 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10246 		},
10247 		CLASSIC,
10248 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10249 		{ {0x3f, 0 }, },
10250 	},
10251 	{
10252 		"LD_IND byte negative offset, out of bounds",
10253 		.u.insns = {
10254 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10255 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
10256 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10257 		},
10258 		CLASSIC,
10259 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10260 		{ {0x3f, 0 } },
10261 	},
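	/*
	 * SKF_LL_OFF makes the offset relative to the link-layer header.
	 * Judging by the expected results, the test skb has its MAC header at
	 * the start of the packet data, so SKF_LL_OFF + k simply reads data[k].
	 */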
10262 	{
10263 		"LD_IND byte negative offset, multiple calls",
10264 		.u.insns = {
10265 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10266 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
10267 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
10268 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
10269 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
10270 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10271 		},
10272 		CLASSIC,
10273 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10274 		{ {0x40, 0x82 }, },
10275 	},
10276 	{
10277 		"LD_IND halfword positive offset",
10278 		.u.insns = {
10279 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10280 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
10281 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10282 		},
10283 		CLASSIC,
10284 		{
10285 			[0x1c] = 0xaa, [0x1d] = 0x55,
10286 			[0x1e] = 0xbb, [0x1f] = 0x66,
10287 			[0x20] = 0xcc, [0x21] = 0x77,
10288 			[0x22] = 0xdd, [0x23] = 0x88,
10289 		},
10290 		{ {0x40, 0xdd88 } },
10291 	},
10292 	{
10293 		"LD_IND halfword negative offset",
10294 		.u.insns = {
10295 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10296 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
10297 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10298 		},
10299 		CLASSIC,
10300 		{
10301 			[0x1c] = 0xaa, [0x1d] = 0x55,
10302 			[0x1e] = 0xbb, [0x1f] = 0x66,
10303 			[0x20] = 0xcc, [0x21] = 0x77,
10304 			[0x22] = 0xdd, [0x23] = 0x88,
10305 		},
10306 		{ {0x40, 0xbb66 } },
10307 	},
10308 	{
10309 		"LD_IND halfword unaligned",
10310 		.u.insns = {
10311 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10312 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10313 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10314 		},
10315 		CLASSIC,
10316 		{
10317 			[0x1c] = 0xaa, [0x1d] = 0x55,
10318 			[0x1e] = 0xbb, [0x1f] = 0x66,
10319 			[0x20] = 0xcc, [0x21] = 0x77,
10320 			[0x22] = 0xdd, [0x23] = 0x88,
10321 		},
10322 		{ {0x40, 0x66cc } },
10323 	},
10324 	{
10325 		"LD_IND halfword positive offset, all ff",
10326 		.u.insns = {
10327 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
10328 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
10329 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10330 		},
10331 		CLASSIC,
10332 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10333 		{ {0x40, 0xffff } },
10334 	},
10335 	{
10336 		"LD_IND halfword positive offset, out of bounds",
10337 		.u.insns = {
10338 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10339 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
10340 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10341 		},
10342 		CLASSIC,
10343 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10344 		{ {0x3f, 0 }, },
10345 	},
10346 	{
10347 		"LD_IND halfword negative offset, out of bounds",
10348 		.u.insns = {
10349 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10350 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
10351 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10352 		},
10353 		CLASSIC,
10354 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10355 		{ {0x3f, 0 } },
10356 	},
10357 	{
10358 		"LD_IND word positive offset",
10359 		.u.insns = {
10360 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10361 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
10362 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10363 		},
10364 		CLASSIC,
10365 		{
10366 			[0x1c] = 0xaa, [0x1d] = 0x55,
10367 			[0x1e] = 0xbb, [0x1f] = 0x66,
10368 			[0x20] = 0xcc, [0x21] = 0x77,
10369 			[0x22] = 0xdd, [0x23] = 0x88,
10370 			[0x24] = 0xee, [0x25] = 0x99,
10371 			[0x26] = 0xff, [0x27] = 0xaa,
10372 		},
10373 		{ {0x40, 0xee99ffaa } },
10374 	},
10375 	{
10376 		"LD_IND word negative offset",
10377 		.u.insns = {
10378 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10379 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
10380 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10381 		},
10382 		CLASSIC,
10383 		{
10384 			[0x1c] = 0xaa, [0x1d] = 0x55,
10385 			[0x1e] = 0xbb, [0x1f] = 0x66,
10386 			[0x20] = 0xcc, [0x21] = 0x77,
10387 			[0x22] = 0xdd, [0x23] = 0x88,
10388 			[0x24] = 0xee, [0x25] = 0x99,
10389 			[0x26] = 0xff, [0x27] = 0xaa,
10390 		},
10391 		{ {0x40, 0xaa55bb66 } },
10392 	},
10393 	{
10394 		"LD_IND word unaligned (addr & 3 == 2)",
10395 		.u.insns = {
10396 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10397 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10398 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10399 		},
10400 		CLASSIC,
10401 		{
10402 			[0x1c] = 0xaa, [0x1d] = 0x55,
10403 			[0x1e] = 0xbb, [0x1f] = 0x66,
10404 			[0x20] = 0xcc, [0x21] = 0x77,
10405 			[0x22] = 0xdd, [0x23] = 0x88,
10406 			[0x24] = 0xee, [0x25] = 0x99,
10407 			[0x26] = 0xff, [0x27] = 0xaa,
10408 		},
10409 		{ {0x40, 0xbb66cc77 } },
10410 	},
10411 	{
10412 		"LD_IND word unaligned (addr & 3 == 1)",
10413 		.u.insns = {
10414 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10415 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
10416 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10417 		},
10418 		CLASSIC,
10419 		{
10420 			[0x1c] = 0xaa, [0x1d] = 0x55,
10421 			[0x1e] = 0xbb, [0x1f] = 0x66,
10422 			[0x20] = 0xcc, [0x21] = 0x77,
10423 			[0x22] = 0xdd, [0x23] = 0x88,
10424 			[0x24] = 0xee, [0x25] = 0x99,
10425 			[0x26] = 0xff, [0x27] = 0xaa,
10426 		},
10427 		{ {0x40, 0x55bb66cc } },
10428 	},
10429 	{
10430 		"LD_IND word unaligned (addr & 3 == 3)",
10431 		.u.insns = {
10432 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10433 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
10434 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10435 		},
10436 		CLASSIC,
10437 		{
10438 			[0x1c] = 0xaa, [0x1d] = 0x55,
10439 			[0x1e] = 0xbb, [0x1f] = 0x66,
10440 			[0x20] = 0xcc, [0x21] = 0x77,
10441 			[0x22] = 0xdd, [0x23] = 0x88,
10442 			[0x24] = 0xee, [0x25] = 0x99,
10443 			[0x26] = 0xff, [0x27] = 0xaa,
10444 		},
10445 		{ {0x40, 0x66cc77dd } },
10446 	},
10447 	{
10448 		"LD_IND word positive offset, all ff",
10449 		.u.insns = {
10450 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10451 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
10452 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10453 		},
10454 		CLASSIC,
10455 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10456 		{ {0x40, 0xffffffff } },
10457 	},
10458 	{
10459 		"LD_IND word positive offset, out of bounds",
10460 		.u.insns = {
10461 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10462 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
10463 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10464 		},
10465 		CLASSIC,
10466 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10467 		{ {0x3f, 0 }, },
10468 	},
10469 	{
10470 		"LD_IND word negative offset, out of bounds",
10471 		.u.insns = {
10472 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10473 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
10474 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10475 		},
10476 		CLASSIC,
10477 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10478 		{ {0x3f, 0 } },
10479 	},
10480 	{
10481 		"LD_ABS byte",
10482 		.u.insns = {
10483 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
10484 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10485 		},
10486 		CLASSIC,
10487 		{
10488 			[0x1c] = 0xaa, [0x1d] = 0x55,
10489 			[0x1e] = 0xbb, [0x1f] = 0x66,
10490 			[0x20] = 0xcc, [0x21] = 0x77,
10491 			[0x22] = 0xdd, [0x23] = 0x88,
10492 			[0x24] = 0xee, [0x25] = 0x99,
10493 			[0x26] = 0xff, [0x27] = 0xaa,
10494 		},
10495 		{ {0x40, 0xcc } },
10496 	},
10497 	{
10498 		"LD_ABS byte positive offset, all ff",
10499 		.u.insns = {
10500 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
10501 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10502 		},
10503 		CLASSIC,
10504 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10505 		{ {0x40, 0xff } },
10506 	},
10507 	{
10508 		"LD_ABS byte positive offset, out of bounds",
10509 		.u.insns = {
10510 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
10511 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10512 		},
10513 		CLASSIC,
10514 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10515 		{ {0x3f, 0 }, },
10516 	},
10517 	{
10518 		"LD_ABS byte negative offset, out of bounds load",
10519 		.u.insns = {
10520 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
10521 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10522 		},
10523 		CLASSIC | FLAG_EXPECTED_FAIL,
10524 		.expected_errcode = -EINVAL,
10525 	},
10526 	{
10527 		"LD_ABS byte negative offset, in bounds",
10528 		.u.insns = {
10529 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10530 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10531 		},
10532 		CLASSIC,
10533 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10534 		{ {0x40, 0x82 }, },
10535 	},
10536 	{
10537 		"LD_ABS byte negative offset, out of bounds",
10538 		.u.insns = {
10539 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10540 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10541 		},
10542 		CLASSIC,
10543 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10544 		{ {0x3f, 0 }, },
10545 	},
10546 	{
10547 		"LD_ABS byte negative offset, multiple calls",
10548 		.u.insns = {
10549 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
10550 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
10551 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
10552 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
10553 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10554 		},
10555 		CLASSIC,
10556 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10557 		{ {0x40, 0x82 }, },
10558 	},
10559 	{
10560 		"LD_ABS halfword",
10561 		.u.insns = {
10562 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
10563 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10564 		},
10565 		CLASSIC,
10566 		{
10567 			[0x1c] = 0xaa, [0x1d] = 0x55,
10568 			[0x1e] = 0xbb, [0x1f] = 0x66,
10569 			[0x20] = 0xcc, [0x21] = 0x77,
10570 			[0x22] = 0xdd, [0x23] = 0x88,
10571 			[0x24] = 0xee, [0x25] = 0x99,
10572 			[0x26] = 0xff, [0x27] = 0xaa,
10573 		},
10574 		{ {0x40, 0xdd88 } },
10575 	},
10576 	{
10577 		"LD_ABS halfword unaligned",
10578 		.u.insns = {
10579 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
10580 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10581 		},
10582 		CLASSIC,
10583 		{
10584 			[0x1c] = 0xaa, [0x1d] = 0x55,
10585 			[0x1e] = 0xbb, [0x1f] = 0x66,
10586 			[0x20] = 0xcc, [0x21] = 0x77,
10587 			[0x22] = 0xdd, [0x23] = 0x88,
10588 			[0x24] = 0xee, [0x25] = 0x99,
10589 			[0x26] = 0xff, [0x27] = 0xaa,
10590 		},
10591 		{ {0x40, 0x99ff } },
10592 	},
10593 	{
10594 		"LD_ABS halfword positive offset, all ff",
10595 		.u.insns = {
10596 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
10597 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10598 		},
10599 		CLASSIC,
10600 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10601 		{ {0x40, 0xffff } },
10602 	},
10603 	{
10604 		"LD_ABS halfword positive offset, out of bounds",
10605 		.u.insns = {
10606 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10607 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10608 		},
10609 		CLASSIC,
10610 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10611 		{ {0x3f, 0 }, },
10612 	},
10613 	{
10614 		"LD_ABS halfword negative offset, out of bounds load",
10615 		.u.insns = {
10616 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
10617 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10618 		},
10619 		CLASSIC | FLAG_EXPECTED_FAIL,
10620 		.expected_errcode = -EINVAL,
10621 	},
10622 	{
10623 		"LD_ABS halfword negative offset, in bounds",
10624 		.u.insns = {
10625 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
10626 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10627 		},
10628 		CLASSIC,
10629 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10630 		{ {0x40, 0x1982 }, },
10631 	},
10632 	{
10633 		"LD_ABS halfword negative offset, out of bounds",
10634 		.u.insns = {
10635 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
10636 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10637 		},
10638 		CLASSIC,
10639 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10640 		{ {0x3f, 0 }, },
10641 	},
10642 	{
10643 		"LD_ABS word",
10644 		.u.insns = {
10645 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
10646 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10647 		},
10648 		CLASSIC,
10649 		{
10650 			[0x1c] = 0xaa, [0x1d] = 0x55,
10651 			[0x1e] = 0xbb, [0x1f] = 0x66,
10652 			[0x20] = 0xcc, [0x21] = 0x77,
10653 			[0x22] = 0xdd, [0x23] = 0x88,
10654 			[0x24] = 0xee, [0x25] = 0x99,
10655 			[0x26] = 0xff, [0x27] = 0xaa,
10656 		},
10657 		{ {0x40, 0xaa55bb66 } },
10658 	},
10659 	{
10660 		"LD_ABS word unaligned (addr & 3 == 2)",
10661 		.u.insns = {
10662 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
10663 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10664 		},
10665 		CLASSIC,
10666 		{
10667 			[0x1c] = 0xaa, [0x1d] = 0x55,
10668 			[0x1e] = 0xbb, [0x1f] = 0x66,
10669 			[0x20] = 0xcc, [0x21] = 0x77,
10670 			[0x22] = 0xdd, [0x23] = 0x88,
10671 			[0x24] = 0xee, [0x25] = 0x99,
10672 			[0x26] = 0xff, [0x27] = 0xaa,
10673 		},
10674 		{ {0x40, 0xdd88ee99 } },
10675 	},
10676 	{
10677 		"LD_ABS word unaligned (addr & 3 == 1)",
10678 		.u.insns = {
10679 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
10680 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10681 		},
10682 		CLASSIC,
10683 		{
10684 			[0x1c] = 0xaa, [0x1d] = 0x55,
10685 			[0x1e] = 0xbb, [0x1f] = 0x66,
10686 			[0x20] = 0xcc, [0x21] = 0x77,
10687 			[0x22] = 0xdd, [0x23] = 0x88,
10688 			[0x24] = 0xee, [0x25] = 0x99,
10689 			[0x26] = 0xff, [0x27] = 0xaa,
10690 		},
10691 		{ {0x40, 0x77dd88ee } },
10692 	},
10693 	{
10694 		"LD_ABS word unaligned (addr & 3 == 3)",
10695 		.u.insns = {
10696 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
10697 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10698 		},
10699 		CLASSIC,
10700 		{
10701 			[0x1c] = 0xaa, [0x1d] = 0x55,
10702 			[0x1e] = 0xbb, [0x1f] = 0x66,
10703 			[0x20] = 0xcc, [0x21] = 0x77,
10704 			[0x22] = 0xdd, [0x23] = 0x88,
10705 			[0x24] = 0xee, [0x25] = 0x99,
10706 			[0x26] = 0xff, [0x27] = 0xaa,
10707 		},
10708 		{ {0x40, 0x88ee99ff } },
10709 	},
10710 	{
10711 		"LD_ABS word positive offset, all ff",
10712 		.u.insns = {
10713 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
10714 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10715 		},
10716 		CLASSIC,
10717 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10718 		{ {0x40, 0xffffffff } },
10719 	},
10720 	{
10721 		"LD_ABS word positive offset, out of bounds",
10722 		.u.insns = {
10723 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
10724 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10725 		},
10726 		CLASSIC,
10727 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10728 		{ {0x3f, 0 }, },
10729 	},
10730 	{
10731 		"LD_ABS word negative offset, out of bounds load",
10732 		.u.insns = {
10733 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
10734 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10735 		},
10736 		CLASSIC | FLAG_EXPECTED_FAIL,
10737 		.expected_errcode = -EINVAL,
10738 	},
10739 	{
10740 		"LD_ABS word negative offset, in bounds",
10741 		.u.insns = {
10742 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
10743 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10744 		},
10745 		CLASSIC,
10746 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10747 		{ {0x40, 0x25051982 }, },
10748 	},
10749 	{
10750 		"LD_ABS word negative offset, out of bounds",
10751 		.u.insns = {
10752 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
10753 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10754 		},
10755 		CLASSIC,
10756 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10757 		{ {0x3f, 0 }, },
10758 	},
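	/*
	 * LDX_MSH sets X = 4 * (P[k] & 0xf), the classic idiom for fetching an
	 * IP header length: with data[0x3c] = 0x25 that is (0x25 & 0xf) << 2 =
	 * 0x14, and with data[0x3e] = 0x19 it is 0x24.
	 */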
10759 	{
10760 		"LDX_MSH standalone, preserved A",
10761 		.u.insns = {
10762 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10763 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10764 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10765 		},
10766 		CLASSIC,
10767 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10768 		{ {0x40, 0xffeebbaa }, },
10769 	},
10770 	{
10771 		"LDX_MSH standalone, preserved A 2",
10772 		.u.insns = {
10773 			BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
10774 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10775 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
10776 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
10777 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
10778 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10779 		},
10780 		CLASSIC,
10781 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10782 		{ {0x40, 0x175e9d63 }, },
10783 	},
10784 	{
10785 		"LDX_MSH standalone, test result 1",
10786 		.u.insns = {
10787 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10788 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
10789 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10790 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10791 		},
10792 		CLASSIC,
10793 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10794 		{ {0x40, 0x14 }, },
10795 	},
10796 	{
10797 		"LDX_MSH standalone, test result 2",
10798 		.u.insns = {
10799 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10800 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
10801 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10802 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10803 		},
10804 		CLASSIC,
10805 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10806 		{ {0x40, 0x24 }, },
10807 	},
10808 	{
10809 		"LDX_MSH standalone, negative offset",
10810 		.u.insns = {
10811 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10812 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
10813 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10814 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10815 		},
10816 		CLASSIC,
10817 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10818 		{ {0x40, 0 }, },
10819 	},
10820 	{
10821 		"LDX_MSH standalone, negative offset 2",
10822 		.u.insns = {
10823 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10824 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
10825 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10826 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10827 		},
10828 		CLASSIC,
10829 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10830 		{ {0x40, 0x24 }, },
10831 	},
10832 	{
10833 		"LDX_MSH standalone, out of bounds",
10834 		.u.insns = {
10835 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
10836 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
10837 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
10838 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10839 		},
10840 		CLASSIC,
10841 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10842 		{ {0x40, 0 }, },
10843 	},
10844 	/*
10845 	 * verify that the interpreter or JIT correctly sets A and X
10846 	 * to 0.
10847 	 */
10848 	{
10849 		"ADD default X",
10850 		.u.insns = {
10851 			/*
10852 			 * A = 0x42
10853 			 * A = A + X
10854 			 * ret A
10855 			 */
10856 			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
10857 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
10858 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10859 		},
10860 		CLASSIC | FLAG_NO_DATA,
10861 		{},
10862 		{ {0x1, 0x42 } },
10863 	},
10864 	{
10865 		"ADD default A",
10866 		.u.insns = {
10867 			/*
10868 			 * A = A + 0x42
10869 			 * ret A
10870 			 */
10871 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
10872 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10873 		},
10874 		CLASSIC | FLAG_NO_DATA,
10875 		{},
10876 		{ {0x1, 0x42 } },
10877 	},
10878 	{
10879 		"SUB default X",
10880 		.u.insns = {
10881 			/*
10882 			 * A = 0x66
10883 			 * A = A - X
10884 			 * ret A
10885 			 */
10886 			BPF_STMT(BPF_LD | BPF_IMM, 0x66),
10887 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
10888 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10889 		},
10890 		CLASSIC | FLAG_NO_DATA,
10891 		{},
10892 		{ {0x1, 0x66 } },
10893 	},
10894 	{
10895 		"SUB default A",
10896 		.u.insns = {
10897 			/*
10898 			 * A = A - -0x66
10899 			 * ret A
10900 			 */
10901 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
10902 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10903 		},
10904 		CLASSIC | FLAG_NO_DATA,
10905 		{},
10906 		{ {0x1, 0x66 } },
10907 	},
10908 	{
10909 		"MUL default X",
10910 		.u.insns = {
10911 			/*
10912 			 * A = 0x42
10913 			 * A = A * X
10914 			 * ret A
10915 			 */
10916 			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
10917 			BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
10918 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10919 		},
10920 		CLASSIC | FLAG_NO_DATA,
10921 		{},
10922 		{ {0x1, 0x0 } },
10923 	},
10924 	{
10925 		"MUL default A",
10926 		.u.insns = {
10927 			/*
10928 			 * A = A * 0x66
10929 			 * ret A
10930 			 */
10931 			BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
10932 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10933 		},
10934 		CLASSIC | FLAG_NO_DATA,
10935 		{},
10936 		{ {0x1, 0x0 } },
10937 	},
10938 	{
10939 		"DIV default X",
10940 		.u.insns = {
10941 			/*
10942 			 * A = 0x42
9943 			 * A = A / X ; this halts the filter execution if X is 0
10944 			 * ret 0x42
10945 			 */
10946 			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
10947 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
10948 			BPF_STMT(BPF_RET | BPF_K, 0x42),
10949 		},
10950 		CLASSIC | FLAG_NO_DATA,
10951 		{},
10952 		{ {0x1, 0x0 } },
10953 	},
10954 	{
10955 		"DIV default A",
10956 		.u.insns = {
10957 			/*
10958 			 * A = A / 1
10959 			 * ret A
10960 			 */
10961 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
10962 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10963 		},
10964 		CLASSIC | FLAG_NO_DATA,
10965 		{},
10966 		{ {0x1, 0x0 } },
10967 	},
10968 	{
10969 		"MOD default X",
10970 		.u.insns = {
10971 			/*
10972 			 * A = 0x42
9973 			 * A = A mod X ; this halts the filter execution if X is 0
10974 			 * ret 0x42
10975 			 */
10976 			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
10977 			BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
10978 			BPF_STMT(BPF_RET | BPF_K, 0x42),
10979 		},
10980 		CLASSIC | FLAG_NO_DATA,
10981 		{},
10982 		{ {0x1, 0x0 } },
10983 	},
10984 	{
10985 		"MOD default A",
10986 		.u.insns = {
10987 			/*
10988 			 * A = A mod 1
10989 			 * ret A
10990 			 */
10991 			BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
10992 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10993 		},
10994 		CLASSIC | FLAG_NO_DATA,
10995 		{},
10996 		{ {0x1, 0x0 } },
10997 	},
10998 	{
10999 		"JMP EQ default A",
11000 		.u.insns = {
11001 			/*
11002 			 * cmp A, 0x0, 0, 1
11003 			 * ret 0x42
11004 			 * ret 0x66
11005 			 */
11006 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
11007 			BPF_STMT(BPF_RET | BPF_K, 0x42),
11008 			BPF_STMT(BPF_RET | BPF_K, 0x66),
11009 		},
11010 		CLASSIC | FLAG_NO_DATA,
11011 		{},
11012 		{ {0x1, 0x42 } },
11013 	},
11014 	{
11015 		"JMP EQ default X",
11016 		.u.insns = {
11017 			/*
11018 			 * A = 0x0
11019 			 * cmp A, X, 0, 1
11020 			 * ret 0x42
11021 			 * ret 0x66
11022 			 */
11023 			BPF_STMT(BPF_LD | BPF_IMM, 0x0),
11024 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
11025 			BPF_STMT(BPF_RET | BPF_K, 0x42),
11026 			BPF_STMT(BPF_RET | BPF_K, 0x66),
11027 		},
11028 		CLASSIC | FLAG_NO_DATA,
11029 		{},
11030 		{ {0x1, 0x42 } },
11031 	},
11032 	/* Checking interpreter vs JIT wrt sign-extended imms. */
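	/*
	 * A JMP immediate is 32 bits wide and is sign-extended to 64 bits
	 * before the comparison, so an immediate like 0xfefb0000 becomes
	 * 0xfffffffffefb0000 and never equals a register holding the
	 * zero-extended value 0x00000000fefb0000.
	 */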
11033 	{
11034 		"JNE signed compare, test 1",
11035 		.u.insns_int = {
11036 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11037 			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11038 			BPF_MOV64_REG(R2, R1),
11039 			BPF_ALU64_REG(BPF_AND, R2, R3),
11040 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11041 			BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
11042 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11043 			BPF_EXIT_INSN(),
11044 		},
11045 		INTERNAL,
11046 		{ },
11047 		{ { 0, 1 } },
11048 	},
11049 	{
11050 		"JNE signed compare, test 2",
11051 		.u.insns_int = {
11052 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11053 			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11054 			BPF_MOV64_REG(R2, R1),
11055 			BPF_ALU64_REG(BPF_AND, R2, R3),
11056 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11057 			BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
11058 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11059 			BPF_EXIT_INSN(),
11060 		},
11061 		INTERNAL,
11062 		{ },
11063 		{ { 0, 1 } },
11064 	},
11065 	{
11066 		"JNE signed compare, test 3",
11067 		.u.insns_int = {
11068 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11069 			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11070 			BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
11071 			BPF_MOV64_REG(R2, R1),
11072 			BPF_ALU64_REG(BPF_AND, R2, R3),
11073 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11074 			BPF_JMP_REG(BPF_JNE, R2, R4, 1),
11075 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11076 			BPF_EXIT_INSN(),
11077 		},
11078 		INTERNAL,
11079 		{ },
11080 		{ { 0, 2 } },
11081 	},
11082 	{
11083 		"JNE signed compare, test 4",
11084 		.u.insns_int = {
11085 			BPF_LD_IMM64(R1, -17104896),
11086 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11087 			BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
11088 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11089 			BPF_EXIT_INSN(),
11090 		},
11091 		INTERNAL,
11092 		{ },
11093 		{ { 0, 2 } },
11094 	},
11095 	{
11096 		"JNE signed compare, test 5",
11097 		.u.insns_int = {
11098 			BPF_LD_IMM64(R1, 0xfefb0000),
11099 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11100 			BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
11101 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11102 			BPF_EXIT_INSN(),
11103 		},
11104 		INTERNAL,
11105 		{ },
11106 		{ { 0, 1 } },
11107 	},
11108 	{
11109 		"JNE signed compare, test 6",
11110 		.u.insns_int = {
11111 			BPF_LD_IMM64(R1, 0x7efb0000),
11112 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11113 			BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
11114 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11115 			BPF_EXIT_INSN(),
11116 		},
11117 		INTERNAL,
11118 		{ },
11119 		{ { 0, 2 } },
11120 	},
11121 	{
11122 		"JNE signed compare, test 7",
11123 		.u.insns = {
11124 			BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
11125 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
11126 			BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
11127 			BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
11128 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
11129 			BPF_STMT(BPF_RET | BPF_K, 1),
11130 			BPF_STMT(BPF_RET | BPF_K, 2),
11131 		},
11132 		CLASSIC | FLAG_NO_DATA,
11133 		{},
11134 		{ { 0, 2 } },
11135 	},
11136 	/*
11137 	 * Register (non-)clobbering tests for the case where a JIT implements
11138 	 * complex ALU or ATOMIC operations via function calls. If so, the
11139 	 * function call must be transparent to the eBPF registers. The JIT
11140 	 * must therefore save and restore relevant registers across the call.
11141 	 * The following tests check that the eBPF registers retain their
11142 	 * values after such an operation. Mainly intended for complex ALU
11143 	 * and atomic operations, but we run it for all of them. You never know...
11144 	 *
11145 	 * Note that each operation should be tested twice with different
11146 	 * destinations, to check preservation for all registers.
11147 	 */
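	/*
	 * How the macro works: every register Rn is first loaded with its own
	 * number (Rn expands to BPF_REG_n == n), the operation under test is
	 * applied, and the destination is then rewritten with its known value
	 * so that the uniform "Rn == n" checks only trip if some register
	 * other than the intended destination was modified.
	 */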
11148 #define BPF_TEST_CLOBBER_ALU(alu, op, dst, src)			\
11149 	{							\
11150 		#alu "_" #op " to " #dst ": no clobbering",	\
11151 		.u.insns_int = {				\
11152 			BPF_ALU64_IMM(BPF_MOV, R0, R0),		\
11153 			BPF_ALU64_IMM(BPF_MOV, R1, R1),		\
11154 			BPF_ALU64_IMM(BPF_MOV, R2, R2),		\
11155 			BPF_ALU64_IMM(BPF_MOV, R3, R3),		\
11156 			BPF_ALU64_IMM(BPF_MOV, R4, R4),		\
11157 			BPF_ALU64_IMM(BPF_MOV, R5, R5),		\
11158 			BPF_ALU64_IMM(BPF_MOV, R6, R6),		\
11159 			BPF_ALU64_IMM(BPF_MOV, R7, R7),		\
11160 			BPF_ALU64_IMM(BPF_MOV, R8, R8),		\
11161 			BPF_ALU64_IMM(BPF_MOV, R9, R9),		\
11162 			BPF_##alu(BPF_##op, dst, src),		\
11163 			BPF_ALU32_IMM(BPF_MOV, dst, dst),	\
11164 			BPF_JMP_IMM(BPF_JNE, R0, R0, 10),	\
11165 			BPF_JMP_IMM(BPF_JNE, R1, R1, 9),	\
11166 			BPF_JMP_IMM(BPF_JNE, R2, R2, 8),	\
11167 			BPF_JMP_IMM(BPF_JNE, R3, R3, 7),	\
11168 			BPF_JMP_IMM(BPF_JNE, R4, R4, 6),	\
11169 			BPF_JMP_IMM(BPF_JNE, R5, R5, 5),	\
11170 			BPF_JMP_IMM(BPF_JNE, R6, R6, 4),	\
11171 			BPF_JMP_IMM(BPF_JNE, R7, R7, 3),	\
11172 			BPF_JMP_IMM(BPF_JNE, R8, R8, 2),	\
11173 			BPF_JMP_IMM(BPF_JNE, R9, R9, 1),	\
11174 			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
11175 			BPF_EXIT_INSN(),			\
11176 		},						\
11177 		INTERNAL,					\
11178 		{ },						\
11179 		{ { 0, 1 } }					\
11180 	}
11181 	/* ALU64 operations, register clobbering */
11182 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
11183 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
11184 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
11185 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
11186 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
11187 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
11188 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
11189 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
11190 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
11191 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
11192 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
11193 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
11194 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
11195 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
11196 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
11197 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
11198 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
11199 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
11200 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
11201 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
11202 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
11203 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
11204 	/* ALU32 immediate operations, register clobbering */
11205 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
11206 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
11207 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
11208 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
11209 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
11210 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
11211 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
11212 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
11213 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
11214 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
11215 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
11216 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
11217 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
11218 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
11219 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
11220 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
11221 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
11222 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
11223 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
11224 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
11225 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
11226 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
11227 	/* ALU64 register operations, register clobbering */
11228 	BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
11229 	BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
11230 	BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
11231 	BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
11232 	BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
11233 	BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
11234 	BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
11235 	BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
11236 	BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
11237 	BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
11238 	BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
11239 	BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
11240 	BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
11241 	BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
11242 	BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
11243 	BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
11244 	BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
11245 	BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
11246 	BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
11247 	BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
11248 	BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
11249 	BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
11250 	/* ALU32 register operations, register clobbering */
11251 	BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
11252 	BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
11253 	BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
11254 	BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
11255 	BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
11256 	BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
11257 	BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
11258 	BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
11259 	BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
11260 	BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
11261 	BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
11262 	BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
11263 	BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
11264 	BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
11265 	BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
11266 	BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
11267 	BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
11268 	BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
11269 	BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
11270 	BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
11271 	BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
11272 	BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
11273 #undef BPF_TEST_CLOBBER_ALU
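	/*
	 * Same scheme for atomics: R0..R9 hold their own register numbers and
	 * the word at R10-8 is pre-set so the result leaves them unchanged:
	 * 0 for CMPXCHG (matches R0, and the old value fetched back into R0 is
	 * again 0), 1 for fetching ops (the old value loaded into the source
	 * register R1 is again 1), and 0 otherwise.
	 */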
11274 #define BPF_TEST_CLOBBER_ATOMIC(width, op)			\
11275 	{							\
11276 		"Atomic_" #width " " #op ": no clobbering",	\
11277 		.u.insns_int = {				\
11278 			BPF_ALU64_IMM(BPF_MOV, R0, 0),		\
11279 			BPF_ALU64_IMM(BPF_MOV, R1, 1),		\
11280 			BPF_ALU64_IMM(BPF_MOV, R2, 2),		\
11281 			BPF_ALU64_IMM(BPF_MOV, R3, 3),		\
11282 			BPF_ALU64_IMM(BPF_MOV, R4, 4),		\
11283 			BPF_ALU64_IMM(BPF_MOV, R5, 5),		\
11284 			BPF_ALU64_IMM(BPF_MOV, R6, 6),		\
11285 			BPF_ALU64_IMM(BPF_MOV, R7, 7),		\
11286 			BPF_ALU64_IMM(BPF_MOV, R8, 8),		\
11287 			BPF_ALU64_IMM(BPF_MOV, R9, 9),		\
11288 			BPF_ST_MEM(width, R10, -8,		\
11289 				   (op) == BPF_CMPXCHG ? 0 :	\
11290 				   (op) & BPF_FETCH ? 1 : 0),	\
11291 			BPF_ATOMIC_OP(width, op, R10, R1, -8),	\
11292 			BPF_JMP_IMM(BPF_JNE, R0, 0, 10),	\
11293 			BPF_JMP_IMM(BPF_JNE, R1, 1, 9),		\
11294 			BPF_JMP_IMM(BPF_JNE, R2, 2, 8),		\
11295 			BPF_JMP_IMM(BPF_JNE, R3, 3, 7),		\
11296 			BPF_JMP_IMM(BPF_JNE, R4, 4, 6),		\
11297 			BPF_JMP_IMM(BPF_JNE, R5, 5, 5),		\
11298 			BPF_JMP_IMM(BPF_JNE, R6, 6, 4),		\
11299 			BPF_JMP_IMM(BPF_JNE, R7, 7, 3),		\
11300 			BPF_JMP_IMM(BPF_JNE, R8, 8, 2),		\
11301 			BPF_JMP_IMM(BPF_JNE, R9, 9, 1),		\
11302 			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
11303 			BPF_EXIT_INSN(),			\
11304 		},						\
11305 		INTERNAL,					\
11306 		{ },						\
11307 		{ { 0, 1 } },					\
11308 		.stack_depth = 8,				\
11309 	}
11310 	/* 64-bit atomic operations, register clobbering */
11311 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
11312 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
11313 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
11314 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
11315 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
11316 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
11317 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
11318 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
11319 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
11320 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
11321 	/* 32-bit atomic operations, register clobbering */
11322 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
11323 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
11324 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
11325 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
11326 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
11327 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
11328 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
11329 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
11330 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
11331 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
11332 #undef BPF_TEST_CLOBBER_ATOMIC
11333 	/* Checking that ALU32 src is not zero extended in place */
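	/*
	 * The macro snapshots the source register in R0, runs the 32-bit
	 * operation, and takes the 64-bit difference between snapshot and
	 * source. Folding the upper half into the lower 32 bits via RSH/OR
	 * makes any spurious zero-extension of the source show up in the
	 * 32-bit return value, which must remain 0.
	 */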
11334 #define BPF_ALU32_SRC_ZEXT(op)					\
11335 	{							\
11336 		"ALU32_" #op "_X: src preserved in zext",	\
11337 		.u.insns_int = {				\
11338 			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
11339 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
11340 			BPF_ALU64_REG(BPF_MOV, R0, R1),		\
11341 			BPF_ALU32_REG(BPF_##op, R2, R1),	\
11342 			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
11343 			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
11344 			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
11345 			BPF_ALU64_REG(BPF_OR, R0, R1),		\
11346 			BPF_EXIT_INSN(),			\
11347 		},						\
11348 		INTERNAL,					\
11349 		{ },						\
11350 		{ { 0, 0 } },					\
11351 	}
11352 	BPF_ALU32_SRC_ZEXT(MOV),
11353 	BPF_ALU32_SRC_ZEXT(AND),
11354 	BPF_ALU32_SRC_ZEXT(OR),
11355 	BPF_ALU32_SRC_ZEXT(XOR),
11356 	BPF_ALU32_SRC_ZEXT(ADD),
11357 	BPF_ALU32_SRC_ZEXT(SUB),
11358 	BPF_ALU32_SRC_ZEXT(MUL),
11359 	BPF_ALU32_SRC_ZEXT(DIV),
11360 	BPF_ALU32_SRC_ZEXT(MOD),
11361 #undef BPF_ALU32_SRC_ZEXT
11362 	/* Checking that ATOMIC32 src is not zero extended in place */
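	/* Same difference-and-fold check, applied to the atomic op's source register. */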
11363 #define BPF_ATOMIC32_SRC_ZEXT(op)					\
11364 	{								\
11365 		"ATOMIC_W_" #op ": src preserved in zext",		\
11366 		.u.insns_int = {					\
11367 			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),	\
11368 			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
11369 			BPF_ST_MEM(BPF_W, R10, -4, 0),			\
11370 			BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4),	\
11371 			BPF_ALU64_REG(BPF_SUB, R0, R1),			\
11372 			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
11373 			BPF_ALU64_IMM(BPF_RSH, R1, 32),			\
11374 			BPF_ALU64_REG(BPF_OR, R0, R1),			\
11375 			BPF_EXIT_INSN(),				\
11376 		},							\
11377 		INTERNAL,						\
11378 		{ },							\
11379 		{ { 0, 0 } },						\
11380 		.stack_depth = 8,					\
11381 	}
11382 	BPF_ATOMIC32_SRC_ZEXT(ADD),
11383 	BPF_ATOMIC32_SRC_ZEXT(AND),
11384 	BPF_ATOMIC32_SRC_ZEXT(OR),
11385 	BPF_ATOMIC32_SRC_ZEXT(XOR),
11386 #undef BPF_ATOMIC32_SRC_ZEXT
11387 	/* Checking that CMPXCHG32 src is not zero extended in place */
11388 	{
11389 		"ATOMIC_W_CMPXCHG: src preserved in zext",
11390 		.u.insns_int = {
11391 			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
11392 			BPF_ALU64_REG(BPF_MOV, R2, R1),
11393 			BPF_ALU64_REG(BPF_MOV, R0, 0),
11394 			BPF_ST_MEM(BPF_W, R10, -4, 0),
11395 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
11396 			BPF_ALU64_REG(BPF_SUB, R1, R2),
11397 			BPF_ALU64_REG(BPF_MOV, R2, R1),
11398 			BPF_ALU64_IMM(BPF_RSH, R2, 32),
11399 			BPF_ALU64_REG(BPF_OR, R1, R2),
11400 			BPF_ALU64_REG(BPF_MOV, R0, R1),
11401 			BPF_EXIT_INSN(),
11402 		},
11403 		INTERNAL,
11404 		{ },
11405 		{ { 0, 0 } },
11406 		.stack_depth = 8,
11407 	},
11408 	/* Checking that JMP32 immediate src is not zero extended in place */
11409 #define BPF_JMP32_IMM_ZEXT(op)					\
11410 	{							\
11411 		"JMP32_" #op "_K: operand preserved in zext",	\
11412 		.u.insns_int = {				\
11413 			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
11414 			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
11415 			BPF_JMP32_IMM(BPF_##op, R0, 1234, 1),	\
11416 			BPF_JMP_A(0), /* Nop */			\
11417 			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
11418 			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
11419 			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
11420 			BPF_ALU64_REG(BPF_OR, R0, R1),		\
11421 			BPF_EXIT_INSN(),			\
11422 		},						\
11423 		INTERNAL,					\
11424 		{ },						\
11425 		{ { 0, 0 } },					\
11426 	}
11427 	BPF_JMP32_IMM_ZEXT(JEQ),
11428 	BPF_JMP32_IMM_ZEXT(JNE),
11429 	BPF_JMP32_IMM_ZEXT(JSET),
11430 	BPF_JMP32_IMM_ZEXT(JGT),
11431 	BPF_JMP32_IMM_ZEXT(JGE),
11432 	BPF_JMP32_IMM_ZEXT(JLT),
11433 	BPF_JMP32_IMM_ZEXT(JLE),
11434 	BPF_JMP32_IMM_ZEXT(JSGT),
11435 	BPF_JMP32_IMM_ZEXT(JSGE),
11436 	BPF_JMP32_IMM_ZEXT(JSGT),
11437 	BPF_JMP32_IMM_ZEXT(JSLT),
11438 	BPF_JMP32_IMM_ZEXT(JSLE),
11439 #undef BPF_JMP32_IMM_ZEXT
11440 	/* Checking that JMP32 dst & src are not zero extended in place */
11441 #define BPF_JMP32_REG_ZEXT(op)					\
11442 	{							\
11443 		"JMP32_" #op "_X: operands preserved in zext",	\
11444 		.u.insns_int = {				\
11445 			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
11446 			BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
11447 			BPF_ALU64_REG(BPF_MOV, R2, R0),		\
11448 			BPF_ALU64_REG(BPF_MOV, R3, R1),		\
11449 			BPF_JMP32_REG(BPF_##op, R0, R1, 1),	\
11450 			BPF_JMP_A(0), /* Nop */			\
11451 			BPF_ALU64_REG(BPF_SUB, R0, R2),		\
11452 			BPF_ALU64_REG(BPF_SUB, R1, R3),		\
11453 			BPF_ALU64_REG(BPF_OR, R0, R1),		\
11454 			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
11455 			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
11456 			BPF_ALU64_REG(BPF_OR, R0, R1),		\
11457 			BPF_EXIT_INSN(),			\
11458 		},						\
11459 		INTERNAL,					\
11460 		{ },						\
11461 		{ { 0, 0 } },					\
11462 	}
11463 	BPF_JMP32_REG_ZEXT(JEQ),
11464 	BPF_JMP32_REG_ZEXT(JNE),
11465 	BPF_JMP32_REG_ZEXT(JSET),
11466 	BPF_JMP32_REG_ZEXT(JGT),
11467 	BPF_JMP32_REG_ZEXT(JGE),
11468 	BPF_JMP32_REG_ZEXT(JLT),
11469 	BPF_JMP32_REG_ZEXT(JLE),
11470 	BPF_JMP32_REG_ZEXT(JSGT),
11471 	BPF_JMP32_REG_ZEXT(JSGE),
11472 	BPF_JMP32_REG_ZEXT(JSGT),
11473 	BPF_JMP32_REG_ZEXT(JSLT),
11474 	BPF_JMP32_REG_ZEXT(JSLE),
11475 #undef BPF_JMP32_REG_ZEXT
11476 	/* ALU64 K register combinations */
11477 	{
11478 		"ALU64_MOV_K: registers",
11479 		{ },
11480 		INTERNAL,
11481 		{ },
11482 		{ { 0, 1 } },
11483 		.fill_helper = bpf_fill_alu64_mov_imm_regs,
11484 	},
11485 	{
11486 		"ALU64_AND_K: registers",
11487 		{ },
11488 		INTERNAL,
11489 		{ },
11490 		{ { 0, 1 } },
11491 		.fill_helper = bpf_fill_alu64_and_imm_regs,
11492 	},
11493 	{
11494 		"ALU64_OR_K: registers",
11495 		{ },
11496 		INTERNAL,
11497 		{ },
11498 		{ { 0, 1 } },
11499 		.fill_helper = bpf_fill_alu64_or_imm_regs,
11500 	},
11501 	{
11502 		"ALU64_XOR_K: registers",
11503 		{ },
11504 		INTERNAL,
11505 		{ },
11506 		{ { 0, 1 } },
11507 		.fill_helper = bpf_fill_alu64_xor_imm_regs,
11508 	},
11509 	{
11510 		"ALU64_LSH_K: registers",
11511 		{ },
11512 		INTERNAL,
11513 		{ },
11514 		{ { 0, 1 } },
11515 		.fill_helper = bpf_fill_alu64_lsh_imm_regs,
11516 	},
11517 	{
11518 		"ALU64_RSH_K: registers",
11519 		{ },
11520 		INTERNAL,
11521 		{ },
11522 		{ { 0, 1 } },
11523 		.fill_helper = bpf_fill_alu64_rsh_imm_regs,
11524 	},
11525 	{
11526 		"ALU64_ARSH_K: registers",
11527 		{ },
11528 		INTERNAL,
11529 		{ },
11530 		{ { 0, 1 } },
11531 		.fill_helper = bpf_fill_alu64_arsh_imm_regs,
11532 	},
11533 	{
11534 		"ALU64_ADD_K: registers",
11535 		{ },
11536 		INTERNAL,
11537 		{ },
11538 		{ { 0, 1 } },
11539 		.fill_helper = bpf_fill_alu64_add_imm_regs,
11540 	},
11541 	{
11542 		"ALU64_SUB_K: registers",
11543 		{ },
11544 		INTERNAL,
11545 		{ },
11546 		{ { 0, 1 } },
11547 		.fill_helper = bpf_fill_alu64_sub_imm_regs,
11548 	},
11549 	{
11550 		"ALU64_MUL_K: registers",
11551 		{ },
11552 		INTERNAL,
11553 		{ },
11554 		{ { 0, 1 } },
11555 		.fill_helper = bpf_fill_alu64_mul_imm_regs,
11556 	},
11557 	{
11558 		"ALU64_DIV_K: registers",
11559 		{ },
11560 		INTERNAL,
11561 		{ },
11562 		{ { 0, 1 } },
11563 		.fill_helper = bpf_fill_alu64_div_imm_regs,
11564 	},
11565 	{
11566 		"ALU64_MOD_K: registers",
11567 		{ },
11568 		INTERNAL,
11569 		{ },
11570 		{ { 0, 1 } },
11571 		.fill_helper = bpf_fill_alu64_mod_imm_regs,
11572 	},
11573 	/* ALU32 K registers */
11574 	{
11575 		"ALU32_MOV_K: registers",
11576 		{ },
11577 		INTERNAL,
11578 		{ },
11579 		{ { 0, 1 } },
11580 		.fill_helper = bpf_fill_alu32_mov_imm_regs,
11581 	},
11582 	{
11583 		"ALU32_AND_K: registers",
11584 		{ },
11585 		INTERNAL,
11586 		{ },
11587 		{ { 0, 1 } },
11588 		.fill_helper = bpf_fill_alu32_and_imm_regs,
11589 	},
11590 	{
11591 		"ALU32_OR_K: registers",
11592 		{ },
11593 		INTERNAL,
11594 		{ },
11595 		{ { 0, 1 } },
11596 		.fill_helper = bpf_fill_alu32_or_imm_regs,
11597 	},
11598 	{
11599 		"ALU32_XOR_K: registers",
11600 		{ },
11601 		INTERNAL,
11602 		{ },
11603 		{ { 0, 1 } },
11604 		.fill_helper = bpf_fill_alu32_xor_imm_regs,
11605 	},
11606 	{
11607 		"ALU32_LSH_K: registers",
11608 		{ },
11609 		INTERNAL,
11610 		{ },
11611 		{ { 0, 1 } },
11612 		.fill_helper = bpf_fill_alu32_lsh_imm_regs,
11613 	},
11614 	{
11615 		"ALU32_RSH_K: registers",
11616 		{ },
11617 		INTERNAL,
11618 		{ },
11619 		{ { 0, 1 } },
11620 		.fill_helper = bpf_fill_alu32_rsh_imm_regs,
11621 	},
11622 	{
11623 		"ALU32_ARSH_K: registers",
11624 		{ },
11625 		INTERNAL,
11626 		{ },
11627 		{ { 0, 1 } },
11628 		.fill_helper = bpf_fill_alu32_arsh_imm_regs,
11629 	},
11630 	{
11631 		"ALU32_ADD_K: registers",
11632 		{ },
11633 		INTERNAL,
11634 		{ },
11635 		{ { 0, 1 } },
11636 		.fill_helper = bpf_fill_alu32_add_imm_regs,
11637 	},
11638 	{
11639 		"ALU32_SUB_K: registers",
11640 		{ },
11641 		INTERNAL,
11642 		{ },
11643 		{ { 0, 1 } },
11644 		.fill_helper = bpf_fill_alu32_sub_imm_regs,
11645 	},
11646 	{
11647 		"ALU32_MUL_K: registers",
11648 		{ },
11649 		INTERNAL,
11650 		{ },
11651 		{ { 0, 1 } },
11652 		.fill_helper = bpf_fill_alu32_mul_imm_regs,
11653 	},
11654 	{
11655 		"ALU32_DIV_K: registers",
11656 		{ },
11657 		INTERNAL,
11658 		{ },
11659 		{ { 0, 1 } },
11660 		.fill_helper = bpf_fill_alu32_div_imm_regs,
11661 	},
11662 	{
11663 		"ALU32_MOD_K: registers",
11664 		{ },
11665 		INTERNAL,
11666 		{ },
11667 		{ { 0, 1 } },
11668 		.fill_helper = bpf_fill_alu32_mod_imm_regs,
11669 	},
11670 	/* ALU64 X register combinations */
11671 	{
11672 		"ALU64_MOV_X: register combinations",
11673 		{ },
11674 		INTERNAL,
11675 		{ },
11676 		{ { 0, 1 } },
11677 		.fill_helper = bpf_fill_alu64_mov_reg_pairs,
11678 	},
11679 	{
11680 		"ALU64_AND_X: register combinations",
11681 		{ },
11682 		INTERNAL,
11683 		{ },
11684 		{ { 0, 1 } },
11685 		.fill_helper = bpf_fill_alu64_and_reg_pairs,
11686 	},
11687 	{
11688 		"ALU64_OR_X: register combinations",
11689 		{ },
11690 		INTERNAL,
11691 		{ },
11692 		{ { 0, 1 } },
11693 		.fill_helper = bpf_fill_alu64_or_reg_pairs,
11694 	},
11695 	{
11696 		"ALU64_XOR_X: register combinations",
11697 		{ },
11698 		INTERNAL,
11699 		{ },
11700 		{ { 0, 1 } },
11701 		.fill_helper = bpf_fill_alu64_xor_reg_pairs,
11702 	},
11703 	{
11704 		"ALU64_LSH_X: register combinations",
11705 		{ },
11706 		INTERNAL,
11707 		{ },
11708 		{ { 0, 1 } },
11709 		.fill_helper = bpf_fill_alu64_lsh_reg_pairs,
11710 	},
11711 	{
11712 		"ALU64_RSH_X: register combinations",
11713 		{ },
11714 		INTERNAL,
11715 		{ },
11716 		{ { 0, 1 } },
11717 		.fill_helper = bpf_fill_alu64_rsh_reg_pairs,
11718 	},
11719 	{
11720 		"ALU64_ARSH_X: register combinations",
11721 		{ },
11722 		INTERNAL,
11723 		{ },
11724 		{ { 0, 1 } },
11725 		.fill_helper = bpf_fill_alu64_arsh_reg_pairs,
11726 	},
11727 	{
11728 		"ALU64_ADD_X: register combinations",
11729 		{ },
11730 		INTERNAL,
11731 		{ },
11732 		{ { 0, 1 } },
11733 		.fill_helper = bpf_fill_alu64_add_reg_pairs,
11734 	},
11735 	{
11736 		"ALU64_SUB_X: register combinations",
11737 		{ },
11738 		INTERNAL,
11739 		{ },
11740 		{ { 0, 1 } },
11741 		.fill_helper = bpf_fill_alu64_sub_reg_pairs,
11742 	},
11743 	{
11744 		"ALU64_MUL_X: register combinations",
11745 		{ },
11746 		INTERNAL,
11747 		{ },
11748 		{ { 0, 1 } },
11749 		.fill_helper = bpf_fill_alu64_mul_reg_pairs,
11750 	},
11751 	{
11752 		"ALU64_DIV_X: register combinations",
11753 		{ },
11754 		INTERNAL,
11755 		{ },
11756 		{ { 0, 1 } },
11757 		.fill_helper = bpf_fill_alu64_div_reg_pairs,
11758 	},
11759 	{
11760 		"ALU64_MOD_X: register combinations",
11761 		{ },
11762 		INTERNAL,
11763 		{ },
11764 		{ { 0, 1 } },
11765 		.fill_helper = bpf_fill_alu64_mod_reg_pairs,
11766 	},
11767 	/* ALU32 X register combinations */
11768 	{
11769 		"ALU32_MOV_X: register combinations",
11770 		{ },
11771 		INTERNAL,
11772 		{ },
11773 		{ { 0, 1 } },
11774 		.fill_helper = bpf_fill_alu32_mov_reg_pairs,
11775 	},
11776 	{
11777 		"ALU32_AND_X: register combinations",
11778 		{ },
11779 		INTERNAL,
11780 		{ },
11781 		{ { 0, 1 } },
11782 		.fill_helper = bpf_fill_alu32_and_reg_pairs,
11783 	},
11784 	{
11785 		"ALU32_OR_X: register combinations",
11786 		{ },
11787 		INTERNAL,
11788 		{ },
11789 		{ { 0, 1 } },
11790 		.fill_helper = bpf_fill_alu32_or_reg_pairs,
11791 	},
11792 	{
11793 		"ALU32_XOR_X: register combinations",
11794 		{ },
11795 		INTERNAL,
11796 		{ },
11797 		{ { 0, 1 } },
11798 		.fill_helper = bpf_fill_alu32_xor_reg_pairs,
11799 	},
11800 	{
11801 		"ALU32_LSH_X: register combinations",
11802 		{ },
11803 		INTERNAL,
11804 		{ },
11805 		{ { 0, 1 } },
11806 		.fill_helper = bpf_fill_alu32_lsh_reg_pairs,
11807 	},
11808 	{
11809 		"ALU32_RSH_X: register combinations",
11810 		{ },
11811 		INTERNAL,
11812 		{ },
11813 		{ { 0, 1 } },
11814 		.fill_helper = bpf_fill_alu32_rsh_reg_pairs,
11815 	},
11816 	{
11817 		"ALU32_ARSH_X: register combinations",
11818 		{ },
11819 		INTERNAL,
11820 		{ },
11821 		{ { 0, 1 } },
11822 		.fill_helper = bpf_fill_alu32_arsh_reg_pairs,
11823 	},
11824 	{
11825 		"ALU32_ADD_X: register combinations",
11826 		{ },
11827 		INTERNAL,
11828 		{ },
11829 		{ { 0, 1 } },
11830 		.fill_helper = bpf_fill_alu32_add_reg_pairs,
11831 	},
11832 	{
11833 		"ALU32_SUB_X: register combinations",
11834 		{ },
11835 		INTERNAL,
11836 		{ },
11837 		{ { 0, 1 } },
11838 		.fill_helper = bpf_fill_alu32_sub_reg_pairs,
11839 	},
11840 	{
11841 		"ALU32_MUL_X: register combinations",
11842 		{ },
11843 		INTERNAL,
11844 		{ },
11845 		{ { 0, 1 } },
11846 		.fill_helper = bpf_fill_alu32_mul_reg_pairs,
11847 	},
11848 	{
11849 		"ALU32_DIV_X: register combinations",
11850 		{ },
11851 		INTERNAL,
11852 		{ },
11853 		{ { 0, 1 } },
11854 		.fill_helper = bpf_fill_alu32_div_reg_pairs,
11855 	},
11856 	{
11857 		"ALU32_MOD_X: register combinations",
11858 		{ },
11859 		INTERNAL,
11860 		{ },
11861 		{ { 0, 1 } },
11862 		.fill_helper = bpf_fill_alu32_mod_reg_pairs,
11863 	},
11864 	/* Exhaustive test of ALU64 shift operations */
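	/*
	 * "All shift values" presumably means the helpers emit one check per
	 * legal shift amount (0-63 for ALU64, and 0-31 for the ALU32 tests
	 * further down), in both immediate and register form, returning 1
	 * when every shifted result matches a reference value.
	 */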
11865 	{
11866 		"ALU64_LSH_K: all shift values",
11867 		{ },
11868 		INTERNAL | FLAG_NO_DATA,
11869 		{ },
11870 		{ { 0, 1 } },
11871 		.fill_helper = bpf_fill_alu64_lsh_imm,
11872 	},
11873 	{
11874 		"ALU64_RSH_K: all shift values",
11875 		{ },
11876 		INTERNAL | FLAG_NO_DATA,
11877 		{ },
11878 		{ { 0, 1 } },
11879 		.fill_helper = bpf_fill_alu64_rsh_imm,
11880 	},
11881 	{
11882 		"ALU64_ARSH_K: all shift values",
11883 		{ },
11884 		INTERNAL | FLAG_NO_DATA,
11885 		{ },
11886 		{ { 0, 1 } },
11887 		.fill_helper = bpf_fill_alu64_arsh_imm,
11888 	},
11889 	{
11890 		"ALU64_LSH_X: all shift values",
11891 		{ },
11892 		INTERNAL | FLAG_NO_DATA,
11893 		{ },
11894 		{ { 0, 1 } },
11895 		.fill_helper = bpf_fill_alu64_lsh_reg,
11896 	},
11897 	{
11898 		"ALU64_RSH_X: all shift values",
11899 		{ },
11900 		INTERNAL | FLAG_NO_DATA,
11901 		{ },
11902 		{ { 0, 1 } },
11903 		.fill_helper = bpf_fill_alu64_rsh_reg,
11904 	},
11905 	{
11906 		"ALU64_ARSH_X: all shift values",
11907 		{ },
11908 		INTERNAL | FLAG_NO_DATA,
11909 		{ },
11910 		{ { 0, 1 } },
11911 		.fill_helper = bpf_fill_alu64_arsh_reg,
11912 	},
11913 	/* Exhaustive test of ALU32 shift operations */
11914 	{
11915 		"ALU32_LSH_K: all shift values",
11916 		{ },
11917 		INTERNAL | FLAG_NO_DATA,
11918 		{ },
11919 		{ { 0, 1 } },
11920 		.fill_helper = bpf_fill_alu32_lsh_imm,
11921 	},
11922 	{
11923 		"ALU32_RSH_K: all shift values",
11924 		{ },
11925 		INTERNAL | FLAG_NO_DATA,
11926 		{ },
11927 		{ { 0, 1 } },
11928 		.fill_helper = bpf_fill_alu32_rsh_imm,
11929 	},
11930 	{
11931 		"ALU32_ARSH_K: all shift values",
11932 		{ },
11933 		INTERNAL | FLAG_NO_DATA,
11934 		{ },
11935 		{ { 0, 1 } },
11936 		.fill_helper = bpf_fill_alu32_arsh_imm,
11937 	},
11938 	{
11939 		"ALU32_LSH_X: all shift values",
11940 		{ },
11941 		INTERNAL | FLAG_NO_DATA,
11942 		{ },
11943 		{ { 0, 1 } },
11944 		.fill_helper = bpf_fill_alu32_lsh_reg,
11945 	},
11946 	{
11947 		"ALU32_RSH_X: all shift values",
11948 		{ },
11949 		INTERNAL | FLAG_NO_DATA,
11950 		{ },
11951 		{ { 0, 1 } },
11952 		.fill_helper = bpf_fill_alu32_rsh_reg,
11953 	},
11954 	{
11955 		"ALU32_ARSH_X: all shift values",
11956 		{ },
11957 		INTERNAL | FLAG_NO_DATA,
11958 		{ },
11959 		{ { 0, 1 } },
11960 		.fill_helper = bpf_fill_alu32_arsh_reg,
11961 	},
11962 	/*
11963 	 * Exhaustive test of ALU64 shift operations when
11964 	 * source and destination register are the same.
11965 	 */
11966 	{
11967 		"ALU64_LSH_X: all shift values with the same register",
11968 		{ },
11969 		INTERNAL | FLAG_NO_DATA,
11970 		{ },
11971 		{ { 0, 1 } },
11972 		.fill_helper = bpf_fill_alu64_lsh_same_reg,
11973 	},
11974 	{
11975 		"ALU64_RSH_X: all shift values with the same register",
11976 		{ },
11977 		INTERNAL | FLAG_NO_DATA,
11978 		{ },
11979 		{ { 0, 1 } },
11980 		.fill_helper = bpf_fill_alu64_rsh_same_reg,
11981 	},
11982 	{
11983 		"ALU64_ARSH_X: all shift values with the same register",
11984 		{ },
11985 		INTERNAL | FLAG_NO_DATA,
11986 		{ },
11987 		{ { 0, 1 } },
11988 		.fill_helper = bpf_fill_alu64_arsh_same_reg,
11989 	},
11990 	/*
11991 	 * Exhaustive test of ALU32 shift operations when
11992 	 * source and destination register are the same.
11993 	 */
11994 	{
11995 		"ALU32_LSH_X: all shift values with the same register",
11996 		{ },
11997 		INTERNAL | FLAG_NO_DATA,
11998 		{ },
11999 		{ { 0, 1 } },
12000 		.fill_helper = bpf_fill_alu32_lsh_same_reg,
12001 	},
12002 	{
12003 		"ALU32_RSH_X: all shift values with the same register",
12004 		{ },
12005 		INTERNAL | FLAG_NO_DATA,
12006 		{ },
12007 		{ { 0, 1 } },
12008 		.fill_helper = bpf_fill_alu32_rsh_same_reg,
12009 	},
12010 	{
12011 		"ALU32_ARSH_X: all shift values with the same register",
12012 		{ },
12013 		INTERNAL | FLAG_NO_DATA,
12014 		{ },
12015 		{ { 0, 1 } },
12016 		.fill_helper = bpf_fill_alu32_arsh_same_reg,
12017 	},
12018 	/* ALU64 immediate magnitudes */
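	/*
	 * The "magnitudes" helpers presumably sweep operand values of
	 * increasing bit width in both the lower and upper word. The
	 * resulting programs are large, so .nr_testruns limits how many
	 * times each one is executed.
	 */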
12019 	{
12020 		"ALU64_MOV_K: all immediate value magnitudes",
12021 		{ },
12022 		INTERNAL | FLAG_NO_DATA,
12023 		{ },
12024 		{ { 0, 1 } },
12025 		.fill_helper = bpf_fill_alu64_mov_imm,
12026 		.nr_testruns = NR_PATTERN_RUNS,
12027 	},
12028 	{
12029 		"ALU64_AND_K: all immediate value magnitudes",
12030 		{ },
12031 		INTERNAL | FLAG_NO_DATA,
12032 		{ },
12033 		{ { 0, 1 } },
12034 		.fill_helper = bpf_fill_alu64_and_imm,
12035 		.nr_testruns = NR_PATTERN_RUNS,
12036 	},
12037 	{
12038 		"ALU64_OR_K: all immediate value magnitudes",
12039 		{ },
12040 		INTERNAL | FLAG_NO_DATA,
12041 		{ },
12042 		{ { 0, 1 } },
12043 		.fill_helper = bpf_fill_alu64_or_imm,
12044 		.nr_testruns = NR_PATTERN_RUNS,
12045 	},
12046 	{
12047 		"ALU64_XOR_K: all immediate value magnitudes",
12048 		{ },
12049 		INTERNAL | FLAG_NO_DATA,
12050 		{ },
12051 		{ { 0, 1 } },
12052 		.fill_helper = bpf_fill_alu64_xor_imm,
12053 		.nr_testruns = NR_PATTERN_RUNS,
12054 	},
12055 	{
12056 		"ALU64_ADD_K: all immediate value magnitudes",
12057 		{ },
12058 		INTERNAL | FLAG_NO_DATA,
12059 		{ },
12060 		{ { 0, 1 } },
12061 		.fill_helper = bpf_fill_alu64_add_imm,
12062 		.nr_testruns = NR_PATTERN_RUNS,
12063 	},
12064 	{
12065 		"ALU64_SUB_K: all immediate value magnitudes",
12066 		{ },
12067 		INTERNAL | FLAG_NO_DATA,
12068 		{ },
12069 		{ { 0, 1 } },
12070 		.fill_helper = bpf_fill_alu64_sub_imm,
12071 		.nr_testruns = NR_PATTERN_RUNS,
12072 	},
12073 	{
12074 		"ALU64_MUL_K: all immediate value magnitudes",
12075 		{ },
12076 		INTERNAL | FLAG_NO_DATA,
12077 		{ },
12078 		{ { 0, 1 } },
12079 		.fill_helper = bpf_fill_alu64_mul_imm,
12080 		.nr_testruns = NR_PATTERN_RUNS,
12081 	},
12082 	{
12083 		"ALU64_DIV_K: all immediate value magnitudes",
12084 		{ },
12085 		INTERNAL | FLAG_NO_DATA,
12086 		{ },
12087 		{ { 0, 1 } },
12088 		.fill_helper = bpf_fill_alu64_div_imm,
12089 		.nr_testruns = NR_PATTERN_RUNS,
12090 	},
12091 	{
12092 		"ALU64_MOD_K: all immediate value magnitudes",
12093 		{ },
12094 		INTERNAL | FLAG_NO_DATA,
12095 		{ },
12096 		{ { 0, 1 } },
12097 		.fill_helper = bpf_fill_alu64_mod_imm,
12098 		.nr_testruns = NR_PATTERN_RUNS,
12099 	},
12100 	/* ALU32 immediate magnitudes */
12101 	{
12102 		"ALU32_MOV_K: all immediate value magnitudes",
12103 		{ },
12104 		INTERNAL | FLAG_NO_DATA,
12105 		{ },
12106 		{ { 0, 1 } },
12107 		.fill_helper = bpf_fill_alu32_mov_imm,
12108 		.nr_testruns = NR_PATTERN_RUNS,
12109 	},
12110 	{
12111 		"ALU32_AND_K: all immediate value magnitudes",
12112 		{ },
12113 		INTERNAL | FLAG_NO_DATA,
12114 		{ },
12115 		{ { 0, 1 } },
12116 		.fill_helper = bpf_fill_alu32_and_imm,
12117 		.nr_testruns = NR_PATTERN_RUNS,
12118 	},
12119 	{
12120 		"ALU32_OR_K: all immediate value magnitudes",
12121 		{ },
12122 		INTERNAL | FLAG_NO_DATA,
12123 		{ },
12124 		{ { 0, 1 } },
12125 		.fill_helper = bpf_fill_alu32_or_imm,
12126 		.nr_testruns = NR_PATTERN_RUNS,
12127 	},
12128 	{
12129 		"ALU32_XOR_K: all immediate value magnitudes",
12130 		{ },
12131 		INTERNAL | FLAG_NO_DATA,
12132 		{ },
12133 		{ { 0, 1 } },
12134 		.fill_helper = bpf_fill_alu32_xor_imm,
12135 		.nr_testruns = NR_PATTERN_RUNS,
12136 	},
12137 	{
12138 		"ALU32_ADD_K: all immediate value magnitudes",
12139 		{ },
12140 		INTERNAL | FLAG_NO_DATA,
12141 		{ },
12142 		{ { 0, 1 } },
12143 		.fill_helper = bpf_fill_alu32_add_imm,
12144 		.nr_testruns = NR_PATTERN_RUNS,
12145 	},
12146 	{
12147 		"ALU32_SUB_K: all immediate value magnitudes",
12148 		{ },
12149 		INTERNAL | FLAG_NO_DATA,
12150 		{ },
12151 		{ { 0, 1 } },
12152 		.fill_helper = bpf_fill_alu32_sub_imm,
12153 		.nr_testruns = NR_PATTERN_RUNS,
12154 	},
12155 	{
12156 		"ALU32_MUL_K: all immediate value magnitudes",
12157 		{ },
12158 		INTERNAL | FLAG_NO_DATA,
12159 		{ },
12160 		{ { 0, 1 } },
12161 		.fill_helper = bpf_fill_alu32_mul_imm,
12162 		.nr_testruns = NR_PATTERN_RUNS,
12163 	},
12164 	{
12165 		"ALU32_DIV_K: all immediate value magnitudes",
12166 		{ },
12167 		INTERNAL | FLAG_NO_DATA,
12168 		{ },
12169 		{ { 0, 1 } },
12170 		.fill_helper = bpf_fill_alu32_div_imm,
12171 		.nr_testruns = NR_PATTERN_RUNS,
12172 	},
12173 	{
12174 		"ALU32_MOD_K: all immediate value magnitudes",
12175 		{ },
12176 		INTERNAL | FLAG_NO_DATA,
12177 		{ },
12178 		{ { 0, 1 } },
12179 		.fill_helper = bpf_fill_alu32_mod_imm,
12180 		.nr_testruns = NR_PATTERN_RUNS,
12181 	},
12182 	/* ALU64 register magnitudes */
12183 	{
12184 		"ALU64_MOV_X: all register value magnitudes",
12185 		{ },
12186 		INTERNAL | FLAG_NO_DATA,
12187 		{ },
12188 		{ { 0, 1 } },
12189 		.fill_helper = bpf_fill_alu64_mov_reg,
12190 		.nr_testruns = NR_PATTERN_RUNS,
12191 	},
12192 	{
12193 		"ALU64_AND_X: all register value magnitudes",
12194 		{ },
12195 		INTERNAL | FLAG_NO_DATA,
12196 		{ },
12197 		{ { 0, 1 } },
12198 		.fill_helper = bpf_fill_alu64_and_reg,
12199 		.nr_testruns = NR_PATTERN_RUNS,
12200 	},
12201 	{
12202 		"ALU64_OR_X: all register value magnitudes",
12203 		{ },
12204 		INTERNAL | FLAG_NO_DATA,
12205 		{ },
12206 		{ { 0, 1 } },
12207 		.fill_helper = bpf_fill_alu64_or_reg,
12208 		.nr_testruns = NR_PATTERN_RUNS,
12209 	},
12210 	{
12211 		"ALU64_XOR_X: all register value magnitudes",
12212 		{ },
12213 		INTERNAL | FLAG_NO_DATA,
12214 		{ },
12215 		{ { 0, 1 } },
12216 		.fill_helper = bpf_fill_alu64_xor_reg,
12217 		.nr_testruns = NR_PATTERN_RUNS,
12218 	},
12219 	{
12220 		"ALU64_ADD_X: all register value magnitudes",
12221 		{ },
12222 		INTERNAL | FLAG_NO_DATA,
12223 		{ },
12224 		{ { 0, 1 } },
12225 		.fill_helper = bpf_fill_alu64_add_reg,
12226 		.nr_testruns = NR_PATTERN_RUNS,
12227 	},
12228 	{
12229 		"ALU64_SUB_X: all register value magnitudes",
12230 		{ },
12231 		INTERNAL | FLAG_NO_DATA,
12232 		{ },
12233 		{ { 0, 1 } },
12234 		.fill_helper = bpf_fill_alu64_sub_reg,
12235 		.nr_testruns = NR_PATTERN_RUNS,
12236 	},
12237 	{
12238 		"ALU64_MUL_X: all register value magnitudes",
12239 		{ },
12240 		INTERNAL | FLAG_NO_DATA,
12241 		{ },
12242 		{ { 0, 1 } },
12243 		.fill_helper = bpf_fill_alu64_mul_reg,
12244 		.nr_testruns = NR_PATTERN_RUNS,
12245 	},
12246 	{
12247 		"ALU64_DIV_X: all register value magnitudes",
12248 		{ },
12249 		INTERNAL | FLAG_NO_DATA,
12250 		{ },
12251 		{ { 0, 1 } },
12252 		.fill_helper = bpf_fill_alu64_div_reg,
12253 		.nr_testruns = NR_PATTERN_RUNS,
12254 	},
12255 	{
12256 		"ALU64_MOD_X: all register value magnitudes",
12257 		{ },
12258 		INTERNAL | FLAG_NO_DATA,
12259 		{ },
12260 		{ { 0, 1 } },
12261 		.fill_helper = bpf_fill_alu64_mod_reg,
12262 		.nr_testruns = NR_PATTERN_RUNS,
12263 	},
12264 	/* ALU32 register magnitudes */
12265 	{
12266 		"ALU32_MOV_X: all register value magnitudes",
12267 		{ },
12268 		INTERNAL | FLAG_NO_DATA,
12269 		{ },
12270 		{ { 0, 1 } },
12271 		.fill_helper = bpf_fill_alu32_mov_reg,
12272 		.nr_testruns = NR_PATTERN_RUNS,
12273 	},
12274 	{
12275 		"ALU32_AND_X: all register value magnitudes",
12276 		{ },
12277 		INTERNAL | FLAG_NO_DATA,
12278 		{ },
12279 		{ { 0, 1 } },
12280 		.fill_helper = bpf_fill_alu32_and_reg,
12281 		.nr_testruns = NR_PATTERN_RUNS,
12282 	},
12283 	{
12284 		"ALU32_OR_X: all register value magnitudes",
12285 		{ },
12286 		INTERNAL | FLAG_NO_DATA,
12287 		{ },
12288 		{ { 0, 1 } },
12289 		.fill_helper = bpf_fill_alu32_or_reg,
12290 		.nr_testruns = NR_PATTERN_RUNS,
12291 	},
12292 	{
12293 		"ALU32_XOR_X: all register value magnitudes",
12294 		{ },
12295 		INTERNAL | FLAG_NO_DATA,
12296 		{ },
12297 		{ { 0, 1 } },
12298 		.fill_helper = bpf_fill_alu32_xor_reg,
12299 		.nr_testruns = NR_PATTERN_RUNS,
12300 	},
12301 	{
12302 		"ALU32_ADD_X: all register value magnitudes",
12303 		{ },
12304 		INTERNAL | FLAG_NO_DATA,
12305 		{ },
12306 		{ { 0, 1 } },
12307 		.fill_helper = bpf_fill_alu32_add_reg,
12308 		.nr_testruns = NR_PATTERN_RUNS,
12309 	},
12310 	{
12311 		"ALU32_SUB_X: all register value magnitudes",
12312 		{ },
12313 		INTERNAL | FLAG_NO_DATA,
12314 		{ },
12315 		{ { 0, 1 } },
12316 		.fill_helper = bpf_fill_alu32_sub_reg,
12317 		.nr_testruns = NR_PATTERN_RUNS,
12318 	},
12319 	{
12320 		"ALU32_MUL_X: all register value magnitudes",
12321 		{ },
12322 		INTERNAL | FLAG_NO_DATA,
12323 		{ },
12324 		{ { 0, 1 } },
12325 		.fill_helper = bpf_fill_alu32_mul_reg,
12326 		.nr_testruns = NR_PATTERN_RUNS,
12327 	},
12328 	{
12329 		"ALU32_DIV_X: all register value magnitudes",
12330 		{ },
12331 		INTERNAL | FLAG_NO_DATA,
12332 		{ },
12333 		{ { 0, 1 } },
12334 		.fill_helper = bpf_fill_alu32_div_reg,
12335 		.nr_testruns = NR_PATTERN_RUNS,
12336 	},
12337 	{
12338 		"ALU32_MOD_X: all register value magnitudes",
12339 		{ },
12340 		INTERNAL | FLAG_NO_DATA,
12341 		{ },
12342 		{ { 0, 1 } },
12343 		.fill_helper = bpf_fill_alu32_mod_reg,
12344 		.nr_testruns = NR_PATTERN_RUNS,
12345 	},
12346 	/* LD_IMM64 immediate magnitudes */
12347 	{
12348 		"LD_IMM64: all immediate value magnitudes",
12349 		{ },
12350 		INTERNAL | FLAG_NO_DATA,
12351 		{ },
12352 		{ { 0, 1 } },
12353 		.fill_helper = bpf_fill_ld_imm64,
12354 	},
12355 	/* 64-bit ATOMIC register combinations */
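	/*
	 * Sketch of what each atomic reg-pair helper presumably does: for
	 * every (dst, src) register pair it places a known value in a
	 * doubleword stack slot, runs the atomic operation on that slot and
	 * verifies the resulting memory contents (and, for the fetching
	 * variants, the returned old value). The scratch slot is why every
	 * entry sets .stack_depth = 8.
	 */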
12356 	{
12357 		"ATOMIC_DW_ADD: register combinations",
12358 		{ },
12359 		INTERNAL,
12360 		{ },
12361 		{ { 0, 1 } },
12362 		.fill_helper = bpf_fill_atomic64_add_reg_pairs,
12363 		.stack_depth = 8,
12364 	},
12365 	{
12366 		"ATOMIC_DW_AND: register combinations",
12367 		{ },
12368 		INTERNAL,
12369 		{ },
12370 		{ { 0, 1 } },
12371 		.fill_helper = bpf_fill_atomic64_and_reg_pairs,
12372 		.stack_depth = 8,
12373 	},
12374 	{
12375 		"ATOMIC_DW_OR: register combinations",
12376 		{ },
12377 		INTERNAL,
12378 		{ },
12379 		{ { 0, 1 } },
12380 		.fill_helper = bpf_fill_atomic64_or_reg_pairs,
12381 		.stack_depth = 8,
12382 	},
12383 	{
12384 		"ATOMIC_DW_XOR: register combinations",
12385 		{ },
12386 		INTERNAL,
12387 		{ },
12388 		{ { 0, 1 } },
12389 		.fill_helper = bpf_fill_atomic64_xor_reg_pairs,
12390 		.stack_depth = 8,
12391 	},
12392 	{
12393 		"ATOMIC_DW_ADD_FETCH: register combinations",
12394 		{ },
12395 		INTERNAL,
12396 		{ },
12397 		{ { 0, 1 } },
12398 		.fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
12399 		.stack_depth = 8,
12400 	},
12401 	{
12402 		"ATOMIC_DW_AND_FETCH: register combinations",
12403 		{ },
12404 		INTERNAL,
12405 		{ },
12406 		{ { 0, 1 } },
12407 		.fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
12408 		.stack_depth = 8,
12409 	},
12410 	{
12411 		"ATOMIC_DW_OR_FETCH: register combinations",
12412 		{ },
12413 		INTERNAL,
12414 		{ },
12415 		{ { 0, 1 } },
12416 		.fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
12417 		.stack_depth = 8,
12418 	},
12419 	{
12420 		"ATOMIC_DW_XOR_FETCH: register combinations",
12421 		{ },
12422 		INTERNAL,
12423 		{ },
12424 		{ { 0, 1 } },
12425 		.fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
12426 		.stack_depth = 8,
12427 	},
12428 	{
12429 		"ATOMIC_DW_XCHG: register combinations",
12430 		{ },
12431 		INTERNAL,
12432 		{ },
12433 		{ { 0, 1 } },
12434 		.fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
12435 		.stack_depth = 8,
12436 	},
12437 	{
12438 		"ATOMIC_DW_CMPXCHG: register combinations",
12439 		{ },
12440 		INTERNAL,
12441 		{ },
12442 		{ { 0, 1 } },
12443 		.fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
12444 		.stack_depth = 8,
12445 	},
12446 	/* 32-bit ATOMIC register combinations */
12447 	{
12448 		"ATOMIC_W_ADD: register combinations",
12449 		{ },
12450 		INTERNAL,
12451 		{ },
12452 		{ { 0, 1 } },
12453 		.fill_helper = bpf_fill_atomic32_add_reg_pairs,
12454 		.stack_depth = 8,
12455 	},
12456 	{
12457 		"ATOMIC_W_AND: register combinations",
12458 		{ },
12459 		INTERNAL,
12460 		{ },
12461 		{ { 0, 1 } },
12462 		.fill_helper = bpf_fill_atomic32_and_reg_pairs,
12463 		.stack_depth = 8,
12464 	},
12465 	{
12466 		"ATOMIC_W_OR: register combinations",
12467 		{ },
12468 		INTERNAL,
12469 		{ },
12470 		{ { 0, 1 } },
12471 		.fill_helper = bpf_fill_atomic32_or_reg_pairs,
12472 		.stack_depth = 8,
12473 	},
12474 	{
12475 		"ATOMIC_W_XOR: register combinations",
12476 		{ },
12477 		INTERNAL,
12478 		{ },
12479 		{ { 0, 1 } },
12480 		.fill_helper = bpf_fill_atomic32_xor_reg_pairs,
12481 		.stack_depth = 8,
12482 	},
12483 	{
12484 		"ATOMIC_W_ADD_FETCH: register combinations",
12485 		{ },
12486 		INTERNAL,
12487 		{ },
12488 		{ { 0, 1 } },
12489 		.fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
12490 		.stack_depth = 8,
12491 	},
12492 	{
12493 		"ATOMIC_W_AND_FETCH: register combinations",
12494 		{ },
12495 		INTERNAL,
12496 		{ },
12497 		{ { 0, 1 } },
12498 		.fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
12499 		.stack_depth = 8,
12500 	},
12501 	{
12502 		"ATOMIC_W_OR_FETCH: register combinations",
12503 		{ },
12504 		INTERNAL,
12505 		{ },
12506 		{ { 0, 1 } },
12507 		.fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
12508 		.stack_depth = 8,
12509 	},
12510 	{
12511 		"ATOMIC_W_XOR_FETCH: register combinations",
12512 		{ },
12513 		INTERNAL,
12514 		{ },
12515 		{ { 0, 1 } },
12516 		.fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
12517 		.stack_depth = 8,
12518 	},
12519 	{
12520 		"ATOMIC_W_XCHG: register combinations",
12521 		{ },
12522 		INTERNAL,
12523 		{ },
12524 		{ { 0, 1 } },
12525 		.fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
12526 		.stack_depth = 8,
12527 	},
12528 	{
12529 		"ATOMIC_W_CMPXCHG: register combinations",
12530 		{ },
12531 		INTERNAL,
12532 		{ },
12533 		{ { 0, 1 } },
12534 		.fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
12535 		.stack_depth = 8,
12536 	},
12537 	/* 64-bit ATOMIC magnitudes */
12538 	{
12539 		"ATOMIC_DW_ADD: all operand magnitudes",
12540 		{ },
12541 		INTERNAL | FLAG_NO_DATA,
12542 		{ },
12543 		{ { 0, 1 } },
12544 		.fill_helper = bpf_fill_atomic64_add,
12545 		.stack_depth = 8,
12546 		.nr_testruns = NR_PATTERN_RUNS,
12547 	},
12548 	{
12549 		"ATOMIC_DW_AND: all operand magnitudes",
12550 		{ },
12551 		INTERNAL | FLAG_NO_DATA,
12552 		{ },
12553 		{ { 0, 1 } },
12554 		.fill_helper = bpf_fill_atomic64_and,
12555 		.stack_depth = 8,
12556 		.nr_testruns = NR_PATTERN_RUNS,
12557 	},
12558 	{
12559 		"ATOMIC_DW_OR: all operand magnitudes",
12560 		{ },
12561 		INTERNAL | FLAG_NO_DATA,
12562 		{ },
12563 		{ { 0, 1 } },
12564 		.fill_helper = bpf_fill_atomic64_or,
12565 		.stack_depth = 8,
12566 		.nr_testruns = NR_PATTERN_RUNS,
12567 	},
12568 	{
12569 		"ATOMIC_DW_XOR: all operand magnitudes",
12570 		{ },
12571 		INTERNAL | FLAG_NO_DATA,
12572 		{ },
12573 		{ { 0, 1 } },
12574 		.fill_helper = bpf_fill_atomic64_xor,
12575 		.stack_depth = 8,
12576 		.nr_testruns = NR_PATTERN_RUNS,
12577 	},
12578 	{
12579 		"ATOMIC_DW_ADD_FETCH: all operand magnitudes",
12580 		{ },
12581 		INTERNAL | FLAG_NO_DATA,
12582 		{ },
12583 		{ { 0, 1 } },
12584 		.fill_helper = bpf_fill_atomic64_add_fetch,
12585 		.stack_depth = 8,
12586 		.nr_testruns = NR_PATTERN_RUNS,
12587 	},
12588 	{
12589 		"ATOMIC_DW_AND_FETCH: all operand magnitudes",
12590 		{ },
12591 		INTERNAL | FLAG_NO_DATA,
12592 		{ },
12593 		{ { 0, 1 } },
12594 		.fill_helper = bpf_fill_atomic64_and_fetch,
12595 		.stack_depth = 8,
12596 		.nr_testruns = NR_PATTERN_RUNS,
12597 	},
12598 	{
12599 		"ATOMIC_DW_OR_FETCH: all operand magnitudes",
12600 		{ },
12601 		INTERNAL | FLAG_NO_DATA,
12602 		{ },
12603 		{ { 0, 1 } },
12604 		.fill_helper = bpf_fill_atomic64_or_fetch,
12605 		.stack_depth = 8,
12606 		.nr_testruns = NR_PATTERN_RUNS,
12607 	},
12608 	{
12609 		"ATOMIC_DW_XOR_FETCH: all operand magnitudes",
12610 		{ },
12611 		INTERNAL | FLAG_NO_DATA,
12612 		{ },
12613 		{ { 0, 1 } },
12614 		.fill_helper = bpf_fill_atomic64_xor_fetch,
12615 		.stack_depth = 8,
12616 		.nr_testruns = NR_PATTERN_RUNS,
12617 	},
12618 	{
12619 		"ATOMIC_DW_XCHG: all operand magnitudes",
12620 		{ },
12621 		INTERNAL | FLAG_NO_DATA,
12622 		{ },
12623 		{ { 0, 1 } },
12624 		.fill_helper = bpf_fill_atomic64_xchg,
12625 		.stack_depth = 8,
12626 		.nr_testruns = NR_PATTERN_RUNS,
12627 	},
12628 	{
12629 		"ATOMIC_DW_CMPXCHG: all operand magnitudes",
12630 		{ },
12631 		INTERNAL | FLAG_NO_DATA,
12632 		{ },
12633 		{ { 0, 1 } },
12634 		.fill_helper = bpf_fill_cmpxchg64,
12635 		.stack_depth = 8,
12636 		.nr_testruns = NR_PATTERN_RUNS,
12637 	},
12638 	/* 32-bit ATOMIC magnitudes */
12639 	{
12640 		"ATOMIC_W_ADD: all operand magnitudes",
12641 		{ },
12642 		INTERNAL | FLAG_NO_DATA,
12643 		{ },
12644 		{ { 0, 1 } },
12645 		.fill_helper = bpf_fill_atomic32_add,
12646 		.stack_depth = 8,
12647 		.nr_testruns = NR_PATTERN_RUNS,
12648 	},
12649 	{
12650 		"ATOMIC_W_AND: all operand magnitudes",
12651 		{ },
12652 		INTERNAL | FLAG_NO_DATA,
12653 		{ },
12654 		{ { 0, 1 } },
12655 		.fill_helper = bpf_fill_atomic32_and,
12656 		.stack_depth = 8,
12657 		.nr_testruns = NR_PATTERN_RUNS,
12658 	},
12659 	{
12660 		"ATOMIC_W_OR: all operand magnitudes",
12661 		{ },
12662 		INTERNAL | FLAG_NO_DATA,
12663 		{ },
12664 		{ { 0, 1 } },
12665 		.fill_helper = bpf_fill_atomic32_or,
12666 		.stack_depth = 8,
12667 		.nr_testruns = NR_PATTERN_RUNS,
12668 	},
12669 	{
12670 		"ATOMIC_W_XOR: all operand magnitudes",
12671 		{ },
12672 		INTERNAL | FLAG_NO_DATA,
12673 		{ },
12674 		{ { 0, 1 } },
12675 		.fill_helper = bpf_fill_atomic32_xor,
12676 		.stack_depth = 8,
12677 		.nr_testruns = NR_PATTERN_RUNS,
12678 	},
12679 	{
12680 		"ATOMIC_W_ADD_FETCH: all operand magnitudes",
12681 		{ },
12682 		INTERNAL | FLAG_NO_DATA,
12683 		{ },
12684 		{ { 0, 1 } },
12685 		.fill_helper = bpf_fill_atomic32_add_fetch,
12686 		.stack_depth = 8,
12687 		.nr_testruns = NR_PATTERN_RUNS,
12688 	},
12689 	{
12690 		"ATOMIC_W_AND_FETCH: all operand magnitudes",
12691 		{ },
12692 		INTERNAL | FLAG_NO_DATA,
12693 		{ },
12694 		{ { 0, 1 } },
12695 		.fill_helper = bpf_fill_atomic32_and_fetch,
12696 		.stack_depth = 8,
12697 		.nr_testruns = NR_PATTERN_RUNS,
12698 	},
12699 	{
12700 		"ATOMIC_W_OR_FETCH: all operand magnitudes",
12701 		{ },
12702 		INTERNAL | FLAG_NO_DATA,
12703 		{ },
12704 		{ { 0, 1 } },
12705 		.fill_helper = bpf_fill_atomic32_or_fetch,
12706 		.stack_depth = 8,
12707 		.nr_testruns = NR_PATTERN_RUNS,
12708 	},
12709 	{
12710 		"ATOMIC_W_XOR_FETCH: all operand magnitudes",
12711 		{ },
12712 		INTERNAL | FLAG_NO_DATA,
12713 		{ },
12714 		{ { 0, 1 } },
12715 		.fill_helper = bpf_fill_atomic32_xor_fetch,
12716 		.stack_depth = 8,
12717 		.nr_testruns = NR_PATTERN_RUNS,
12718 	},
12719 	{
12720 		"ATOMIC_W_XCHG: all operand magnitudes",
12721 		{ },
12722 		INTERNAL | FLAG_NO_DATA,
12723 		{ },
12724 		{ { 0, 1 } },
12725 		.fill_helper = bpf_fill_atomic32_xchg,
12726 		.stack_depth = 8,
12727 		.nr_testruns = NR_PATTERN_RUNS,
12728 	},
12729 	{
12730 		"ATOMIC_W_CMPXCHG: all operand magnitudes",
12731 		{ },
12732 		INTERNAL | FLAG_NO_DATA,
12733 		{ },
12734 		{ { 0, 1 } },
12735 		.fill_helper = bpf_fill_cmpxchg32,
12736 		.stack_depth = 8,
12737 		.nr_testruns = NR_PATTERN_RUNS,
12738 	},
12739 	/* JMP immediate magnitudes */
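	/*
	 * Same operand-magnitude sweep as above, applied to each conditional
	 * jump: the helpers presumably compare patterned register values
	 * against patterned immediates and return 1 only if every branch
	 * goes the way a reference computation predicts.
	 */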
12740 	{
12741 		"JMP_JSET_K: all immediate value magnitudes",
12742 		{ },
12743 		INTERNAL | FLAG_NO_DATA,
12744 		{ },
12745 		{ { 0, 1 } },
12746 		.fill_helper = bpf_fill_jmp_jset_imm,
12747 		.nr_testruns = NR_PATTERN_RUNS,
12748 	},
12749 	{
12750 		"JMP_JEQ_K: all immediate value magnitudes",
12751 		{ },
12752 		INTERNAL | FLAG_NO_DATA,
12753 		{ },
12754 		{ { 0, 1 } },
12755 		.fill_helper = bpf_fill_jmp_jeq_imm,
12756 		.nr_testruns = NR_PATTERN_RUNS,
12757 	},
12758 	{
12759 		"JMP_JNE_K: all immediate value magnitudes",
12760 		{ },
12761 		INTERNAL | FLAG_NO_DATA,
12762 		{ },
12763 		{ { 0, 1 } },
12764 		.fill_helper = bpf_fill_jmp_jne_imm,
12765 		.nr_testruns = NR_PATTERN_RUNS,
12766 	},
12767 	{
12768 		"JMP_JGT_K: all immediate value magnitudes",
12769 		{ },
12770 		INTERNAL | FLAG_NO_DATA,
12771 		{ },
12772 		{ { 0, 1 } },
12773 		.fill_helper = bpf_fill_jmp_jgt_imm,
12774 		.nr_testruns = NR_PATTERN_RUNS,
12775 	},
12776 	{
12777 		"JMP_JGE_K: all immediate value magnitudes",
12778 		{ },
12779 		INTERNAL | FLAG_NO_DATA,
12780 		{ },
12781 		{ { 0, 1 } },
12782 		.fill_helper = bpf_fill_jmp_jge_imm,
12783 		.nr_testruns = NR_PATTERN_RUNS,
12784 	},
12785 	{
12786 		"JMP_JLT_K: all immediate value magnitudes",
12787 		{ },
12788 		INTERNAL | FLAG_NO_DATA,
12789 		{ },
12790 		{ { 0, 1 } },
12791 		.fill_helper = bpf_fill_jmp_jlt_imm,
12792 		.nr_testruns = NR_PATTERN_RUNS,
12793 	},
12794 	{
12795 		"JMP_JLE_K: all immediate value magnitudes",
12796 		{ },
12797 		INTERNAL | FLAG_NO_DATA,
12798 		{ },
12799 		{ { 0, 1 } },
12800 		.fill_helper = bpf_fill_jmp_jle_imm,
12801 		.nr_testruns = NR_PATTERN_RUNS,
12802 	},
12803 	{
12804 		"JMP_JSGT_K: all immediate value magnitudes",
12805 		{ },
12806 		INTERNAL | FLAG_NO_DATA,
12807 		{ },
12808 		{ { 0, 1 } },
12809 		.fill_helper = bpf_fill_jmp_jsgt_imm,
12810 		.nr_testruns = NR_PATTERN_RUNS,
12811 	},
12812 	{
12813 		"JMP_JSGE_K: all immediate value magnitudes",
12814 		{ },
12815 		INTERNAL | FLAG_NO_DATA,
12816 		{ },
12817 		{ { 0, 1 } },
12818 		.fill_helper = bpf_fill_jmp_jsge_imm,
12819 		.nr_testruns = NR_PATTERN_RUNS,
12820 	},
12821 	{
12822 		"JMP_JSLT_K: all immediate value magnitudes",
12823 		{ },
12824 		INTERNAL | FLAG_NO_DATA,
12825 		{ },
12826 		{ { 0, 1 } },
12827 		.fill_helper = bpf_fill_jmp_jslt_imm,
12828 		.nr_testruns = NR_PATTERN_RUNS,
12829 	},
12830 	{
12831 		"JMP_JSLE_K: all immediate value magnitudes",
12832 		{ },
12833 		INTERNAL | FLAG_NO_DATA,
12834 		{ },
12835 		{ { 0, 1 } },
12836 		.fill_helper = bpf_fill_jmp_jsle_imm,
12837 		.nr_testruns = NR_PATTERN_RUNS,
12838 	},
12839 	/* JMP register magnitudes */
12840 	{
12841 		"JMP_JSET_X: all register value magnitudes",
12842 		{ },
12843 		INTERNAL | FLAG_NO_DATA,
12844 		{ },
12845 		{ { 0, 1 } },
12846 		.fill_helper = bpf_fill_jmp_jset_reg,
12847 		.nr_testruns = NR_PATTERN_RUNS,
12848 	},
12849 	{
12850 		"JMP_JEQ_X: all register value magnitudes",
12851 		{ },
12852 		INTERNAL | FLAG_NO_DATA,
12853 		{ },
12854 		{ { 0, 1 } },
12855 		.fill_helper = bpf_fill_jmp_jeq_reg,
12856 		.nr_testruns = NR_PATTERN_RUNS,
12857 	},
12858 	{
12859 		"JMP_JNE_X: all register value magnitudes",
12860 		{ },
12861 		INTERNAL | FLAG_NO_DATA,
12862 		{ },
12863 		{ { 0, 1 } },
12864 		.fill_helper = bpf_fill_jmp_jne_reg,
12865 		.nr_testruns = NR_PATTERN_RUNS,
12866 	},
12867 	{
12868 		"JMP_JGT_X: all register value magnitudes",
12869 		{ },
12870 		INTERNAL | FLAG_NO_DATA,
12871 		{ },
12872 		{ { 0, 1 } },
12873 		.fill_helper = bpf_fill_jmp_jgt_reg,
12874 		.nr_testruns = NR_PATTERN_RUNS,
12875 	},
12876 	{
12877 		"JMP_JGE_X: all register value magnitudes",
12878 		{ },
12879 		INTERNAL | FLAG_NO_DATA,
12880 		{ },
12881 		{ { 0, 1 } },
12882 		.fill_helper = bpf_fill_jmp_jge_reg,
12883 		.nr_testruns = NR_PATTERN_RUNS,
12884 	},
12885 	{
12886 		"JMP_JLT_X: all register value magnitudes",
12887 		{ },
12888 		INTERNAL | FLAG_NO_DATA,
12889 		{ },
12890 		{ { 0, 1 } },
12891 		.fill_helper = bpf_fill_jmp_jlt_reg,
12892 		.nr_testruns = NR_PATTERN_RUNS,
12893 	},
12894 	{
12895 		"JMP_JLE_X: all register value magnitudes",
12896 		{ },
12897 		INTERNAL | FLAG_NO_DATA,
12898 		{ },
12899 		{ { 0, 1 } },
12900 		.fill_helper = bpf_fill_jmp_jle_reg,
12901 		.nr_testruns = NR_PATTERN_RUNS,
12902 	},
12903 	{
12904 		"JMP_JSGT_X: all register value magnitudes",
12905 		{ },
12906 		INTERNAL | FLAG_NO_DATA,
12907 		{ },
12908 		{ { 0, 1 } },
12909 		.fill_helper = bpf_fill_jmp_jsgt_reg,
12910 		.nr_testruns = NR_PATTERN_RUNS,
12911 	},
12912 	{
12913 		"JMP_JSGE_X: all register value magnitudes",
12914 		{ },
12915 		INTERNAL | FLAG_NO_DATA,
12916 		{ },
12917 		{ { 0, 1 } },
12918 		.fill_helper = bpf_fill_jmp_jsge_reg,
12919 		.nr_testruns = NR_PATTERN_RUNS,
12920 	},
12921 	{
12922 		"JMP_JSLT_X: all register value magnitudes",
12923 		{ },
12924 		INTERNAL | FLAG_NO_DATA,
12925 		{ },
12926 		{ { 0, 1 } },
12927 		.fill_helper = bpf_fill_jmp_jslt_reg,
12928 		.nr_testruns = NR_PATTERN_RUNS,
12929 	},
12930 	{
12931 		"JMP_JSLE_X: all register value magnitudes",
12932 		{ },
12933 		INTERNAL | FLAG_NO_DATA,
12934 		{ },
12935 		{ { 0, 1 } },
12936 		.fill_helper = bpf_fill_jmp_jsle_reg,
12937 		.nr_testruns = NR_PATTERN_RUNS,
12938 	},
12939 	/* JMP32 immediate magnitudes */
12940 	{
12941 		"JMP32_JSET_K: all immediate value magnitudes",
12942 		{ },
12943 		INTERNAL | FLAG_NO_DATA,
12944 		{ },
12945 		{ { 0, 1 } },
12946 		.fill_helper = bpf_fill_jmp32_jset_imm,
12947 		.nr_testruns = NR_PATTERN_RUNS,
12948 	},
12949 	{
12950 		"JMP32_JEQ_K: all immediate value magnitudes",
12951 		{ },
12952 		INTERNAL | FLAG_NO_DATA,
12953 		{ },
12954 		{ { 0, 1 } },
12955 		.fill_helper = bpf_fill_jmp32_jeq_imm,
12956 		.nr_testruns = NR_PATTERN_RUNS,
12957 	},
12958 	{
12959 		"JMP32_JNE_K: all immediate value magnitudes",
12960 		{ },
12961 		INTERNAL | FLAG_NO_DATA,
12962 		{ },
12963 		{ { 0, 1 } },
12964 		.fill_helper = bpf_fill_jmp32_jne_imm,
12965 		.nr_testruns = NR_PATTERN_RUNS,
12966 	},
12967 	{
12968 		"JMP32_JGT_K: all immediate value magnitudes",
12969 		{ },
12970 		INTERNAL | FLAG_NO_DATA,
12971 		{ },
12972 		{ { 0, 1 } },
12973 		.fill_helper = bpf_fill_jmp32_jgt_imm,
12974 		.nr_testruns = NR_PATTERN_RUNS,
12975 	},
12976 	{
12977 		"JMP32_JGE_K: all immediate value magnitudes",
12978 		{ },
12979 		INTERNAL | FLAG_NO_DATA,
12980 		{ },
12981 		{ { 0, 1 } },
12982 		.fill_helper = bpf_fill_jmp32_jge_imm,
12983 		.nr_testruns = NR_PATTERN_RUNS,
12984 	},
12985 	{
12986 		"JMP32_JLT_K: all immediate value magnitudes",
12987 		{ },
12988 		INTERNAL | FLAG_NO_DATA,
12989 		{ },
12990 		{ { 0, 1 } },
12991 		.fill_helper = bpf_fill_jmp32_jlt_imm,
12992 		.nr_testruns = NR_PATTERN_RUNS,
12993 	},
12994 	{
12995 		"JMP32_JLE_K: all immediate value magnitudes",
12996 		{ },
12997 		INTERNAL | FLAG_NO_DATA,
12998 		{ },
12999 		{ { 0, 1 } },
13000 		.fill_helper = bpf_fill_jmp32_jle_imm,
13001 		.nr_testruns = NR_PATTERN_RUNS,
13002 	},
13003 	{
13004 		"JMP32_JSGT_K: all immediate value magnitudes",
13005 		{ },
13006 		INTERNAL | FLAG_NO_DATA,
13007 		{ },
13008 		{ { 0, 1 } },
13009 		.fill_helper = bpf_fill_jmp32_jsgt_imm,
13010 		.nr_testruns = NR_PATTERN_RUNS,
13011 	},
13012 	{
13013 		"JMP32_JSGE_K: all immediate value magnitudes",
13014 		{ },
13015 		INTERNAL | FLAG_NO_DATA,
13016 		{ },
13017 		{ { 0, 1 } },
13018 		.fill_helper = bpf_fill_jmp32_jsge_imm,
13019 		.nr_testruns = NR_PATTERN_RUNS,
13020 	},
13021 	{
13022 		"JMP32_JSLT_K: all immediate value magnitudes",
13023 		{ },
13024 		INTERNAL | FLAG_NO_DATA,
13025 		{ },
13026 		{ { 0, 1 } },
13027 		.fill_helper = bpf_fill_jmp32_jslt_imm,
13028 		.nr_testruns = NR_PATTERN_RUNS,
13029 	},
13030 	{
13031 		"JMP32_JSLE_K: all immediate value magnitudes",
13032 		{ },
13033 		INTERNAL | FLAG_NO_DATA,
13034 		{ },
13035 		{ { 0, 1 } },
13036 		.fill_helper = bpf_fill_jmp32_jsle_imm,
13037 		.nr_testruns = NR_PATTERN_RUNS,
13038 	},
13039 	/* JMP32 register magnitudes */
13040 	{
13041 		"JMP32_JSET_X: all register value magnitudes",
13042 		{ },
13043 		INTERNAL | FLAG_NO_DATA,
13044 		{ },
13045 		{ { 0, 1 } },
13046 		.fill_helper = bpf_fill_jmp32_jset_reg,
13047 		.nr_testruns = NR_PATTERN_RUNS,
13048 	},
13049 	{
13050 		"JMP32_JEQ_X: all register value magnitudes",
13051 		{ },
13052 		INTERNAL | FLAG_NO_DATA,
13053 		{ },
13054 		{ { 0, 1 } },
13055 		.fill_helper = bpf_fill_jmp32_jeq_reg,
13056 		.nr_testruns = NR_PATTERN_RUNS,
13057 	},
13058 	{
13059 		"JMP32_JNE_X: all register value magnitudes",
13060 		{ },
13061 		INTERNAL | FLAG_NO_DATA,
13062 		{ },
13063 		{ { 0, 1 } },
13064 		.fill_helper = bpf_fill_jmp32_jne_reg,
13065 		.nr_testruns = NR_PATTERN_RUNS,
13066 	},
13067 	{
13068 		"JMP32_JGT_X: all register value magnitudes",
13069 		{ },
13070 		INTERNAL | FLAG_NO_DATA,
13071 		{ },
13072 		{ { 0, 1 } },
13073 		.fill_helper = bpf_fill_jmp32_jgt_reg,
13074 		.nr_testruns = NR_PATTERN_RUNS,
13075 	},
13076 	{
13077 		"JMP32_JGE_X: all register value magnitudes",
13078 		{ },
13079 		INTERNAL | FLAG_NO_DATA,
13080 		{ },
13081 		{ { 0, 1 } },
13082 		.fill_helper = bpf_fill_jmp32_jge_reg,
13083 		.nr_testruns = NR_PATTERN_RUNS,
13084 	},
13085 	{
13086 		"JMP32_JLT_X: all register value magnitudes",
13087 		{ },
13088 		INTERNAL | FLAG_NO_DATA,
13089 		{ },
13090 		{ { 0, 1 } },
13091 		.fill_helper = bpf_fill_jmp32_jlt_reg,
13092 		.nr_testruns = NR_PATTERN_RUNS,
13093 	},
13094 	{
13095 		"JMP32_JLE_X: all register value magnitudes",
13096 		{ },
13097 		INTERNAL | FLAG_NO_DATA,
13098 		{ },
13099 		{ { 0, 1 } },
13100 		.fill_helper = bpf_fill_jmp32_jle_reg,
13101 		.nr_testruns = NR_PATTERN_RUNS,
13102 	},
13103 	{
13104 		"JMP32_JSGT_X: all register value magnitudes",
13105 		{ },
13106 		INTERNAL | FLAG_NO_DATA,
13107 		{ },
13108 		{ { 0, 1 } },
13109 		.fill_helper = bpf_fill_jmp32_jsgt_reg,
13110 		.nr_testruns = NR_PATTERN_RUNS,
13111 	},
13112 	{
13113 		"JMP32_JSGE_X: all register value magnitudes",
13114 		{ },
13115 		INTERNAL | FLAG_NO_DATA,
13116 		{ },
13117 		{ { 0, 1 } },
13118 		.fill_helper = bpf_fill_jmp32_jsge_reg,
13119 		.nr_testruns = NR_PATTERN_RUNS,
13120 	},
13121 	{
13122 		"JMP32_JSLT_X: all register value magnitudes",
13123 		{ },
13124 		INTERNAL | FLAG_NO_DATA,
13125 		{ },
13126 		{ { 0, 1 } },
13127 		.fill_helper = bpf_fill_jmp32_jslt_reg,
13128 		.nr_testruns = NR_PATTERN_RUNS,
13129 	},
13130 	{
13131 		"JMP32_JSLE_X: all register value magnitudes",
13132 		{ },
13133 		INTERNAL | FLAG_NO_DATA,
13134 		{ },
13135 		{ { 0, 1 } },
13136 		.fill_helper = bpf_fill_jmp32_jsle_reg,
13137 		.nr_testruns = NR_PATTERN_RUNS,
13138 	},
13139 	/* Conditional jumps with constant decision */
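	/*
	 * These branch outcomes are fixed regardless of the run-time value
	 * of R1: JSET with imm = 0 and unsigned JLT against 0 can never be
	 * taken, unsigned JGE against 0 is always taken, and a comparison
	 * of a register with itself is decided by the operator alone. A JIT
	 * is free to fold such branches away; the tests check that the
	 * result stays correct when it does.
	 */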
13140 	{
13141 		"JMP_JSET_K: imm = 0 -> never taken",
13142 		.u.insns_int = {
13143 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13144 			BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
13145 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13146 			BPF_EXIT_INSN(),
13147 		},
13148 		INTERNAL | FLAG_NO_DATA,
13149 		{ },
13150 		{ { 0, 0 } },
13151 	},
13152 	{
13153 		"JMP_JLT_K: imm = 0 -> never taken",
13154 		.u.insns_int = {
13155 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13156 			BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
13157 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13158 			BPF_EXIT_INSN(),
13159 		},
13160 		INTERNAL | FLAG_NO_DATA,
13161 		{ },
13162 		{ { 0, 0 } },
13163 	},
13164 	{
13165 		"JMP_JGE_K: imm = 0 -> always taken",
13166 		.u.insns_int = {
13167 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13168 			BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
13169 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13170 			BPF_EXIT_INSN(),
13171 		},
13172 		INTERNAL | FLAG_NO_DATA,
13173 		{ },
13174 		{ { 0, 1 } },
13175 	},
13176 	{
13177 		"JMP_JGT_K: imm = 0xffffffff -> never taken",
13178 		.u.insns_int = {
13179 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13180 			BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
13181 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13182 			BPF_EXIT_INSN(),
13183 		},
13184 		INTERNAL | FLAG_NO_DATA,
13185 		{ },
13186 		{ { 0, 0 } },
13187 	},
13188 	{
13189 		"JMP_JLE_K: imm = 0xffffffff -> always taken",
13190 		.u.insns_int = {
13191 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13192 			BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
13193 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13194 			BPF_EXIT_INSN(),
13195 		},
13196 		INTERNAL | FLAG_NO_DATA,
13197 		{ },
13198 		{ { 0, 1 } },
13199 	},
13200 	{
13201 		"JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
13202 		.u.insns_int = {
13203 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13204 			BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
13205 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13206 			BPF_EXIT_INSN(),
13207 		},
13208 		INTERNAL | FLAG_NO_DATA,
13209 		{ },
13210 		{ { 0, 0 } },
13211 	},
13212 	{
13213 		"JMP32_JSGE_K: imm = -0x80000000 -> always taken",
13214 		.u.insns_int = {
13215 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13216 			BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
13217 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13218 			BPF_EXIT_INSN(),
13219 		},
13220 		INTERNAL | FLAG_NO_DATA,
13221 		{ },
13222 		{ { 0, 1 } },
13223 	},
13224 	{
13225 		"JMP32_JSLT_K: imm = -0x80000000 -> never taken",
13226 		.u.insns_int = {
13227 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13228 			BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
13229 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13230 			BPF_EXIT_INSN(),
13231 		},
13232 		INTERNAL | FLAG_NO_DATA,
13233 		{ },
13234 		{ { 0, 0 } },
13235 	},
13236 	{
13237 		"JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
13238 		.u.insns_int = {
13239 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13240 			BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
13241 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13242 			BPF_EXIT_INSN(),
13243 		},
13244 		INTERNAL | FLAG_NO_DATA,
13245 		{ },
13246 		{ { 0, 1 } },
13247 	},
13248 	{
13249 		"JMP_JEQ_X: dst = src -> always taken",
13250 		.u.insns_int = {
13251 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13252 			BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
13253 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13254 			BPF_EXIT_INSN(),
13255 		},
13256 		INTERNAL | FLAG_NO_DATA,
13257 		{ },
13258 		{ { 0, 1 } },
13259 	},
13260 	{
13261 		"JMP_JGE_X: dst = src -> always taken",
13262 		.u.insns_int = {
13263 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13264 			BPF_JMP_REG(BPF_JGE, R1, R1, 1),
13265 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13266 			BPF_EXIT_INSN(),
13267 		},
13268 		INTERNAL | FLAG_NO_DATA,
13269 		{ },
13270 		{ { 0, 1 } },
13271 	},
13272 	{
13273 		"JMP_JLE_X: dst = src -> always taken",
13274 		.u.insns_int = {
13275 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13276 			BPF_JMP_REG(BPF_JLE, R1, R1, 1),
13277 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13278 			BPF_EXIT_INSN(),
13279 		},
13280 		INTERNAL | FLAG_NO_DATA,
13281 		{ },
13282 		{ { 0, 1 } },
13283 	},
13284 	{
13285 		"JMP_JSGE_X: dst = src -> always taken",
13286 		.u.insns_int = {
13287 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13288 			BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
13289 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13290 			BPF_EXIT_INSN(),
13291 		},
13292 		INTERNAL | FLAG_NO_DATA,
13293 		{ },
13294 		{ { 0, 1 } },
13295 	},
13296 	{
13297 		"JMP_JSLE_X: dst = src -> always taken",
13298 		.u.insns_int = {
13299 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13300 			BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
13301 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13302 			BPF_EXIT_INSN(),
13303 		},
13304 		INTERNAL | FLAG_NO_DATA,
13305 		{ },
13306 		{ { 0, 1 } },
13307 	},
13308 	{
13309 		"JMP_JNE_X: dst = src -> never taken",
13310 		.u.insns_int = {
13311 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13312 			BPF_JMP_REG(BPF_JNE, R1, R1, 1),
13313 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13314 			BPF_EXIT_INSN(),
13315 		},
13316 		INTERNAL | FLAG_NO_DATA,
13317 		{ },
13318 		{ { 0, 0 } },
13319 	},
13320 	{
13321 		"JMP_JGT_X: dst = src -> never taken",
13322 		.u.insns_int = {
13323 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13324 			BPF_JMP_REG(BPF_JGT, R1, R1, 1),
13325 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13326 			BPF_EXIT_INSN(),
13327 		},
13328 		INTERNAL | FLAG_NO_DATA,
13329 		{ },
13330 		{ { 0, 0 } },
13331 	},
13332 	{
13333 		"JMP_JLT_X: dst = src -> never taken",
13334 		.u.insns_int = {
13335 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13336 			BPF_JMP_REG(BPF_JLT, R1, R1, 1),
13337 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13338 			BPF_EXIT_INSN(),
13339 		},
13340 		INTERNAL | FLAG_NO_DATA,
13341 		{ },
13342 		{ { 0, 0 } },
13343 	},
13344 	{
13345 		"JMP_JSGT_X: dst = src -> never taken",
13346 		.u.insns_int = {
13347 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13348 			BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
13349 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13350 			BPF_EXIT_INSN(),
13351 		},
13352 		INTERNAL | FLAG_NO_DATA,
13353 		{ },
13354 		{ { 0, 0 } },
13355 	},
13356 	{
13357 		"JMP_JSLT_X: dst = src -> never taken",
13358 		.u.insns_int = {
13359 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13360 			BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
13361 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13362 			BPF_EXIT_INSN(),
13363 		},
13364 		INTERNAL | FLAG_NO_DATA,
13365 		{ },
13366 		{ { 0, 0 } },
13367 	},
13368 	/* Short relative jumps */
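	/*
	 * Branch offsets of 0-4 instructions: a taken JEQ skips the matching
	 * number of ADDs so R0 remains 0, and the trailing MOV R0, -1 is
	 * only reached if the offset is computed wrongly. Offsets this small
	 * are the ones a JIT is most likely to emit as compact branch
	 * encodings.
	 */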
13369 	{
13370 		"Short relative jump: offset=0",
13371 		.u.insns_int = {
13372 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13373 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
13374 			BPF_EXIT_INSN(),
13375 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13376 		},
13377 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13378 		{ },
13379 		{ { 0, 0 } },
13380 	},
13381 	{
13382 		"Short relative jump: offset=1",
13383 		.u.insns_int = {
13384 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13385 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
13386 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13387 			BPF_EXIT_INSN(),
13388 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13389 		},
13390 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13391 		{ },
13392 		{ { 0, 0 } },
13393 	},
13394 	{
13395 		"Short relative jump: offset=2",
13396 		.u.insns_int = {
13397 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13398 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
13399 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13400 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13401 			BPF_EXIT_INSN(),
13402 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13403 		},
13404 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13405 		{ },
13406 		{ { 0, 0 } },
13407 	},
13408 	{
13409 		"Short relative jump: offset=3",
13410 		.u.insns_int = {
13411 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13412 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
13413 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13414 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13415 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13416 			BPF_EXIT_INSN(),
13417 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13418 		},
13419 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13420 		{ },
13421 		{ { 0, 0 } },
13422 	},
13423 	{
13424 		"Short relative jump: offset=4",
13425 		.u.insns_int = {
13426 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13427 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
13428 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13429 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13430 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13431 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
13432 			BPF_EXIT_INSN(),
13433 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
13434 		},
13435 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
13436 		{ },
13437 		{ { 0, 0 } },
13438 	},
13439 	/* Conditional branch conversions */
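	/*
	 * The bpf_fill_max_jmp_* helpers presumably emit a conditional
	 * branch spanning close to the maximum jump distance, which JITs
	 * with a shorter conditional-branch reach must convert into an
	 * inverted condition plus a far unconditional jump. Both run-time
	 * outcomes are covered, as well as both outcomes that are already
	 * known when the program is JITed.
	 */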
13440 	{
13441 		"Long conditional jump: taken at runtime",
13442 		{ },
13443 		INTERNAL | FLAG_NO_DATA,
13444 		{ },
13445 		{ { 0, 1 } },
13446 		.fill_helper = bpf_fill_max_jmp_taken,
13447 	},
13448 	{
13449 		"Long conditional jump: not taken at runtime",
13450 		{ },
13451 		INTERNAL | FLAG_NO_DATA,
13452 		{ },
13453 		{ { 0, 2 } },
13454 		.fill_helper = bpf_fill_max_jmp_not_taken,
13455 	},
13456 	{
13457 		"Long conditional jump: always taken, known at JIT time",
13458 		{ },
13459 		INTERNAL | FLAG_NO_DATA,
13460 		{ },
13461 		{ { 0, 1 } },
13462 		.fill_helper = bpf_fill_max_jmp_always_taken,
13463 	},
13464 	{
13465 		"Long conditional jump: never taken, known at JIT time",
13466 		{ },
13467 		INTERNAL | FLAG_NO_DATA,
13468 		{ },
13469 		{ { 0, 2 } },
13470 		.fill_helper = bpf_fill_max_jmp_never_taken,
13471 	},
13472 	/* Staggered jump sequences, immediate */
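	/*
	 * A staggered sequence is assumed to be a chain of
	 * MAX_STAGGERED_JMP_SIZE jumps whose targets alternate between
	 * locations ahead of and behind the current position at growing
	 * distances, with each landing pad bumping a counter; the expected
	 * result of MAX_STAGGERED_JMP_SIZE + 1 indicates that every jump in
	 * the chain reached its intended pad.
	 */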
13473 	{
13474 		"Staggered jumps: JMP_JA",
13475 		{ },
13476 		INTERNAL | FLAG_NO_DATA,
13477 		{ },
13478 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13479 		.fill_helper = bpf_fill_staggered_ja,
13480 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13481 	},
13482 	{
13483 		"Staggered jumps: JMP_JEQ_K",
13484 		{ },
13485 		INTERNAL | FLAG_NO_DATA,
13486 		{ },
13487 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13488 		.fill_helper = bpf_fill_staggered_jeq_imm,
13489 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13490 	},
13491 	{
13492 		"Staggered jumps: JMP_JNE_K",
13493 		{ },
13494 		INTERNAL | FLAG_NO_DATA,
13495 		{ },
13496 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13497 		.fill_helper = bpf_fill_staggered_jne_imm,
13498 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13499 	},
13500 	{
13501 		"Staggered jumps: JMP_JSET_K",
13502 		{ },
13503 		INTERNAL | FLAG_NO_DATA,
13504 		{ },
13505 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13506 		.fill_helper = bpf_fill_staggered_jset_imm,
13507 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13508 	},
13509 	{
13510 		"Staggered jumps: JMP_JGT_K",
13511 		{ },
13512 		INTERNAL | FLAG_NO_DATA,
13513 		{ },
13514 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13515 		.fill_helper = bpf_fill_staggered_jgt_imm,
13516 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13517 	},
13518 	{
13519 		"Staggered jumps: JMP_JGE_K",
13520 		{ },
13521 		INTERNAL | FLAG_NO_DATA,
13522 		{ },
13523 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13524 		.fill_helper = bpf_fill_staggered_jge_imm,
13525 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13526 	},
13527 	{
13528 		"Staggered jumps: JMP_JLT_K",
13529 		{ },
13530 		INTERNAL | FLAG_NO_DATA,
13531 		{ },
13532 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13533 		.fill_helper = bpf_fill_staggered_jlt_imm,
13534 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13535 	},
13536 	{
13537 		"Staggered jumps: JMP_JLE_K",
13538 		{ },
13539 		INTERNAL | FLAG_NO_DATA,
13540 		{ },
13541 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13542 		.fill_helper = bpf_fill_staggered_jle_imm,
13543 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13544 	},
13545 	{
13546 		"Staggered jumps: JMP_JSGT_K",
13547 		{ },
13548 		INTERNAL | FLAG_NO_DATA,
13549 		{ },
13550 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13551 		.fill_helper = bpf_fill_staggered_jsgt_imm,
13552 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13553 	},
13554 	{
13555 		"Staggered jumps: JMP_JSGE_K",
13556 		{ },
13557 		INTERNAL | FLAG_NO_DATA,
13558 		{ },
13559 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13560 		.fill_helper = bpf_fill_staggered_jsge_imm,
13561 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13562 	},
13563 	{
13564 		"Staggered jumps: JMP_JSLT_K",
13565 		{ },
13566 		INTERNAL | FLAG_NO_DATA,
13567 		{ },
13568 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13569 		.fill_helper = bpf_fill_staggered_jslt_imm,
13570 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13571 	},
13572 	{
13573 		"Staggered jumps: JMP_JSLE_K",
13574 		{ },
13575 		INTERNAL | FLAG_NO_DATA,
13576 		{ },
13577 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13578 		.fill_helper = bpf_fill_staggered_jsle_imm,
13579 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13580 	},
13581 	/* Staggered jump sequences, register */
13582 	{
13583 		"Staggered jumps: JMP_JEQ_X",
13584 		{ },
13585 		INTERNAL | FLAG_NO_DATA,
13586 		{ },
13587 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13588 		.fill_helper = bpf_fill_staggered_jeq_reg,
13589 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13590 	},
13591 	{
13592 		"Staggered jumps: JMP_JNE_X",
13593 		{ },
13594 		INTERNAL | FLAG_NO_DATA,
13595 		{ },
13596 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13597 		.fill_helper = bpf_fill_staggered_jne_reg,
13598 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13599 	},
13600 	{
13601 		"Staggered jumps: JMP_JSET_X",
13602 		{ },
13603 		INTERNAL | FLAG_NO_DATA,
13604 		{ },
13605 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13606 		.fill_helper = bpf_fill_staggered_jset_reg,
13607 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13608 	},
13609 	{
13610 		"Staggered jumps: JMP_JGT_X",
13611 		{ },
13612 		INTERNAL | FLAG_NO_DATA,
13613 		{ },
13614 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13615 		.fill_helper = bpf_fill_staggered_jgt_reg,
13616 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13617 	},
13618 	{
13619 		"Staggered jumps: JMP_JGE_X",
13620 		{ },
13621 		INTERNAL | FLAG_NO_DATA,
13622 		{ },
13623 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13624 		.fill_helper = bpf_fill_staggered_jge_reg,
13625 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13626 	},
13627 	{
13628 		"Staggered jumps: JMP_JLT_X",
13629 		{ },
13630 		INTERNAL | FLAG_NO_DATA,
13631 		{ },
13632 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13633 		.fill_helper = bpf_fill_staggered_jlt_reg,
13634 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13635 	},
13636 	{
13637 		"Staggered jumps: JMP_JLE_X",
13638 		{ },
13639 		INTERNAL | FLAG_NO_DATA,
13640 		{ },
13641 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13642 		.fill_helper = bpf_fill_staggered_jle_reg,
13643 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13644 	},
13645 	{
13646 		"Staggered jumps: JMP_JSGT_X",
13647 		{ },
13648 		INTERNAL | FLAG_NO_DATA,
13649 		{ },
13650 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13651 		.fill_helper = bpf_fill_staggered_jsgt_reg,
13652 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13653 	},
13654 	{
13655 		"Staggered jumps: JMP_JSGE_X",
13656 		{ },
13657 		INTERNAL | FLAG_NO_DATA,
13658 		{ },
13659 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13660 		.fill_helper = bpf_fill_staggered_jsge_reg,
13661 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13662 	},
13663 	{
13664 		"Staggered jumps: JMP_JSLT_X",
13665 		{ },
13666 		INTERNAL | FLAG_NO_DATA,
13667 		{ },
13668 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13669 		.fill_helper = bpf_fill_staggered_jslt_reg,
13670 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13671 	},
13672 	{
13673 		"Staggered jumps: JMP_JSLE_X",
13674 		{ },
13675 		INTERNAL | FLAG_NO_DATA,
13676 		{ },
13677 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13678 		.fill_helper = bpf_fill_staggered_jsle_reg,
13679 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13680 	},
13681 	/* Staggered jump sequences, JMP32 immediate */
13682 	{
13683 		"Staggered jumps: JMP32_JEQ_K",
13684 		{ },
13685 		INTERNAL | FLAG_NO_DATA,
13686 		{ },
13687 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13688 		.fill_helper = bpf_fill_staggered_jeq32_imm,
13689 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13690 	},
13691 	{
13692 		"Staggered jumps: JMP32_JNE_K",
13693 		{ },
13694 		INTERNAL | FLAG_NO_DATA,
13695 		{ },
13696 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13697 		.fill_helper = bpf_fill_staggered_jne32_imm,
13698 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13699 	},
13700 	{
13701 		"Staggered jumps: JMP32_JSET_K",
13702 		{ },
13703 		INTERNAL | FLAG_NO_DATA,
13704 		{ },
13705 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13706 		.fill_helper = bpf_fill_staggered_jset32_imm,
13707 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13708 	},
13709 	{
13710 		"Staggered jumps: JMP32_JGT_K",
13711 		{ },
13712 		INTERNAL | FLAG_NO_DATA,
13713 		{ },
13714 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13715 		.fill_helper = bpf_fill_staggered_jgt32_imm,
13716 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13717 	},
13718 	{
13719 		"Staggered jumps: JMP32_JGE_K",
13720 		{ },
13721 		INTERNAL | FLAG_NO_DATA,
13722 		{ },
13723 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13724 		.fill_helper = bpf_fill_staggered_jge32_imm,
13725 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13726 	},
13727 	{
13728 		"Staggered jumps: JMP32_JLT_K",
13729 		{ },
13730 		INTERNAL | FLAG_NO_DATA,
13731 		{ },
13732 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13733 		.fill_helper = bpf_fill_staggered_jlt32_imm,
13734 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13735 	},
13736 	{
13737 		"Staggered jumps: JMP32_JLE_K",
13738 		{ },
13739 		INTERNAL | FLAG_NO_DATA,
13740 		{ },
13741 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13742 		.fill_helper = bpf_fill_staggered_jle32_imm,
13743 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13744 	},
13745 	{
13746 		"Staggered jumps: JMP32_JSGT_K",
13747 		{ },
13748 		INTERNAL | FLAG_NO_DATA,
13749 		{ },
13750 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13751 		.fill_helper = bpf_fill_staggered_jsgt32_imm,
13752 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13753 	},
13754 	{
13755 		"Staggered jumps: JMP32_JSGE_K",
13756 		{ },
13757 		INTERNAL | FLAG_NO_DATA,
13758 		{ },
13759 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13760 		.fill_helper = bpf_fill_staggered_jsge32_imm,
13761 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13762 	},
13763 	{
13764 		"Staggered jumps: JMP32_JSLT_K",
13765 		{ },
13766 		INTERNAL | FLAG_NO_DATA,
13767 		{ },
13768 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13769 		.fill_helper = bpf_fill_staggered_jslt32_imm,
13770 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13771 	},
13772 	{
13773 		"Staggered jumps: JMP32_JSLE_K",
13774 		{ },
13775 		INTERNAL | FLAG_NO_DATA,
13776 		{ },
13777 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13778 		.fill_helper = bpf_fill_staggered_jsle32_imm,
13779 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13780 	},
13781 	/* Staggered jump sequences, JMP32 register */
13782 	{
13783 		"Staggered jumps: JMP32_JEQ_X",
13784 		{ },
13785 		INTERNAL | FLAG_NO_DATA,
13786 		{ },
13787 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13788 		.fill_helper = bpf_fill_staggered_jeq32_reg,
13789 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13790 	},
13791 	{
13792 		"Staggered jumps: JMP32_JNE_X",
13793 		{ },
13794 		INTERNAL | FLAG_NO_DATA,
13795 		{ },
13796 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13797 		.fill_helper = bpf_fill_staggered_jne32_reg,
13798 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13799 	},
13800 	{
13801 		"Staggered jumps: JMP32_JSET_X",
13802 		{ },
13803 		INTERNAL | FLAG_NO_DATA,
13804 		{ },
13805 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13806 		.fill_helper = bpf_fill_staggered_jset32_reg,
13807 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13808 	},
13809 	{
13810 		"Staggered jumps: JMP32_JGT_X",
13811 		{ },
13812 		INTERNAL | FLAG_NO_DATA,
13813 		{ },
13814 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13815 		.fill_helper = bpf_fill_staggered_jgt32_reg,
13816 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13817 	},
13818 	{
13819 		"Staggered jumps: JMP32_JGE_X",
13820 		{ },
13821 		INTERNAL | FLAG_NO_DATA,
13822 		{ },
13823 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13824 		.fill_helper = bpf_fill_staggered_jge32_reg,
13825 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13826 	},
13827 	{
13828 		"Staggered jumps: JMP32_JLT_X",
13829 		{ },
13830 		INTERNAL | FLAG_NO_DATA,
13831 		{ },
13832 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13833 		.fill_helper = bpf_fill_staggered_jlt32_reg,
13834 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13835 	},
13836 	{
13837 		"Staggered jumps: JMP32_JLE_X",
13838 		{ },
13839 		INTERNAL | FLAG_NO_DATA,
13840 		{ },
13841 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13842 		.fill_helper = bpf_fill_staggered_jle32_reg,
13843 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13844 	},
13845 	{
13846 		"Staggered jumps: JMP32_JSGT_X",
13847 		{ },
13848 		INTERNAL | FLAG_NO_DATA,
13849 		{ },
13850 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13851 		.fill_helper = bpf_fill_staggered_jsgt32_reg,
13852 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13853 	},
13854 	{
13855 		"Staggered jumps: JMP32_JSGE_X",
13856 		{ },
13857 		INTERNAL | FLAG_NO_DATA,
13858 		{ },
13859 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13860 		.fill_helper = bpf_fill_staggered_jsge32_reg,
13861 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13862 	},
13863 	{
13864 		"Staggered jumps: JMP32_JSLT_X",
13865 		{ },
13866 		INTERNAL | FLAG_NO_DATA,
13867 		{ },
13868 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13869 		.fill_helper = bpf_fill_staggered_jslt32_reg,
13870 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13871 	},
13872 	{
13873 		"Staggered jumps: JMP32_JSLE_X",
13874 		{ },
13875 		INTERNAL | FLAG_NO_DATA,
13876 		{ },
13877 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
13878 		.fill_helper = bpf_fill_staggered_jsle32_reg,
13879 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
13880 	},
13881 };
13882 
13883 static struct net_device dev;
13884 
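/*
 * Build a linear skb around the caller's buffer and stamp it with the
 * SKB_* test constants (pkt_type, mark, hash, queue mapping, VLAN and
 * device fields) so that programs reading skb metadata see deterministic
 * values.
 */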
13885 static struct sk_buff *populate_skb(char *buf, int size)
13886 {
13887 	struct sk_buff *skb;
13888 
13889 	if (size >= MAX_DATA)
13890 		return NULL;
13891 
13892 	skb = alloc_skb(MAX_DATA, GFP_KERNEL);
13893 	if (!skb)
13894 		return NULL;
13895 
13896 	__skb_put_data(skb, buf, size);
13897 
13898 	/* Initialize a fake skb with test pattern. */
13899 	skb_reset_mac_header(skb);
13900 	skb->protocol = htons(ETH_P_IP);
13901 	skb->pkt_type = SKB_TYPE;
13902 	skb->mark = SKB_MARK;
13903 	skb->hash = SKB_HASH;
13904 	skb->queue_mapping = SKB_QUEUE_MAP;
13905 	skb->vlan_tci = SKB_VLAN_TCI;
13906 	skb->vlan_present = SKB_VLAN_PRESENT;
13907 	skb->vlan_proto = htons(ETH_P_IP);
13908 	dev_net_set(&dev, &init_net);
13909 	skb->dev = &dev;
13910 	skb->dev->ifindex = SKB_DEV_IFINDEX;
13911 	skb->dev->type = SKB_DEV_TYPE;
13912 	skb_set_network_header(skb, min(size, ETH_HLEN));
13913 
13914 	return skb;
13915 }
13916 
13917 static void *generate_test_data(struct bpf_test *test, int sub)
13918 {
13919 	struct sk_buff *skb;
13920 	struct page *page;
13921 
13922 	if (test->aux & FLAG_NO_DATA)
13923 		return NULL;
13924 
13925 	/* Test case expects an skb, so populate one. Various
13926 	 * subtests generate skbs of different sizes based on
13927 	 * the same data.
13928 	 */
13929 	skb = populate_skb(test->data, test->test[sub].data_size);
13930 	if (!skb)
13931 		return NULL;
13932 
13933 	if (test->aux & FLAG_SKB_FRAG) {
13934 		/*
13935 		 * when the test requires a fragmented skb, add a
13936 		 * single fragment to the skb, filled with
13937 		 * test->frag_data.
13938 		 */
13939 		void *ptr;
13940 
13941 		page = alloc_page(GFP_KERNEL);
13942 
13943 		if (!page)
13944 			goto err_kfree_skb;
13945 
13946 		ptr = kmap(page);
13947 		if (!ptr)
13948 			goto err_free_page;
13949 		memcpy(ptr, test->frag_data, MAX_DATA);
13950 		kunmap(page);
13951 		skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
13952 	}
13953 
13954 	return skb;
13955 
13956 err_free_page:
13957 	__free_page(page);
13958 err_kfree_skb:
13959 	kfree_skb(skb);
13960 	return NULL;
13961 }
13962 
13963 static void release_test_data(const struct bpf_test *test, void *data)
13964 {
13965 	if (test->aux & FLAG_NO_DATA)
13966 		return;
13967 
13968 	kfree_skb(data);
13969 }
13970 
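/*
 * Number of instructions in a test program: taken from the fill handler
 * if one built the program, otherwise found by scanning backwards for
 * the last non-zero instruction in the fixed-size insns array.
 */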
13971 static int filter_length(int which)
13972 {
13973 	struct sock_filter *fp;
13974 	int len;
13975 
13976 	if (tests[which].fill_helper)
13977 		return tests[which].u.ptr.len;
13978 
13979 	fp = tests[which].u.insns;
13980 	for (len = MAX_INSNS - 1; len > 0; --len)
13981 		if (fp[len].code != 0 || fp[len].k != 0)
13982 			break;
13983 
13984 	return len + 1;
13985 }
13986 
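/* Return the instruction array of a test, honouring any fill handler. */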
13987 static void *filter_pointer(int which)
13988 {
13989 	if (tests[which].fill_helper)
13990 		return tests[which].u.ptr.insns;
13991 	else
13992 		return tests[which].u.insns;
13993 }
13994 
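/*
 * Turn a test case into a runnable bpf_prog. Classic BPF filters go
 * through bpf_prog_create() and hence the classic checker, so tests
 * marked FLAG_EXPECTED_FAIL are verified to be rejected with the
 * expected error code. eBPF test programs skip the verifier: the
 * instructions are copied into a freshly allocated bpf_prog and handed
 * straight to bpf_prog_select_runtime() for JITing.
 */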
13995 static struct bpf_prog *generate_filter(int which, int *err)
13996 {
13997 	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
13998 	unsigned int flen = filter_length(which);
13999 	void *fptr = filter_pointer(which);
14000 	struct sock_fprog_kern fprog;
14001 	struct bpf_prog *fp;
14002 
14003 	switch (test_type) {
14004 	case CLASSIC:
14005 		fprog.filter = fptr;
14006 		fprog.len = flen;
14007 
14008 		*err = bpf_prog_create(&fp, &fprog);
14009 		if (tests[which].aux & FLAG_EXPECTED_FAIL) {
14010 			if (*err == tests[which].expected_errcode) {
14011 				pr_cont("PASS\n");
14012 				/* Verifier rejected filter as expected. */
14013 				*err = 0;
14014 				return NULL;
14015 			} else {
14016 				pr_cont("UNEXPECTED_PASS\n");
14017 				/* Verifier didn't reject the filter as
14018 				 * expected; that's bad enough, just return.
14019 				 */
14020 				*err = -EINVAL;
14021 				return NULL;
14022 			}
14023 		}
14024 		if (*err) {
14025 			pr_cont("FAIL to prog_create err=%d len=%d\n",
14026 				*err, fprog.len);
14027 			return NULL;
14028 		}
14029 		break;
14030 
14031 	case INTERNAL:
14032 		fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
14033 		if (fp == NULL) {
14034 			pr_cont("UNEXPECTED_FAIL no memory left\n");
14035 			*err = -ENOMEM;
14036 			return NULL;
14037 		}
14038 
14039 		fp->len = flen;
14040 		/* Type doesn't really matter here as long as it's not unspec. */
14041 		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
14042 		memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
14043 		fp->aux->stack_depth = tests[which].stack_depth;
14044 		fp->aux->verifier_zext = !!(tests[which].aux &
14045 					    FLAG_VERIFIER_ZEXT);
14046 
14047 		/* We cannot error here as we don't need type compatibility
14048 		 * checks.
14049 		 */
14050 		fp = bpf_prog_select_runtime(fp, err);
14051 		if (*err) {
14052 			pr_cont("FAIL to select_runtime err=%d\n", *err);
14053 			return NULL;
14054 		}
14055 		break;
14056 	}
14057 
14058 	*err = 0;
14059 	return fp;
14060 }
14061 
14062 static void release_filter(struct bpf_prog *fp, int which)
14063 {
14064 	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14065 
14066 	switch (test_type) {
14067 	case CLASSIC:
14068 		bpf_prog_destroy(fp);
14069 		break;
14070 	case INTERNAL:
14071 		bpf_prog_free(fp);
14072 		break;
14073 	}
14074 }
14075 
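/*
 * Time a program: run it 'runs' times back to back with migration
 * disabled and report the mean duration per run in nanoseconds. The
 * return value of the last run is handed back for result checking.
 */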
14076 static int __run_one(const struct bpf_prog *fp, const void *data,
14077 		     int runs, u64 *duration)
14078 {
14079 	u64 start, finish;
14080 	int ret = 0, i;
14081 
14082 	migrate_disable();
14083 	start = ktime_get_ns();
14084 
14085 	for (i = 0; i < runs; i++)
14086 		ret = bpf_prog_run(fp, data);
14087 
14088 	finish = ktime_get_ns();
14089 	migrate_enable();
14090 
14091 	*duration = finish - start;
14092 	do_div(*duration, runs);
14093 
14094 	return ret;
14095 }
14096 
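/*
 * Run every populated sub-test of a test case and compare the program's
 * return value against the expected result, printing the average runtime
 * for passing sub-tests. Returns the number of failing sub-tests.
 */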
14097 static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
14098 {
14099 	int err_cnt = 0, i, runs = MAX_TESTRUNS;
14100 
14101 	if (test->nr_testruns)
14102 		runs = min(test->nr_testruns, MAX_TESTRUNS);
14103 
14104 	for (i = 0; i < MAX_SUBTESTS; i++) {
14105 		void *data;
14106 		u64 duration;
14107 		u32 ret;
14108 
14109 		/*
14110 		 * NOTE: Several sub-tests may be present, in which case
14111 		 * a zero {data_size, result} tuple indicates the end of
14112 		 * the sub-test array. The first test is always run,
14113 		 * even if both data_size and result happen to be zero.
14114 		 */
14115 		if (i > 0 &&
14116 		    test->test[i].data_size == 0 &&
14117 		    test->test[i].result == 0)
14118 			break;
14119 
14120 		data = generate_test_data(test, i);
14121 		if (!data && !(test->aux & FLAG_NO_DATA)) {
14122 			pr_cont("data generation failed ");
14123 			err_cnt++;
14124 			break;
14125 		}
14126 		ret = __run_one(fp, data, runs, &duration);
14127 		release_test_data(test, data);
14128 
14129 		if (ret == test->test[i].result) {
14130 			pr_cont("%lld ", duration);
14131 		} else {
14132 			pr_cont("ret %d != %d ", ret,
14133 				test->test[i].result);
14134 			err_cnt++;
14135 		}
14136 	}
14137 
14138 	return err_cnt;
14139 }
14140 
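/*
 * Test selection: test_id takes precedence over test_name, which in turn
 * takes precedence over test_range (see prepare_bpf_tests() below).
 * Example invocations, assuming the module is built as test_bpf.ko:
 *
 *	insmod test_bpf.ko test_id=42
 *	insmod test_bpf.ko test_name="Staggered jumps: JMP32_JSLE_X"
 *	insmod test_bpf.ko test_range=0,15
 */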
14141 static char test_name[64];
14142 module_param_string(test_name, test_name, sizeof(test_name), 0);
14143 
14144 static int test_id = -1;
14145 module_param(test_id, int, 0);
14146 
14147 static int test_range[2] = { 0, ARRAY_SIZE(tests) - 1 };
14148 module_param_array(test_range, int, NULL, 0);
14149 
14150 static __init int find_test_index(const char *test_name)
14151 {
14152 	int i;
14153 
14154 	for (i = 0; i < ARRAY_SIZE(tests); i++) {
14155 		if (!strcmp(tests[i].descr, test_name))
14156 			return i;
14157 	}
14158 	return -1;
14159 }
14160 
14161 static __init int prepare_bpf_tests(void)
14162 {
14163 	if (test_id >= 0) {
14164 		/*
14165 		 * if a test_id was specified, use test_range to
14166 		 * cover only that test.
14167 		 */
14168 		if (test_id >= ARRAY_SIZE(tests)) {
14169 			pr_err("test_bpf: invalid test_id specified.\n");
14170 			return -EINVAL;
14171 		}
14172 
14173 		test_range[0] = test_id;
14174 		test_range[1] = test_id;
14175 	} else if (*test_name) {
14176 		/*
14177 		 * if a test_name was specified, find it and set up
14178 		 * test_range to cover only that test.
14179 		 */
14180 		int idx = find_test_index(test_name);
14181 
14182 		if (idx < 0) {
14183 			pr_err("test_bpf: no test named '%s' found.\n",
14184 			       test_name);
14185 			return -EINVAL;
14186 		}
14187 		test_range[0] = idx;
14188 		test_range[1] = idx;
14189 	} else {
14190 		/*
14191 		 * check that the supplied test_range is valid.
14192 		 */
14193 		if (test_range[0] >= ARRAY_SIZE(tests) ||
14194 		    test_range[1] >= ARRAY_SIZE(tests) ||
14195 		    test_range[0] < 0 || test_range[1] < 0) {
14196 			pr_err("test_bpf: test_range is out of bounds.\n");
14197 			return -EINVAL;
14198 		}
14199 
14200 		if (test_range[1] < test_range[0]) {
14201 			pr_err("test_bpf: test_range is ending before it starts.\n");
14202 			return -EINVAL;
14203 		}
14204 	}
14205 
14206 	return 0;
14207 }
14208 
14209 static __init void destroy_bpf_tests(void)
14210 {
14211 }
14212 
14213 static bool exclude_test(int test_id)
14214 {
14215 	return test_id < test_range[0] || test_id > test_range[1];
14216 }
14217 
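/*
 * Build a GSO skb for the skb_segment() tests: two small head_frag skbs,
 * each carrying a 64-byte page fragment, with skb[1] hung off skb[0]'s
 * frag_list and the head skb marked SKB_GSO_TCPV4 | SKB_GSO_DODGY.
 */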
14218 static __init struct sk_buff *build_test_skb(void)
14219 {
14220 	u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
14221 	struct sk_buff *skb[2];
14222 	struct page *page[2];
14223 	int i, data_size = 8;
14224 
14225 	for (i = 0; i < 2; i++) {
14226 		page[i] = alloc_page(GFP_KERNEL);
14227 		if (!page[i]) {
14228 			if (i == 0)
14229 				goto err_page0;
14230 			else
14231 				goto err_page1;
14232 		}
14233 
14234 		/* this will set skb[i]->head_frag */
14235 		skb[i] = dev_alloc_skb(headroom + data_size);
14236 		if (!skb[i]) {
14237 			if (i == 0)
14238 				goto err_skb0;
14239 			else
14240 				goto err_skb1;
14241 		}
14242 
14243 		skb_reserve(skb[i], headroom);
14244 		skb_put(skb[i], data_size);
14245 		skb[i]->protocol = htons(ETH_P_IP);
14246 		skb_reset_network_header(skb[i]);
14247 		skb_set_mac_header(skb[i], -ETH_HLEN);
14248 
14249 		skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
14250 		// skb_headlen(skb[i]): 8, skb[i]->head_frag = 1
14251 	}
14252 
14253 	/* setup shinfo */
14254 	skb_shinfo(skb[0])->gso_size = 1448;
14255 	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
14256 	skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
14257 	skb_shinfo(skb[0])->gso_segs = 0;
14258 	skb_shinfo(skb[0])->frag_list = skb[1];
14259 	skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
14260 
14261 	/* adjust skb[0]'s len */
14262 	skb[0]->len += skb[1]->len;
14263 	skb[0]->data_len += skb[1]->data_len;
14264 	skb[0]->truesize += skb[1]->truesize;
14265 
14266 	return skb[0];
14267 
14268 err_skb1:
14269 	__free_page(page[1]);
14270 err_page1:
14271 	kfree_skb(skb[0]);
14272 err_skb0:
14273 	__free_page(page[0]);
14274 err_page0:
14275 	return NULL;
14276 }
14277 
14278 static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
14279 {
14280 	unsigned int alloc_size = 2000;
14281 	unsigned int headroom = 102, doffset = 72, data_size = 1308;
14282 	struct sk_buff *skb[2];
14283 	int i;
14284 
14285 	/* Two skbs linked via a frag_list, both with linear data and
14286 	 * head_frag=0 (data allocated by kmalloc); each carries 1308 bytes
14287 	 * of TCP payload, 2616 bytes total. Data offset is 72 bytes
14288 	 * (40-byte IPv6 hdr + 32-byte TCP hdr), plus some headroom.
14289 	 */
14290 	for (i = 0; i < 2; i++) {
14291 		skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
14292 		if (!skb[i]) {
14293 			if (i == 0)
14294 				goto err_skb0;
14295 			else
14296 				goto err_skb1;
14297 		}
14298 
14299 		skb[i]->protocol = htons(ETH_P_IPV6);
14300 		skb_reserve(skb[i], headroom);
14301 		skb_put(skb[i], doffset + data_size);
14302 		skb_reset_network_header(skb[i]);
14303 		if (i == 0)
14304 			skb_reset_mac_header(skb[i]);
14305 		else
14306 			skb_set_mac_header(skb[i], -ETH_HLEN);
14307 		__skb_pull(skb[i], doffset);
14308 	}
14309 
14310 	/* Set up shinfo.
14311 	 * Mimic bpf_skb_proto_4_to_6(), which resets gso_segs and assigns
14312 	 * a reduced gso_size.
14313 	 */
14314 	skb_shinfo(skb[0])->gso_size = 1288;
14315 	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
14316 	skb_shinfo(skb[0])->gso_segs = 0;
14317 	skb_shinfo(skb[0])->frag_list = skb[1];
14318 
14319 	/* adjust skb[0]'s len */
14320 	skb[0]->len += skb[1]->len;
14321 	skb[0]->data_len += skb[1]->len;
14322 	skb[0]->truesize += skb[1]->truesize;
14323 
14324 	return skb[0];
14325 
14326 err_skb1:
14327 	kfree_skb(skb[0]);
14328 err_skb0:
14329 	return NULL;
14330 }
14331 
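/*
 * skb_segment() regression tests. Each entry builds a crafted GSO skb
 * and segments it with the given netdev feature set; the test passes as
 * long as skb_segment() does not return an error.
 */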
14332 struct skb_segment_test {
14333 	const char *descr;
14334 	struct sk_buff *(*build_skb)(void);
14335 	netdev_features_t features;
14336 };
14337 
14338 static struct skb_segment_test skb_segment_tests[] __initconst = {
14339 	{
14340 		.descr = "gso_with_rx_frags",
14341 		.build_skb = build_test_skb,
14342 		.features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
14343 			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
14344 	},
14345 	{
14346 		.descr = "gso_linear_no_head_frag",
14347 		.build_skb = build_test_skb_linear_no_head_frag,
14348 		.features = NETIF_F_SG | NETIF_F_FRAGLIST |
14349 			    NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
14350 			    NETIF_F_LLTX | NETIF_F_GRO |
14351 			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
14352 			    NETIF_F_HW_VLAN_STAG_TX
14353 	}
14354 };
14355 
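/* Segment one crafted skb; any non-error result counts as a pass. */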
14356 static __init int test_skb_segment_single(const struct skb_segment_test *test)
14357 {
14358 	struct sk_buff *skb, *segs;
14359 	int ret = -1;
14360 
14361 	skb = test->build_skb();
14362 	if (!skb) {
14363 		pr_info("%s: failed to build test skb", __func__);
14364 		goto done;
14365 	}
14366 
14367 	segs = skb_segment(skb, test->features);
14368 	if (!IS_ERR(segs)) {
14369 		kfree_skb_list(segs);
14370 		ret = 0;
14371 	}
14372 	kfree_skb(skb);
14373 done:
14374 	return ret;
14375 }
14376 
14377 static __init int test_skb_segment(void)
14378 {
14379 	int i, err_cnt = 0, pass_cnt = 0;
14380 
14381 	for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
14382 		const struct skb_segment_test *test = &skb_segment_tests[i];
14383 
14384 		pr_info("#%d %s ", i, test->descr);
14385 
14386 		if (test_skb_segment_single(test)) {
14387 			pr_cont("FAIL\n");
14388 			err_cnt++;
14389 		} else {
14390 			pr_cont("PASS\n");
14391 			pass_cnt++;
14392 		}
14393 	}
14394 
14395 	pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
14396 		pass_cnt, err_cnt);
14397 	return err_cnt ? -EINVAL : 0;
14398 }
14399 
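/*
 * Main driver for the filter tests: build each selected test program,
 * free any fill-handler allocation, run the program and tally PASS/FAIL
 * counts together with how many of the programs were JITed.
 */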
14400 static __init int test_bpf(void)
14401 {
14402 	int i, err_cnt = 0, pass_cnt = 0;
14403 	int jit_cnt = 0, run_cnt = 0;
14404 
14405 	for (i = 0; i < ARRAY_SIZE(tests); i++) {
14406 		struct bpf_prog *fp;
14407 		int err;
14408 
14409 		cond_resched();
14410 		if (exclude_test(i))
14411 			continue;
14412 
14413 		pr_info("#%d %s ", i, tests[i].descr);
14414 
14415 		if (tests[i].fill_helper &&
14416 		    tests[i].fill_helper(&tests[i]) < 0) {
14417 			pr_cont("FAIL to prog_fill\n");
14418 			continue;
14419 		}
14420 
14421 		fp = generate_filter(i, &err);
14422 
14423 		if (tests[i].fill_helper) {
14424 			kfree(tests[i].u.ptr.insns);
14425 			tests[i].u.ptr.insns = NULL;
14426 		}
14427 
14428 		if (fp == NULL) {
14429 			if (err == 0) {
14430 				pass_cnt++;
14431 				continue;
14432 			}
14433 			err_cnt++;
14434 			continue;
14435 		}
14436 
14437 		pr_cont("jited:%u ", fp->jited);
14438 
14439 		run_cnt++;
14440 		if (fp->jited)
14441 			jit_cnt++;
14442 
14443 		err = run_one(fp, &tests[i]);
14444 		release_filter(fp, i);
14445 
14446 		if (err) {
14447 			pr_cont("FAIL (%d times)\n", err);
14448 			err_cnt++;
14449 		} else {
14450 			pr_cont("PASS\n");
14451 			pass_cnt++;
14452 		}
14453 	}
14454 
14455 	pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
14456 		pass_cnt, err_cnt, jit_cnt, run_cnt);
14457 
14458 	return err_cnt ? -EINVAL : 0;
14459 }
14460 
14461 struct tail_call_test {
14462 	const char *descr;
14463 	struct bpf_insn insns[MAX_INSNS];
14464 	int flags;
14465 	int result;
14466 	int stack_depth;
14467 };
14468 
14469 /* Flags that can be passed to tail call test cases */
14470 #define FLAG_NEED_STATE		BIT(0)
14471 #define FLAG_RESULT_IN_STATE	BIT(1)
14472 
14473 /*
14474  * Magic marker used in test snippets for tail calls below.
14475  * BPF_LD to R2 and BPF_MOV to R3 with this immediate value are
14476  * replaced with the proper values by the test runner.
14477  */
14478 #define TAIL_CALL_MARKER 0x7a11ca11
14479 
14480 /* Special offset to indicate a NULL call target */
14481 #define TAIL_CALL_NULL 0x7fff
14482 
14483 /* Special offset to indicate an out-of-range index */
14484 #define TAIL_CALL_INVALID 0x7ffe
14485 
14486 #define TAIL_CALL(offset)			       \
14487 	BPF_LD_IMM64(R2, TAIL_CALL_MARKER),	       \
14488 	BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
14489 		     offset, TAIL_CALL_MARKER),	       \
14490 	BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
14491 
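/*
 * After relocation by prepare_tail_call_tests(), a TAIL_CALL(offset)
 * sequence behaves roughly like (pseudo-code, for illustration only):
 *
 *	R2 = (u64)progs;		// the bpf_array used as tail call map
 *	R3 = <resolved index>;		// current test index + offset
 *	bpf_tail_call(ctx, R2, R3);
 *
 * TAIL_CALL_NULL resolves to the trailing NULL slot of the map and
 * TAIL_CALL_INVALID to an index beyond map.max_entries, exercising the
 * corresponding error paths.
 */
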
14492 /*
14493  * A test function to be called from a BPF program, clobbering a lot of
14494  * CPU registers in the process. A JITed BPF program calling this function
14495  * must save and restore any caller-saved registers it uses for internal
14496  * state, for example the current tail call count.
14497  */
14498 BPF_CALL_1(bpf_test_func, u64, arg)
14499 {
14500 	char buf[64];
14501 	long a = 0;
14502 	long b = 1;
14503 	long c = 2;
14504 	long d = 3;
14505 	long e = 4;
14506 	long f = 5;
14507 	long g = 6;
14508 	long h = 7;
14509 
14510 	return snprintf(buf, sizeof(buf),
14511 			"%ld %lu %lx %ld %lu %lx %ld %lu %x",
14512 			a, b, c, d, e, f, g, h, (int)arg);
14513 }
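
/*
 * Expose bpf_test_func to the test snippets under an ID one past the
 * last real helper, so that the relocation loop in
 * prepare_tail_call_tests() can tell calls to it apart from calls to
 * genuine kernel helpers.
 */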
14514 #define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
14515 
14516 /*
14517  * Tail call tests. Each test case may call any other test in the table,
14518  * including itself, specified as a relative index offset from the calling
14519  * test. The index TAIL_CALL_NULL can be used to specify a NULL target
14520  * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
14521  * results in a target index that is out of range.
14522  */
14523 static struct tail_call_test tail_call_tests[] = {
14524 	{
14525 		"Tail call leaf",
14526 		.insns = {
14527 			BPF_ALU64_REG(BPF_MOV, R0, R1),
14528 			BPF_ALU64_IMM(BPF_ADD, R0, 1),
14529 			BPF_EXIT_INSN(),
14530 		},
14531 		.result = 1,
14532 	},
14533 	{
14534 		"Tail call 2",
14535 		.insns = {
14536 			BPF_ALU64_IMM(BPF_ADD, R1, 2),
14537 			TAIL_CALL(-1),
14538 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
14539 			BPF_EXIT_INSN(),
14540 		},
14541 		.result = 3,
14542 	},
14543 	{
14544 		"Tail call 3",
14545 		.insns = {
14546 			BPF_ALU64_IMM(BPF_ADD, R1, 3),
14547 			TAIL_CALL(-1),
14548 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
14549 			BPF_EXIT_INSN(),
14550 		},
14551 		.result = 6,
14552 	},
14553 	{
14554 		"Tail call 4",
14555 		.insns = {
14556 			BPF_ALU64_IMM(BPF_ADD, R1, 4),
14557 			TAIL_CALL(-1),
14558 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
14559 			BPF_EXIT_INSN(),
14560 		},
14561 		.result = 10,
14562 	},
14563 	{
14564 		"Tail call error path, max count reached",
14565 		.insns = {
14566 			BPF_LDX_MEM(BPF_W, R2, R1, 0),
14567 			BPF_ALU64_IMM(BPF_ADD, R2, 1),
14568 			BPF_STX_MEM(BPF_W, R1, R2, 0),
14569 			TAIL_CALL(0),
14570 			BPF_EXIT_INSN(),
14571 		},
14572 		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14573 		.result = (MAX_TAIL_CALL_CNT + 1 + 1) * MAX_TESTRUNS,
14574 	},
14575 	{
14576 		"Tail call count preserved across function calls",
14577 		.insns = {
14578 			BPF_LDX_MEM(BPF_W, R2, R1, 0),
14579 			BPF_ALU64_IMM(BPF_ADD, R2, 1),
14580 			BPF_STX_MEM(BPF_W, R1, R2, 0),
14581 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
14582 			BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
14583 			BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
14584 			BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
14585 			BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
14586 			BPF_CALL_REL(BPF_FUNC_jiffies64),
14587 			BPF_CALL_REL(BPF_FUNC_test_func),
14588 			BPF_LDX_MEM(BPF_DW, R1, R10, -8),
14589 			BPF_ALU32_REG(BPF_MOV, R0, R1),
14590 			TAIL_CALL(0),
14591 			BPF_EXIT_INSN(),
14592 		},
14593 		.stack_depth = 8,
14594 		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14595 		.result = (MAX_TAIL_CALL_CNT + 1 + 1) * MAX_TESTRUNS,
14596 	},
14597 	{
14598 		"Tail call error path, NULL target",
14599 		.insns = {
14600 			BPF_LDX_MEM(BPF_W, R2, R1, 0),
14601 			BPF_ALU64_IMM(BPF_ADD, R2, 1),
14602 			BPF_STX_MEM(BPF_W, R1, R2, 0),
14603 			TAIL_CALL(TAIL_CALL_NULL),
14604 			BPF_EXIT_INSN(),
14605 		},
14606 		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14607 		.result = MAX_TESTRUNS,
14608 	},
14609 	{
14610 		"Tail call error path, index out of range",
14611 		.insns = {
14612 			BPF_LDX_MEM(BPF_W, R2, R1, 0),
14613 			BPF_ALU64_IMM(BPF_ADD, R2, 1),
14614 			BPF_STX_MEM(BPF_W, R1, R2, 0),
14615 			TAIL_CALL(TAIL_CALL_INVALID),
14616 			BPF_EXIT_INSN(),
14617 		},
14618 		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
14619 		.result = MAX_TESTRUNS,
14620 	},
14621 };
14622 
14623 static void __init destroy_tail_call_tests(struct bpf_array *progs)
14624 {
14625 	int i;
14626 
14627 	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
14628 		if (progs->ptrs[i])
14629 			bpf_prog_free(progs->ptrs[i]);
14630 	kfree(progs);
14631 }
14632 
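/*
 * Build the bpf_array used as the tail call map: translate every test
 * snippet into a bpf_prog, patch the TAIL_CALL_MARKER immediates with
 * the map pointer and the resolved target index, and turn the pseudo
 * helper calls into real calls (or into NOPs when a helper address
 * cannot be encoded as a relative call).
 */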
14633 static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
14634 {
14635 	int ntests = ARRAY_SIZE(tail_call_tests);
14636 	struct bpf_array *progs;
14637 	int which, err;
14638 
14639 	/* Allocate the table of programs to be used for tail calls */
14640 	progs = kzalloc(sizeof(*progs) + (ntests + 1) * sizeof(progs->ptrs[0]),
14641 			GFP_KERNEL);
14642 	if (!progs)
14643 		goto out_nomem;
14644 
14645 	/* Create all eBPF programs and populate the table */
14646 	for (which = 0; which < ntests; which++) {
14647 		struct tail_call_test *test = &tail_call_tests[which];
14648 		struct bpf_prog *fp;
14649 		int len, i;
14650 
14651 		/* Compute the number of program instructions */
14652 		for (len = 0; len < MAX_INSNS; len++) {
14653 			struct bpf_insn *insn = &test->insns[len];
14654 
14655 			if (len < MAX_INSNS - 1 &&
14656 			    insn->code == (BPF_LD | BPF_DW | BPF_IMM))
14657 				len++;
14658 			if (insn->code == 0)
14659 				break;
14660 		}
14661 
14662 		/* Allocate and initialize the program */
14663 		fp = bpf_prog_alloc(bpf_prog_size(len), 0);
14664 		if (!fp)
14665 			goto out_nomem;
14666 
14667 		fp->len = len;
14668 		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
14669 		fp->aux->stack_depth = test->stack_depth;
14670 		memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
14671 
14672 		/* Relocate runtime tail call offsets and addresses */
14673 		for (i = 0; i < len; i++) {
14674 			struct bpf_insn *insn = &fp->insnsi[i];
14675 			long addr = 0;
14676 
14677 			switch (insn->code) {
14678 			case BPF_LD | BPF_DW | BPF_IMM:
14679 				if (insn->imm != TAIL_CALL_MARKER)
14680 					break;
14681 				insn[0].imm = (u32)(long)progs;
14682 				insn[1].imm = ((u64)(long)progs) >> 32;
14683 				break;
14684 
14685 			case BPF_ALU | BPF_MOV | BPF_K:
14686 				if (insn->imm != TAIL_CALL_MARKER)
14687 					break;
14688 				if (insn->off == TAIL_CALL_NULL)
14689 					insn->imm = ntests;
14690 				else if (insn->off == TAIL_CALL_INVALID)
14691 					insn->imm = ntests + 1;
14692 				else
14693 					insn->imm = which + insn->off;
14694 				insn->off = 0;
14695 				break;
14696 
14697 			case BPF_JMP | BPF_CALL:
14698 				if (insn->src_reg != BPF_PSEUDO_CALL)
14699 					break;
14700 				switch (insn->imm) {
14701 				case BPF_FUNC_get_numa_node_id:
14702 					addr = (long)&numa_node_id;
14703 					break;
14704 				case BPF_FUNC_ktime_get_ns:
14705 					addr = (long)&ktime_get_ns;
14706 					break;
14707 				case BPF_FUNC_ktime_get_boot_ns:
14708 					addr = (long)&ktime_get_boot_fast_ns;
14709 					break;
14710 				case BPF_FUNC_ktime_get_coarse_ns:
14711 					addr = (long)&ktime_get_coarse_ns;
14712 					break;
14713 				case BPF_FUNC_jiffies64:
14714 					addr = (long)&get_jiffies_64;
14715 					break;
14716 				case BPF_FUNC_test_func:
14717 					addr = (long)&bpf_test_func;
14718 					break;
14719 				default:
14720 					err = -EFAULT;
14721 					goto out_err;
14722 				}
14723 				*insn = BPF_EMIT_CALL(addr);
14724 				if ((long)__bpf_call_base + insn->imm != addr)
14725 					*insn = BPF_JMP_A(0); /* Skip: NOP */
14726 				break;
14727 			}
14728 		}
14729 
14730 		fp = bpf_prog_select_runtime(fp, &err);
14731 		if (err)
14732 			goto out_err;
14733 
14734 		progs->ptrs[which] = fp;
14735 	}
14736 
14737 	/* The last entry contains a NULL program pointer */
14738 	progs->map.max_entries = ntests + 1;
14739 	*pprogs = progs;
14740 	return 0;
14741 
14742 out_nomem:
14743 	err = -ENOMEM;
14744 
14745 out_err:
14746 	if (progs)
14747 		destroy_tail_call_tests(progs);
14748 	return err;
14749 }
14750 
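/*
 * Run each tail call snippet MAX_TESTRUNS times. Tests flagged
 * FLAG_NEED_STATE get a pointer to a local counter as their context,
 * which the snippets increment on every entry; with FLAG_RESULT_IN_STATE
 * that counter, rather than R0, is compared against the expected result.
 */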
14751 static __init int test_tail_calls(struct bpf_array *progs)
14752 {
14753 	int i, err_cnt = 0, pass_cnt = 0;
14754 	int jit_cnt = 0, run_cnt = 0;
14755 
14756 	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
14757 		struct tail_call_test *test = &tail_call_tests[i];
14758 		struct bpf_prog *fp = progs->ptrs[i];
14759 		int *data = NULL;
14760 		int state = 0;
14761 		u64 duration;
14762 		int ret;
14763 
14764 		cond_resched();
14765 
14766 		pr_info("#%d %s ", i, test->descr);
14767 		if (!fp) {
14768 			err_cnt++;
14769 			continue;
14770 		}
14771 		pr_cont("jited:%u ", fp->jited);
14772 
14773 		run_cnt++;
14774 		if (fp->jited)
14775 			jit_cnt++;
14776 
14777 		if (test->flags & FLAG_NEED_STATE)
14778 			data = &state;
14779 		ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
14780 		if (test->flags & FLAG_RESULT_IN_STATE)
14781 			ret = state;
14782 		if (ret == test->result) {
14783 			pr_cont("%lld PASS", duration);
14784 			pass_cnt++;
14785 		} else {
14786 			pr_cont("ret %d != %d FAIL", ret, test->result);
14787 			err_cnt++;
14788 		}
14789 	}
14790 
14791 	pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
14792 		__func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
14793 
14794 	return err_cnt ? -EINVAL : 0;
14795 }
14796 
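/*
 * Module entry point: run the interpreter/JIT suite, then the tail call
 * tests, then the skb_segment() tests. Any failure propagates a negative
 * errno, so loading the module fails if any of the tests fail.
 */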
14797 static int __init test_bpf_init(void)
14798 {
14799 	struct bpf_array *progs = NULL;
14800 	int ret;
14801 
14802 	ret = prepare_bpf_tests();
14803 	if (ret < 0)
14804 		return ret;
14805 
14806 	ret = test_bpf();
14807 	destroy_bpf_tests();
14808 	if (ret)
14809 		return ret;
14810 
14811 	ret = prepare_tail_call_tests(&progs);
14812 	if (ret)
14813 		return ret;
14814 	ret = test_tail_calls(progs);
14815 	destroy_tail_call_tests(progs);
14816 	if (ret)
14817 		return ret;
14818 
14819 	return test_skb_segment();
14820 }
14821 
14822 static void __exit test_bpf_exit(void)
14823 {
14824 }
14825 
14826 module_init(test_bpf_init);
14827 module_exit(test_bpf_exit);
14828 
14829 MODULE_LICENSE("GPL");
14830