/*
 * Template for tests that pass a non-pointer (scalar) value as the memory
 * address operand of an atomic instruction; the verifier must reject these.
 */
#define __INVALID_ATOMIC_ACCESS_TEST(op)				\
	{								\
		"atomic " #op " access through non-pointer ",		\
		.insns = {						\
			BPF_MOV64_IMM(BPF_REG_0, 1),			\
			BPF_MOV64_IMM(BPF_REG_1, 0),			\
			BPF_ATOMIC_OP(BPF_DW, op, BPF_REG_1, BPF_REG_0, -8), \
			BPF_MOV64_IMM(BPF_REG_0, 0),			\
			BPF_EXIT_INSN(),				\
		},							\
		.result = REJECT,					\
		.errstr = "R1 invalid mem access 'inv'"			\
	}
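/*
 * Instantiate the template for each supported atomic operation: the
 * arithmetic and bitwise ops with and without BPF_FETCH, plus BPF_XCHG
 * and BPF_CMPXCHG.
 */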
__INVALID_ATOMIC_ACCESS_TEST(BPF_ADD),
__INVALID_ATOMIC_ACCESS_TEST(BPF_ADD | BPF_FETCH),
__INVALID_ATOMIC_ACCESS_TEST(BPF_AND),
__INVALID_ATOMIC_ACCESS_TEST(BPF_AND | BPF_FETCH),
__INVALID_ATOMIC_ACCESS_TEST(BPF_OR),
__INVALID_ATOMIC_ACCESS_TEST(BPF_OR | BPF_FETCH),
__INVALID_ATOMIC_ACCESS_TEST(BPF_XOR),
__INVALID_ATOMIC_ACCESS_TEST(BPF_XOR | BPF_FETCH),
__INVALID_ATOMIC_ACCESS_TEST(BPF_XCHG),
__INVALID_ATOMIC_ACCESS_TEST(BPF_CMPXCHG),