xref: /openbmc/linux/tools/testing/selftests/bpf/verifier/atomic_fetch.c (revision 03ab8e6297acd1bc0eedaa050e2a1635c576fd11)
{
	/*
	 * Spill a map pointer to fp-8, AND the slot with -1 (identity
	 * mask) via a 64-bit atomic fetch, then fill the slot back into
	 * a register and try to write it into map memory. Privileged is
	 * accepted (the slot still tracks as a pointer); unprivileged
	 * must reject the pointer store into the map as a leak.
	 */
	"atomic dw/fetch and address leakage of (map ptr & -1) via stack slot",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		/* r2 = fp - 8: scratch stack slot */
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		/* spill map ptr r9 to fp-8 */
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		/* *(u64 *)(fp-8) &= -1, old value fetched into r1 */
		BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		/* fill the (unchanged) slot back into r9 */
		BPF_LDX_MEM(BPF_DW, BPF_REG_9, BPF_REG_2, 0),
		/* reuse fp-8 as zeroed lookup key */
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		/* attempted leak: store map ptr into map value */
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.errstr_unpriv = "leaking pointer from stack off -8",
},
{
	/*
	 * Same as the stack-slot variant, but the leak path is the
	 * value returned by the atomic fetch itself: BPF_FETCH places
	 * the old slot contents (the map pointer) into the source
	 * register r1, which is then moved to r9 and written into map
	 * memory. Privileged accepted; unprivileged must reject.
	 */
	"atomic dw/fetch and address leakage of (map ptr & -1) via returned value",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		/* r2 = fp - 8: scratch stack slot */
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		/* spill map ptr r9 to fp-8 */
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		/* *(u64 *)(fp-8) &= -1, old value (map ptr) fetched into r1 */
		BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		/* carry the fetched pointer in r9 */
		BPF_MOV64_REG(BPF_REG_9, BPF_REG_1),
		/* reuse fp-8 as zeroed lookup key */
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		/* attempted leak: store fetched map ptr into map value */
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.errstr_unpriv = "leaking pointer from stack off -8",
},
{
	/*
	 * 32-bit (BPF_W) variant of the stack-slot test: the atomic
	 * fetch touches only 4 bytes of the 8-byte pointer spill at
	 * fp-8, so the subsequent 8-byte fill must be rejected for
	 * everyone ("invalid size of register fill"), not just unpriv.
	 */
	"atomic w/fetch and address leakage of (map ptr & -1) via stack slot",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		/* r2 = fp - 8: scratch stack slot */
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		/* spill map ptr r9 to fp-8 (8 bytes) */
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		/* 4-byte atomic AND on the 8-byte pointer spill */
		BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		/* 8-byte fill of the now partially-clobbered spill */
		BPF_LDX_MEM(BPF_DW, BPF_REG_9, BPF_REG_2, 0),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = REJECT,
	.errstr = "invalid size of register fill",
},
{
	/*
	 * 32-bit (BPF_W) variant of the returned-value test: the
	 * 4-byte atomic fetch on the 8-byte pointer spill at fp-8 is
	 * rejected for everyone with "invalid size of register fill".
	 */
	"atomic w/fetch and address leakage of (map ptr & -1) via returned value",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		/* r2 = fp - 8: scratch stack slot */
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		/* spill map ptr r9 to fp-8 (8 bytes) */
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		/* 4-byte atomic AND on the 8-byte pointer spill; old value into r1 */
		BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		/* carry the fetched value in r9 */
		BPF_MOV64_REG(BPF_REG_9, BPF_REG_1),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = REJECT,
	.errstr = "invalid size of register fill",
},
/*
 * Generate one ACCEPT test exercising a 64-bit atomic fetch op on a
 * stack slot: initialize fp-8 to operand1, apply <op> with operand2
 * (old value lands in src_reg), then verify both the fetched old value
 * and the final memory contents (expect).
 *
 * Fix: the test name previously stringized dst_reg for both the "src="
 * and "dst=" fields, so distinct register pairings produced misleading
 * (and colliding) names. Also corrected comment typos ("operan1",
 * hard-coded atomic_fetch_add, "result" vs. the expect parameter).
 */
#define __ATOMIC_FETCH_OP_TEST(src_reg, dst_reg, operand1, op, operand2, expect) \
	{								\
		"atomic fetch " #op ", src=" #src_reg " dst=" #dst_reg,	\
		.insns = {						\
			/* u64 val = operand1; */			\
			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, operand1),	\
			/* u64 old = atomic_fetch_<op>(&val, operand2); */ \
			BPF_MOV64_REG(dst_reg, BPF_REG_10),		\
			BPF_MOV64_IMM(src_reg, operand2),		\
			BPF_ATOMIC_OP(BPF_DW, op,			\
				      dst_reg, src_reg, -8),		\
			/* if (old != operand1) exit(1); */		\
			BPF_JMP_IMM(BPF_JEQ, src_reg, operand1, 2),	\
			BPF_MOV64_IMM(BPF_REG_0, 1),			\
			BPF_EXIT_INSN(),				\
			/* if (val != expect) exit(2); */		\
			BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -8),	\
			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, expect, 2),	\
			BPF_MOV64_IMM(BPF_REG_0, 2),			\
			BPF_EXIT_INSN(),				\
			/* exit(0); */					\
			BPF_MOV64_IMM(BPF_REG_0, 0),			\
			BPF_EXIT_INSN(),				\
		},							\
		.result = ACCEPT,					\
	}
/* fetch-add: 1 + 2 -> 3, across register pairs including r0 as src/dst */
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 1, BPF_ADD | BPF_FETCH, 2, 3),
/* fetch-and: 0x010 & 0x011 -> 0x010 */
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
/* fetch-or: 0x010 | 0x011 -> 0x011 */
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
/* fetch-xor: 0x010 ^ 0x011 -> 0x001 */
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
/* xchg: slot becomes 0x011, old value 0x010 fetched (fetch is implicit) */
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_XCHG, 0x011, 0x011),
#undef __ATOMIC_FETCH_OP_TEST
152