1 // SPDX-License-Identifier: GPL-2.0
2 /* Copyright (c) 2023 Meta Platforms, Inc. and affiliates. */
3 
4 #include "vmlinux.h"
5 #include <bpf/bpf_helpers.h>
6 #include <bpf/bpf_tracing.h>
7 
/* Sign-extending (ldsx) BPF loads need a 64-bit target that supports them
 * (arm64, x86-64, riscv64) and clang >= 18 to emit them.  Userspace reads
 * 'skip' to decide whether to run these programs at all.
 */
#if (defined(__TARGET_ARCH_arm64) || defined(__TARGET_ARCH_x86) || \
     (defined(__TARGET_ARCH_riscv) && __riscv_xlen == 64)) && __clang_major__ >= 18
const volatile int skip = 0;
#else
const volatile int skip = 1;
#endif

/* val1/val2 are 'const volatile', so they are placed in a read-only map;
 * val3/val4 are plain globals backed by a regular read/write map.  All four
 * are -1 so that only correctly sign-extended loads compare equal below.
 */
volatile const short val1 = -1;
volatile const int val2 = -1;
short val3 = -1;
int val4 = -1;
/* done1/done2: run-once guards; ret1/ret2: results read back by userspace. */
int done1, done2, ret1, ret2;
20 
/* Exercise sign-extended loads from a read-only map: comparing the short
 * val1 against the int val2 forces the compiler to widen val1, which with
 * clang >= 18 should use an ldsx instruction.  Sets ret1 = 1 iff the
 * sign-extended values compare equal (both are -1, so they should).
 */
SEC("?raw_tp/sys_enter")
int rdonly_map_prog(const void *ctx)
{
	/* Run once so userspace observes a single, stable result. */
	if (done1)
		return 0;

	done1 = 1;
	/* val1/val2 readonly map */
	if (val1 == val2)
		ret1 = 1;
	return 0;

}
34 
/* Same check as rdonly_map_prog, but against plain globals backed by a
 * regular read/write map: comparing the short val3 with the int val4
 * requires a sign-extending widening load.  Sets ret2 = 1 iff equal.
 */
SEC("?raw_tp/sys_enter")
int map_val_prog(const void *ctx)
{
	/* Run once so userspace observes a single, stable result. */
	if (done2)
		return 0;

	done2 = 1;
	/* val3/val4 regular read/write map */
	if (val3 == val4)
		ret2 = 1;
	return 0;

}
48 
/* Mirror of the struct passed by the bpf_testmod test kfunc; only the int
 * member is read (and sign-extended into the s64 global below).
 */
struct bpf_testmod_struct_arg_1 {
	int a;
};

/* Receives p->a widened from int; read back by userspace. */
long long int_member;
54 
55 SEC("?fentry/bpf_testmod_test_arg_ptr_to_struct")
56 int BPF_PROG2(test_ptr_struct_arg, struct bpf_testmod_struct_arg_1 *, p)
57 {
58 	/* probed memory access */
59 	int_member = p->a;
60         return 0;
61 }
62 
/* Capture the sign-extended values of two int ctx members; read by userspace. */
long long set_optlen, set_retval;

/* Exercise sign extension when loading int members of the bpf_sockopt
 * context into s64 globals.  Writes -1 into optlen/retval, reads them back
 * through the (volatile, so loads are not elided) ctx pointer into the
 * globals, then restores the original values so the hooked getsockopt call
 * is unaffected.
 */
SEC("?cgroup/getsockopt")
int _getsockopt(volatile struct bpf_sockopt *ctx)
{
	int old_optlen, old_retval;

	/* Save so the members can be restored before returning. */
	old_optlen = ctx->optlen;
	old_retval = ctx->retval;

	ctx->optlen = -1;
	ctx->retval = -1;

	/* sign extension for ctx member */
	set_optlen = ctx->optlen;
	set_retval = ctx->retval;

	ctx->optlen = old_optlen;
	ctx->retval = old_retval;

	return 0;
}
85 
/* Captures the narrowed, sign-extended low byte of skb->mark; read by userspace. */
long long set_mark;

/* Exercise a narrowed (one-byte) sign-extended load of a ctx member.
 * Writes 0xf6fe into skb->mark so its low byte (0xfe) is negative when
 * interpreted as s8; a correct s8 load must yield -2 in set_mark.  The
 * original mark is restored before returning.
 */
SEC("?tc")
int _tc(volatile struct __sk_buff *skb)
{
	long long tmp_mark;
	int old_mark;

	/* Save so mark can be restored before returning. */
	old_mark = skb->mark;

	skb->mark = 0xf6fe;

	/* narrowed sign extension for ctx member */
#if __clang_major__ >= 18
	/* force narrow one-byte signed load. Otherwise, compiler may
	 * generate a 32-bit unsigned load followed by an s8 movsx.
	 */
	asm volatile ("r1 = *(s8 *)(%[ctx] + %[off_mark])\n\t"
		      "%[tmp_mark] = r1"
		      : [tmp_mark]"=r"(tmp_mark)
		      : [ctx]"r"(skb),
			[off_mark]"i"(offsetof(struct __sk_buff, mark))
		      : "r1");
#else
	/* Older clang: approximate via a char cast (compiler-chosen codegen). */
	tmp_mark = (char)skb->mark;
#endif
	set_mark = tmp_mark;

	skb->mark = old_mark;

	return 0;
}
118 
/* Program license, read by the loader; GPL is required for some helpers. */
char _license[] SEC("license") = "GPL";
120