/*
 * SPDX-License-Identifier: GPL-2.0-or-later
 *
 * Check that we detect all expected memory accesses through the plugin API.
 * Used in conjunction with the ./check-plugin-mem-access.sh check script.
 * The output of this program is the list of patterns expected in the
 * plugin output.
 *
 * 8, 16 and 32 bit load/store are tested on all architectures.
 * 64 and 128 bit load/store are tested on aarch64/x64 only.
 * Atomic operations (8, 16, 32 and 64 bit) are tested on x64 only.
 */
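/*
 * Typical use (driven by the check script above): run this binary under
 * QEMU with the test memory plugin loaded, then verify that every pattern
 * printed on stdout matches a line of the plugin output.
 */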

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#if defined(__x86_64__)
#include <emmintrin.h>
#elif defined(__aarch64__)
#include <arm_neon.h>
#endif /* __x86_64__ */

static void *data;

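/*
 * Each test prints the pattern(s) that must appear in the plugin output,
 * as comma separated fields (the .* wildcard stands for the access
 * address, which is not checked):
 *   ,<function>,<address>,<size in bits>,<load|store>,<value>
 * For example:
 */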
/* ,store_u8,.*,8,store,0xf1 */
#define PRINT_EXPECTED(function, type, value, action)             \
    do {                                                          \
        printf(",%s,.*,%d,%s,%s\n",                               \
               #function, (int) sizeof(type) * 8, action, value); \
    } while (0)

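/*
 * DEFINE_STORE(name, type, value) defines store_<name>(), which stores
 * <value> into the shared buffer and prints the matching expected pattern.
 * DEFINE_LOAD below does the same for loads.
 */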
#define DEFINE_STORE(name, type, value)                   \
                                                          \
static void print_expected_store_##name(void)             \
{                                                         \
    PRINT_EXPECTED(store_##name, type, #value, "store");  \
}                                                         \
                                                          \
static void store_##name(void)                            \
{                                                         \
    *((type *)data) = value;                              \
    print_expected_store_##name();                        \
}

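/*
 * Atomic tests first seed the location with 0x42, then issue a
 * compare-and-swap: the plugin is expected to report one load (value 0x42,
 * possibly printed with leading zeros) followed by one store of the new
 * value.
 */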
#define DEFINE_ATOMIC_OP(name, type, value)                    \
                                                               \
static void print_expected_atomic_op_##name(void)              \
{                                                              \
    PRINT_EXPECTED(atomic_op_##name, type, "0x0*42", "load");  \
    PRINT_EXPECTED(atomic_op_##name, type, #value, "store");   \
}                                                              \
                                                               \
static void atomic_op_##name(void)                             \
{                                                              \
    *((type *)data) = 0x42;                                    \
    __sync_val_compare_and_swap((type *)data, 0x42, value);    \
    print_expected_atomic_op_##name();                         \
}

#define DEFINE_LOAD(name, type, value)                  \
                                                        \
static void print_expected_load_##name(void)            \
{                                                       \
    PRINT_EXPECTED(load_##name, type, #value, "load");  \
}                                                       \
                                                        \
static void load_##name(void)                           \
{                                                       \
    /* volatile forces the load to be generated. */     \
    volatile type src = *((type *) data);               \
    volatile type dest = src;                           \
    (void)src, (void)dest;                              \
    print_expected_load_##name();                       \
}

DEFINE_STORE(u8, uint8_t, 0xf1)
DEFINE_LOAD(u8, uint8_t, 0xf1)
DEFINE_STORE(u16, uint16_t, 0xf123)
DEFINE_LOAD(u16, uint16_t, 0xf123)
DEFINE_STORE(u32, uint32_t, 0xff112233)
DEFINE_LOAD(u32, uint32_t, 0xff112233)

#if defined(__x86_64__) || defined(__aarch64__)
DEFINE_STORE(u64, uint64_t, 0xf123456789abcdef)
DEFINE_LOAD(u64, uint64_t, 0xf123456789abcdef)

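/*
 * 128-bit accesses are generated through vector intrinsics: SSE2 on
 * x86_64 (emmintrin.h) and NEON on aarch64 (arm_neon.h).
 */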
static void print_expected_store_u128(void)
{
    PRINT_EXPECTED(store_u128, __int128,
                   "0xf122334455667788f123456789abcdef", "store");
}

static void store_u128(void)
{
#ifdef __x86_64__
    /* _mm_set_epi32 takes its arguments most-significant word first. */
    _mm_store_si128(data, _mm_set_epi32(0xf1223344, 0x55667788,
                                        0xf1234567, 0x89abcdef));
#else
    /* On little-endian aarch64, init[0] becomes the least-significant word. */
    const uint32_t init[4] = {0x89abcdef, 0xf1234567, 0x55667788, 0xf1223344};
    uint32x4_t vec = vld1q_u32(init);
    vst1q_u32(data, vec);
#endif /* __x86_64__ */
    print_expected_store_u128();
}

static void print_expected_load_u128(void)
{
    PRINT_EXPECTED(load_u128, __int128,
                   "0xf122334455667788f123456789abcdef", "load");
}

static void load_u128(void)
{
#ifdef __x86_64__
    __m128i var = _mm_load_si128(data);
#else
    uint32x4_t var = vld1q_u32(data);
#endif /* __x86_64__ */
    (void) var;
    print_expected_load_u128();
}
#endif /* __x86_64__ || __aarch64__ */

#if defined(__x86_64__)
DEFINE_ATOMIC_OP(u8, uint8_t, 0xf1)
DEFINE_ATOMIC_OP(u16, uint16_t, 0xf123)
DEFINE_ATOMIC_OP(u32, uint32_t, 0xff112233)
DEFINE_ATOMIC_OP(u64, uint64_t, 0xf123456789abcdef)
#endif /* __x86_64__ */

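/* Dummy thread body: its only purpose is to make the process multi-threaded. */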
static void *f(void *p)
{
    return NULL;
}

int main(void)
{
    /*
     * We force the creation of a second thread to enable the CPU flag
     * CF_PARALLEL. This will generate atomic operations when needed.
     */
    pthread_t thread;
    pthread_create(&thread, NULL, &f, NULL);
    pthread_join(thread, NULL);

    /* allocate enough storage for accesses up to 128 bits wide */
    data = malloc(16);

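    /* Each call below performs the access and prints its expected pattern(s). */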
    store_u8();
    load_u8();

    store_u16();
    load_u16();

    store_u32();
    load_u32();

#if defined(__x86_64__) || defined(__aarch64__)
    store_u64();
    load_u64();

    store_u128();
    load_u128();
#endif /* __x86_64__ || __aarch64__ */

#if defined(__x86_64__)
    atomic_op_u8();
    atomic_op_u16();
    atomic_op_u32();
    atomic_op_u64();
#endif /* __x86_64__ */

    free(data);
}