1 // SPDX-License-Identifier: GPL-2.0
2 
3 // Generated by scripts/atomic/gen-atomic-instrumented.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5 
/*
 * This file provides atomic operations with explicit instrumentation (e.g.
 * KASAN, KCSAN), which should be used unless it is necessary to avoid
 * instrumentation. Where it is necessary to avoid instrumentation, the
 * raw_atomic*() operations should be used.
 */
12 #ifndef _LINUX_ATOMIC_INSTRUMENTED_H
13 #define _LINUX_ATOMIC_INSTRUMENTED_H
14 
15 #include <linux/build_bug.h>
16 #include <linux/compiler.h>
17 #include <linux/instrumented.h>
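/*
 * Illustrative sketch (not part of the generated interface): regular kernel
 * code calls the instrumented wrappers below directly, while noinstr code
 * must use the raw_atomic*() forms so that no instrumentation calls are
 * emitted. The names my_counter, my_count_event() and my_noinstr_event()
 * are hypothetical.
 *
 *	static atomic_t my_counter = ATOMIC_INIT(0);
 *
 *	static void my_count_event(void)
 *	{
 *		atomic_inc(&my_counter);	// KASAN/KCSAN observe this access
 *	}
 *
 *	static noinstr void my_noinstr_event(void)
 *	{
 *		raw_atomic_inc(&my_counter);	// no instrumentation emitted
 *	}
 */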
18 
19 /**
20  * atomic_read() - atomic load with relaxed ordering
21  * @v: pointer to atomic_t
22  *
23  * Atomically loads the value of @v with relaxed ordering.
24  *
25  * Unsafe to use in noinstr code; use raw_atomic_read() there.
26  *
27  * Return: The value loaded from @v.
28  */
29 static __always_inline int
atomic_read(const atomic_t *v)
31 {
32 	instrument_atomic_read(v, sizeof(*v));
33 	return raw_atomic_read(v);
34 }
35 
36 /**
37  * atomic_read_acquire() - atomic load with acquire ordering
38  * @v: pointer to atomic_t
39  *
40  * Atomically loads the value of @v with acquire ordering.
41  *
42  * Unsafe to use in noinstr code; use raw_atomic_read_acquire() there.
43  *
44  * Return: The value loaded from @v.
45  */
46 static __always_inline int
atomic_read_acquire(const atomic_t *v)
48 {
49 	instrument_atomic_read(v, sizeof(*v));
50 	return raw_atomic_read_acquire(v);
51 }
52 
53 /**
54  * atomic_set() - atomic set with relaxed ordering
55  * @v: pointer to atomic_t
56  * @i: int value to assign
57  *
58  * Atomically sets @v to @i with relaxed ordering.
59  *
60  * Unsafe to use in noinstr code; use raw_atomic_set() there.
61  *
62  * Return: Nothing.
63  */
64 static __always_inline void
atomic_set(atomic_t *v, int i)
66 {
67 	instrument_atomic_write(v, sizeof(*v));
68 	raw_atomic_set(v, i);
69 }
70 
71 /**
72  * atomic_set_release() - atomic set with release ordering
73  * @v: pointer to atomic_t
74  * @i: int value to assign
75  *
76  * Atomically sets @v to @i with release ordering.
77  *
78  * Unsafe to use in noinstr code; use raw_atomic_set_release() there.
79  *
80  * Return: Nothing.
81  */
82 static __always_inline void
atomic_set_release(atomic_t *v, int i)
84 {
85 	kcsan_release();
86 	instrument_atomic_write(v, sizeof(*v));
87 	raw_atomic_set_release(v, i);
88 }
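/*
 * Illustrative sketch: atomic_set_release() pairs with atomic_read_acquire()
 * for simple publication of data guarded by a flag. The variables my_data
 * and my_ready and the helpers my_publish()/my_consume() are hypothetical.
 *
 *	static int my_data;
 *	static atomic_t my_ready = ATOMIC_INIT(0);
 *
 *	static void my_publish(int val)
 *	{
 *		my_data = val;
 *		atomic_set_release(&my_ready, 1);	// orders the store to my_data before the flag
 *	}
 *
 *	static int my_consume(void)
 *	{
 *		if (atomic_read_acquire(&my_ready))	// pairs with the release above
 *			return my_data;
 *		return -1;
 *	}
 */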
89 
90 /**
91  * atomic_add() - atomic add with relaxed ordering
92  * @i: int value to add
93  * @v: pointer to atomic_t
94  *
95  * Atomically updates @v to (@v + @i) with relaxed ordering.
96  *
97  * Unsafe to use in noinstr code; use raw_atomic_add() there.
98  *
99  * Return: Nothing.
100  */
101 static __always_inline void
atomic_add(int i, atomic_t *v)
103 {
104 	instrument_atomic_read_write(v, sizeof(*v));
105 	raw_atomic_add(i, v);
106 }
107 
108 /**
109  * atomic_add_return() - atomic add with full ordering
110  * @i: int value to add
111  * @v: pointer to atomic_t
112  *
113  * Atomically updates @v to (@v + @i) with full ordering.
114  *
115  * Unsafe to use in noinstr code; use raw_atomic_add_return() there.
116  *
117  * Return: The updated value of @v.
118  */
119 static __always_inline int
atomic_add_return(int i, atomic_t *v)
121 {
122 	kcsan_mb();
123 	instrument_atomic_read_write(v, sizeof(*v));
124 	return raw_atomic_add_return(i, v);
125 }
126 
127 /**
128  * atomic_add_return_acquire() - atomic add with acquire ordering
129  * @i: int value to add
130  * @v: pointer to atomic_t
131  *
132  * Atomically updates @v to (@v + @i) with acquire ordering.
133  *
134  * Unsafe to use in noinstr code; use raw_atomic_add_return_acquire() there.
135  *
136  * Return: The updated value of @v.
137  */
138 static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
140 {
141 	instrument_atomic_read_write(v, sizeof(*v));
142 	return raw_atomic_add_return_acquire(i, v);
143 }
144 
145 /**
146  * atomic_add_return_release() - atomic add with release ordering
147  * @i: int value to add
148  * @v: pointer to atomic_t
149  *
150  * Atomically updates @v to (@v + @i) with release ordering.
151  *
152  * Unsafe to use in noinstr code; use raw_atomic_add_return_release() there.
153  *
154  * Return: The updated value of @v.
155  */
156 static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
158 {
159 	kcsan_release();
160 	instrument_atomic_read_write(v, sizeof(*v));
161 	return raw_atomic_add_return_release(i, v);
162 }
163 
164 /**
165  * atomic_add_return_relaxed() - atomic add with relaxed ordering
166  * @i: int value to add
167  * @v: pointer to atomic_t
168  *
169  * Atomically updates @v to (@v + @i) with relaxed ordering.
170  *
171  * Unsafe to use in noinstr code; use raw_atomic_add_return_relaxed() there.
172  *
173  * Return: The updated value of @v.
174  */
175 static __always_inline int
atomic_add_return_relaxed(int i, atomic_t *v)
177 {
178 	instrument_atomic_read_write(v, sizeof(*v));
179 	return raw_atomic_add_return_relaxed(i, v);
180 }
181 
182 /**
183  * atomic_fetch_add() - atomic add with full ordering
184  * @i: int value to add
185  * @v: pointer to atomic_t
186  *
187  * Atomically updates @v to (@v + @i) with full ordering.
188  *
189  * Unsafe to use in noinstr code; use raw_atomic_fetch_add() there.
190  *
191  * Return: The original value of @v.
192  */
193 static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
195 {
196 	kcsan_mb();
197 	instrument_atomic_read_write(v, sizeof(*v));
198 	return raw_atomic_fetch_add(i, v);
199 }
200 
201 /**
202  * atomic_fetch_add_acquire() - atomic add with acquire ordering
203  * @i: int value to add
204  * @v: pointer to atomic_t
205  *
206  * Atomically updates @v to (@v + @i) with acquire ordering.
207  *
208  * Unsafe to use in noinstr code; use raw_atomic_fetch_add_acquire() there.
209  *
210  * Return: The original value of @v.
211  */
212 static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
214 {
215 	instrument_atomic_read_write(v, sizeof(*v));
216 	return raw_atomic_fetch_add_acquire(i, v);
217 }
218 
219 /**
220  * atomic_fetch_add_release() - atomic add with release ordering
221  * @i: int value to add
222  * @v: pointer to atomic_t
223  *
224  * Atomically updates @v to (@v + @i) with release ordering.
225  *
226  * Unsafe to use in noinstr code; use raw_atomic_fetch_add_release() there.
227  *
228  * Return: The original value of @v.
229  */
230 static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
232 {
233 	kcsan_release();
234 	instrument_atomic_read_write(v, sizeof(*v));
235 	return raw_atomic_fetch_add_release(i, v);
236 }
237 
238 /**
239  * atomic_fetch_add_relaxed() - atomic add with relaxed ordering
240  * @i: int value to add
241  * @v: pointer to atomic_t
242  *
243  * Atomically updates @v to (@v + @i) with relaxed ordering.
244  *
245  * Unsafe to use in noinstr code; use raw_atomic_fetch_add_relaxed() there.
246  *
247  * Return: The original value of @v.
248  */
249 static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
251 {
252 	instrument_atomic_read_write(v, sizeof(*v));
253 	return raw_atomic_fetch_add_relaxed(i, v);
254 }
255 
256 /**
257  * atomic_sub() - atomic subtract with relaxed ordering
258  * @i: int value to subtract
259  * @v: pointer to atomic_t
260  *
261  * Atomically updates @v to (@v - @i) with relaxed ordering.
262  *
263  * Unsafe to use in noinstr code; use raw_atomic_sub() there.
264  *
265  * Return: Nothing.
266  */
267 static __always_inline void
atomic_sub(int i, atomic_t *v)
269 {
270 	instrument_atomic_read_write(v, sizeof(*v));
271 	raw_atomic_sub(i, v);
272 }
273 
274 /**
275  * atomic_sub_return() - atomic subtract with full ordering
276  * @i: int value to subtract
277  * @v: pointer to atomic_t
278  *
279  * Atomically updates @v to (@v - @i) with full ordering.
280  *
281  * Unsafe to use in noinstr code; use raw_atomic_sub_return() there.
282  *
283  * Return: The updated value of @v.
284  */
285 static __always_inline int
atomic_sub_return(int i, atomic_t *v)
287 {
288 	kcsan_mb();
289 	instrument_atomic_read_write(v, sizeof(*v));
290 	return raw_atomic_sub_return(i, v);
291 }
292 
293 /**
294  * atomic_sub_return_acquire() - atomic subtract with acquire ordering
295  * @i: int value to subtract
296  * @v: pointer to atomic_t
297  *
298  * Atomically updates @v to (@v - @i) with acquire ordering.
299  *
300  * Unsafe to use in noinstr code; use raw_atomic_sub_return_acquire() there.
301  *
302  * Return: The updated value of @v.
303  */
304 static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
306 {
307 	instrument_atomic_read_write(v, sizeof(*v));
308 	return raw_atomic_sub_return_acquire(i, v);
309 }
310 
311 /**
312  * atomic_sub_return_release() - atomic subtract with release ordering
313  * @i: int value to subtract
314  * @v: pointer to atomic_t
315  *
316  * Atomically updates @v to (@v - @i) with release ordering.
317  *
318  * Unsafe to use in noinstr code; use raw_atomic_sub_return_release() there.
319  *
320  * Return: The updated value of @v.
321  */
322 static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
324 {
325 	kcsan_release();
326 	instrument_atomic_read_write(v, sizeof(*v));
327 	return raw_atomic_sub_return_release(i, v);
328 }
329 
330 /**
331  * atomic_sub_return_relaxed() - atomic subtract with relaxed ordering
332  * @i: int value to subtract
333  * @v: pointer to atomic_t
334  *
335  * Atomically updates @v to (@v - @i) with relaxed ordering.
336  *
337  * Unsafe to use in noinstr code; use raw_atomic_sub_return_relaxed() there.
338  *
339  * Return: The updated value of @v.
340  */
341 static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
343 {
344 	instrument_atomic_read_write(v, sizeof(*v));
345 	return raw_atomic_sub_return_relaxed(i, v);
346 }
347 
348 /**
349  * atomic_fetch_sub() - atomic subtract with full ordering
350  * @i: int value to subtract
351  * @v: pointer to atomic_t
352  *
353  * Atomically updates @v to (@v - @i) with full ordering.
354  *
355  * Unsafe to use in noinstr code; use raw_atomic_fetch_sub() there.
356  *
357  * Return: The original value of @v.
358  */
359 static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
361 {
362 	kcsan_mb();
363 	instrument_atomic_read_write(v, sizeof(*v));
364 	return raw_atomic_fetch_sub(i, v);
365 }
366 
367 /**
368  * atomic_fetch_sub_acquire() - atomic subtract with acquire ordering
369  * @i: int value to subtract
370  * @v: pointer to atomic_t
371  *
372  * Atomically updates @v to (@v - @i) with acquire ordering.
373  *
374  * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_acquire() there.
375  *
376  * Return: The original value of @v.
377  */
378 static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
380 {
381 	instrument_atomic_read_write(v, sizeof(*v));
382 	return raw_atomic_fetch_sub_acquire(i, v);
383 }
384 
385 /**
386  * atomic_fetch_sub_release() - atomic subtract with release ordering
387  * @i: int value to subtract
388  * @v: pointer to atomic_t
389  *
390  * Atomically updates @v to (@v - @i) with release ordering.
391  *
392  * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_release() there.
393  *
394  * Return: The original value of @v.
395  */
396 static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
398 {
399 	kcsan_release();
400 	instrument_atomic_read_write(v, sizeof(*v));
401 	return raw_atomic_fetch_sub_release(i, v);
402 }
403 
404 /**
405  * atomic_fetch_sub_relaxed() - atomic subtract with relaxed ordering
406  * @i: int value to subtract
407  * @v: pointer to atomic_t
408  *
409  * Atomically updates @v to (@v - @i) with relaxed ordering.
410  *
411  * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_relaxed() there.
412  *
413  * Return: The original value of @v.
414  */
415 static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
417 {
418 	instrument_atomic_read_write(v, sizeof(*v));
419 	return raw_atomic_fetch_sub_relaxed(i, v);
420 }
421 
422 /**
423  * atomic_inc() - atomic increment with relaxed ordering
424  * @v: pointer to atomic_t
425  *
426  * Atomically updates @v to (@v + 1) with relaxed ordering.
427  *
428  * Unsafe to use in noinstr code; use raw_atomic_inc() there.
429  *
430  * Return: Nothing.
431  */
432 static __always_inline void
atomic_inc(atomic_t *v)
434 {
435 	instrument_atomic_read_write(v, sizeof(*v));
436 	raw_atomic_inc(v);
437 }
438 
439 /**
440  * atomic_inc_return() - atomic increment with full ordering
441  * @v: pointer to atomic_t
442  *
443  * Atomically updates @v to (@v + 1) with full ordering.
444  *
445  * Unsafe to use in noinstr code; use raw_atomic_inc_return() there.
446  *
447  * Return: The updated value of @v.
448  */
449 static __always_inline int
atomic_inc_return(atomic_t *v)
451 {
452 	kcsan_mb();
453 	instrument_atomic_read_write(v, sizeof(*v));
454 	return raw_atomic_inc_return(v);
455 }
456 
457 /**
458  * atomic_inc_return_acquire() - atomic increment with acquire ordering
459  * @v: pointer to atomic_t
460  *
461  * Atomically updates @v to (@v + 1) with acquire ordering.
462  *
463  * Unsafe to use in noinstr code; use raw_atomic_inc_return_acquire() there.
464  *
465  * Return: The updated value of @v.
466  */
467 static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
469 {
470 	instrument_atomic_read_write(v, sizeof(*v));
471 	return raw_atomic_inc_return_acquire(v);
472 }
473 
474 /**
475  * atomic_inc_return_release() - atomic increment with release ordering
476  * @v: pointer to atomic_t
477  *
478  * Atomically updates @v to (@v + 1) with release ordering.
479  *
480  * Unsafe to use in noinstr code; use raw_atomic_inc_return_release() there.
481  *
482  * Return: The updated value of @v.
483  */
484 static __always_inline int
atomic_inc_return_release(atomic_t *v)
486 {
487 	kcsan_release();
488 	instrument_atomic_read_write(v, sizeof(*v));
489 	return raw_atomic_inc_return_release(v);
490 }
491 
492 /**
493  * atomic_inc_return_relaxed() - atomic increment with relaxed ordering
494  * @v: pointer to atomic_t
495  *
496  * Atomically updates @v to (@v + 1) with relaxed ordering.
497  *
498  * Unsafe to use in noinstr code; use raw_atomic_inc_return_relaxed() there.
499  *
500  * Return: The updated value of @v.
501  */
502 static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
504 {
505 	instrument_atomic_read_write(v, sizeof(*v));
506 	return raw_atomic_inc_return_relaxed(v);
507 }
508 
509 /**
510  * atomic_fetch_inc() - atomic increment with full ordering
511  * @v: pointer to atomic_t
512  *
513  * Atomically updates @v to (@v + 1) with full ordering.
514  *
515  * Unsafe to use in noinstr code; use raw_atomic_fetch_inc() there.
516  *
517  * Return: The original value of @v.
518  */
519 static __always_inline int
atomic_fetch_inc(atomic_t *v)
521 {
522 	kcsan_mb();
523 	instrument_atomic_read_write(v, sizeof(*v));
524 	return raw_atomic_fetch_inc(v);
525 }
526 
527 /**
528  * atomic_fetch_inc_acquire() - atomic increment with acquire ordering
529  * @v: pointer to atomic_t
530  *
531  * Atomically updates @v to (@v + 1) with acquire ordering.
532  *
533  * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_acquire() there.
534  *
535  * Return: The original value of @v.
536  */
537 static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
539 {
540 	instrument_atomic_read_write(v, sizeof(*v));
541 	return raw_atomic_fetch_inc_acquire(v);
542 }
543 
544 /**
545  * atomic_fetch_inc_release() - atomic increment with release ordering
546  * @v: pointer to atomic_t
547  *
548  * Atomically updates @v to (@v + 1) with release ordering.
549  *
550  * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_release() there.
551  *
552  * Return: The original value of @v.
553  */
554 static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
556 {
557 	kcsan_release();
558 	instrument_atomic_read_write(v, sizeof(*v));
559 	return raw_atomic_fetch_inc_release(v);
560 }
561 
562 /**
563  * atomic_fetch_inc_relaxed() - atomic increment with relaxed ordering
564  * @v: pointer to atomic_t
565  *
566  * Atomically updates @v to (@v + 1) with relaxed ordering.
567  *
568  * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_relaxed() there.
569  *
570  * Return: The original value of @v.
571  */
572 static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
574 {
575 	instrument_atomic_read_write(v, sizeof(*v));
576 	return raw_atomic_fetch_inc_relaxed(v);
577 }
578 
579 /**
580  * atomic_dec() - atomic decrement with relaxed ordering
581  * @v: pointer to atomic_t
582  *
583  * Atomically updates @v to (@v - 1) with relaxed ordering.
584  *
585  * Unsafe to use in noinstr code; use raw_atomic_dec() there.
586  *
587  * Return: Nothing.
588  */
589 static __always_inline void
atomic_dec(atomic_t *v)
591 {
592 	instrument_atomic_read_write(v, sizeof(*v));
593 	raw_atomic_dec(v);
594 }
595 
596 /**
597  * atomic_dec_return() - atomic decrement with full ordering
598  * @v: pointer to atomic_t
599  *
600  * Atomically updates @v to (@v - 1) with full ordering.
601  *
602  * Unsafe to use in noinstr code; use raw_atomic_dec_return() there.
603  *
604  * Return: The updated value of @v.
605  */
606 static __always_inline int
atomic_dec_return(atomic_t *v)
608 {
609 	kcsan_mb();
610 	instrument_atomic_read_write(v, sizeof(*v));
611 	return raw_atomic_dec_return(v);
612 }
613 
614 /**
615  * atomic_dec_return_acquire() - atomic decrement with acquire ordering
616  * @v: pointer to atomic_t
617  *
618  * Atomically updates @v to (@v - 1) with acquire ordering.
619  *
620  * Unsafe to use in noinstr code; use raw_atomic_dec_return_acquire() there.
621  *
622  * Return: The updated value of @v.
623  */
624 static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
626 {
627 	instrument_atomic_read_write(v, sizeof(*v));
628 	return raw_atomic_dec_return_acquire(v);
629 }
630 
631 /**
632  * atomic_dec_return_release() - atomic decrement with release ordering
633  * @v: pointer to atomic_t
634  *
635  * Atomically updates @v to (@v - 1) with release ordering.
636  *
637  * Unsafe to use in noinstr code; use raw_atomic_dec_return_release() there.
638  *
639  * Return: The updated value of @v.
640  */
641 static __always_inline int
atomic_dec_return_release(atomic_t *v)
643 {
644 	kcsan_release();
645 	instrument_atomic_read_write(v, sizeof(*v));
646 	return raw_atomic_dec_return_release(v);
647 }
648 
649 /**
650  * atomic_dec_return_relaxed() - atomic decrement with relaxed ordering
651  * @v: pointer to atomic_t
652  *
653  * Atomically updates @v to (@v - 1) with relaxed ordering.
654  *
655  * Unsafe to use in noinstr code; use raw_atomic_dec_return_relaxed() there.
656  *
657  * Return: The updated value of @v.
658  */
659 static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
661 {
662 	instrument_atomic_read_write(v, sizeof(*v));
663 	return raw_atomic_dec_return_relaxed(v);
664 }
665 
666 /**
667  * atomic_fetch_dec() - atomic decrement with full ordering
668  * @v: pointer to atomic_t
669  *
670  * Atomically updates @v to (@v - 1) with full ordering.
671  *
672  * Unsafe to use in noinstr code; use raw_atomic_fetch_dec() there.
673  *
674  * Return: The original value of @v.
675  */
676 static __always_inline int
atomic_fetch_dec(atomic_t *v)
678 {
679 	kcsan_mb();
680 	instrument_atomic_read_write(v, sizeof(*v));
681 	return raw_atomic_fetch_dec(v);
682 }
683 
684 /**
685  * atomic_fetch_dec_acquire() - atomic decrement with acquire ordering
686  * @v: pointer to atomic_t
687  *
688  * Atomically updates @v to (@v - 1) with acquire ordering.
689  *
690  * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_acquire() there.
691  *
692  * Return: The original value of @v.
693  */
694 static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
696 {
697 	instrument_atomic_read_write(v, sizeof(*v));
698 	return raw_atomic_fetch_dec_acquire(v);
699 }
700 
701 /**
702  * atomic_fetch_dec_release() - atomic decrement with release ordering
703  * @v: pointer to atomic_t
704  *
705  * Atomically updates @v to (@v - 1) with release ordering.
706  *
707  * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_release() there.
708  *
709  * Return: The original value of @v.
710  */
711 static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
713 {
714 	kcsan_release();
715 	instrument_atomic_read_write(v, sizeof(*v));
716 	return raw_atomic_fetch_dec_release(v);
717 }
718 
719 /**
720  * atomic_fetch_dec_relaxed() - atomic decrement with relaxed ordering
721  * @v: pointer to atomic_t
722  *
723  * Atomically updates @v to (@v - 1) with relaxed ordering.
724  *
725  * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_relaxed() there.
726  *
727  * Return: The original value of @v.
728  */
729 static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
731 {
732 	instrument_atomic_read_write(v, sizeof(*v));
733 	return raw_atomic_fetch_dec_relaxed(v);
734 }
735 
736 /**
737  * atomic_and() - atomic bitwise AND with relaxed ordering
738  * @i: int value
739  * @v: pointer to atomic_t
740  *
741  * Atomically updates @v to (@v & @i) with relaxed ordering.
742  *
743  * Unsafe to use in noinstr code; use raw_atomic_and() there.
744  *
745  * Return: Nothing.
746  */
747 static __always_inline void
atomic_and(int i, atomic_t *v)
749 {
750 	instrument_atomic_read_write(v, sizeof(*v));
751 	raw_atomic_and(i, v);
752 }
753 
754 /**
755  * atomic_fetch_and() - atomic bitwise AND with full ordering
756  * @i: int value
757  * @v: pointer to atomic_t
758  *
759  * Atomically updates @v to (@v & @i) with full ordering.
760  *
761  * Unsafe to use in noinstr code; use raw_atomic_fetch_and() there.
762  *
763  * Return: The original value of @v.
764  */
765 static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
767 {
768 	kcsan_mb();
769 	instrument_atomic_read_write(v, sizeof(*v));
770 	return raw_atomic_fetch_and(i, v);
771 }
772 
773 /**
774  * atomic_fetch_and_acquire() - atomic bitwise AND with acquire ordering
775  * @i: int value
776  * @v: pointer to atomic_t
777  *
778  * Atomically updates @v to (@v & @i) with acquire ordering.
779  *
780  * Unsafe to use in noinstr code; use raw_atomic_fetch_and_acquire() there.
781  *
782  * Return: The original value of @v.
783  */
784 static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
786 {
787 	instrument_atomic_read_write(v, sizeof(*v));
788 	return raw_atomic_fetch_and_acquire(i, v);
789 }
790 
791 /**
792  * atomic_fetch_and_release() - atomic bitwise AND with release ordering
793  * @i: int value
794  * @v: pointer to atomic_t
795  *
796  * Atomically updates @v to (@v & @i) with release ordering.
797  *
798  * Unsafe to use in noinstr code; use raw_atomic_fetch_and_release() there.
799  *
800  * Return: The original value of @v.
801  */
802 static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
804 {
805 	kcsan_release();
806 	instrument_atomic_read_write(v, sizeof(*v));
807 	return raw_atomic_fetch_and_release(i, v);
808 }
809 
810 /**
811  * atomic_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
812  * @i: int value
813  * @v: pointer to atomic_t
814  *
815  * Atomically updates @v to (@v & @i) with relaxed ordering.
816  *
817  * Unsafe to use in noinstr code; use raw_atomic_fetch_and_relaxed() there.
818  *
819  * Return: The original value of @v.
820  */
821 static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
823 {
824 	instrument_atomic_read_write(v, sizeof(*v));
825 	return raw_atomic_fetch_and_relaxed(i, v);
826 }
827 
828 /**
829  * atomic_andnot() - atomic bitwise AND NOT with relaxed ordering
830  * @i: int value
831  * @v: pointer to atomic_t
832  *
833  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
834  *
835  * Unsafe to use in noinstr code; use raw_atomic_andnot() there.
836  *
837  * Return: Nothing.
838  */
839 static __always_inline void
atomic_andnot(int i, atomic_t *v)
841 {
842 	instrument_atomic_read_write(v, sizeof(*v));
843 	raw_atomic_andnot(i, v);
844 }
845 
846 /**
847  * atomic_fetch_andnot() - atomic bitwise AND NOT with full ordering
848  * @i: int value
849  * @v: pointer to atomic_t
850  *
851  * Atomically updates @v to (@v & ~@i) with full ordering.
852  *
853  * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot() there.
854  *
855  * Return: The original value of @v.
856  */
857 static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
859 {
860 	kcsan_mb();
861 	instrument_atomic_read_write(v, sizeof(*v));
862 	return raw_atomic_fetch_andnot(i, v);
863 }
864 
865 /**
866  * atomic_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
867  * @i: int value
868  * @v: pointer to atomic_t
869  *
870  * Atomically updates @v to (@v & ~@i) with acquire ordering.
871  *
872  * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_acquire() there.
873  *
874  * Return: The original value of @v.
875  */
876 static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
878 {
879 	instrument_atomic_read_write(v, sizeof(*v));
880 	return raw_atomic_fetch_andnot_acquire(i, v);
881 }
882 
883 /**
884  * atomic_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
885  * @i: int value
886  * @v: pointer to atomic_t
887  *
888  * Atomically updates @v to (@v & ~@i) with release ordering.
889  *
890  * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_release() there.
891  *
892  * Return: The original value of @v.
893  */
894 static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
896 {
897 	kcsan_release();
898 	instrument_atomic_read_write(v, sizeof(*v));
899 	return raw_atomic_fetch_andnot_release(i, v);
900 }
901 
902 /**
903  * atomic_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
904  * @i: int value
905  * @v: pointer to atomic_t
906  *
907  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
908  *
909  * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_relaxed() there.
910  *
911  * Return: The original value of @v.
912  */
913 static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
915 {
916 	instrument_atomic_read_write(v, sizeof(*v));
917 	return raw_atomic_fetch_andnot_relaxed(i, v);
918 }
919 
920 /**
921  * atomic_or() - atomic bitwise OR with relaxed ordering
922  * @i: int value
923  * @v: pointer to atomic_t
924  *
925  * Atomically updates @v to (@v | @i) with relaxed ordering.
926  *
927  * Unsafe to use in noinstr code; use raw_atomic_or() there.
928  *
929  * Return: Nothing.
930  */
931 static __always_inline void
atomic_or(int i, atomic_t *v)
933 {
934 	instrument_atomic_read_write(v, sizeof(*v));
935 	raw_atomic_or(i, v);
936 }
937 
938 /**
939  * atomic_fetch_or() - atomic bitwise OR with full ordering
940  * @i: int value
941  * @v: pointer to atomic_t
942  *
943  * Atomically updates @v to (@v | @i) with full ordering.
944  *
945  * Unsafe to use in noinstr code; use raw_atomic_fetch_or() there.
946  *
947  * Return: The original value of @v.
948  */
949 static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
951 {
952 	kcsan_mb();
953 	instrument_atomic_read_write(v, sizeof(*v));
954 	return raw_atomic_fetch_or(i, v);
955 }
956 
957 /**
958  * atomic_fetch_or_acquire() - atomic bitwise OR with acquire ordering
959  * @i: int value
960  * @v: pointer to atomic_t
961  *
962  * Atomically updates @v to (@v | @i) with acquire ordering.
963  *
964  * Unsafe to use in noinstr code; use raw_atomic_fetch_or_acquire() there.
965  *
966  * Return: The original value of @v.
967  */
968 static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
970 {
971 	instrument_atomic_read_write(v, sizeof(*v));
972 	return raw_atomic_fetch_or_acquire(i, v);
973 }
974 
975 /**
976  * atomic_fetch_or_release() - atomic bitwise OR with release ordering
977  * @i: int value
978  * @v: pointer to atomic_t
979  *
980  * Atomically updates @v to (@v | @i) with release ordering.
981  *
982  * Unsafe to use in noinstr code; use raw_atomic_fetch_or_release() there.
983  *
984  * Return: The original value of @v.
985  */
986 static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
988 {
989 	kcsan_release();
990 	instrument_atomic_read_write(v, sizeof(*v));
991 	return raw_atomic_fetch_or_release(i, v);
992 }
993 
994 /**
995  * atomic_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
996  * @i: int value
997  * @v: pointer to atomic_t
998  *
999  * Atomically updates @v to (@v | @i) with relaxed ordering.
1000  *
1001  * Unsafe to use in noinstr code; use raw_atomic_fetch_or_relaxed() there.
1002  *
1003  * Return: The original value of @v.
1004  */
1005 static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
1007 {
1008 	instrument_atomic_read_write(v, sizeof(*v));
1009 	return raw_atomic_fetch_or_relaxed(i, v);
1010 }
1011 
1012 /**
1013  * atomic_xor() - atomic bitwise XOR with relaxed ordering
1014  * @i: int value
1015  * @v: pointer to atomic_t
1016  *
1017  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1018  *
1019  * Unsafe to use in noinstr code; use raw_atomic_xor() there.
1020  *
1021  * Return: Nothing.
1022  */
1023 static __always_inline void
atomic_xor(int i, atomic_t *v)
1025 {
1026 	instrument_atomic_read_write(v, sizeof(*v));
1027 	raw_atomic_xor(i, v);
1028 }
1029 
1030 /**
1031  * atomic_fetch_xor() - atomic bitwise XOR with full ordering
1032  * @i: int value
1033  * @v: pointer to atomic_t
1034  *
1035  * Atomically updates @v to (@v ^ @i) with full ordering.
1036  *
1037  * Unsafe to use in noinstr code; use raw_atomic_fetch_xor() there.
1038  *
1039  * Return: The original value of @v.
1040  */
1041 static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
1043 {
1044 	kcsan_mb();
1045 	instrument_atomic_read_write(v, sizeof(*v));
1046 	return raw_atomic_fetch_xor(i, v);
1047 }
1048 
1049 /**
1050  * atomic_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
1051  * @i: int value
1052  * @v: pointer to atomic_t
1053  *
1054  * Atomically updates @v to (@v ^ @i) with acquire ordering.
1055  *
1056  * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_acquire() there.
1057  *
1058  * Return: The original value of @v.
1059  */
1060 static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
1062 {
1063 	instrument_atomic_read_write(v, sizeof(*v));
1064 	return raw_atomic_fetch_xor_acquire(i, v);
1065 }
1066 
1067 /**
1068  * atomic_fetch_xor_release() - atomic bitwise XOR with release ordering
1069  * @i: int value
1070  * @v: pointer to atomic_t
1071  *
1072  * Atomically updates @v to (@v ^ @i) with release ordering.
1073  *
1074  * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_release() there.
1075  *
1076  * Return: The original value of @v.
1077  */
1078 static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
1080 {
1081 	kcsan_release();
1082 	instrument_atomic_read_write(v, sizeof(*v));
1083 	return raw_atomic_fetch_xor_release(i, v);
1084 }
1085 
1086 /**
1087  * atomic_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
1088  * @i: int value
1089  * @v: pointer to atomic_t
1090  *
1091  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1092  *
1093  * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_relaxed() there.
1094  *
1095  * Return: The original value of @v.
1096  */
1097 static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
1099 {
1100 	instrument_atomic_read_write(v, sizeof(*v));
1101 	return raw_atomic_fetch_xor_relaxed(i, v);
1102 }
1103 
1104 /**
1105  * atomic_xchg() - atomic exchange with full ordering
1106  * @v: pointer to atomic_t
1107  * @new: int value to assign
1108  *
1109  * Atomically updates @v to @new with full ordering.
1110  *
1111  * Unsafe to use in noinstr code; use raw_atomic_xchg() there.
1112  *
1113  * Return: The original value of @v.
1114  */
1115 static __always_inline int
atomic_xchg(atomic_t *v, int new)
1117 {
1118 	kcsan_mb();
1119 	instrument_atomic_read_write(v, sizeof(*v));
1120 	return raw_atomic_xchg(v, new);
1121 }
1122 
1123 /**
1124  * atomic_xchg_acquire() - atomic exchange with acquire ordering
1125  * @v: pointer to atomic_t
1126  * @new: int value to assign
1127  *
1128  * Atomically updates @v to @new with acquire ordering.
1129  *
1130  * Unsafe to use in noinstr code; use raw_atomic_xchg_acquire() there.
1131  *
1132  * Return: The original value of @v.
1133  */
1134 static __always_inline int
atomic_xchg_acquire(atomic_t *v, int new)
1136 {
1137 	instrument_atomic_read_write(v, sizeof(*v));
1138 	return raw_atomic_xchg_acquire(v, new);
1139 }
1140 
1141 /**
1142  * atomic_xchg_release() - atomic exchange with release ordering
1143  * @v: pointer to atomic_t
1144  * @new: int value to assign
1145  *
1146  * Atomically updates @v to @new with release ordering.
1147  *
1148  * Unsafe to use in noinstr code; use raw_atomic_xchg_release() there.
1149  *
1150  * Return: The original value of @v.
1151  */
1152 static __always_inline int
atomic_xchg_release(atomic_t *v, int new)
1154 {
1155 	kcsan_release();
1156 	instrument_atomic_read_write(v, sizeof(*v));
1157 	return raw_atomic_xchg_release(v, new);
1158 }
1159 
1160 /**
1161  * atomic_xchg_relaxed() - atomic exchange with relaxed ordering
1162  * @v: pointer to atomic_t
1163  * @new: int value to assign
1164  *
1165  * Atomically updates @v to @new with relaxed ordering.
1166  *
1167  * Unsafe to use in noinstr code; use raw_atomic_xchg_relaxed() there.
1168  *
1169  * Return: The original value of @v.
1170  */
1171 static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int new)
1173 {
1174 	instrument_atomic_read_write(v, sizeof(*v));
1175 	return raw_atomic_xchg_relaxed(v, new);
1176 }
1177 
1178 /**
1179  * atomic_cmpxchg() - atomic compare and exchange with full ordering
1180  * @v: pointer to atomic_t
1181  * @old: int value to compare with
1182  * @new: int value to assign
1183  *
1184  * If (@v == @old), atomically updates @v to @new with full ordering.
1185  *
1186  * Unsafe to use in noinstr code; use raw_atomic_cmpxchg() there.
1187  *
1188  * Return: The original value of @v.
1189  */
1190 static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
1192 {
1193 	kcsan_mb();
1194 	instrument_atomic_read_write(v, sizeof(*v));
1195 	return raw_atomic_cmpxchg(v, old, new);
1196 }
1197 
1198 /**
1199  * atomic_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
1200  * @v: pointer to atomic_t
1201  * @old: int value to compare with
1202  * @new: int value to assign
1203  *
1204  * If (@v == @old), atomically updates @v to @new with acquire ordering.
1205  *
1206  * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_acquire() there.
1207  *
1208  * Return: The original value of @v.
1209  */
1210 static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
1212 {
1213 	instrument_atomic_read_write(v, sizeof(*v));
1214 	return raw_atomic_cmpxchg_acquire(v, old, new);
1215 }
1216 
1217 /**
1218  * atomic_cmpxchg_release() - atomic compare and exchange with release ordering
1219  * @v: pointer to atomic_t
1220  * @old: int value to compare with
1221  * @new: int value to assign
1222  *
1223  * If (@v == @old), atomically updates @v to @new with release ordering.
1224  *
1225  * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_release() there.
1226  *
1227  * Return: The original value of @v.
1228  */
1229 static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
1231 {
1232 	kcsan_release();
1233 	instrument_atomic_read_write(v, sizeof(*v));
1234 	return raw_atomic_cmpxchg_release(v, old, new);
1235 }
1236 
1237 /**
1238  * atomic_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
1239  * @v: pointer to atomic_t
1240  * @old: int value to compare with
1241  * @new: int value to assign
1242  *
1243  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
1244  *
1245  * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_relaxed() there.
1246  *
1247  * Return: The original value of @v.
1248  */
1249 static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
1251 {
1252 	instrument_atomic_read_write(v, sizeof(*v));
1253 	return raw_atomic_cmpxchg_relaxed(v, old, new);
1254 }
1255 
1256 /**
1257  * atomic_try_cmpxchg() - atomic compare and exchange with full ordering
1258  * @v: pointer to atomic_t
1259  * @old: pointer to int value to compare with
1260  * @new: int value to assign
1261  *
1262  * If (@v == @old), atomically updates @v to @new with full ordering.
1263  * Otherwise, updates @old to the current value of @v.
1264  *
1265  * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg() there.
1266  *
 * Return: @true if the exchange occurred, @false otherwise.
1268  */
1269 static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1271 {
1272 	kcsan_mb();
1273 	instrument_atomic_read_write(v, sizeof(*v));
1274 	instrument_atomic_read_write(old, sizeof(*old));
1275 	return raw_atomic_try_cmpxchg(v, old, new);
1276 }
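/*
 * Illustrative sketch: on failure atomic_try_cmpxchg() updates *@old to the
 * current value of @v, so the typical update loop reuses that value instead
 * of re-reading @v on every iteration. my_add_clamped() is a hypothetical
 * helper.
 *
 *	static void my_add_clamped(atomic_t *v, int delta, int limit)
 *	{
 *		int old = atomic_read(v);
 *
 *		do {
 *			if (old + delta > limit)
 *				return;
 *		} while (!atomic_try_cmpxchg(v, &old, old + delta));
 *	}
 */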
1277 
1278 /**
1279  * atomic_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
1280  * @v: pointer to atomic_t
1281  * @old: pointer to int value to compare with
1282  * @new: int value to assign
1283  *
1284  * If (@v == @old), atomically updates @v to @new with acquire ordering.
1285  * Otherwise, updates @old to the current value of @v.
1286  *
1287  * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_acquire() there.
1288  *
 * Return: @true if the exchange occurred, @false otherwise.
1290  */
1291 static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1293 {
1294 	instrument_atomic_read_write(v, sizeof(*v));
1295 	instrument_atomic_read_write(old, sizeof(*old));
1296 	return raw_atomic_try_cmpxchg_acquire(v, old, new);
1297 }
1298 
1299 /**
1300  * atomic_try_cmpxchg_release() - atomic compare and exchange with release ordering
1301  * @v: pointer to atomic_t
1302  * @old: pointer to int value to compare with
1303  * @new: int value to assign
1304  *
1305  * If (@v == @old), atomically updates @v to @new with release ordering.
1306  * Otherwise, updates @old to the current value of @v.
1307  *
1308  * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_release() there.
1309  *
 * Return: @true if the exchange occurred, @false otherwise.
1311  */
1312 static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1314 {
1315 	kcsan_release();
1316 	instrument_atomic_read_write(v, sizeof(*v));
1317 	instrument_atomic_read_write(old, sizeof(*old));
1318 	return raw_atomic_try_cmpxchg_release(v, old, new);
1319 }
1320 
1321 /**
1322  * atomic_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
1323  * @v: pointer to atomic_t
1324  * @old: pointer to int value to compare with
1325  * @new: int value to assign
1326  *
1327  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
1328  * Otherwise, updates @old to the current value of @v.
1329  *
1330  * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_relaxed() there.
1331  *
 * Return: @true if the exchange occurred, @false otherwise.
1333  */
1334 static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1336 {
1337 	instrument_atomic_read_write(v, sizeof(*v));
1338 	instrument_atomic_read_write(old, sizeof(*old));
1339 	return raw_atomic_try_cmpxchg_relaxed(v, old, new);
1340 }
1341 
1342 /**
1343  * atomic_sub_and_test() - atomic subtract and test if zero with full ordering
1344  * @i: int value to subtract
1345  * @v: pointer to atomic_t
1346  *
1347  * Atomically updates @v to (@v - @i) with full ordering.
1348  *
1349  * Unsafe to use in noinstr code; use raw_atomic_sub_and_test() there.
1350  *
1351  * Return: @true if the resulting value of @v is zero, @false otherwise.
1352  */
1353 static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
1355 {
1356 	kcsan_mb();
1357 	instrument_atomic_read_write(v, sizeof(*v));
1358 	return raw_atomic_sub_and_test(i, v);
1359 }
1360 
1361 /**
1362  * atomic_dec_and_test() - atomic decrement and test if zero with full ordering
1363  * @v: pointer to atomic_t
1364  *
1365  * Atomically updates @v to (@v - 1) with full ordering.
1366  *
1367  * Unsafe to use in noinstr code; use raw_atomic_dec_and_test() there.
1368  *
1369  * Return: @true if the resulting value of @v is zero, @false otherwise.
1370  */
1371 static __always_inline bool
atomic_dec_and_test(atomic_t *v)
1373 {
1374 	kcsan_mb();
1375 	instrument_atomic_read_write(v, sizeof(*v));
1376 	return raw_atomic_dec_and_test(v);
1377 }
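/*
 * Illustrative sketch: the full ordering of atomic_dec_and_test() makes it
 * suitable for the "free the object on the last put" pattern, ensuring all
 * earlier accesses to the object happen before it is freed. struct my_obj
 * and my_obj_put() are hypothetical.
 *
 *	struct my_obj {
 *		atomic_t refs;
 *	};
 *
 *	static void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refs))
 *			kfree(obj);
 *	}
 */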
1378 
1379 /**
1380  * atomic_inc_and_test() - atomic increment and test if zero with full ordering
1381  * @v: pointer to atomic_t
1382  *
1383  * Atomically updates @v to (@v + 1) with full ordering.
1384  *
1385  * Unsafe to use in noinstr code; use raw_atomic_inc_and_test() there.
1386  *
1387  * Return: @true if the resulting value of @v is zero, @false otherwise.
1388  */
1389 static __always_inline bool
atomic_inc_and_test(atomic_t *v)
1391 {
1392 	kcsan_mb();
1393 	instrument_atomic_read_write(v, sizeof(*v));
1394 	return raw_atomic_inc_and_test(v);
1395 }
1396 
1397 /**
1398  * atomic_add_negative() - atomic add and test if negative with full ordering
1399  * @i: int value to add
1400  * @v: pointer to atomic_t
1401  *
1402  * Atomically updates @v to (@v + @i) with full ordering.
1403  *
1404  * Unsafe to use in noinstr code; use raw_atomic_add_negative() there.
1405  *
1406  * Return: @true if the resulting value of @v is negative, @false otherwise.
1407  */
1408 static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
1410 {
1411 	kcsan_mb();
1412 	instrument_atomic_read_write(v, sizeof(*v));
1413 	return raw_atomic_add_negative(i, v);
1414 }
1415 
1416 /**
1417  * atomic_add_negative_acquire() - atomic add and test if negative with acquire ordering
1418  * @i: int value to add
1419  * @v: pointer to atomic_t
1420  *
1421  * Atomically updates @v to (@v + @i) with acquire ordering.
1422  *
1423  * Unsafe to use in noinstr code; use raw_atomic_add_negative_acquire() there.
1424  *
1425  * Return: @true if the resulting value of @v is negative, @false otherwise.
1426  */
1427 static __always_inline bool
atomic_add_negative_acquire(int i, atomic_t *v)
1429 {
1430 	instrument_atomic_read_write(v, sizeof(*v));
1431 	return raw_atomic_add_negative_acquire(i, v);
1432 }
1433 
1434 /**
1435  * atomic_add_negative_release() - atomic add and test if negative with release ordering
1436  * @i: int value to add
1437  * @v: pointer to atomic_t
1438  *
1439  * Atomically updates @v to (@v + @i) with release ordering.
1440  *
1441  * Unsafe to use in noinstr code; use raw_atomic_add_negative_release() there.
1442  *
1443  * Return: @true if the resulting value of @v is negative, @false otherwise.
1444  */
1445 static __always_inline bool
atomic_add_negative_release(int i, atomic_t *v)
1447 {
1448 	kcsan_release();
1449 	instrument_atomic_read_write(v, sizeof(*v));
1450 	return raw_atomic_add_negative_release(i, v);
1451 }
1452 
1453 /**
1454  * atomic_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
1455  * @i: int value to add
1456  * @v: pointer to atomic_t
1457  *
1458  * Atomically updates @v to (@v + @i) with relaxed ordering.
1459  *
1460  * Unsafe to use in noinstr code; use raw_atomic_add_negative_relaxed() there.
1461  *
1462  * Return: @true if the resulting value of @v is negative, @false otherwise.
1463  */
1464 static __always_inline bool
atomic_add_negative_relaxed(int i, atomic_t *v)
1466 {
1467 	instrument_atomic_read_write(v, sizeof(*v));
1468 	return raw_atomic_add_negative_relaxed(i, v);
1469 }
1470 
1471 /**
1472  * atomic_fetch_add_unless() - atomic add unless value with full ordering
1473  * @v: pointer to atomic_t
1474  * @a: int value to add
1475  * @u: int value to compare with
1476  *
1477  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
1478  *
1479  * Unsafe to use in noinstr code; use raw_atomic_fetch_add_unless() there.
1480  *
1481  * Return: The original value of @v.
1482  */
1483 static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
1485 {
1486 	kcsan_mb();
1487 	instrument_atomic_read_write(v, sizeof(*v));
1488 	return raw_atomic_fetch_add_unless(v, a, u);
1489 }
1490 
1491 /**
1492  * atomic_add_unless() - atomic add unless value with full ordering
1493  * @v: pointer to atomic_t
1494  * @a: int value to add
1495  * @u: int value to compare with
1496  *
1497  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
1498  *
1499  * Unsafe to use in noinstr code; use raw_atomic_add_unless() there.
1500  *
1501  * Return: @true if @v was updated, @false otherwise.
1502  */
1503 static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
1505 {
1506 	kcsan_mb();
1507 	instrument_atomic_read_write(v, sizeof(*v));
1508 	return raw_atomic_add_unless(v, a, u);
1509 }
1510 
1511 /**
1512  * atomic_inc_not_zero() - atomic increment unless zero with full ordering
1513  * @v: pointer to atomic_t
1514  *
1515  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
1516  *
1517  * Unsafe to use in noinstr code; use raw_atomic_inc_not_zero() there.
1518  *
1519  * Return: @true if @v was updated, @false otherwise.
1520  */
1521 static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
1523 {
1524 	kcsan_mb();
1525 	instrument_atomic_read_write(v, sizeof(*v));
1526 	return raw_atomic_inc_not_zero(v);
1527 }
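/*
 * Illustrative sketch: atomic_inc_not_zero() takes a reference only if the
 * count has not already dropped to zero, the usual "try to get" counterpart
 * to the my_obj_put() sketch above. my_obj_tryget() is a hypothetical
 * helper.
 *
 *	static bool my_obj_tryget(struct my_obj *obj)
 *	{
 *		return atomic_inc_not_zero(&obj->refs);
 *	}
 */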
1528 
1529 /**
1530  * atomic_inc_unless_negative() - atomic increment unless negative with full ordering
1531  * @v: pointer to atomic_t
1532  *
1533  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
1534  *
1535  * Unsafe to use in noinstr code; use raw_atomic_inc_unless_negative() there.
1536  *
1537  * Return: @true if @v was updated, @false otherwise.
1538  */
1539 static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
1541 {
1542 	kcsan_mb();
1543 	instrument_atomic_read_write(v, sizeof(*v));
1544 	return raw_atomic_inc_unless_negative(v);
1545 }
1546 
1547 /**
1548  * atomic_dec_unless_positive() - atomic decrement unless positive with full ordering
1549  * @v: pointer to atomic_t
1550  *
1551  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
1552  *
1553  * Unsafe to use in noinstr code; use raw_atomic_dec_unless_positive() there.
1554  *
1555  * Return: @true if @v was updated, @false otherwise.
1556  */
1557 static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
1559 {
1560 	kcsan_mb();
1561 	instrument_atomic_read_write(v, sizeof(*v));
1562 	return raw_atomic_dec_unless_positive(v);
1563 }
1564 
1565 /**
1566  * atomic_dec_if_positive() - atomic decrement if positive with full ordering
1567  * @v: pointer to atomic_t
1568  *
1569  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
1570  *
1571  * Unsafe to use in noinstr code; use raw_atomic_dec_if_positive() there.
1572  *
1573  * Return: The old value of (@v - 1), regardless of whether @v was updated.
1574  */
1575 static __always_inline int
atomic_dec_if_positive(atomic_t *v)
1577 {
1578 	kcsan_mb();
1579 	instrument_atomic_read_write(v, sizeof(*v));
1580 	return raw_atomic_dec_if_positive(v);
1581 }
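/*
 * Illustrative sketch: atomic_dec_if_positive() returns the decremented
 * value even when @v was not updated, so callers check the sign of the
 * result. my_take_token() is a hypothetical helper managing a simple
 * token budget.
 *
 *	static bool my_take_token(atomic_t *budget)
 *	{
 *		return atomic_dec_if_positive(budget) >= 0;
 *	}
 */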
1582 
1583 /**
1584  * atomic64_read() - atomic load with relaxed ordering
1585  * @v: pointer to atomic64_t
1586  *
1587  * Atomically loads the value of @v with relaxed ordering.
1588  *
1589  * Unsafe to use in noinstr code; use raw_atomic64_read() there.
1590  *
1591  * Return: The value loaded from @v.
1592  */
1593 static __always_inline s64
atomic64_read(const atomic64_t *v)
1595 {
1596 	instrument_atomic_read(v, sizeof(*v));
1597 	return raw_atomic64_read(v);
1598 }
1599 
1600 /**
1601  * atomic64_read_acquire() - atomic load with acquire ordering
1602  * @v: pointer to atomic64_t
1603  *
1604  * Atomically loads the value of @v with acquire ordering.
1605  *
1606  * Unsafe to use in noinstr code; use raw_atomic64_read_acquire() there.
1607  *
1608  * Return: The value loaded from @v.
1609  */
1610 static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
1612 {
1613 	instrument_atomic_read(v, sizeof(*v));
1614 	return raw_atomic64_read_acquire(v);
1615 }
1616 
1617 /**
1618  * atomic64_set() - atomic set with relaxed ordering
1619  * @v: pointer to atomic64_t
1620  * @i: s64 value to assign
1621  *
1622  * Atomically sets @v to @i with relaxed ordering.
1623  *
1624  * Unsafe to use in noinstr code; use raw_atomic64_set() there.
1625  *
1626  * Return: Nothing.
1627  */
1628 static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
1630 {
1631 	instrument_atomic_write(v, sizeof(*v));
1632 	raw_atomic64_set(v, i);
1633 }
1634 
1635 /**
1636  * atomic64_set_release() - atomic set with release ordering
1637  * @v: pointer to atomic64_t
1638  * @i: s64 value to assign
1639  *
1640  * Atomically sets @v to @i with release ordering.
1641  *
1642  * Unsafe to use in noinstr code; use raw_atomic64_set_release() there.
1643  *
1644  * Return: Nothing.
1645  */
1646 static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
1648 {
1649 	kcsan_release();
1650 	instrument_atomic_write(v, sizeof(*v));
1651 	raw_atomic64_set_release(v, i);
1652 }
1653 
1654 /**
1655  * atomic64_add() - atomic add with relaxed ordering
1656  * @i: s64 value to add
1657  * @v: pointer to atomic64_t
1658  *
1659  * Atomically updates @v to (@v + @i) with relaxed ordering.
1660  *
1661  * Unsafe to use in noinstr code; use raw_atomic64_add() there.
1662  *
1663  * Return: Nothing.
1664  */
1665 static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
1667 {
1668 	instrument_atomic_read_write(v, sizeof(*v));
1669 	raw_atomic64_add(i, v);
1670 }
1671 
1672 /**
1673  * atomic64_add_return() - atomic add with full ordering
1674  * @i: s64 value to add
1675  * @v: pointer to atomic64_t
1676  *
1677  * Atomically updates @v to (@v + @i) with full ordering.
1678  *
1679  * Unsafe to use in noinstr code; use raw_atomic64_add_return() there.
1680  *
1681  * Return: The updated value of @v.
1682  */
1683 static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
1685 {
1686 	kcsan_mb();
1687 	instrument_atomic_read_write(v, sizeof(*v));
1688 	return raw_atomic64_add_return(i, v);
1689 }
1690 
1691 /**
1692  * atomic64_add_return_acquire() - atomic add with acquire ordering
1693  * @i: s64 value to add
1694  * @v: pointer to atomic64_t
1695  *
1696  * Atomically updates @v to (@v + @i) with acquire ordering.
1697  *
1698  * Unsafe to use in noinstr code; use raw_atomic64_add_return_acquire() there.
1699  *
1700  * Return: The updated value of @v.
1701  */
1702 static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
1704 {
1705 	instrument_atomic_read_write(v, sizeof(*v));
1706 	return raw_atomic64_add_return_acquire(i, v);
1707 }
1708 
1709 /**
1710  * atomic64_add_return_release() - atomic add with release ordering
1711  * @i: s64 value to add
1712  * @v: pointer to atomic64_t
1713  *
1714  * Atomically updates @v to (@v + @i) with release ordering.
1715  *
1716  * Unsafe to use in noinstr code; use raw_atomic64_add_return_release() there.
1717  *
1718  * Return: The updated value of @v.
1719  */
1720 static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
1722 {
1723 	kcsan_release();
1724 	instrument_atomic_read_write(v, sizeof(*v));
1725 	return raw_atomic64_add_return_release(i, v);
1726 }
1727 
1728 /**
1729  * atomic64_add_return_relaxed() - atomic add with relaxed ordering
1730  * @i: s64 value to add
1731  * @v: pointer to atomic64_t
1732  *
1733  * Atomically updates @v to (@v + @i) with relaxed ordering.
1734  *
1735  * Unsafe to use in noinstr code; use raw_atomic64_add_return_relaxed() there.
1736  *
1737  * Return: The updated value of @v.
1738  */
1739 static __always_inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
1741 {
1742 	instrument_atomic_read_write(v, sizeof(*v));
1743 	return raw_atomic64_add_return_relaxed(i, v);
1744 }
1745 
1746 /**
1747  * atomic64_fetch_add() - atomic add with full ordering
1748  * @i: s64 value to add
1749  * @v: pointer to atomic64_t
1750  *
1751  * Atomically updates @v to (@v + @i) with full ordering.
1752  *
1753  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add() there.
1754  *
1755  * Return: The original value of @v.
1756  */
1757 static __always_inline s64
1758 atomic64_fetch_add(s64 i, atomic64_t *v)
1759 {
1760 	kcsan_mb();
1761 	instrument_atomic_read_write(v, sizeof(*v));
1762 	return raw_atomic64_fetch_add(i, v);
1763 }
1764 
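/*
 * Illustrative note: the fetch_*() forms return the value observed before
 * the update, while the *_return() forms return the updated value. A
 * hypothetical sequence-number allocator where the first caller should
 * get 0 therefore wants fetch_add():
 *
 *	u64 my_next_seq(atomic64_t *seq)
 *	{
 *		return (u64)atomic64_fetch_add(1, seq);
 *	}
 */
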
1765 /**
1766  * atomic64_fetch_add_acquire() - atomic add with acquire ordering
1767  * @i: s64 value to add
1768  * @v: pointer to atomic64_t
1769  *
1770  * Atomically updates @v to (@v + @i) with acquire ordering.
1771  *
1772  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_acquire() there.
1773  *
1774  * Return: The original value of @v.
1775  */
1776 static __always_inline s64
1777 atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
1778 {
1779 	instrument_atomic_read_write(v, sizeof(*v));
1780 	return raw_atomic64_fetch_add_acquire(i, v);
1781 }
1782 
1783 /**
1784  * atomic64_fetch_add_release() - atomic add with release ordering
1785  * @i: s64 value to add
1786  * @v: pointer to atomic64_t
1787  *
1788  * Atomically updates @v to (@v + @i) with release ordering.
1789  *
1790  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_release() there.
1791  *
1792  * Return: The original value of @v.
1793  */
1794 static __always_inline s64
1795 atomic64_fetch_add_release(s64 i, atomic64_t *v)
1796 {
1797 	kcsan_release();
1798 	instrument_atomic_read_write(v, sizeof(*v));
1799 	return raw_atomic64_fetch_add_release(i, v);
1800 }
1801 
1802 /**
1803  * atomic64_fetch_add_relaxed() - atomic add with relaxed ordering
1804  * @i: s64 value to add
1805  * @v: pointer to atomic64_t
1806  *
1807  * Atomically updates @v to (@v + @i) with relaxed ordering.
1808  *
1809  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_relaxed() there.
1810  *
1811  * Return: The original value of @v.
1812  */
1813 static __always_inline s64
1814 atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
1815 {
1816 	instrument_atomic_read_write(v, sizeof(*v));
1817 	return raw_atomic64_fetch_add_relaxed(i, v);
1818 }
1819 
1820 /**
1821  * atomic64_sub() - atomic subtract with relaxed ordering
1822  * @i: s64 value to subtract
1823  * @v: pointer to atomic64_t
1824  *
1825  * Atomically updates @v to (@v - @i) with relaxed ordering.
1826  *
1827  * Unsafe to use in noinstr code; use raw_atomic64_sub() there.
1828  *
1829  * Return: Nothing.
1830  */
1831 static __always_inline void
1832 atomic64_sub(s64 i, atomic64_t *v)
1833 {
1834 	instrument_atomic_read_write(v, sizeof(*v));
1835 	raw_atomic64_sub(i, v);
1836 }
1837 
1838 /**
1839  * atomic64_sub_return() - atomic subtract with full ordering
1840  * @i: s64 value to subtract
1841  * @v: pointer to atomic64_t
1842  *
1843  * Atomically updates @v to (@v - @i) with full ordering.
1844  *
1845  * Unsafe to use in noinstr code; use raw_atomic64_sub_return() there.
1846  *
1847  * Return: The updated value of @v.
1848  */
1849 static __always_inline s64
1850 atomic64_sub_return(s64 i, atomic64_t *v)
1851 {
1852 	kcsan_mb();
1853 	instrument_atomic_read_write(v, sizeof(*v));
1854 	return raw_atomic64_sub_return(i, v);
1855 }
1856 
1857 /**
1858  * atomic64_sub_return_acquire() - atomic subtract with acquire ordering
1859  * @i: s64 value to subtract
1860  * @v: pointer to atomic64_t
1861  *
1862  * Atomically updates @v to (@v - @i) with acquire ordering.
1863  *
1864  * Unsafe to use in noinstr code; use raw_atomic64_sub_return_acquire() there.
1865  *
1866  * Return: The updated value of @v.
1867  */
1868 static __always_inline s64
1869 atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1870 {
1871 	instrument_atomic_read_write(v, sizeof(*v));
1872 	return raw_atomic64_sub_return_acquire(i, v);
1873 }
1874 
1875 /**
1876  * atomic64_sub_return_release() - atomic subtract with release ordering
1877  * @i: s64 value to subtract
1878  * @v: pointer to atomic64_t
1879  *
1880  * Atomically updates @v to (@v - @i) with release ordering.
1881  *
1882  * Unsafe to use in noinstr code; use raw_atomic64_sub_return_release() there.
1883  *
1884  * Return: The updated value of @v.
1885  */
1886 static __always_inline s64
1887 atomic64_sub_return_release(s64 i, atomic64_t *v)
1888 {
1889 	kcsan_release();
1890 	instrument_atomic_read_write(v, sizeof(*v));
1891 	return raw_atomic64_sub_return_release(i, v);
1892 }
1893 
1894 /**
1895  * atomic64_sub_return_relaxed() - atomic subtract with relaxed ordering
1896  * @i: s64 value to subtract
1897  * @v: pointer to atomic64_t
1898  *
1899  * Atomically updates @v to (@v - @i) with relaxed ordering.
1900  *
1901  * Unsafe to use in noinstr code; use raw_atomic64_sub_return_relaxed() there.
1902  *
1903  * Return: The updated value of @v.
1904  */
1905 static __always_inline s64
1906 atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
1907 {
1908 	instrument_atomic_read_write(v, sizeof(*v));
1909 	return raw_atomic64_sub_return_relaxed(i, v);
1910 }
1911 
1912 /**
1913  * atomic64_fetch_sub() - atomic subtract with full ordering
1914  * @i: s64 value to subtract
1915  * @v: pointer to atomic64_t
1916  *
1917  * Atomically updates @v to (@v - @i) with full ordering.
1918  *
1919  * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub() there.
1920  *
1921  * Return: The original value of @v.
1922  */
1923 static __always_inline s64
1924 atomic64_fetch_sub(s64 i, atomic64_t *v)
1925 {
1926 	kcsan_mb();
1927 	instrument_atomic_read_write(v, sizeof(*v));
1928 	return raw_atomic64_fetch_sub(i, v);
1929 }
1930 
1931 /**
1932  * atomic64_fetch_sub_acquire() - atomic subtract with acquire ordering
1933  * @i: s64 value to subtract
1934  * @v: pointer to atomic64_t
1935  *
1936  * Atomically updates @v to (@v - @i) with acquire ordering.
1937  *
1938  * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_acquire() there.
1939  *
1940  * Return: The original value of @v.
1941  */
1942 static __always_inline s64
1943 atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1944 {
1945 	instrument_atomic_read_write(v, sizeof(*v));
1946 	return raw_atomic64_fetch_sub_acquire(i, v);
1947 }
1948 
1949 /**
1950  * atomic64_fetch_sub_release() - atomic subtract with release ordering
1951  * @i: s64 value to subtract
1952  * @v: pointer to atomic64_t
1953  *
1954  * Atomically updates @v to (@v - @i) with release ordering.
1955  *
1956  * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_release() there.
1957  *
1958  * Return: The original value of @v.
1959  */
1960 static __always_inline s64
1961 atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1962 {
1963 	kcsan_release();
1964 	instrument_atomic_read_write(v, sizeof(*v));
1965 	return raw_atomic64_fetch_sub_release(i, v);
1966 }
1967 
1968 /**
1969  * atomic64_fetch_sub_relaxed() - atomic subtract with relaxed ordering
1970  * @i: s64 value to subtract
1971  * @v: pointer to atomic64_t
1972  *
1973  * Atomically updates @v to (@v - @i) with relaxed ordering.
1974  *
1975  * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_relaxed() there.
1976  *
1977  * Return: The original value of @v.
1978  */
1979 static __always_inline s64
1980 atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
1981 {
1982 	instrument_atomic_read_write(v, sizeof(*v));
1983 	return raw_atomic64_fetch_sub_relaxed(i, v);
1984 }
1985 
1986 /**
1987  * atomic64_inc() - atomic increment with relaxed ordering
1988  * @v: pointer to atomic64_t
1989  *
1990  * Atomically updates @v to (@v + 1) with relaxed ordering.
1991  *
1992  * Unsafe to use in noinstr code; use raw_atomic64_inc() there.
1993  *
1994  * Return: Nothing.
1995  */
1996 static __always_inline void
1997 atomic64_inc(atomic64_t *v)
1998 {
1999 	instrument_atomic_read_write(v, sizeof(*v));
2000 	raw_atomic64_inc(v);
2001 }
2002 
2003 /**
2004  * atomic64_inc_return() - atomic increment with full ordering
2005  * @v: pointer to atomic64_t
2006  *
2007  * Atomically updates @v to (@v + 1) with full ordering.
2008  *
2009  * Unsafe to use in noinstr code; use raw_atomic64_inc_return() there.
2010  *
2011  * Return: The updated value of @v.
2012  */
2013 static __always_inline s64
2014 atomic64_inc_return(atomic64_t *v)
2015 {
2016 	kcsan_mb();
2017 	instrument_atomic_read_write(v, sizeof(*v));
2018 	return raw_atomic64_inc_return(v);
2019 }
2020 
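/*
 * Illustrative sketch (hypothetical counter): inc_return() is a common
 * way to hand out distinct, monotonically increasing IDs; every caller
 * sees a unique updated value.
 *
 *	u64 id = (u64)atomic64_inc_return(&my_next_id);	// first caller gets 1
 */
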
2021 /**
2022  * atomic64_inc_return_acquire() - atomic increment with acquire ordering
2023  * @v: pointer to atomic64_t
2024  *
2025  * Atomically updates @v to (@v + 1) with acquire ordering.
2026  *
2027  * Unsafe to use in noinstr code; use raw_atomic64_inc_return_acquire() there.
2028  *
2029  * Return: The updated value of @v.
2030  */
2031 static __always_inline s64
2032 atomic64_inc_return_acquire(atomic64_t *v)
2033 {
2034 	instrument_atomic_read_write(v, sizeof(*v));
2035 	return raw_atomic64_inc_return_acquire(v);
2036 }
2037 
2038 /**
2039  * atomic64_inc_return_release() - atomic increment with release ordering
2040  * @v: pointer to atomic64_t
2041  *
2042  * Atomically updates @v to (@v + 1) with release ordering.
2043  *
2044  * Unsafe to use in noinstr code; use raw_atomic64_inc_return_release() there.
2045  *
2046  * Return: The updated value of @v.
2047  */
2048 static __always_inline s64
2049 atomic64_inc_return_release(atomic64_t *v)
2050 {
2051 	kcsan_release();
2052 	instrument_atomic_read_write(v, sizeof(*v));
2053 	return raw_atomic64_inc_return_release(v);
2054 }
2055 
2056 /**
2057  * atomic64_inc_return_relaxed() - atomic increment with relaxed ordering
2058  * @v: pointer to atomic64_t
2059  *
2060  * Atomically updates @v to (@v + 1) with relaxed ordering.
2061  *
2062  * Unsafe to use in noinstr code; use raw_atomic64_inc_return_relaxed() there.
2063  *
2064  * Return: The updated value of @v.
2065  */
2066 static __always_inline s64
2067 atomic64_inc_return_relaxed(atomic64_t *v)
2068 {
2069 	instrument_atomic_read_write(v, sizeof(*v));
2070 	return raw_atomic64_inc_return_relaxed(v);
2071 }
2072 
2073 /**
2074  * atomic64_fetch_inc() - atomic increment with full ordering
2075  * @v: pointer to atomic64_t
2076  *
2077  * Atomically updates @v to (@v + 1) with full ordering.
2078  *
2079  * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc() there.
2080  *
2081  * Return: The original value of @v.
2082  */
2083 static __always_inline s64
2084 atomic64_fetch_inc(atomic64_t *v)
2085 {
2086 	kcsan_mb();
2087 	instrument_atomic_read_write(v, sizeof(*v));
2088 	return raw_atomic64_fetch_inc(v);
2089 }
2090 
2091 /**
2092  * atomic64_fetch_inc_acquire() - atomic increment with acquire ordering
2093  * @v: pointer to atomic64_t
2094  *
2095  * Atomically updates @v to (@v + 1) with acquire ordering.
2096  *
2097  * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_acquire() there.
2098  *
2099  * Return: The original value of @v.
2100  */
2101 static __always_inline s64
2102 atomic64_fetch_inc_acquire(atomic64_t *v)
2103 {
2104 	instrument_atomic_read_write(v, sizeof(*v));
2105 	return raw_atomic64_fetch_inc_acquire(v);
2106 }
2107 
2108 /**
2109  * atomic64_fetch_inc_release() - atomic increment with release ordering
2110  * @v: pointer to atomic64_t
2111  *
2112  * Atomically updates @v to (@v + 1) with release ordering.
2113  *
2114  * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_release() there.
2115  *
2116  * Return: The original value of @v.
2117  */
2118 static __always_inline s64
2119 atomic64_fetch_inc_release(atomic64_t *v)
2120 {
2121 	kcsan_release();
2122 	instrument_atomic_read_write(v, sizeof(*v));
2123 	return raw_atomic64_fetch_inc_release(v);
2124 }
2125 
2126 /**
2127  * atomic64_fetch_inc_relaxed() - atomic increment with relaxed ordering
2128  * @v: pointer to atomic64_t
2129  *
2130  * Atomically updates @v to (@v + 1) with relaxed ordering.
2131  *
2132  * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_relaxed() there.
2133  *
2134  * Return: The original value of @v.
2135  */
2136 static __always_inline s64
2137 atomic64_fetch_inc_relaxed(atomic64_t *v)
2138 {
2139 	instrument_atomic_read_write(v, sizeof(*v));
2140 	return raw_atomic64_fetch_inc_relaxed(v);
2141 }
2142 
2143 /**
2144  * atomic64_dec() - atomic decrement with relaxed ordering
2145  * @v: pointer to atomic64_t
2146  *
2147  * Atomically updates @v to (@v - 1) with relaxed ordering.
2148  *
2149  * Unsafe to use in noinstr code; use raw_atomic64_dec() there.
2150  *
2151  * Return: Nothing.
2152  */
2153 static __always_inline void
2154 atomic64_dec(atomic64_t *v)
2155 {
2156 	instrument_atomic_read_write(v, sizeof(*v));
2157 	raw_atomic64_dec(v);
2158 }
2159 
2160 /**
2161  * atomic64_dec_return() - atomic decrement with full ordering
2162  * @v: pointer to atomic64_t
2163  *
2164  * Atomically updates @v to (@v - 1) with full ordering.
2165  *
2166  * Unsafe to use in noinstr code; use raw_atomic64_dec_return() there.
2167  *
2168  * Return: The updated value of @v.
2169  */
2170 static __always_inline s64
2171 atomic64_dec_return(atomic64_t *v)
2172 {
2173 	kcsan_mb();
2174 	instrument_atomic_read_write(v, sizeof(*v));
2175 	return raw_atomic64_dec_return(v);
2176 }
2177 
2178 /**
2179  * atomic64_dec_return_acquire() - atomic decrement with acquire ordering
2180  * @v: pointer to atomic64_t
2181  *
2182  * Atomically updates @v to (@v - 1) with acquire ordering.
2183  *
2184  * Unsafe to use in noinstr code; use raw_atomic64_dec_return_acquire() there.
2185  *
2186  * Return: The updated value of @v.
2187  */
2188 static __always_inline s64
2189 atomic64_dec_return_acquire(atomic64_t *v)
2190 {
2191 	instrument_atomic_read_write(v, sizeof(*v));
2192 	return raw_atomic64_dec_return_acquire(v);
2193 }
2194 
2195 /**
2196  * atomic64_dec_return_release() - atomic decrement with release ordering
2197  * @v: pointer to atomic64_t
2198  *
2199  * Atomically updates @v to (@v - 1) with release ordering.
2200  *
2201  * Unsafe to use in noinstr code; use raw_atomic64_dec_return_release() there.
2202  *
2203  * Return: The updated value of @v.
2204  */
2205 static __always_inline s64
2206 atomic64_dec_return_release(atomic64_t *v)
2207 {
2208 	kcsan_release();
2209 	instrument_atomic_read_write(v, sizeof(*v));
2210 	return raw_atomic64_dec_return_release(v);
2211 }
2212 
2213 /**
2214  * atomic64_dec_return_relaxed() - atomic decrement with relaxed ordering
2215  * @v: pointer to atomic64_t
2216  *
2217  * Atomically updates @v to (@v - 1) with relaxed ordering.
2218  *
2219  * Unsafe to use in noinstr code; use raw_atomic64_dec_return_relaxed() there.
2220  *
2221  * Return: The updated value of @v.
2222  */
2223 static __always_inline s64
2224 atomic64_dec_return_relaxed(atomic64_t *v)
2225 {
2226 	instrument_atomic_read_write(v, sizeof(*v));
2227 	return raw_atomic64_dec_return_relaxed(v);
2228 }
2229 
2230 /**
2231  * atomic64_fetch_dec() - atomic decrement with full ordering
2232  * @v: pointer to atomic64_t
2233  *
2234  * Atomically updates @v to (@v - 1) with full ordering.
2235  *
2236  * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec() there.
2237  *
2238  * Return: The original value of @v.
2239  */
2240 static __always_inline s64
2241 atomic64_fetch_dec(atomic64_t *v)
2242 {
2243 	kcsan_mb();
2244 	instrument_atomic_read_write(v, sizeof(*v));
2245 	return raw_atomic64_fetch_dec(v);
2246 }
2247 
2248 /**
2249  * atomic64_fetch_dec_acquire() - atomic decrement with acquire ordering
2250  * @v: pointer to atomic64_t
2251  *
2252  * Atomically updates @v to (@v - 1) with acquire ordering.
2253  *
2254  * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_acquire() there.
2255  *
2256  * Return: The original value of @v.
2257  */
2258 static __always_inline s64
2259 atomic64_fetch_dec_acquire(atomic64_t *v)
2260 {
2261 	instrument_atomic_read_write(v, sizeof(*v));
2262 	return raw_atomic64_fetch_dec_acquire(v);
2263 }
2264 
2265 /**
2266  * atomic64_fetch_dec_release() - atomic decrement with release ordering
2267  * @v: pointer to atomic64_t
2268  *
2269  * Atomically updates @v to (@v - 1) with release ordering.
2270  *
2271  * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_release() there.
2272  *
2273  * Return: The original value of @v.
2274  */
2275 static __always_inline s64
2276 atomic64_fetch_dec_release(atomic64_t *v)
2277 {
2278 	kcsan_release();
2279 	instrument_atomic_read_write(v, sizeof(*v));
2280 	return raw_atomic64_fetch_dec_release(v);
2281 }
2282 
2283 /**
2284  * atomic64_fetch_dec_relaxed() - atomic decrement with relaxed ordering
2285  * @v: pointer to atomic64_t
2286  *
2287  * Atomically updates @v to (@v - 1) with relaxed ordering.
2288  *
2289  * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_relaxed() there.
2290  *
2291  * Return: The original value of @v.
2292  */
2293 static __always_inline s64
2294 atomic64_fetch_dec_relaxed(atomic64_t *v)
2295 {
2296 	instrument_atomic_read_write(v, sizeof(*v));
2297 	return raw_atomic64_fetch_dec_relaxed(v);
2298 }
2299 
2300 /**
2301  * atomic64_and() - atomic bitwise AND with relaxed ordering
2302  * @i: s64 value
2303  * @v: pointer to atomic64_t
2304  *
2305  * Atomically updates @v to (@v & @i) with relaxed ordering.
2306  *
2307  * Unsafe to use in noinstr code; use raw_atomic64_and() there.
2308  *
2309  * Return: Nothing.
2310  */
2311 static __always_inline void
2312 atomic64_and(s64 i, atomic64_t *v)
2313 {
2314 	instrument_atomic_read_write(v, sizeof(*v));
2315 	raw_atomic64_and(i, v);
2316 }
2317 
2318 /**
2319  * atomic64_fetch_and() - atomic bitwise AND with full ordering
2320  * @i: s64 value
2321  * @v: pointer to atomic64_t
2322  *
2323  * Atomically updates @v to (@v & @i) with full ordering.
2324  *
2325  * Unsafe to use in noinstr code; use raw_atomic64_fetch_and() there.
2326  *
2327  * Return: The original value of @v.
2328  */
2329 static __always_inline s64
2330 atomic64_fetch_and(s64 i, atomic64_t *v)
2331 {
2332 	kcsan_mb();
2333 	instrument_atomic_read_write(v, sizeof(*v));
2334 	return raw_atomic64_fetch_and(i, v);
2335 }
2336 
2337 /**
2338  * atomic64_fetch_and_acquire() - atomic bitwise AND with acquire ordering
2339  * @i: s64 value
2340  * @v: pointer to atomic64_t
2341  *
2342  * Atomically updates @v to (@v & @i) with acquire ordering.
2343  *
2344  * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_acquire() there.
2345  *
2346  * Return: The original value of @v.
2347  */
2348 static __always_inline s64
2349 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
2350 {
2351 	instrument_atomic_read_write(v, sizeof(*v));
2352 	return raw_atomic64_fetch_and_acquire(i, v);
2353 }
2354 
2355 /**
2356  * atomic64_fetch_and_release() - atomic bitwise AND with release ordering
2357  * @i: s64 value
2358  * @v: pointer to atomic64_t
2359  *
2360  * Atomically updates @v to (@v & @i) with release ordering.
2361  *
2362  * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_release() there.
2363  *
2364  * Return: The original value of @v.
2365  */
2366 static __always_inline s64
2367 atomic64_fetch_and_release(s64 i, atomic64_t *v)
2368 {
2369 	kcsan_release();
2370 	instrument_atomic_read_write(v, sizeof(*v));
2371 	return raw_atomic64_fetch_and_release(i, v);
2372 }
2373 
2374 /**
2375  * atomic64_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
2376  * @i: s64 value
2377  * @v: pointer to atomic64_t
2378  *
2379  * Atomically updates @v to (@v & @i) with relaxed ordering.
2380  *
2381  * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_relaxed() there.
2382  *
2383  * Return: The original value of @v.
2384  */
2385 static __always_inline s64
2386 atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
2387 {
2388 	instrument_atomic_read_write(v, sizeof(*v));
2389 	return raw_atomic64_fetch_and_relaxed(i, v);
2390 }
2391 
2392 /**
2393  * atomic64_andnot() - atomic bitwise AND NOT with relaxed ordering
2394  * @i: s64 value
2395  * @v: pointer to atomic64_t
2396  *
2397  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
2398  *
2399  * Unsafe to use in noinstr code; use raw_atomic64_andnot() there.
2400  *
2401  * Return: Nothing.
2402  */
2403 static __always_inline void
2404 atomic64_andnot(s64 i, atomic64_t *v)
2405 {
2406 	instrument_atomic_read_write(v, sizeof(*v));
2407 	raw_atomic64_andnot(i, v);
2408 }
2409 
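/*
 * Illustrative sketch: the and/or/andnot ops are typically used on
 * lock-free flag words. The flag name and @obj are hypothetical.
 *
 *	#define MY_FLAG_BUSY	BIT_ULL(3)
 *
 *	atomic64_or(MY_FLAG_BUSY, &obj->flags);		// set the flag
 *	atomic64_andnot(MY_FLAG_BUSY, &obj->flags);	// clear it again
 */
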
2410 /**
2411  * atomic64_fetch_andnot() - atomic bitwise AND NOT with full ordering
2412  * @i: s64 value
2413  * @v: pointer to atomic64_t
2414  *
2415  * Atomically updates @v to (@v & ~@i) with full ordering.
2416  *
2417  * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot() there.
2418  *
2419  * Return: The original value of @v.
2420  */
2421 static __always_inline s64
2422 atomic64_fetch_andnot(s64 i, atomic64_t *v)
2423 {
2424 	kcsan_mb();
2425 	instrument_atomic_read_write(v, sizeof(*v));
2426 	return raw_atomic64_fetch_andnot(i, v);
2427 }
2428 
2429 /**
2430  * atomic64_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
2431  * @i: s64 value
2432  * @v: pointer to atomic64_t
2433  *
2434  * Atomically updates @v to (@v & ~@i) with acquire ordering.
2435  *
2436  * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_acquire() there.
2437  *
2438  * Return: The original value of @v.
2439  */
2440 static __always_inline s64
2441 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
2442 {
2443 	instrument_atomic_read_write(v, sizeof(*v));
2444 	return raw_atomic64_fetch_andnot_acquire(i, v);
2445 }
2446 
2447 /**
2448  * atomic64_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
2449  * @i: s64 value
2450  * @v: pointer to atomic64_t
2451  *
2452  * Atomically updates @v to (@v & ~@i) with release ordering.
2453  *
2454  * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_release() there.
2455  *
2456  * Return: The original value of @v.
2457  */
2458 static __always_inline s64
2459 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
2460 {
2461 	kcsan_release();
2462 	instrument_atomic_read_write(v, sizeof(*v));
2463 	return raw_atomic64_fetch_andnot_release(i, v);
2464 }
2465 
2466 /**
2467  * atomic64_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
2468  * @i: s64 value
2469  * @v: pointer to atomic64_t
2470  *
2471  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
2472  *
2473  * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_relaxed() there.
2474  *
2475  * Return: The original value of @v.
2476  */
2477 static __always_inline s64
2478 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
2479 {
2480 	instrument_atomic_read_write(v, sizeof(*v));
2481 	return raw_atomic64_fetch_andnot_relaxed(i, v);
2482 }
2483 
2484 /**
2485  * atomic64_or() - atomic bitwise OR with relaxed ordering
2486  * @i: s64 value
2487  * @v: pointer to atomic64_t
2488  *
2489  * Atomically updates @v to (@v | @i) with relaxed ordering.
2490  *
2491  * Unsafe to use in noinstr code; use raw_atomic64_or() there.
2492  *
2493  * Return: Nothing.
2494  */
2495 static __always_inline void
2496 atomic64_or(s64 i, atomic64_t *v)
2497 {
2498 	instrument_atomic_read_write(v, sizeof(*v));
2499 	raw_atomic64_or(i, v);
2500 }
2501 
2502 /**
2503  * atomic64_fetch_or() - atomic bitwise OR with full ordering
2504  * @i: s64 value
2505  * @v: pointer to atomic64_t
2506  *
2507  * Atomically updates @v to (@v | @i) with full ordering.
2508  *
2509  * Unsafe to use in noinstr code; use raw_atomic64_fetch_or() there.
2510  *
2511  * Return: The original value of @v.
2512  */
2513 static __always_inline s64
2514 atomic64_fetch_or(s64 i, atomic64_t *v)
2515 {
2516 	kcsan_mb();
2517 	instrument_atomic_read_write(v, sizeof(*v));
2518 	return raw_atomic64_fetch_or(i, v);
2519 }
2520 
2521 /**
2522  * atomic64_fetch_or_acquire() - atomic bitwise OR with acquire ordering
2523  * @i: s64 value
2524  * @v: pointer to atomic64_t
2525  *
2526  * Atomically updates @v to (@v | @i) with acquire ordering.
2527  *
2528  * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_acquire() there.
2529  *
2530  * Return: The original value of @v.
2531  */
2532 static __always_inline s64
2533 atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
2534 {
2535 	instrument_atomic_read_write(v, sizeof(*v));
2536 	return raw_atomic64_fetch_or_acquire(i, v);
2537 }
2538 
2539 /**
2540  * atomic64_fetch_or_release() - atomic bitwise OR with release ordering
2541  * @i: s64 value
2542  * @v: pointer to atomic64_t
2543  *
2544  * Atomically updates @v to (@v | @i) with release ordering.
2545  *
2546  * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_release() there.
2547  *
2548  * Return: The original value of @v.
2549  */
2550 static __always_inline s64
2551 atomic64_fetch_or_release(s64 i, atomic64_t *v)
2552 {
2553 	kcsan_release();
2554 	instrument_atomic_read_write(v, sizeof(*v));
2555 	return raw_atomic64_fetch_or_release(i, v);
2556 }
2557 
2558 /**
2559  * atomic64_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
2560  * @i: s64 value
2561  * @v: pointer to atomic64_t
2562  *
2563  * Atomically updates @v to (@v | @i) with relaxed ordering.
2564  *
2565  * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_relaxed() there.
2566  *
2567  * Return: The original value of @v.
2568  */
2569 static __always_inline s64
2570 atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
2571 {
2572 	instrument_atomic_read_write(v, sizeof(*v));
2573 	return raw_atomic64_fetch_or_relaxed(i, v);
2574 }
2575 
2576 /**
2577  * atomic64_xor() - atomic bitwise XOR with relaxed ordering
2578  * @i: s64 value
2579  * @v: pointer to atomic64_t
2580  *
2581  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
2582  *
2583  * Unsafe to use in noinstr code; use raw_atomic64_xor() there.
2584  *
2585  * Return: Nothing.
2586  */
2587 static __always_inline void
2588 atomic64_xor(s64 i, atomic64_t *v)
2589 {
2590 	instrument_atomic_read_write(v, sizeof(*v));
2591 	raw_atomic64_xor(i, v);
2592 }
2593 
2594 /**
2595  * atomic64_fetch_xor() - atomic bitwise XOR with full ordering
2596  * @i: s64 value
2597  * @v: pointer to atomic64_t
2598  *
2599  * Atomically updates @v to (@v ^ @i) with full ordering.
2600  *
2601  * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor() there.
2602  *
2603  * Return: The original value of @v.
2604  */
2605 static __always_inline s64
2606 atomic64_fetch_xor(s64 i, atomic64_t *v)
2607 {
2608 	kcsan_mb();
2609 	instrument_atomic_read_write(v, sizeof(*v));
2610 	return raw_atomic64_fetch_xor(i, v);
2611 }
2612 
2613 /**
2614  * atomic64_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
2615  * @i: s64 value
2616  * @v: pointer to atomic64_t
2617  *
2618  * Atomically updates @v to (@v ^ @i) with acquire ordering.
2619  *
2620  * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_acquire() there.
2621  *
2622  * Return: The original value of @v.
2623  */
2624 static __always_inline s64
2625 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
2626 {
2627 	instrument_atomic_read_write(v, sizeof(*v));
2628 	return raw_atomic64_fetch_xor_acquire(i, v);
2629 }
2630 
2631 /**
2632  * atomic64_fetch_xor_release() - atomic bitwise XOR with release ordering
2633  * @i: s64 value
2634  * @v: pointer to atomic64_t
2635  *
2636  * Atomically updates @v to (@v ^ @i) with release ordering.
2637  *
2638  * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_release() there.
2639  *
2640  * Return: The original value of @v.
2641  */
2642 static __always_inline s64
2643 atomic64_fetch_xor_release(s64 i, atomic64_t *v)
2644 {
2645 	kcsan_release();
2646 	instrument_atomic_read_write(v, sizeof(*v));
2647 	return raw_atomic64_fetch_xor_release(i, v);
2648 }
2649 
2650 /**
2651  * atomic64_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
2652  * @i: s64 value
2653  * @v: pointer to atomic64_t
2654  *
2655  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
2656  *
2657  * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_relaxed() there.
2658  *
2659  * Return: The original value of @v.
2660  */
2661 static __always_inline s64
2662 atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
2663 {
2664 	instrument_atomic_read_write(v, sizeof(*v));
2665 	return raw_atomic64_fetch_xor_relaxed(i, v);
2666 }
2667 
2668 /**
2669  * atomic64_xchg() - atomic exchange with full ordering
2670  * @v: pointer to atomic64_t
2671  * @new: s64 value to assign
2672  *
2673  * Atomically updates @v to @new with full ordering.
2674  *
2675  * Unsafe to use in noinstr code; use raw_atomic64_xchg() there.
2676  *
2677  * Return: The original value of @v.
2678  */
2679 static __always_inline s64
2680 atomic64_xchg(atomic64_t *v, s64 new)
2681 {
2682 	kcsan_mb();
2683 	instrument_atomic_read_write(v, sizeof(*v));
2684 	return raw_atomic64_xchg(v, new);
2685 }
2686 
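/*
 * Illustrative sketch (hypothetical @stats): xchg() gives a simple
 * "read and reset" primitive, e.g. draining an error counter without
 * losing concurrent increments:
 *
 *	s64 errors = atomic64_xchg(&stats->errors, 0);
 */
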
2687 /**
2688  * atomic64_xchg_acquire() - atomic exchange with acquire ordering
2689  * @v: pointer to atomic64_t
2690  * @new: s64 value to assign
2691  *
2692  * Atomically updates @v to @new with acquire ordering.
2693  *
2694  * Unsafe to use in noinstr code; use raw_atomic64_xchg_acquire() there.
2695  *
2696  * Return: The original value of @v.
2697  */
2698 static __always_inline s64
2699 atomic64_xchg_acquire(atomic64_t *v, s64 new)
2700 {
2701 	instrument_atomic_read_write(v, sizeof(*v));
2702 	return raw_atomic64_xchg_acquire(v, new);
2703 }
2704 
2705 /**
2706  * atomic64_xchg_release() - atomic exchange with release ordering
2707  * @v: pointer to atomic64_t
2708  * @new: s64 value to assign
2709  *
2710  * Atomically updates @v to @new with release ordering.
2711  *
2712  * Unsafe to use in noinstr code; use raw_atomic64_xchg_release() there.
2713  *
2714  * Return: The original value of @v.
2715  */
2716 static __always_inline s64
2717 atomic64_xchg_release(atomic64_t *v, s64 new)
2718 {
2719 	kcsan_release();
2720 	instrument_atomic_read_write(v, sizeof(*v));
2721 	return raw_atomic64_xchg_release(v, new);
2722 }
2723 
2724 /**
2725  * atomic64_xchg_relaxed() - atomic exchange with relaxed ordering
2726  * @v: pointer to atomic64_t
2727  * @new: s64 value to assign
2728  *
2729  * Atomically updates @v to @new with relaxed ordering.
2730  *
2731  * Unsafe to use in noinstr code; use raw_atomic64_xchg_relaxed() there.
2732  *
2733  * Return: The original value of @v.
2734  */
2735 static __always_inline s64
2736 atomic64_xchg_relaxed(atomic64_t *v, s64 new)
2737 {
2738 	instrument_atomic_read_write(v, sizeof(*v));
2739 	return raw_atomic64_xchg_relaxed(v, new);
2740 }
2741 
2742 /**
2743  * atomic64_cmpxchg() - atomic compare and exchange with full ordering
2744  * @v: pointer to atomic64_t
2745  * @old: s64 value to compare with
2746  * @new: s64 value to assign
2747  *
2748  * If (@v == @old), atomically updates @v to @new with full ordering.
2749  *
2750  * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg() there.
2751  *
2752  * Return: The original value of @v.
2753  */
2754 static __always_inline s64
2755 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
2756 {
2757 	kcsan_mb();
2758 	instrument_atomic_read_write(v, sizeof(*v));
2759 	return raw_atomic64_cmpxchg(v, old, new);
2760 }
2761 
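/*
 * Illustrative sketch (hypothetical helper): a lock-free "store maximum"
 * built on cmpxchg(). The exchange happened exactly when the returned
 * value equals the @old value that was passed in.
 *
 *	static void my_track_max(atomic64_t *max, s64 val)
 *	{
 *		s64 cur = atomic64_read(max);
 *
 *		while (cur < val) {
 *			s64 seen = atomic64_cmpxchg(max, cur, val);
 *
 *			if (seen == cur)
 *				break;		// @val was installed
 *			cur = seen;		// lost the race, retry
 *		}
 *	}
 */
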
2762 /**
2763  * atomic64_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
2764  * @v: pointer to atomic64_t
2765  * @old: s64 value to compare with
2766  * @new: s64 value to assign
2767  *
2768  * If (@v == @old), atomically updates @v to @new with acquire ordering.
2769  *
2770  * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_acquire() there.
2771  *
2772  * Return: The original value of @v.
2773  */
2774 static __always_inline s64
2775 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
2776 {
2777 	instrument_atomic_read_write(v, sizeof(*v));
2778 	return raw_atomic64_cmpxchg_acquire(v, old, new);
2779 }
2780 
2781 /**
2782  * atomic64_cmpxchg_release() - atomic compare and exchange with release ordering
2783  * @v: pointer to atomic64_t
2784  * @old: s64 value to compare with
2785  * @new: s64 value to assign
2786  *
2787  * If (@v == @old), atomically updates @v to @new with release ordering.
2788  *
2789  * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_release() there.
2790  *
2791  * Return: The original value of @v.
2792  */
2793 static __always_inline s64
2794 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
2795 {
2796 	kcsan_release();
2797 	instrument_atomic_read_write(v, sizeof(*v));
2798 	return raw_atomic64_cmpxchg_release(v, old, new);
2799 }
2800 
2801 /**
2802  * atomic64_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
2803  * @v: pointer to atomic64_t
2804  * @old: s64 value to compare with
2805  * @new: s64 value to assign
2806  *
2807  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
2808  *
2809  * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_relaxed() there.
2810  *
2811  * Return: The original value of @v.
2812  */
2813 static __always_inline s64
2814 atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
2815 {
2816 	instrument_atomic_read_write(v, sizeof(*v));
2817 	return raw_atomic64_cmpxchg_relaxed(v, old, new);
2818 }
2819 
2820 /**
2821  * atomic64_try_cmpxchg() - atomic compare and exchange with full ordering
2822  * @v: pointer to atomic64_t
2823  * @old: pointer to s64 value to compare with
2824  * @new: s64 value to assign
2825  *
2826  * If (@v == @old), atomically updates @v to @new with full ordering.
2827  * Otherwise, updates @old to the current value of @v.
2828  *
2829  * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg() there.
2830  *
2831  * Return: @true if the exchange occurred, @false otherwise.
2832  */
2833 static __always_inline bool
2834 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2835 {
2836 	kcsan_mb();
2837 	instrument_atomic_read_write(v, sizeof(*v));
2838 	instrument_atomic_read_write(old, sizeof(*old));
2839 	return raw_atomic64_try_cmpxchg(v, old, new);
2840 }
2841 
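/*
 * Illustrative sketch (hypothetical saturating counter, @delta assumed
 * non-negative): the usual try_cmpxchg() retry loop. On failure @old is
 * updated to the current value of @v, so the loop does not re-read it.
 *
 *	static void my_add_saturating(atomic64_t *v, s64 delta)
 *	{
 *		s64 old = atomic64_read(v);
 *
 *		do {
 *			if (old > S64_MAX - delta)
 *				return;		// would overflow, leave as-is
 *		} while (!atomic64_try_cmpxchg(v, &old, old + delta));
 *	}
 */
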
2842 /**
2843  * atomic64_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
2844  * @v: pointer to atomic64_t
2845  * @old: pointer to s64 value to compare with
2846  * @new: s64 value to assign
2847  *
2848  * If (@v == @old), atomically updates @v to @new with acquire ordering.
2849  * Otherwise, updates @old to the current value of @v.
2850  *
2851  * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_acquire() there.
2852  *
2853  * Return: @true if the exchange occurred, @false otherwise.
2854  */
2855 static __always_inline bool
2856 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2857 {
2858 	instrument_atomic_read_write(v, sizeof(*v));
2859 	instrument_atomic_read_write(old, sizeof(*old));
2860 	return raw_atomic64_try_cmpxchg_acquire(v, old, new);
2861 }
2862 
2863 /**
2864  * atomic64_try_cmpxchg_release() - atomic compare and exchange with release ordering
2865  * @v: pointer to atomic64_t
2866  * @old: pointer to s64 value to compare with
2867  * @new: s64 value to assign
2868  *
2869  * If (@v == @old), atomically updates @v to @new with release ordering.
2870  * Otherwise, updates @old to the current value of @v.
2871  *
2872  * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_release() there.
2873  *
2874  * Return: @true if the exchange occurred, @false otherwise.
2875  */
2876 static __always_inline bool
2877 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2878 {
2879 	kcsan_release();
2880 	instrument_atomic_read_write(v, sizeof(*v));
2881 	instrument_atomic_read_write(old, sizeof(*old));
2882 	return raw_atomic64_try_cmpxchg_release(v, old, new);
2883 }
2884 
2885 /**
2886  * atomic64_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
2887  * @v: pointer to atomic64_t
2888  * @old: pointer to s64 value to compare with
2889  * @new: s64 value to assign
2890  *
2891  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
2892  * Otherwise, updates @old to the current value of @v.
2893  *
2894  * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_relaxed() there.
2895  *
2896  * Return: @true if the exchange occurred, @false otherwise.
2897  */
2898 static __always_inline bool
2899 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2900 {
2901 	instrument_atomic_read_write(v, sizeof(*v));
2902 	instrument_atomic_read_write(old, sizeof(*old));
2903 	return raw_atomic64_try_cmpxchg_relaxed(v, old, new);
2904 }
2905 
2906 /**
2907  * atomic64_sub_and_test() - atomic subtract and test if zero with full ordering
2908  * @i: s64 value to subtract
2909  * @v: pointer to atomic64_t
2910  *
2911  * Atomically updates @v to (@v - @i) with full ordering.
2912  *
2913  * Unsafe to use in noinstr code; use raw_atomic64_sub_and_test() there.
2914  *
2915  * Return: @true if the resulting value of @v is zero, @false otherwise.
2916  */
2917 static __always_inline bool
2918 atomic64_sub_and_test(s64 i, atomic64_t *v)
2919 {
2920 	kcsan_mb();
2921 	instrument_atomic_read_write(v, sizeof(*v));
2922 	return raw_atomic64_sub_and_test(i, v);
2923 }
2924 
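/*
 * Illustrative sketch: sub_and_test() is the batched counterpart of
 * dec_and_test(); dropping several hypothetical references at once:
 *
 *	if (atomic64_sub_and_test(nr_refs, &obj->refcount))
 *		my_object_free(obj);
 */
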
2925 /**
2926  * atomic64_dec_and_test() - atomic decrement and test if zero with full ordering
2927  * @v: pointer to atomic64_t
2928  *
2929  * Atomically updates @v to (@v - 1) with full ordering.
2930  *
2931  * Unsafe to use in noinstr code; use raw_atomic64_dec_and_test() there.
2932  *
2933  * Return: @true if the resulting value of @v is zero, @false otherwise.
2934  */
2935 static __always_inline bool
2936 atomic64_dec_and_test(atomic64_t *v)
2937 {
2938 	kcsan_mb();
2939 	instrument_atomic_read_write(v, sizeof(*v));
2940 	return raw_atomic64_dec_and_test(v);
2941 }
2942 
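/*
 * Illustrative sketch (hypothetical object type): the canonical
 * reference-count release pattern. The full ordering guarantees that
 * everything done while holding a reference happens before the free.
 *
 *	static void my_object_put(struct my_object *obj)
 *	{
 *		if (atomic64_dec_and_test(&obj->refcount))
 *			my_object_free(obj);
 *	}
 */
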
2943 /**
2944  * atomic64_inc_and_test() - atomic increment and test if zero with full ordering
2945  * @v: pointer to atomic64_t
2946  *
2947  * Atomically updates @v to (@v + 1) with full ordering.
2948  *
2949  * Unsafe to use in noinstr code; use raw_atomic64_inc_and_test() there.
2950  *
2951  * Return: @true if the resulting value of @v is zero, @false otherwise.
2952  */
2953 static __always_inline bool
2954 atomic64_inc_and_test(atomic64_t *v)
2955 {
2956 	kcsan_mb();
2957 	instrument_atomic_read_write(v, sizeof(*v));
2958 	return raw_atomic64_inc_and_test(v);
2959 }
2960 
2961 /**
2962  * atomic64_add_negative() - atomic add and test if negative with full ordering
2963  * @i: s64 value to add
2964  * @v: pointer to atomic64_t
2965  *
2966  * Atomically updates @v to (@v + @i) with full ordering.
2967  *
2968  * Unsafe to use in noinstr code; use raw_atomic64_add_negative() there.
2969  *
2970  * Return: @true if the resulting value of @v is negative, @false otherwise.
2971  */
2972 static __always_inline bool
2973 atomic64_add_negative(s64 i, atomic64_t *v)
2974 {
2975 	kcsan_mb();
2976 	instrument_atomic_read_write(v, sizeof(*v));
2977 	return raw_atomic64_add_negative(i, v);
2978 }
2979 
2980 /**
2981  * atomic64_add_negative_acquire() - atomic add and test if negative with acquire ordering
2982  * @i: s64 value to add
2983  * @v: pointer to atomic64_t
2984  *
2985  * Atomically updates @v to (@v + @i) with acquire ordering.
2986  *
2987  * Unsafe to use in noinstr code; use raw_atomic64_add_negative_acquire() there.
2988  *
2989  * Return: @true if the resulting value of @v is negative, @false otherwise.
2990  */
2991 static __always_inline bool
2992 atomic64_add_negative_acquire(s64 i, atomic64_t *v)
2993 {
2994 	instrument_atomic_read_write(v, sizeof(*v));
2995 	return raw_atomic64_add_negative_acquire(i, v);
2996 }
2997 
2998 /**
2999  * atomic64_add_negative_release() - atomic add and test if negative with release ordering
3000  * @i: s64 value to add
3001  * @v: pointer to atomic64_t
3002  *
3003  * Atomically updates @v to (@v + @i) with release ordering.
3004  *
3005  * Unsafe to use in noinstr code; use raw_atomic64_add_negative_release() there.
3006  *
3007  * Return: @true if the resulting value of @v is negative, @false otherwise.
3008  */
3009 static __always_inline bool
3010 atomic64_add_negative_release(s64 i, atomic64_t *v)
3011 {
3012 	kcsan_release();
3013 	instrument_atomic_read_write(v, sizeof(*v));
3014 	return raw_atomic64_add_negative_release(i, v);
3015 }
3016 
3017 /**
3018  * atomic64_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
3019  * @i: s64 value to add
3020  * @v: pointer to atomic64_t
3021  *
3022  * Atomically updates @v to (@v + @i) with relaxed ordering.
3023  *
3024  * Unsafe to use in noinstr code; use raw_atomic64_add_negative_relaxed() there.
3025  *
3026  * Return: @true if the resulting value of @v is negative, @false otherwise.
3027  */
3028 static __always_inline bool
3029 atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
3030 {
3031 	instrument_atomic_read_write(v, sizeof(*v));
3032 	return raw_atomic64_add_negative_relaxed(i, v);
3033 }
3034 
3035 /**
3036  * atomic64_fetch_add_unless() - atomic add unless value with full ordering
3037  * @v: pointer to atomic64_t
3038  * @a: s64 value to add
3039  * @u: s64 value to compare with
3040  *
3041  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
3042  *
3043  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_unless() there.
3044  *
3045  * Return: The original value of @v.
3046  */
3047 static __always_inline s64
3048 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
3049 {
3050 	kcsan_mb();
3051 	instrument_atomic_read_write(v, sizeof(*v));
3052 	return raw_atomic64_fetch_add_unless(v, a, u);
3053 }
3054 
3055 /**
3056  * atomic64_add_unless() - atomic add unless value with full ordering
3057  * @v: pointer to atomic64_t
3058  * @a: s64 value to add
3059  * @u: s64 value to compare with
3060  *
3061  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
3062  *
3063  * Unsafe to use in noinstr code; use raw_atomic64_add_unless() there.
3064  *
3065  * Return: @true if @v was updated, @false otherwise.
3066  */
3067 static __always_inline bool
3068 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
3069 {
3070 	kcsan_mb();
3071 	instrument_atomic_read_write(v, sizeof(*v));
3072 	return raw_atomic64_add_unless(v, a, u);
3073 }
3074 
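/*
 * Illustrative sketch (hypothetical usage counter): add_unless() applies
 * the addition only while the counter is not pinned at the sentinel
 * value @u:
 *
 *	if (!atomic64_add_unless(&obj->users, 1, -1))
 *		return -EBUSY;	// object already marked for teardown
 */
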
3075 /**
3076  * atomic64_inc_not_zero() - atomic increment unless zero with full ordering
3077  * @v: pointer to atomic64_t
3078  *
3079  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
3080  *
3081  * Unsafe to use in noinstr code; use raw_atomic64_inc_not_zero() there.
3082  *
3083  * Return: @true if @v was updated, @false otherwise.
3084  */
3085 static __always_inline bool
3086 atomic64_inc_not_zero(atomic64_t *v)
3087 {
3088 	kcsan_mb();
3089 	instrument_atomic_read_write(v, sizeof(*v));
3090 	return raw_atomic64_inc_not_zero(v);
3091 }
3092 
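/*
 * Illustrative sketch: inc_not_zero() is the usual way to take a
 * reference only while an object is still live, e.g. during an
 * RCU-protected lookup. All names are hypothetical.
 *
 *	rcu_read_lock();
 *	obj = my_lookup(id);
 *	if (obj && !atomic64_inc_not_zero(&obj->refcount))
 *		obj = NULL;	// refcount already hit zero, do not touch it
 *	rcu_read_unlock();
 */
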
3093 /**
3094  * atomic64_inc_unless_negative() - atomic increment unless negative with full ordering
3095  * @v: pointer to atomic64_t
3096  *
3097  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
3098  *
3099  * Unsafe to use in noinstr code; use raw_atomic64_inc_unless_negative() there.
3100  *
3101  * Return: @true if @v was updated, @false otherwise.
3102  */
3103 static __always_inline bool
3104 atomic64_inc_unless_negative(atomic64_t *v)
3105 {
3106 	kcsan_mb();
3107 	instrument_atomic_read_write(v, sizeof(*v));
3108 	return raw_atomic64_inc_unless_negative(v);
3109 }
3110 
3111 /**
3112  * atomic64_dec_unless_positive() - atomic decrement unless positive with full ordering
3113  * @v: pointer to atomic64_t
3114  *
3115  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
3116  *
3117  * Unsafe to use in noinstr code; use raw_atomic64_dec_unless_positive() there.
3118  *
3119  * Return: @true if @v was updated, @false otherwise.
3120  */
3121 static __always_inline bool
3122 atomic64_dec_unless_positive(atomic64_t *v)
3123 {
3124 	kcsan_mb();
3125 	instrument_atomic_read_write(v, sizeof(*v));
3126 	return raw_atomic64_dec_unless_positive(v);
3127 }
3128 
3129 /**
3130  * atomic64_dec_if_positive() - atomic decrement if positive with full ordering
3131  * @v: pointer to atomic64_t
3132  *
3133  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
3134  *
3135  * Unsafe to use in noinstr code; use raw_atomic64_dec_if_positive() there.
3136  *
3137  * Return: The old value of (@v - 1), regardless of whether @v was updated.
3138  */
3139 static __always_inline s64
3140 atomic64_dec_if_positive(atomic64_t *v)
3141 {
3142 	kcsan_mb();
3143 	instrument_atomic_read_write(v, sizeof(*v));
3144 	return raw_atomic64_dec_if_positive(v);
3145 }
3146 
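/*
 * Illustrative sketch (hypothetical token bucket): because the return
 * value is the old value minus one, it is negative exactly when no
 * decrement took place.
 *
 *	if (atomic64_dec_if_positive(&bucket->tokens) < 0)
 *		return -EAGAIN;	// no token left, the counter is unchanged
 */
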
3147 /**
3148  * atomic_long_read() - atomic load with relaxed ordering
3149  * @v: pointer to atomic_long_t
3150  *
3151  * Atomically loads the value of @v with relaxed ordering.
3152  *
3153  * Unsafe to use in noinstr code; use raw_atomic_long_read() there.
3154  *
3155  * Return: The value loaded from @v.
3156  */
3157 static __always_inline long
3158 atomic_long_read(const atomic_long_t *v)
3159 {
3160 	instrument_atomic_read(v, sizeof(*v));
3161 	return raw_atomic_long_read(v);
3162 }
3163 
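/*
 * Illustrative note: atomic_long_t follows the machine word size
 * (32 bits on 32-bit kernels, 64 bits on 64-bit kernels), which makes it
 * a natural fit for counters that scale with memory, e.g. a hypothetical
 * page counter:
 *
 *	atomic_long_add(nr_pages, &my_stats.pages_mapped);
 *	...
 *	long mapped = atomic_long_read(&my_stats.pages_mapped);
 */
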
3164 /**
3165  * atomic_long_read_acquire() - atomic load with acquire ordering
3166  * @v: pointer to atomic_long_t
3167  *
3168  * Atomically loads the value of @v with acquire ordering.
3169  *
3170  * Unsafe to use in noinstr code; use raw_atomic_long_read_acquire() there.
3171  *
3172  * Return: The value loaded from @v.
3173  */
3174 static __always_inline long
3175 atomic_long_read_acquire(const atomic_long_t *v)
3176 {
3177 	instrument_atomic_read(v, sizeof(*v));
3178 	return raw_atomic_long_read_acquire(v);
3179 }
3180 
3181 /**
3182  * atomic_long_set() - atomic set with relaxed ordering
3183  * @v: pointer to atomic_long_t
3184  * @i: long value to assign
3185  *
3186  * Atomically sets @v to @i with relaxed ordering.
3187  *
3188  * Unsafe to use in noinstr code; use raw_atomic_long_set() there.
3189  *
3190  * Return: Nothing.
3191  */
3192 static __always_inline void
3193 atomic_long_set(atomic_long_t *v, long i)
3194 {
3195 	instrument_atomic_write(v, sizeof(*v));
3196 	raw_atomic_long_set(v, i);
3197 }
3198 
3199 /**
3200  * atomic_long_set_release() - atomic set with release ordering
3201  * @v: pointer to atomic_long_t
3202  * @i: long value to assign
3203  *
3204  * Atomically sets @v to @i with release ordering.
3205  *
3206  * Unsafe to use in noinstr code; use raw_atomic_long_set_release() there.
3207  *
3208  * Return: Nothing.
3209  */
3210 static __always_inline void
3211 atomic_long_set_release(atomic_long_t *v, long i)
3212 {
3213 	kcsan_release();
3214 	instrument_atomic_write(v, sizeof(*v));
3215 	raw_atomic_long_set_release(v, i);
3216 }
3217 
3218 /**
3219  * atomic_long_add() - atomic add with relaxed ordering
3220  * @i: long value to add
3221  * @v: pointer to atomic_long_t
3222  *
3223  * Atomically updates @v to (@v + @i) with relaxed ordering.
3224  *
3225  * Unsafe to use in noinstr code; use raw_atomic_long_add() there.
3226  *
3227  * Return: Nothing.
3228  */
3229 static __always_inline void
3230 atomic_long_add(long i, atomic_long_t *v)
3231 {
3232 	instrument_atomic_read_write(v, sizeof(*v));
3233 	raw_atomic_long_add(i, v);
3234 }
3235 
3236 /**
3237  * atomic_long_add_return() - atomic add with full ordering
3238  * @i: long value to add
3239  * @v: pointer to atomic_long_t
3240  *
3241  * Atomically updates @v to (@v + @i) with full ordering.
3242  *
3243  * Unsafe to use in noinstr code; use raw_atomic_long_add_return() there.
3244  *
3245  * Return: The updated value of @v.
3246  */
3247 static __always_inline long
3248 atomic_long_add_return(long i, atomic_long_t *v)
3249 {
3250 	kcsan_mb();
3251 	instrument_atomic_read_write(v, sizeof(*v));
3252 	return raw_atomic_long_add_return(i, v);
3253 }
3254 
3255 /**
3256  * atomic_long_add_return_acquire() - atomic add with acquire ordering
3257  * @i: long value to add
3258  * @v: pointer to atomic_long_t
3259  *
3260  * Atomically updates @v to (@v + @i) with acquire ordering.
3261  *
3262  * Unsafe to use in noinstr code; use raw_atomic_long_add_return_acquire() there.
3263  *
3264  * Return: The updated value of @v.
3265  */
3266 static __always_inline long
3267 atomic_long_add_return_acquire(long i, atomic_long_t *v)
3268 {
3269 	instrument_atomic_read_write(v, sizeof(*v));
3270 	return raw_atomic_long_add_return_acquire(i, v);
3271 }
3272 
3273 /**
3274  * atomic_long_add_return_release() - atomic add with release ordering
3275  * @i: long value to add
3276  * @v: pointer to atomic_long_t
3277  *
3278  * Atomically updates @v to (@v + @i) with release ordering.
3279  *
3280  * Unsafe to use in noinstr code; use raw_atomic_long_add_return_release() there.
3281  *
3282  * Return: The updated value of @v.
3283  */
3284 static __always_inline long
3285 atomic_long_add_return_release(long i, atomic_long_t *v)
3286 {
3287 	kcsan_release();
3288 	instrument_atomic_read_write(v, sizeof(*v));
3289 	return raw_atomic_long_add_return_release(i, v);
3290 }
3291 
3292 /**
3293  * atomic_long_add_return_relaxed() - atomic add with relaxed ordering
3294  * @i: long value to add
3295  * @v: pointer to atomic_long_t
3296  *
3297  * Atomically updates @v to (@v + @i) with relaxed ordering.
3298  *
3299  * Unsafe to use in noinstr code; use raw_atomic_long_add_return_relaxed() there.
3300  *
3301  * Return: The updated value of @v.
3302  */
3303 static __always_inline long
3304 atomic_long_add_return_relaxed(long i, atomic_long_t *v)
3305 {
3306 	instrument_atomic_read_write(v, sizeof(*v));
3307 	return raw_atomic_long_add_return_relaxed(i, v);
3308 }
3309 
3310 /**
3311  * atomic_long_fetch_add() - atomic add with full ordering
3312  * @i: long value to add
3313  * @v: pointer to atomic_long_t
3314  *
3315  * Atomically updates @v to (@v + @i) with full ordering.
3316  *
3317  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add() there.
3318  *
3319  * Return: The original value of @v.
3320  */
3321 static __always_inline long
3322 atomic_long_fetch_add(long i, atomic_long_t *v)
3323 {
3324 	kcsan_mb();
3325 	instrument_atomic_read_write(v, sizeof(*v));
3326 	return raw_atomic_long_fetch_add(i, v);
3327 }
3328 
3329 /**
3330  * atomic_long_fetch_add_acquire() - atomic add with acquire ordering
3331  * @i: long value to add
3332  * @v: pointer to atomic_long_t
3333  *
3334  * Atomically updates @v to (@v + @i) with acquire ordering.
3335  *
3336  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_acquire() there.
3337  *
3338  * Return: The original value of @v.
3339  */
3340 static __always_inline long
3341 atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
3342 {
3343 	instrument_atomic_read_write(v, sizeof(*v));
3344 	return raw_atomic_long_fetch_add_acquire(i, v);
3345 }
3346 
3347 /**
3348  * atomic_long_fetch_add_release() - atomic add with release ordering
3349  * @i: long value to add
3350  * @v: pointer to atomic_long_t
3351  *
3352  * Atomically updates @v to (@v + @i) with release ordering.
3353  *
3354  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_release() there.
3355  *
3356  * Return: The original value of @v.
3357  */
3358 static __always_inline long
3359 atomic_long_fetch_add_release(long i, atomic_long_t *v)
3360 {
3361 	kcsan_release();
3362 	instrument_atomic_read_write(v, sizeof(*v));
3363 	return raw_atomic_long_fetch_add_release(i, v);
3364 }
3365 
3366 /**
3367  * atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
3368  * @i: long value to add
3369  * @v: pointer to atomic_long_t
3370  *
3371  * Atomically updates @v to (@v + @i) with relaxed ordering.
3372  *
3373  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_relaxed() there.
3374  *
3375  * Return: The original value of @v.
3376  */
3377 static __always_inline long
3378 atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
3379 {
3380 	instrument_atomic_read_write(v, sizeof(*v));
3381 	return raw_atomic_long_fetch_add_relaxed(i, v);
3382 }
3383 
3384 /**
3385  * atomic_long_sub() - atomic subtract with relaxed ordering
3386  * @i: long value to subtract
3387  * @v: pointer to atomic_long_t
3388  *
3389  * Atomically updates @v to (@v - @i) with relaxed ordering.
3390  *
3391  * Unsafe to use in noinstr code; use raw_atomic_long_sub() there.
3392  *
3393  * Return: Nothing.
3394  */
3395 static __always_inline void
3396 atomic_long_sub(long i, atomic_long_t *v)
3397 {
3398 	instrument_atomic_read_write(v, sizeof(*v));
3399 	raw_atomic_long_sub(i, v);
3400 }
3401 
3402 /**
3403  * atomic_long_sub_return() - atomic subtract with full ordering
3404  * @i: long value to subtract
3405  * @v: pointer to atomic_long_t
3406  *
3407  * Atomically updates @v to (@v - @i) with full ordering.
3408  *
3409  * Unsafe to use in noinstr code; use raw_atomic_long_sub_return() there.
3410  *
3411  * Return: The updated value of @v.
3412  */
3413 static __always_inline long
3414 atomic_long_sub_return(long i, atomic_long_t *v)
3415 {
3416 	kcsan_mb();
3417 	instrument_atomic_read_write(v, sizeof(*v));
3418 	return raw_atomic_long_sub_return(i, v);
3419 }
3420 
3421 /**
3422  * atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
3423  * @i: long value to subtract
3424  * @v: pointer to atomic_long_t
3425  *
3426  * Atomically updates @v to (@v - @i) with acquire ordering.
3427  *
3428  * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_acquire() there.
3429  *
3430  * Return: The updated value of @v.
3431  */
3432 static __always_inline long
3433 atomic_long_sub_return_acquire(long i, atomic_long_t *v)
3434 {
3435 	instrument_atomic_read_write(v, sizeof(*v));
3436 	return raw_atomic_long_sub_return_acquire(i, v);
3437 }
3438 
3439 /**
3440  * atomic_long_sub_return_release() - atomic subtract with release ordering
3441  * @i: long value to subtract
3442  * @v: pointer to atomic_long_t
3443  *
3444  * Atomically updates @v to (@v - @i) with release ordering.
3445  *
3446  * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_release() there.
3447  *
3448  * Return: The updated value of @v.
3449  */
3450 static __always_inline long
3451 atomic_long_sub_return_release(long i, atomic_long_t *v)
3452 {
3453 	kcsan_release();
3454 	instrument_atomic_read_write(v, sizeof(*v));
3455 	return raw_atomic_long_sub_return_release(i, v);
3456 }
3457 
3458 /**
3459  * atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
3460  * @i: long value to subtract
3461  * @v: pointer to atomic_long_t
3462  *
3463  * Atomically updates @v to (@v - @i) with relaxed ordering.
3464  *
3465  * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_relaxed() there.
3466  *
3467  * Return: The updated value of @v.
3468  */
3469 static __always_inline long
3470 atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
3471 {
3472 	instrument_atomic_read_write(v, sizeof(*v));
3473 	return raw_atomic_long_sub_return_relaxed(i, v);
3474 }
3475 
3476 /**
3477  * atomic_long_fetch_sub() - atomic subtract with full ordering
3478  * @i: long value to subtract
3479  * @v: pointer to atomic_long_t
3480  *
3481  * Atomically updates @v to (@v - @i) with full ordering.
3482  *
3483  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub() there.
3484  *
3485  * Return: The original value of @v.
3486  */
3487 static __always_inline long
3488 atomic_long_fetch_sub(long i, atomic_long_t *v)
3489 {
3490 	kcsan_mb();
3491 	instrument_atomic_read_write(v, sizeof(*v));
3492 	return raw_atomic_long_fetch_sub(i, v);
3493 }
3494 
3495 /**
3496  * atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
3497  * @i: long value to subtract
3498  * @v: pointer to atomic_long_t
3499  *
3500  * Atomically updates @v to (@v - @i) with acquire ordering.
3501  *
3502  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_acquire() there.
3503  *
3504  * Return: The original value of @v.
3505  */
3506 static __always_inline long
3507 atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
3508 {
3509 	instrument_atomic_read_write(v, sizeof(*v));
3510 	return raw_atomic_long_fetch_sub_acquire(i, v);
3511 }
3512 
3513 /**
3514  * atomic_long_fetch_sub_release() - atomic subtract with release ordering
3515  * @i: long value to subtract
3516  * @v: pointer to atomic_long_t
3517  *
3518  * Atomically updates @v to (@v - @i) with release ordering.
3519  *
3520  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_release() there.
3521  *
3522  * Return: The original value of @v.
3523  */
3524 static __always_inline long
3525 atomic_long_fetch_sub_release(long i, atomic_long_t *v)
3526 {
3527 	kcsan_release();
3528 	instrument_atomic_read_write(v, sizeof(*v));
3529 	return raw_atomic_long_fetch_sub_release(i, v);
3530 }
3531 
3532 /**
3533  * atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
3534  * @i: long value to subtract
3535  * @v: pointer to atomic_long_t
3536  *
3537  * Atomically updates @v to (@v - @i) with relaxed ordering.
3538  *
3539  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_relaxed() there.
3540  *
3541  * Return: The original value of @v.
3542  */
3543 static __always_inline long
3544 atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
3545 {
3546 	instrument_atomic_read_write(v, sizeof(*v));
3547 	return raw_atomic_long_fetch_sub_relaxed(i, v);
3548 }
3549 
3550 /**
3551  * atomic_long_inc() - atomic increment with relaxed ordering
3552  * @v: pointer to atomic_long_t
3553  *
3554  * Atomically updates @v to (@v + 1) with relaxed ordering.
3555  *
3556  * Unsafe to use in noinstr code; use raw_atomic_long_inc() there.
3557  *
3558  * Return: Nothing.
3559  */
3560 static __always_inline void
3561 atomic_long_inc(atomic_long_t *v)
3562 {
3563 	instrument_atomic_read_write(v, sizeof(*v));
3564 	raw_atomic_long_inc(v);
3565 }
3566 
3567 /**
3568  * atomic_long_inc_return() - atomic increment with full ordering
3569  * @v: pointer to atomic_long_t
3570  *
3571  * Atomically updates @v to (@v + 1) with full ordering.
3572  *
3573  * Unsafe to use in noinstr code; use raw_atomic_long_inc_return() there.
3574  *
3575  * Return: The updated value of @v.
3576  */
3577 static __always_inline long
3578 atomic_long_inc_return(atomic_long_t *v)
3579 {
3580 	kcsan_mb();
3581 	instrument_atomic_read_write(v, sizeof(*v));
3582 	return raw_atomic_long_inc_return(v);
3583 }
3584 
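/*
 * Example (illustrative sketch, not generated output): atomic_long_inc_return()
 * is a common way to hand out unique, monotonically increasing sequence
 * numbers; the full ordering keeps the assignment ordered against surrounding
 * accesses. The identifiers below are hypothetical.
 *
 *	static long next_seq(atomic_long_t *seq)
 *	{
 *		return atomic_long_inc_return(seq);
 *	}
 */
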
3585 /**
3586  * atomic_long_inc_return_acquire() - atomic increment with acquire ordering
3587  * @v: pointer to atomic_long_t
3588  *
3589  * Atomically updates @v to (@v + 1) with acquire ordering.
3590  *
3591  * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_acquire() there.
3592  *
3593  * Return: The updated value of @v.
3594  */
3595 static __always_inline long
3596 atomic_long_inc_return_acquire(atomic_long_t *v)
3597 {
3598 	instrument_atomic_read_write(v, sizeof(*v));
3599 	return raw_atomic_long_inc_return_acquire(v);
3600 }
3601 
3602 /**
3603  * atomic_long_inc_return_release() - atomic increment with release ordering
3604  * @v: pointer to atomic_long_t
3605  *
3606  * Atomically updates @v to (@v + 1) with release ordering.
3607  *
3608  * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_release() there.
3609  *
3610  * Return: The updated value of @v.
3611  */
3612 static __always_inline long
3613 atomic_long_inc_return_release(atomic_long_t *v)
3614 {
3615 	kcsan_release();
3616 	instrument_atomic_read_write(v, sizeof(*v));
3617 	return raw_atomic_long_inc_return_release(v);
3618 }
3619 
3620 /**
3621  * atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
3622  * @v: pointer to atomic_long_t
3623  *
3624  * Atomically updates @v to (@v + 1) with relaxed ordering.
3625  *
3626  * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_relaxed() there.
3627  *
3628  * Return: The updated value of @v.
3629  */
3630 static __always_inline long
3631 atomic_long_inc_return_relaxed(atomic_long_t *v)
3632 {
3633 	instrument_atomic_read_write(v, sizeof(*v));
3634 	return raw_atomic_long_inc_return_relaxed(v);
3635 }
3636 
3637 /**
3638  * atomic_long_fetch_inc() - atomic increment with full ordering
3639  * @v: pointer to atomic_long_t
3640  *
3641  * Atomically updates @v to (@v + 1) with full ordering.
3642  *
3643  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc() there.
3644  *
3645  * Return: The original value of @v.
3646  */
3647 static __always_inline long
3648 atomic_long_fetch_inc(atomic_long_t *v)
3649 {
3650 	kcsan_mb();
3651 	instrument_atomic_read_write(v, sizeof(*v));
3652 	return raw_atomic_long_fetch_inc(v);
3653 }
3654 
3655 /**
3656  * atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
3657  * @v: pointer to atomic_long_t
3658  *
3659  * Atomically updates @v to (@v + 1) with acquire ordering.
3660  *
3661  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_acquire() there.
3662  *
3663  * Return: The original value of @v.
3664  */
3665 static __always_inline long
3666 atomic_long_fetch_inc_acquire(atomic_long_t *v)
3667 {
3668 	instrument_atomic_read_write(v, sizeof(*v));
3669 	return raw_atomic_long_fetch_inc_acquire(v);
3670 }
3671 
3672 /**
3673  * atomic_long_fetch_inc_release() - atomic increment with release ordering
3674  * @v: pointer to atomic_long_t
3675  *
3676  * Atomically updates @v to (@v + 1) with release ordering.
3677  *
3678  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_release() there.
3679  *
3680  * Return: The original value of @v.
3681  */
3682 static __always_inline long
3683 atomic_long_fetch_inc_release(atomic_long_t *v)
3684 {
3685 	kcsan_release();
3686 	instrument_atomic_read_write(v, sizeof(*v));
3687 	return raw_atomic_long_fetch_inc_release(v);
3688 }
3689 
3690 /**
3691  * atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
3692  * @v: pointer to atomic_long_t
3693  *
3694  * Atomically updates @v to (@v + 1) with relaxed ordering.
3695  *
3696  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_relaxed() there.
3697  *
3698  * Return: The original value of @v.
3699  */
3700 static __always_inline long
3701 atomic_long_fetch_inc_relaxed(atomic_long_t *v)
3702 {
3703 	instrument_atomic_read_write(v, sizeof(*v));
3704 	return raw_atomic_long_fetch_inc_relaxed(v);
3705 }
3706 
3707 /**
3708  * atomic_long_dec() - atomic decrement with relaxed ordering
3709  * @v: pointer to atomic_long_t
3710  *
3711  * Atomically updates @v to (@v - 1) with relaxed ordering.
3712  *
3713  * Unsafe to use in noinstr code; use raw_atomic_long_dec() there.
3714  *
3715  * Return: Nothing.
3716  */
3717 static __always_inline void
3718 atomic_long_dec(atomic_long_t *v)
3719 {
3720 	instrument_atomic_read_write(v, sizeof(*v));
3721 	raw_atomic_long_dec(v);
3722 }
3723 
3724 /**
3725  * atomic_long_dec_return() - atomic decrement with full ordering
3726  * @v: pointer to atomic_long_t
3727  *
3728  * Atomically updates @v to (@v - 1) with full ordering.
3729  *
3730  * Unsafe to use in noinstr code; use raw_atomic_long_dec_return() there.
3731  *
3732  * Return: The updated value of @v.
3733  */
3734 static __always_inline long
3735 atomic_long_dec_return(atomic_long_t *v)
3736 {
3737 	kcsan_mb();
3738 	instrument_atomic_read_write(v, sizeof(*v));
3739 	return raw_atomic_long_dec_return(v);
3740 }
3741 
3742 /**
3743  * atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
3744  * @v: pointer to atomic_long_t
3745  *
3746  * Atomically updates @v to (@v - 1) with acquire ordering.
3747  *
3748  * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_acquire() there.
3749  *
3750  * Return: The updated value of @v.
3751  */
3752 static __always_inline long
3753 atomic_long_dec_return_acquire(atomic_long_t *v)
3754 {
3755 	instrument_atomic_read_write(v, sizeof(*v));
3756 	return raw_atomic_long_dec_return_acquire(v);
3757 }
3758 
3759 /**
3760  * atomic_long_dec_return_release() - atomic decrement with release ordering
3761  * @v: pointer to atomic_long_t
3762  *
3763  * Atomically updates @v to (@v - 1) with release ordering.
3764  *
3765  * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_release() there.
3766  *
3767  * Return: The updated value of @v.
3768  */
3769 static __always_inline long
3770 atomic_long_dec_return_release(atomic_long_t *v)
3771 {
3772 	kcsan_release();
3773 	instrument_atomic_read_write(v, sizeof(*v));
3774 	return raw_atomic_long_dec_return_release(v);
3775 }
3776 
3777 /**
3778  * atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
3779  * @v: pointer to atomic_long_t
3780  *
3781  * Atomically updates @v to (@v - 1) with relaxed ordering.
3782  *
3783  * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_relaxed() there.
3784  *
3785  * Return: The updated value of @v.
3786  */
3787 static __always_inline long
3788 atomic_long_dec_return_relaxed(atomic_long_t *v)
3789 {
3790 	instrument_atomic_read_write(v, sizeof(*v));
3791 	return raw_atomic_long_dec_return_relaxed(v);
3792 }
3793 
3794 /**
3795  * atomic_long_fetch_dec() - atomic decrement with full ordering
3796  * @v: pointer to atomic_long_t
3797  *
3798  * Atomically updates @v to (@v - 1) with full ordering.
3799  *
3800  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec() there.
3801  *
3802  * Return: The original value of @v.
3803  */
3804 static __always_inline long
3805 atomic_long_fetch_dec(atomic_long_t *v)
3806 {
3807 	kcsan_mb();
3808 	instrument_atomic_read_write(v, sizeof(*v));
3809 	return raw_atomic_long_fetch_dec(v);
3810 }
3811 
3812 /**
3813  * atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
3814  * @v: pointer to atomic_long_t
3815  *
3816  * Atomically updates @v to (@v - 1) with acquire ordering.
3817  *
3818  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_acquire() there.
3819  *
3820  * Return: The original value of @v.
3821  */
3822 static __always_inline long
3823 atomic_long_fetch_dec_acquire(atomic_long_t *v)
3824 {
3825 	instrument_atomic_read_write(v, sizeof(*v));
3826 	return raw_atomic_long_fetch_dec_acquire(v);
3827 }
3828 
3829 /**
3830  * atomic_long_fetch_dec_release() - atomic decrement with release ordering
3831  * @v: pointer to atomic_long_t
3832  *
3833  * Atomically updates @v to (@v - 1) with release ordering.
3834  *
3835  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_release() there.
3836  *
3837  * Return: The original value of @v.
3838  */
3839 static __always_inline long
3840 atomic_long_fetch_dec_release(atomic_long_t *v)
3841 {
3842 	kcsan_release();
3843 	instrument_atomic_read_write(v, sizeof(*v));
3844 	return raw_atomic_long_fetch_dec_release(v);
3845 }
3846 
3847 /**
3848  * atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
3849  * @v: pointer to atomic_long_t
3850  *
3851  * Atomically updates @v to (@v - 1) with relaxed ordering.
3852  *
3853  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_relaxed() there.
3854  *
3855  * Return: The original value of @v.
3856  */
3857 static __always_inline long
3858 atomic_long_fetch_dec_relaxed(atomic_long_t *v)
3859 {
3860 	instrument_atomic_read_write(v, sizeof(*v));
3861 	return raw_atomic_long_fetch_dec_relaxed(v);
3862 }
3863 
3864 /**
3865  * atomic_long_and() - atomic bitwise AND with relaxed ordering
3866  * @i: long value
3867  * @v: pointer to atomic_long_t
3868  *
3869  * Atomically updates @v to (@v & @i) with relaxed ordering.
3870  *
3871  * Unsafe to use in noinstr code; use raw_atomic_long_and() there.
3872  *
3873  * Return: Nothing.
3874  */
3875 static __always_inline void
3876 atomic_long_and(long i, atomic_long_t *v)
3877 {
3878 	instrument_atomic_read_write(v, sizeof(*v));
3879 	raw_atomic_long_and(i, v);
3880 }
3881 
3882 /**
3883  * atomic_long_fetch_and() - atomic bitwise AND with full ordering
3884  * @i: long value
3885  * @v: pointer to atomic_long_t
3886  *
3887  * Atomically updates @v to (@v & @i) with full ordering.
3888  *
3889  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and() there.
3890  *
3891  * Return: The original value of @v.
3892  */
3893 static __always_inline long
3894 atomic_long_fetch_and(long i, atomic_long_t *v)
3895 {
3896 	kcsan_mb();
3897 	instrument_atomic_read_write(v, sizeof(*v));
3898 	return raw_atomic_long_fetch_and(i, v);
3899 }
3900 
3901 /**
3902  * atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
3903  * @i: long value
3904  * @v: pointer to atomic_long_t
3905  *
3906  * Atomically updates @v to (@v & @i) with acquire ordering.
3907  *
3908  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_acquire() there.
3909  *
3910  * Return: The original value of @v.
3911  */
3912 static __always_inline long
3913 atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
3914 {
3915 	instrument_atomic_read_write(v, sizeof(*v));
3916 	return raw_atomic_long_fetch_and_acquire(i, v);
3917 }
3918 
3919 /**
3920  * atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
3921  * @i: long value
3922  * @v: pointer to atomic_long_t
3923  *
3924  * Atomically updates @v to (@v & @i) with release ordering.
3925  *
3926  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_release() there.
3927  *
3928  * Return: The original value of @v.
3929  */
3930 static __always_inline long
3931 atomic_long_fetch_and_release(long i, atomic_long_t *v)
3932 {
3933 	kcsan_release();
3934 	instrument_atomic_read_write(v, sizeof(*v));
3935 	return raw_atomic_long_fetch_and_release(i, v);
3936 }
3937 
3938 /**
3939  * atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
3940  * @i: long value
3941  * @v: pointer to atomic_long_t
3942  *
3943  * Atomically updates @v to (@v & @i) with relaxed ordering.
3944  *
3945  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_relaxed() there.
3946  *
3947  * Return: The original value of @v.
3948  */
3949 static __always_inline long
3950 atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
3951 {
3952 	instrument_atomic_read_write(v, sizeof(*v));
3953 	return raw_atomic_long_fetch_and_relaxed(i, v);
3954 }
3955 
3956 /**
3957  * atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
3958  * @i: long value
3959  * @v: pointer to atomic_long_t
3960  *
3961  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
3962  *
3963  * Unsafe to use in noinstr code; use raw_atomic_long_andnot() there.
3964  *
3965  * Return: Nothing.
3966  */
3967 static __always_inline void
3968 atomic_long_andnot(long i, atomic_long_t *v)
3969 {
3970 	instrument_atomic_read_write(v, sizeof(*v));
3971 	raw_atomic_long_andnot(i, v);
3972 }
3973 
3974 /**
3975  * atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
3976  * @i: long value
3977  * @v: pointer to atomic_long_t
3978  *
3979  * Atomically updates @v to (@v & ~@i) with full ordering.
3980  *
3981  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot() there.
3982  *
3983  * Return: The original value of @v.
3984  */
3985 static __always_inline long
3986 atomic_long_fetch_andnot(long i, atomic_long_t *v)
3987 {
3988 	kcsan_mb();
3989 	instrument_atomic_read_write(v, sizeof(*v));
3990 	return raw_atomic_long_fetch_andnot(i, v);
3991 }
3992 
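/*
 * Example (illustrative sketch, not generated output): atomic_long_fetch_andnot()
 * atomically clears the bits in @i and returns the previous mask, so the
 * caller can tell which of the cleared bits were actually set. The flag name
 * and helper are hypothetical.
 *
 *	#define WORK_PENDING	1L
 *
 *	static bool take_pending(atomic_long_t *flags)
 *	{
 *		return atomic_long_fetch_andnot(WORK_PENDING, flags) & WORK_PENDING;
 *	}
 */
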
3993 /**
3994  * atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
3995  * @i: long value
3996  * @v: pointer to atomic_long_t
3997  *
3998  * Atomically updates @v to (@v & ~@i) with acquire ordering.
3999  *
4000  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_acquire() there.
4001  *
4002  * Return: The original value of @v.
4003  */
4004 static __always_inline long
4005 atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
4006 {
4007 	instrument_atomic_read_write(v, sizeof(*v));
4008 	return raw_atomic_long_fetch_andnot_acquire(i, v);
4009 }
4010 
4011 /**
4012  * atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
4013  * @i: long value
4014  * @v: pointer to atomic_long_t
4015  *
4016  * Atomically updates @v to (@v & ~@i) with release ordering.
4017  *
4018  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_release() there.
4019  *
4020  * Return: The original value of @v.
4021  */
4022 static __always_inline long
4023 atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
4024 {
4025 	kcsan_release();
4026 	instrument_atomic_read_write(v, sizeof(*v));
4027 	return raw_atomic_long_fetch_andnot_release(i, v);
4028 }
4029 
4030 /**
4031  * atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
4032  * @i: long value
4033  * @v: pointer to atomic_long_t
4034  *
4035  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
4036  *
4037  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_relaxed() there.
4038  *
4039  * Return: The original value of @v.
4040  */
4041 static __always_inline long
4042 atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
4043 {
4044 	instrument_atomic_read_write(v, sizeof(*v));
4045 	return raw_atomic_long_fetch_andnot_relaxed(i, v);
4046 }
4047 
4048 /**
4049  * atomic_long_or() - atomic bitwise OR with relaxed ordering
4050  * @i: long value
4051  * @v: pointer to atomic_long_t
4052  *
4053  * Atomically updates @v to (@v | @i) with relaxed ordering.
4054  *
4055  * Unsafe to use in noinstr code; use raw_atomic_long_or() there.
4056  *
4057  * Return: Nothing.
4058  */
4059 static __always_inline void
4060 atomic_long_or(long i, atomic_long_t *v)
4061 {
4062 	instrument_atomic_read_write(v, sizeof(*v));
4063 	raw_atomic_long_or(i, v);
4064 }
4065 
4066 /**
4067  * atomic_long_fetch_or() - atomic bitwise OR with full ordering
4068  * @i: long value
4069  * @v: pointer to atomic_long_t
4070  *
4071  * Atomically updates @v to (@v | @i) with full ordering.
4072  *
4073  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or() there.
4074  *
4075  * Return: The original value of @v.
4076  */
4077 static __always_inline long
4078 atomic_long_fetch_or(long i, atomic_long_t *v)
4079 {
4080 	kcsan_mb();
4081 	instrument_atomic_read_write(v, sizeof(*v));
4082 	return raw_atomic_long_fetch_or(i, v);
4083 }
4084 
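/*
 * Example (illustrative sketch, not generated output): atomic_long_fetch_or()
 * sets bits and returns the old mask, which makes "only the first setter does
 * the work" patterns straightforward. The flag name and helper are
 * hypothetical.
 *
 *	#define INIT_DONE	1L
 *
 *	static bool claim_init(atomic_long_t *state)
 *	{
 *		// True only for the caller that actually flipped the bit.
 *		return !(atomic_long_fetch_or(INIT_DONE, state) & INIT_DONE);
 *	}
 */
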
4085 /**
4086  * atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
4087  * @i: long value
4088  * @v: pointer to atomic_long_t
4089  *
4090  * Atomically updates @v to (@v | @i) with acquire ordering.
4091  *
4092  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_acquire() there.
4093  *
4094  * Return: The original value of @v.
4095  */
4096 static __always_inline long
4097 atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
4098 {
4099 	instrument_atomic_read_write(v, sizeof(*v));
4100 	return raw_atomic_long_fetch_or_acquire(i, v);
4101 }
4102 
4103 /**
4104  * atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
4105  * @i: long value
4106  * @v: pointer to atomic_long_t
4107  *
4108  * Atomically updates @v to (@v | @i) with release ordering.
4109  *
4110  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_release() there.
4111  *
4112  * Return: The original value of @v.
4113  */
4114 static __always_inline long
4115 atomic_long_fetch_or_release(long i, atomic_long_t *v)
4116 {
4117 	kcsan_release();
4118 	instrument_atomic_read_write(v, sizeof(*v));
4119 	return raw_atomic_long_fetch_or_release(i, v);
4120 }
4121 
4122 /**
4123  * atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
4124  * @i: long value
4125  * @v: pointer to atomic_long_t
4126  *
4127  * Atomically updates @v to (@v | @i) with relaxed ordering.
4128  *
4129  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_relaxed() there.
4130  *
4131  * Return: The original value of @v.
4132  */
4133 static __always_inline long
4134 atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
4135 {
4136 	instrument_atomic_read_write(v, sizeof(*v));
4137 	return raw_atomic_long_fetch_or_relaxed(i, v);
4138 }
4139 
4140 /**
4141  * atomic_long_xor() - atomic bitwise XOR with relaxed ordering
4142  * @i: long value
4143  * @v: pointer to atomic_long_t
4144  *
4145  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
4146  *
4147  * Unsafe to use in noinstr code; use raw_atomic_long_xor() there.
4148  *
4149  * Return: Nothing.
4150  */
4151 static __always_inline void
4152 atomic_long_xor(long i, atomic_long_t *v)
4153 {
4154 	instrument_atomic_read_write(v, sizeof(*v));
4155 	raw_atomic_long_xor(i, v);
4156 }
4157 
4158 /**
4159  * atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
4160  * @i: long value
4161  * @v: pointer to atomic_long_t
4162  *
4163  * Atomically updates @v to (@v ^ @i) with full ordering.
4164  *
4165  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor() there.
4166  *
4167  * Return: The original value of @v.
4168  */
4169 static __always_inline long
4170 atomic_long_fetch_xor(long i, atomic_long_t *v)
4171 {
4172 	kcsan_mb();
4173 	instrument_atomic_read_write(v, sizeof(*v));
4174 	return raw_atomic_long_fetch_xor(i, v);
4175 }
4176 
4177 /**
4178  * atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
4179  * @i: long value
4180  * @v: pointer to atomic_long_t
4181  *
4182  * Atomically updates @v to (@v ^ @i) with acquire ordering.
4183  *
4184  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_acquire() there.
4185  *
4186  * Return: The original value of @v.
4187  */
4188 static __always_inline long
4189 atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
4190 {
4191 	instrument_atomic_read_write(v, sizeof(*v));
4192 	return raw_atomic_long_fetch_xor_acquire(i, v);
4193 }
4194 
4195 /**
4196  * atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
4197  * @i: long value
4198  * @v: pointer to atomic_long_t
4199  *
4200  * Atomically updates @v to (@v ^ @i) with release ordering.
4201  *
4202  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_release() there.
4203  *
4204  * Return: The original value of @v.
4205  */
4206 static __always_inline long
4207 atomic_long_fetch_xor_release(long i, atomic_long_t *v)
4208 {
4209 	kcsan_release();
4210 	instrument_atomic_read_write(v, sizeof(*v));
4211 	return raw_atomic_long_fetch_xor_release(i, v);
4212 }
4213 
4214 /**
4215  * atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
4216  * @i: long value
4217  * @v: pointer to atomic_long_t
4218  *
4219  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
4220  *
4221  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_relaxed() there.
4222  *
4223  * Return: The original value of @v.
4224  */
4225 static __always_inline long
4226 atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
4227 {
4228 	instrument_atomic_read_write(v, sizeof(*v));
4229 	return raw_atomic_long_fetch_xor_relaxed(i, v);
4230 }
4231 
4232 /**
4233  * atomic_long_xchg() - atomic exchange with full ordering
4234  * @v: pointer to atomic_long_t
4235  * @new: long value to assign
4236  *
4237  * Atomically updates @v to @new with full ordering.
4238  *
4239  * Unsafe to use in noinstr code; use raw_atomic_long_xchg() there.
4240  *
4241  * Return: The original value of @v.
4242  */
4243 static __always_inline long
4244 atomic_long_xchg(atomic_long_t *v, long new)
4245 {
4246 	kcsan_mb();
4247 	instrument_atomic_read_write(v, sizeof(*v));
4248 	return raw_atomic_long_xchg(v, new);
4249 }
4250 
4251 /**
4252  * atomic_long_xchg_acquire() - atomic exchange with acquire ordering
4253  * @v: pointer to atomic_long_t
4254  * @new: long value to assign
4255  *
4256  * Atomically updates @v to @new with acquire ordering.
4257  *
4258  * Unsafe to use in noinstr code; use raw_atomic_long_xchg_acquire() there.
4259  *
4260  * Return: The original value of @v.
4261  */
4262 static __always_inline long
4263 atomic_long_xchg_acquire(atomic_long_t *v, long new)
4264 {
4265 	instrument_atomic_read_write(v, sizeof(*v));
4266 	return raw_atomic_long_xchg_acquire(v, new);
4267 }
4268 
4269 /**
4270  * atomic_long_xchg_release() - atomic exchange with release ordering
4271  * @v: pointer to atomic_long_t
4272  * @new: long value to assign
4273  *
4274  * Atomically updates @v to @new with release ordering.
4275  *
4276  * Unsafe to use in noinstr code; use raw_atomic_long_xchg_release() there.
4277  *
4278  * Return: The original value of @v.
4279  */
4280 static __always_inline long
4281 atomic_long_xchg_release(atomic_long_t *v, long new)
4282 {
4283 	kcsan_release();
4284 	instrument_atomic_read_write(v, sizeof(*v));
4285 	return raw_atomic_long_xchg_release(v, new);
4286 }
4287 
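/*
 * Example (illustrative sketch, not generated output): a release-ordered
 * exchange is enough when the writer only needs its prior stores to be
 * visible before the new value is observed; a reader would pair it with an
 * acquire-ordered load such as atomic_long_read_acquire(). The identifiers
 * below are hypothetical.
 *
 *	static long publish_state(atomic_long_t *state, long new_state)
 *	{
 *		// Stores made before this call are visible to a reader that
 *		// acquire-loads @state and observes @new_state.
 *		return atomic_long_xchg_release(state, new_state);
 *	}
 */
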
4288 /**
4289  * atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
4290  * @v: pointer to atomic_long_t
4291  * @new: long value to assign
4292  *
4293  * Atomically updates @v to @new with relaxed ordering.
4294  *
4295  * Unsafe to use in noinstr code; use raw_atomic_long_xchg_relaxed() there.
4296  *
4297  * Return: The original value of @v.
4298  */
4299 static __always_inline long
4300 atomic_long_xchg_relaxed(atomic_long_t *v, long new)
4301 {
4302 	instrument_atomic_read_write(v, sizeof(*v));
4303 	return raw_atomic_long_xchg_relaxed(v, new);
4304 }
4305 
4306 /**
4307  * atomic_long_cmpxchg() - atomic compare and exchange with full ordering
4308  * @v: pointer to atomic_long_t
4309  * @old: long value to compare with
4310  * @new: long value to assign
4311  *
4312  * If (@v == @old), atomically updates @v to @new with full ordering.
4313  *
4314  * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg() there.
4315  *
4316  * Return: The original value of @v.
4317  */
4318 static __always_inline long
4319 atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
4320 {
4321 	kcsan_mb();
4322 	instrument_atomic_read_write(v, sizeof(*v));
4323 	return raw_atomic_long_cmpxchg(v, old, new);
4324 }
4325 
4326 /**
4327  * atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
4328  * @v: pointer to atomic_long_t
4329  * @old: long value to compare with
4330  * @new: long value to assign
4331  *
4332  * If (@v == @old), atomically updates @v to @new with acquire ordering.
4333  *
4334  * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_acquire() there.
4335  *
4336  * Return: The original value of @v.
4337  */
4338 static __always_inline long
4339 atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
4340 {
4341 	instrument_atomic_read_write(v, sizeof(*v));
4342 	return raw_atomic_long_cmpxchg_acquire(v, old, new);
4343 }
4344 
4345 /**
4346  * atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
4347  * @v: pointer to atomic_long_t
4348  * @old: long value to compare with
4349  * @new: long value to assign
4350  *
4351  * If (@v == @old), atomically updates @v to @new with release ordering.
4352  *
4353  * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_release() there.
4354  *
4355  * Return: The original value of @v.
4356  */
4357 static __always_inline long
4358 atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
4359 {
4360 	kcsan_release();
4361 	instrument_atomic_read_write(v, sizeof(*v));
4362 	return raw_atomic_long_cmpxchg_release(v, old, new);
4363 }
4364 
4365 /**
4366  * atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
4367  * @v: pointer to atomic_long_t
4368  * @old: long value to compare with
4369  * @new: long value to assign
4370  *
4371  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
4372  *
4373  * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_relaxed() there.
4374  *
4375  * Return: The original value of @v.
4376  */
4377 static __always_inline long
4378 atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
4379 {
4380 	instrument_atomic_read_write(v, sizeof(*v));
4381 	return raw_atomic_long_cmpxchg_relaxed(v, old, new);
4382 }
4383 
4384 /**
4385  * atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
4386  * @v: pointer to atomic_long_t
4387  * @old: pointer to long value to compare with
4388  * @new: long value to assign
4389  *
4390  * If (@v == @old), atomically updates @v to @new with full ordering.
4391  * Otherwise, updates @old to the current value of @v.
4392  *
4393  * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg() there.
4394  *
4395  * Return: @true if the exchange occurred, @false otherwise.
4396  */
4397 static __always_inline bool
4398 atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
4399 {
4400 	kcsan_mb();
4401 	instrument_atomic_read_write(v, sizeof(*v));
4402 	instrument_atomic_read_write(old, sizeof(*old));
4403 	return raw_atomic_long_try_cmpxchg(v, old, new);
4404 }
4405 
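/*
 * Example (illustrative sketch, not generated output): try_cmpxchg() updates
 * @old on failure, which keeps compare-and-swap loops free of an extra read
 * per iteration. A hypothetical "increment but never past @limit" helper:
 *
 *	static bool inc_below(atomic_long_t *v, long limit)
 *	{
 *		long old = atomic_long_read(v);
 *
 *		do {
 *			if (old >= limit)
 *				return false;
 *		} while (!atomic_long_try_cmpxchg(v, &old, old + 1));
 *
 *		return true;
 *	}
 */
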
4406 /**
4407  * atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
4408  * @v: pointer to atomic_long_t
4409  * @old: pointer to long value to compare with
4410  * @new: long value to assign
4411  *
4412  * If (@v == @old), atomically updates @v to @new with acquire ordering.
4413  * Otherwise, updates @old to the current value of @v.
4414  *
4415  * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_acquire() there.
4416  *
4417  * Return: @true if the exchange occurred, @false otherwise.
4418  */
4419 static __always_inline bool
4420 atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
4421 {
4422 	instrument_atomic_read_write(v, sizeof(*v));
4423 	instrument_atomic_read_write(old, sizeof(*old));
4424 	return raw_atomic_long_try_cmpxchg_acquire(v, old, new);
4425 }
4426 
4427 /**
4428  * atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
4429  * @v: pointer to atomic_long_t
4430  * @old: pointer to long value to compare with
4431  * @new: long value to assign
4432  *
4433  * If (@v == @old), atomically updates @v to @new with release ordering.
4434  * Otherwise, updates @old to the current value of @v.
4435  *
4436  * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_release() there.
4437  *
4438  * Return: @true if the exchange occurred, @false otherwise.
4439  */
4440 static __always_inline bool
4441 atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
4442 {
4443 	kcsan_release();
4444 	instrument_atomic_read_write(v, sizeof(*v));
4445 	instrument_atomic_read_write(old, sizeof(*old));
4446 	return raw_atomic_long_try_cmpxchg_release(v, old, new);
4447 }
4448 
4449 /**
4450  * atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
4451  * @v: pointer to atomic_long_t
4452  * @old: pointer to long value to compare with
4453  * @new: long value to assign
4454  *
4455  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
4456  * Otherwise, updates @old to the current value of @v.
4457  *
4458  * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_relaxed() there.
4459  *
4460  * Return: @true if the exchange occurred, @false otherwise.
4461  */
4462 static __always_inline bool
4463 atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
4464 {
4465 	instrument_atomic_read_write(v, sizeof(*v));
4466 	instrument_atomic_read_write(old, sizeof(*old));
4467 	return raw_atomic_long_try_cmpxchg_relaxed(v, old, new);
4468 }
4469 
4470 /**
4471  * atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
4472  * @i: long value to subtract
4473  * @v: pointer to atomic_long_t
4474  *
4475  * Atomically updates @v to (@v - @i) with full ordering.
4476  *
4477  * Unsafe to use in noinstr code; use raw_atomic_long_sub_and_test() there.
4478  *
4479  * Return: @true if the resulting value of @v is zero, @false otherwise.
4480  */
4481 static __always_inline bool
4482 atomic_long_sub_and_test(long i, atomic_long_t *v)
4483 {
4484 	kcsan_mb();
4485 	instrument_atomic_read_write(v, sizeof(*v));
4486 	return raw_atomic_long_sub_and_test(i, v);
4487 }
4488 
4489 /**
4490  * atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
4491  * @v: pointer to atomic_long_t
4492  *
4493  * Atomically updates @v to (@v - 1) with full ordering.
4494  *
4495  * Unsafe to use in noinstr code; use raw_atomic_long_dec_and_test() there.
4496  *
4497  * Return: @true if the resulting value of @v is zero, @false otherwise.
4498  */
4499 static __always_inline bool
4500 atomic_long_dec_and_test(atomic_long_t *v)
4501 {
4502 	kcsan_mb();
4503 	instrument_atomic_read_write(v, sizeof(*v));
4504 	return raw_atomic_long_dec_and_test(v);
4505 }
4506 
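/*
 * Example (illustrative sketch, not generated output): the classic "last one
 * out turns off the lights" pattern; the full ordering orders the teardown
 * against the final decrement. (For object reference counts, refcount_t is
 * normally preferred over open-coded atomics.) struct thing and free_thing()
 * are hypothetical.
 *
 *	static void put_thing(struct thing *t)
 *	{
 *		if (atomic_long_dec_and_test(&t->users))
 *			free_thing(t);
 *	}
 */
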
4507 /**
4508  * atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
4509  * @v: pointer to atomic_long_t
4510  *
4511  * Atomically updates @v to (@v + 1) with full ordering.
4512  *
4513  * Unsafe to use in noinstr code; use raw_atomic_long_inc_and_test() there.
4514  *
4515  * Return: @true if the resulting value of @v is zero, @false otherwise.
4516  */
4517 static __always_inline bool
4518 atomic_long_inc_and_test(atomic_long_t *v)
4519 {
4520 	kcsan_mb();
4521 	instrument_atomic_read_write(v, sizeof(*v));
4522 	return raw_atomic_long_inc_and_test(v);
4523 }
4524 
4525 /**
4526  * atomic_long_add_negative() - atomic add and test if negative with full ordering
4527  * @i: long value to add
4528  * @v: pointer to atomic_long_t
4529  *
4530  * Atomically updates @v to (@v + @i) with full ordering.
4531  *
4532  * Unsafe to use in noinstr code; use raw_atomic_long_add_negative() there.
4533  *
4534  * Return: @true if the resulting value of @v is negative, @false otherwise.
4535  */
4536 static __always_inline bool
4537 atomic_long_add_negative(long i, atomic_long_t *v)
4538 {
4539 	kcsan_mb();
4540 	instrument_atomic_read_write(v, sizeof(*v));
4541 	return raw_atomic_long_add_negative(i, v);
4542 }
4543 
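/*
 * Example (illustrative sketch, not generated output): atomic_long_add_negative()
 * reports the sign of the *result*, which suits "charge and detect overdraft"
 * style accounting. The identifiers below are hypothetical.
 *
 *	static bool charge(atomic_long_t *budget, long cost)
 *	{
 *		// True if the budget went negative as a result of this charge.
 *		return atomic_long_add_negative(-cost, budget);
 *	}
 */
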
4544 /**
4545  * atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
4546  * @i: long value to add
4547  * @v: pointer to atomic_long_t
4548  *
4549  * Atomically updates @v to (@v + @i) with acquire ordering.
4550  *
4551  * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_acquire() there.
4552  *
4553  * Return: @true if the resulting value of @v is negative, @false otherwise.
4554  */
4555 static __always_inline bool
4556 atomic_long_add_negative_acquire(long i, atomic_long_t *v)
4557 {
4558 	instrument_atomic_read_write(v, sizeof(*v));
4559 	return raw_atomic_long_add_negative_acquire(i, v);
4560 }
4561 
4562 /**
4563  * atomic_long_add_negative_release() - atomic add and test if negative with release ordering
4564  * @i: long value to add
4565  * @v: pointer to atomic_long_t
4566  *
4567  * Atomically updates @v to (@v + @i) with release ordering.
4568  *
4569  * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_release() there.
4570  *
4571  * Return: @true if the resulting value of @v is negative, @false otherwise.
4572  */
4573 static __always_inline bool
4574 atomic_long_add_negative_release(long i, atomic_long_t *v)
4575 {
4576 	kcsan_release();
4577 	instrument_atomic_read_write(v, sizeof(*v));
4578 	return raw_atomic_long_add_negative_release(i, v);
4579 }
4580 
4581 /**
4582  * atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
4583  * @i: long value to add
4584  * @v: pointer to atomic_long_t
4585  *
4586  * Atomically updates @v to (@v + @i) with relaxed ordering.
4587  *
4588  * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_relaxed() there.
4589  *
4590  * Return: @true if the resulting value of @v is negative, @false otherwise.
4591  */
4592 static __always_inline bool
4593 atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
4594 {
4595 	instrument_atomic_read_write(v, sizeof(*v));
4596 	return raw_atomic_long_add_negative_relaxed(i, v);
4597 }
4598 
4599 /**
4600  * atomic_long_fetch_add_unless() - atomic add unless value with full ordering
4601  * @v: pointer to atomic_long_t
4602  * @a: long value to add
4603  * @u: long value to compare with
4604  *
4605  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
4606  *
4607  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_unless() there.
4608  *
4609  * Return: The original value of @v.
4610  */
4611 static __always_inline long
4612 atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
4613 {
4614 	kcsan_mb();
4615 	instrument_atomic_read_write(v, sizeof(*v));
4616 	return raw_atomic_long_fetch_add_unless(v, a, u);
4617 }
4618 
4619 /**
4620  * atomic_long_add_unless() - atomic add unless value with full ordering
4621  * @v: pointer to atomic_long_t
4622  * @a: long value to add
4623  * @u: long value to compare with
4624  *
4625  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
4626  *
4627  * Unsafe to use in noinstr code; use raw_atomic_long_add_unless() there.
4628  *
4629  * Return: @true if @v was updated, @false otherwise.
4630  */
4631 static __always_inline bool
4632 atomic_long_add_unless(atomic_long_t *v, long a, long u)
4633 {
4634 	kcsan_mb();
4635 	instrument_atomic_read_write(v, sizeof(*v));
4636 	return raw_atomic_long_add_unless(v, a, u);
4637 }
4638 
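/*
 * Example (illustrative sketch, not generated output): atomic_long_add_unless()
 * refuses the update when @v equals @u, which is handy for counters with a
 * "sticky" sentinel value. A hypothetical counter that stays put once it has
 * been poisoned to -1:
 *
 *	static bool count_event(atomic_long_t *counter)
 *	{
 *		return atomic_long_add_unless(counter, 1, -1);
 *	}
 */
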
4639 /**
4640  * atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
4641  * @v: pointer to atomic_long_t
4642  *
4643  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
4644  *
4645  * Unsafe to use in noinstr code; use raw_atomic_long_inc_not_zero() there.
4646  *
4647  * Return: @true if @v was updated, @false otherwise.
4648  */
4649 static __always_inline bool
4650 atomic_long_inc_not_zero(atomic_long_t *v)
4651 {
4652 	kcsan_mb();
4653 	instrument_atomic_read_write(v, sizeof(*v));
4654 	return raw_atomic_long_inc_not_zero(v);
4655 }
4656 
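/*
 * Example (illustrative sketch, not generated output): atomic_long_inc_not_zero()
 * is the lookup-side half of the "no new references once the count has hit
 * zero" convention. struct thing and its users field are hypothetical.
 *
 *	static bool tryget_thing(struct thing *t)
 *	{
 *		// Fails if the last reference has already been dropped.
 *		return atomic_long_inc_not_zero(&t->users);
 *	}
 */
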
4657 /**
4658  * atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
4659  * @v: pointer to atomic_long_t
4660  *
4661  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
4662  *
4663  * Unsafe to use in noinstr code; use raw_atomic_long_inc_unless_negative() there.
4664  *
4665  * Return: @true if @v was updated, @false otherwise.
4666  */
4667 static __always_inline bool
4668 atomic_long_inc_unless_negative(atomic_long_t *v)
4669 {
4670 	kcsan_mb();
4671 	instrument_atomic_read_write(v, sizeof(*v));
4672 	return raw_atomic_long_inc_unless_negative(v);
4673 }
4674 
4675 /**
4676  * atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
4677  * @v: pointer to atomic_long_t
4678  *
4679  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
4680  *
4681  * Unsafe to use in noinstr code; use raw_atomic_long_dec_unless_positive() there.
4682  *
4683  * Return: @true if @v was updated, @false otherwise.
4684  */
4685 static __always_inline bool
4686 atomic_long_dec_unless_positive(atomic_long_t *v)
4687 {
4688 	kcsan_mb();
4689 	instrument_atomic_read_write(v, sizeof(*v));
4690 	return raw_atomic_long_dec_unless_positive(v);
4691 }
4692 
4693 /**
4694  * atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
4695  * @v: pointer to atomic_long_t
4696  *
4697  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
4698  *
4699  * Unsafe to use in noinstr code; use raw_atomic_long_dec_if_positive() there.
4700  *
4701  * Return: The original value of @v minus one, regardless of whether @v was updated.
4702  */
4703 static __always_inline long
4704 atomic_long_dec_if_positive(atomic_long_t *v)
4705 {
4706 	kcsan_mb();
4707 	instrument_atomic_read_write(v, sizeof(*v));
4708 	return raw_atomic_long_dec_if_positive(v);
4709 }
4710 
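/*
 * Example (illustrative sketch, not generated output): atomic_long_dec_if_positive()
 * only stores the decrement when the old value was positive, so it can
 * consume "tokens" without ever pushing the count below zero. The identifiers
 * below are hypothetical.
 *
 *	static bool take_token(atomic_long_t *tokens)
 *	{
 *		// A negative result means no token was available (none was taken).
 *		return atomic_long_dec_if_positive(tokens) >= 0;
 *	}
 */
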
4711 #define xchg(ptr, ...) \
4712 ({ \
4713 	typeof(ptr) __ai_ptr = (ptr); \
4714 	kcsan_mb(); \
4715 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4716 	raw_xchg(__ai_ptr, __VA_ARGS__); \
4717 })
4718 
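/*
 * Example (illustrative sketch, not generated output): unlike the
 * atomic_long_*() wrappers above, xchg() and friends operate on ordinary
 * scalar or pointer objects of a machine-supported size. struct item and
 * steal_list() are hypothetical.
 *
 *	static struct item *steal_list(struct item **head)
 *	{
 *		// Atomically detach the whole list, leaving an empty one behind.
 *		return xchg(head, NULL);
 *	}
 */
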
4719 #define xchg_acquire(ptr, ...) \
4720 ({ \
4721 	typeof(ptr) __ai_ptr = (ptr); \
4722 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4723 	raw_xchg_acquire(__ai_ptr, __VA_ARGS__); \
4724 })
4725 
4726 #define xchg_release(ptr, ...) \
4727 ({ \
4728 	typeof(ptr) __ai_ptr = (ptr); \
4729 	kcsan_release(); \
4730 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4731 	raw_xchg_release(__ai_ptr, __VA_ARGS__); \
4732 })
4733 
4734 #define xchg_relaxed(ptr, ...) \
4735 ({ \
4736 	typeof(ptr) __ai_ptr = (ptr); \
4737 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4738 	raw_xchg_relaxed(__ai_ptr, __VA_ARGS__); \
4739 })
4740 
4741 #define cmpxchg(ptr, ...) \
4742 ({ \
4743 	typeof(ptr) __ai_ptr = (ptr); \
4744 	kcsan_mb(); \
4745 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4746 	raw_cmpxchg(__ai_ptr, __VA_ARGS__); \
4747 })
4748 
4749 #define cmpxchg_acquire(ptr, ...) \
4750 ({ \
4751 	typeof(ptr) __ai_ptr = (ptr); \
4752 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4753 	raw_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
4754 })
4755 
4756 #define cmpxchg_release(ptr, ...) \
4757 ({ \
4758 	typeof(ptr) __ai_ptr = (ptr); \
4759 	kcsan_release(); \
4760 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4761 	raw_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
4762 })
4763 
4764 #define cmpxchg_relaxed(ptr, ...) \
4765 ({ \
4766 	typeof(ptr) __ai_ptr = (ptr); \
4767 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4768 	raw_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
4769 })
4770 
4771 #define cmpxchg64(ptr, ...) \
4772 ({ \
4773 	typeof(ptr) __ai_ptr = (ptr); \
4774 	kcsan_mb(); \
4775 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4776 	raw_cmpxchg64(__ai_ptr, __VA_ARGS__); \
4777 })
4778 
4779 #define cmpxchg64_acquire(ptr, ...) \
4780 ({ \
4781 	typeof(ptr) __ai_ptr = (ptr); \
4782 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4783 	raw_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
4784 })
4785 
4786 #define cmpxchg64_release(ptr, ...) \
4787 ({ \
4788 	typeof(ptr) __ai_ptr = (ptr); \
4789 	kcsan_release(); \
4790 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4791 	raw_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
4792 })
4793 
4794 #define cmpxchg64_relaxed(ptr, ...) \
4795 ({ \
4796 	typeof(ptr) __ai_ptr = (ptr); \
4797 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4798 	raw_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
4799 })
4800 
4801 #define cmpxchg128(ptr, ...) \
4802 ({ \
4803 	typeof(ptr) __ai_ptr = (ptr); \
4804 	kcsan_mb(); \
4805 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4806 	raw_cmpxchg128(__ai_ptr, __VA_ARGS__); \
4807 })
4808 
4809 #define cmpxchg128_acquire(ptr, ...) \
4810 ({ \
4811 	typeof(ptr) __ai_ptr = (ptr); \
4812 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4813 	raw_cmpxchg128_acquire(__ai_ptr, __VA_ARGS__); \
4814 })
4815 
4816 #define cmpxchg128_release(ptr, ...) \
4817 ({ \
4818 	typeof(ptr) __ai_ptr = (ptr); \
4819 	kcsan_release(); \
4820 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4821 	raw_cmpxchg128_release(__ai_ptr, __VA_ARGS__); \
4822 })
4823 
4824 #define cmpxchg128_relaxed(ptr, ...) \
4825 ({ \
4826 	typeof(ptr) __ai_ptr = (ptr); \
4827 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4828 	raw_cmpxchg128_relaxed(__ai_ptr, __VA_ARGS__); \
4829 })
4830 
4831 #define try_cmpxchg(ptr, oldp, ...) \
4832 ({ \
4833 	typeof(ptr) __ai_ptr = (ptr); \
4834 	typeof(oldp) __ai_oldp = (oldp); \
4835 	kcsan_mb(); \
4836 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4837 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4838 	raw_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4839 })
4840 
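/*
 * Example (illustrative sketch, not generated output): the macro form of
 * try_cmpxchg() follows the same "update the expected value on failure"
 * convention as the typed wrappers. A hypothetical singly linked push, with
 * struct item and push_item() as assumed names:
 *
 *	static void push_item(struct item **top, struct item *new)
 *	{
 *		struct item *old = READ_ONCE(*top);
 *
 *		do {
 *			new->next = old;
 *		} while (!try_cmpxchg(top, &old, new));
 *	}
 */
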
4841 #define try_cmpxchg_acquire(ptr, oldp, ...) \
4842 ({ \
4843 	typeof(ptr) __ai_ptr = (ptr); \
4844 	typeof(oldp) __ai_oldp = (oldp); \
4845 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4846 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4847 	raw_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4848 })
4849 
4850 #define try_cmpxchg_release(ptr, oldp, ...) \
4851 ({ \
4852 	typeof(ptr) __ai_ptr = (ptr); \
4853 	typeof(oldp) __ai_oldp = (oldp); \
4854 	kcsan_release(); \
4855 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4856 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4857 	raw_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4858 })
4859 
4860 #define try_cmpxchg_relaxed(ptr, oldp, ...) \
4861 ({ \
4862 	typeof(ptr) __ai_ptr = (ptr); \
4863 	typeof(oldp) __ai_oldp = (oldp); \
4864 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4865 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4866 	raw_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4867 })
4868 
4869 #define try_cmpxchg64(ptr, oldp, ...) \
4870 ({ \
4871 	typeof(ptr) __ai_ptr = (ptr); \
4872 	typeof(oldp) __ai_oldp = (oldp); \
4873 	kcsan_mb(); \
4874 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4875 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4876 	raw_try_cmpxchg64(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4877 })
4878 
4879 #define try_cmpxchg64_acquire(ptr, oldp, ...) \
4880 ({ \
4881 	typeof(ptr) __ai_ptr = (ptr); \
4882 	typeof(oldp) __ai_oldp = (oldp); \
4883 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4884 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4885 	raw_try_cmpxchg64_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4886 })
4887 
4888 #define try_cmpxchg64_release(ptr, oldp, ...) \
4889 ({ \
4890 	typeof(ptr) __ai_ptr = (ptr); \
4891 	typeof(oldp) __ai_oldp = (oldp); \
4892 	kcsan_release(); \
4893 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4894 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4895 	raw_try_cmpxchg64_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4896 })
4897 
4898 #define try_cmpxchg64_relaxed(ptr, oldp, ...) \
4899 ({ \
4900 	typeof(ptr) __ai_ptr = (ptr); \
4901 	typeof(oldp) __ai_oldp = (oldp); \
4902 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4903 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4904 	raw_try_cmpxchg64_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4905 })
4906 
4907 #define try_cmpxchg128(ptr, oldp, ...) \
4908 ({ \
4909 	typeof(ptr) __ai_ptr = (ptr); \
4910 	typeof(oldp) __ai_oldp = (oldp); \
4911 	kcsan_mb(); \
4912 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4913 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4914 	raw_try_cmpxchg128(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4915 })
4916 
4917 #define try_cmpxchg128_acquire(ptr, oldp, ...) \
4918 ({ \
4919 	typeof(ptr) __ai_ptr = (ptr); \
4920 	typeof(oldp) __ai_oldp = (oldp); \
4921 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4922 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4923 	raw_try_cmpxchg128_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4924 })
4925 
4926 #define try_cmpxchg128_release(ptr, oldp, ...) \
4927 ({ \
4928 	typeof(ptr) __ai_ptr = (ptr); \
4929 	typeof(oldp) __ai_oldp = (oldp); \
4930 	kcsan_release(); \
4931 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4932 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4933 	raw_try_cmpxchg128_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4934 })
4935 
4936 #define try_cmpxchg128_relaxed(ptr, oldp, ...) \
4937 ({ \
4938 	typeof(ptr) __ai_ptr = (ptr); \
4939 	typeof(oldp) __ai_oldp = (oldp); \
4940 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4941 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4942 	raw_try_cmpxchg128_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4943 })
4944 
4945 #define cmpxchg_local(ptr, ...) \
4946 ({ \
4947 	typeof(ptr) __ai_ptr = (ptr); \
4948 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4949 	raw_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
4950 })
4951 
4952 #define cmpxchg64_local(ptr, ...) \
4953 ({ \
4954 	typeof(ptr) __ai_ptr = (ptr); \
4955 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4956 	raw_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
4957 })
4958 
4959 #define cmpxchg128_local(ptr, ...) \
4960 ({ \
4961 	typeof(ptr) __ai_ptr = (ptr); \
4962 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4963 	raw_cmpxchg128_local(__ai_ptr, __VA_ARGS__); \
4964 })
4965 
4966 #define sync_cmpxchg(ptr, ...) \
4967 ({ \
4968 	typeof(ptr) __ai_ptr = (ptr); \
4969 	kcsan_mb(); \
4970 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4971 	raw_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
4972 })
4973 
4974 #define try_cmpxchg_local(ptr, oldp, ...) \
4975 ({ \
4976 	typeof(ptr) __ai_ptr = (ptr); \
4977 	typeof(oldp) __ai_oldp = (oldp); \
4978 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4979 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4980 	raw_try_cmpxchg_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4981 })
4982 
4983 #define try_cmpxchg64_local(ptr, oldp, ...) \
4984 ({ \
4985 	typeof(ptr) __ai_ptr = (ptr); \
4986 	typeof(oldp) __ai_oldp = (oldp); \
4987 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4988 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4989 	raw_try_cmpxchg64_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4990 })
4991 
4992 #define try_cmpxchg128_local(ptr, oldp, ...) \
4993 ({ \
4994 	typeof(ptr) __ai_ptr = (ptr); \
4995 	typeof(oldp) __ai_oldp = (oldp); \
4996 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4997 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4998 	raw_try_cmpxchg128_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4999 })
5000 
5001 
5002 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
5003 // 5f7bb165838dcca35625e7d4b42540b790abd19b