1e3d18ceeSMark Rutland // SPDX-License-Identifier: GPL-2.0
2e3d18ceeSMark Rutland
3e3d18ceeSMark Rutland // Generated by scripts/atomic/gen-atomic-instrumented.sh
4e3d18ceeSMark Rutland // DO NOT MODIFY THIS FILE DIRECTLY
5e3d18ceeSMark Rutland
/*
 * This file provides atomic operations with explicit instrumentation (e.g.
 * KASAN, KCSAN), which should be used unless it is necessary to avoid
 * instrumentation. Where it is necessary to avoid instrumentation, the
 * raw_atomic*() operations should be used.
 */
12e3d18ceeSMark Rutland #ifndef _LINUX_ATOMIC_INSTRUMENTED_H
13e3d18ceeSMark Rutland #define _LINUX_ATOMIC_INSTRUMENTED_H
14e3d18ceeSMark Rutland
15e3d18ceeSMark Rutland #include <linux/build_bug.h>
16e3d18ceeSMark Rutland #include <linux/compiler.h>
17e3d18ceeSMark Rutland #include <linux/instrumented.h>
18e3d18ceeSMark Rutland
/**
 * atomic_read() - atomic load with relaxed ordering
 * @v: pointer to atomic_t
 *
 * Atomically loads the value of @v with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_read() there.
 *
 * Return: The value loaded from @v.
 */
static __always_inline int
atomic_read(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));	/* KASAN/KCSAN check of the load */
	return raw_atomic_read(v);
}
35e3d18ceeSMark Rutland
/**
 * atomic_read_acquire() - atomic load with acquire ordering
 * @v: pointer to atomic_t
 *
 * Atomically loads the value of @v with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_read_acquire() there.
 *
 * Return: The value loaded from @v.
 */
static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));	/* KASAN/KCSAN check of the load */
	return raw_atomic_read_acquire(v);
}
52e3d18ceeSMark Rutland
/**
 * atomic_set() - atomic set with relaxed ordering
 * @v: pointer to atomic_t
 * @i: int value to assign
 *
 * Atomically sets @v to @i with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_set() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_set(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));	/* KASAN/KCSAN check of the store */
	raw_atomic_set(v, i);
}
70e3d18ceeSMark Rutland
/**
 * atomic_set_release() - atomic set with release ordering
 * @v: pointer to atomic_t
 * @i: int value to assign
 *
 * Atomically sets @v to @i with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_set_release() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	kcsan_release();	/* tell KCSAN this op has release semantics */
	instrument_atomic_write(v, sizeof(*v));	/* KASAN/KCSAN check of the store */
	raw_atomic_set_release(v, i);
}
89e3d18ceeSMark Rutland
/**
 * atomic_add() - atomic add with relaxed ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_add(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	raw_atomic_add(i, v);
}
107e3d18ceeSMark Rutland
/**
 * atomic_add_return() - atomic add with full ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	kcsan_mb();	/* tell KCSAN about the full barrier this op implies */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_add_return(i, v);
}
126e3d18ceeSMark Rutland
/**
 * atomic_add_return_acquire() - atomic add with acquire ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_add_return_acquire(i, v);
}
144e3d18ceeSMark Rutland
/**
 * atomic_add_return_release() - atomic add with release ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	kcsan_release();	/* tell KCSAN this op has release semantics */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_add_return_release(i, v);
}
163e3d18ceeSMark Rutland
/**
 * atomic_add_return_relaxed() - atomic add with relaxed ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_add_return_relaxed(i, v);
}
181e3d18ceeSMark Rutland
/**
 * atomic_fetch_add() - atomic add with full ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_add() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	kcsan_mb();	/* tell KCSAN about the full barrier this op implies */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_add(i, v);
}
200e3d18ceeSMark Rutland
/**
 * atomic_fetch_add_acquire() - atomic add with acquire ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_add_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_add_acquire(i, v);
}
218e3d18ceeSMark Rutland
/**
 * atomic_fetch_add_release() - atomic add with release ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_add_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	kcsan_release();	/* tell KCSAN this op has release semantics */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_add_release(i, v);
}
237e3d18ceeSMark Rutland
/**
 * atomic_fetch_add_relaxed() - atomic add with relaxed ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_add_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_add_relaxed(i, v);
}
255e3d18ceeSMark Rutland
/**
 * atomic_sub() - atomic subtract with relaxed ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_sub() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_sub(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	raw_atomic_sub(i, v);
}
273e3d18ceeSMark Rutland
/**
 * atomic_sub_return() - atomic subtract with full ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_sub_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	kcsan_mb();	/* tell KCSAN about the full barrier this op implies */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_sub_return(i, v);
}
292e3d18ceeSMark Rutland
/**
 * atomic_sub_return_acquire() - atomic subtract with acquire ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_sub_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_sub_return_acquire(i, v);
}
310e3d18ceeSMark Rutland
/**
 * atomic_sub_return_release() - atomic subtract with release ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_sub_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	kcsan_release();	/* tell KCSAN this op has release semantics */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_sub_return_release(i, v);
}
329e3d18ceeSMark Rutland
/**
 * atomic_sub_return_relaxed() - atomic subtract with relaxed ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_sub_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_sub_return_relaxed(i, v);
}
347e3d18ceeSMark Rutland
/**
 * atomic_fetch_sub() - atomic subtract with full ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_sub() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	kcsan_mb();	/* tell KCSAN about the full barrier this op implies */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_sub(i, v);
}
366e3d18ceeSMark Rutland
/**
 * atomic_fetch_sub_acquire() - atomic subtract with acquire ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_sub_acquire(i, v);
}
384e3d18ceeSMark Rutland
/**
 * atomic_fetch_sub_release() - atomic subtract with release ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	kcsan_release();	/* tell KCSAN this op has release semantics */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_sub_release(i, v);
}
403e3d18ceeSMark Rutland
/**
 * atomic_fetch_sub_relaxed() - atomic subtract with relaxed ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_sub_relaxed(i, v);
}
421e3d18ceeSMark Rutland
/**
 * atomic_inc() - atomic increment with relaxed ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_inc() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_inc(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	raw_atomic_inc(v);
}
438e3d18ceeSMark Rutland
/**
 * atomic_inc_return() - atomic increment with full ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_inc_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	kcsan_mb();	/* tell KCSAN about the full barrier this op implies */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_inc_return(v);
}
456e3d18ceeSMark Rutland
/**
 * atomic_inc_return_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_inc_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_inc_return_acquire(v);
}
473e3d18ceeSMark Rutland
/**
 * atomic_inc_return_release() - atomic increment with release ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_inc_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	kcsan_release();	/* tell KCSAN this op has release semantics */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_inc_return_release(v);
}
491e3d18ceeSMark Rutland
/**
 * atomic_inc_return_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_inc_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_inc_return_relaxed(v);
}
508e3d18ceeSMark Rutland
/**
 * atomic_fetch_inc() - atomic increment with full ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_inc() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	kcsan_mb();	/* tell KCSAN about the full barrier this op implies */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_inc(v);
}
526e3d18ceeSMark Rutland
/**
 * atomic_fetch_inc_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_inc_acquire(v);
}
543e3d18ceeSMark Rutland
/**
 * atomic_fetch_inc_release() - atomic increment with release ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	kcsan_release();	/* tell KCSAN this op has release semantics */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_inc_release(v);
}
561e3d18ceeSMark Rutland
/**
 * atomic_fetch_inc_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_fetch_inc_relaxed(v);
}
578e3d18ceeSMark Rutland
/**
 * atomic_dec() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_dec() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_dec(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	raw_atomic_dec(v);
}
595e3d18ceeSMark Rutland
/**
 * atomic_dec_return() - atomic decrement with full ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_dec_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	kcsan_mb();	/* tell KCSAN about the full barrier this op implies */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_dec_return(v);
}
613e3d18ceeSMark Rutland
/**
 * atomic_dec_return_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_dec_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN check of the RMW */
	return raw_atomic_dec_return_acquire(v);
}
630e3d18ceeSMark Rutland
/**
 * atomic_dec_return_release() - atomic decrement with release ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_dec_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	kcsan_release();	/* KCSAN: model the release ordering of this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_dec_return_release(v);
}
648e3d18ceeSMark Rutland
/**
 * atomic_dec_return_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_dec_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_dec_return_relaxed(v);
}
665e3d18ceeSMark Rutland
/**
 * atomic_fetch_dec() - atomic decrement with full ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_dec() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	kcsan_mb();	/* KCSAN: model the full barrier implied by this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_dec(v);
}
683e3d18ceeSMark Rutland
/**
 * atomic_fetch_dec_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_dec_acquire(v);
}
700e3d18ceeSMark Rutland
/**
 * atomic_fetch_dec_release() - atomic decrement with release ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	kcsan_release();	/* KCSAN: model the release ordering of this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_dec_release(v);
}
718e3d18ceeSMark Rutland
/**
 * atomic_fetch_dec_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_dec_relaxed(v);
}
735e3d18ceeSMark Rutland
/**
 * atomic_and() - atomic bitwise AND with relaxed ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_and() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_and(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	raw_atomic_and(i, v);
}
753e3d18ceeSMark Rutland
/**
 * atomic_fetch_and() - atomic bitwise AND with full ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_and() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	kcsan_mb();	/* KCSAN: model the full barrier implied by this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_and(i, v);
}
772e3d18ceeSMark Rutland
/**
 * atomic_fetch_and_acquire() - atomic bitwise AND with acquire ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_and_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_and_acquire(i, v);
}
790e3d18ceeSMark Rutland
/**
 * atomic_fetch_and_release() - atomic bitwise AND with release ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_and_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	kcsan_release();	/* KCSAN: model the release ordering of this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_and_release(i, v);
}
809e3d18ceeSMark Rutland
/**
 * atomic_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_and_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_and_relaxed(i, v);
}
827e3d18ceeSMark Rutland
/**
 * atomic_andnot() - atomic bitwise AND NOT with relaxed ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_andnot() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	raw_atomic_andnot(i, v);
}
845e3d18ceeSMark Rutland
/**
 * atomic_fetch_andnot() - atomic bitwise AND NOT with full ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & ~@i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	kcsan_mb();	/* KCSAN: model the full barrier implied by this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_andnot(i, v);
}
864e3d18ceeSMark Rutland
/**
 * atomic_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & ~@i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_andnot_acquire(i, v);
}
882e3d18ceeSMark Rutland
/**
 * atomic_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & ~@i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	kcsan_release();	/* KCSAN: model the release ordering of this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_andnot_release(i, v);
}
901e3d18ceeSMark Rutland
/**
 * atomic_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_andnot_relaxed(i, v);
}
919e3d18ceeSMark Rutland
/**
 * atomic_or() - atomic bitwise OR with relaxed ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_or() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_or(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	raw_atomic_or(i, v);
}
937e3d18ceeSMark Rutland
/**
 * atomic_fetch_or() - atomic bitwise OR with full ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v | @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_or() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	kcsan_mb();	/* KCSAN: model the full barrier implied by this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_or(i, v);
}
956e3d18ceeSMark Rutland
/**
 * atomic_fetch_or_acquire() - atomic bitwise OR with acquire ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v | @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_or_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_or_acquire(i, v);
}
974e3d18ceeSMark Rutland
/**
 * atomic_fetch_or_release() - atomic bitwise OR with release ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v | @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_or_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	kcsan_release();	/* KCSAN: model the release ordering of this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_or_release(i, v);
}
993e3d18ceeSMark Rutland
/**
 * atomic_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_or_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_or_relaxed(i, v);
}
1011e3d18ceeSMark Rutland
/**
 * atomic_xor() - atomic bitwise XOR with relaxed ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_xor() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_xor(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	raw_atomic_xor(i, v);
}
1029e3d18ceeSMark Rutland
/**
 * atomic_fetch_xor() - atomic bitwise XOR with full ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v ^ @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_xor() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	kcsan_mb();	/* KCSAN: model the full barrier implied by this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_xor(i, v);
}
1048e3d18ceeSMark Rutland
/**
 * atomic_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v ^ @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_xor_acquire(i, v);
}
1066e3d18ceeSMark Rutland
/**
 * atomic_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	kcsan_release();	/* KCSAN: model the release ordering of this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_xor_release(i, v);
}
1085e3d18ceeSMark Rutland
/**
 * atomic_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: int value
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_fetch_xor_relaxed(i, v);
}
1103e3d18ceeSMark Rutland
/**
 * atomic_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic_t
 * @new: int value to assign
 *
 * Atomically updates @v to @new with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_xchg() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_xchg(atomic_t *v, int new)
{
	kcsan_mb();	/* KCSAN: model the full barrier implied by this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_xchg(v, new);
}
1122e3d18ceeSMark Rutland
/**
 * atomic_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic_t
 * @new: int value to assign
 *
 * Atomically updates @v to @new with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_xchg_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_xchg_acquire(v, new);
}
1140e3d18ceeSMark Rutland
/**
 * atomic_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic_t
 * @new: int value to assign
 *
 * Atomically updates @v to @new with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_xchg_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_xchg_release(atomic_t *v, int new)
{
	kcsan_release();	/* KCSAN: model the release ordering of this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_xchg_release(v, new);
}
1159e3d18ceeSMark Rutland
/**
 * atomic_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic_t
 * @new: int value to assign
 *
 * Atomically updates @v to @new with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_xchg_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_xchg_relaxed(v, new);
}
1177e3d18ceeSMark Rutland
/**
 * atomic_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_t
 * @old: int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_cmpxchg() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kcsan_mb();	/* KCSAN: model the full barrier implied by this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_cmpxchg(v, old, new);
}
1197e3d18ceeSMark Rutland
/**
 * atomic_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_t
 * @old: int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_cmpxchg_acquire(v, old, new);
}
1216e3d18ceeSMark Rutland
/**
 * atomic_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_t
 * @old: int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	kcsan_release();	/* KCSAN: model the release ordering of this op */
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_cmpxchg_release(v, old, new);
}
1236e3d18ceeSMark Rutland
/**
 * atomic_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_t
 * @old: int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));	/* KASAN/KCSAN: report the RMW access to @v */
	return raw_atomic_cmpxchg_relaxed(v, old, new);
}
1255e3d18ceeSMark Rutland
/**
 * atomic_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_t
 * @old: pointer to int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic_try_cmpxchg(v, old, new);
}
1277e3d18ceeSMark Rutland
/**
 * atomic_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_t
 * @old: pointer to int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_acquire() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	/* KASAN/KCSAN: check both the atomic and the @old access for races */
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic_try_cmpxchg_acquire(v, old, new);
}
1298e3d18ceeSMark Rutland
/**
 * atomic_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_t
 * @old: pointer to int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_release() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	/* note the release ordering for KCSAN before checking the accesses */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic_try_cmpxchg_release(v, old, new);
}
1320e3d18ceeSMark Rutland
/**
 * atomic_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_t
 * @old: pointer to int value to compare with
 * @new: int value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_relaxed() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	/* KASAN/KCSAN: check both the atomic and the @old access for races */
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic_try_cmpxchg_relaxed(v, old, new);
}
1341e3d18ceeSMark Rutland
/**
 * atomic_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: int value to subtract
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_sub_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_sub_and_test(i, v);
}
1360e3d18ceeSMark Rutland
/**
 * atomic_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_dec_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_dec_and_test(v);
}
1378e3d18ceeSMark Rutland
/**
 * atomic_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_inc_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_inc_and_test(v);
}
1396e3d18ceeSMark Rutland
/**
 * atomic_add_negative() - atomic add and test if negative with full ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_negative() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_add_negative(i, v);
}
1415e3d18ceeSMark Rutland
/**
 * atomic_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_negative_acquire() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic_add_negative_acquire(int i, atomic_t *v)
{
	/* KASAN/KCSAN: check the atomic read-modify-write for races */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_add_negative_acquire(i, v);
}
1433e5ab9effSThomas Gleixner
/**
 * atomic_add_negative_release() - atomic add and test if negative with release ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_negative_release() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic_add_negative_release(int i, atomic_t *v)
{
	/* note the release ordering for KCSAN before checking the access */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_add_negative_release(i, v);
}
1452e5ab9effSThomas Gleixner
/**
 * atomic_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: int value to add
 * @v: pointer to atomic_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_negative_relaxed() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic_add_negative_relaxed(int i, atomic_t *v)
{
	/* KASAN/KCSAN: check the atomic read-modify-write for races */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_add_negative_relaxed(i, v);
}
1470e5ab9effSThomas Gleixner
/**
 * atomic_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_t
 * @a: int value to add
 * @u: int value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_fetch_add_unless() there.
 *
 * Return: The original value of @v.
 */
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_fetch_add_unless(v, a, u);
}
1490e3d18ceeSMark Rutland
/**
 * atomic_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_t
 * @a: int value to add
 * @u: int value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_add_unless() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_add_unless(v, a, u);
}
1510e3d18ceeSMark Rutland
/**
 * atomic_inc_not_zero() - atomic increment unless zero with full ordering
 * @v: pointer to atomic_t
 *
 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_inc_not_zero() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_inc_not_zero(v);
}
1528e3d18ceeSMark Rutland
/**
 * atomic_inc_unless_negative() - atomic increment unless negative with full ordering
 * @v: pointer to atomic_t
 *
 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_inc_unless_negative() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_inc_unless_negative(v);
}
1546e3d18ceeSMark Rutland
/**
 * atomic_dec_unless_positive() - atomic decrement unless positive with full ordering
 * @v: pointer to atomic_t
 *
 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_dec_unless_positive() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_dec_unless_positive(v);
}
1564e3d18ceeSMark Rutland
/**
 * atomic_dec_if_positive() - atomic decrement if positive with full ordering
 * @v: pointer to atomic_t
 *
 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_dec_if_positive() there.
 *
 * Return: The old value of (@v - 1), regardless of whether @v was updated.
 */
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_dec_if_positive(v);
}
1582e3d18ceeSMark Rutland
/**
 * atomic64_read() - atomic load with relaxed ordering
 * @v: pointer to atomic64_t
 *
 * Atomically loads the value of @v with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_read() there.
 *
 * Return: The value loaded from @v.
 */
static __always_inline s64
atomic64_read(const atomic64_t *v)
{
	/* KASAN/KCSAN: check the atomic read for races */
	instrument_atomic_read(v, sizeof(*v));
	return raw_atomic64_read(v);
}
1599e3d18ceeSMark Rutland
/**
 * atomic64_read_acquire() - atomic load with acquire ordering
 * @v: pointer to atomic64_t
 *
 * Atomically loads the value of @v with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_read_acquire() there.
 *
 * Return: The value loaded from @v.
 */
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	/* KASAN/KCSAN: check the atomic read for races */
	instrument_atomic_read(v, sizeof(*v));
	return raw_atomic64_read_acquire(v);
}
1616e3d18ceeSMark Rutland
/**
 * atomic64_set() - atomic set with relaxed ordering
 * @v: pointer to atomic64_t
 * @i: s64 value to assign
 *
 * Atomically sets @v to @i with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_set() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
{
	/* KASAN/KCSAN: check the atomic write for races */
	instrument_atomic_write(v, sizeof(*v));
	raw_atomic64_set(v, i);
}
1634e3d18ceeSMark Rutland
/**
 * atomic64_set_release() - atomic set with release ordering
 * @v: pointer to atomic64_t
 * @i: s64 value to assign
 *
 * Atomically sets @v to @i with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_set_release() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	/* note the release ordering for KCSAN before checking the access */
	kcsan_release();
	instrument_atomic_write(v, sizeof(*v));
	raw_atomic64_set_release(v, i);
}
1653e3d18ceeSMark Rutland
/**
 * atomic64_add() - atomic add with relaxed ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
{
	/* KASAN/KCSAN: check the atomic read-modify-write for races */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic64_add(i, v);
}
1671e3d18ceeSMark Rutland
/**
 * atomic64_add_return() - atomic add with full ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_return(i, v);
}
1690e3d18ceeSMark Rutland
/**
 * atomic64_add_return_acquire() - atomic add with acquire ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	/* KASAN/KCSAN: check the atomic read-modify-write for races */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_return_acquire(i, v);
}
1708e3d18ceeSMark Rutland
/**
 * atomic64_add_return_release() - atomic add with release ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	/* note the release ordering for KCSAN before checking the access */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_return_release(i, v);
}
1727e3d18ceeSMark Rutland
/**
 * atomic64_add_return_relaxed() - atomic add with relaxed ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	/* KASAN/KCSAN: check the atomic read-modify-write for races */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_return_relaxed(i, v);
}
1745e3d18ceeSMark Rutland
/**
 * atomic64_fetch_add() - atomic add with full ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_add() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	/* note the full barrier of this fully-ordered op for KCSAN */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_add(i, v);
}
1764e3d18ceeSMark Rutland
/**
 * atomic64_fetch_add_acquire() - atomic add with acquire ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	/* KASAN/KCSAN: check the atomic read-modify-write for races */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_add_acquire(i, v);
}
1782e3d18ceeSMark Rutland
/**
 * atomic64_fetch_add_release() - atomic add with release ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	/* note the release ordering for KCSAN before checking the access */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_add_release(i, v);
}
1801e3d18ceeSMark Rutland
/**
 * atomic64_fetch_add_relaxed() - atomic add with relaxed ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	/* KASAN/KCSAN: check the atomic read-modify-write for races */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_add_relaxed(i, v);
}
1819e3d18ceeSMark Rutland
1820ad811070SMark Rutland /**
1821ad811070SMark Rutland * atomic64_sub() - atomic subtract with relaxed ordering
1822ad811070SMark Rutland * @i: s64 value to subtract
1823ad811070SMark Rutland * @v: pointer to atomic64_t
1824ad811070SMark Rutland *
1825ad811070SMark Rutland * Atomically updates @v to (@v - @i) with relaxed ordering.
1826ad811070SMark Rutland *
1827ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_sub() there.
1828ad811070SMark Rutland *
1829ad811070SMark Rutland * Return: Nothing.
1830ad811070SMark Rutland */
1831e3d18ceeSMark Rutland static __always_inline void
atomic64_sub(s64 i,atomic64_t * v)1832e3d18ceeSMark Rutland atomic64_sub(s64 i, atomic64_t *v)
1833e3d18ceeSMark Rutland {
1834e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1835c9268ac6SMark Rutland raw_atomic64_sub(i, v); /* uninstrumented op does the real work */
1836e3d18ceeSMark Rutland }
1837e3d18ceeSMark Rutland
1838ad811070SMark Rutland /**
1839ad811070SMark Rutland * atomic64_sub_return() - atomic subtract with full ordering
1840ad811070SMark Rutland * @i: s64 value to subtract
1841ad811070SMark Rutland * @v: pointer to atomic64_t
1842ad811070SMark Rutland *
1843ad811070SMark Rutland * Atomically updates @v to (@v - @i) with full ordering.
1844ad811070SMark Rutland *
1845ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_sub_return() there.
1846ad811070SMark Rutland *
1847ad811070SMark Rutland * Return: The updated value of @v.
1848ad811070SMark Rutland */
1849e3d18ceeSMark Rutland static __always_inline s64
atomic64_sub_return(s64 i,atomic64_t * v)1850e3d18ceeSMark Rutland atomic64_sub_return(s64 i, atomic64_t *v)
1851e3d18ceeSMark Rutland {
1852e87c4f66SMarco Elver kcsan_mb(); /* KCSAN: model this op's full-barrier semantics before the access check */
1853e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1854c9268ac6SMark Rutland return raw_atomic64_sub_return(i, v); /* uninstrumented op does the real work */
1855e3d18ceeSMark Rutland }
1856e3d18ceeSMark Rutland
1857ad811070SMark Rutland /**
1858ad811070SMark Rutland * atomic64_sub_return_acquire() - atomic subtract with acquire ordering
1859ad811070SMark Rutland * @i: s64 value to subtract
1860ad811070SMark Rutland * @v: pointer to atomic64_t
1861ad811070SMark Rutland *
1862ad811070SMark Rutland * Atomically updates @v to (@v - @i) with acquire ordering.
1863ad811070SMark Rutland *
1864ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_sub_return_acquire() there.
1865ad811070SMark Rutland *
1866ad811070SMark Rutland * Return: The updated value of @v.
1867ad811070SMark Rutland */
1868e3d18ceeSMark Rutland static __always_inline s64
atomic64_sub_return_acquire(s64 i,atomic64_t * v)1869e3d18ceeSMark Rutland atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1870e3d18ceeSMark Rutland {
1871e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1872c9268ac6SMark Rutland return raw_atomic64_sub_return_acquire(i, v); /* acquire: no explicit kcsan_*() hint emitted */
1873e3d18ceeSMark Rutland }
1874e3d18ceeSMark Rutland
1875ad811070SMark Rutland /**
1876ad811070SMark Rutland * atomic64_sub_return_release() - atomic subtract with release ordering
1877ad811070SMark Rutland * @i: s64 value to subtract
1878ad811070SMark Rutland * @v: pointer to atomic64_t
1879ad811070SMark Rutland *
1880ad811070SMark Rutland * Atomically updates @v to (@v - @i) with release ordering.
1881ad811070SMark Rutland *
1882ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_sub_return_release() there.
1883ad811070SMark Rutland *
1884ad811070SMark Rutland * Return: The updated value of @v.
1885ad811070SMark Rutland */
1886e3d18ceeSMark Rutland static __always_inline s64
atomic64_sub_return_release(s64 i,atomic64_t * v)1887e3d18ceeSMark Rutland atomic64_sub_return_release(s64 i, atomic64_t *v)
1888e3d18ceeSMark Rutland {
1889e87c4f66SMarco Elver kcsan_release(); /* KCSAN: model this op's release semantics before the access check */
1890e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1891c9268ac6SMark Rutland return raw_atomic64_sub_return_release(i, v); /* uninstrumented op does the real work */
1892e3d18ceeSMark Rutland }
1893e3d18ceeSMark Rutland
1894ad811070SMark Rutland /**
1895ad811070SMark Rutland * atomic64_sub_return_relaxed() - atomic subtract with relaxed ordering
1896ad811070SMark Rutland * @i: s64 value to subtract
1897ad811070SMark Rutland * @v: pointer to atomic64_t
1898ad811070SMark Rutland *
1899ad811070SMark Rutland * Atomically updates @v to (@v - @i) with relaxed ordering.
1900ad811070SMark Rutland *
1901ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_sub_return_relaxed() there.
1902ad811070SMark Rutland *
1903ad811070SMark Rutland * Return: The updated value of @v.
1904ad811070SMark Rutland */
1905e3d18ceeSMark Rutland static __always_inline s64
atomic64_sub_return_relaxed(s64 i,atomic64_t * v)1906e3d18ceeSMark Rutland atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
1907e3d18ceeSMark Rutland {
1908e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1909c9268ac6SMark Rutland return raw_atomic64_sub_return_relaxed(i, v); /* relaxed: no kcsan_*() ordering hint emitted */
1910e3d18ceeSMark Rutland }
1911e3d18ceeSMark Rutland
1912ad811070SMark Rutland /**
1913ad811070SMark Rutland * atomic64_fetch_sub() - atomic subtract with full ordering
1914ad811070SMark Rutland * @i: s64 value to subtract
1915ad811070SMark Rutland * @v: pointer to atomic64_t
1916ad811070SMark Rutland *
1917ad811070SMark Rutland * Atomically updates @v to (@v - @i) with full ordering.
1918ad811070SMark Rutland *
1919ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub() there.
1920ad811070SMark Rutland *
1921ad811070SMark Rutland * Return: The original value of @v.
1922ad811070SMark Rutland */
1923e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_sub(s64 i,atomic64_t * v)1924e3d18ceeSMark Rutland atomic64_fetch_sub(s64 i, atomic64_t *v)
1925e3d18ceeSMark Rutland {
1926e87c4f66SMarco Elver kcsan_mb(); /* KCSAN: model this op's full-barrier semantics before the access check */
1927e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1928c9268ac6SMark Rutland return raw_atomic64_fetch_sub(i, v); /* uninstrumented op does the real work */
1929e3d18ceeSMark Rutland }
1930e3d18ceeSMark Rutland
1931ad811070SMark Rutland /**
1932ad811070SMark Rutland * atomic64_fetch_sub_acquire() - atomic subtract with acquire ordering
1933ad811070SMark Rutland * @i: s64 value to subtract
1934ad811070SMark Rutland * @v: pointer to atomic64_t
1935ad811070SMark Rutland *
1936ad811070SMark Rutland * Atomically updates @v to (@v - @i) with acquire ordering.
1937ad811070SMark Rutland *
1938ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_acquire() there.
1939ad811070SMark Rutland *
1940ad811070SMark Rutland * Return: The original value of @v.
1941ad811070SMark Rutland */
1942e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_sub_acquire(s64 i,atomic64_t * v)1943e3d18ceeSMark Rutland atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1944e3d18ceeSMark Rutland {
1945e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1946c9268ac6SMark Rutland return raw_atomic64_fetch_sub_acquire(i, v); /* acquire: no explicit kcsan_*() hint emitted */
1947e3d18ceeSMark Rutland }
1948e3d18ceeSMark Rutland
1949ad811070SMark Rutland /**
1950ad811070SMark Rutland * atomic64_fetch_sub_release() - atomic subtract with release ordering
1951ad811070SMark Rutland * @i: s64 value to subtract
1952ad811070SMark Rutland * @v: pointer to atomic64_t
1953ad811070SMark Rutland *
1954ad811070SMark Rutland * Atomically updates @v to (@v - @i) with release ordering.
1955ad811070SMark Rutland *
1956ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_release() there.
1957ad811070SMark Rutland *
1958ad811070SMark Rutland * Return: The original value of @v.
1959ad811070SMark Rutland */
1960e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_sub_release(s64 i,atomic64_t * v)1961e3d18ceeSMark Rutland atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1962e3d18ceeSMark Rutland {
1963e87c4f66SMarco Elver kcsan_release(); /* KCSAN: model this op's release semantics before the access check */
1964e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1965c9268ac6SMark Rutland return raw_atomic64_fetch_sub_release(i, v); /* uninstrumented op does the real work */
1966e3d18ceeSMark Rutland }
1967e3d18ceeSMark Rutland
1968ad811070SMark Rutland /**
1969ad811070SMark Rutland * atomic64_fetch_sub_relaxed() - atomic subtract with relaxed ordering
1970ad811070SMark Rutland * @i: s64 value to subtract
1971ad811070SMark Rutland * @v: pointer to atomic64_t
1972ad811070SMark Rutland *
1973ad811070SMark Rutland * Atomically updates @v to (@v - @i) with relaxed ordering.
1974ad811070SMark Rutland *
1975ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_relaxed() there.
1976ad811070SMark Rutland *
1977ad811070SMark Rutland * Return: The original value of @v.
1978ad811070SMark Rutland */
1979e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_sub_relaxed(s64 i,atomic64_t * v)1980e3d18ceeSMark Rutland atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
1981e3d18ceeSMark Rutland {
1982e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
1983c9268ac6SMark Rutland return raw_atomic64_fetch_sub_relaxed(i, v); /* relaxed: no kcsan_*() ordering hint emitted */
1984e3d18ceeSMark Rutland }
1985e3d18ceeSMark Rutland
1986ad811070SMark Rutland /**
1987ad811070SMark Rutland * atomic64_inc() - atomic increment with relaxed ordering
1988ad811070SMark Rutland * @v: pointer to atomic64_t
1989ad811070SMark Rutland *
1990ad811070SMark Rutland * Atomically updates @v to (@v + 1) with relaxed ordering.
1991ad811070SMark Rutland *
1992ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_inc() there.
1993ad811070SMark Rutland *
1994ad811070SMark Rutland * Return: Nothing.
1995ad811070SMark Rutland */
1996e3d18ceeSMark Rutland static __always_inline void
atomic64_inc(atomic64_t * v)1997e3d18ceeSMark Rutland atomic64_inc(atomic64_t *v)
1998e3d18ceeSMark Rutland {
1999e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2000c9268ac6SMark Rutland raw_atomic64_inc(v); /* uninstrumented op does the real work */
2001e3d18ceeSMark Rutland }
2002e3d18ceeSMark Rutland
2003ad811070SMark Rutland /**
2004ad811070SMark Rutland * atomic64_inc_return() - atomic increment with full ordering
2005ad811070SMark Rutland * @v: pointer to atomic64_t
2006ad811070SMark Rutland *
2007ad811070SMark Rutland * Atomically updates @v to (@v + 1) with full ordering.
2008ad811070SMark Rutland *
2009ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_inc_return() there.
2010ad811070SMark Rutland *
2011ad811070SMark Rutland * Return: The updated value of @v.
2012ad811070SMark Rutland */
2013e3d18ceeSMark Rutland static __always_inline s64
atomic64_inc_return(atomic64_t * v)2014e3d18ceeSMark Rutland atomic64_inc_return(atomic64_t *v)
2015e3d18ceeSMark Rutland {
2016e87c4f66SMarco Elver kcsan_mb(); /* KCSAN: model this op's full-barrier semantics before the access check */
2017e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2018c9268ac6SMark Rutland return raw_atomic64_inc_return(v); /* uninstrumented op does the real work */
2019e3d18ceeSMark Rutland }
2020e3d18ceeSMark Rutland
2021ad811070SMark Rutland /**
2022ad811070SMark Rutland * atomic64_inc_return_acquire() - atomic increment with acquire ordering
2023ad811070SMark Rutland * @v: pointer to atomic64_t
2024ad811070SMark Rutland *
2025ad811070SMark Rutland * Atomically updates @v to (@v + 1) with acquire ordering.
2026ad811070SMark Rutland *
2027ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_inc_return_acquire() there.
2028ad811070SMark Rutland *
2029ad811070SMark Rutland * Return: The updated value of @v.
2030ad811070SMark Rutland */
2031e3d18ceeSMark Rutland static __always_inline s64
atomic64_inc_return_acquire(atomic64_t * v)2032e3d18ceeSMark Rutland atomic64_inc_return_acquire(atomic64_t *v)
2033e3d18ceeSMark Rutland {
2034e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2035c9268ac6SMark Rutland return raw_atomic64_inc_return_acquire(v); /* acquire: no explicit kcsan_*() hint emitted */
2036e3d18ceeSMark Rutland }
2037e3d18ceeSMark Rutland
2038ad811070SMark Rutland /**
2039ad811070SMark Rutland * atomic64_inc_return_release() - atomic increment with release ordering
2040ad811070SMark Rutland * @v: pointer to atomic64_t
2041ad811070SMark Rutland *
2042ad811070SMark Rutland * Atomically updates @v to (@v + 1) with release ordering.
2043ad811070SMark Rutland *
2044ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_inc_return_release() there.
2045ad811070SMark Rutland *
2046ad811070SMark Rutland * Return: The updated value of @v.
2047ad811070SMark Rutland */
2048e3d18ceeSMark Rutland static __always_inline s64
atomic64_inc_return_release(atomic64_t * v)2049e3d18ceeSMark Rutland atomic64_inc_return_release(atomic64_t *v)
2050e3d18ceeSMark Rutland {
2051e87c4f66SMarco Elver kcsan_release(); /* KCSAN: model this op's release semantics before the access check */
2052e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2053c9268ac6SMark Rutland return raw_atomic64_inc_return_release(v); /* uninstrumented op does the real work */
2054e3d18ceeSMark Rutland }
2055e3d18ceeSMark Rutland
2056ad811070SMark Rutland /**
2057ad811070SMark Rutland * atomic64_inc_return_relaxed() - atomic increment with relaxed ordering
2058ad811070SMark Rutland * @v: pointer to atomic64_t
2059ad811070SMark Rutland *
2060ad811070SMark Rutland * Atomically updates @v to (@v + 1) with relaxed ordering.
2061ad811070SMark Rutland *
2062ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_inc_return_relaxed() there.
2063ad811070SMark Rutland *
2064ad811070SMark Rutland * Return: The updated value of @v.
2065ad811070SMark Rutland */
2066e3d18ceeSMark Rutland static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t * v)2067e3d18ceeSMark Rutland atomic64_inc_return_relaxed(atomic64_t *v)
2068e3d18ceeSMark Rutland {
2069e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2070c9268ac6SMark Rutland return raw_atomic64_inc_return_relaxed(v); /* relaxed: no kcsan_*() ordering hint emitted */
2071e3d18ceeSMark Rutland }
2072e3d18ceeSMark Rutland
2073ad811070SMark Rutland /**
2074ad811070SMark Rutland * atomic64_fetch_inc() - atomic increment with full ordering
2075ad811070SMark Rutland * @v: pointer to atomic64_t
2076ad811070SMark Rutland *
2077ad811070SMark Rutland * Atomically updates @v to (@v + 1) with full ordering.
2078ad811070SMark Rutland *
2079ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc() there.
2080ad811070SMark Rutland *
2081ad811070SMark Rutland * Return: The original value of @v.
2082ad811070SMark Rutland */
2083e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_inc(atomic64_t * v)2084e3d18ceeSMark Rutland atomic64_fetch_inc(atomic64_t *v)
2085e3d18ceeSMark Rutland {
2086e87c4f66SMarco Elver kcsan_mb(); /* KCSAN: model this op's full-barrier semantics before the access check */
2087e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2088c9268ac6SMark Rutland return raw_atomic64_fetch_inc(v); /* uninstrumented op does the real work */
2089e3d18ceeSMark Rutland }
2090e3d18ceeSMark Rutland
2091ad811070SMark Rutland /**
2092ad811070SMark Rutland * atomic64_fetch_inc_acquire() - atomic increment with acquire ordering
2093ad811070SMark Rutland * @v: pointer to atomic64_t
2094ad811070SMark Rutland *
2095ad811070SMark Rutland * Atomically updates @v to (@v + 1) with acquire ordering.
2096ad811070SMark Rutland *
2097ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_acquire() there.
2098ad811070SMark Rutland *
2099ad811070SMark Rutland * Return: The original value of @v.
2100ad811070SMark Rutland */
2101e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t * v)2102e3d18ceeSMark Rutland atomic64_fetch_inc_acquire(atomic64_t *v)
2103e3d18ceeSMark Rutland {
2104e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2105c9268ac6SMark Rutland return raw_atomic64_fetch_inc_acquire(v); /* acquire: no explicit kcsan_*() hint emitted */
2106e3d18ceeSMark Rutland }
2107e3d18ceeSMark Rutland
2108ad811070SMark Rutland /**
2109ad811070SMark Rutland * atomic64_fetch_inc_release() - atomic increment with release ordering
2110ad811070SMark Rutland * @v: pointer to atomic64_t
2111ad811070SMark Rutland *
2112ad811070SMark Rutland * Atomically updates @v to (@v + 1) with release ordering.
2113ad811070SMark Rutland *
2114ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_release() there.
2115ad811070SMark Rutland *
2116ad811070SMark Rutland * Return: The original value of @v.
2117ad811070SMark Rutland */
2118e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_inc_release(atomic64_t * v)2119e3d18ceeSMark Rutland atomic64_fetch_inc_release(atomic64_t *v)
2120e3d18ceeSMark Rutland {
2121e87c4f66SMarco Elver kcsan_release(); /* KCSAN: model this op's release semantics before the access check */
2122e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2123c9268ac6SMark Rutland return raw_atomic64_fetch_inc_release(v); /* uninstrumented op does the real work */
2124e3d18ceeSMark Rutland }
2125e3d18ceeSMark Rutland
2126ad811070SMark Rutland /**
2127ad811070SMark Rutland * atomic64_fetch_inc_relaxed() - atomic increment with relaxed ordering
2128ad811070SMark Rutland * @v: pointer to atomic64_t
2129ad811070SMark Rutland *
2130ad811070SMark Rutland * Atomically updates @v to (@v + 1) with relaxed ordering.
2131ad811070SMark Rutland *
2132ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_relaxed() there.
2133ad811070SMark Rutland *
2134ad811070SMark Rutland * Return: The original value of @v.
2135ad811070SMark Rutland */
2136e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t * v)2137e3d18ceeSMark Rutland atomic64_fetch_inc_relaxed(atomic64_t *v)
2138e3d18ceeSMark Rutland {
2139e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2140c9268ac6SMark Rutland return raw_atomic64_fetch_inc_relaxed(v); /* relaxed: no kcsan_*() ordering hint emitted */
2141e3d18ceeSMark Rutland }
2142e3d18ceeSMark Rutland
2143ad811070SMark Rutland /**
2144ad811070SMark Rutland * atomic64_dec() - atomic decrement with relaxed ordering
2145ad811070SMark Rutland * @v: pointer to atomic64_t
2146ad811070SMark Rutland *
2147ad811070SMark Rutland * Atomically updates @v to (@v - 1) with relaxed ordering.
2148ad811070SMark Rutland *
2149ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_dec() there.
2150ad811070SMark Rutland *
2151ad811070SMark Rutland * Return: Nothing.
2152ad811070SMark Rutland */
2153e3d18ceeSMark Rutland static __always_inline void
atomic64_dec(atomic64_t * v)2154e3d18ceeSMark Rutland atomic64_dec(atomic64_t *v)
2155e3d18ceeSMark Rutland {
2156e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2157c9268ac6SMark Rutland raw_atomic64_dec(v); /* uninstrumented op does the real work */
2158e3d18ceeSMark Rutland }
2159e3d18ceeSMark Rutland
2160ad811070SMark Rutland /**
2161ad811070SMark Rutland * atomic64_dec_return() - atomic decrement with full ordering
2162ad811070SMark Rutland * @v: pointer to atomic64_t
2163ad811070SMark Rutland *
2164ad811070SMark Rutland * Atomically updates @v to (@v - 1) with full ordering.
2165ad811070SMark Rutland *
2166ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_dec_return() there.
2167ad811070SMark Rutland *
2168ad811070SMark Rutland * Return: The updated value of @v.
2169ad811070SMark Rutland */
2170e3d18ceeSMark Rutland static __always_inline s64
atomic64_dec_return(atomic64_t * v)2171e3d18ceeSMark Rutland atomic64_dec_return(atomic64_t *v)
2172e3d18ceeSMark Rutland {
2173e87c4f66SMarco Elver kcsan_mb(); /* KCSAN: model this op's full-barrier semantics before the access check */
2174e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2175c9268ac6SMark Rutland return raw_atomic64_dec_return(v); /* uninstrumented op does the real work */
2176e3d18ceeSMark Rutland }
2177e3d18ceeSMark Rutland
2178ad811070SMark Rutland /**
2179ad811070SMark Rutland * atomic64_dec_return_acquire() - atomic decrement with acquire ordering
2180ad811070SMark Rutland * @v: pointer to atomic64_t
2181ad811070SMark Rutland *
2182ad811070SMark Rutland * Atomically updates @v to (@v - 1) with acquire ordering.
2183ad811070SMark Rutland *
2184ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_dec_return_acquire() there.
2185ad811070SMark Rutland *
2186ad811070SMark Rutland * Return: The updated value of @v.
2187ad811070SMark Rutland */
2188e3d18ceeSMark Rutland static __always_inline s64
atomic64_dec_return_acquire(atomic64_t * v)2189e3d18ceeSMark Rutland atomic64_dec_return_acquire(atomic64_t *v)
2190e3d18ceeSMark Rutland {
2191e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2192c9268ac6SMark Rutland return raw_atomic64_dec_return_acquire(v); /* acquire: no explicit kcsan_*() hint emitted */
2193e3d18ceeSMark Rutland }
2194e3d18ceeSMark Rutland
2195ad811070SMark Rutland /**
2196ad811070SMark Rutland * atomic64_dec_return_release() - atomic decrement with release ordering
2197ad811070SMark Rutland * @v: pointer to atomic64_t
2198ad811070SMark Rutland *
2199ad811070SMark Rutland * Atomically updates @v to (@v - 1) with release ordering.
2200ad811070SMark Rutland *
2201ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_dec_return_release() there.
2202ad811070SMark Rutland *
2203ad811070SMark Rutland * Return: The updated value of @v.
2204ad811070SMark Rutland */
2205e3d18ceeSMark Rutland static __always_inline s64
atomic64_dec_return_release(atomic64_t * v)2206e3d18ceeSMark Rutland atomic64_dec_return_release(atomic64_t *v)
2207e3d18ceeSMark Rutland {
2208e87c4f66SMarco Elver kcsan_release(); /* KCSAN: model this op's release semantics before the access check */
2209e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2210c9268ac6SMark Rutland return raw_atomic64_dec_return_release(v); /* uninstrumented op does the real work */
2211e3d18ceeSMark Rutland }
2212e3d18ceeSMark Rutland
2213ad811070SMark Rutland /**
2214ad811070SMark Rutland * atomic64_dec_return_relaxed() - atomic decrement with relaxed ordering
2215ad811070SMark Rutland * @v: pointer to atomic64_t
2216ad811070SMark Rutland *
2217ad811070SMark Rutland * Atomically updates @v to (@v - 1) with relaxed ordering.
2218ad811070SMark Rutland *
2219ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_dec_return_relaxed() there.
2220ad811070SMark Rutland *
2221ad811070SMark Rutland * Return: The updated value of @v.
2222ad811070SMark Rutland */
2223e3d18ceeSMark Rutland static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t * v)2224e3d18ceeSMark Rutland atomic64_dec_return_relaxed(atomic64_t *v)
2225e3d18ceeSMark Rutland {
2226e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2227c9268ac6SMark Rutland return raw_atomic64_dec_return_relaxed(v); /* relaxed: no kcsan_*() ordering hint emitted */
2228e3d18ceeSMark Rutland }
2229e3d18ceeSMark Rutland
2230ad811070SMark Rutland /**
2231ad811070SMark Rutland * atomic64_fetch_dec() - atomic decrement with full ordering
2232ad811070SMark Rutland * @v: pointer to atomic64_t
2233ad811070SMark Rutland *
2234ad811070SMark Rutland * Atomically updates @v to (@v - 1) with full ordering.
2235ad811070SMark Rutland *
2236ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec() there.
2237ad811070SMark Rutland *
2238ad811070SMark Rutland * Return: The original value of @v.
2239ad811070SMark Rutland */
2240e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_dec(atomic64_t * v)2241e3d18ceeSMark Rutland atomic64_fetch_dec(atomic64_t *v)
2242e3d18ceeSMark Rutland {
2243e87c4f66SMarco Elver kcsan_mb(); /* KCSAN: model this op's full-barrier semantics before the access check */
2244e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2245c9268ac6SMark Rutland return raw_atomic64_fetch_dec(v); /* uninstrumented op does the real work */
2246e3d18ceeSMark Rutland }
2247e3d18ceeSMark Rutland
2248ad811070SMark Rutland /**
2249ad811070SMark Rutland * atomic64_fetch_dec_acquire() - atomic decrement with acquire ordering
2250ad811070SMark Rutland * @v: pointer to atomic64_t
2251ad811070SMark Rutland *
2252ad811070SMark Rutland * Atomically updates @v to (@v - 1) with acquire ordering.
2253ad811070SMark Rutland *
2254ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_acquire() there.
2255ad811070SMark Rutland *
2256ad811070SMark Rutland * Return: The original value of @v.
2257ad811070SMark Rutland */
2258e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t * v)2259e3d18ceeSMark Rutland atomic64_fetch_dec_acquire(atomic64_t *v)
2260e3d18ceeSMark Rutland {
2261e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2262c9268ac6SMark Rutland return raw_atomic64_fetch_dec_acquire(v); /* acquire: no explicit kcsan_*() hint emitted */
2263e3d18ceeSMark Rutland }
2264e3d18ceeSMark Rutland
2265ad811070SMark Rutland /**
2266ad811070SMark Rutland * atomic64_fetch_dec_release() - atomic decrement with release ordering
2267ad811070SMark Rutland * @v: pointer to atomic64_t
2268ad811070SMark Rutland *
2269ad811070SMark Rutland * Atomically updates @v to (@v - 1) with release ordering.
2270ad811070SMark Rutland *
2271ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_release() there.
2272ad811070SMark Rutland *
2273ad811070SMark Rutland * Return: The original value of @v.
2274ad811070SMark Rutland */
2275e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_dec_release(atomic64_t * v)2276e3d18ceeSMark Rutland atomic64_fetch_dec_release(atomic64_t *v)
2277e3d18ceeSMark Rutland {
2278e87c4f66SMarco Elver kcsan_release(); /* KCSAN: model this op's release semantics before the access check */
2279e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2280c9268ac6SMark Rutland return raw_atomic64_fetch_dec_release(v); /* uninstrumented op does the real work */
2281e3d18ceeSMark Rutland }
2282e3d18ceeSMark Rutland
2283ad811070SMark Rutland /**
2284ad811070SMark Rutland * atomic64_fetch_dec_relaxed() - atomic decrement with relaxed ordering
2285ad811070SMark Rutland * @v: pointer to atomic64_t
2286ad811070SMark Rutland *
2287ad811070SMark Rutland * Atomically updates @v to (@v - 1) with relaxed ordering.
2288ad811070SMark Rutland *
2289ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_relaxed() there.
2290ad811070SMark Rutland *
2291ad811070SMark Rutland * Return: The original value of @v.
2292ad811070SMark Rutland */
2293e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t * v)2294e3d18ceeSMark Rutland atomic64_fetch_dec_relaxed(atomic64_t *v)
2295e3d18ceeSMark Rutland {
2296e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2297c9268ac6SMark Rutland return raw_atomic64_fetch_dec_relaxed(v); /* relaxed: no kcsan_*() ordering hint emitted */
2298e3d18ceeSMark Rutland }
2299e3d18ceeSMark Rutland
2300ad811070SMark Rutland /**
2301ad811070SMark Rutland * atomic64_and() - atomic bitwise AND with relaxed ordering
2302ad811070SMark Rutland * @i: s64 value
2303ad811070SMark Rutland * @v: pointer to atomic64_t
2304ad811070SMark Rutland *
2305ad811070SMark Rutland * Atomically updates @v to (@v & @i) with relaxed ordering.
2306ad811070SMark Rutland *
2307ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_and() there.
2308ad811070SMark Rutland *
2309ad811070SMark Rutland * Return: Nothing.
2310ad811070SMark Rutland */
2311e3d18ceeSMark Rutland static __always_inline void
atomic64_and(s64 i,atomic64_t * v)2312e3d18ceeSMark Rutland atomic64_and(s64 i, atomic64_t *v)
2313e3d18ceeSMark Rutland {
2314e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2315c9268ac6SMark Rutland raw_atomic64_and(i, v); /* uninstrumented op does the real work */
2316e3d18ceeSMark Rutland }
2317e3d18ceeSMark Rutland
2318ad811070SMark Rutland /**
2319ad811070SMark Rutland * atomic64_fetch_and() - atomic bitwise AND with full ordering
2320ad811070SMark Rutland * @i: s64 value
2321ad811070SMark Rutland * @v: pointer to atomic64_t
2322ad811070SMark Rutland *
2323ad811070SMark Rutland * Atomically updates @v to (@v & @i) with full ordering.
2324ad811070SMark Rutland *
2325ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_and() there.
2326ad811070SMark Rutland *
2327ad811070SMark Rutland * Return: The original value of @v.
2328ad811070SMark Rutland */
2329e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_and(s64 i,atomic64_t * v)2330e3d18ceeSMark Rutland atomic64_fetch_and(s64 i, atomic64_t *v)
2331e3d18ceeSMark Rutland {
2332e87c4f66SMarco Elver kcsan_mb(); /* KCSAN: model this op's full-barrier semantics before the access check */
2333e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2334c9268ac6SMark Rutland return raw_atomic64_fetch_and(i, v); /* uninstrumented op does the real work */
2335e3d18ceeSMark Rutland }
2336e3d18ceeSMark Rutland
2337ad811070SMark Rutland /**
2338ad811070SMark Rutland * atomic64_fetch_and_acquire() - atomic bitwise AND with acquire ordering
2339ad811070SMark Rutland * @i: s64 value
2340ad811070SMark Rutland * @v: pointer to atomic64_t
2341ad811070SMark Rutland *
2342ad811070SMark Rutland * Atomically updates @v to (@v & @i) with acquire ordering.
2343ad811070SMark Rutland *
2344ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_acquire() there.
2345ad811070SMark Rutland *
2346ad811070SMark Rutland * Return: The original value of @v.
2347ad811070SMark Rutland */
2348e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_and_acquire(s64 i,atomic64_t * v)2349e3d18ceeSMark Rutland atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
2350e3d18ceeSMark Rutland {
2351e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2352c9268ac6SMark Rutland return raw_atomic64_fetch_and_acquire(i, v); /* acquire: no explicit kcsan_*() hint emitted */
2353e3d18ceeSMark Rutland }
2354e3d18ceeSMark Rutland
2355ad811070SMark Rutland /**
2356ad811070SMark Rutland * atomic64_fetch_and_release() - atomic bitwise AND with release ordering
2357ad811070SMark Rutland * @i: s64 value
2358ad811070SMark Rutland * @v: pointer to atomic64_t
2359ad811070SMark Rutland *
2360ad811070SMark Rutland * Atomically updates @v to (@v & @i) with release ordering.
2361ad811070SMark Rutland *
2362ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_release() there.
2363ad811070SMark Rutland *
2364ad811070SMark Rutland * Return: The original value of @v.
2365ad811070SMark Rutland */
2366e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_and_release(s64 i,atomic64_t * v)2367e3d18ceeSMark Rutland atomic64_fetch_and_release(s64 i, atomic64_t *v)
2368e3d18ceeSMark Rutland {
2369e87c4f66SMarco Elver kcsan_release(); /* KCSAN: model this op's release semantics before the access check */
2370e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2371c9268ac6SMark Rutland return raw_atomic64_fetch_and_release(i, v); /* uninstrumented op does the real work */
2372e3d18ceeSMark Rutland }
2373e3d18ceeSMark Rutland
2374ad811070SMark Rutland /**
2375ad811070SMark Rutland * atomic64_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
2376ad811070SMark Rutland * @i: s64 value
2377ad811070SMark Rutland * @v: pointer to atomic64_t
2378ad811070SMark Rutland *
2379ad811070SMark Rutland * Atomically updates @v to (@v & @i) with relaxed ordering.
2380ad811070SMark Rutland *
2381ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_relaxed() there.
2382ad811070SMark Rutland *
2383ad811070SMark Rutland * Return: The original value of @v.
2384ad811070SMark Rutland */
2385e3d18ceeSMark Rutland static __always_inline s64
atomic64_fetch_and_relaxed(s64 i,atomic64_t * v)2386e3d18ceeSMark Rutland atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
2387e3d18ceeSMark Rutland {
2388e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2389c9268ac6SMark Rutland return raw_atomic64_fetch_and_relaxed(i, v); /* relaxed: no kcsan_*() ordering hint emitted */
2390e3d18ceeSMark Rutland }
2391e3d18ceeSMark Rutland
2392ad811070SMark Rutland /**
2393ad811070SMark Rutland * atomic64_andnot() - atomic bitwise AND NOT with relaxed ordering
2394ad811070SMark Rutland * @i: s64 value
2395ad811070SMark Rutland * @v: pointer to atomic64_t
2396ad811070SMark Rutland *
2397ad811070SMark Rutland * Atomically updates @v to (@v & ~@i) with relaxed ordering.
2398ad811070SMark Rutland *
2399ad811070SMark Rutland * Unsafe to use in noinstr code; use raw_atomic64_andnot() there.
2400ad811070SMark Rutland *
2401ad811070SMark Rutland * Return: Nothing.
2402ad811070SMark Rutland */
2403e3d18ceeSMark Rutland static __always_inline void
atomic64_andnot(s64 i,atomic64_t * v)2404e3d18ceeSMark Rutland atomic64_andnot(s64 i, atomic64_t *v)
2405e3d18ceeSMark Rutland {
2406e3d18ceeSMark Rutland instrument_atomic_read_write(v, sizeof(*v)); /* KASAN/KCSAN: validate the atomic RMW on *v */
2407c9268ac6SMark Rutland raw_atomic64_andnot(i, v); /* uninstrumented op does the real work */
2408e3d18ceeSMark Rutland }
2409e3d18ceeSMark Rutland
/**
 * atomic64_fetch_andnot() - atomic bitwise AND NOT with full ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v & ~@i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_andnot(i, v);
}
2428e3d18ceeSMark Rutland
/**
 * atomic64_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v & ~@i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_andnot_acquire(i, v);
}
2446e3d18ceeSMark Rutland
/**
 * atomic64_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v & ~@i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	/* KCSAN: model the release ordering of this op. */
	kcsan_release();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_andnot_release(i, v);
}
2465e3d18ceeSMark Rutland
/**
 * atomic64_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_andnot_relaxed(i, v);
}
2483e3d18ceeSMark Rutland
/**
 * atomic64_or() - atomic bitwise OR with relaxed ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_or() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic64_or(i, v);
}
2501e3d18ceeSMark Rutland
/**
 * atomic64_fetch_or() - atomic bitwise OR with full ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v | @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_or() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_or(i, v);
}
2520e3d18ceeSMark Rutland
/**
 * atomic64_fetch_or_acquire() - atomic bitwise OR with acquire ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v | @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_or_acquire(i, v);
}
2538e3d18ceeSMark Rutland
/**
 * atomic64_fetch_or_release() - atomic bitwise OR with release ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v | @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	/* KCSAN: model the release ordering of this op. */
	kcsan_release();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_or_release(i, v);
}
2557e3d18ceeSMark Rutland
/**
 * atomic64_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_or_relaxed(i, v);
}
2575e3d18ceeSMark Rutland
/**
 * atomic64_xor() - atomic bitwise XOR with relaxed ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_xor() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic64_xor(i, v);
}
2593e3d18ceeSMark Rutland
/**
 * atomic64_fetch_xor() - atomic bitwise XOR with full ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v ^ @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_xor(i, v);
}
2612e3d18ceeSMark Rutland
/**
 * atomic64_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v ^ @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_xor_acquire(i, v);
}
2630e3d18ceeSMark Rutland
/**
 * atomic64_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	/* KCSAN: model the release ordering of this op. */
	kcsan_release();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_xor_release(i, v);
}
2649e3d18ceeSMark Rutland
/**
 * atomic64_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: s64 value
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_xor_relaxed(i, v);
}
2667e3d18ceeSMark Rutland
/**
 * atomic64_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic64_t
 * @new: s64 value to assign
 *
 * Atomically updates @v to @new with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_xchg() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 new)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_xchg(v, new);
}
2686e3d18ceeSMark Rutland
/**
 * atomic64_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic64_t
 * @new: s64 value to assign
 *
 * Atomically updates @v to @new with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_xchg_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 new)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_xchg_acquire(v, new);
}
2704e3d18ceeSMark Rutland
/**
 * atomic64_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic64_t
 * @new: s64 value to assign
 *
 * Atomically updates @v to @new with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_xchg_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 new)
{
	/* KCSAN: model the release ordering of this op. */
	kcsan_release();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_xchg_release(v, new);
}
2723e3d18ceeSMark Rutland
/**
 * atomic64_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic64_t
 * @new: s64 value to assign
 *
 * Atomically updates @v to @new with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_xchg_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 new)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_xchg_relaxed(v, new);
}
2741e3d18ceeSMark Rutland
/**
 * atomic64_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic64_t
 * @old: s64 value to compare with
 * @new: s64 value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_cmpxchg(v, old, new);
}
2761e3d18ceeSMark Rutland
/**
 * atomic64_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic64_t
 * @old: s64 value to compare with
 * @new: s64 value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_cmpxchg_acquire(v, old, new);
}
2780e3d18ceeSMark Rutland
/**
 * atomic64_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic64_t
 * @old: s64 value to compare with
 * @new: s64 value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	/* KCSAN: model the release ordering of this op. */
	kcsan_release();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_cmpxchg_release(v, old, new);
}
2800e3d18ceeSMark Rutland
/**
 * atomic64_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic64_t
 * @old: s64 value to compare with
 * @new: s64 value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_cmpxchg_relaxed(v, old, new);
}
2819e3d18ceeSMark Rutland
/**
 * atomic64_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic64_t
 * @old: pointer to s64 value to compare with
 * @new: s64 value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	/* @old is read, and is written to on comparison failure. */
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic64_try_cmpxchg(v, old, new);
}
2841e3d18ceeSMark Rutland
/**
 * atomic64_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic64_t
 * @old: pointer to s64 value to compare with
 * @new: s64 value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_acquire() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	/* @old is read, and is written to on comparison failure. */
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic64_try_cmpxchg_acquire(v, old, new);
}
2862e3d18ceeSMark Rutland
/**
 * atomic64_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic64_t
 * @old: pointer to s64 value to compare with
 * @new: s64 value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_release() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	/* KCSAN: model the release ordering of this op. */
	kcsan_release();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	/* @old is read, and is written to on comparison failure. */
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic64_try_cmpxchg_release(v, old, new);
}
2884e3d18ceeSMark Rutland
/**
 * atomic64_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic64_t
 * @old: pointer to s64 value to compare with
 * @new: s64 value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_relaxed() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	/* @old is read, and is written to on comparison failure. */
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic64_try_cmpxchg_relaxed(v, old, new);
}
2905e3d18ceeSMark Rutland
/**
 * atomic64_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: s64 value to subtract
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_sub_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_sub_and_test(i, v);
}
2924e3d18ceeSMark Rutland
/**
 * atomic64_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_dec_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_dec_and_test(v);
}
2942e3d18ceeSMark Rutland
/**
 * atomic64_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_inc_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_inc_and_test(v);
}
2960e3d18ceeSMark Rutland
/**
 * atomic64_add_negative() - atomic add and test if negative with full ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_negative() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_negative(i, v);
}
2979e3d18ceeSMark Rutland
/**
 * atomic64_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_negative_acquire() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	/* Report the atomic read-modify-write access to KASAN/KCSAN. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_negative_acquire(i, v);
}
2997e5ab9effSThomas Gleixner
/**
 * atomic64_add_negative_release() - atomic add and test if negative with release ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_negative_release() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	/* KCSAN: this op has release semantics */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_negative_release(i, v);
}
3016e5ab9effSThomas Gleixner
/**
 * atomic64_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: s64 value to add
 * @v: pointer to atomic64_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_negative_relaxed() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
{
	/* report the access to KASAN/KCSAN instrumentation */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_negative_relaxed(i, v);
}
3034e5ab9effSThomas Gleixner
/**
 * atomic64_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic64_t
 * @a: s64 value to add
 * @u: s64 value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_unless() there.
 *
 * Return: The original value of @v.
 */
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_fetch_add_unless(v, a, u);
}
3054e3d18ceeSMark Rutland
/**
 * atomic64_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic64_t
 * @a: s64 value to add
 * @u: s64 value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_add_unless() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_add_unless(v, a, u);
}
3074e3d18ceeSMark Rutland
/**
 * atomic64_inc_not_zero() - atomic increment unless zero with full ordering
 * @v: pointer to atomic64_t
 *
 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_inc_not_zero() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_inc_not_zero(v);
}
3092e3d18ceeSMark Rutland
/**
 * atomic64_inc_unless_negative() - atomic increment unless negative with full ordering
 * @v: pointer to atomic64_t
 *
 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_inc_unless_negative() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_inc_unless_negative(v);
}
3110e3d18ceeSMark Rutland
/**
 * atomic64_dec_unless_positive() - atomic decrement unless positive with full ordering
 * @v: pointer to atomic64_t
 *
 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_dec_unless_positive() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_dec_unless_positive(v);
}
3128e3d18ceeSMark Rutland
/**
 * atomic64_dec_if_positive() - atomic decrement if positive with full ordering
 * @v: pointer to atomic64_t
 *
 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic64_dec_if_positive() there.
 *
 * Return: The old value of (@v - 1), regardless of whether @v was updated.
 */
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic64_dec_if_positive(v);
}
3146e3d18ceeSMark Rutland
/**
 * atomic_long_read() - atomic load with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically loads the value of @v with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_read() there.
 *
 * Return: The value loaded from @v.
 */
static __always_inline long
atomic_long_read(const atomic_long_t *v)
{
	/* pure load: report a read-only access to KASAN/KCSAN */
	instrument_atomic_read(v, sizeof(*v));
	return raw_atomic_long_read(v);
}
316367d1b0deSMark Rutland
/**
 * atomic_long_read_acquire() - atomic load with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically loads the value of @v with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_read_acquire() there.
 *
 * Return: The value loaded from @v.
 */
static __always_inline long
atomic_long_read_acquire(const atomic_long_t *v)
{
	/* pure load: report a read-only access to KASAN/KCSAN */
	instrument_atomic_read(v, sizeof(*v));
	return raw_atomic_long_read_acquire(v);
}
318067d1b0deSMark Rutland
/**
 * atomic_long_set() - atomic set with relaxed ordering
 * @v: pointer to atomic_long_t
 * @i: long value to assign
 *
 * Atomically sets @v to @i with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_set() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_set(atomic_long_t *v, long i)
{
	/* pure store: report a write-only access to KASAN/KCSAN */
	instrument_atomic_write(v, sizeof(*v));
	raw_atomic_long_set(v, i);
}
319867d1b0deSMark Rutland
/**
 * atomic_long_set_release() - atomic set with release ordering
 * @v: pointer to atomic_long_t
 * @i: long value to assign
 *
 * Atomically sets @v to @i with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_set_release() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_set_release(atomic_long_t *v, long i)
{
	/* KCSAN: this op has release semantics */
	kcsan_release();
	instrument_atomic_write(v, sizeof(*v));
	raw_atomic_long_set_release(v, i);
}
321767d1b0deSMark Rutland
/**
 * atomic_long_add() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_add(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic_long_add(i, v);
}
323567d1b0deSMark Rutland
/**
 * atomic_long_add_return() - atomic add with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_add_return(long i, atomic_long_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_return(i, v);
}
325467d1b0deSMark Rutland
/**
 * atomic_long_add_return_acquire() - atomic add with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_return_acquire(i, v);
}
327267d1b0deSMark Rutland
/**
 * atomic_long_add_return_release() - atomic add with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_add_return_release(long i, atomic_long_t *v)
{
	/* KCSAN: this op has release semantics */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_return_release(i, v);
}
329167d1b0deSMark Rutland
/**
 * atomic_long_add_return_relaxed() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_return_relaxed(i, v);
}
330967d1b0deSMark Rutland
/**
 * atomic_long_fetch_add() - atomic add with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_add(long i, atomic_long_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_add(i, v);
}
332867d1b0deSMark Rutland
/**
 * atomic_long_fetch_add_acquire() - atomic add with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_add_acquire(i, v);
}
334667d1b0deSMark Rutland
/**
 * atomic_long_fetch_add_release() - atomic add with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	/* KCSAN: this op has release semantics */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_add_release(i, v);
}
336567d1b0deSMark Rutland
/**
 * atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_add_relaxed(i, v);
}
338367d1b0deSMark Rutland
/**
 * atomic_long_sub() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_sub() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_sub(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic_long_sub(i, v);
}
340167d1b0deSMark Rutland
/**
 * atomic_long_sub_return() - atomic subtract with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_sub_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_sub_return(long i, atomic_long_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_sub_return(i, v);
}
342067d1b0deSMark Rutland
/**
 * atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_sub_return_acquire(i, v);
}
343867d1b0deSMark Rutland
/**
 * atomic_long_sub_return_release() - atomic subtract with release ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	/* KCSAN: this op has release semantics */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_sub_return_release(i, v);
}
345767d1b0deSMark Rutland
/**
 * atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_sub_return_relaxed(i, v);
}
347567d1b0deSMark Rutland
/**
 * atomic_long_fetch_sub() - atomic subtract with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_sub(i, v);
}
349467d1b0deSMark Rutland
/**
 * atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_sub_acquire(i, v);
}
351267d1b0deSMark Rutland
/**
 * atomic_long_fetch_sub_release() - atomic subtract with release ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	/* KCSAN: this op has release semantics */
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_sub_release(i, v);
}
353167d1b0deSMark Rutland
/**
 * atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_sub_relaxed(i, v);
}
354967d1b0deSMark Rutland
/**
 * atomic_long_inc() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_inc() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_inc(atomic_long_t *v)
{
	/* report the read-modify-write access to KASAN/KCSAN */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic_long_inc(v);
}
356667d1b0deSMark Rutland
/**
 * atomic_long_inc_return() - atomic increment with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_inc_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_inc_return(atomic_long_t *v)
{
	/* KCSAN: this fully-ordered op implies a full barrier */
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_inc_return(v);
}
358467d1b0deSMark Rutland
/**
 * atomic_long_inc_return_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_inc_return_acquire(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_inc_return_acquire(v);
}
360167d1b0deSMark Rutland
/**
 * atomic_long_inc_return_release() - atomic increment with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_inc_return_release(atomic_long_t *v)
{
	/* KCSAN: model the release ordering implied by this op. */
	kcsan_release();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_inc_return_release(v);
}
361967d1b0deSMark Rutland
/**
 * atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_inc_return_relaxed(v);
}
363667d1b0deSMark Rutland
/**
 * atomic_long_fetch_inc() - atomic increment with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_inc(atomic_long_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_inc(v);
}
365467d1b0deSMark Rutland
/**
 * atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_inc_acquire(v);
}
367167d1b0deSMark Rutland
/**
 * atomic_long_fetch_inc_release() - atomic increment with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_inc_release(atomic_long_t *v)
{
	/* KCSAN: model the release ordering implied by this op. */
	kcsan_release();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_inc_release(v);
}
368967d1b0deSMark Rutland
/**
 * atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_inc_relaxed(v);
}
370667d1b0deSMark Rutland
/**
 * atomic_long_dec() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_dec() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_dec(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic_long_dec(v);
}
372367d1b0deSMark Rutland
/**
 * atomic_long_dec_return() - atomic decrement with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_dec_return() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_dec_return(atomic_long_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_dec_return(v);
}
374167d1b0deSMark Rutland
/**
 * atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_acquire() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_dec_return_acquire(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_dec_return_acquire(v);
}
375867d1b0deSMark Rutland
/**
 * atomic_long_dec_return_release() - atomic decrement with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_release() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_dec_return_release(atomic_long_t *v)
{
	/* KCSAN: model the release ordering implied by this op. */
	kcsan_release();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_dec_return_release(v);
}
377667d1b0deSMark Rutland
/**
 * atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_relaxed() there.
 *
 * Return: The updated value of @v.
 */
static __always_inline long
atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_dec_return_relaxed(v);
}
379367d1b0deSMark Rutland
/**
 * atomic_long_fetch_dec() - atomic decrement with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_dec(atomic_long_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_dec(v);
}
381167d1b0deSMark Rutland
/**
 * atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_dec_acquire(v);
}
382867d1b0deSMark Rutland
/**
 * atomic_long_fetch_dec_release() - atomic decrement with release ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_dec_release(atomic_long_t *v)
{
	/* KCSAN: model the release ordering implied by this op. */
	kcsan_release();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_dec_release(v);
}
384667d1b0deSMark Rutland
/**
 * atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_dec_relaxed(v);
}
386367d1b0deSMark Rutland
/**
 * atomic_long_and() - atomic bitwise AND with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_and() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_and(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic_long_and(i, v);
}
388167d1b0deSMark Rutland
/**
 * atomic_long_fetch_and() - atomic bitwise AND with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_and(long i, atomic_long_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_and(i, v);
}
390067d1b0deSMark Rutland
/**
 * atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_and_acquire(i, v);
}
391867d1b0deSMark Rutland
/**
 * atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	/* KCSAN: model the release ordering implied by this op. */
	kcsan_release();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_and_release(i, v);
}
393767d1b0deSMark Rutland
/**
 * atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_and_relaxed(i, v);
}
395567d1b0deSMark Rutland
/**
 * atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_andnot() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_andnot(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic_long_andnot(i, v);
}
397367d1b0deSMark Rutland
/**
 * atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_andnot(i, v);
}
399267d1b0deSMark Rutland
/**
 * atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_andnot_acquire(i, v);
}
401067d1b0deSMark Rutland
/**
 * atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	/* KCSAN: model the release ordering implied by this op. */
	kcsan_release();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_andnot_release(i, v);
}
402967d1b0deSMark Rutland
/**
 * atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_andnot_relaxed(i, v);
}
404767d1b0deSMark Rutland
/**
 * atomic_long_or() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_or() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_or(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic_long_or(i, v);
}
406567d1b0deSMark Rutland
/**
 * atomic_long_fetch_or() - atomic bitwise OR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_or(long i, atomic_long_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_or(i, v);
}
408467d1b0deSMark Rutland
/**
 * atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_or_acquire(i, v);
}
410267d1b0deSMark Rutland
/**
 * atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	/* KCSAN: model the release ordering implied by this op. */
	kcsan_release();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_or_release(i, v);
}
412167d1b0deSMark Rutland
/**
 * atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v | @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_or_relaxed(i, v);
}
413967d1b0deSMark Rutland
/**
 * atomic_long_xor() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_xor() there.
 *
 * Return: Nothing.
 */
static __always_inline void
atomic_long_xor(long i, atomic_long_t *v)
{
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	raw_atomic_long_xor(i, v);
}
415767d1b0deSMark Rutland
/**
 * atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	/* KCSAN: model the full barrier implied by this fully-ordered op. */
	kcsan_mb();
	/* Report this atomic RMW access to KASAN/KCSAN before performing it. */
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_xor(i, v);
}
417667d1b0deSMark Rutland
/**
 * atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_xor_acquire(i, v);
}
419467d1b0deSMark Rutland
/**
 * atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_xor_release(i, v);
}
421367d1b0deSMark Rutland
/**
 * atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_xor_relaxed(i, v);
}
423167d1b0deSMark Rutland
/**
 * atomic_long_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_xchg() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_xchg(atomic_long_t *v, long new)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_xchg(v, new);
}
425067d1b0deSMark Rutland
/**
 * atomic_long_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_xchg_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_xchg_acquire(atomic_long_t *v, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_xchg_acquire(v, new);
}
426867d1b0deSMark Rutland
/**
 * atomic_long_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_xchg_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_xchg_release(atomic_long_t *v, long new)
{
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_xchg_release(v, new);
}
428767d1b0deSMark Rutland
/**
 * atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_xchg_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_xchg_relaxed(atomic_long_t *v, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_xchg_relaxed(v, new);
}
430567d1b0deSMark Rutland
/**
 * atomic_long_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_cmpxchg(v, old, new);
}
432567d1b0deSMark Rutland
/**
 * atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_acquire() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_cmpxchg_acquire(v, old, new);
}
434467d1b0deSMark Rutland
/**
 * atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_release() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_cmpxchg_release(v, old, new);
}
436467d1b0deSMark Rutland
/**
 * atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_relaxed() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_cmpxchg_relaxed(v, old, new);
}
438367d1b0deSMark Rutland
/**
 * atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic_long_try_cmpxchg(v, old, new);
}
440567d1b0deSMark Rutland
/**
 * atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_acquire() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic_long_try_cmpxchg_acquire(v, old, new);
}
442667d1b0deSMark Rutland
/**
 * atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_release() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic_long_try_cmpxchg_release(v, old, new);
}
444867d1b0deSMark Rutland
/**
 * atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, updates @old to the current value of @v.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_relaxed() there.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return raw_atomic_long_try_cmpxchg_relaxed(v, old, new);
}
446967d1b0deSMark Rutland
/**
 * atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_sub_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_sub_and_test(i, v);
}
448867d1b0deSMark Rutland
/**
 * atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_dec_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic_long_dec_and_test(atomic_long_t *v)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_dec_and_test(v);
}
450667d1b0deSMark Rutland
/**
 * atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_inc_and_test() there.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
atomic_long_inc_and_test(atomic_long_t *v)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_inc_and_test(v);
}
452467d1b0deSMark Rutland
/**
 * atomic_long_add_negative() - atomic add and test if negative with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_negative() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic_long_add_negative(long i, atomic_long_t *v)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_negative(i, v);
}
454367d1b0deSMark Rutland
/**
 * atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_acquire() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_negative_acquire(i, v);
}
4561e5ab9effSThomas Gleixner
/**
 * atomic_long_add_negative_release() - atomic add and test if negative with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_release() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_negative_release(i, v);
}
4580e5ab9effSThomas Gleixner
/**
 * atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_relaxed() there.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_negative_relaxed(i, v);
}
4598e5ab9effSThomas Gleixner
/**
 * atomic_long_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_unless() there.
 *
 * Return: The original value of @v.
 */
static __always_inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_fetch_add_unless(v, a, u);
}
461867d1b0deSMark Rutland
/**
 * atomic_long_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_add_unless() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_add_unless(v, a, u);
}
463867d1b0deSMark Rutland
/**
 * atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_inc_not_zero() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic_long_inc_not_zero(atomic_long_t *v)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_inc_not_zero(v);
}
465667d1b0deSMark Rutland
/**
 * atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_inc_unless_negative() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic_long_inc_unless_negative(atomic_long_t *v)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_inc_unless_negative(v);
}
467467d1b0deSMark Rutland
/**
 * atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_dec_unless_positive() there.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
atomic_long_dec_unless_positive(atomic_long_t *v)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_dec_unless_positive(v);
}
469267d1b0deSMark Rutland
/**
 * atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_atomic_long_dec_if_positive() there.
 *
 * Return: The old value of (@v - 1), regardless of whether @v was updated.
 */
static __always_inline long
atomic_long_dec_if_positive(atomic_long_t *v)
{
	kcsan_mb();
	instrument_atomic_read_write(v, sizeof(*v));
	return raw_atomic_long_dec_if_positive(v);
}
471067d1b0deSMark Rutland
/*
 * xchg() - instrumented atomic exchange with full ordering.
 *
 * Unsafe to use in noinstr code; use raw_xchg() there.
 */
#define xchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	kcsan_mb(); \
	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
	raw_xchg(__ai_ptr, __VA_ARGS__); \
})
4718e3d18ceeSMark Rutland
4719e3d18ceeSMark Rutland #define xchg_acquire(ptr, ...) \
4720e3d18ceeSMark Rutland ({ \
4721e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4722ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4723c9268ac6SMark Rutland raw_xchg_acquire(__ai_ptr, __VA_ARGS__); \
4724e3d18ceeSMark Rutland })
4725e3d18ceeSMark Rutland
4726e3d18ceeSMark Rutland #define xchg_release(ptr, ...) \
4727e3d18ceeSMark Rutland ({ \
4728e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4729e87c4f66SMarco Elver kcsan_release(); \
4730ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4731c9268ac6SMark Rutland raw_xchg_release(__ai_ptr, __VA_ARGS__); \
4732e3d18ceeSMark Rutland })
4733e3d18ceeSMark Rutland
4734e3d18ceeSMark Rutland #define xchg_relaxed(ptr, ...) \
4735e3d18ceeSMark Rutland ({ \
4736e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4737ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4738c9268ac6SMark Rutland raw_xchg_relaxed(__ai_ptr, __VA_ARGS__); \
4739e3d18ceeSMark Rutland })
4740e3d18ceeSMark Rutland
4741e3d18ceeSMark Rutland #define cmpxchg(ptr, ...) \
4742e3d18ceeSMark Rutland ({ \
4743e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4744e87c4f66SMarco Elver kcsan_mb(); \
4745ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4746c9268ac6SMark Rutland raw_cmpxchg(__ai_ptr, __VA_ARGS__); \
4747e3d18ceeSMark Rutland })
4748e3d18ceeSMark Rutland
4749e3d18ceeSMark Rutland #define cmpxchg_acquire(ptr, ...) \
4750e3d18ceeSMark Rutland ({ \
4751e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4752ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4753c9268ac6SMark Rutland raw_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
4754e3d18ceeSMark Rutland })
4755e3d18ceeSMark Rutland
4756e3d18ceeSMark Rutland #define cmpxchg_release(ptr, ...) \
4757e3d18ceeSMark Rutland ({ \
4758e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4759e87c4f66SMarco Elver kcsan_release(); \
4760ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4761c9268ac6SMark Rutland raw_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
4762e3d18ceeSMark Rutland })
4763e3d18ceeSMark Rutland
4764e3d18ceeSMark Rutland #define cmpxchg_relaxed(ptr, ...) \
4765e3d18ceeSMark Rutland ({ \
4766e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4767ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4768c9268ac6SMark Rutland raw_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
4769e3d18ceeSMark Rutland })
4770e3d18ceeSMark Rutland
4771e3d18ceeSMark Rutland #define cmpxchg64(ptr, ...) \
4772e3d18ceeSMark Rutland ({ \
4773e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4774e87c4f66SMarco Elver kcsan_mb(); \
4775ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4776c9268ac6SMark Rutland raw_cmpxchg64(__ai_ptr, __VA_ARGS__); \
4777e3d18ceeSMark Rutland })
4778e3d18ceeSMark Rutland
4779e3d18ceeSMark Rutland #define cmpxchg64_acquire(ptr, ...) \
4780e3d18ceeSMark Rutland ({ \
4781e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4782ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4783c9268ac6SMark Rutland raw_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
4784e3d18ceeSMark Rutland })
4785e3d18ceeSMark Rutland
4786e3d18ceeSMark Rutland #define cmpxchg64_release(ptr, ...) \
4787e3d18ceeSMark Rutland ({ \
4788e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4789e87c4f66SMarco Elver kcsan_release(); \
4790ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4791c9268ac6SMark Rutland raw_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
4792e3d18ceeSMark Rutland })
4793e3d18ceeSMark Rutland
4794e3d18ceeSMark Rutland #define cmpxchg64_relaxed(ptr, ...) \
4795e3d18ceeSMark Rutland ({ \
4796e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4797ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4798c9268ac6SMark Rutland raw_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
4799e3d18ceeSMark Rutland })
4800e3d18ceeSMark Rutland
48018c8b096aSPeter Zijlstra #define cmpxchg128(ptr, ...) \
48028c8b096aSPeter Zijlstra ({ \
48038c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
48048c8b096aSPeter Zijlstra kcsan_mb(); \
48058c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4806c9268ac6SMark Rutland raw_cmpxchg128(__ai_ptr, __VA_ARGS__); \
48078c8b096aSPeter Zijlstra })
48088c8b096aSPeter Zijlstra
48098c8b096aSPeter Zijlstra #define cmpxchg128_acquire(ptr, ...) \
48108c8b096aSPeter Zijlstra ({ \
48118c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
48128c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4813c9268ac6SMark Rutland raw_cmpxchg128_acquire(__ai_ptr, __VA_ARGS__); \
48148c8b096aSPeter Zijlstra })
48158c8b096aSPeter Zijlstra
48168c8b096aSPeter Zijlstra #define cmpxchg128_release(ptr, ...) \
48178c8b096aSPeter Zijlstra ({ \
48188c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
48198c8b096aSPeter Zijlstra kcsan_release(); \
48208c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4821c9268ac6SMark Rutland raw_cmpxchg128_release(__ai_ptr, __VA_ARGS__); \
48228c8b096aSPeter Zijlstra })
48238c8b096aSPeter Zijlstra
48248c8b096aSPeter Zijlstra #define cmpxchg128_relaxed(ptr, ...) \
48258c8b096aSPeter Zijlstra ({ \
48268c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
48278c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4828c9268ac6SMark Rutland raw_cmpxchg128_relaxed(__ai_ptr, __VA_ARGS__); \
48298c8b096aSPeter Zijlstra })
48308c8b096aSPeter Zijlstra
4831e3d18ceeSMark Rutland #define try_cmpxchg(ptr, oldp, ...) \
4832e3d18ceeSMark Rutland ({ \
4833e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4834e3d18ceeSMark Rutland typeof(oldp) __ai_oldp = (oldp); \
4835e87c4f66SMarco Elver kcsan_mb(); \
4836ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4837ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4838c9268ac6SMark Rutland raw_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4839e3d18ceeSMark Rutland })
4840e3d18ceeSMark Rutland
4841e3d18ceeSMark Rutland #define try_cmpxchg_acquire(ptr, oldp, ...) \
4842e3d18ceeSMark Rutland ({ \
4843e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4844e3d18ceeSMark Rutland typeof(oldp) __ai_oldp = (oldp); \
4845ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4846ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4847c9268ac6SMark Rutland raw_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4848e3d18ceeSMark Rutland })
4849e3d18ceeSMark Rutland
4850e3d18ceeSMark Rutland #define try_cmpxchg_release(ptr, oldp, ...) \
4851e3d18ceeSMark Rutland ({ \
4852e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4853e3d18ceeSMark Rutland typeof(oldp) __ai_oldp = (oldp); \
4854e87c4f66SMarco Elver kcsan_release(); \
4855ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4856ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4857c9268ac6SMark Rutland raw_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4858e3d18ceeSMark Rutland })
4859e3d18ceeSMark Rutland
4860e3d18ceeSMark Rutland #define try_cmpxchg_relaxed(ptr, oldp, ...) \
4861e3d18ceeSMark Rutland ({ \
4862e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4863e3d18ceeSMark Rutland typeof(oldp) __ai_oldp = (oldp); \
4864ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4865ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4866c9268ac6SMark Rutland raw_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4867e3d18ceeSMark Rutland })
4868e3d18ceeSMark Rutland
48690aa7be05SUros Bizjak #define try_cmpxchg64(ptr, oldp, ...) \
48700aa7be05SUros Bizjak ({ \
48710aa7be05SUros Bizjak typeof(ptr) __ai_ptr = (ptr); \
48720aa7be05SUros Bizjak typeof(oldp) __ai_oldp = (oldp); \
48730aa7be05SUros Bizjak kcsan_mb(); \
4874ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4875ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4876c9268ac6SMark Rutland raw_try_cmpxchg64(__ai_ptr, __ai_oldp, __VA_ARGS__); \
48770aa7be05SUros Bizjak })
48780aa7be05SUros Bizjak
48790aa7be05SUros Bizjak #define try_cmpxchg64_acquire(ptr, oldp, ...) \
48800aa7be05SUros Bizjak ({ \
48810aa7be05SUros Bizjak typeof(ptr) __ai_ptr = (ptr); \
48820aa7be05SUros Bizjak typeof(oldp) __ai_oldp = (oldp); \
4883ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4884ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4885c9268ac6SMark Rutland raw_try_cmpxchg64_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
48860aa7be05SUros Bizjak })
48870aa7be05SUros Bizjak
48880aa7be05SUros Bizjak #define try_cmpxchg64_release(ptr, oldp, ...) \
48890aa7be05SUros Bizjak ({ \
48900aa7be05SUros Bizjak typeof(ptr) __ai_ptr = (ptr); \
48910aa7be05SUros Bizjak typeof(oldp) __ai_oldp = (oldp); \
48920aa7be05SUros Bizjak kcsan_release(); \
4893ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4894ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4895c9268ac6SMark Rutland raw_try_cmpxchg64_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
48960aa7be05SUros Bizjak })
48970aa7be05SUros Bizjak
48980aa7be05SUros Bizjak #define try_cmpxchg64_relaxed(ptr, oldp, ...) \
48990aa7be05SUros Bizjak ({ \
49000aa7be05SUros Bizjak typeof(ptr) __ai_ptr = (ptr); \
49010aa7be05SUros Bizjak typeof(oldp) __ai_oldp = (oldp); \
4902ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4903ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4904c9268ac6SMark Rutland raw_try_cmpxchg64_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
49050aa7be05SUros Bizjak })
49060aa7be05SUros Bizjak
49078c8b096aSPeter Zijlstra #define try_cmpxchg128(ptr, oldp, ...) \
49088c8b096aSPeter Zijlstra ({ \
49098c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
49108c8b096aSPeter Zijlstra typeof(oldp) __ai_oldp = (oldp); \
49118c8b096aSPeter Zijlstra kcsan_mb(); \
49128c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
49138c8b096aSPeter Zijlstra instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4914c9268ac6SMark Rutland raw_try_cmpxchg128(__ai_ptr, __ai_oldp, __VA_ARGS__); \
49158c8b096aSPeter Zijlstra })
49168c8b096aSPeter Zijlstra
49178c8b096aSPeter Zijlstra #define try_cmpxchg128_acquire(ptr, oldp, ...) \
49188c8b096aSPeter Zijlstra ({ \
49198c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
49208c8b096aSPeter Zijlstra typeof(oldp) __ai_oldp = (oldp); \
49218c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
49228c8b096aSPeter Zijlstra instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4923c9268ac6SMark Rutland raw_try_cmpxchg128_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
49248c8b096aSPeter Zijlstra })
49258c8b096aSPeter Zijlstra
49268c8b096aSPeter Zijlstra #define try_cmpxchg128_release(ptr, oldp, ...) \
49278c8b096aSPeter Zijlstra ({ \
49288c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
49298c8b096aSPeter Zijlstra typeof(oldp) __ai_oldp = (oldp); \
49308c8b096aSPeter Zijlstra kcsan_release(); \
49318c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
49328c8b096aSPeter Zijlstra instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4933c9268ac6SMark Rutland raw_try_cmpxchg128_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
49348c8b096aSPeter Zijlstra })
49358c8b096aSPeter Zijlstra
49368c8b096aSPeter Zijlstra #define try_cmpxchg128_relaxed(ptr, oldp, ...) \
49378c8b096aSPeter Zijlstra ({ \
49388c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
49398c8b096aSPeter Zijlstra typeof(oldp) __ai_oldp = (oldp); \
49408c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
49418c8b096aSPeter Zijlstra instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4942c9268ac6SMark Rutland raw_try_cmpxchg128_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
49438c8b096aSPeter Zijlstra })
49448c8b096aSPeter Zijlstra
4945e3d18ceeSMark Rutland #define cmpxchg_local(ptr, ...) \
4946e3d18ceeSMark Rutland ({ \
4947e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4948ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4949c9268ac6SMark Rutland raw_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
4950e3d18ceeSMark Rutland })
4951e3d18ceeSMark Rutland
4952e3d18ceeSMark Rutland #define cmpxchg64_local(ptr, ...) \
4953e3d18ceeSMark Rutland ({ \
4954e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4955ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4956c9268ac6SMark Rutland raw_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
4957e3d18ceeSMark Rutland })
4958e3d18ceeSMark Rutland
49598c8b096aSPeter Zijlstra #define cmpxchg128_local(ptr, ...) \
49608c8b096aSPeter Zijlstra ({ \
49618c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
49628c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4963c9268ac6SMark Rutland raw_cmpxchg128_local(__ai_ptr, __VA_ARGS__); \
49648c8b096aSPeter Zijlstra })
49658c8b096aSPeter Zijlstra
4966e3d18ceeSMark Rutland #define sync_cmpxchg(ptr, ...) \
4967e3d18ceeSMark Rutland ({ \
4968e3d18ceeSMark Rutland typeof(ptr) __ai_ptr = (ptr); \
4969e87c4f66SMarco Elver kcsan_mb(); \
4970ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4971c9268ac6SMark Rutland raw_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
4972e3d18ceeSMark Rutland })
4973e3d18ceeSMark Rutland
4974e6ce9d74SUros Bizjak #define try_cmpxchg_local(ptr, oldp, ...) \
4975e6ce9d74SUros Bizjak ({ \
4976e6ce9d74SUros Bizjak typeof(ptr) __ai_ptr = (ptr); \
4977e6ce9d74SUros Bizjak typeof(oldp) __ai_oldp = (oldp); \
4978ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4979ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4980c9268ac6SMark Rutland raw_try_cmpxchg_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4981e6ce9d74SUros Bizjak })
4982e6ce9d74SUros Bizjak
4983e6ce9d74SUros Bizjak #define try_cmpxchg64_local(ptr, oldp, ...) \
4984e6ce9d74SUros Bizjak ({ \
4985e6ce9d74SUros Bizjak typeof(ptr) __ai_ptr = (ptr); \
4986e6ce9d74SUros Bizjak typeof(oldp) __ai_oldp = (oldp); \
4987ec570320SMark Rutland instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4988ec570320SMark Rutland instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4989c9268ac6SMark Rutland raw_try_cmpxchg64_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4990e6ce9d74SUros Bizjak })
4991e6ce9d74SUros Bizjak
49928c8b096aSPeter Zijlstra #define try_cmpxchg128_local(ptr, oldp, ...) \
49938c8b096aSPeter Zijlstra ({ \
49948c8b096aSPeter Zijlstra typeof(ptr) __ai_ptr = (ptr); \
49958c8b096aSPeter Zijlstra typeof(oldp) __ai_oldp = (oldp); \
49968c8b096aSPeter Zijlstra instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
49978c8b096aSPeter Zijlstra instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4998c9268ac6SMark Rutland raw_try_cmpxchg128_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
49998c8b096aSPeter Zijlstra })
50008c8b096aSPeter Zijlstra
5001e3d18ceeSMark Rutland
5002e3d18ceeSMark Rutland #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
5003*b33eb50aSMark Rutland // 1568f875fef72097413caab8339120c065a39aa4
5004