1b2441318SGreg Kroah-Hartman /* SPDX-License-Identifier: GPL-2.0 */
2ae3a197eSDavid Howells #ifndef _ASM_POWERPC_CMPXCHG_H_
3ae3a197eSDavid Howells #define _ASM_POWERPC_CMPXCHG_H_
4ae3a197eSDavid Howells
5ae3a197eSDavid Howells #ifdef __KERNEL__
6ae3a197eSDavid Howells #include <linux/compiler.h>
7ae3a197eSDavid Howells #include <asm/synch.h>
810d8b148Span xinhui #include <linux/bug.h>
9ae3a197eSDavid Howells
/*
 * BITOFF_CAL(size, off): bit offset of a 'size'-byte value located at
 * byte offset 'off' within the aligned u32 word that contains it.  On
 * big-endian the lowest-addressed byte holds the most-significant bits,
 * so the shift is counted from the opposite end of the word.
 */
#ifdef __BIG_ENDIAN
#define BITOFF_CAL(size, off)	((sizeof(u32) - size - off) * BITS_PER_BYTE)
#else
#define BITOFF_CAL(size, off)	(off * BITS_PER_BYTE)
#endif
15d0563a12SPan Xinhui
/*
 * XCHG_GEN(type, sfx, cl): generate __xchg_<type><sfx>(), a sub-word
 * (u8/u16) atomic exchange emulated with a word-sized lwarx/stwcx.
 * reservation loop on the aligned u32 containing *p.  The old word is
 * masked (andc) and the shifted new value or-ed in, so neighbouring
 * bytes in the word are preserved.  'cl' is the extra clobber:
 * "memory" for ordered variants, just "cc" for _relaxed.
 * Returns the previous value of the sub-word.
 */
#define XCHG_GEN(type, sfx, cl)				\
static inline u32 __xchg_##type##sfx(volatile void *p, u32 val)	\
{								\
	unsigned int prev, prev_mask, tmp, bitoff, off;		\
								\
	off = (unsigned long)p % sizeof(u32);			\
	bitoff = BITOFF_CAL(sizeof(type), off);			\
	p -= off;	/* align down to the containing word */	\
	val <<= bitoff;						\
	prev_mask = (u32)(type)-1 << bitoff;			\
								\
	__asm__ __volatile__(					\
"1:	lwarx %0,0,%3\n"					\
"	andc %1,%0,%5\n"					\
"	or %1,%1,%4\n"						\
"	stwcx. %1,0,%3\n"					\
"	bne- 1b\n"						\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)		\
	: "r" (p), "r" (val), "r" (prev_mask)			\
	: "cc", cl);						\
								\
	return prev >> bitoff;					\
}
39d0563a12SPan Xinhui
/*
 * CMPXCHG_GEN(type, sfx, br, br2, cl): generate __cmpxchg_<type><sfx>(),
 * a sub-word (u8/u16) compare-and-exchange emulated on the aligned u32
 * containing *p.  The loaded word is masked down (and) for the compare;
 * on a match the sub-word is replaced (andc + or) and stored back with
 * stwcx., retrying on reservation loss.  'br'/'br2' are the entry/exit
 * barrier macros (empty for _local/_relaxed); 'cl' is the extra clobber.
 * Note: on compare failure the code branches straight to 2:, skipping
 * the exit barrier — cmpxchg gives no ordering guarantee on failure.
 * Returns the previous value of the sub-word.
 */
#define CMPXCHG_GEN(type, sfx, br, br2, cl)			\
static inline							\
u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new)	\
{								\
	unsigned int prev, prev_mask, tmp, bitoff, off;		\
								\
	off = (unsigned long)p % sizeof(u32);			\
	bitoff = BITOFF_CAL(sizeof(type), off);			\
	p -= off;	/* align down to the containing word */	\
	old <<= bitoff;						\
	new <<= bitoff;						\
	prev_mask = (u32)(type)-1 << bitoff;			\
								\
	__asm__ __volatile__(					\
	br							\
"1:	lwarx %0,0,%3\n"					\
"	and %1,%0,%6\n"						\
"	cmpw 0,%1,%4\n"						\
"	bne- 2f\n"						\
"	andc %1,%0,%6\n"					\
"	or %1,%1,%5\n"						\
"	stwcx. %1,0,%3\n"					\
"	bne- 1b\n"						\
	br2							\
	"\n"							\
"2:"								\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)		\
	: "r" (p), "r" (old), "r" (new), "r" (prev_mask)	\
	: "cc", cl);						\
								\
	return prev >> bitoff;					\
}
72d0563a12SPan Xinhui
73ae3a197eSDavid Howells /*
74ae3a197eSDavid Howells * Atomic exchange
75ae3a197eSDavid Howells *
7626760fc1SBoqun Feng * Changes the memory location '*p' to be val and returns
77ae3a197eSDavid Howells * the previous value stored there.
78ae3a197eSDavid Howells */
79ae3a197eSDavid Howells
80b86cf14fSNicholas Piggin #ifndef CONFIG_PPC_HAS_LBARX_LHARX
/*
 * No byte/halfword ll/sc instructions available: emulate 1- and 2-byte
 * xchg with a word-sized lwarx/stwcx. loop (see XCHG_GEN above).
 */
XCHG_GEN(u8, _local, "memory");
XCHG_GEN(u8, _relaxed, "cc");
XCHG_GEN(u16, _local, "memory");
XCHG_GEN(u16, _relaxed, "cc");
85b86cf14fSNicholas Piggin #else
/*
 * Atomically exchange the byte at *p with val using lbarx/stbcx.
 * No hardware ordering barriers ("local" variant); the "memory"
 * clobber still constrains compiler reordering.  Returns the old byte.
 */
static __always_inline unsigned long
__xchg_u8_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lbarx	%0,0,%2		# __xchg_u8_local\n"
"	stbcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned char *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}
101b86cf14fSNicholas Piggin
/*
 * Relaxed byte exchange: no barriers and no "memory" clobber, so
 * neither the CPU nor the compiler provides ordering with surrounding
 * accesses.  Returns the old byte.
 */
static __always_inline unsigned long
__xchg_u8_relaxed(u8 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lbarx	%0,0,%2		# __xchg_u8_relaxed\n"
"	stbcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
117b86cf14fSNicholas Piggin
/*
 * Atomically exchange the halfword at *p with val using lharx/sthcx.
 * No hardware barriers ("local" variant); "memory" clobber keeps the
 * compiler from reordering around it.  Returns the old halfword.
 */
static __always_inline unsigned long
__xchg_u16_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lharx	%0,0,%2		# __xchg_u16_local\n"
"	sthcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned short *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}
133b86cf14fSNicholas Piggin
/*
 * Relaxed halfword exchange: no barriers, no "memory" clobber — no
 * ordering with surrounding accesses.  Returns the old halfword.
 */
static __always_inline unsigned long
__xchg_u16_relaxed(u16 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lharx	%0,0,%2		# __xchg_u16_relaxed\n"
"	sthcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
149b86cf14fSNicholas Piggin #endif
150d0563a12SPan Xinhui
/*
 * Atomically exchange the word at *p with val using lwarx/stwcx.
 * No hardware barriers ("local" variant); "memory" clobber constrains
 * compiler reordering.  Returns the old word.
 */
static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2 \n"
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}
166ae3a197eSDavid Howells
/*
 * Relaxed word exchange: no barriers, no "memory" clobber — no
 * ordering with surrounding accesses.  Returns the old word.
 */
static __always_inline unsigned long
__xchg_u32_relaxed(u32 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n"
"	stwcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
182ae3a197eSDavid Howells
18326760fc1SBoqun Feng #ifdef CONFIG_PPC64
/*
 * Atomically exchange the doubleword at *p with val using ldarx/stdcx.
 * (64-bit only).  No hardware barriers ("local" variant); "memory"
 * clobber constrains compiler reordering.  Returns the old value.
 */
static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2 \n"
"	stdcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}
19926760fc1SBoqun Feng
/*
 * Relaxed doubleword exchange (64-bit only): no barriers, no "memory"
 * clobber — no ordering with surrounding accesses.  Returns the old value.
 */
static __always_inline unsigned long
__xchg_u64_relaxed(u64 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2\n"
"	stdcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
215ae3a197eSDavid Howells #endif
216ae3a197eSDavid Howells
/*
 * Size-dispatched CPU-local (unordered) xchg.  'size' is always a
 * compile-time constant (sizeof(*ptr) from arch_xchg_local()), so for
 * supported sizes every case folds to one call and the
 * BUILD_BUG_ON_MSG() is eliminated as dead code; any other size fails
 * the build.  Keep the switch shape — the build-bug relies on it.
 */
static __always_inline unsigned long
__xchg_local(void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 1:
		return __xchg_u8_local(ptr, x);
	case 2:
		return __xchg_u16_local(ptr, x);
	case 4:
		return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_local(ptr, x);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local");
	return x;	/* unreachable for valid sizes */
}
235ae3a197eSDavid Howells
/*
 * Size-dispatched relaxed xchg; same constant-folding/build-bug scheme
 * as __xchg_local() above.
 */
static __always_inline unsigned long
__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 1:
		return __xchg_u8_relaxed(ptr, x);
	case 2:
		return __xchg_u16_relaxed(ptr, x);
	case 4:
		return __xchg_u32_relaxed(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_relaxed(ptr, x);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_relaxed");
	return x;	/* unreachable for valid sizes */
}
/*
 * arch_xchg_local(ptr, x): exchange with no SMP ordering guarantees.
 * Evaluates to the previous *ptr, cast back to its type.
 */
#define arch_xchg_local(ptr,x)						     \
  ({									     \
     __typeof__(*(ptr)) _x_ = (x);					     \
     (__typeof__(*(ptr))) __xchg_local((ptr),				     \
		(unsigned long)_x_, sizeof(*(ptr)));			     \
  })
260ae3a197eSDavid Howells
/*
 * arch_xchg_relaxed(ptr, x): exchange with relaxed (no) ordering.
 * Evaluates to the previous *ptr, cast back to its type.
 */
#define arch_xchg_relaxed(ptr, x)					\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
			(unsigned long)_x_, sizeof(*(ptr)));		\
})
267b86cf14fSNicholas Piggin
268ae3a197eSDavid Howells /*
269ae3a197eSDavid Howells * Compare and exchange - if *p == old, set it to new,
270ae3a197eSDavid Howells * and return the old value of *p.
271ae3a197eSDavid Howells */
272b86cf14fSNicholas Piggin #ifndef CONFIG_PPC_HAS_LBARX_LHARX
/*
 * No byte/halfword ll/sc instructions: emulate 1- and 2-byte cmpxchg
 * with a word-sized loop (see CMPXCHG_GEN).  Barrier arguments select
 * the ordering flavour: fully-ordered, local, acquire, relaxed.
 */
CMPXCHG_GEN(u8, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u8, _local, , , "memory");
CMPXCHG_GEN(u8, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u8, _relaxed, , , "cc");
CMPXCHG_GEN(u16, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u16, _local, , , "memory");
CMPXCHG_GEN(u16, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u16, _relaxed, , , "cc");
281b86cf14fSNicholas Piggin #else
/*
 * Fully-ordered byte cmpxchg using lbarx/stbcx., bracketed by the
 * entry/exit barrier macros from asm/synch.h.  On compare failure it
 * branches past the exit barrier (no ordering guaranteed on failure).
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u8(volatile unsigned char *p, unsigned long old, unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lbarx	%0,0,%2		# __cmpxchg_u8\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stbcx.	%4,0,%2\n"
"	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
303b86cf14fSNicholas Piggin
/*
 * CPU-local byte cmpxchg: no hardware barriers, "memory" clobber only.
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u8_local(volatile unsigned char *p, unsigned long old,
			unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
"1:	lbarx	%0,0,%2		# __cmpxchg_u8_local\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stbcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
323b86cf14fSNicholas Piggin
/*
 * Relaxed byte cmpxchg: no barriers and no "memory" clobber.
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u8_relaxed(u8 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lbarx	%0,0,%2		# __cmpxchg_u8_relaxed\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stbcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}
342b86cf14fSNicholas Piggin
/*
 * Acquire byte cmpxchg: PPC_ACQUIRE_BARRIER after a successful store.
 * Failure branches past the barrier — acquire ordering only on success.
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u8_acquire(u8 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lbarx	%0,0,%2		# __cmpxchg_u8_acquire\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stbcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
362b86cf14fSNicholas Piggin
/*
 * Fully-ordered halfword cmpxchg using lharx/sthcx., bracketed by the
 * entry/exit barrier macros.  Failure branches past the exit barrier.
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u16(volatile unsigned short *p, unsigned long old, unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lharx	%0,0,%2		# __cmpxchg_u16\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	sthcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
383b86cf14fSNicholas Piggin
384b86cf14fSNicholas Piggin static __always_inline unsigned long
__cmpxchg_u16_local(volatile unsigned short * p,unsigned long old,unsigned long new)385b86cf14fSNicholas Piggin __cmpxchg_u16_local(volatile unsigned short *p, unsigned long old,
386b86cf14fSNicholas Piggin unsigned long new)
387b86cf14fSNicholas Piggin {
388b86cf14fSNicholas Piggin unsigned int prev;
389b86cf14fSNicholas Piggin
390b86cf14fSNicholas Piggin __asm__ __volatile__ (
391b86cf14fSNicholas Piggin "1: lharx %0,0,%2 # __cmpxchg_u16_local\n"
392b86cf14fSNicholas Piggin " cmpw 0,%0,%3\n"
393b86cf14fSNicholas Piggin " bne- 2f\n"
394b86cf14fSNicholas Piggin " sthcx. %4,0,%2\n"
395b86cf14fSNicholas Piggin " bne- 1b"
396b86cf14fSNicholas Piggin "2:"
397b86cf14fSNicholas Piggin : "=&r" (prev), "+m" (*p)
398b86cf14fSNicholas Piggin : "r" (p), "r" (old), "r" (new)
399b86cf14fSNicholas Piggin : "cc", "memory");
400b86cf14fSNicholas Piggin
401b86cf14fSNicholas Piggin return prev;
402b86cf14fSNicholas Piggin }
403b86cf14fSNicholas Piggin
/*
 * Relaxed halfword cmpxchg: no barriers and no "memory" clobber.
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u16_relaxed(u16 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lharx	%0,0,%2		# __cmpxchg_u16_relaxed\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	sthcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}
422b86cf14fSNicholas Piggin
/*
 * Acquire halfword cmpxchg: PPC_ACQUIRE_BARRIER after a successful
 * store; failure branches past it (acquire ordering only on success).
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u16_acquire(u16 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lharx	%0,0,%2		# __cmpxchg_u16_acquire\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	sthcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
442b86cf14fSNicholas Piggin #endif
443d0563a12SPan Xinhui
/*
 * Fully-ordered word cmpxchg using lwarx/stwcx., bracketed by the
 * entry/exit barrier macros.  On compare failure it branches past the
 * exit barrier (no ordering guaranteed on failure).
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
465ae3a197eSDavid Howells
466ae3a197eSDavid Howells static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int * p,unsigned long old,unsigned long new)467ae3a197eSDavid Howells __cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
468ae3a197eSDavid Howells unsigned long new)
469ae3a197eSDavid Howells {
470ae3a197eSDavid Howells unsigned int prev;
471ae3a197eSDavid Howells
472ae3a197eSDavid Howells __asm__ __volatile__ (
473ae3a197eSDavid Howells "1: lwarx %0,0,%2 # __cmpxchg_u32\n\
474ae3a197eSDavid Howells cmpw 0,%0,%3\n\
475ae3a197eSDavid Howells bne- 2f\n"
476ae3a197eSDavid Howells " stwcx. %4,0,%2\n\
477ae3a197eSDavid Howells bne- 1b"
478ae3a197eSDavid Howells "\n\
479ae3a197eSDavid Howells 2:"
480ae3a197eSDavid Howells : "=&r" (prev), "+m" (*p)
481ae3a197eSDavid Howells : "r" (p), "r" (old), "r" (new)
482ae3a197eSDavid Howells : "cc", "memory");
483ae3a197eSDavid Howells
484ae3a197eSDavid Howells return prev;
485ae3a197eSDavid Howells }
486ae3a197eSDavid Howells
/*
 * Relaxed word cmpxchg: no barriers and no "memory" clobber.
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u32_relaxed(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_relaxed\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}
50556c08e6dSBoqun Feng
50656c08e6dSBoqun Feng /*
50756c08e6dSBoqun Feng * cmpxchg family don't have order guarantee if cmp part fails, therefore we
50856c08e6dSBoqun Feng * can avoid superfluous barriers if we use assembly code to implement
50956c08e6dSBoqun Feng * cmpxchg() and cmpxchg_acquire(), however we don't do the similar for
51056c08e6dSBoqun Feng * cmpxchg_release() because that will result in putting a barrier in the
51156c08e6dSBoqun Feng * middle of a ll/sc loop, which is probably a bad idea. For example, this
51256c08e6dSBoqun Feng * might cause the conditional store more likely to fail.
51356c08e6dSBoqun Feng */
/*
 * Acquire word cmpxchg: PPC_ACQUIRE_BARRIER after a successful store;
 * failure branches past it, per the no-ordering-on-failure rule in the
 * comment above.  Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u32_acquire(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_acquire\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
53456c08e6dSBoqun Feng
535ae3a197eSDavid Howells #ifdef CONFIG_PPC64
/*
 * Fully-ordered doubleword cmpxchg (64-bit only) using ldarx/stdcx.,
 * bracketed by the entry/exit barrier macros.  Uses cmpd for the
 * 64-bit compare.  Failure branches past the exit barrier.
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
557ae3a197eSDavid Howells
558ae3a197eSDavid Howells static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long * p,unsigned long old,unsigned long new)559ae3a197eSDavid Howells __cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
560ae3a197eSDavid Howells unsigned long new)
561ae3a197eSDavid Howells {
562ae3a197eSDavid Howells unsigned long prev;
563ae3a197eSDavid Howells
564ae3a197eSDavid Howells __asm__ __volatile__ (
565ae3a197eSDavid Howells "1: ldarx %0,0,%2 # __cmpxchg_u64\n\
566ae3a197eSDavid Howells cmpd 0,%0,%3\n\
567ae3a197eSDavid Howells bne- 2f\n\
568ae3a197eSDavid Howells stdcx. %4,0,%2\n\
569ae3a197eSDavid Howells bne- 1b"
570ae3a197eSDavid Howells "\n\
571ae3a197eSDavid Howells 2:"
572ae3a197eSDavid Howells : "=&r" (prev), "+m" (*p)
573ae3a197eSDavid Howells : "r" (p), "r" (old), "r" (new)
574ae3a197eSDavid Howells : "cc", "memory");
575ae3a197eSDavid Howells
576ae3a197eSDavid Howells return prev;
577ae3a197eSDavid Howells }
57856c08e6dSBoqun Feng
/*
 * Relaxed doubleword cmpxchg (64-bit only): no barriers, no "memory"
 * clobber.  Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u64_relaxed(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_relaxed\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}
59756c08e6dSBoqun Feng
/*
 * Acquire doubleword cmpxchg (64-bit only): PPC_ACQUIRE_BARRIER after
 * a successful store; failure branches past it.
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_acquire\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
618ae3a197eSDavid Howells #endif
619ae3a197eSDavid Howells
/*
 * Size-dispatched fully-ordered cmpxchg.  'size' is a compile-time
 * constant (sizeof(*ptr)), so supported sizes fold to one call and the
 * BUILD_BUG_ON_MSG() is dead-code-eliminated; any other size fails the
 * build.  Keep the switch shape — the build-bug relies on it.
 */
static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8(ptr, old, new);
	case 2:
		return __cmpxchg_u16(ptr, old, new);
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg");
	return old;	/* unreachable for valid sizes */
}
639ae3a197eSDavid Howells
/*
 * Size-dispatched CPU-local cmpxchg; same constant-folding/build-bug
 * scheme as __cmpxchg() above.
 */
static __always_inline unsigned long
__cmpxchg_local(void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_local(ptr, old, new);
	case 2:
		return __cmpxchg_u16_local(ptr, old, new);
	case 4:
		return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_local(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_local");
	return old;	/* unreachable for valid sizes */
}
659ae3a197eSDavid Howells
/*
 * Size-dispatched relaxed cmpxchg; same constant-folding/build-bug
 * scheme as __cmpxchg() above.
 */
static __always_inline unsigned long
__cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_relaxed(ptr, old, new);
	case 2:
		return __cmpxchg_u16_relaxed(ptr, old, new);
	case 4:
		return __cmpxchg_u32_relaxed(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_relaxed(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_relaxed");
	return old;	/* unreachable for valid sizes */
}
67956c08e6dSBoqun Feng
68056c08e6dSBoqun Feng static __always_inline unsigned long
__cmpxchg_acquire(void * ptr,unsigned long old,unsigned long new,unsigned int size)68156c08e6dSBoqun Feng __cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
68256c08e6dSBoqun Feng unsigned int size)
68356c08e6dSBoqun Feng {
68456c08e6dSBoqun Feng switch (size) {
685d0563a12SPan Xinhui case 1:
686d0563a12SPan Xinhui return __cmpxchg_u8_acquire(ptr, old, new);
687d0563a12SPan Xinhui case 2:
688d0563a12SPan Xinhui return __cmpxchg_u16_acquire(ptr, old, new);
68956c08e6dSBoqun Feng case 4:
69056c08e6dSBoqun Feng return __cmpxchg_u32_acquire(ptr, old, new);
69156c08e6dSBoqun Feng #ifdef CONFIG_PPC64
69256c08e6dSBoqun Feng case 8:
69356c08e6dSBoqun Feng return __cmpxchg_u64_acquire(ptr, old, new);
69456c08e6dSBoqun Feng #endif
69556c08e6dSBoqun Feng }
69610d8b148Span xinhui BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_acquire");
69756c08e6dSBoqun Feng return old;
69856c08e6dSBoqun Feng }
/*
 * arch_cmpxchg() - type-generic compare-and-exchange on *ptr.
 *
 * Evaluates to the previous value of *ptr, cast back to the pointee
 * type; the width is resolved at compile time from sizeof(*(ptr)) by
 * __cmpxchg(), which rejects unsupported sizes with a build error.
 * @o and @n are captured once into _o_/_n_ to avoid multiple
 * evaluation of the macro arguments.
 */
#define arch_cmpxchg(ptr, o, n)						 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,		 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })
706ae3a197eSDavid Howells
707ae3a197eSDavid Howells
/*
 * arch_cmpxchg_local() - compare-and-exchange, CPU-local variant.
 *
 * Same shape as arch_cmpxchg() but dispatches to the *_local helpers
 * via __cmpxchg_local().  Arguments are evaluated exactly once; the
 * result is the previous value of *ptr in the pointee's type.
 */
#define arch_cmpxchg_local(ptr, o, n)					 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,	 \
					  (unsigned long)_n_,		 \
					  sizeof(*(ptr)));		 \
  })
715ae3a197eSDavid Howells
/*
 * arch_cmpxchg_relaxed() - compare-and-exchange, relaxed-ordering
 * variant.  Dispatches on sizeof(*(ptr)) through __cmpxchg_relaxed()
 * and evaluates to the previous value of *ptr.  Arguments are captured
 * once to avoid double evaluation.
 */
#define arch_cmpxchg_relaxed(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),			\
			(unsigned long)_o_, (unsigned long)_n_,		\
			sizeof(*(ptr)));				\
})
72456c08e6dSBoqun Feng
/*
 * arch_cmpxchg_acquire() - compare-and-exchange, acquire-ordering
 * variant.  Dispatches on sizeof(*(ptr)) through __cmpxchg_acquire()
 * and evaluates to the previous value of *ptr.  Arguments are captured
 * once to avoid double evaluation.
 */
#define arch_cmpxchg_acquire(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),			\
			(unsigned long)_o_, (unsigned long)_n_,		\
			sizeof(*(ptr)));				\
})
#ifdef CONFIG_PPC64
/*
 * 64-bit cmpxchg family.  Each wrapper only enforces at build time
 * that the operand is exactly 8 bytes wide, then forwards to the
 * corresponding size-generic macro above.
 */
#define arch_cmpxchg64(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
  })
#define arch_cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
  })
#define arch_cmpxchg64_relaxed(ptr, o, n)				\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_relaxed((ptr), (o), (n));				\
  })
#define arch_cmpxchg64_acquire(ptr, o, n)				\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_acquire((ptr), (o), (n));				\
  })
#else
/*
 * 32-bit PPC: no native 8-byte cmpxchg here; use the generic
 * implementation of cmpxchg64_local from asm-generic.
 */
#include <asm-generic/cmpxchg-local.h>
#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
#endif
758ae3a197eSDavid Howells
759ae3a197eSDavid Howells #endif /* __KERNEL__ */
760ae3a197eSDavid Howells #endif /* _ASM_POWERPC_CMPXCHG_H_ */
761