/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * atomic64_t for 386/486
 *
 * Copyright © 2010  Luca Barbieri
 */

#include <linux/linkage.h>
#include <asm/alternative.h>

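/*
 * Out-of-line atomic64_t implementations for 386/486 class CPUs, which
 * lack the CMPXCHG8B instruction.  Atomicity is provided by disabling
 * interrupts around each operation, which is only sufficient on UP.
 */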
/* if you want SMP support, implement these with real spinlocks */
.macro IRQ_SAVE reg
	pushfl
	cli
.endm

.macro IRQ_RESTORE reg
	popfl
.endm

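/*
 * BEGIN_IRQ_SAVE(op) opens SYM_FUNC_START(atomic64_<op>_386) and saves
 * EFLAGS/disables interrupts.  It also defines a temporary "endp"
 * assembler macro that emits the matching SYM_FUNC_END and then purges
 * itself; ENDP expands to that macro, so each function is closed with
 * the correct symbol name.
 */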
#define BEGIN_IRQ_SAVE(op) \
.macro endp; \
SYM_FUNC_END(atomic64_##op##_386); \
.purgem endp; \
.endm; \
SYM_FUNC_START(atomic64_##op##_386); \
	IRQ_SAVE v;

#define ENDP endp

#define RET_IRQ_RESTORE \
	IRQ_RESTORE v; \
	RET

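/* Read *v atomically; v in %ecx, value returned in %edx:%eax. */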
#define v %ecx
BEGIN_IRQ_SAVE(read)
	movl  (v), %eax
	movl 4(v), %edx
	RET_IRQ_RESTORE
ENDP
#undef v

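/* Set *v; v in %esi, new value in %ecx:%ebx (high:low). */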
#define v %esi
BEGIN_IRQ_SAVE(set)
	movl %ebx,  (v)
	movl %ecx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

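/* Exchange *v with %ecx:%ebx; the old value is returned in %edx:%eax. */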
#define v %esi
BEGIN_IRQ_SAVE(xchg)
	movl  (v), %eax
	movl 4(v), %edx
	movl %ebx,  (v)
	movl %ecx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

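/* Add %edx:%eax to *v; v in %ecx. */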
#define v %ecx
BEGIN_IRQ_SAVE(add)
	addl %eax,  (v)
	adcl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

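/* Add %edx:%eax to *v and return the new value in %edx:%eax; v in %ecx. */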
#define v %ecx
BEGIN_IRQ_SAVE(add_return)
	addl  (v), %eax
	adcl 4(v), %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

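/* Subtract %edx:%eax from *v; v in %ecx. */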
#define v %ecx
BEGIN_IRQ_SAVE(sub)
	subl %eax,  (v)
	sbbl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

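/*
 * Subtract %edx:%eax from *v and return the new value in %edx:%eax;
 * v in %ecx.  The 64-bit operand is negated first (negate both halves,
 * then propagate the borrow into the high word) so the rest of the
 * body is the same add-and-store sequence as add_return.
 */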
#define v %ecx
BEGIN_IRQ_SAVE(sub_return)
	negl %edx
	negl %eax
	sbbl $0, %edx
	addl  (v), %eax
	adcl 4(v), %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

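/* Increment *v; v in %esi. */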
#define v %esi
BEGIN_IRQ_SAVE(inc)
	addl $1,  (v)
	adcl $0, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

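/* Increment *v and return the new value in %edx:%eax; v in %esi. */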
#define v %esi
BEGIN_IRQ_SAVE(inc_return)
	movl  (v), %eax
	movl 4(v), %edx
	addl $1, %eax
	adcl $0, %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

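/* Decrement *v; v in %esi. */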
#define v %esi
BEGIN_IRQ_SAVE(dec)
	subl $1,  (v)
	sbbl $0, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

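/* Decrement *v and return the new value in %edx:%eax; v in %esi. */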
#define v %esi
BEGIN_IRQ_SAVE(dec_return)
	movl  (v), %eax
	movl 4(v), %edx
	subl $1, %eax
	sbbl $0, %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

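/*
 * Add %edx:%eax to *v unless *v equals the value in %edi:%ecx
 * (high:low); v in %esi.  Instead of comparing *v with the "unless"
 * value directly, both sides are biased by the addend: %edi:%ecx
 * becomes u + a and %edx:%eax becomes *v + a, so a plain compare of
 * the two sums decides whether to store.  Returns 1 in %eax if the add
 * was performed, 0 otherwise.
 */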
#define v %esi
BEGIN_IRQ_SAVE(add_unless)
	addl %eax, %ecx
	adcl %edx, %edi
	addl  (v), %eax
	adcl 4(v), %edx
	cmpl %eax, %ecx
	je 3f
1:
	movl %eax,  (v)
	movl %edx, 4(v)
	movl $1, %eax
2:
	RET_IRQ_RESTORE
3:
	cmpl %edx, %edi
	jne 1b
	xorl %eax, %eax
	jmp 2b
ENDP
#undef v

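/*
 * Increment *v unless it is zero; v in %esi.  Returns 1 in %eax if the
 * increment was performed, 0 if *v was zero.
 */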
#define v %esi
BEGIN_IRQ_SAVE(inc_not_zero)
	movl  (v), %eax
	movl 4(v), %edx
	testl %eax, %eax
	je 3f
1:
	addl $1, %eax
	adcl $0, %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	movl $1, %eax
2:
	RET_IRQ_RESTORE
3:
	testl %edx, %edx
	jne 1b
	jmp 2b
ENDP
#undef v

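/*
 * Decrement *v only if the result is not negative; v in %esi.  The
 * decremented value is computed and returned in %edx:%eax either way,
 * but it is written back only when its sign bit is clear.
 */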
#define v %esi
BEGIN_IRQ_SAVE(dec_if_positive)
	movl  (v), %eax
	movl 4(v), %edx
	subl $1, %eax
	sbbl $0, %edx
	js 1f
	movl %eax,  (v)
	movl %edx, 4(v)
1:
	RET_IRQ_RESTORE
ENDP
#undef v