/*
 * atomic64_t for 586+
 *
 * Copyright © 2010 Luca Barbieri
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/linkage.h>
#include <asm/alternative-asm.h>

.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm

ENTRY(atomic64_read_cx8)
	read64 %ecx
	ret
ENDPROC(atomic64_read_cx8)

ENTRY(atomic64_set_cx8)
1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	ret
ENDPROC(atomic64_set_cx8)

ENTRY(atomic64_xchg_cx8)
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	ret
ENDPROC(atomic64_xchg_cx8)

.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi

	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp

	read64 %ecx
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
ENDPROC(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb

.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	ret
ENDPROC(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb

ENTRY(atomic64_dec_if_positive_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx
	sbb $0, %ecx
	js 2f
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

2:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	ret
ENDPROC(atomic64_dec_if_positive_cx8)

ENTRY(atomic64_add_unless_cx8)
	pushl %ebp
	pushl %ebx
/* these just push these two parameters on the stack */
	pushl %edi
	pushl %ecx

	movl %eax, %ebp
	movl %edx, %edi

	read64 %esi
1:
	cmpl %eax, 0(%esp)
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %ebp, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	addl $8, %esp
	popl %ebx
	popl %ebp
	ret
4:
	cmpl %edx, 4(%esp)
	jne 2b
	xorl %eax, %eax
	jmp 3b
ENDPROC(atomic64_add_unless_cx8)

ENTRY(atomic64_inc_not_zero_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ecx
	orl %edx, %ecx
	jz 3f
	movl %eax, %ebx
	xorl %ecx, %ecx
	addl $1, %ebx
	adcl %edx, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	popl %ebx
	ret
ENDPROC(atomic64_inc_not_zero_cx8)
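
/*
 * Illustrative usage sketch, not part of the original file: it shows the
 * register convention these out-of-line helpers assume, as it can be read
 * off the code above, using atomic64_add_return_cx8 as the example.
 * "my_counter" is a hypothetical atomic64_t variable; real kernel code
 * normally reaches these routines through the inline-asm wrappers in
 * <asm/atomic64_32.h> rather than calling them directly.
 *
 *	movl	$1, %eax		# addend, low 32 bits
 *	movl	$0, %edx		# addend, high 32 bits
 *	movl	$my_counter, %ecx	# pointer to the atomic64_t
 *	call	atomic64_add_return_cx8
 *	# the updated 64-bit value is returned in %edx:%eax
 */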