/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Three versions of the atomic routines: one that does not
	 * return a value and does not perform memory barriers, and
	 * two which return a value (the new and the old value,
	 * respectively) and do the barriers.
	 */

/* atomic_<op>: apply <op> to the 32-bit counter; no return value. */
#define ATOMIC_OP(op) \
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2); \
1:	lduw	[%o1], %g1; \
	op	%g1, %o0, %g7; \
	cas	[%o1], %g1, %g7; \
	cmp	%g1, %g7; \
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b); \
	 nop; \
	retl; \
	 nop; \
2:	BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(atomic_##op);

/* atomic_<op>_return: returns the new 32-bit value, sign-extended. */
#define ATOMIC_OP_RETURN(op) \
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2); \
1:	lduw	[%o1], %g1; \
	op	%g1, %o0, %g7; \
	cas	[%o1], %g1, %g7; \
	cmp	%g1, %g7; \
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b); \
	 op	%g1, %o0, %g1; \
	retl; \
	 sra	%g1, 0, %o0; \
2:	BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(atomic_##op##_return);

/* atomic_fetch_<op>: returns the old 32-bit value, sign-extended. */
#define ATOMIC_FETCH_OP(op) \
ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2); \
1:	lduw	[%o1], %g1; \
	op	%g1, %o0, %g7; \
	cas	[%o1], %g1, %g7; \
	cmp	%g1, %g7; \
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b); \
	 nop; \
	retl; \
	 sra	%g1, 0, %o0; \
2:	BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(atomic_fetch_##op);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/* 64-bit variants of the above, using ldx/casx and the %xcc condition codes. */

#define ATOMIC64_OP(op) \
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2); \
1:	ldx	[%o1], %g1; \
	op	%g1, %o0, %g7; \
	casx	[%o1], %g1, %g7; \
	cmp	%g1, %g7; \
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b); \
	 nop; \
	retl; \
	 nop; \
2:	BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(atomic64_##op);

#define ATOMIC64_OP_RETURN(op) \
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2); \
1:	ldx	[%o1], %g1; \
	op	%g1, %o0, %g7; \
	casx	[%o1], %g1, %g7; \
	cmp	%g1, %g7; \
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b); \
	 nop; \
	retl; \
	 op	%g1, %o0, %o0; \
2:	BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op) \
ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
	BACKOFF_SETUP(%o2); \
1:	ldx	[%o1], %g1; \
	op	%g1, %o0, %g7; \
	casx	[%o1], %g1, %g7; \
	cmp	%g1, %g7; \
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b); \
	 nop; \
	retl; \
	 mov	%g1, %o0; \
2:	BACKOFF_SPIN(%o2, %o3, 1b); \
ENDPROC(atomic64_fetch_##op);

#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
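
	/* atomic64_dec_if_positive: decrement the 64-bit counter at
	 * %o0 only if its current value is positive, retrying the
	 * casx with backoff on contention.  The return value is
	 * always old - 1, so a negative result means the counter was
	 * left unmodified.  A rough C sketch of the fast path (the
	 * backoff spin is omitted; the names follow the generic
	 * kernel atomic64_t API):
	 *
	 *	long atomic64_dec_if_positive(atomic64_t *v)
	 *	{
	 *		long old;
	 *
	 *		do {
	 *			old = v->counter;
	 *			if (old <= 0)
	 *				break;
	 *		} while (cmpxchg(&v->counter, old, old - 1) != old);
	 *
	 *		return old - 1;
	 *	}
	 */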
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)