/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
ENTRY(atomic_add) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_add)

ENTRY(atomic_sub) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_sub)

ENTRY(atomic_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 add	%g1, %o0, %g1
	retl
	 sra	%g1, 0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_add_ret)

ENTRY(atomic_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, BACKOFF_LABEL(2f, 1b)
	 sub	%g1, %o0, %g1
	retl
	 sra	%g1, 0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic_sub_ret)

ENTRY(atomic64_add) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_add)

ENTRY(atomic64_sub) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_sub)

ENTRY(atomic64_add_ret) /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 add	%g1, %o0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_add_ret)

ENTRY(atomic64_sub_ret) /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
	retl
	 sub	%g1, %o0, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_sub_ret)

ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o0], %g1
	brlez,pn %g1, 3f
	 sub	%g1, 1, %g7
	casx	[%o0], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, BACKOFF_LABEL(2f, 1b)
	 nop
3:	retl
	 sub	%g1, 1, %o0
2:	BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
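
	/* Illustrative note, not part of the build: every routine above is a
	 * compare-and-swap retry loop, with the contention backoff policy
	 * supplied by the BACKOFF_* macros from <asm/backoff.h>.  As a rough
	 * C sketch only (the names 'increment', 'v', 'old' and 'sum' are
	 * illustrative, and the generic cmpxchg() merely stands in for the
	 * cas/casx instruction plus the cmp/bne retry test), atomic_add_ret
	 * behaves approximately like:
	 *
	 *	int atomic_add_ret(int increment, atomic_t *v)
	 *	{
	 *		int old, sum;
	 *
	 *		do {
	 *			old = v->counter;	   (lduw  [%o1], %g1)
	 *			sum = old + increment;	   (add   %g1, %o0, %g7)
	 *		} while (cmpxchg(&v->counter, old, sum) != old);
	 *
	 *		return old + increment;	   (sra   %g1, 0, %o0)
	 *	}
	 *
	 * The 64-bit variants differ only in using ldx/casx, the %xcc condition
	 * codes, and a full 64-bit return value.
	 */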