/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>
#include <asm/export.h>

        .text

        /* Three versions of the atomic routines: one that does not
         * return a value and does not perform memory barriers, and
         * two that return a value (the new and the old value,
         * respectively) and do perform the barriers.
         */
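
        /* As a rough illustration, each variant expands to a cas()-based
         * retry loop roughly equivalent to this C sketch (illustrative
         * pseudocode only; "op" stands for add, sub, and, or or xor):
         *
         *      do {
         *              old = *ptr;                     // lduw / ldx
         *              new = old op increment;         // op   %g1, %o0, %g7
         *      } while (cas(ptr, old, new) != old);    // cas/casx, with backoff
         *
         * atomic_op() discards the result, atomic_op_return() hands back
         * "old op increment", and atomic_fetch_op() hands back "old".
         */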

#define ATOMIC_OP(op)                                                   \
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */              \
        BACKOFF_SETUP(%o2);                                             \
1:      lduw    [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        cas     [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         nop;                                                           \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic_##op);                                                   \
EXPORT_SYMBOL(atomic_##op);

#define ATOMIC_OP_RETURN(op)                                            \
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */     \
        BACKOFF_SETUP(%o2);                                             \
1:      lduw    [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        cas     [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b);                            \
         op     %g1, %o0, %g1;                                          \
        retl;                                                           \
         sra    %g1, 0, %o0;                                            \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic_##op##_return);                                          \
EXPORT_SYMBOL(atomic_##op##_return);

#define ATOMIC_FETCH_OP(op)                                             \
ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */        \
        BACKOFF_SETUP(%o2);                                             \
1:      lduw    [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        cas     [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         sra    %g1, 0, %o0;                                            \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic_fetch_##op);                                             \
EXPORT_SYMBOL(atomic_fetch_##op);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

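        /* 64-bit variants: the same cas loop, but using ldx/casx and the
         * %xcc condition codes; the result is already a full 64-bit value,
         * so no sign extension (sra) is needed before returning.
         */
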
#define ATOMIC64_OP(op)                                                 \
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */            \
        BACKOFF_SETUP(%o2);                                             \
1:      ldx     [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        casx    [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         nop;                                                           \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic64_##op);                                                 \
EXPORT_SYMBOL(atomic64_##op);

#define ATOMIC64_OP_RETURN(op)                                          \
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */   \
        BACKOFF_SETUP(%o2);                                             \
1:      ldx     [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        casx    [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         op     %g1, %o0, %o0;                                          \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic64_##op##_return);                                        \
EXPORT_SYMBOL(atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)                                           \
ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */      \
        BACKOFF_SETUP(%o2);                                             \
1:      ldx     [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        casx    [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         mov    %g1, %o0;                                               \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic64_fetch_##op);                                           \
EXPORT_SYMBOL(atomic64_fetch_##op);

#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

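        /* atomic64_dec_if_positive(): atomically decrement the counter at
         * %o0 only when its current value is positive; in all cases the
         * old value minus one is returned in %o0.
         */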
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
        BACKOFF_SETUP(%o2)
1:      ldx     [%o0], %g1
        brlez,pn %g1, 3f
         sub    %g1, 1, %g7
        casx    [%o0], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b)
         nop
3:      retl
         sub    %g1, 1, %o0
2:      BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)
EXPORT_SYMBOL(atomic64_dec_if_positive)