#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

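/*
 * Usage sketch (illustrative only; the variable name is hypothetical):
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	atomic_set(&example_count, 10);
 *	pr_info("count=%d\n", atomic_read(&example_count));
 *
 * atomic_read()/atomic_set() compile to plain lwz/stw: they guarantee
 * single-copy atomicity of the access, but imply no memory ordering.
 */
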
#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int a, atomic_t *v)			\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
"1:	lwarx	%0,0,%3		# atomic_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
"	stwcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC_OP_RETURN(op, asm_op)					\
static __inline__ int atomic_##op##_return(int a, atomic_t *v)		\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	lwarx	%0,0,%2		# atomic_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
"	stwcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC_OPS(op, asm_op) ATOMIC_OP(op, asm_op) ATOMIC_OP_RETURN(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, subf)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

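/*
 * For reference, ATOMIC_OPS(add, add) above expands to the pair:
 *
 *	static __inline__ void atomic_add(int a, atomic_t *v);
 *	static __inline__ int atomic_add_return(int a, atomic_t *v);
 *
 * and ATOMIC_OPS(sub, subf) likewise generates atomic_sub() and
 * atomic_sub_return(), using the reversed-operand "subf" (subtract
 * from) instruction so that v->counter - a is computed.
 */
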
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

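/*
 * Usage sketch (illustrative only; names are hypothetical): a counter
 * initialized to -N that releases a completion once N events have
 * occurred, i.e. when the increment brings it to zero:
 *
 *	if (atomic_inc_and_test(&pending))
 *		complete(&done);
 */
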
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

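/*
 * Usage sketch (illustrative only, not an API defined here): any
 * read-modify-write not covered above can be built from a cmpxchg
 * retry loop, e.g. a hypothetical atomic bitwise-or:
 *
 *	static inline void example_atomic_or(int mask, atomic_t *v)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = old | mask;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *	}
 */
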
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n\
	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

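/*
 * Usage sketch (illustrative only; "obj" and its refcount are
 * hypothetical): take a reference to an object only while it is still
 * live, the classic lookup-side refcounting pattern:
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;
 *
 * A zero return means the object was already on its way to being freed.
 */
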
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive

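/*
 * Usage sketch (illustrative only; "sem" is hypothetical): this is the
 * shape of a semaphore trydown, where a negative result means no count
 * was available and nothing was consumed:
 *
 *	if (atomic_dec_if_positive(&sem->count) < 0)
 *		return -EAGAIN;
 */
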
#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long a, atomic64_t *v)		\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
"1:	ldarx	%0,0,%3		# atomic64_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
"	stdcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long a, atomic64_t *v)	\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	ldarx	%0,0,%2		# atomic64_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
"	stdcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC64_OPS(op, asm_op) ATOMIC64_OP(op, asm_op) ATOMIC64_OP_RETURN(op, asm_op)

ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, subf)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

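/*
 * Usage sketch (illustrative only; "stat_bytes" is hypothetical):
 * atomic64_xchg() makes snapshot-and-reset of a 64-bit statistics
 * counter a single atomic step:
 *
 *	u64 delta = atomic64_xchg(&stat_bytes, 0);
 *
 * No update can be lost between reading the total and zeroing it.
 */
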
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was performed, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n\
	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */