#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
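
/*
 * Usage sketch (illustrative only; nr_events and snapshot_events() are
 * hypothetical, not part of this header): a counter is declared with
 * ATOMIC_INIT and touched only through the accessors.  Note that the
 * read/set pair below is two independent atomic accesses, not one
 * atomic exchange; the xchg-based helpers further down provide that.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	static int snapshot_events(void)
 *	{
 *		int n = atomic_read(&nr_events);
 *
 *		atomic_set(&nr_events, 0);
 *		return n;
 *	}
 *
 * The read-modify-write operations that follow all use the same
 * load-reserve/store-conditional idiom: lwarx loads the old value and
 * establishes a reservation, the update is computed in a register, and
 * stwcx. stores the result only if the reservation still stands;
 * bne- branches back to label 1 to retry when it does not.
 */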

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
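
/*
 * Ordering: the value-returning operations (atomic_add_return() and
 * friends) bracket the lwarx/stwcx. loop with PPC_ATOMIC_ENTRY_BARRIER
 * and PPC_ATOMIC_EXIT_BARRIER and clobber "memory", making them full
 * memory barriers as the generic atomic API expects; the void
 * operations (atomic_add, atomic_sub, atomic_inc, atomic_dec)
 * guarantee atomicity only.
 */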

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
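
/*
 * Usage sketch (illustrative only; atomic_max_sketch() is hypothetical,
 * not a kernel API): atomic_cmpxchg() is the building block for
 * read-modify-write operations this header does not provide directly,
 * e.g. "store the maximum of the old value and a new one".
 *
 *	static inline void atomic_max_sketch(atomic_t *v, int n)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < n) {
 *			int seen = atomic_cmpxchg(v, old, n);
 *
 *			if (seen == old)
 *				break;
 *			old = seen;
 *		}
 *	}
 */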

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
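
/*
 * Usage sketch (illustrative only; struct obj and obj_tryget() are
 * hypothetical): atomic_inc_not_zero() is the classic "take a
 * reference unless the object is already on its way to destruction"
 * operation.
 *
 *	struct obj {
 *		atomic_t refcnt;
 *	};
 *
 *	static inline int obj_tryget(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->refcnt);
 *	}
 */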

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
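
/*
 * Usage sketch (illustrative only; obj_put() and obj_free() are
 * hypothetical, struct obj is from the sketch above):
 * atomic_dec_and_test() is the matching "drop a reference and free on
 * the final put" half of the pattern.
 *
 *	static inline void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcnt))
 *			obj_free(o);
 *	}
 */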

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
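
/*
 * Usage sketch (illustrative only; down_trylock_sketch() is
 * hypothetical): atomic_dec_if_positive() suits semaphore-like
 * "consume one unit if any are left" logic; a negative return value
 * means nothing was consumed.
 *
 *	static inline int down_trylock_sketch(atomic_t *count)
 *	{
 *		return atomic_dec_if_positive(count) < 0 ? -1 : 0;
 *	}
 */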

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
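
/*
 * The 64-bit operations below mirror the 32-bit ones one for one,
 * substituting the doubleword load-reserve/store-conditional pair
 * ldarx/stdcx. for lwarx/stwcx.; the barrier and return-value
 * conventions are identical.
 */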

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was done, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n\
	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */