/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)	{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		ACCESS_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
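
/*
 * Illustrative sketch only: declaring and touching a counter with
 * ATOMIC_INIT(), atomic_set() and atomic_read().  example_reset_and_read
 * is a hypothetical helper, not part of the kernel API; note that neither
 * accessor implies a memory barrier.
 */
static __inline__ int example_reset_and_read(atomic_t *v)
{
	/* a static initializer would read: atomic_t count = ATOMIC_INIT(0); */
	atomic_set(v, 0);		/* plain store */
	return atomic_read(v);		/* single volatile load */
}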

#define ATOMIC_OP(op, c_op, asm_op)					\
static __inline__ void atomic_##op(int i, atomic_t * v)		\
{									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		int temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%0, %1		# atomic_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	sc	%0, %1				\n"	\
		"	beqzl	%0, 1b				\n"	\
		"	.set	mips0				\n"	\
		: "=&r" (temp), "+m" (v->counter)			\
		: "Ir" (i));						\
	} else if (kernel_uses_llsc) {					\
		int temp;						\
									\
		do {							\
			__asm__ __volatile__(				\
			"	.set	arch=r4000		\n"	\
			"	ll	%0, %1	# atomic_" #op "\n"	\
			"	" #asm_op " %0, %2		\n"	\
			"	sc	%0, %1			\n"	\
			"	.set	mips0			\n"	\
			: "=&r" (temp), "+m" (v->counter)		\
			: "Ir" (i));					\
		} while (unlikely(!temp));				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}
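
/*
 * Illustrative sketch only: the LL/SC loops generated by ATOMIC_OP are
 * morally equivalent to the compare-and-swap retry loop below.  This is
 * not how the kernel implements atomic_add on MIPS; example_atomic_add_cas
 * is a hypothetical name and exists purely to explain the retry semantics.
 */
static __inline__ void example_atomic_add_cas(int i, atomic_t *v)
{
	int old;

	do {
		old = atomic_read(v);	/* sample the counter, like ll */
		/* publish old + i; fails if someone raced us, like sc */
	} while (cmpxchg(&v->counter, old, old + i) != old);
}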

#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static __inline__ int atomic_##op##_return(int i, atomic_t * v)	\
{									\
	int result;							\
									\
	smp_mb__before_llsc();						\
									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		int temp;						\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%1, %2	# atomic_" #op "_return	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	sc	%0, %2				\n"	\
		"	beqzl	%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	mips0				\n"	\
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)	\
		: "Ir" (i));						\
	} else if (kernel_uses_llsc) {					\
		int temp;						\
		do {							\
			__asm__ __volatile__(				\
			"	.set	arch=r4000		\n"	\
			"	ll	%1, %2	# atomic_" #op "_return\n" \
			"	" #asm_op " %0, %1, %3		\n"	\
			"	sc	%0, %2			\n"	\
			"	.set	mips0			\n"	\
			: "=&r" (result), "=&r" (temp), "+m" (v->counter) \
			: "Ir" (i));					\
		} while (unlikely(!result));				\
		result = temp; result c_op i;				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	smp_llsc_mb();							\
									\
	return result;							\
}

#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
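
/*
 * The ATOMIC_OPS() expansions above provide atomic_add(), atomic_sub(),
 * atomic_add_return() and atomic_sub_return().  A hypothetical usage
 * sketch (example_account is not a kernel helper):
 */
static __inline__ int example_account(atomic_t *bytes, int delta)
{
	atomic_add(delta, bytes);		/* no return value, no barrier */
	return atomic_sub_return(delta, bytes);	/* fully ordered, returns new value */
}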

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %2	# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	sc	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqzl	%0, 1b				\n"
		"	 subu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %2	# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	sc	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqz	%0, 1b				\n"
		"	 subu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
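
/*
 * Illustrative sketch only: atomic_sub_if_positive() as a try-acquire on a
 * credit counter.  example_take_credits is a hypothetical helper; the
 * return value of atomic_sub_if_positive() is the old value minus @n, so a
 * negative result means the counter was left untouched.
 */
static __inline__ int example_take_credits(atomic_t *credits, int n)
{
	return atomic_sub_if_positive(n, credits) >= 0;
}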

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
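
/*
 * Illustrative sketch only: a lock-free "store maximum" built from the
 * atomic_cmpxchg() retry idiom.  example_atomic_max is hypothetical.
 */
static __inline__ void example_atomic_max(atomic_t *v, int new)
{
	int old;

	do {
		old = atomic_read(v);
		if (old >= new)
			return;		/* current value already large enough */
	} while (atomic_cmpxchg(v, old, new) != old);
}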

/*
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
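
/*
 * Illustrative sketch only: the classic refcount-grab built on
 * __atomic_add_unless(), taking a reference only if the object is still
 * live.  example_get_ref is hypothetical; the generic wrappers in
 * linux/atomic.h provide the real atomic_inc_not_zero().
 */
static __inline__ int example_get_ref(atomic_t *refs)
{
	/* an old value of 0 means the object was already dead: leave it alone */
	return __atomic_add_unless(refs, 1, 0) != 0;
}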

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
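
/*
 * Illustrative sketch only: the usual "last reference drops the object"
 * pattern built on atomic_dec_and_test().  example_put_ref and release
 * are hypothetical.
 */
static __inline__ void example_put_ref(atomic_t *refs, void (*release)(void))
{
	if (atomic_dec_and_test(refs))	/* true only for the final put */
		release();
}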

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
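
/*
 * Illustrative sketch only: atomic_add_negative() spotting when a signed
 * budget counter is driven below zero.  example_charge is hypothetical.
 */
static __inline__ int example_charge(atomic_t *budget, int cost)
{
	return atomic_add_negative(-cost, budget);	/* true if overdrawn */
}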

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	ACCESS_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))

#define ATOMIC64_OP(op, c_op, asm_op)					\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
{									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		long temp;						\
									\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	lld	%0, %1	# atomic64_" #op "	\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	scd	%0, %1				\n"	\
		"	beqzl	%0, 1b				\n"	\
		"	.set	mips0				\n"	\
		: "=&r" (temp), "+m" (v->counter)			\
		: "Ir" (i));						\
	} else if (kernel_uses_llsc) {					\
		long temp;						\
									\
		do {							\
			__asm__ __volatile__(				\
			"	.set	arch=r4000		\n"	\
			"	lld	%0, %1	# atomic64_" #op "\n"	\
			"	" #asm_op " %0, %2		\n"	\
			"	scd	%0, %1			\n"	\
			"	.set	mips0			\n"	\
			: "=&r" (temp), "+m" (v->counter)		\
			: "Ir" (i));					\
		} while (unlikely(!temp));				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
{									\
	long result;							\
									\
	smp_mb__before_llsc();						\
									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		long temp;						\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	lld	%1, %2	# atomic64_" #op "_return\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	scd	%0, %2				\n"	\
		"	beqzl	%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	mips0				\n"	\
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)	\
		: "Ir" (i));						\
	} else if (kernel_uses_llsc) {					\
		long temp;						\
		do {							\
			__asm__ __volatile__(				\
			"	.set	arch=r4000		\n"	\
			"	lld	%1, %2	# atomic64_" #op "_return\n" \
			"	" #asm_op " %0, %1, %3		\n"	\
			"	scd	%0, %2			\n"	\
			"	.set	mips0			\n"	\
			: "=&r" (result), "=&r" (temp), "+m" (v->counter) \
			: "Ir" (i));					\
		} while (unlikely(!result));				\
		result = temp; result c_op i;				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	smp_llsc_mb();							\
									\
	return result;							\
}

#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	lld	%1, %2	# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	scd	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqzl	%0, 1b				\n"
		"	 dsubu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		long temp;
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	lld	%1, %2	# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	scd	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqz	%0, 1b				\n"
		"	 dsubu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"1:						\n"
		"	.set	mips0				\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns true if the addition was performed, false otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
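
/*
 * Illustrative sketch only: atomic64_inc_not_zero() as a 64-bit refcount
 * grab that refuses to resurrect a dead object.  example_get_ref64 is
 * hypothetical.
 */
static __inline__ int example_get_ref64(atomic64_t *refs)
{
	return atomic64_inc_not_zero(refs);	/* false once the count hit zero */
}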

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */