1 #ifndef _ASM_M32R_ATOMIC_H
2 #define _ASM_M32R_ATOMIC_H
5 * linux/include/asm-m32r/atomic.h
8 * Copyright (C) 2001, 2002 Hitoshi Yamamoto
9 * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
12 #include <linux/types.h>
13 #include <asm/assembler.h>
14 #include <asm/cmpxchg.h>
15 #include <asm/dcache_clear.h>
16 #include <asm/barrier.h>
19 * Atomic operations that C can't guarantee us. Useful for
20 * resource counting etc..
23 #define ATOMIC_INIT(i) { (i) }
26 * atomic_read - read atomic variable
27 * @v: pointer of type atomic_t
29 * Atomically reads the value of @v.
31 #define atomic_read(v) READ_ONCE((v)->counter)
34 * atomic_set - set atomic variable
35 * @v: pointer of type atomic_t
38 * Atomically sets the value of @v to @i.
40 #define atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
42 #ifdef CONFIG_CHIP_M32700_TS1
43 #define __ATOMIC_CLOBBER , "r4"
45 #define __ATOMIC_CLOBBER
/*
 * ATOMIC_OP - generate "static inline void atomic_<op>(int i, atomic_t *v)".
 * The read-modify-write of v->counter is done with interrupts disabled,
 * inside an M32R_LOCK / M32R_UNLOCK load-store pair on &v->counter.
 */
#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
}
/*
 * ATOMIC_OP_RETURN - like ATOMIC_OP, but the generated
 * atomic_<op>_return(int i, atomic_t *v) returns the new counter value.
 */
#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return	\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}
/*
 * ATOMIC_FETCH_OP - like ATOMIC_OP, but the generated
 * atomic_fetch_<op>(int i, atomic_t *v) returns the value the counter
 * held *before* the operation (snapshotted via "mv %0, %1" while the
 * lock is held; %1/val carries the updated value to the store).
 */
#define ATOMIC_FETCH_OP(op)						\
static __inline__ int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result, val;						\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_fetch_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%2")				\
		M32R_LOCK" %1, @%2;		\n\t"			\
		"mv %0, %1			\n\t"			\
		#op " %1, %3;			\n\t"			\
		M32R_UNLOCK" %1, @%2;		\n\t"			\
		: "=&r" (result), "=&r" (val)				\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}
116 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)
122 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)
124 #define atomic_fetch_or atomic_fetch_or
131 #undef ATOMIC_FETCH_OP
132 #undef ATOMIC_OP_RETURN
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
147 * atomic_inc_return - increment atomic variable and return it
148 * @v: pointer of type atomic_t
150 * Atomically increments @v by 1 and returns the result.
152 static __inline__ int atomic_inc_return(atomic_t *v)
157 local_irq_save(flags);
158 __asm__ __volatile__ (
159 "# atomic_inc_return \n\t"
160 DCACHE_CLEAR("%0", "r4", "%1")
161 M32R_LOCK" %0, @%1; \n\t"
163 M32R_UNLOCK" %0, @%1; \n\t"
169 local_irq_restore(flags);
175 * atomic_dec_return - decrement atomic variable and return it
176 * @v: pointer of type atomic_t
178 * Atomically decrements @v by 1 and returns the result.
180 static __inline__ int atomic_dec_return(atomic_t *v)
185 local_irq_save(flags);
186 __asm__ __volatile__ (
187 "# atomic_dec_return \n\t"
188 DCACHE_CLEAR("%0", "r4", "%1")
189 M32R_LOCK" %0, @%1; \n\t"
191 M32R_UNLOCK" %0, @%1; \n\t"
197 local_irq_restore(flags);
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

/* Compare-and-swap / exchange on the counter, delegated to asm/cmpxchg.h. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
253 * __atomic_add_unless - add unless the number is a given value
254 * @v: pointer of type atomic_t
255 * @a: the amount to add to v...
256 * @u: ...unless v is equal to u.
258 * Atomically adds @a to @v, so long as it was not @u.
259 * Returns the old value of @v.
261 static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
266 if (unlikely(c == (u)))
268 old = atomic_cmpxchg((v), c, c + (a));
269 if (likely(old == c))
276 #endif /* _ASM_M32R_ATOMIC_H */