Merge tag 'arc-3.18-rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/vgupta/arc
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index 6638a03..067551b 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
 
 #define atomic_set(v, i) (((v)->counter) = (i))
 
-static inline void atomic_add(int i, atomic_t *v)
-{
-       unsigned int temp;
-
-       __asm__ __volatile__(
-       "1:     llock   %0, [%1]        \n"
-       "       add     %0, %0, %2      \n"
-       "       scond   %0, [%1]        \n"
-       "       bnz     1b              \n"
-       : "=&r"(temp)   /* Early clobber, to prevent reg reuse */
-       : "r"(&v->counter), "ir"(i)
-       : "cc");
-}
-
-static inline void atomic_sub(int i, atomic_t *v)
-{
-       unsigned int temp;
-
-       __asm__ __volatile__(
-       "1:     llock   %0, [%1]        \n"
-       "       sub     %0, %0, %2      \n"
-       "       scond   %0, [%1]        \n"
-       "       bnz     1b              \n"
-       : "=&r"(temp)
-       : "r"(&v->counter), "ir"(i)
-       : "cc");
-}
-
-/* add and also return the new value */
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-       unsigned int temp;
-
-       __asm__ __volatile__(
-       "1:     llock   %0, [%1]        \n"
-       "       add     %0, %0, %2      \n"
-       "       scond   %0, [%1]        \n"
-       "       bnz     1b              \n"
-       : "=&r"(temp)
-       : "r"(&v->counter), "ir"(i)
-       : "cc");
-
-       return temp;
-}
-
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-       unsigned int temp;
-
-       __asm__ __volatile__(
-       "1:     llock   %0, [%1]        \n"
-       "       sub     %0, %0, %2      \n"
-       "       scond   %0, [%1]        \n"
-       "       bnz     1b              \n"
-       : "=&r"(temp)
-       : "r"(&v->counter), "ir"(i)
-       : "cc");
-
-       return temp;
-}
-
-static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
-{
-       unsigned int temp;
-
-       __asm__ __volatile__(
-       "1:     llock   %0, [%1]        \n"
-       "       bic     %0, %0, %2      \n"
-       "       scond   %0, [%1]        \n"
-       "       bnz     1b              \n"
-       : "=&r"(temp)
-       : "r"(addr), "ir"(mask)
-       : "cc");
+#define ATOMIC_OP(op, c_op, asm_op)                                    \
+static inline void atomic_##op(int i, atomic_t *v)                     \
+{                                                                      \
+       unsigned int temp;                                              \
+                                                                       \
+       __asm__ __volatile__(                                           \
+       "1:     llock   %0, [%1]        \n"                             \
+       "       " #asm_op " %0, %0, %2  \n"                             \
+       "       scond   %0, [%1]        \n"                             \
+       "       bnz     1b              \n"                             \
+       : "=&r"(temp)   /* Early clobber, to prevent reg reuse */       \
+       : "r"(&v->counter), "ir"(i)                                     \
+       : "cc");                                                        \
+}
+
+#define ATOMIC_OP_RETURN(op, c_op, asm_op)                             \
+static inline int atomic_##op##_return(int i, atomic_t *v)             \
+{                                                                      \
+       unsigned int temp;                                              \
+                                                                       \
+       __asm__ __volatile__(                                           \
+       "1:     llock   %0, [%1]        \n"                             \
+       "       " #asm_op " %0, %0, %2  \n"                             \
+       "       scond   %0, [%1]        \n"                             \
+       "       bnz     1b              \n"                             \
+       : "=&r"(temp)                                                   \
+       : "r"(&v->counter), "ir"(i)                                     \
+       : "cc");                                                        \
+                                                                       \
+       return temp;                                                    \
 }
 
 #else  /* !CONFIG_ARC_HAS_LLSC */
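
For reference, this is what the LLSC generator above produces once instantiated. A hand-expanded sketch of ATOMIC_OP(add, +=, add) follows; the c_op argument is unused in this variant, and only the stringified asm_op is pasted into the inline assembly:

static inline void atomic_add(int i, atomic_t *v)
{
        unsigned int temp;

        __asm__ __volatile__(
        "1:     llock   %0, [%1]        \n"     /* load-locked from v->counter */
        "       add %0, %0, %2          \n"     /* #asm_op pasted as "add" */
        "       scond   %0, [%1]        \n"     /* store-conditional back */
        "       bnz     1b              \n"     /* retry if scond failed */
        : "=&r"(temp)   /* Early clobber, to prevent reg reuse */
        : "r"(&v->counter), "ir"(i)
        : "cc");
}
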
@@ -124,6 +81,7 @@ static inline void atomic_set(atomic_t *v, int i)
        v->counter = i;
        atomic_ops_unlock(flags);
 }
+
 #endif
 
 /*
@@ -131,62 +89,46 @@ static inline void atomic_set(atomic_t *v, int i)
  * Locking would change to irq-disabling only (UP) and spinlocks (SMP)
  */
 
-static inline void atomic_add(int i, atomic_t *v)
-{
-       unsigned long flags;
-
-       atomic_ops_lock(flags);
-       v->counter += i;
-       atomic_ops_unlock(flags);
+#define ATOMIC_OP(op, c_op, asm_op)                                    \
+static inline void atomic_##op(int i, atomic_t *v)                     \
+{                                                                      \
+       unsigned long flags;                                            \
+                                                                       \
+       atomic_ops_lock(flags);                                         \
+       v->counter c_op i;                                              \
+       atomic_ops_unlock(flags);                                       \
 }
 
-static inline void atomic_sub(int i, atomic_t *v)
-{
-       unsigned long flags;
-
-       atomic_ops_lock(flags);
-       v->counter -= i;
-       atomic_ops_unlock(flags);
+#define ATOMIC_OP_RETURN(op, c_op, asm_op)                             \
+static inline int atomic_##op##_return(int i, atomic_t *v)             \
+{                                                                      \
+       unsigned long flags;                                            \
+       unsigned long temp;                                             \
+                                                                       \
+       atomic_ops_lock(flags);                                         \
+       temp = v->counter;                                              \
+       temp c_op i;                                                    \
+       v->counter = temp;                                              \
+       atomic_ops_unlock(flags);                                       \
+                                                                       \
+       return temp;                                                    \
 }
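
For comparison, a hand-expanded sketch of ATOMIC_OP_RETURN(sub, -=, sub) in the !CONFIG_ARC_HAS_LLSC case; here c_op does the actual work, and asm_op is accepted only so that ATOMIC_OPS below can hand both generators the same argument list:

static inline int atomic_sub_return(int i, atomic_t *v)
{
        unsigned long flags;
        unsigned long temp;

        atomic_ops_lock(flags);         /* irq-off (UP) or spinlock (SMP) */
        temp = v->counter;
        temp -= i;                      /* "temp c_op i" with c_op == -= */
        v->counter = temp;
        atomic_ops_unlock(flags);

        return temp;
}
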
 
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-       unsigned long flags;
-       unsigned long temp;
-
-       atomic_ops_lock(flags);
-       temp = v->counter;
-       temp += i;
-       v->counter = temp;
-       atomic_ops_unlock(flags);
-
-       return temp;
-}
-
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-       unsigned long flags;
-       unsigned long temp;
-
-       atomic_ops_lock(flags);
-       temp = v->counter;
-       temp -= i;
-       v->counter = temp;
-       atomic_ops_unlock(flags);
+#endif /* !CONFIG_ARC_HAS_LLSC */
 
-       return temp;
-}
+#define ATOMIC_OPS(op, c_op, asm_op)                                   \
+       ATOMIC_OP(op, c_op, asm_op)                                     \
+       ATOMIC_OP_RETURN(op, c_op, asm_op)
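
ATOMIC_OPS is just a fan-out: one invocation emits both the void op and the value-returning op from whichever pair of generators (LLSC or lock-based) survived preprocessing. For example, ATOMIC_OPS(sub, -=, sub) is equivalent to writing:

ATOMIC_OP(sub, -=, sub)
ATOMIC_OP_RETURN(sub, -=, sub)
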
 
-static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
-{
-       unsigned long flags;
+ATOMIC_OPS(add, +=, add)
+ATOMIC_OPS(sub, -=, sub)
+ATOMIC_OP(and, &=, and)
 
-       atomic_ops_lock(flags);
-       *addr &= ~mask;
-       atomic_ops_unlock(flags);
-}
+#define atomic_clear_mask(mask, v) atomic_and(~(mask), (v))
 
-#endif /* !CONFIG_ARC_HAS_LLSC */
+#undef ATOMIC_OPS
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
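
A short usage sketch of the API these instantiations generate; the pending counter and the producer/consumer scenario are illustrative only, and ATOMIC_INIT/atomic_read come from the rest of the atomic API:

static atomic_t pending = ATOMIC_INIT(0);

static void producer(void)
{
        atomic_add(2, &pending);        /* from ATOMIC_OPS(add, +=, add) */
}

static int consumer(void)
{
        /* from ATOMIC_OPS(sub, -=, sub): returns the post-op value */
        if (atomic_sub_return(1, &pending) == 0)
                return 1;               /* drained */

        atomic_clear_mask(0x1, &pending); /* expands to atomic_and(~0x1, &pending) */
        return 0;
}
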
 
 /**
  * __atomic_add_unless - add unless the number is a given value