/*
 * arch: Cleanup read_barrier_depends() and comments
 * [cascardo/linux.git] / arch / s390 / include / asm / barrier.h
 */
/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

7 #ifndef __ASM_BARRIER_H
8 #define __ASM_BARRIER_H
9
/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define __ASM_BARRIER "bcr 14,0\n"
#else
/* Plain BCR serialization, including checkpoint synchronization */
#define __ASM_BARRIER "bcr 15,0\n"
#endif
/*
 * mb(): full memory barrier — a serializing BCR plus a "memory" clobber
 * so the compiler cannot cache memory values across the barrier.
 */
#define mb() do { asm volatile(__ASM_BARRIER : : : "memory"); } while (0)

/*
 * All read/write and SMP variants map to the full barrier; presumably
 * s390's strong ordering makes finer-grained barriers unnecessary.
 */
#define rmb()				mb()
#define wmb()				mb()
#define smp_mb()			mb()
#define smp_rmb()			rmb()
#define smp_wmb()			wmb()
/*
 * Data-dependency barriers expand to no-ops here; presumably dependent
 * loads are always ordered on this architecture, so no instruction is
 * required.
 */
#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)
/* Barriers around atomic ops: both sides use a full smp_mb(). */
#define smp_mb__before_atomic()		smp_mb()
#define smp_mb__after_atomic()		smp_mb()
/* Store a value, then force a full memory barrier after the store. */
#define set_mb(var, value)		do { var = value; mb(); } while (0)
/*
 * smp_store_release(p, v): store v to *p with release semantics.
 * Only a compiler barrier() precedes the store — no CPU barrier is
 * emitted, presumably because the hardware already orders prior
 * accesses before the store.  compiletime_assert_atomic_type() rejects
 * types that cannot be stored atomically; ACCESS_ONCE() forces a
 * single, non-torn store.  (Both are defined elsewhere in the kernel.)
 */
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	ACCESS_ONCE(*p) = (v);						\
} while (0)
46 #define smp_load_acquire(p)                                             \
47 ({                                                                      \
48         typeof(*p) ___p1 = ACCESS_ONCE(*p);                             \
49         compiletime_assert_atomic_type(*p);                             \
50         barrier();                                                      \
51         ___p1;                                                          \
52 })
53
54 #endif /* __ASM_BARRIER_H */