arch/arm64/include/asm/barrier.h
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

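/*
 * __nops(n) expands to an assembler .rept block that emits n NOP
 * instructions; nops(n) issues them inline.
 */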
#define __nops(n)	".rept	" #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))

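/*
 * Hint instructions: send-event, wait-for-event and wait-for-interrupt.
 * The "memory" clobber stops the compiler from reordering memory
 * accesses around them.
 */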
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

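/*
 * Raw barrier instructions. ISB flushes the pipeline so that preceding
 * context-changing operations take effect; DMB orders memory accesses,
 * and DSB additionally waits for them to complete. The opt argument
 * selects the shareability domain and access type (e.g. ish, oshst).
 */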
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

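/*
 * Mandatory barriers: full-system DSBs, strong enough to order accesses
 * against device memory and DMA even on !SMP kernels.
 */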
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

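/*
 * DMA barriers: DMBs restricted to the outer-shareable domain, which is
 * sufficient for ordering CPU accesses against coherent DMA masters.
 */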
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

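/*
 * SMP barriers: DMBs restricted to the inner-shareable domain, which
 * covers all CPUs, so they are cheaper than the mandatory variants.
 */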
#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)

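/*
 * __smp_store_release() uses an STLR store-release: all loads and
 * stores program-order before it are observed before the store itself.
 * It pairs with __smp_load_acquire() below.
 */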
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)

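/*
 * __smp_load_acquire() uses an LDAR load-acquire: no load or store
 * program-order after it can be observed before it. The result is
 * loaded through a union so that the asm output constraint works for
 * any 1-, 2-, 4- or 8-byte type, including pointers.
 */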
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	}								\
	__u.__val;							\
})

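/*
 * Override the generic smp_cond_load_acquire(): rather than busy
 * spinning, __cmpwait_relaxed() (from asm/cmpxchg.h) takes the
 * exclusive monitor on the variable and executes WFE, so the CPU can
 * sleep until the cache line is written or an event is sent.
 */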
#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})
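
/*
 * Illustrative usage (not part of this header): a reader can wait for a
 * producer's flag and then safely observe data published before it.
 * compute(), use(), shared_data and flag are hypothetical names:
 *
 *	writer:	WRITE_ONCE(shared_data, compute());
 *		smp_store_release(&flag, 1);
 *
 *	reader:	smp_cond_load_acquire(&flag, VAL == 1);
 *		use(READ_ONCE(shared_data));
 */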

#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */