/*
 * Copyright (C) 2013 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

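/*
 * TPIDR_EL1 holds this CPU's offset into the per-cpu area. It is
 * expected to be written once per CPU during early bring-up (and again
 * on resume) via set_my_cpu_offset(), and only read back afterwards
 * via __my_cpu_offset() below.
 */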
static inline void set_my_cpu_offset(unsigned long off)
{
        asm volatile("msr tpidr_el1, %0" :: "r" (off) : "memory");
}

static inline unsigned long __my_cpu_offset(void)
{
        unsigned long off;

        /*
         * We want to allow caching the value, so avoid using volatile and
         * instead use a fake stack read to hazard against barrier().
         */
        asm("mrs %0, tpidr_el1" : "=r" (off) :
                "Q" (*(const unsigned long *)current_stack_pointer));

        return off;
}
#define __my_cpu_offset __my_cpu_offset()
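
/*
 * Sketch of how the generic percpu layer is assumed to consume this
 * (simplified from asm-generic/percpu.h; exact details vary by kernel
 * version):
 *
 *      #define my_cpu_offset __my_cpu_offset
 *      #define this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, my_cpu_offset)
 *
 * so each this_cpu accessor adds the TPIDR_EL1 value to the per-cpu
 * variable's base address.
 */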

#define PERCPU_OP(op, asm_op)                                           \
static inline unsigned long __percpu_##op(void *ptr,                    \
                        unsigned long val, int size)                    \
{                                                                       \
        unsigned long loop, ret;                                        \
                                                                        \
        switch (size) {                                                 \
        case 1:                                                         \
                asm ("//__per_cpu_" #op "_1\n"                          \
                "1:     ldxrb     %w[ret], %[ptr]\n"                    \
                        #asm_op " %w[ret], %w[ret], %w[val]\n"          \
                "       stxrb     %w[loop], %w[ret], %[ptr]\n"          \
                "       cbnz      %w[loop], 1b"                         \
                : [loop] "=&r" (loop), [ret] "=&r" (ret),               \
                  [ptr] "+Q"(*(u8 *)ptr)                                \
                : [val] "Ir" (val));                                    \
                break;                                                  \
        case 2:                                                         \
                asm ("//__per_cpu_" #op "_2\n"                          \
                "1:     ldxrh     %w[ret], %[ptr]\n"                    \
                        #asm_op " %w[ret], %w[ret], %w[val]\n"          \
                "       stxrh     %w[loop], %w[ret], %[ptr]\n"          \
                "       cbnz      %w[loop], 1b"                         \
                : [loop] "=&r" (loop), [ret] "=&r" (ret),               \
                  [ptr] "+Q"(*(u16 *)ptr)                               \
                : [val] "Ir" (val));                                    \
                break;                                                  \
        case 4:                                                         \
                asm ("//__per_cpu_" #op "_4\n"                          \
                "1:     ldxr      %w[ret], %[ptr]\n"                    \
                        #asm_op " %w[ret], %w[ret], %w[val]\n"          \
                "       stxr      %w[loop], %w[ret], %[ptr]\n"          \
                "       cbnz      %w[loop], 1b"                         \
                : [loop] "=&r" (loop), [ret] "=&r" (ret),               \
                  [ptr] "+Q"(*(u32 *)ptr)                               \
                : [val] "Ir" (val));                                    \
                break;                                                  \
        case 8:                                                         \
                asm ("//__per_cpu_" #op "_8\n"                          \
                "1:     ldxr      %[ret], %[ptr]\n"                     \
                        #asm_op " %[ret], %[ret], %[val]\n"             \
                "       stxr      %w[loop], %[ret], %[ptr]\n"           \
                "       cbnz      %w[loop], 1b"                         \
                : [loop] "=&r" (loop), [ret] "=&r" (ret),               \
                  [ptr] "+Q"(*(u64 *)ptr)                               \
                : [val] "Ir" (val));                                    \
                break;                                                  \
        default:                                                        \
                BUILD_BUG();                                            \
        }                                                               \
                                                                        \
        return ret;                                                     \
}

PERCPU_OP(add, add)
PERCPU_OP(and, and)
PERCPU_OP(or, orr)
#undef PERCPU_OP
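
/*
 * Each PERCPU_OP() invocation above generates one helper; e.g.
 * PERCPU_OP(add, add) expands to roughly (illustrative only):
 *
 *      static inline unsigned long __percpu_add(void *ptr,
 *                              unsigned long val, int size)
 *
 * which performs *ptr <op>= val in an LL/SC loop sized by @size and
 * returns the new value.
 */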

static inline unsigned long __percpu_read(void *ptr, int size)
{
        unsigned long ret;

        switch (size) {
        case 1:
                ret = ACCESS_ONCE(*(u8 *)ptr);
                break;
        case 2:
                ret = ACCESS_ONCE(*(u16 *)ptr);
                break;
        case 4:
                ret = ACCESS_ONCE(*(u32 *)ptr);
                break;
        case 8:
                ret = ACCESS_ONCE(*(u64 *)ptr);
                break;
        default:
                BUILD_BUG();
        }

        return ret;
}

static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
        switch (size) {
        case 1:
                ACCESS_ONCE(*(u8 *)ptr) = (u8)val;
                break;
        case 2:
                ACCESS_ONCE(*(u16 *)ptr) = (u16)val;
                break;
        case 4:
                ACCESS_ONCE(*(u32 *)ptr) = (u32)val;
                break;
        case 8:
                ACCESS_ONCE(*(u64 *)ptr) = (u64)val;
                break;
        default:
                BUILD_BUG();
        }
}
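
/*
 * The plain read/write accessors above need no LL/SC loop: there is no
 * read-modify-write, and ACCESS_ONCE() stops the compiler from tearing
 * or caching the access (naturally aligned loads and stores of these
 * sizes are single-copy atomic on arm64).
 */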

static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
                                                int size)
{
        unsigned long ret, loop;

        switch (size) {
        case 1:
                asm ("//__percpu_xchg_1\n"
                "1:     ldxrb   %w[ret], %[ptr]\n"
                "       stxrb   %w[loop], %w[val], %[ptr]\n"
                "       cbnz    %w[loop], 1b"
                : [loop] "=&r"(loop), [ret] "=&r"(ret),
                  [ptr] "+Q"(*(u8 *)ptr)
                : [val] "r" (val));
                break;
        case 2:
                asm ("//__percpu_xchg_2\n"
                "1:     ldxrh   %w[ret], %[ptr]\n"
                "       stxrh   %w[loop], %w[val], %[ptr]\n"
                "       cbnz    %w[loop], 1b"
                : [loop] "=&r"(loop), [ret] "=&r"(ret),
                  [ptr] "+Q"(*(u16 *)ptr)
                : [val] "r" (val));
                break;
        case 4:
                asm ("//__percpu_xchg_4\n"
                "1:     ldxr    %w[ret], %[ptr]\n"
                "       stxr    %w[loop], %w[val], %[ptr]\n"
                "       cbnz    %w[loop], 1b"
                : [loop] "=&r"(loop), [ret] "=&r"(ret),
                  [ptr] "+Q"(*(u32 *)ptr)
                : [val] "r" (val));
                break;
        case 8:
                asm ("//__percpu_xchg_8\n"
                "1:     ldxr    %[ret], %[ptr]\n"
                "       stxr    %w[loop], %[val], %[ptr]\n"
                "       cbnz    %w[loop], 1b"
                : [loop] "=&r"(loop), [ret] "=&r"(ret),
                  [ptr] "+Q"(*(u64 *)ptr)
                : [val] "r" (val));
                break;
        default:
                BUILD_BUG();
        }

        return ret;
}
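
/*
 * Note that none of the sequences above include memory barriers:
 * this_cpu operations only have to be atomic with respect to
 * interrupts and preemption on the local CPU, not ordered against
 * other CPUs (see Documentation/this_cpu_ops.txt).
 */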

#define _percpu_read(pcp)                                               \
({                                                                      \
        typeof(pcp) __retval;                                           \
        preempt_disable_notrace();                                      \
        __retval = (typeof(pcp))__percpu_read(raw_cpu_ptr(&(pcp)),      \
                                              sizeof(pcp));             \
        preempt_enable_notrace();                                       \
        __retval;                                                       \
})

#define _percpu_write(pcp, val)                                         \
do {                                                                    \
        preempt_disable_notrace();                                      \
        __percpu_write(raw_cpu_ptr(&(pcp)), (unsigned long)(val),       \
                                sizeof(pcp));                           \
        preempt_enable_notrace();                                       \
} while (0)

#define _pcp_protect(operation, pcp, val)                       \
({                                                              \
        typeof(pcp) __retval;                                   \
        preempt_disable();                                      \
        __retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),  \
                                          (val), sizeof(pcp));  \
        preempt_enable();                                       \
        __retval;                                               \
})
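
/*
 * Expansion example (illustrative): _percpu_add(pcp, val) becomes
 *
 *      preempt_disable();
 *      __percpu_add(raw_cpu_ptr(&pcp), val, sizeof(pcp));
 *      preempt_enable();
 *
 * so the LL/SC helper always runs with preemption disabled and the
 * task cannot migrate between reading TPIDR_EL1 and the access itself.
 */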

#define _percpu_add(pcp, val) \
        _pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
        _pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
        _pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) (typeof(pcp)) \
        _pcp_protect(__percpu_xchg, pcp, (unsigned long)(val))

#define this_cpu_add_1(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_2(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_1(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_2(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_1(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_2(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_1(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_2(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)
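
/*
 * Usage example (hypothetical variable, illustrative only):
 *
 *      DEFINE_PER_CPU(unsigned long, hits);
 *      ...
 *      this_cpu_add(hits, 1);          routed to this_cpu_add_8() here
 *      n = this_cpu_read(hits);        routed to this_cpu_read_8()
 *
 * The generic layer selects the _1/_2/_4/_8 variant by sizeof().
 */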

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */