/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/types.h>

extern void __xchg_called_with_bad_pointer(void);

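/*
 * __xchg() implements xchg() for 1, 2, 4 and (on 64 bit) 8 byte operands.
 * There is no byte or halfword sized COMPARE AND SWAP, so the 1 and 2 byte
 * cases operate on the surrounding aligned word: the address is rounded
 * down to a word boundary, the big-endian bit shift of the embedded
 * byte/halfword is computed, and a CS loop replaces just that part of the
 * word, retrying if another CPU modified the word in the meantime.
 */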
static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
        unsigned long addr, old;
        int shift;

        switch (size) {
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        "       l       %0,%4\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%4\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" (x << shift), "d" (~(255 << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        "       l       %0,%4\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%4\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" (x << shift), "d" (~(65535 << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 4:
                asm volatile(
                        "       l       %0,%3\n"
                        "0:     cs      %0,%2,%3\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) ptr)
                        : "d" (x), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return old;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        "       lg      %0,%3\n"
                        "0:     csg     %0,%2,%3\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=m" (*(long *) ptr)
                        : "d" (x), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return old;
#endif /* CONFIG_64BIT */
        }
        __xchg_called_with_bad_pointer();
        return x;
}

#define xchg(ptr, x)                                                      \
({                                                                        \
        __typeof__(*(ptr)) __ret;                                         \
        __ret = (__typeof__(*(ptr)))                                      \
                __xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
        __ret;                                                            \
})
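
/*
 * Illustrative use only (pending_flag is a made-up variable for this
 * sketch): xchg() atomically stores a new value and returns the previous
 * contents of the location, so e.g.
 *
 *        old = xchg(&pending_flag, 0);
 *
 * fetches the current value of pending_flag and clears it in one atomic
 * step; no update by another CPU can be lost in between.
 */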

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
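
/*
 * Illustrative use only (lock_word is a made-up variable for this sketch):
 *
 *        old = cmpxchg(&lock_word, 0, 1);
 *
 * stores 1 into lock_word only if it still contains 0; in either case old
 * receives the value that was found there, so old == 0 means the update
 * took place.
 */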

#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

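/*
 * As in __xchg() above, the 1 and 2 byte cases of __cmpxchg() work on the
 * surrounding aligned word: the expected and the replacement word are built
 * from the bytes currently in memory with the old/new value shifted into
 * place, and the COMPARE AND SWAP is retried only when it failed because
 * one of the *other* bytes of the word changed concurrently (the masked
 * XOR check after the CS instruction).
 */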
static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
                                      unsigned long new, int size)
{
        unsigned long addr, prev, tmp;
        int shift;

        switch (size) {
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        "       l       %0,%2\n"
                        "0:     nr      %0,%5\n"
                        "       lr      %1,%0\n"
                        "       or      %0,%3\n"
                        "       or      %1,%4\n"
                        "       cs      %0,%1,%2\n"
                        "       jnl     1f\n"
                        "       xr      %1,%0\n"
                        "       nr      %1,%5\n"
                        "       jnz     0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
                        : "d" (old << shift), "d" (new << shift),
                          "d" (~(255 << shift))
                        : "memory", "cc");
                return prev >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        "       l       %0,%2\n"
                        "0:     nr      %0,%5\n"
                        "       lr      %1,%0\n"
                        "       or      %0,%3\n"
                        "       or      %1,%4\n"
                        "       cs      %0,%1,%2\n"
                        "       jnl     1f\n"
                        "       xr      %1,%0\n"
                        "       nr      %1,%5\n"
                        "       jnz     0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
                        : "d" (old << shift), "d" (new << shift),
                          "d" (~(65535 << shift))
                        : "memory", "cc");
                return prev >> shift;
        case 4:
                asm volatile(
                        "       cs      %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(int *) ptr)
                        : "0" (old), "d" (new), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return prev;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        "       csg     %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(long *) ptr)
                        : "0" (old), "d" (new), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return prev;
#endif /* CONFIG_64BIT */
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr, o, n)                                               \
({                                                                       \
        __typeof__(*(ptr)) __ret;                                        \
        __ret = (__typeof__(*(ptr)))                                     \
                __cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
                          sizeof(*(ptr)));                               \
        __ret;                                                           \
})

#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)                                            \
({                                                                      \
        cmpxchg((ptr), (o), (n));                                       \
})
#else /* CONFIG_64BIT */
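/*
 * On 31 bit there is no 64 bit COMPARE AND SWAP (CSG), so cmpxchg64() is
 * built on COMPARE DOUBLE AND SWAP (CDS), which works on an even/odd
 * register pair (hence the register_pair operands) and a doubleword in
 * storage.
 */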
static inline unsigned long long __cmpxchg64(void *ptr,
                                             unsigned long long old,
                                             unsigned long long new)
{
        register_pair rp_old = {.pair = old};
        register_pair rp_new = {.pair = new};
        unsigned long long *ullptr = ptr;

        asm volatile(
                "       cds     %0,%2,%1"
                : "+&d" (rp_old), "+Q" (*ullptr)
                : "d" (rp_new)
                : "memory", "cc");
        return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)                            \
({                                                      \
        __typeof__(*(ptr)) __ret;                       \
        __ret = (__typeof__(*(ptr)))                    \
                __cmpxchg64((ptr),                      \
                            (unsigned long long)(o),    \
                            (unsigned long long)(n));   \
        __ret;                                          \
})
#endif /* CONFIG_64BIT */

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
        switch (size) {
        case 1:
        case 2:
        case 4:
#ifdef CONFIG_64BIT
        case 8:
#endif
                return __cmpxchg(ptr, old, new, size);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                        \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                __cmpxchg_local((ptr), (unsigned long)(o),              \
                                (unsigned long)(n), sizeof(*(ptr)));    \
        __ret;                                                          \
})
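
/*
 * Illustrative use only (cnt is a made-up per-CPU counter for this sketch):
 * cmpxchg_local() provides the cmpxchg() semantics but is only guaranteed
 * to be atomic with respect to code running on the same CPU, e.g.
 *
 *        do {
 *                old = cnt;
 *        } while (cmpxchg_local(&cnt, old, old + 1) != old);
 */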

#define cmpxchg64_local(ptr, o, n)      cmpxchg64((ptr), (o), (n))

#endif /* __ASM_CMPXCHG_H */