/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/irqflags.h>
#include <asm/war.h>

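/*
 * __xchg_u32: atomically swap a 32-bit value, returning the old one.
 * Three implementations are selected at build time: a branch-likely
 * LL/SC retry loop for R10000 CPUs that need the LL/SC workaround
 * (R10000_LLSC_WAR), a plain LL/SC retry loop for other LL/SC-capable
 * CPUs, and an interrupt-disabling fallback for CPUs without LL/SC.
 * The barriers on either side give the operation full ordering on SMP.
 */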
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
        __u32 retval;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long dummy;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %3                  # xchg_u32      \n"
                "       .set    mips0                                   \n"
                "       move    %2, %z4                                 \n"
                "       .set    mips3                                   \n"
                "       sc      %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                : "R" (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %0, %3          # xchg_u32      \n"
                        "       .set    mips0                           \n"
                        "       move    %2, %z4                         \n"
                        "       .set    mips3                           \n"
                        "       sc      %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                        : "R" (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                *m = val;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}

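/*
 * The 64-bit variant mirrors __xchg_u32 but uses the doubleword LL/SC
 * instructions (lld/scd), which exist only on 64-bit CPUs.  On 32-bit
 * kernels __xchg_u64 is mapped to a deliberately undefined function so
 * that any caller fails at link time.
 */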
#ifdef CONFIG_64BIT
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
        __u64 retval;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long dummy;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %3                  # xchg_u64      \n"
                "       move    %2, %z4                                 \n"
                "       scd     %2, %1                                  \n"
                "       beqzl   %2, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                : "R" (*m), "Jr" (val)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long dummy;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %0, %3          # xchg_u64      \n"
                        "       move    %2, %z4                         \n"
                        "       scd     %2, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (retval), "=m" (*m), "=&r" (dummy)
                        : "R" (*m), "Jr" (val)
                        : "memory");
                } while (unlikely(!dummy));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                retval = *m;
                *m = val;
                raw_local_irq_restore(flags);   /* implies memory barrier  */
        }

        smp_llsc_mb();

        return retval;
}
#else
extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
#endif

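/*
 * __xchg dispatches on operand size; only 4- and 8-byte operands are
 * supported.  The xchg() wrapper below rejects 1- and 2-byte operands
 * at compile time via BUILD_BUG_ON.
 */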
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
        switch (size) {
        case 4:
                return __xchg_u32(ptr, x);
        case 8:
                return __xchg_u64(ptr, x);
        }

        return x;
}

#define xchg(ptr, x)                                                    \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);                            \
                                                                        \
        ((__typeof__(*(ptr)))                                           \
                __xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));     \
})
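
/*
 * Illustrative use (hypothetical example, not from this file): publish
 * a new value and retrieve the one it replaced in one atomic step:
 *
 *      static unsigned long pending;
 *      unsigned long old = xchg(&pending, 0UL);
 *
 * xchg() returns the value previously held in *ptr.
 */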

#define __HAVE_ARCH_CMPXCHG 1

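/*
 * __cmpxchg_asm expands to a compare-and-exchange loop for a given
 * load-linked/store-conditional pair: ld and st are instruction
 * mnemonics ("ll"/"sc" for 32-bit, "lld"/"scd" for 64-bit operands).
 * Register $1 (the assembler temporary, hence .set noat) carries the
 * new value into the store-conditional.
 */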
#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                      \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    mips3                           \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    mips3                           \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqzl   $1, 1b                          \n"     \
                "2:                                             \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=R" (*m)                              \
                : "R" (*m), "Jr" (old), "Jr" (new)                      \
                : "memory");                                            \
        } else if (kernel_uses_llsc) {                                  \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    mips3                           \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    mips0                           \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    mips3                           \n"     \
                "       " st "  $1, %1                          \n"     \
                "       beqz    $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
                "2:                                             \n"     \
                : "=&r" (__ret), "=R" (*m)                              \
                : "R" (*m), "Jr" (old), "Jr" (new)                      \
                : "memory");                                            \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                if (__ret == old)                                       \
                        *m = new;                                       \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

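/*
 * __cmpxchg sizes the operation at compile time.  The pre_barrier and
 * post_barrier arguments let the fully ordered cmpxchg() and the
 * unordered cmpxchg_local() share one implementation.  Note that case 8
 * deliberately falls through to the bad-pointer call on 32-bit kernels,
 * where no 8-byte LL/SC pair is available.
 */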
#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)             \
({                                                                      \
        __typeof__(ptr) __ptr = (ptr);                                  \
        __typeof__(*(ptr)) __old = (old);                               \
        __typeof__(*(ptr)) __new = (new);                               \
        __typeof__(*(ptr)) __res = 0;                                   \
                                                                        \
        pre_barrier;                                                    \
                                                                        \
        switch (sizeof(*(__ptr))) {                                     \
        case 4:                                                         \
                __res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
                break;                                                  \
        case 8:                                                         \
                if (sizeof(long) == 8) {                                \
                        __res = __cmpxchg_asm("lld", "scd", __ptr,      \
                                           __old, __new);               \
                        break;                                          \
                }                                                       \
        default:                                                        \
                __cmpxchg_called_with_bad_pointer();                    \
                break;                                                  \
        }                                                               \
                                                                        \
        post_barrier;                                                   \
                                                                        \
        __res;                                                          \
})

#define cmpxchg(ptr, old, new)          __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new)    __cmpxchg(ptr, old, new, , )

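/*
 * Illustrative use (hypothetical example, not from this file): claim a
 * slot only if it is still free.  cmpxchg() returns the value found in
 * *ptr, so equality with the expected old value means the new value was
 * stored:
 *
 *      static unsigned int owner;
 *      if (cmpxchg(&owner, 0, 1) == 0)
 *              pr_debug("claimed\n");
 */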
#define cmpxchg64(ptr, o, n)                                            \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
  })

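/*
 * Only the _local variant has a 32-bit fallback: the generic version
 * from <asm-generic/cmpxchg-local.h> emulates it by disabling
 * interrupts, which is sufficient for CPU-local data.  A fully SMP-safe
 * 64-bit cmpxchg() on a 32-bit kernel would need lld/scd and instead
 * fails at link time via __cmpxchg_called_with_bad_pointer().
 */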
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)                                      \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* __ASM_CMPXCHG_H */