/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

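/*
 * Low-level hint instructions: sev() signals an event to the other
 * cores, wfe() suspends the CPU until such an event is signalled and
 * wfi() suspends it until an interrupt arrives.
 */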
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

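/*
 * isb() synchronizes the instruction stream (flushing the pipeline so
 * that context changes take effect); dsb() is a full-system data
 * synchronization barrier that completes all outstanding memory
 * accesses before execution continues.
 */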
#define isb()		asm volatile("isb" : : : "memory")
#define dsb()		asm volatile("dsb sy" : : : "memory")

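/*
 * Mandatory barriers, strong enough to order accesses against devices
 * as well as other CPUs: rmb() and wmb() use the load-only and
 * store-only DSB variants respectively.
 */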
#define mb()		dsb()
#define rmb()		asm volatile("dsb ld" : : : "memory")
#define wmb()		asm volatile("dsb st" : : : "memory")

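/*
 * On !CONFIG_SMP kernels there is no other CPU to order against, so the
 * smp_*() barriers only need to stop the compiler from reordering.
 */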
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()

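/*
 * UP acquire/release: a compiler barrier plus a volatile access is
 * sufficient. compiletime_assert_atomic_type() rejects types that
 * cannot be loaded or stored in a single access.
 */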
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	ACCESS_ONCE(*p) = (v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})

#else

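/*
 * SMP barriers can use DMB limited to the inner shareable domain, which
 * covers every CPU the kernel runs on and is cheaper than a full-system
 * DSB.
 */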
#define smp_mb()	asm volatile("dmb ish" : : : "memory")
#define smp_rmb()	asm volatile("dmb ishld" : : : "memory")
#define smp_wmb()	asm volatile("dmb ishst" : : : "memory")

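/*
 * On SMP, acquire/release map directly onto the ARMv8 LDAR and STLR
 * instructions, which order the access against later (acquire) or
 * earlier (release) memory accesses without a separate barrier.
 */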
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1;						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	}								\
	___p1;								\
})

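/*
 * Illustrative pairing (a sketch, not code from this header): a writer
 * publishes data with smp_store_release() and a reader polls the flag
 * with smp_load_acquire(); once the reader observes the flag set, it is
 * guaranteed to observe the data written before the release.
 *
 *	WRITER				READER
 *	data = 42;			while (!smp_load_acquire(&flag))
 *	smp_store_release(&flag, 1);		cpu_relax();
 *					BUG_ON(data != 42);
 */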
#endif

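/*
 * arm64 respects address dependencies between loads, so the
 * read_barrier_depends() family (only meaningful on Alpha) expands to
 * nothing.
 */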
#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)

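/* set_mb() assigns the variable and then orders it with a full barrier. */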
#define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
#define nop()		asm volatile("nop")

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */