| author | David Daney <ddaney@caviumnetworks.com> | 2010-01-08 17:17:43 -0800 |
| --- | --- | --- |
| committer | Ralf Baechle <ralf@linux-mips.org> | 2010-02-27 12:53:06 +0100 |
| commit | f252ffd50c97dae87b45f1dbad24f71358ccfbd6 | |
| tree | c057fc7c3a819152603b286f935fb367fc48ae73 /arch | |
| parent | ec5380c768864c7afd92aa886dd4bb6d38497a01 | |
MIPS: New macro smp_mb__before_llsc.
Replace some instances of smp_llsc_mb() with a new macro
smp_mb__before_llsc(). It is used before ll/sc sequences that are
documented as needing write barrier semantics.
The default implementation of smp_mb__before_llsc() is just smp_llsc_mb(),
so there are no changes in semantics.
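
For illustration, the call sites change shape roughly as in the sketch below. This is a condensed paraphrase of the atomic_add_return() hunk further down, not the literal kernel code: the ll/sc retry loop and the remaining branches are abbreviated into comments.

```c
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int result;

	/* Before this patch: smp_llsc_mb() here.  The new macro defaults to
	 * the same barrier, so the generated code is unchanged for now. */
	smp_mb__before_llsc();

	/* ... ll/sc retry loop that adds i to v->counter and sets result ... */

	/* The barrier *after* the ll/sc sequence keeps using smp_llsc_mb(). */
	smp_llsc_mb();

	return result;
}
```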
Also simplify the definitions of smp_mb(), smp_rmb(), and smp_wmb() to just
barrier() in the non-SMP case.
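
Concretely, the simplified barrier.h definitions (as they appear in the hunk below) boil down to:

```c
#if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
#define smp_mb()	__asm__ __volatile__("sync" : : :"memory")
#define smp_rmb()	__asm__ __volatile__("sync" : : :"memory")
#define smp_wmb()	__asm__ __volatile__("sync" : : :"memory")
#else
#define smp_mb()	barrier()	/* compiler barrier only */
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#endif
```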
Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'arch')
-rw-r--r-- | arch/mips/include/asm/atomic.h   | 16
-rw-r--r-- | arch/mips/include/asm/barrier.h  | 15
-rw-r--r-- | arch/mips/include/asm/bitops.h   |  8
-rw-r--r-- | arch/mips/include/asm/cmpxchg.h  | 10
-rw-r--r-- | arch/mips/include/asm/spinlock.h |  4
-rw-r--r-- | arch/mips/include/asm/system.h   |  4

6 files changed, 32 insertions(+), 25 deletions(-)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index dd75d673447e..519197ede089 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -137,7 +137,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -189,7 +189,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -249,7 +249,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -516,7 +516,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -568,7 +568,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -628,7 +628,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -788,9 +788,9 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_dec()	smp_llsc_mb()
-#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic-long.h>
diff --git a/arch/mips/include/asm/barrier.h b/arch/mips/include/asm/barrier.h
index 91785dc8e94e..1a5a51c3e96f 100644
--- a/arch/mips/include/asm/barrier.h
+++ b/arch/mips/include/asm/barrier.h
@@ -131,23 +131,26 @@
 #endif /* !CONFIG_CPU_HAS_WB */
 
 #if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
-#define __WEAK_ORDERING_MB	"       sync	\n"
+#define smp_mb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_rmb()	__asm__ __volatile__("sync" : : :"memory")
+#define smp_wmb()	__asm__ __volatile__("sync" : : :"memory")
 #else
-#define __WEAK_ORDERING_MB	"		\n"
+#define smp_mb()	barrier()
+#define smp_rmb()	barrier()
+#define smp_wmb()	barrier()
 #endif
+
 #if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
 #define __WEAK_LLSC_MB		"       sync	\n"
 #else
 #define __WEAK_LLSC_MB		"		\n"
 #endif
 
-#define smp_mb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_rmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-#define smp_wmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
-
 #define set_mb(var, value) \
 	do { var = value; smp_mb(); } while (0)
 
 #define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
 
+#define smp_mb__before_llsc() smp_llsc_mb()
+
 #endif /* __ASM_BARRIER_H */
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 84a383806b2c..9255cfbee459 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -42,7 +42,7 @@
 /*
  * clear_bit() doesn't provide any barrier for the compiler.
  */
-#define smp_mb__before_clear_bit()	smp_llsc_mb()
+#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
 #define smp_mb__after_clear_bit()	smp_llsc_mb()
 
 /*
@@ -258,7 +258,7 @@ static inline int test_and_set_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -395,7 +395,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -485,7 +485,7 @@ static inline int test_and_change_bit(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 815a438a268d..ed9aaaaf0749 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -72,14 +72,14 @@
  */
 extern void __cmpxchg_called_with_bad_pointer(void);
 
-#define __cmpxchg(ptr, old, new, barrier)				\
+#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
 ({									\
 	__typeof__(ptr) __ptr = (ptr);					\
 	__typeof__(*(ptr)) __old = (old);				\
 	__typeof__(*(ptr)) __new = (new);				\
 	__typeof__(*(ptr)) __res = 0;					\
 									\
-	barrier;							\
+	pre_barrier;							\
 									\
 	switch (sizeof(*(__ptr))) {					\
 	case 4:								\
@@ -96,13 +96,13 @@ extern void __cmpxchg_called_with_bad_pointer(void);
 		break;							\
 	}								\
 									\
-	barrier;							\
+	post_barrier;							\
 									\
 	__res;								\
 })
 
-#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_llsc_mb())
-#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, )
+#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
+#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, , )
 
 #define cmpxchg64(ptr, o, n)						\
   ({									\
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 21ef9efbde43..5f16696eaa00 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -138,7 +138,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
 	int tmp;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__ (
@@ -305,7 +305,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
 	unsigned int tmp;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (R10000_LLSC_WAR) {
 		__asm__ __volatile__(
diff --git a/arch/mips/include/asm/system.h b/arch/mips/include/asm/system.h
index 83b5509e09e8..bb937ccfba1e 100644
--- a/arch/mips/include/asm/system.h
+++ b/arch/mips/include/asm/system.h
@@ -95,6 +95,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 {
 	__u32 retval;
 
+	smp_mb__before_llsc();
+
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;
 
@@ -147,6 +149,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 {
 	__u64 retval;
 
+	smp_mb__before_llsc();
+
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;
 
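
The point of splitting the "before" barrier into its own macro is that a CPU whose ll/sc sequences do not need a full sync in front of them can later override just that half, leaving smp_llsc_mb() alone. A hypothetical override, not part of this patch (the config symbol and the choice of smp_wmb() are made up for illustration), could look like:

```c
/* Hypothetical CPU-specific override, for illustration only: this patch
 * itself only introduces the default case below.  CONFIG_CPU_FOO is a
 * made-up config symbol. */
#ifdef CONFIG_CPU_FOO
#define smp_mb__before_llsc()	smp_wmb()	/* assumes this CPU needs less than a full "sync" here */
#else
#define smp_mb__before_llsc()	smp_llsc_mb()	/* default added by this patch */
#endif
```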