author | David Daney <ddaney@caviumnetworks.com> | 2010-01-08 17:17:43 -0800
---|---|---
committer | Ralf Baechle <ralf@linux-mips.org> | 2010-02-27 12:53:06 +0100
commit | f252ffd50c97dae87b45f1dbad24f71358ccfbd6 | (patch)
tree | c057fc7c3a819152603b286f935fb367fc48ae73 | /arch/mips/include/asm/atomic.h
parent | ec5380c768864c7afd92aa886dd4bb6d38497a01 | (diff)
MIPS: New macro smp_mb__before_llsc.
Replace some instances of smp_llsc_mb() with a new macro
smp_mb__before_llsc(). It is used before ll/sc sequences that are
documented as needing write barrier semantics.
The default implementation of smp_mb__before_llsc() is just smp_llsc_mb(),
so there are no changes in semantics.
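(Illustrative sketch only, not part of this patch: the diff below touches only atomic.h, and the barrier header itself is not shown here. Assuming the default described above, the macro would reduce to something like the following, leaving an architecture free to override it with a weaker, write-only barrier.)

```c
/*
 * Hypothetical sketch of the default described in the commit message;
 * the real definition lives in the MIPS barrier header, outside this diff.
 * By default smp_mb__before_llsc() is just smp_llsc_mb(), so the generated
 * code is unchanged unless an architecture overrides it.
 */
#ifndef smp_mb__before_llsc
#define smp_mb__before_llsc()	smp_llsc_mb()
#endif
```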
Also simplify the definitions of smp_mb(), smp_rmb(), and smp_wmb() to be just
barrier() in the non-SMP case.
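(Likewise a hedged sketch, outside the scope of this diffstat: in the non-SMP case the barriers named above only need to constrain the compiler, so they collapse to barrier().)

```c
/*
 * Sketch of the simplified non-SMP definitions mentioned above; the
 * actual change is in the MIPS barrier header, not in atomic.h.
 * With a single CPU there is no other processor to order against,
 * so a compiler barrier is sufficient.
 */
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#endif
```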
Signed-off-by: David Daney <ddaney@caviumnetworks.com>
To: linux-mips@linux-mips.org
Patchwork: http://patchwork.linux-mips.org/patch/851/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'arch/mips/include/asm/atomic.h')
-rw-r--r-- | arch/mips/include/asm/atomic.h | 16
1 file changed, 8 insertions, 8 deletions
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index dd75d673447e..519197ede089 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -137,7 +137,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -189,7 +189,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -249,7 +249,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -516,7 +516,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -568,7 +568,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -628,7 +628,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -788,9 +788,9 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_dec()	smp_llsc_mb()
-#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic-long.h>