Diffstat (limited to 'arch')
-rw-r--r--   arch/mips/Makefile              |   3
-rw-r--r--   arch/mips/include/asm/atomic.h  | 208
-rw-r--r--   arch/mips/include/asm/bitops.h  | 270
-rw-r--r--   arch/mips/include/asm/cmpxchg.h |   7
-rw-r--r--   arch/mips/include/asm/system.h  |  52
5 files changed, 243 insertions, 297 deletions
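Every hunk below applies one transformation: LL/SC retry loops that used to branch out of line to a `.subsection 2` stub (`beqz %0, 2f` ... `2: b 1b` ... `.previous`) now end the inline assembly right after the `sc` and retry from C with `do { ... } while (unlikely(!temp));`. This also lets the Makefile drop its conditional `-ffunction-sections`, apparently kept only on behalf of those out-of-line branches. A minimal sketch of the new shape, assuming a MIPS target with LL/SC and GCC extended asm; the helper below is illustrative, not part of the patch:

	/*
	 * Sketch of the retry pattern the patch adopts; names are
	 * illustrative, not taken from the kernel.
	 */
	static inline void sketch_atomic_add(int i, int *counter)
	{
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3		\n"
			"	ll	%0, %1		\n" /* load-linked */
			"	addu	%0, %2		\n"
			"	sc	%0, %1		\n" /* temp = 1 if the store won */
			"	.set	mips0		\n"
			: "=&r" (temp), "+m" (*counter)
			: "Ir" (i));
		} while (__builtin_expect(!temp, 0));	/* open-coded unlikely() */
	}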
diff --git a/arch/mips/Makefile b/arch/mips/Makefile
index f4a4b663ebb3..1a81240102c5 100644
--- a/arch/mips/Makefile
+++ b/arch/mips/Makefile
@@ -48,9 +48,6 @@ ifneq ($(SUBARCH),$(ARCH))
   endif
 endif
 
-ifndef CONFIG_FUNCTION_TRACER
-cflags-y := -ffunction-sections
-endif
 ifdef CONFIG_FUNCTION_GRAPH_TRACER
   ifndef KBUILD_MCOUNT_RA_ADDRESS
     ifeq ($(call cc-option-yn,-mmcount-ra-address), y)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 47d87da379f9..4a02fe891ab6 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -64,18 +64,16 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 	} else if (kernel_uses_llsc) {
 		int temp;
 
-		__asm__ __volatile__(
-		"	.set	mips3				\n"
-		"1:	ll	%0, %1	# atomic_add		\n"
-		"	addu	%0, %2				\n"
-		"	sc	%0, %1				\n"
-		"	beqz	%0, 2f				\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter));
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3			\n"
+			"	ll	%0, %1	# atomic_add	\n"
+			"	addu	%0, %2			\n"
+			"	sc	%0, %1			\n"
+			"	.set	mips0			\n"
+			: "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter));
+		} while (unlikely(!temp));
 	} else {
 		unsigned long flags;
 
@@ -109,18 +107,16 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 	} else if (kernel_uses_llsc) {
 		int temp;
 
-		__asm__ __volatile__(
-		"	.set	mips3				\n"
-		"1:	ll	%0, %1	# atomic_sub		\n"
-		"	subu	%0, %2				\n"
-		"	sc	%0, %1				\n"
-		"	beqz	%0, 2f				\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter));
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3			\n"
+			"	ll	%0, %1	# atomic_sub	\n"
+			"	subu	%0, %2			\n"
+			"	sc	%0, %1			\n"
+			"	.set	mips0			\n"
+			: "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter));
+		} while (unlikely(!temp));
 	} else {
 		unsigned long flags;
 
@@ -156,20 +152,19 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 	} else if (kernel_uses_llsc) {
 		int temp;
 
-		__asm__ __volatile__(
-		"	.set	mips3				\n"
-		"1:	ll	%1, %2	# atomic_add_return	\n"
-		"	addu	%0, %1, %3			\n"
-		"	sc	%0, %2				\n"
-		"	beqz	%0, 2f				\n"
-		"	addu	%0, %1, %3			\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
-		"	.set	mips0				\n"
-		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter)
-		: "memory");
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3				\n"
+			"	ll	%1, %2	# atomic_add_return	\n"
+			"	addu	%0, %1, %3			\n"
+			"	sc	%0, %2				\n"
+			"	.set	mips0				\n"
+			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter)
+			: "memory");
+		} while (unlikely(!result));
+
+		result = temp + i;
 	} else {
 		unsigned long flags;
 
@@ -205,23 +200,24 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
+
+		result = temp - i;
 	} else if (kernel_uses_llsc) {
 		int temp;
 
-		__asm__ __volatile__(
-		"	.set	mips3				\n"
-		"1:	ll	%1, %2	# atomic_sub_return	\n"
-		"	subu	%0, %1, %3			\n"
-		"	sc	%0, %2				\n"
-		"	beqz	%0, 2f				\n"
-		"	subu	%0, %1, %3			\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
-		"	.set	mips0				\n"
-		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter)
-		: "memory");
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3				\n"
+			"	ll	%1, %2	# atomic_sub_return	\n"
+			"	subu	%0, %1, %3			\n"
+			"	sc	%0, %2				\n"
+			"	.set	mips0				\n"
+			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter)
+			: "memory");
+		} while (unlikely(!result));
+
+		result = temp - i;
 	} else {
 		unsigned long flags;
 
@@ -279,12 +275,9 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	bltz	%0, 1f				\n"
 		"	sc	%0, %2				\n"
 		"	.set	noreorder			\n"
-		"	beqz	%0, 2f				\n"
+		"	beqz	%0, 1b				\n"
 		"	subu	%0, %1, %3			\n"
 		"	.set	reorder				\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
 		"1:						\n"
 		"	.set	mips0				\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -443,18 +436,16 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 	} else if (kernel_uses_llsc) {
 		long temp;
 
-		__asm__ __volatile__(
-		"	.set	mips3				\n"
-		"1:	lld	%0, %1	# atomic64_add		\n"
-		"	daddu	%0, %2				\n"
-		"	scd	%0, %1				\n"
-		"	beqz	%0, 2f				\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter));
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3			\n"
+			"	lld	%0, %1	# atomic64_add	\n"
+			"	daddu	%0, %2			\n"
+			"	scd	%0, %1			\n"
+			"	.set	mips0			\n"
+			: "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter));
+		} while (unlikely(!temp));
 	} else {
 		unsigned long flags;
 
@@ -488,18 +479,16 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 	} else if (kernel_uses_llsc) {
 		long temp;
 
-		__asm__ __volatile__(
-		"	.set	mips3				\n"
-		"1:	lld	%0, %1	# atomic64_sub		\n"
-		"	dsubu	%0, %2				\n"
-		"	scd	%0, %1				\n"
-		"	beqz	%0, 2f				\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter));
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3			\n"
+			"	lld	%0, %1	# atomic64_sub	\n"
+			"	dsubu	%0, %2			\n"
+			"	scd	%0, %1			\n"
+			"	.set	mips0			\n"
+			: "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter));
+		} while (unlikely(!temp));
 	} else {
 		unsigned long flags;
 
@@ -535,20 +524,19 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 	} else if (kernel_uses_llsc) {
 		long temp;
 
-		__asm__ __volatile__(
-		"	.set	mips3				\n"
-		"1:	lld	%1, %2	# atomic64_add_return	\n"
-		"	daddu	%0, %1, %3			\n"
-		"	scd	%0, %2				\n"
-		"	beqz	%0, 2f				\n"
-		"	daddu	%0, %1, %3			\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
-		"	.set	mips0				\n"
-		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter)
-		: "memory");
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3				\n"
+			"	lld	%1, %2	# atomic64_add_return	\n"
+			"	daddu	%0, %1, %3			\n"
+			"	scd	%0, %2				\n"
+			"	.set	mips0				\n"
+			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter)
+			: "memory");
+		} while (unlikely(!result));
+
+		result = temp + i;
 	} else {
 		unsigned long flags;
 
@@ -587,20 +575,19 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 	} else if (kernel_uses_llsc) {
 		long temp;
 
-		__asm__ __volatile__(
-		"	.set	mips3				\n"
-		"1:	lld	%1, %2	# atomic64_sub_return	\n"
-		"	dsubu	%0, %1, %3			\n"
-		"	scd	%0, %2				\n"
-		"	beqz	%0, 2f				\n"
-		"	dsubu	%0, %1, %3			\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
-		"	.set	mips0				\n"
-		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter)
-		: "memory");
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3				\n"
+			"	lld	%1, %2	# atomic64_sub_return	\n"
+			"	dsubu	%0, %1, %3			\n"
+			"	scd	%0, %2				\n"
+			"	.set	mips0				\n"
+			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter)
+			: "memory");
+		} while (unlikely(!result));
+
+		result = temp - i;
 	} else {
 		unsigned long flags;
 
@@ -658,12 +645,9 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		"	bltz	%0, 1f				\n"
 		"	scd	%0, %2				\n"
 		"	.set	noreorder			\n"
-		"	beqz	%0, 2f				\n"
+		"	beqz	%0, 1b				\n"
 		"	dsubu	%0, %1, %3			\n"
 		"	.set	reorder				\n"
-		"	.subsection 2				\n"
-		"2:	b	1b				\n"
-		"	.previous				\n"
 		"1:						\n"
 		"	.set	mips0				\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
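Note on the *_return hunks above: once the retry branch moves out to C, the `sc` success flag lands in `result` and clobbers the value the old code rebuilt in the branch delay slot, so the patch keeps the loaded value in `temp` and recomputes the return value in C after the loop (`result = temp + i;`). A sketch of that shape, under the same assumptions as before (the helper name and plain `int *` argument are illustrative, not from the patch):

	/* Illustrative only; mirrors the atomic_add_return() hunks above. */
	static inline int sketch_atomic_add_return(int i, int *counter)
	{
		int result, temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3		\n"
			"	ll	%1, %2		\n" /* temp = old value */
			"	addu	%0, %1, %3	\n" /* result = temp + i */
			"	sc	%0, %2		\n" /* result = sc success flag */
			"	.set	mips0		\n"
			: "=&r" (result), "=&r" (temp), "+m" (*counter)
			: "Ir" (i)
			: "memory");
		} while (__builtin_expect(!result, 0));

		return temp + i;	/* recompute; sc overwrote result */
	}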
\n" + " beqz %0, 1b \n" " dsubu %0, %1, %3 \n" " .set reorder \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" "1: \n" " .set mips0 \n" : "=&r" (result), "=&r" (temp), "=m" (v->counter) diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h index b0ce7ca2851f..50b4ef288c53 100644 --- a/arch/mips/include/asm/bitops.h +++ b/arch/mips/include/asm/bitops.h @@ -73,30 +73,26 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr) : "ir" (1UL << bit), "m" (*m)); #ifdef CONFIG_CPU_MIPSR2 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { - __asm__ __volatile__( - "1: " __LL "%0, %1 # set_bit \n" - " " __INS "%0, %4, %2, 1 \n" - " " __SC "%0, %1 \n" - " beqz %0, 2f \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" - : "=&r" (temp), "=m" (*m) - : "ir" (bit), "m" (*m), "r" (~0)); + do { + __asm__ __volatile__( + " " __LL "%0, %1 # set_bit \n" + " " __INS "%0, %3, %2, 1 \n" + " " __SC "%0, %1 \n" + : "=&r" (temp), "+m" (*m) + : "ir" (bit), "r" (~0)); + } while (unlikely(!temp)); #endif /* CONFIG_CPU_MIPSR2 */ } else if (kernel_uses_llsc) { - __asm__ __volatile__( - " .set mips3 \n" - "1: " __LL "%0, %1 # set_bit \n" - " or %0, %2 \n" - " " __SC "%0, %1 \n" - " beqz %0, 2f \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" - " .set mips0 \n" - : "=&r" (temp), "=m" (*m) - : "ir" (1UL << bit), "m" (*m)); + do { + __asm__ __volatile__( + " .set mips3 \n" + " " __LL "%0, %1 # set_bit \n" + " or %0, %2 \n" + " " __SC "%0, %1 \n" + " .set mips0 \n" + : "=&r" (temp), "+m" (*m) + : "ir" (1UL << bit)); + } while (unlikely(!temp)); } else { volatile unsigned long *a = addr; unsigned long mask; @@ -134,34 +130,30 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr) " " __SC "%0, %1 \n" " beqzl %0, 1b \n" " .set mips0 \n" - : "=&r" (temp), "=m" (*m) - : "ir" (~(1UL << bit)), "m" (*m)); + : "=&r" (temp), "+m" (*m) + : "ir" (~(1UL << bit))); #ifdef CONFIG_CPU_MIPSR2 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) { - __asm__ __volatile__( - "1: " __LL "%0, %1 # clear_bit \n" - " " __INS "%0, $0, %2, 1 \n" - " " __SC "%0, %1 \n" - " beqz %0, 2f \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" - : "=&r" (temp), "=m" (*m) - : "ir" (bit), "m" (*m)); + do { + __asm__ __volatile__( + " " __LL "%0, %1 # clear_bit \n" + " " __INS "%0, $0, %2, 1 \n" + " " __SC "%0, %1 \n" + : "=&r" (temp), "+m" (*m) + : "ir" (bit)); + } while (unlikely(!temp)); #endif /* CONFIG_CPU_MIPSR2 */ } else if (kernel_uses_llsc) { - __asm__ __volatile__( - " .set mips3 \n" - "1: " __LL "%0, %1 # clear_bit \n" - " and %0, %2 \n" - " " __SC "%0, %1 \n" - " beqz %0, 2f \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" - " .set mips0 \n" - : "=&r" (temp), "=m" (*m) - : "ir" (~(1UL << bit)), "m" (*m)); + do { + __asm__ __volatile__( + " .set mips3 \n" + " " __LL "%0, %1 # clear_bit \n" + " and %0, %2 \n" + " " __SC "%0, %1 \n" + " .set mips0 \n" + : "=&r" (temp), "+m" (*m) + : "ir" (~(1UL << bit))); + } while (unlikely(!temp)); } else { volatile unsigned long *a = addr; unsigned long mask; @@ -213,24 +205,22 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr) " " __SC "%0, %1 \n" " beqzl %0, 1b \n" " .set mips0 \n" - : "=&r" (temp), "=m" (*m) - : "ir" (1UL << bit), "m" (*m)); + : "=&r" (temp), "+m" (*m) + : "ir" (1UL << bit)); } else if (kernel_uses_llsc) { unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); unsigned long temp; - __asm__ __volatile__( - " .set mips3 \n" - "1: " 
__LL "%0, %1 # change_bit \n" - " xor %0, %2 \n" - " " __SC "%0, %1 \n" - " beqz %0, 2f \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" - " .set mips0 \n" - : "=&r" (temp), "=m" (*m) - : "ir" (1UL << bit), "m" (*m)); + do { + __asm__ __volatile__( + " .set mips3 \n" + " " __LL "%0, %1 # change_bit \n" + " xor %0, %2 \n" + " " __SC "%0, %1 \n" + " .set mips0 \n" + : "=&r" (temp), "+m" (*m) + : "ir" (1UL << bit)); + } while (unlikely(!temp)); } else { volatile unsigned long *a = addr; unsigned long mask; @@ -272,30 +262,26 @@ static inline int test_and_set_bit(unsigned long nr, " beqzl %2, 1b \n" " and %2, %0, %3 \n" " .set mips0 \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "r" (1UL << bit), "m" (*m) + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "r" (1UL << bit) : "memory"); } else if (kernel_uses_llsc) { unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); unsigned long temp; - __asm__ __volatile__( - " .set push \n" - " .set noreorder \n" - " .set mips3 \n" - "1: " __LL "%0, %1 # test_and_set_bit \n" - " or %2, %0, %3 \n" - " " __SC "%2, %1 \n" - " beqz %2, 2f \n" - " and %2, %0, %3 \n" - " .subsection 2 \n" - "2: b 1b \n" - " nop \n" - " .previous \n" - " .set pop \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "r" (1UL << bit), "m" (*m) - : "memory"); + do { + __asm__ __volatile__( + " .set mips3 \n" + " " __LL "%0, %1 # test_and_set_bit \n" + " or %2, %0, %3 \n" + " " __SC "%2, %1 \n" + " .set mips0 \n" + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "r" (1UL << bit) + : "memory"); + } while (unlikely(!res)); + + res = temp & (1UL << bit); } else { volatile unsigned long *a = addr; unsigned long mask; @@ -340,30 +326,26 @@ static inline int test_and_set_bit_lock(unsigned long nr, " beqzl %2, 1b \n" " and %2, %0, %3 \n" " .set mips0 \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "r" (1UL << bit), "m" (*m) + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "r" (1UL << bit) : "memory"); } else if (kernel_uses_llsc) { unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); unsigned long temp; - __asm__ __volatile__( - " .set push \n" - " .set noreorder \n" - " .set mips3 \n" - "1: " __LL "%0, %1 # test_and_set_bit \n" - " or %2, %0, %3 \n" - " " __SC "%2, %1 \n" - " beqz %2, 2f \n" - " and %2, %0, %3 \n" - " .subsection 2 \n" - "2: b 1b \n" - " nop \n" - " .previous \n" - " .set pop \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "r" (1UL << bit), "m" (*m) - : "memory"); + do { + __asm__ __volatile__( + " .set mips3 \n" + " " __LL "%0, %1 # test_and_set_bit \n" + " or %2, %0, %3 \n" + " " __SC "%2, %1 \n" + " .set mips0 \n" + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "r" (1UL << bit) + : "memory"); + } while (unlikely(!res)); + + res = temp & (1UL << bit); } else { volatile unsigned long *a = addr; unsigned long mask; @@ -410,49 +392,43 @@ static inline int test_and_clear_bit(unsigned long nr, " beqzl %2, 1b \n" " and %2, %0, %3 \n" " .set mips0 \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "r" (1UL << bit), "m" (*m) + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "r" (1UL << bit) : "memory"); #ifdef CONFIG_CPU_MIPSR2 } else if (kernel_uses_llsc && __builtin_constant_p(nr)) { unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); unsigned long temp; - __asm__ __volatile__( - "1: " __LL "%0, %1 # test_and_clear_bit \n" - " " __EXT "%2, %0, %3, 1 \n" - " " __INS "%0, $0, %3, 1 \n" - " " __SC "%0, %1 \n" - " beqz %0, 2f \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "ir" (bit), "m" (*m) - : 
"memory"); + do { + __asm__ __volatile__( + " " __LL "%0, %1 # test_and_clear_bit \n" + " " __EXT "%2, %0, %3, 1 \n" + " " __INS "%0, $0, %3, 1 \n" + " " __SC "%0, %1 \n" + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "ir" (bit) + : "memory"); + } while (unlikely(!temp)); #endif } else if (kernel_uses_llsc) { unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); unsigned long temp; - __asm__ __volatile__( - " .set push \n" - " .set noreorder \n" - " .set mips3 \n" - "1: " __LL "%0, %1 # test_and_clear_bit \n" - " or %2, %0, %3 \n" - " xor %2, %3 \n" - " " __SC "%2, %1 \n" - " beqz %2, 2f \n" - " and %2, %0, %3 \n" - " .subsection 2 \n" - "2: b 1b \n" - " nop \n" - " .previous \n" - " .set pop \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "r" (1UL << bit), "m" (*m) - : "memory"); + do { + __asm__ __volatile__( + " .set mips3 \n" + " " __LL "%0, %1 # test_and_clear_bit \n" + " or %2, %0, %3 \n" + " xor %2, %3 \n" + " " __SC "%2, %1 \n" + " .set mips0 \n" + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "r" (1UL << bit) + : "memory"); + } while (unlikely(!res)); + + res = temp & (1UL << bit); } else { volatile unsigned long *a = addr; unsigned long mask; @@ -499,30 +475,26 @@ static inline int test_and_change_bit(unsigned long nr, " beqzl %2, 1b \n" " and %2, %0, %3 \n" " .set mips0 \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "r" (1UL << bit), "m" (*m) + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "r" (1UL << bit) : "memory"); } else if (kernel_uses_llsc) { unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); unsigned long temp; - __asm__ __volatile__( - " .set push \n" - " .set noreorder \n" - " .set mips3 \n" - "1: " __LL "%0, %1 # test_and_change_bit \n" - " xor %2, %0, %3 \n" - " " __SC "\t%2, %1 \n" - " beqz %2, 2f \n" - " and %2, %0, %3 \n" - " .subsection 2 \n" - "2: b 1b \n" - " nop \n" - " .previous \n" - " .set pop \n" - : "=&r" (temp), "=m" (*m), "=&r" (res) - : "r" (1UL << bit), "m" (*m) - : "memory"); + do { + __asm__ __volatile__( + " .set mips3 \n" + " " __LL "%0, %1 # test_and_change_bit \n" + " xor %2, %0, %3 \n" + " " __SC "\t%2, %1 \n" + " .set mips0 \n" + : "=&r" (temp), "+m" (*m), "=&r" (res) + : "r" (1UL << bit) + : "memory"); + } while (unlikely(!res)); + + res = temp & (1UL << bit); } else { volatile unsigned long *a = addr; unsigned long mask; diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h index 2d28017e95d0..d8d1c2805ac7 100644 --- a/arch/mips/include/asm/cmpxchg.h +++ b/arch/mips/include/asm/cmpxchg.h @@ -44,12 +44,9 @@ " move $1, %z4 \n" \ " .set mips3 \n" \ " " st " $1, %1 \n" \ - " beqz $1, 3f \n" \ - "2: \n" \ - " .subsection 2 \n" \ - "3: b 1b \n" \ - " .previous \n" \ + " beqz $1, 1b \n" \ " .set pop \n" \ + "2: \n" \ : "=&r" (__ret), "=R" (*m) \ : "R" (*m), "Jr" (old), "Jr" (new) \ : "memory"); \ diff --git a/arch/mips/include/asm/system.h b/arch/mips/include/asm/system.h index bb937ccfba1e..6018c80ce37a 100644 --- a/arch/mips/include/asm/system.h +++ b/arch/mips/include/asm/system.h @@ -115,21 +115,19 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val) } else if (kernel_uses_llsc) { unsigned long dummy; - __asm__ __volatile__( - " .set mips3 \n" - "1: ll %0, %3 # xchg_u32 \n" - " .set mips0 \n" - " move %2, %z4 \n" - " .set mips3 \n" - " sc %2, %1 \n" - " beqz %2, 2f \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" - " .set mips0 \n" - : "=&r" (retval), "=m" (*m), "=&r" (dummy) - : "R" (*m), "Jr" (val) - : "memory"); + do { + __asm__ __volatile__( + " .set mips3 \n" 
+ " ll %0, %3 # xchg_u32 \n" + " .set mips0 \n" + " move %2, %z4 \n" + " .set mips3 \n" + " sc %2, %1 \n" + " .set mips0 \n" + : "=&r" (retval), "=m" (*m), "=&r" (dummy) + : "R" (*m), "Jr" (val) + : "memory"); + } while (unlikely(!dummy)); } else { unsigned long flags; @@ -167,19 +165,17 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val) } else if (kernel_uses_llsc) { unsigned long dummy; - __asm__ __volatile__( - " .set mips3 \n" - "1: lld %0, %3 # xchg_u64 \n" - " move %2, %z4 \n" - " scd %2, %1 \n" - " beqz %2, 2f \n" - " .subsection 2 \n" - "2: b 1b \n" - " .previous \n" - " .set mips0 \n" - : "=&r" (retval), "=m" (*m), "=&r" (dummy) - : "R" (*m), "Jr" (val) - : "memory"); + do { + __asm__ __volatile__( + " .set mips3 \n" + " lld %0, %3 # xchg_u64 \n" + " move %2, %z4 \n" + " scd %2, %1 \n" + " .set mips0 \n" + : "=&r" (retval), "=m" (*m), "=&r" (dummy) + : "R" (*m), "Jr" (val) + : "memory"); + } while (unlikely(!dummy)); } else { unsigned long flags; |