From 18d84e2e55b6abe1e5b8a658ad078796122899fb Mon Sep 17 00:00:00 2001
From: Alexander Lobakin
Date: Wed, 22 Jan 2020 13:58:50 +0300
Subject: MIPS: make CPU_HAS_LOAD_STORE_LR opt-out

CPU_HAS_LOAD_STORE_LR was introduced in 932afdeec18b ("MIPS: Add
Kconfig variable for CPUs with unaligned load/store instructions") to
make the code in kernel/unaligned.c and lib/mem{cpy,set}.S more
intuitive, and to make it easy to add new CPUs without these
instruction sets in the future.

However, this variant is not optimal, for two main reasons:

* We currently have 20+ CPUs with such instructions and only two
  (MIPS R6) without. It is clearly more effective and straightforward
  to have an option for those two than for all the rest.

* It is easy to miss the fact that this option must be selected when
  adding a new CPU, whereas all processors lacking these instruction
  sets are well known, so the probability of missing one of them is
  much lower.

We can address both points by turning CPU_HAS_LOAD_STORE_LR into the
opt-out CPU_NO_LOAD_STORE_LR. This also makes the MIPS root Kconfig
clearer and easier to understand.

Signed-off-by: Alexander Lobakin
Signed-off-by: Paul Burton
Cc: Ralf Baechle
Cc: Alexandre Belloni
Cc: Microchip Linux Driver Support
Cc: Will Deacon
Cc: Greg Kroah-Hartman
Cc: Masahiro Yamada
Cc: Paul Walmsley
Cc: Michal Simek
Cc: Allison Randal
Cc: Thomas Gleixner
Cc: Eric W. Biederman
Cc: linux-mips@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
---
 arch/mips/lib/memcpy.S | 14 +++++++-------
 arch/mips/lib/memset.S | 16 ++++++++--------
 2 files changed, 15 insertions(+), 15 deletions(-)

(limited to 'arch/mips/lib')
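[ Illustrative note, not part of the commit: the arch/mips/Kconfig
  hunks that actually flip the symbol are filtered out of this view.
  The opt-out conversion described above amounts to something along
  these lines -- a sketch only, the exact entries and help text may
  differ from the real commit:

	# The old opt-in symbol goes away; the new symbol is selected
	# only by the CPUs that lack the instructions.
	config CPU_NO_LOAD_STORE_LR
		bool
		help
		  CPU lacks support for the unaligned load and store
		  instructions LWL, LWR, SWL, SWR (and LDL, LDR, SDL,
		  SDR on 64-bit), as is the case for MIPS R6 cores.

	config CPU_MIPS32_R6
		bool "MIPS32 Release 6"
		select CPU_NO_LOAD_STORE_LR
		# ... other selects unchanged

  With this, only the two R6 targets need to select the new symbol,
  instead of 20+ CPU entries each selecting CPU_HAS_LOAD_STORE_LR. ]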
diff --git a/arch/mips/lib/memcpy.S b/arch/mips/lib/memcpy.S
index cdd19d8561e8..f7994d936505 100644
--- a/arch/mips/lib/memcpy.S
+++ b/arch/mips/lib/memcpy.S
@@ -301,14 +301,14 @@
 	and	t0, src, ADDRMASK
 	PREFS(	0, 2*32(src) )
 	PREFD(	1, 2*32(dst) )
-#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
 	bnez	t1, .Ldst_unaligned\@
 	 nop
 	bnez	t0, .Lsrc_unaligned_dst_aligned\@
-#else
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
 	or	t0, t0, t1
 	bnez	t0, .Lcopy_unaligned_bytes\@
-#endif
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
 	/*
 	 * use delay slot for fall-through
 	 * src and dst are aligned; need to compute rem
@@ -389,7 +389,7 @@
 	bne	rem, len, 1b
 	.set	noreorder

-#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
 	/*
 	 * src and dst are aligned, need to copy rem bytes (rem < NBYTES)
 	 * A loop would do only a byte at a time with possible branch
@@ -491,7 +491,7 @@
 	bne	len, rem, 1b
 	.set	noreorder

-#endif /* CONFIG_CPU_HAS_LOAD_STORE_LR */
+#endif /* !CONFIG_CPU_NO_LOAD_STORE_LR */
 .Lcopy_bytes_checklen\@:
 	beqz	len, .Ldone\@
 	 nop
@@ -520,7 +520,7 @@
 	jr	ra
 	 nop

-#ifndef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
 .Lcopy_unaligned_bytes\@:
 1:
 	COPY_BYTE(0)
@@ -534,7 +534,7 @@
 	ADD	src, src, 8
 	b	1b
 	 ADD	dst, dst, 8
-#endif /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
 	.if __memcpy == 1
 	END(memcpy)
 	.set __memcpy, 0
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index 418611ef13cf..d5449e8a3dfc 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -115,7 +115,7 @@
 #endif
 	.set		reorder

-#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
 	R10KCBARRIER(0(ra))
 #ifdef __MIPSEB__
 	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
@@ -125,7 +125,7 @@
 	PTR_SUBU	a0, t0			/* long align ptr */
 	PTR_ADDU	a2, t0			/* correct size */

-#else /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
 #define STORE_BYTE(N)				\
 	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
 	.set		noreorder;		\
@@ -150,7 +150,7 @@
 	ori		a0, STORMASK
 	xori		a0, STORMASK
 	PTR_ADDIU	a0, STORSIZE
-#endif /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
 1:	ori		t1, a2, 0x3f		/* # of full blocks */
 	xori		t1, 0x3f
 	andi		t0, a2, 0x40-STORSIZE
@@ -185,7 +185,7 @@

 	.set		noreorder
 	beqz		a2, 1f
-#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
 	PTR_ADDU	a0, a2			/* What's left */
 	.set		reorder
 	R10KCBARRIER(0(ra))
@@ -194,7 +194,7 @@
 #else
 	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
 #endif
-#else
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
 	PTR_SUBU	t0, $0, a2
 	.set		reorder
 	move		a2, zero		/* No remaining longs */
@@ -211,7 +211,7 @@
 	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
 #endif
 0:
-#endif
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
 1:	move		a2, zero
 	jr		ra

@@ -234,7 +234,7 @@
 	.hidden __memset
 	.endif

-#ifndef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
 .Lbyte_fixup\@:
 	/*
 	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
@@ -243,7 +243,7 @@
 	PTR_SUBU	a2, t0
 	PTR_ADDIU	a2, 1
 	jr		ra
-#endif /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

 .Lfirst_fixup\@:
 	/* unset_bytes already in a2 */
--
cgit v1.2.3