author    Christophe Leroy <christophe.leroy@csgroup.eu>  2020-09-27 09:16:34 +0000
committer Michael Ellerman <mpe@ellerman.id.au>  2020-12-04 01:01:17 +1100
commit    ed07f6353ddf19e51c4db6d2be72ca97f7ed8a08 (patch)
tree      3a5653504336d639e595575896c53a7eda9046c3 /arch/powerpc
parent    91bf695596f594e42d69d70deb2ae53cafecf77c (diff)
powerpc/vdso: Use builtin symbols to locate fixup section
Add builtin symbols to locate the fixup sections, and use them instead of
locating the sections through ELF headers at runtime.

Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/2954526981859ca1ccfcfc7a7c4263920e9ddfcb.1601197618.git.christophe.leroy@csgroup.eu
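For readers unfamiliar with the technique: instead of parsing the vDSO's ELF
section headers at boot, the linker is asked to emit start/end symbols around
each fixup section, and C code then refers to those symbols directly. The
sketch below is illustrative only (plain userspace C, not kernel code): it
relies on the __start_<section>/__stop_<section> symbols that GNU ld generates
automatically for sections whose names are valid C identifiers, whereas the
patch defines the equivalent VDSO_*_fixup_start/_end symbols explicitly in the
vdso linker scripts.

/* Illustrative sketch: bound a custom section with link-time symbols,
 * so no runtime ELF header parsing is needed to walk its entries. */
#include <stdio.h>

struct fixup_entry {
	int feature;
	const char *name;
};

/* Drop an entry into the custom "fixup_table" section. */
#define FIXUP_ENTRY(f, n) \
	static const struct fixup_entry _entry_##f \
	__attribute__((used, section("fixup_table"))) = { (f), (n) }

FIXUP_ENTRY(1, "first");
FIXUP_ENTRY(2, "second");

/* Provided by the linker: start/end of the "fixup_table" section. */
extern const struct fixup_entry __start_fixup_table[];
extern const struct fixup_entry __stop_fixup_table[];

int main(void)
{
	for (const struct fixup_entry *e = __start_fixup_table;
	     e < __stop_fixup_table; e++)
		printf("fixup %d: %s\n", e->feature, e->name);
	return 0;
}

Built with gcc, the loop walks the section purely via the linker-provided
symbols, which is the same idea the patch applies to the vDSO fixup sections.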
Diffstat (limited to 'arch/powerpc')
-rw-r--r--  arch/powerpc/kernel/vdso.c               | 55
-rw-r--r--  arch/powerpc/kernel/vdso32/vdso32.lds.S  |  8
-rw-r--r--  arch/powerpc/kernel/vdso64/vdso64.lds.S  |  8
3 files changed, 30 insertions(+), 41 deletions(-)
diff --git a/arch/powerpc/kernel/vdso.c b/arch/powerpc/kernel/vdso.c
index e10bc0d9856c..27449202c1d7 100644
--- a/arch/powerpc/kernel/vdso.c
+++ b/arch/powerpc/kernel/vdso.c
@@ -434,6 +434,12 @@ static int __init vdso_do_func_patch64(struct lib32_elfinfo *v32,
#endif /* CONFIG_PPC64 */
+#define VDSO_DO_FIXUPS(type, value, bits, sec) do { \
+ void *__start = (void *)VDSO##bits##_SYMBOL(&vdso##bits##_start, sec##_start); \
+ void *__end = (void *)VDSO##bits##_SYMBOL(&vdso##bits##_start, sec##_end); \
+ \
+ do_##type##_fixups((value), __start, __end); \
+} while (0)
static __init int vdso_do_find_sections(struct lib32_elfinfo *v32,
struct lib64_elfinfo *v64)
@@ -530,53 +536,20 @@ static __init int vdso_fixup_datapage(struct lib32_elfinfo *v32,
static __init int vdso_fixup_features(struct lib32_elfinfo *v32,
struct lib64_elfinfo *v64)
{
- unsigned long size;
- void *start;
-
#ifdef CONFIG_PPC64
- start = find_section64(v64->hdr, "__ftr_fixup", &size);
- if (start)
- do_feature_fixups(cur_cpu_spec->cpu_features,
- start, start + size);
-
- start = find_section64(v64->hdr, "__mmu_ftr_fixup", &size);
- if (start)
- do_feature_fixups(cur_cpu_spec->mmu_features,
- start, start + size);
-
- start = find_section64(v64->hdr, "__fw_ftr_fixup", &size);
- if (start)
- do_feature_fixups(powerpc_firmware_features,
- start, start + size);
-
- start = find_section64(v64->hdr, "__lwsync_fixup", &size);
- if (start)
- do_lwsync_fixups(cur_cpu_spec->cpu_features,
- start, start + size);
+ VDSO_DO_FIXUPS(feature, cur_cpu_spec->cpu_features, 64, ftr_fixup);
+ VDSO_DO_FIXUPS(feature, cur_cpu_spec->mmu_features, 64, mmu_ftr_fixup);
+ VDSO_DO_FIXUPS(feature, powerpc_firmware_features, 64, fw_ftr_fixup);
+ VDSO_DO_FIXUPS(lwsync, cur_cpu_spec->cpu_features, 64, lwsync_fixup);
#endif /* CONFIG_PPC64 */
#ifdef CONFIG_VDSO32
- start = find_section32(v32->hdr, "__ftr_fixup", &size);
- if (start)
- do_feature_fixups(cur_cpu_spec->cpu_features,
- start, start + size);
-
- start = find_section32(v32->hdr, "__mmu_ftr_fixup", &size);
- if (start)
- do_feature_fixups(cur_cpu_spec->mmu_features,
- start, start + size);
-
+ VDSO_DO_FIXUPS(feature, cur_cpu_spec->cpu_features, 32, ftr_fixup);
+ VDSO_DO_FIXUPS(feature, cur_cpu_spec->mmu_features, 32, mmu_ftr_fixup);
#ifdef CONFIG_PPC64
- start = find_section32(v32->hdr, "__fw_ftr_fixup", &size);
- if (start)
- do_feature_fixups(powerpc_firmware_features,
- start, start + size);
+ VDSO_DO_FIXUPS(feature, powerpc_firmware_features, 32, fw_ftr_fixup);
#endif /* CONFIG_PPC64 */
-
- start = find_section32(v32->hdr, "__lwsync_fixup", &size);
- if (start)
- do_lwsync_fixups(cur_cpu_spec->cpu_features,
- start, start + size);
+ VDSO_DO_FIXUPS(lwsync, cur_cpu_spec->cpu_features, 32, lwsync_fixup);
#endif
return 0;
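To make the new helper easier to read outside the preprocessor, the first
64-bit invocation above, VDSO_DO_FIXUPS(feature, cur_cpu_spec->cpu_features,
64, ftr_fixup), expands essentially to the snippet below. VDSO64_SYMBOL() is
defined elsewhere in the tree and is assumed here to resolve the given name to
the matching VDSO_*-prefixed symbol emitted by the linker scripts changed
further down.

do {
	void *__start = (void *)VDSO64_SYMBOL(&vdso64_start, ftr_fixup_start);
	void *__end = (void *)VDSO64_SYMBOL(&vdso64_start, ftr_fixup_end);

	do_feature_fixups(cur_cpu_spec->cpu_features, __start, __end);
} while (0);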
diff --git a/arch/powerpc/kernel/vdso32/vdso32.lds.S b/arch/powerpc/kernel/vdso32/vdso32.lds.S
index 078d75c0cd24..dc62772f028c 100644
--- a/arch/powerpc/kernel/vdso32/vdso32.lds.S
+++ b/arch/powerpc/kernel/vdso32/vdso32.lds.S
@@ -38,17 +38,25 @@ SECTIONS
PROVIDE(etext = .);
. = ALIGN(8);
+ VDSO_ftr_fixup_start = .;
__ftr_fixup : { *(__ftr_fixup) }
+ VDSO_ftr_fixup_end = .;
. = ALIGN(8);
+ VDSO_mmu_ftr_fixup_start = .;
__mmu_ftr_fixup : { *(__mmu_ftr_fixup) }
+ VDSO_mmu_ftr_fixup_end = .;
. = ALIGN(8);
+ VDSO_lwsync_fixup_start = .;
__lwsync_fixup : { *(__lwsync_fixup) }
+ VDSO_lwsync_fixup_end = .;
#ifdef CONFIG_PPC64
. = ALIGN(8);
+ VDSO_fw_ftr_fixup_start = .;
__fw_ftr_fixup : { *(__fw_ftr_fixup) }
+ VDSO_fw_ftr_fixup_end = .;
#endif
/*
diff --git a/arch/powerpc/kernel/vdso64/vdso64.lds.S b/arch/powerpc/kernel/vdso64/vdso64.lds.S
index 1f06e4f730a8..913d34e8bd05 100644
--- a/arch/powerpc/kernel/vdso64/vdso64.lds.S
+++ b/arch/powerpc/kernel/vdso64/vdso64.lds.S
@@ -39,16 +39,24 @@ SECTIONS
PROVIDE(etext = .);
. = ALIGN(8);
+ VDSO_ftr_fixup_start = .;
__ftr_fixup : { *(__ftr_fixup) }
+ VDSO_ftr_fixup_end = .;
. = ALIGN(8);
+ VDSO_mmu_ftr_fixup_start = .;
__mmu_ftr_fixup : { *(__mmu_ftr_fixup) }
+ VDSO_mmu_ftr_fixup_end = .;
. = ALIGN(8);
+ VDSO_lwsync_fixup_start = .;
__lwsync_fixup : { *(__lwsync_fixup) }
+ VDSO_lwsync_fixup_end = .;
. = ALIGN(8);
+ VDSO_fw_ftr_fixup_start = .;
__fw_ftr_fixup : { *(__fw_ftr_fixup) }
+ VDSO_fw_ftr_fixup_end = .;
/*
* Other stuff is appended to the text segment: