author		Catalin Marinas <catalin.marinas@arm.com>	2020-03-25 11:10:51 +0000
committer	Catalin Marinas <catalin.marinas@arm.com>	2020-03-25 11:10:51 +0000
commit		806dc825f01f1543f613b8195112ef06d04eb6d3 (patch)
tree		e239498bc833c3b19b2b27cbdd2fcc6f5d623ed4 /arch/arm64/kernel
parent		0829a076958ddd203cf4824dd330c93ba4662815 (diff)
parent		6cf9a2dce6bd10cf454cf6299c1c23182cb486e7 (diff)
Merge branch 'for-next/asm-cleanups' into for-next/core
* for-next/asm-cleanups:
  : Various asm clean-ups (alignment, mov_q vs ldr, .idmap)
  arm64: move kimage_vaddr to .rodata
  arm64: use mov_q instead of literal ldr
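The common thread in these clean-ups is replacing the "ldr Xn, =const" pseudo-instruction with the kernel's mov_q assembler macro. "ldr Xn, =const" makes the assembler park the constant in a literal pool and turns the instruction into a PC-relative load, i.e. a data access at run time; mov_q instead builds the constant in the register from two to four movz/movk instructions, needing no memory access and no literal pool. A rough sketch of the difference, using an illustrative constant rather than one from the patch:

	// literal-pool form: the constant is stored near the code and
	// fetched with a PC-relative load at run time
	ldr	x0, =0x00300085			// illustrative value

	// mov_q form expands (roughly) to an inline movz/movk pair,
	// with no data access at all
	movz	x0, #0x0030, lsl #16		// x0 = 0x0030_0000
	movk	x0, #0x0085			// x0 = 0x0030_0085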
Diffstat (limited to 'arch/arm64/kernel')
-rw-r--r--	arch/arm64/kernel/cpu-reset.S		 2
-rw-r--r--	arch/arm64/kernel/head.S		12
-rw-r--r--	arch/arm64/kernel/hyp-stub.S		 2
-rw-r--r--	arch/arm64/kernel/relocate_kernel.S	 4
4 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/arch/arm64/kernel/cpu-reset.S b/arch/arm64/kernel/cpu-reset.S
index 32c7bf858dd9..38087b4c0432 100644
--- a/arch/arm64/kernel/cpu-reset.S
+++ b/arch/arm64/kernel/cpu-reset.S
@@ -32,7 +32,7 @@
 ENTRY(__cpu_soft_restart)
 	/* Clear sctlr_el1 flags. */
 	mrs	x12, sctlr_el1
-	ldr	x13, =SCTLR_ELx_FLAGS
+	mov_q	x13, SCTLR_ELx_FLAGS
 	bic	x12, x12, x13
 	pre_disable_mmu_workaround
 	msr	sctlr_el1, x12
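In __cpu_soft_restart this is more than style: the routine disables the MMU, so keeping it free of stray data accesses makes it more self-contained. For reference, a simplified sketch of what mov_q does (the real macro in arch/arm64/include/asm/assembler.h picks between 2 and 4 instructions depending on the magnitude and sign of the value; this sketch shows only the full 4-instruction case, under a hypothetical name):

	.macro	mov_q_sketch, reg, val
	movz	\reg, :abs_g3:\val	// set bits [63:48], zero the rest
	movk	\reg, :abs_g2_nc:\val	// insert bits [47:32]
	movk	\reg, :abs_g1_nc:\val	// insert bits [31:16]
	movk	\reg, :abs_g0_nc:\val	// insert bits [15:0]
	.endm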
diff --git a/arch/arm64/kernel/head.S b/arch/arm64/kernel/head.S
index 5fa9daa1d227..2f7ea6d8f5bf 100644
--- a/arch/arm64/kernel/head.S
+++ b/arch/arm64/kernel/head.S
@@ -457,17 +457,19 @@ SYM_FUNC_START_LOCAL(__primary_switched)
 	b	start_kernel
 SYM_FUNC_END(__primary_switched)
 
+	.pushsection ".rodata", "a"
+SYM_DATA_START(kimage_vaddr)
+	.quad		_text - TEXT_OFFSET
+SYM_DATA_END(kimage_vaddr)
+EXPORT_SYMBOL(kimage_vaddr)
+	.popsection
+
 /*
  * end early head section, begin head code that is also used for
  * hotplug and needs to have the same protections as the text region
  */
 	.section ".idmap.text","awx"
 
-SYM_DATA_START(kimage_vaddr)
-	.quad		_text - TEXT_OFFSET
-SYM_DATA_END(kimage_vaddr)
-EXPORT_SYMBOL(kimage_vaddr)
-
 /*
  * If we're fortunate enough to boot at EL2, ensure that the world is
  * sane before dropping to EL1.
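The head.S change is about placement rather than instruction selection: kimage_vaddr is read-only data, but it used to sit in .idmap.text, a section mapped "awx", i.e. executable. Moving it into .rodata ("a": allocatable, non-writable, non-executable) gives the value the protections appropriate for a constant. Consumers are unaffected, since they reference the symbol by name; an illustrative asm-side read (assumed, not part of this patch) would use the usual adrp + :lo12: idiom:

	adrp	x0, kimage_vaddr		// page containing the symbol
	ldr	x0, [x0, :lo12:kimage_vaddr]	// x0 = _text - TEXT_OFFSET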
diff --git a/arch/arm64/kernel/hyp-stub.S b/arch/arm64/kernel/hyp-stub.S
index 73d46070b315..e473ead806ed 100644
--- a/arch/arm64/kernel/hyp-stub.S
+++ b/arch/arm64/kernel/hyp-stub.S
@@ -63,7 +63,7 @@ el1_sync:
 	beq	9f				// Nothing to reset!
 
 	/* Someone called kvm_call_hyp() against the hyp-stub... */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
 	eret
 
 9:	mov	x0, xzr
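The hyp-stub hunk is the same ldr-to-mov_q substitution. HVC_STUB_ERR is a small constant (0xbadca11 in asm/virt.h around this series, cited from memory), so the win here is mostly dropping the literal pool entry; assuming that value, mov_q expands to just two instructions:

	movz	x0, #0x0bad, lsl #16	// x0 = 0x0bad_0000
	movk	x0, #0xca11		// x0 = 0x0bad_ca11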
diff --git a/arch/arm64/kernel/relocate_kernel.S b/arch/arm64/kernel/relocate_kernel.S
index c1d7db71a726..c40ce496c78b 100644
--- a/arch/arm64/kernel/relocate_kernel.S
+++ b/arch/arm64/kernel/relocate_kernel.S
@@ -41,7 +41,7 @@ ENTRY(arm64_relocate_new_kernel)
 	cmp	x0, #CurrentEL_EL2
 	b.ne	1f
 	mrs	x0, sctlr_el2
-	ldr	x1, =SCTLR_ELx_FLAGS
+	mov_q	x1, SCTLR_ELx_FLAGS
 	bic	x0, x0, x1
 	pre_disable_mmu_workaround
 	msr	sctlr_el2, x0
@@ -113,8 +113,6 @@ ENTRY(arm64_relocate_new_kernel)
 
 ENDPROC(arm64_relocate_new_kernel)
 
-.ltorg
-
 .align 3	/* To keep the 64-bit values below naturally aligned. */
 
 .Lcopy_end:
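The final hunk is the payoff of the conversion: .ltorg tells the assembler where it may dump the accumulated literal pool, and once the last "ldr x1, =SCTLR_ELx_FLAGS" in this file is replaced by mov_q there are no literals left to dump, so the directive and its surrounding blank line can go. A minimal sketch of the relationship, with a hypothetical constant:

	// before: a literal load forces a pool, which must be placed
	// explicitly somewhere in a region that mixes code and data
	ldr	x0, =0x123456789abcdef0		// hypothetical constant
	ret
	.ltorg					// pool emitted here

	// after: mov_q synthesises the value inline, the pool is empty,
	// and .ltorg would have nothing to emit
	mov_q	x0, 0x123456789abcdef0
	ret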