author	Scott Wood <scottwood@freescale.com>	2015-10-06 22:48:09 -0500
committer	Scott Wood <scottwood@freescale.com>	2015-10-22 22:50:46 -0500
commit	d9e1831a420267a7ced708bb259d65b0a3c0344d (patch)
tree	7df430b2f3deec91fd977f438ba4b207be8db46b /arch/powerpc/mm/tlb_nohash_low.S
parent	1930bb5ccf77da3bf1483e2cd85a12ec9c0ed0f6 (diff)
powerpc/85xx: Load all early TLB entries at once
Use an AS=1 trampoline TLB entry to allow all normal TLB1 entries to be loaded at once. This avoids the need to keep the translation that code is executing from in the same TLB entry in the final TLB configuration as during early boot, which in turn is helpful for relocatable kernels (e.g. kdump) where the kernel is not running from what would be the first TLB entry.

On e6500, we limit map_mem_in_cams() to the primary hwthread of a core (the boot cpu is always considered primary, as a kdump kernel can be entered on any cpu). Each TLB only needs to be set up once, and when we do, we don't want another thread to be running when we create a temporary trampoline TLB1 entry.

Signed-off-by: Scott Wood <scottwood@freescale.com>
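In the powerpc ELF ABI the first three integer arguments are passed in r3, r4, and r5, so the assembly entry point added below corresponds to a three-argument C function. A minimal sketch of the calling pattern this enables, with hypothetical names (the companion C-side changes are outside this file-limited view):

	/* Sketch only: the prototypes are inferred from the r3/r4/r5
	 * register interface documented in the assembly below, and
	 * load_all_cams()/num_cams/tmp_entry are hypothetical names. */
	void loadcam_entry(unsigned int index);
	void loadcam_multi(unsigned int first_idx, unsigned int num,
			   unsigned int tmp_idx);

	static void load_all_cams(unsigned int num_cams,
				  unsigned int tmp_entry)
	{
		/* Old pattern: rewrite entries one at a time, which only
		 * works if the entry we are executing from keeps the same
		 * slot across the rewrite:
		 *
		 *	for (i = 0; i < num_cams; i++)
		 *		loadcam_entry(i);
		 */

		/* New pattern: write entries [0, num_cams) in one call,
		 * executing from an AS=1 trampoline placed in TLB1 slot
		 * tmp_entry for the duration. */
		loadcam_multi(0, num_cams, tmp_entry);
	}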
Diffstat (limited to 'arch/powerpc/mm/tlb_nohash_low.S')
-rw-r--r--	arch/powerpc/mm/tlb_nohash_low.S	63
1 file changed, 63 insertions(+), 0 deletions(-)
diff --git a/arch/powerpc/mm/tlb_nohash_low.S b/arch/powerpc/mm/tlb_nohash_low.S
index 43ff3c797fbf..68c477592e43 100644
--- a/arch/powerpc/mm/tlb_nohash_low.S
+++ b/arch/powerpc/mm/tlb_nohash_low.S
@@ -400,6 +400,7 @@ _GLOBAL(set_context)
* extern void loadcam_entry(unsigned int index)
*
 * Load TLBCAM[index] entry into the L2 CAM MMU
+ * Must preserve r7, r8, r9, and r10
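+ *	  (loadcam_multi keeps its trampoline ESEL, saved LR, and loop
+ *	  bounds in r7, r8, and r9/r10 across repeated loadcam_entry calls)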
*/
_GLOBAL(loadcam_entry)
mflr r5
@@ -423,4 +424,66 @@ END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
tlbwe
isync
blr
+
+/*
+ * Load multiple TLB entries at once, using an alternate-space
+ * trampoline so that we don't have to care about whether the same
+ * TLB entry maps us before and after.
+ *
+ * r3 = first entry to write
+ * r4 = number of entries to write
+ * r5 = temporary tlb entry
+ */
+_GLOBAL(loadcam_multi)
+ mflr r8
+
+ /*
+ * Set up temporary TLB entry that is the same as what we're
+ * running from, but in AS=1.
+ */
+ bl 1f
+1: mflr r6
+ tlbsx 0,r8
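+	/* On a hit, tlbsx loads the MAS registers with the entry that
+	 * translates r8 (our saved return address in kernel text) */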
+ mfspr r6,SPRN_MAS1
+ ori r6,r6,MAS1_TS
+ mtspr SPRN_MAS1,r6
+ mfspr r6,SPRN_MAS0
+ rlwimi r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
+ mr r7,r5
+ mtspr SPRN_MAS0,r6
+ isync
+ tlbwe
+ isync
+
+ /* Switch to AS=1 */
+ mfmsr r6
+ ori r6,r6,MSR_IS|MSR_DS
+ mtmsr r6
+ isync
+
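+	/* Write entries [r3, r3+r4): r9 tracks the current index, r10 the
+	 * limit; loadcam_entry takes the index in r3 and preserves r7-r10 */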
+ mr r9,r3
+ add r10,r3,r4
+2: bl loadcam_entry
+ addi r9,r9,1
+ cmpw r9,r10
+ mr r3,r9
+ blt 2b
+
+ /* Return to AS=0 and clear the temporary entry */
+ mfmsr r6
+ rlwinm. r6,r6,0,~(MSR_IS|MSR_DS)
+ mtmsr r6
+ isync
+
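+	/* Invalidate the trampoline: MAS1=0 clears the valid bit; select
+	 * TLB1 and the temporary ESEL saved in r7, then rewrite the entry */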
+ li r6,0
+ mtspr SPRN_MAS1,r6
+ rlwinm r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
+ oris r6,r6,MAS0_TLBSEL(1)@h
+ mtspr SPRN_MAS0,r6
+ isync
+ tlbwe
+ isync
+
+ mtlr r8
+ blr
#endif