| author | Kumar Gala <galak@kernel.crashing.org> | 2009-03-19 06:55:41 +0300 |
|---|---|---|
| committer | Benjamin Herrenschmidt <benh@kernel.crashing.org> | 2009-03-24 05:47:32 +0300 |
| commit | 2319f1239592d0de80414ad2338c2bd7384a2a41 (patch) | |
| tree | 805de041dfc84ae9ca767c9767d833977654dbe0 /arch | |
| parent | eb3436a0139a651a39dbb37a75b10a2cccd00ad5 (diff) | |
| download | linux-2319f1239592d0de80414ad2338c2bd7384a2a41.tar.xz | |
powerpc/mm: e300c2/c3/c4 TLB errata workaround
Complete workaround for DTLB errata in e300c2/c3/c4 processors.
Due to the bug, the hardware-implemented LRU algorithm always goes to way
1 of the TLB. This fix implements the proposed software workaround in the
form of an LRW table for choosing the TLB way.
Based on patch from David Jander <david@protonic.nl>
Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
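To see why the errata needs a workaround at all, here is a small, hypothetical C simulation (not kernel code; the single-set model and all names are my own) of one 2-way DTLB set: with the broken hardware LRU every reload is written into way 1, so two pages that alternate within the same set miss on every access, while toggling a software LRU bit per miss, as this patch's LRW table does, restores normal 2-way behaviour.

```c
#include <stdio.h>
#include <stdint.h>

static uint32_t ways[2];	/* page tag held by each way, 0 = empty */
static unsigned int lru;	/* software LRU bit for this one set */

static int touch(uint32_t tag, int buggy_hw_lru)
{
	unsigned int victim;

	if (ways[0] == tag || ways[1] == tag)
		return 1;		/* TLB hit */

	if (buggy_hw_lru)
		victim = 1;		/* errata: HW LRU always picks way 1 */
	else
		victim = (lru ^= 1);	/* workaround: alternate the way per miss */
	ways[victim] = tag;
	return 0;			/* TLB miss */
}

int main(void)
{
	for (int mode = 0; mode < 2; mode++) {
		int hits = 0;

		ways[0] = ways[1] = 0;
		lru = 0;
		/* two page tags that fall into the same DTLB set, accessed alternately */
		for (int i = 0; i < 100; i++)
			hits += touch((i & 1) ? 0x1000 : 0x21000, mode == 0);
		printf("%s: %d hits out of 100 accesses\n",
		       mode == 0 ? "buggy HW LRU" : "SW LRU toggle", hits);
	}
	return 0;
}
```

In this toy model the buggy mode scores 0 hits out of 100 and the toggling mode 98, which is the kind of thrashing the patch below avoids.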
Diffstat (limited to 'arch')
-rw-r--r-- | arch/powerpc/include/asm/mmu.h | 6 |
-rw-r--r-- | arch/powerpc/kernel/cpu_setup_6xx.S | 5 |
-rw-r--r-- | arch/powerpc/kernel/cputable.c | 9 |
-rw-r--r-- | arch/powerpc/kernel/head_32.S | 32 |
4 files changed, 45 insertions, 7 deletions
```diff
diff --git a/arch/powerpc/include/asm/mmu.h b/arch/powerpc/include/asm/mmu.h
index dc82dcd06aea..c073de4af849 100644
--- a/arch/powerpc/include/asm/mmu.h
+++ b/arch/powerpc/include/asm/mmu.h
@@ -46,6 +46,12 @@
  */
 #define MMU_FTR_LOCK_BCAST_INVAL	ASM_CONST(0x00100000)
 
+/* This indicates that the processor doesn't handle way selection
+ * properly and needs SW to track and update the LRU state. This
+ * is specific to an errata on e300c2/c3/c4 class parts
+ */
+#define MMU_FTR_NEED_DTLB_SW_LRU	ASM_CONST(0x00200000)
+
 #ifndef __ASSEMBLY__
 #include <asm/cputable.h>
 
diff --git a/arch/powerpc/kernel/cpu_setup_6xx.S b/arch/powerpc/kernel/cpu_setup_6xx.S
index 72d1d7395254..54f767e31a1a 100644
--- a/arch/powerpc/kernel/cpu_setup_6xx.S
+++ b/arch/powerpc/kernel/cpu_setup_6xx.S
@@ -15,9 +15,14 @@
 #include <asm/ppc_asm.h>
 #include <asm/asm-offsets.h>
 #include <asm/cache.h>
+#include <asm/mmu.h>
 
 _GLOBAL(__setup_cpu_603)
 	mflr	r4
+BEGIN_MMU_FTR_SECTION
+	li	r10,0
+	mtspr	SPRN_SPRG4,r10	/* init SW LRU tracking */
+END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
 BEGIN_FTR_SECTION
 	bl	__init_fpu_registers
 END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
diff --git a/arch/powerpc/kernel/cputable.c b/arch/powerpc/kernel/cputable.c
index ccea2431ddf8..cd1b687544f3 100644
--- a/arch/powerpc/kernel/cputable.c
+++ b/arch/powerpc/kernel/cputable.c
@@ -1090,7 +1090,8 @@ static struct cpu_spec __initdata cpu_specs[] = {
 		.cpu_name		= "e300c2",
 		.cpu_features		= CPU_FTRS_E300C2,
 		.cpu_user_features	= PPC_FEATURE_32 | PPC_FEATURE_HAS_MMU,
-		.mmu_features		= MMU_FTR_USE_HIGH_BATS,
+		.mmu_features		= MMU_FTR_USE_HIGH_BATS |
+			MMU_FTR_NEED_DTLB_SW_LRU,
 		.icache_bsize		= 32,
 		.dcache_bsize		= 32,
 		.cpu_setup		= __setup_cpu_603,
@@ -1103,7 +1104,8 @@ static struct cpu_spec __initdata cpu_specs[] = {
 		.cpu_name		= "e300c3",
 		.cpu_features		= CPU_FTRS_E300,
 		.cpu_user_features	= COMMON_USER,
-		.mmu_features		= MMU_FTR_USE_HIGH_BATS,
+		.mmu_features		= MMU_FTR_USE_HIGH_BATS |
+			MMU_FTR_NEED_DTLB_SW_LRU,
 		.icache_bsize		= 32,
 		.dcache_bsize		= 32,
 		.cpu_setup		= __setup_cpu_603,
@@ -1118,7 +1120,8 @@ static struct cpu_spec __initdata cpu_specs[] = {
 		.cpu_name		= "e300c4",
 		.cpu_features		= CPU_FTRS_E300,
 		.cpu_user_features	= COMMON_USER,
-		.mmu_features		= MMU_FTR_USE_HIGH_BATS,
+		.mmu_features		= MMU_FTR_USE_HIGH_BATS |
+			MMU_FTR_NEED_DTLB_SW_LRU,
 		.icache_bsize		= 32,
 		.dcache_bsize		= 32,
 		.cpu_setup		= __setup_cpu_603,
diff --git a/arch/powerpc/kernel/head_32.S b/arch/powerpc/kernel/head_32.S
index 58dcc7c03109..54e68c11ae15 100644
--- a/arch/powerpc/kernel/head_32.S
+++ b/arch/powerpc/kernel/head_32.S
@@ -593,9 +593,21 @@ BEGIN_FTR_SECTION
 	rlwinm	r1,r1,0,~_PAGE_COHERENT	/* clear M (coherence not required) */
 END_FTR_SECTION_IFCLR(CPU_FTR_NEED_COHERENT)
 	mtspr	SPRN_RPA,r1
+	mfspr	r2,SPRN_SRR1		/* Need to restore CR0 */
+	mtcrf	0x80,r2
+BEGIN_MMU_FTR_SECTION
+	li	r0,1
+	mfspr	r1,SPRN_SPRG4
+	rlwinm	r2,r3,20,27,31		/* Get Address bits 15:19 */
+	slw	r0,r0,r2
+	xor	r1,r0,r1
+	srw	r0,r1,r2
+	mtspr	SPRN_SPRG4,r1
+	mfspr	r2,SPRN_SRR1
+	rlwimi	r2,r0,31-14,14,14
+	mtspr	SPRN_SRR1,r2
+END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
 	tlbld	r3
-	mfspr	r3,SPRN_SRR1		/* Need to restore CR0 */
-	mtcrf	0x80,r3
 	rfi
 DataAddressInvalid:
 	mfspr	r3,SPRN_SRR1
@@ -661,9 +673,21 @@ BEGIN_FTR_SECTION
 	rlwinm	r1,r1,0,~_PAGE_COHERENT	/* clear M (coherence not required) */
 END_FTR_SECTION_IFCLR(CPU_FTR_NEED_COHERENT)
 	mtspr	SPRN_RPA,r1
+	mfspr	r2,SPRN_SRR1		/* Need to restore CR0 */
+	mtcrf	0x80,r2
+BEGIN_MMU_FTR_SECTION
+	li	r0,1
+	mfspr	r1,SPRN_SPRG4
+	rlwinm	r2,r3,20,27,31		/* Get Address bits 15:19 */
+	slw	r0,r0,r2
+	xor	r1,r0,r1
+	srw	r0,r1,r2
+	mtspr	SPRN_SPRG4,r1
+	mfspr	r2,SPRN_SRR1
+	rlwimi	r2,r0,31-14,14,14
+	mtspr	SPRN_SRR1,r2
+END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
 	tlbld	r3
-	mfspr	r3,SPRN_SRR1		/* Need to restore CR0 */
-	mtcrf	0x80,r3
 	rfi
 
 #ifndef CONFIG_ALTIVEC
```
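For readers working through the head_32.S hunks above, the following is a rough C transcription of the added instruction sequence (a sketch based on my reading of the assembly; `dtlb_lru_state`, `dtlb_pick_way` and `DTLB_SETS` are illustrative names, not kernel identifiers). SPRG4 keeps one LRU bit per DTLB set; each miss toggles the bit of the faulting set, and the new value is inserted into the SRR1 way-select bit that the subsequent tlbld uses.

```c
#include <stdint.h>

#define DTLB_SETS	32	/* e300 DTLB: 32 sets, 2 ways */

/* Stands in for SPRG4: one "last way written" bit per DTLB set. */
static uint32_t dtlb_lru_state;

/*
 * On a DTLB miss for effective address 'ea': toggle the LRU bit of the
 * faulting set and return the way (0 or 1) the handler should reload,
 * which the assembly places into the SRR1 way-select bit before tlbld.
 */
static unsigned int dtlb_pick_way(uint32_t ea)
{
	unsigned int set = (ea >> 12) & (DTLB_SETS - 1);	/* EA bits 15:19 -> set index (rlwinm) */
	uint32_t mask = (uint32_t)1 << set;			/* one-hot set mask (li + slw) */

	dtlb_lru_state ^= mask;					/* flip this set's bit (xor, saved via mtspr SPRG4) */
	return (dtlb_lru_state >> set) & 1;			/* new bit selects the way (srw + rlwimi) */
}
```

Note that the assembly works in the PowerPC MSB-first bit numbering, so the bit positions differ from this LSB-based C model, but the visible behaviour is the same: the way alternates each time a given set takes a miss.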