author | Becky Bruce <beckyb@kernel.crashing.org> | 2011-10-10 14:50:42 +0400
committer | Benjamin Herrenschmidt <benh@kernel.crashing.org> | 2011-12-07 09:26:22 +0400
commit | d1b9b12811ef079c37fe464f51953746d8b78e2a (patch)
tree | 4440a5420206d0dddd5770d5fefbaf6df8becefe /arch/powerpc/mm/tlb_low_64e.S
parent | 27609a42ee5486b8d132ece24dde6f7524d67df3 (diff)
download | linux-d1b9b12811ef079c37fe464f51953746d8b78e2a.tar.xz
powerpc: Add hugepage support to 64-bit tablewalk code for FSL_BOOK3E
Before hugetlb, at each level of the table, we test for
!0 to determine if we have a valid table entry. With hugetlb, this
compare becomes:
< 0 is a normal entry
0 is an invalid entry
> 0 is huge
This works because the hugepage code pulls the top bit off the entry
(which for non-huge entries always has the top bit set) as an
indicator that we have a hugepage.
Signed-off-by: Becky Bruce <beckyb@kernel.crashing.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
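A minimal C sketch of the entry classification described above (illustrative only; the type, helper, and example values below are made up, not the kernel's): because non-huge entries always have the top bit set, viewing the 64-bit entry as a signed value makes a normal entry negative, an empty entry zero, and an entry whose top bit was pulled off as the hugepage indicator positive. That is what lets the assembly below replace the unsigned compare and beq-on-zero with a signed compare (cmpdi) and bge.

#include <stdio.h>
#include <stdint.h>

/* Sketch only, not kernel code: classify a 64-bit table entry
 * by sign, mirroring the cmpdi/bge logic in the patch below. */
static const char *classify(int64_t entry)
{
	if (entry < 0)
		return "normal entry (top bit set)";
	if (entry == 0)
		return "invalid entry";
	return "hugepage (top bit pulled off)";
}

int main(void)
{
	/* Made-up example values, chosen only for their sign. */
	printf("%s\n", classify((int64_t)0xc000000012345000ULL)); /* < 0  */
	printf("%s\n", classify(0));                              /* == 0 */
	printf("%s\n", classify((int64_t)0x0000000012345000ULL)); /* > 0  */
	return 0;
}

In the diff, bge tlb_miss_fault_bolted takes the fault path for the >= 0 cases (invalid or huge), while a negative (normal) entry falls through and the walk continues with ldx.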
Diffstat (limited to 'arch/powerpc/mm/tlb_low_64e.S')
-rw-r--r-- | arch/powerpc/mm/tlb_low_64e.S | 14
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/arch/powerpc/mm/tlb_low_64e.S b/arch/powerpc/mm/tlb_low_64e.S
index 71d5d9a0b220..ff672bd8fea9 100644
--- a/arch/powerpc/mm/tlb_low_64e.S
+++ b/arch/powerpc/mm/tlb_low_64e.S
@@ -136,22 +136,22 @@ ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBRSRV)
 #ifndef CONFIG_PPC_64K_PAGES
 	rldicl	r15,r16,64-PUD_SHIFT+3,64-PUD_INDEX_SIZE-3
 	clrrdi	r15,r15,3
-	cmpldi	cr0,r14,0
-	beq	tlb_miss_fault_bolted	/* Bad pgd entry */
+	cmpdi	cr0,r14,0
+	bge	tlb_miss_fault_bolted	/* Bad pgd entry or hugepage; bail */
 	ldx	r14,r14,r15		/* grab pud entry */
 #endif /* CONFIG_PPC_64K_PAGES */
 
 	rldicl	r15,r16,64-PMD_SHIFT+3,64-PMD_INDEX_SIZE-3
 	clrrdi	r15,r15,3
-	cmpldi	cr0,r14,0
-	beq	tlb_miss_fault_bolted
+	cmpdi	cr0,r14,0
+	bge	tlb_miss_fault_bolted
 	ldx	r14,r14,r15		/* Grab pmd entry */
 
 	rldicl	r15,r16,64-PAGE_SHIFT+3,64-PTE_INDEX_SIZE-3
 	clrrdi	r15,r15,3
-	cmpldi	cr0,r14,0
-	beq	tlb_miss_fault_bolted
-	ldx	r14,r14,r15		/* Grab PTE */
+	cmpdi	cr0,r14,0
+	bge	tlb_miss_fault_bolted
+	ldx	r14,r14,r15		/* Grab PTE, normal (!huge) page */
 
 	/* Check if required permissions are met */
 	andc.	r15,r11,r14