| author | Kyle McMartin <kyle@parisc-linux.org> | 2006-08-14 06:17:19 +0400 |
|---|---|---|
| committer | Matthew Wilcox <willy@parisc-linux.org> | 2006-10-04 16:45:37 +0400 |
| commit | 3d73cf5e18c47d416db4d0734245d3fb087603d9 (patch) | |
| tree | 4a80d0f24493a2be47828fa17ecc358b473ba38b /arch/parisc/kernel | |
| parent | f86e45131f9d41b1617fbaac7aa1ef23e8d0ab48 (diff) | |
| download | linux-3d73cf5e18c47d416db4d0734245d3fb087603d9.tar.xz | |
[PARISC] Abstract shift register left in .S
Abstract the existing shift-register-left instruction sequences behind a
macro, as is already done for shift register right. This lends itself to a
nice clean-up of some #ifdef blocks in entry.S.
Signed-off-by: Kyle McMartin <kyle@parisc-linux.org>
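
The companion macro definition is not visible on this page, since the diffstat below is limited to arch/parisc/kernel. Judging purely from the #ifdef pattern the patch removes (shld under CONFIG_64BIT, shlw otherwise), the SHLREG macro presumably ends up looking roughly like the following sketch in include/asm-parisc/assembly.h, mirroring the existing shift-right abstraction:

```
/* Sketch only: inferred from the code this patch removes, not taken from
 * the actual include/asm-parisc/assembly.h hunk (which this diff omits). */
#ifdef CONFIG_64BIT
#define SHLREG	shld	/* 64-bit doubleword shift left */
#else
#define SHLREG	shlw	/* 32-bit word shift left */
#endif
```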
Diffstat (limited to 'arch/parisc/kernel')
-rw-r--r-- | arch/parisc/kernel/entry.S | 19 |
1 file changed, 4 insertions, 15 deletions
```diff
diff --git a/arch/parisc/kernel/entry.S b/arch/parisc/kernel/entry.S
index 192357a3b9fe..d55b45d54f4d 100644
--- a/arch/parisc/kernel/entry.S
+++ b/arch/parisc/kernel/entry.S
@@ -30,6 +30,7 @@
 #include <asm/psw.h>
+#include <asm/cache.h>		/* for L1_CACHE_SHIFT */
 #include <asm/assembly.h>	/* for LDREG/STREG defines */
 #include <asm/pgtable.h>
 #include <asm/signal.h>
@@ -478,11 +479,7 @@
 	bb,>=,n		\pmd,_PxD_PRESENT_BIT,\fault
 	DEP		%r0,31,PxD_FLAG_SHIFT,\pmd	/* clear flags */
 	copy		\pmd,%r9
-#ifdef CONFIG_64BIT
-	shld		%r9,PxD_VALUE_SHIFT,\pmd
-#else
-	shlw		%r9,PxD_VALUE_SHIFT,\pmd
-#endif
+	SHLREG		%r9,PxD_VALUE_SHIFT,\pmd
 	EXTR		\va,31-PAGE_SHIFT,ASM_BITS_PER_PTE,\index
 	DEP		%r0,31,PAGE_SHIFT,\pmd	/* clear offset */
 	shladd		\index,BITS_PER_PTE_ENTRY,\pmd,\pmd
@@ -970,11 +967,7 @@ intr_return:
 	/* shift left ____cacheline_aligned (aka L1_CACHE_BYTES) amount
 	** irq_stat[] is defined using ____cacheline_aligned.
 	*/
-#ifdef CONFIG_64BIT
-	shld	%r1, 6, %r20
-#else
-	shlw	%r1, 5, %r20
-#endif
+	SHLREG	%r1,L1_CACHE_SHIFT,%r20
 	add	%r19,%r20,%r19	/* now have &irq_stat[smp_processor_id()] */
 #endif /* CONFIG_SMP */
@@ -2115,11 +2108,7 @@ syscall_check_bh:
 	ldw	TI_CPU-THREAD_SZ_ALGN-FRAME_SIZE(%r30),%r26 /* cpu # */
 	/* shift left ____cacheline_aligned (aka L1_CACHE_BYTES) bits */
-#ifdef CONFIG_64BIT
-	shld	%r26, 6, %r20
-#else
-	shlw	%r26, 5, %r20
-#endif
+	SHLREG	%r26,L1_CACHE_SHIFT,%r20
 	add	%r19,%r20,%r19	/* now have &irq_stat[smp_processor_id()] */
 #endif /* CONFIG_SMP */
```
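
For reference, the hard-coded shift amounts being replaced line up with the cache-line geometry implied by the new <asm/cache.h> include: assuming L1_CACHE_SHIFT is 6 under CONFIG_64BIT (64-byte lines) and 5 otherwise (32-byte lines), the new SHLREG line expands back to exactly the instructions it replaces when scaling the CPU number into the ____cacheline_aligned irq_stat[] array:

```
	SHLREG	%r1,L1_CACHE_SHIFT,%r20
	/* CONFIG_64BIT:  shld %r1, 6, %r20   -- cpu# * 64-byte irq_stat entry */
	/* 32-bit:        shlw %r1, 5, %r20   -- cpu# * 32-byte irq_stat entry */
```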