author    Russell King <rmk+kernel@arm.linux.org.uk>    2011-01-04 22:39:29 +0300
committer Russell King <rmk+kernel@arm.linux.org.uk>    2011-02-18 02:27:33 +0300
commit    cada3c0841e1deaec4c0f92654610b028dc683ff (patch)
tree      9c6085c8b8447795ddc749315a31fd3906a6159d /arch/arm/include/asm/memory.h
parent    dc21af99fadcfa0ae65b52fd0895f85824f0c288 (diff)
download  linux-cada3c0841e1deaec4c0f92654610b028dc683ff.tar.xz
ARM: P2V: extend to 16-bit translation offsets
MSM's memory is aligned to 2MB, which is more than we can do with our existing method as we're limited to the upper 8 bits. Extend this by using two instructions to 16 bits, automatically selected when MSM is enabled.

Acked-by: Tony Lindgren <tony@atomide.com>
Reviewed-by: Nicolas Pitre <nicolas.pitre@linaro.org>
Tested-by: Nicolas Pitre <nicolas.pitre@linaro.org>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
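[Illustration, not part of the commit.] After patching, the stubs apply the PHYS_OFFSET - PAGE_OFFSET delta in two 8-bit slices, bits 31..24 first and then bits 23..16. A minimal plain-C sketch of the resulting arithmetic, assuming the usual PAGE_OFFSET of 0xc0000000 and a hypothetical 2MB-aligned PHYS_OFFSET of 0x00200000 (the _EX names and virt_to_phys_sketch are made up for this example):

#include <stdint.h>
#include <stdio.h>

#define PAGE_OFFSET_EX	0xc0000000u	/* typical kernel virtual base (assumed) */
#define PHYS_OFFSET_EX	0x00200000u	/* hypothetical 2MB-aligned RAM base */

/* delta the boot-time patcher folds into the stub immediates (mod 2^32) */
static const uint32_t pv_delta = PHYS_OFFSET_EX - PAGE_OFFSET_EX;

static uint32_t virt_to_phys_sketch(uint32_t va)
{
	uint32_t pa = va + (pv_delta & 0xff000000u);	/* stub patched via __PV_BITS_31_24 */

	pa += pv_delta & 0x00ff0000u;	/* second stub, __PV_BITS_23_16, 16-bit config only */
	return pa;
}

int main(void)
{
	/* prints: c0000000 -> 00200000 */
	printf("%08x -> %08x\n", PAGE_OFFSET_EX, virt_to_phys_sketch(PAGE_OFFSET_EX));
	return 0;
}

With only the first stub the delta must be a multiple of 16MB; the second stub brings the granularity down to 64KB, which covers MSM's 2MB alignment.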
Diffstat (limited to 'arch/arm/include/asm/memory.h')
-rw-r--r--  arch/arm/include/asm/memory.h  21
1 file changed, 17 insertions, 4 deletions
diff --git a/arch/arm/include/asm/memory.h b/arch/arm/include/asm/memory.h
index 7197879e1cb7..2398b3fc0268 100644
--- a/arch/arm/include/asm/memory.h
+++ b/arch/arm/include/asm/memory.h
@@ -154,29 +154,42 @@
#ifndef __virt_to_phys
#ifdef CONFIG_ARM_PATCH_PHYS_VIRT
+/*
+ * Constants used to force the right instruction encodings and shifts
+ * so that all we need to do is modify the 8-bit constant field.
+ */
+#define __PV_BITS_31_24 0x81000000
+#define __PV_BITS_23_16 0x00810000
+
extern unsigned long __pv_phys_offset;
#define PHYS_OFFSET __pv_phys_offset
-#define __pv_stub(from,to,instr) \
+#define __pv_stub(from,to,instr,type) \
__asm__("@ __pv_stub\n" \
"1: " instr " %0, %1, %2\n" \
" .pushsection .pv_table,\"a\"\n" \
" .long 1b\n" \
" .popsection\n" \
: "=r" (to) \
- : "r" (from), "I" (0x81000000))
+ : "r" (from), "I" (type))
static inline unsigned long __virt_to_phys(unsigned long x)
{
unsigned long t;
- __pv_stub(x, t, "add");
+ __pv_stub(x, t, "add", __PV_BITS_31_24);
+#ifdef CONFIG_ARM_PATCH_PHYS_VIRT_16BIT
+ __pv_stub(t, t, "add", __PV_BITS_23_16);
+#endif
return t;
}
static inline unsigned long __phys_to_virt(unsigned long x)
{
unsigned long t;
- __pv_stub(x, t, "sub");
+ __pv_stub(x, t, "sub", __PV_BITS_31_24);
+#ifdef CONFIG_ARM_PATCH_PHYS_VIRT_16BIT
+ __pv_stub(t, t, "sub", __PV_BITS_23_16);
+#endif
return t;
}
#else
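[Illustration, not part of the commit.] Why both constants use 0x81: an ARM data-processing immediate is an 8-bit value rotated right by twice a 4-bit rotate field, and because 0x81 has both end bits of that 8-bit field set, only one rotation can encode each constant. That forces the assembler to place the patchable immediate exactly over bits 31..24 (for 0x81000000) or bits 23..16 (for 0x00810000), so the boot-time fixup only has to rewrite the low 8 bits of each flagged instruction. A stand-alone C check of that encoding (arm_encode_imm is a helper written for this sketch, not a kernel function):

#include <stdint.h>
#include <stdio.h>

/* find imm8/rot such that val == imm8 ROR (2 * rot), the A32 immediate form */
static int arm_encode_imm(uint32_t val, unsigned int *imm8, unsigned int *rot)
{
	unsigned int r;

	for (r = 0; r < 16; r++) {
		/* undo a right-rotation of 2*r by rotating left 2*r */
		uint32_t v = r ? (val << (2 * r)) | (val >> (32 - 2 * r)) : val;

		if (v <= 0xff) {
			*imm8 = v;
			*rot = r;
			return 1;
		}
	}
	return 0;
}

int main(void)
{
	unsigned int imm8, rot;

	if (arm_encode_imm(0x81000000u, &imm8, &rot))
		printf("__PV_BITS_31_24: imm8=0x%02x rot=%u\n", imm8, rot);	/* 0x81, 4 */
	if (arm_encode_imm(0x00810000u, &imm8, &rot))
		printf("__PV_BITS_23_16: imm8=0x%02x rot=%u\n", imm8, rot);	/* 0x81, 8 */
	return 0;
}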