Diffstat (limited to 'arch/x86/platform/pvh/head.S')
-rw-r--r--  arch/x86/platform/pvh/head.S  |  67
1 file changed, 37 insertions, 30 deletions
diff --git a/arch/x86/platform/pvh/head.S b/arch/x86/platform/pvh/head.S
index 64fca49cd88f..1d78e5631bb8 100644
--- a/arch/x86/platform/pvh/head.S
+++ b/arch/x86/platform/pvh/head.S
@@ -6,7 +6,9 @@
.code32
.text
+#ifdef CONFIG_X86_32
#define _pa(x) ((x) - __START_KERNEL_map)
+#endif
#define rva(x) ((x) - pvh_start_xen)
#include <linux/elfnote.h>
@@ -52,7 +54,7 @@
#define PVH_CS_SEL (PVH_GDT_ENTRY_CS * 8)
#define PVH_DS_SEL (PVH_GDT_ENTRY_DS * 8)
-SYM_CODE_START_LOCAL(pvh_start_xen)
+SYM_CODE_START(pvh_start_xen)
UNWIND_HINT_END_OF_STACK
cld
@@ -72,8 +74,7 @@ SYM_CODE_START_LOCAL(pvh_start_xen)
movl $0, %esp
leal rva(gdt)(%ebp), %eax
- leal rva(gdt_start)(%ebp), %ecx
- movl %ecx, 2(%eax)
+ addl %eax, 2(%eax)
lgdt (%eax)
mov $PVH_DS_SEL,%eax
@@ -86,8 +87,7 @@ SYM_CODE_START_LOCAL(pvh_start_xen)
mov %ebx, %esi
movl rva(pvh_start_info_sz)(%ebp), %ecx
shr $2,%ecx
- rep
- movsl
+ rep movsl
leal rva(early_stack_end)(%ebp), %esp
@@ -103,10 +103,23 @@ SYM_CODE_START_LOCAL(pvh_start_xen)
btsl $_EFER_LME, %eax
wrmsr
+ /*
+ * Reuse the non-relocatable symbol emitted for the ELF note to
+ * subtract the build time physical address of pvh_start_xen() from
+ * its actual runtime address, without relying on absolute 32-bit ELF
+ * relocations, as these are not supported by the linker when running
+ * in -pie mode, and should be avoided in .head.text in general.
+ */
mov %ebp, %ebx
- subl $_pa(pvh_start_xen), %ebx /* offset */
+ subl rva(xen_elfnote_phys32_entry)(%ebp), %ebx
jz .Lpagetable_done
+ /*
+ * Store the resulting load offset in phys_base. __pa() needs
+ * phys_base set to calculate the hypercall page in xen_pvh_init().
+ */
+ movl %ebx, rva(phys_base)(%ebp)
+
/* Fixup page-tables for relocation. */
leal rva(pvh_init_top_pgt)(%ebp), %edi
movl $PTRS_PER_PGD, %ecx
@@ -159,26 +172,22 @@ SYM_CODE_START_LOCAL(pvh_start_xen)
1:
UNWIND_HINT_END_OF_STACK
- /* Set base address in stack canary descriptor. */
- mov $MSR_GS_BASE,%ecx
- leal canary(%rip), %eax
- xor %edx, %edx
- wrmsr
-
- /*
- * Calculate load offset and store in phys_base. __pa() needs
- * phys_base set to calculate the hypercall page in xen_pvh_init().
- */
- movq %rbp, %rbx
- subq $_pa(pvh_start_xen), %rbx
- movq %rbx, phys_base(%rip)
- call xen_prepare_pvh
/*
- * Clear phys_base. __startup_64 will *add* to its value,
- * so reset to 0.
+ * Set up GSBASE.
+ * Note that on SMP the boot CPU uses the init data section until
+ * the per-CPU areas are set up.
*/
- xor %rbx, %rbx
- movq %rbx, phys_base(%rip)
+ movl $MSR_GS_BASE,%ecx
+ xorl %eax, %eax
+ xorl %edx, %edx
+ wrmsr
+
+ /* Call xen_prepare_pvh() via the kernel virtual mapping */
+ leaq xen_prepare_pvh(%rip), %rax
+ subq phys_base(%rip), %rax
+ addq $__START_KERNEL_map, %rax
+ ANNOTATE_RETPOLINE_SAFE
+ call *%rax
/* startup_64 expects boot_params in %rsi. */
lea pvh_bootparams(%rip), %rsi
@@ -217,8 +226,8 @@ SYM_CODE_END(pvh_start_xen)
.section ".init.data","aw"
.balign 8
SYM_DATA_START_LOCAL(gdt)
- .word gdt_end - gdt_start
- .long _pa(gdt_start) /* x86-64 will overwrite if relocated. */
+ .word gdt_end - gdt_start - 1
+ .long gdt_start - gdt
.word 0
SYM_DATA_END(gdt)
SYM_DATA_START_LOCAL(gdt_start)
@@ -232,8 +241,6 @@ SYM_DATA_START_LOCAL(gdt_start)
SYM_DATA_END_LABEL(gdt_start, SYM_L_LOCAL, gdt_end)
.balign 16
-SYM_DATA_LOCAL(canary, .fill 48, 1, 0)
-
SYM_DATA_START_LOCAL(early_stack)
.fill BOOT_STACK_SIZE, 1, 0
SYM_DATA_END_LABEL(early_stack, SYM_L_LOCAL, early_stack_end)
@@ -300,5 +307,5 @@ SYM_DATA_END(pvh_level2_kernel_pgt)
.long KERNEL_IMAGE_SIZE - 1)
#endif
- ELFNOTE(Xen, XEN_ELFNOTE_PHYS32_ENTRY,
- _ASM_PTR (pvh_start_xen - __START_KERNEL_map))
+ ELFNOTE(Xen, XEN_ELFNOTE_PHYS32_ENTRY, .global xen_elfnote_phys32_entry;
+ xen_elfnote_phys32_entry: _ASM_PTR xen_elfnote_phys32_entry_value - .)
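
For reference, the address arithmetic this patch introduces can be restated in C. The sketch below is illustrative only: apart from __START_KERNEL_map and phys_base, every name (load_offset, prepare_pvh_virt, gdt_ptr32, runtime_entry, buildtime_entry) is a hypothetical stand-in for a value the assembly keeps in a register or emits in .init.data, not an identifier from the kernel tree.

	/*
	 * Illustrative sketch of the address arithmetic performed by the
	 * patched pvh_start_xen() above.  Only __START_KERNEL_map and
	 * phys_base correspond to real kernel symbols; every other name is
	 * a hypothetical stand-in.
	 */
	#include <stdint.h>

	#define START_KERNEL_MAP 0xffffffff80000000ULL	/* __START_KERNEL_map on x86-64 */

	static uint64_t phys_base;	/* load offset stored by the patched code */

	/*
	 * Load offset = runtime address of pvh_start_xen minus its build-time
	 * physical address.  The build-time address is recovered through the
	 * xen_elfnote_phys32_entry symbol emitted for the PHYS32_ENTRY ELF
	 * note, avoiding an absolute 32-bit relocation in .head.text.
	 */
	static uint64_t load_offset(uint64_t runtime_entry, uint64_t buildtime_entry)
	{
		return runtime_entry - buildtime_entry;	/* zero if not relocated */
	}

	/*
	 * Call target for xen_prepare_pvh(): take its RIP-relative (physical)
	 * address, strip the load offset and add the kernel mapping base,
	 * i.e. virt = phys - phys_base + __START_KERNEL_map, mirroring the
	 * leaq/subq/addq sequence before the indirect call.
	 */
	static uint64_t prepare_pvh_virt(uint64_t prepare_pvh_phys)
	{
		return prepare_pvh_phys - phys_base + START_KERNEL_MAP;
	}

	/*
	 * 32-bit GDT pseudo-descriptor as emitted in .init.data.  The limit
	 * is the table size minus one; the base is now stored as the offset
	 * of gdt_start relative to the descriptor itself and is turned into
	 * an absolute runtime address by "addl %eax, 2(%eax)" before lgdt,
	 * instead of being an absolute _pa(gdt_start) fixed at link time.
	 */
	struct gdt_ptr32 {
		uint16_t limit;		/* gdt_end - gdt_start - 1 */
		uint32_t base;		/* gdt_start - gdt, fixed up to &gdt + offset */
		uint16_t pad;		/* the trailing .word 0 */
	} __attribute__((packed));

Storing the GDT base as a self-relative offset and fixing it up at runtime, like the ELF-note trick used for pvh_start_xen itself, keeps the early code and data free of absolute relocations, so the same image works whether or not it was loaded at its link-time physical address.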