author     Kirill A. Shutemov <kirill.shutemov@linux.intel.com>   2024-06-14 12:58:51 +0300
committer  Borislav Petkov (AMD) <bp@alien8.de>                   2024-06-17 18:45:50 +0300
commit     de60613173dfd75a10f6aa8e001bbcafa242e623 (patch)
tree       b5196b9eb04cdf58e2cc4ee6c7254c6763bdcab0 /arch
parent     7b46a8997db27ed70b01458fa4437ec2360feddd (diff)
x86/kexec: Keep CR4.MCE set during kexec for TDX guest
TDX guests run with MCA enabled (CR4.MCE=1b) from the very start. If
that bit is cleared during CR4 register reprogramming in the boot or kexec
flows, a #VE exception will be raised which the guest kernel cannot handle.
Therefore, make sure the CR4.MCE setting is preserved over kexec too and avoid
raising any #VEs.
Signed-off-by: Kirill A. Shutemov <kirill.shutemov@linux.intel.com>
Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
Link: https://lore.kernel.org/r/20240614095904.1345461-7-kirill.shutemov@linux.intel.com
Diffstat (limited to 'arch')
-rw-r--r--  arch/x86/kernel/relocate_kernel_64.S  17
1 file changed, 10 insertions(+), 7 deletions(-)
diff --git a/arch/x86/kernel/relocate_kernel_64.S b/arch/x86/kernel/relocate_kernel_64.S
index 8b8922de3765..042c9a0334e9 100644
--- a/arch/x86/kernel/relocate_kernel_64.S
+++ b/arch/x86/kernel/relocate_kernel_64.S
@@ -5,6 +5,8 @@
  */
 
 #include <linux/linkage.h>
+#include <linux/stringify.h>
+#include <asm/alternative.h>
 #include <asm/page_types.h>
 #include <asm/kexec.h>
 #include <asm/processor-flags.h>
@@ -145,14 +147,15 @@ SYM_CODE_START_LOCAL_NOALIGN(identity_mapped)
 	 * Set cr4 to a known state:
 	 *  - physical address extension enabled
 	 *  - 5-level paging, if it was enabled before
+	 *  - Machine check exception on TDX guest, if it was enabled before.
+	 *    Clearing MCE might not be allowed in TDX guests, depending on setup.
+	 *
+	 * Use R13 that contains the original CR4 value, read in relocate_kernel().
+	 * PAE is always set in the original CR4.
 	 */
-	movl	$X86_CR4_PAE, %eax
-	testq	$X86_CR4_LA57, %r13
-	jz	.Lno_la57
-	orl	$X86_CR4_LA57, %eax
-.Lno_la57:
-
-	movq	%rax, %cr4
+	andl	$(X86_CR4_PAE | X86_CR4_LA57), %r13d
+	ALTERNATIVE "", __stringify(orl $X86_CR4_MCE, %r13d), X86_FEATURE_TDX_GUEST
+	movq	%r13, %cr4
 
 	/* Flush the TLB (needed?) */
 	movq	%r9, %cr3
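As a reading aid, here is a rough C sketch of the CR4 value the reworked identity_mapped() code computes. It is an illustration only, not kernel code: kexec_cr4() and the tdx_guest flag are invented for this sketch, and the real assembly resolves the TDX case at alternatives-patching time via X86_FEATURE_TDX_GUEST rather than with a runtime branch.

	/* Hypothetical sketch, not the kernel's implementation. */
	#define X86_CR4_PAE	(1UL << 5)	/* physical address extension */
	#define X86_CR4_MCE	(1UL << 6)	/* machine check enable */
	#define X86_CR4_LA57	(1UL << 12)	/* 5-level paging */

	static unsigned long kexec_cr4(unsigned long orig_cr4, int tdx_guest)
	{
		/* Keep only PAE (always set) and LA57 (if 5-level paging was on). */
		unsigned long cr4 = orig_cr4 & (X86_CR4_PAE | X86_CR4_LA57);

		/*
		 * A TDX guest may not clear CR4.MCE: doing so raises a #VE it
		 * cannot handle, so keep the bit set in that case.
		 */
		if (tdx_guest)
			cr4 |= X86_CR4_MCE;

		return cr4;
	}

The masking works because R13 still holds the CR4 value saved in relocate_kernel(): PAE is guaranteed to be set there, and LA57 is carried over only if 5-level paging was enabled before.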