| author | Peter Zijlstra <peterz@infradead.org> | 2021-11-10 13:01:06 +0300 |
|---|---|---|
| committer | Peter Zijlstra <peterz@infradead.org> | 2021-12-11 11:09:46 +0300 |
| commit | ab0fedcc714aafaac6ac996b51791aee0d1cd8fd (patch) | |
| tree | edd75969e8982021299f6a8f8dc478885457e08b /arch/x86 | |
| parent | acba44d2436d463f60a54bf934d378dcf384a965 (diff) | |
| download | linux-ab0fedcc714aafaac6ac996b51791aee0d1cd8fd.tar.xz | |
x86/copy_mc_64: Remove .fixup usage
Place the anonymous .fixup code at the tail of the regular functions.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Josh Poimboeuf <jpoimboe@redhat.com>
Reviewed-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211110101325.127055887@infradead.org
Diffstat (limited to 'arch/x86')
-rw-r--r-- | arch/x86/lib/copy_mc_64.S | 12
1 file changed, 4 insertions(+), 8 deletions(-)
```diff
diff --git a/arch/x86/lib/copy_mc_64.S b/arch/x86/lib/copy_mc_64.S
index 23009792e19c..c859a8a09860 100644
--- a/arch/x86/lib/copy_mc_64.S
+++ b/arch/x86/lib/copy_mc_64.S
@@ -78,9 +78,7 @@ SYM_FUNC_START(copy_mc_fragile)
 	xorl %eax, %eax
 .L_done:
 	RET
-SYM_FUNC_END(copy_mc_fragile)
 
-	.section .fixup, "ax"
 	/*
 	 * Return number of bytes not copied for any failure. Note that
 	 * there is no "tail" handling since the source buffer is 8-byte
@@ -105,14 +103,14 @@ SYM_FUNC_END(copy_mc_fragile)
 	movl %ecx, %edx
 	jmp copy_mc_fragile_handle_tail
 
-	.previous
-
 	_ASM_EXTABLE_TYPE(.L_read_leading_bytes, .E_leading_bytes, EX_TYPE_DEFAULT_MCE_SAFE)
 	_ASM_EXTABLE_TYPE(.L_read_words, .E_read_words, EX_TYPE_DEFAULT_MCE_SAFE)
 	_ASM_EXTABLE_TYPE(.L_read_trailing_bytes, .E_trailing_bytes, EX_TYPE_DEFAULT_MCE_SAFE)
 	_ASM_EXTABLE(.L_write_leading_bytes, .E_leading_bytes)
 	_ASM_EXTABLE(.L_write_words, .E_write_words)
 	_ASM_EXTABLE(.L_write_trailing_bytes, .E_trailing_bytes)
+
+SYM_FUNC_END(copy_mc_fragile)
 #endif /* CONFIG_X86_MCE */
 
 /*
@@ -133,9 +131,7 @@ SYM_FUNC_START(copy_mc_enhanced_fast_string)
 	/* Copy successful. Return zero */
 	xorl %eax, %eax
 	RET
-SYM_FUNC_END(copy_mc_enhanced_fast_string)
 
-	.section .fixup, "ax"
 .E_copy:
 	/*
 	 * On fault %rcx is updated such that the copy instruction could
@@ -147,7 +143,7 @@ SYM_FUNC_END(copy_mc_enhanced_fast_string)
 	movq %rcx, %rax
 	RET
 
-	.previous
-
 	_ASM_EXTABLE_TYPE(.L_copy, .E_copy, EX_TYPE_DEFAULT_MCE_SAFE)
+
+SYM_FUNC_END(copy_mc_enhanced_fast_string)
 #endif /* !CONFIG_UML */
```
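For illustration only (this is not code from the patch): a minimal sketch of the transformation being applied, using a made-up function `example_load` and numeric labels. It assumes the usual kernel assembly environment, which provides SYM_FUNC_START()/SYM_FUNC_END(), RET and _ASM_EXTABLE().

```asm
/* Hypothetical illustration only -- not code from this patch. */
#include <linux/linkage.h>
#include <asm/asm.h>

/* Old style: the recovery code lives in the anonymous .fixup section. */
SYM_FUNC_START(example_load_old)
1:	movq (%rdi), %rax		/* load that may fault */
	RET
SYM_FUNC_END(example_load_old)

	.section .fixup, "ax"
2:	xorl %eax, %eax			/* fault: return 0 */
	RET
	.previous

	_ASM_EXTABLE(1b, 2b)

/*
 * New style (what this patch does): the same recovery code sits at the
 * tail of the function, before SYM_FUNC_END(), so it is covered by the
 * function's symbol and no separate .fixup section is needed.
 */
SYM_FUNC_START(example_load_new)
1:	movq (%rdi), %rax		/* load that may fault */
	RET
2:	xorl %eax, %eax			/* fault: return 0 */
	RET

	_ASM_EXTABLE(1b, 2b)
SYM_FUNC_END(example_load_new)
```

In the actual diff above, the same movement is applied to copy_mc_fragile() and copy_mc_enhanced_fast_string(): the `.section .fixup, "ax"` / `.previous` brackets are dropped, the recovery code stays where it was textually, and SYM_FUNC_END() moves below the _ASM_EXTABLE*() entries so the fixup code is attributed to the function's symbol.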