author		Josh Poimboeuf <jpoimboe@redhat.com>	2017-09-18 22:42:07 +0300
committer	Herbert Xu <herbert@gondor.apana.org.au>	2017-09-20 12:42:36 +0300
commit		673ac6fbc74f835e2125df9ee39e8a2a423832e2 (patch)
tree		91deae2919b19bdd0990ac2bfbf2cb8a85585469 /arch/x86/crypto/sha256-avx-asm.S
parent		6488bce756861b94810e54f83416d5e74c0f18bf (diff)
download	linux-673ac6fbc74f835e2125df9ee39e8a2a423832e2.tar.xz
crypto: x86/sha256-avx - Fix RBP usage
Using RBP as a temporary register breaks frame pointer convention and
breaks stack traces when unwinding from an interrupt in the crypto code.
Swap the usages of R12 and RBP. Use R12 for the TBL register, and use
RBP to store the pre-aligned stack pointer.
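For illustration only (not part of the commit message), here is a minimal sketch of the prologue/epilogue shape the patch moves to, with a placeholder symbol name, stack size, and body. The idea is that RBP always holds a stack address (the pre-aligned stack pointer) instead of an arbitrary data pointer, so a frame-pointer-based unwinder does not walk garbage, while R12 is freed up to serve as the table pointer inside the body:

	# Hypothetical routine; the name, the 64-byte scratch size and the
	# body are placeholders, not taken from the patch.
	.text
	.globl	example_transform
example_transform:
	pushq	%rbx			# save callee-saved registers
	pushq	%r12
	pushq	%r13
	pushq	%r14
	pushq	%r15
	pushq	%rbp
	movq	%rsp, %rbp		# keep the pre-aligned stack pointer in RBP

	subq	$64, %rsp		# allocate scratch space (placeholder size)
	and	$~15, %rsp		# 16-byte-align RSP for vector loads/stores

	# ... body: %r12 is now free to act as the round-constant table pointer ...

	movq	%rbp, %rsp		# undo alignment and allocation in one move
	popq	%rbp
	popq	%r15
	popq	%r14
	popq	%r13
	popq	%r12
	popq	%rbx
	ret

The patch below follows exactly this shape in sha256_transform_avx, using the file's real STACK_SIZE and register assignments.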
Reported-by: Eric Biggers <ebiggers@google.com>
Reported-by: Peter Zijlstra <peterz@infradead.org>
Tested-by: Eric Biggers <ebiggers@google.com>
Acked-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Josh Poimboeuf <jpoimboe@redhat.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch/x86/crypto/sha256-avx-asm.S')
-rw-r--r--	arch/x86/crypto/sha256-avx-asm.S	15
1 file changed, 7 insertions(+), 8 deletions(-)
diff --git a/arch/x86/crypto/sha256-avx-asm.S b/arch/x86/crypto/sha256-avx-asm.S
index e08888a1a5f2..001bbcf93c79 100644
--- a/arch/x86/crypto/sha256-avx-asm.S
+++ b/arch/x86/crypto/sha256-avx-asm.S
@@ -103,7 +103,7 @@ SRND = %rsi       # clobbers INP
 c = %ecx
 d = %r8d
 e = %edx
-TBL = %rbp
+TBL = %r12
 a = %eax
 b = %ebx
 
@@ -350,13 +350,13 @@ a = TMP_
 ENTRY(sha256_transform_avx)
 .align 32
 	pushq	%rbx
-	pushq	%rbp
+	pushq	%r12
 	pushq	%r13
 	pushq	%r14
 	pushq	%r15
-	pushq	%r12
+	pushq	%rbp
+	movq	%rsp, %rbp
 
-	mov	%rsp, %r12
 	subq	$STACK_SIZE, %rsp	# allocate stack space
 	and	$~15, %rsp		# align stack pointer
 
@@ -452,13 +452,12 @@ loop2:
 
 done_hash:
 
-	mov	%r12, %rsp
-
-	popq	%r12
+	mov	%rbp, %rsp
+	popq	%rbp
 	popq	%r15
 	popq	%r14
 	popq	%r13
-	popq	%rbp
+	popq	%r12
 	popq	%rbx
 	ret
 ENDPROC(sha256_transform_avx)