summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorArd Biesheuvel <ardb@kernel.org>2025-10-01 14:47:13 +0300
committerArd Biesheuvel <ardb@kernel.org>2025-11-12 11:52:02 +0300
commita6b40844550c73805fe8490bafc0e33ff5922f6c (patch)
treeddf16a912f267fb2d532ece98ab65811c8f9018e
parent931ceb5785755d82d80bb923c22ca08128af7721 (diff)
downloadlinux-a6b40844550c73805fe8490bafc0e33ff5922f6c.tar.xz
crypto/arm64: sha3 - Switch to 'ksimd' scoped guard API
Switch to the more abstract 'scoped_ksimd()' API, which will be modified in a future patch to transparently allocate a kernel mode FP/SIMD state buffer on the stack, so that kernel mode FP/SIMD code remains preemptible in principle, but without the memory overhead that adds 528 bytes to the size of struct task_struct. Reviewed-by: Eric Biggers <ebiggers@kernel.org> Reviewed-by: Jonathan Cameron <jonathan.cameron@huawei.com> Acked-by: Catalin Marinas <catalin.marinas@arm.com> Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
-rw-r--r--arch/arm64/crypto/sha3-ce-glue.c10
1 files changed, 4 insertions, 6 deletions
diff --git a/arch/arm64/crypto/sha3-ce-glue.c b/arch/arm64/crypto/sha3-ce-glue.c
index b4f1001046c9..22732760edd3 100644
--- a/arch/arm64/crypto/sha3-ce-glue.c
+++ b/arch/arm64/crypto/sha3-ce-glue.c
@@ -46,9 +46,8 @@ static int sha3_update(struct shash_desc *desc, const u8 *data,
do {
int rem;
- kernel_neon_begin();
- rem = sha3_ce_transform(sctx->st, data, blocks, ds);
- kernel_neon_end();
+ scoped_ksimd()
+ rem = sha3_ce_transform(sctx->st, data, blocks, ds);
data += (blocks - rem) * bs;
blocks = rem;
} while (blocks);
@@ -73,9 +72,8 @@ static int sha3_finup(struct shash_desc *desc, const u8 *src, unsigned int len,
memset(block + len, 0, bs - len);
block[bs - 1] |= 0x80;
- kernel_neon_begin();
- sha3_ce_transform(sctx->st, block, 1, ds);
- kernel_neon_end();
+ scoped_ksimd()
+ sha3_ce_transform(sctx->st, block, 1, ds);
memzero_explicit(block , sizeof(block));
for (i = 0; i < ds / 8; i++)