author     Eric Biggers <ebiggers@google.com>        2025-01-05 22:21:10 +0300
committer  Herbert Xu <herbert@gondor.apana.org.au>  2025-01-14 06:38:32 +0300
commit     ee3c9c7e27a68c66490c65141a0c9c5b1fd87695 (patch)
tree       7190f3abafd910c63a05130259454ae70db0bc6d /arch/powerpc
parent     1742b0a0e4668763066fab5fa76d071c1ac92648 (diff)
download   linux-ee3c9c7e27a68c66490c65141a0c9c5b1fd87695.tar.xz
crypto: powerpc/p10-aes-gcm - simplify handling of linear associated data
p10_aes_gcm_crypt() is abusing the scatter_walk API to get the virtual
address of the first source scatterlist element. But this code is only
built for PPC64, which is a !HIGHMEM platform, and it can read past a
page boundary from the address returned by scatterwalk_map(), which
means it already assumes the address comes from the kernel's direct
map. Thus, just use sg_virt() instead to get the same result in a
simpler way.
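As an aside, here is a minimal sketch of why the two paths agree on a !HIGHMEM configuration. first_element_virt() is a hypothetical helper written for this note, not code from the patch; it only spells out what mapping the first scatterlist element versus calling sg_virt() boils down to:

#include <linux/bug.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

/*
 * Hypothetical helper, for illustration only: on a !HIGHMEM platform such
 * as PPC64, kmap_local_page() resolves to page_address(), so mapping the
 * first scatterlist element yields the same pointer that sg_virt() computes
 * directly from the element's page and offset, and nothing needs to be
 * unmapped afterwards.
 */
static void *first_element_virt(struct scatterlist *sg)
{
	void *mapped = kmap_local_page(sg_page(sg)) + sg->offset;

	/* sg_virt(sg) is page_address(sg_page(sg)) + sg->offset */
	WARN_ON(mapped != sg_virt(sg));
	kunmap_local(mapped);

	return sg_virt(sg);
}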
Cc: Christophe Leroy <christophe.leroy@csgroup.eu>
Cc: Danny Tsen <dtsen@linux.ibm.com>
Cc: Michael Ellerman <mpe@ellerman.id.au>
Cc: Naveen N Rao <naveen@kernel.org>
Cc: Nicholas Piggin <npiggin@gmail.com>
Cc: linuxppc-dev@lists.ozlabs.org
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch/powerpc')
-rw-r--r--  arch/powerpc/crypto/aes-gcm-p10-glue.c | 9
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/arch/powerpc/crypto/aes-gcm-p10-glue.c b/arch/powerpc/crypto/aes-gcm-p10-glue.c
index f37b3d13fc53..679f52794baf 100644
--- a/arch/powerpc/crypto/aes-gcm-p10-glue.c
+++ b/arch/powerpc/crypto/aes-gcm-p10-glue.c
@@ -214,7 +214,6 @@ static int p10_aes_gcm_crypt(struct aead_request *req, u8 *riv,
 	struct gcm_ctx *gctx = PTR_ALIGN((void *)databuf, PPC_ALIGN);
 	u8 hashbuf[sizeof(struct Hash_ctx) + PPC_ALIGN];
 	struct Hash_ctx *hash = PTR_ALIGN((void *)hashbuf, PPC_ALIGN);
-	struct scatter_walk assoc_sg_walk;
 	struct skcipher_walk walk;
 	u8 *assocmem = NULL;
 	u8 *assoc;
@@ -234,8 +233,7 @@ static int p10_aes_gcm_crypt(struct aead_request *req, u8 *riv,
 
 	/* Linearize assoc, if not already linear */
 	if (req->src->length >= assoclen && req->src->length) {
-		scatterwalk_start(&assoc_sg_walk, req->src);
-		assoc = scatterwalk_map(&assoc_sg_walk);
+		assoc = sg_virt(req->src); /* ppc64 is !HIGHMEM */
 	} else {
 		gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
 			      GFP_KERNEL : GFP_ATOMIC;
@@ -253,10 +251,7 @@ static int p10_aes_gcm_crypt(struct aead_request *req, u8 *riv,
 	gcmp10_init(gctx, iv, (unsigned char *) &ctx->enc_key, hash, assoc, assoclen);
 	vsx_end();
 
-	if (!assocmem)
-		scatterwalk_unmap(assoc);
-	else
-		kfree(assocmem);
+	kfree(assocmem);
 
 	if (enc)
 		ret = skcipher_walk_aead_encrypt(&walk, req, false);
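For convenience, this is roughly how the associated-data handling in p10_aes_gcm_crypt() reads once the hunks above are applied. It is reconstructed only from the diff context shown here, not copied from the tree; the stretches the diff does not show are kept elided:

	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length) {
		assoc = sg_virt(req->src); /* ppc64 is !HIGHMEM */
	} else {
		gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
			      GFP_KERNEL : GFP_ATOMIC;
		/* ... fall back to a heap buffer (elided, unchanged by this patch) ... */
	}

	/* ... */

	gcmp10_init(gctx, iv, (unsigned char *) &ctx->enc_key, hash, assoc, assoclen);
	vsx_end();

	/* No scatterwalk_unmap() to pair up; kfree(NULL) is a harmless no-op. */
	kfree(assocmem);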