author | Linus Walleij <linus.walleij@linaro.org> | 2018-10-01 14:20:45 +0300
committer | Linus Walleij <linus.walleij@linaro.org> | 2018-10-01 14:20:45 +0300
commit | 30aa69e7bd9f7af3574120249eecb3726dcaf737 (patch)
tree | b4a9ec1374ec9aed1022fdb32802c483952e5a97 /arch/arm64/crypto/ghash-ce-glue.c
parent | 63e037bc51b32d414cd07dd700e71359ce27a11f (diff)
parent | 17b57b1883c1285f3d0dc2266e8f79286a7bef38 (diff)
Merge tag 'v4.19-rc6' into devel
This is the 4.19-rc6 release
I needed to merge this in because of extensive conflicts in
the MSM and Intel pin control drivers. I know how to resolve
them, so let's do it like this.
Signed-off-by: Linus Walleij <linus.walleij@linaro.org>
Diffstat (limited to 'arch/arm64/crypto/ghash-ce-glue.c')
-rw-r--r-- | arch/arm64/crypto/ghash-ce-glue.c | 29
1 file changed, 23 insertions(+), 6 deletions(-)
diff --git a/arch/arm64/crypto/ghash-ce-glue.c b/arch/arm64/crypto/ghash-ce-glue.c
index 6e9f33d14930..067d8937d5af 100644
--- a/arch/arm64/crypto/ghash-ce-glue.c
+++ b/arch/arm64/crypto/ghash-ce-glue.c
@@ -417,7 +417,7 @@ static int gcm_encrypt(struct aead_request *req)
 		__aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv, nrounds);
 		put_unaligned_be32(2, iv + GCM_IV_SIZE);
 
-		while (walk.nbytes >= AES_BLOCK_SIZE) {
+		while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) {
 			int blocks = walk.nbytes / AES_BLOCK_SIZE;
 			u8 *dst = walk.dst.virt.addr;
 			u8 *src = walk.src.virt.addr;
@@ -437,11 +437,18 @@ static int gcm_encrypt(struct aead_request *req)
 				NULL);
 
 			err = skcipher_walk_done(&walk,
-						 walk.nbytes % AES_BLOCK_SIZE);
+						 walk.nbytes % (2 * AES_BLOCK_SIZE));
 		}
-		if (walk.nbytes)
+		if (walk.nbytes) {
 			__aes_arm64_encrypt(ctx->aes_key.key_enc, ks, iv,
 					    nrounds);
+			if (walk.nbytes > AES_BLOCK_SIZE) {
+				crypto_inc(iv, AES_BLOCK_SIZE);
+				__aes_arm64_encrypt(ctx->aes_key.key_enc,
+						    ks + AES_BLOCK_SIZE, iv,
+						    nrounds);
+			}
+		}
 	}
 
 	/* handle the tail */
@@ -545,7 +552,7 @@ static int gcm_decrypt(struct aead_request *req)
 		__aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv, nrounds);
 		put_unaligned_be32(2, iv + GCM_IV_SIZE);
 
-		while (walk.nbytes >= AES_BLOCK_SIZE) {
+		while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) {
 			int blocks = walk.nbytes / AES_BLOCK_SIZE;
 			u8 *dst = walk.dst.virt.addr;
 			u8 *src = walk.src.virt.addr;
@@ -564,11 +571,21 @@ static int gcm_decrypt(struct aead_request *req)
 			} while (--blocks > 0);
 
 			err = skcipher_walk_done(&walk,
-						 walk.nbytes % AES_BLOCK_SIZE);
+						 walk.nbytes % (2 * AES_BLOCK_SIZE));
 		}
-		if (walk.nbytes)
+		if (walk.nbytes) {
+			if (walk.nbytes > AES_BLOCK_SIZE) {
+				u8 *iv2 = iv + AES_BLOCK_SIZE;
+
+				memcpy(iv2, iv, AES_BLOCK_SIZE);
+				crypto_inc(iv2, AES_BLOCK_SIZE);
+
+				__aes_arm64_encrypt(ctx->aes_key.key_enc, iv2,
+						    iv2, nrounds);
+			}
 			__aes_arm64_encrypt(ctx->aes_key.key_enc, iv, iv,
 					    nrounds);
+		}
 	}
 
 	/* handle the tail */
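For reference, the hunks above change the non-SIMD bulk loops in gcm_encrypt() and gcm_decrypt() to run only while at least two full AES blocks remain, and then extend the tail handling to prepare keystream for up to two blocks instead of one. Below is a minimal, standalone sketch of the encrypt-side tail logic only, not the kernel code itself: block_encrypt_fn, ctr_inc32() and tail_keystream() are hypothetical names standing in for __aes_arm64_encrypt() and crypto_inc(), and the counter bump is simplified to the low 32 bits of the IV.

```c
#include <stddef.h>
#include <stdint.h>

#define BLOCK_SIZE 16

/* Hypothetical single-block cipher callback, standing in for
 * __aes_arm64_encrypt(key_enc, out, in, nrounds). */
typedef void (*block_encrypt_fn)(const void *key,
				 uint8_t out[BLOCK_SIZE],
				 const uint8_t in[BLOCK_SIZE]);

/* Simplified counter bump: increment the last four bytes of the IV as a
 * big-endian value (stand-in for crypto_inc(iv, AES_BLOCK_SIZE)). */
static void ctr_inc32(uint8_t iv[BLOCK_SIZE])
{
	for (int i = BLOCK_SIZE - 1; i >= BLOCK_SIZE - 4; i--)
		if (++iv[i] != 0)
			break;
}

/*
 * Mirror of the encrypt-side tail handling in the hunk above: since the
 * bulk loop now stops once fewer than two full blocks remain, the tail
 * can be up to two blocks long, so one keystream block is generated
 * unconditionally and a second one only when the tail spills past a
 * single block.
 */
static void tail_keystream(block_encrypt_fn encrypt, const void *key,
			   uint8_t iv[BLOCK_SIZE],
			   uint8_t ks[2 * BLOCK_SIZE], size_t nbytes)
{
	encrypt(key, ks, iv);
	if (nbytes > BLOCK_SIZE) {
		ctr_inc32(iv);
		encrypt(key, ks + BLOCK_SIZE, iv);
	}
}
```

The decrypt-side hunk follows the same pattern, except that the extra counter block is encrypted in place via a second IV copy (iv2) so the original IV is still available for the final block.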