author     Linus Torvalds <torvalds@linux-foundation.org>  2019-09-18 22:11:14 +0300
committer  Linus Torvalds <torvalds@linux-foundation.org>  2019-09-18 22:11:14 +0300
commit     8b53c76533aa4356602aea98f98a2f3b4051464c (patch)
tree       ab10ba58e21501407f8108a6bb9003daa2176962  /arch/arm64/crypto/aes-ce-ccm-glue.c
parent     6cfae0c26b21dce323fe8799b66cf4bc996e3565 (diff)
parent     9575d1a5c0780ea26ff8dd29c94a32be32ce3c85 (diff)
download   linux-8b53c76533aa4356602aea98f98a2f3b4051464c.tar.xz
Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
 "API:
   - Add the ability to abort a skcipher walk.

  Algorithms:
   - Fix XTS to actually do the stealing.
   - Add library helpers for AES and DES for single-block users.
   - Add library helpers for SHA256.
   - Add new DES key verification helper.
   - Add surrounding bits for ESSIV generator.
   - Add accelerations for aegis128.
   - Add test vectors for lzo-rle.

  Drivers:
   - Add i.MX8MQ support to caam.
   - Add gcm/ccm/cfb/ofb aes support in inside-secure.
   - Add ofb/cfb aes support in media-tek.
   - Add HiSilicon ZIP accelerator support.

  Others:
   - Fix potential race condition in padata.
   - Use unbound workqueues in padata"

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (311 commits)
  crypto: caam - Cast to long first before pointer conversion
  crypto: ccree - enable CTS support in AES-XTS
  crypto: inside-secure - Probe transform record cache RAM sizes
  crypto: inside-secure - Base RD fetchcount on actual RD FIFO size
  crypto: inside-secure - Base CD fetchcount on actual CD FIFO size
  crypto: inside-secure - Enable extended algorithms on newer HW
  crypto: inside-secure: Corrected configuration of EIP96_TOKEN_CTRL
  crypto: inside-secure - Add EIP97/EIP197 and endianness detection
  padata: remove cpu_index from the parallel_queue
  padata: unbind parallel jobs from specific CPUs
  padata: use separate workqueues for parallel and serial work
  padata, pcrypt: take CPU hotplug lock internally in padata_alloc_possible
  crypto: pcrypt - remove padata cpumask notifier
  padata: make padata_do_parallel find alternate callback CPU
  workqueue: require CPU hotplug read exclusion for apply_workqueue_attrs
  workqueue: unconfine alloc/apply/free_workqueue_attrs()
  padata: allocate workqueue internally
  arm64: dts: imx8mq: Add CAAM node
  random: Use wait_event_freezable() in add_hwgenerator_randomness()
  crypto: ux500 - Fix COMPILE_TEST warnings
  ...
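The "library helpers for AES ... for single-block users" mentioned above are the <crypto/aes.h> routines that the diff below converts this CCM glue code to. A minimal sketch of a single-block caller, assuming the aes_expandkey()/aes_encrypt() interface introduced by this series; the helper name and error handling here are illustrative, not part of the commit:

    #include <crypto/aes.h>      /* struct crypto_aes_ctx, aes_expandkey(), aes_encrypt() */
    #include <linux/string.h>    /* memzero_explicit() */

    /* Hypothetical single-block user of the generic AES library. */
    static int example_encrypt_block(const u8 *key, unsigned int key_len,
                                     const u8 in[AES_BLOCK_SIZE],
                                     u8 out[AES_BLOCK_SIZE])
    {
            struct crypto_aes_ctx ctx;
            int err;

            err = aes_expandkey(&ctx, key, key_len);  /* also validates key_len */
            if (err)
                    return err;

            aes_encrypt(&ctx, out, in);               /* encrypt one block, no SIMD context needed */

            memzero_explicit(&ctx, sizeof(ctx));      /* scrub the expanded key */
            return 0;
    }

Because the context carries the key length, callers no longer pass a round count explicitly, which is the simplification visible in the hunks below.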
Diffstat (limited to 'arch/arm64/crypto/aes-ce-ccm-glue.c')
-rw-r--r--   arch/arm64/crypto/aes-ce-ccm-glue.c   18
1 file changed, 6 insertions(+), 12 deletions(-)
diff --git a/arch/arm64/crypto/aes-ce-ccm-glue.c b/arch/arm64/crypto/aes-ce-ccm-glue.c
index 827e5473e5de..541cf9165748 100644
--- a/arch/arm64/crypto/aes-ce-ccm-glue.c
+++ b/arch/arm64/crypto/aes-ce-ccm-glue.c
@@ -43,8 +43,6 @@ asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
u32 rounds);
-asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
-
static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
unsigned int key_len)
{
@@ -124,8 +122,7 @@ static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
}
while (abytes >= AES_BLOCK_SIZE) {
- __aes_arm64_encrypt(key->key_enc, mac, mac,
- num_rounds(key));
+ aes_encrypt(key, mac, mac);
crypto_xor(mac, in, AES_BLOCK_SIZE);
in += AES_BLOCK_SIZE;
@@ -133,8 +130,7 @@ static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
}
if (abytes > 0) {
- __aes_arm64_encrypt(key->key_enc, mac, mac,
- num_rounds(key));
+ aes_encrypt(key, mac, mac);
crypto_xor(mac, in, abytes);
*macp = abytes;
}
@@ -206,10 +202,8 @@ static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
bsize = nbytes;
crypto_inc(walk->iv, AES_BLOCK_SIZE);
- __aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
- num_rounds(ctx));
- __aes_arm64_encrypt(ctx->key_enc, mac, mac,
- num_rounds(ctx));
+ aes_encrypt(ctx, buf, walk->iv);
+ aes_encrypt(ctx, mac, mac);
if (enc)
crypto_xor(mac, src, bsize);
crypto_xor_cpy(dst, src, buf, bsize);
@@ -224,8 +218,8 @@ static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
}
if (!err) {
- __aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
- __aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
+ aes_encrypt(ctx, buf, iv0);
+ aes_encrypt(ctx, mac, mac);
crypto_xor(mac, buf, AES_BLOCK_SIZE);
}
return err;
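Taken together, the hunks drop the arm64-private scalar cipher in favour of the generic library call. A minimal before/after sketch of the call-site pattern, using the identifiers from the diff (num_rounds() is the glue code's local helper that computes the round count from the key length):

    /* Before: private scalar fallback; expanded key and round count passed explicitly. */
    __aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));

    /* After: generic AES library helper, which takes the whole context. */
    aes_encrypt(ctx, mac, mac);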