Diffstat (limited to 'crypto')
-rw-r--r-- | crypto/Makefile             |   2
-rw-r--r-- | crypto/ablkcipher.c         |   3
-rw-r--r-- | crypto/ansi_cprng.c         |   2
-rw-r--r-- | crypto/async_tx/async_xor.c |   2
-rw-r--r-- | crypto/authencesn.c         | 835
-rw-r--r-- | crypto/deflate.c            |   3
-rw-r--r-- | crypto/gf128mul.c           |   2
-rw-r--r-- | crypto/tcrypt.c             |   3
-rw-r--r-- | crypto/testmgr.c            |   2
-rw-r--r-- | crypto/testmgr.h            |  30
-rw-r--r-- | crypto/vmac.c               |   2
-rw-r--r-- | crypto/xts.c                |   2
-rw-r--r-- | crypto/zlib.c               |  18
13 files changed, 888 insertions, 18 deletions
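
The bulk of this series is the new "authencesn" template added below. For orientation, here is a minimal, hypothetical sketch (not part of the patch) of how the resulting AEAD could be instantiated from kernel code; the instance name follows the "authencesn(auth,enc)" cra_name format built in crypto_authenc_esn_alloc(), and keys use the rtattr-based authenc layout parsed by crypto_authenc_esn_setkey():

	/* Hypothetical usage sketch -- not part of the patch. */
	#include <linux/crypto.h>
	#include <linux/err.h>

	static int authencesn_demo(void)
	{
		struct crypto_aead *tfm;

		/* hmac(sha1) as the ahash spawn, cbc(aes) as the skcipher spawn */
		tfm = crypto_alloc_aead("authencesn(hmac(sha1),cbc(aes))", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/*
		 * crypto_aead_setkey() must receive the authenc key blob:
		 * a CRYPTO_AUTHENC_KEYA_PARAM rtattr carrying enckeylen,
		 * followed by the authentication key and the encryption key,
		 * exactly as crypto_authenc_esn_setkey() parses it.
		 */

		crypto_free_aead(tfm);
		return 0;
	}
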
diff --git a/crypto/Makefile b/crypto/Makefile
index e9a399ca69db..ce5a813d3639 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -78,7 +78,7 @@ obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o
 obj-$(CONFIG_CRYPTO_ZLIB) += zlib.o
 obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
 obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
-obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o
+obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o
 obj-$(CONFIG_CRYPTO_LZO) += lzo.o
 obj-$(CONFIG_CRYPTO_RNG2) += rng.o
 obj-$(CONFIG_CRYPTO_RNG2) += krng.o
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index a854df2a5a4b..fdc67d38660b 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -141,8 +141,7 @@ err:
 
 	if (walk->iv != req->info)
 		memcpy(req->info, walk->iv, tfm->crt_ablkcipher.ivsize);
-	if (walk->iv_buffer)
-		kfree(walk->iv_buffer);
+	kfree(walk->iv_buffer);
 
 	return err;
 }
diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c
index 2bc332142849..ffa0245e2abc 100644
--- a/crypto/ansi_cprng.c
+++ b/crypto/ansi_cprng.c
@@ -83,7 +83,7 @@ static void xor_vectors(unsigned char *in1, unsigned char *in2,
 }
 /*
  * Returns DEFAULT_BLK_SZ bytes of random data per call
- * returns 0 if generation succeded, <0 if something went wrong
+ * returns 0 if generation succeeded, <0 if something went wrong
  */
 static int _get_more_prng_bytes(struct prng_context *ctx, int cont_test)
 {
diff --git a/crypto/async_tx/async_xor.c b/crypto/async_tx/async_xor.c
index 079ae8ca590b..bc28337fded2 100644
--- a/crypto/async_tx/async_xor.c
+++ b/crypto/async_tx/async_xor.c
@@ -94,7 +94,7 @@ do_async_xor(struct dma_chan *chan, struct page *dest, struct page **src_list,
 	if (unlikely(!tx))
 		async_tx_quiesce(&submit->depend_tx);
 
-	/* spin wait for the preceeding transactions to complete */
+	/* spin wait for the preceding transactions to complete */
 	while (unlikely(!tx)) {
 		dma_async_issue_pending(chan);
 		tx = dma->device_prep_dma_xor(chan, dma_dest,
diff --git a/crypto/authencesn.c b/crypto/authencesn.c
new file mode 100644
index 000000000000..136b68b9d8d4
--- /dev/null
+++ b/crypto/authencesn.c
@@ -0,0 +1,835 @@
+/*
+ * authencesn.c - AEAD wrapper for IPsec with extended sequence numbers,
+ *                derived from authenc.c
+ *
+ * Copyright (C) 2010 secunet Security Networks AG
+ * Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/aead.h>
+#include <crypto/internal/hash.h>
+#include <crypto/internal/skcipher.h>
+#include <crypto/authenc.h>
+#include <crypto/scatterwalk.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/rtnetlink.h>
+#include <linux/slab.h>
+#include <linux/spinlock.h>
+
+struct authenc_esn_instance_ctx {
+	struct crypto_ahash_spawn auth;
+	struct crypto_skcipher_spawn enc;
+};
+
+struct crypto_authenc_esn_ctx {
+	unsigned int reqoff;
+	struct crypto_ahash *auth;
+	struct crypto_ablkcipher *enc;
+};
+
+struct authenc_esn_request_ctx {
+	unsigned int cryptlen;
+	unsigned int headlen;
+	unsigned int trailen;
+	struct scatterlist *sg;
+	struct scatterlist hsg[2];
+	struct scatterlist tsg[1];
+	struct scatterlist cipher[2];
+	crypto_completion_t complete;
+	crypto_completion_t update_complete;
+	crypto_completion_t update_complete2;
+	char tail[];
+};
+
+static void authenc_esn_request_complete(struct aead_request *req, int err)
+{
+	if (err != -EINPROGRESS)
+		aead_request_complete(req, err);
+}
+
+static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
+				     unsigned int keylen)
+{
+	unsigned int authkeylen;
+	unsigned int enckeylen;
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct crypto_ahash *auth = ctx->auth;
+	struct crypto_ablkcipher *enc = ctx->enc;
+	struct rtattr *rta = (void *)key;
+	struct crypto_authenc_key_param *param;
+	int err = -EINVAL;
+
+	if (!RTA_OK(rta, keylen))
+		goto badkey;
+	if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM)
+		goto badkey;
+	if (RTA_PAYLOAD(rta) < sizeof(*param))
+		goto badkey;
+
+	param = RTA_DATA(rta);
+	enckeylen = be32_to_cpu(param->enckeylen);
+
+	key += RTA_ALIGN(rta->rta_len);
+	keylen -= RTA_ALIGN(rta->rta_len);
+
+	if (keylen < enckeylen)
+		goto badkey;
+
+	authkeylen = keylen - enckeylen;
+
+	crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
+	crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
+				     CRYPTO_TFM_REQ_MASK);
+	err = crypto_ahash_setkey(auth, key, authkeylen);
+	crypto_aead_set_flags(authenc_esn, crypto_ahash_get_flags(auth) &
+					   CRYPTO_TFM_RES_MASK);
+
+	if (err)
+		goto out;
+
+	crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
+	crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
+					 CRYPTO_TFM_REQ_MASK);
+	err = crypto_ablkcipher_setkey(enc, key + authkeylen, enckeylen);
+	crypto_aead_set_flags(authenc_esn, crypto_ablkcipher_get_flags(enc) &
+					   CRYPTO_TFM_RES_MASK);
+
+out:
+	return err;
+
+badkey:
+	crypto_aead_set_flags(authenc_esn, CRYPTO_TFM_RES_BAD_KEY_LEN);
+	goto out;
+}
+
+static void authenc_esn_geniv_ahash_update_done(struct crypto_async_request *areq,
+						int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+
+	if (err)
+		goto out;
+
+	ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
+				areq_ctx->cryptlen);
+	ahash_request_set_callback(ahreq, aead_request_flags(req) &
+					  CRYPTO_TFM_REQ_MAY_SLEEP,
+				   areq_ctx->update_complete2, req);
+
+	err = crypto_ahash_update(ahreq);
+	if (err)
+		goto out;
+
+	ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result,
+				areq_ctx->trailen);
+	ahash_request_set_callback(ahreq, aead_request_flags(req) &
+					  CRYPTO_TFM_REQ_MAY_SLEEP,
+				   areq_ctx->complete, req);
+
+	err = crypto_ahash_finup(ahreq);
+	if (err)
+		goto out;
+
+	scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
+				 areq_ctx->cryptlen,
+				 crypto_aead_authsize(authenc_esn), 1);
+
+out:
+	authenc_esn_request_complete(req, err);
+}
+
+static void authenc_esn_geniv_ahash_update_done2(struct crypto_async_request *areq,
+						 int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+
+	if (err)
+		goto out;
+
+	ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result,
+				areq_ctx->trailen);
+	ahash_request_set_callback(ahreq, aead_request_flags(req) &
+					  CRYPTO_TFM_REQ_MAY_SLEEP,
+				   areq_ctx->complete, req);
+
+	err = crypto_ahash_finup(ahreq);
+	if (err)
+		goto out;
+
+	scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
+				 areq_ctx->cryptlen,
+				 crypto_aead_authsize(authenc_esn), 1);
+
+out:
+	authenc_esn_request_complete(req, err);
+}
+
+
+static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq,
+					 int err)
+{
+	struct aead_request *req = areq->data;
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+
+	if (err)
+		goto out;
+
+	scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg,
+				 areq_ctx->cryptlen,
+				 crypto_aead_authsize(authenc_esn), 1);
+
+out:
+	aead_request_complete(req, err);
+}
+
+
+static void authenc_esn_verify_ahash_update_done(struct crypto_async_request *areq,
+						 int err)
+{
+	u8 *ihash;
+	unsigned int authsize;
+	struct ablkcipher_request *abreq;
+	struct aead_request *req = areq->data;
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+	unsigned int cryptlen = req->cryptlen;
+
+	if (err)
+		goto out;
+
+	ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result,
+				areq_ctx->cryptlen);
+
+	ahash_request_set_callback(ahreq,
+				   aead_request_flags(req) &
+				   CRYPTO_TFM_REQ_MAY_SLEEP,
+				   areq_ctx->update_complete2, req);
+
+	err = crypto_ahash_update(ahreq);
+	if (err)
+		goto out;
+
+	ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result,
+				areq_ctx->trailen);
+	ahash_request_set_callback(ahreq, aead_request_flags(req) &
+					  CRYPTO_TFM_REQ_MAY_SLEEP,
+				   areq_ctx->complete, req);
+
+	err = crypto_ahash_finup(ahreq);
+	if (err)
+		goto out;
+
+	authsize = crypto_aead_authsize(authenc_esn);
+	cryptlen -= authsize;
+	ihash = ahreq->result + authsize;
+	scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+				 authsize, 0);
+
+	err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+	if (err)
+		goto out;
+
+	abreq = aead_request_ctx(req);
+	ablkcipher_request_set_tfm(abreq, ctx->enc);
+	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+					req->base.complete, req->base.data);
+	ablkcipher_request_set_crypt(abreq, req->src, req->dst,
+				     cryptlen, req->iv);
+
+	err = crypto_ablkcipher_decrypt(abreq);
+
+out:
+	authenc_esn_request_complete(req, err);
+}
+
+static void authenc_esn_verify_ahash_update_done2(struct crypto_async_request *areq,
+						  int err)
+{
+	u8 *ihash;
+	unsigned int authsize;
+	struct ablkcipher_request *abreq;
+	struct aead_request *req = areq->data;
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+	unsigned int cryptlen = req->cryptlen;
+
+	if (err)
+		goto out;
+
+	ahash_request_set_crypt(ahreq, areq_ctx->tsg, ahreq->result,
+				areq_ctx->trailen);
+	ahash_request_set_callback(ahreq, aead_request_flags(req) &
+					  CRYPTO_TFM_REQ_MAY_SLEEP,
+				   areq_ctx->complete, req);
+
+	err = crypto_ahash_finup(ahreq);
+	if (err)
+		goto out;
+
+	authsize = crypto_aead_authsize(authenc_esn);
+	cryptlen -= authsize;
+	ihash = ahreq->result + authsize;
+	scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+				 authsize, 0);
+
+	err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+	if (err)
+		goto out;
+
+	abreq = aead_request_ctx(req);
+	ablkcipher_request_set_tfm(abreq, ctx->enc);
+	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+					req->base.complete, req->base.data);
+	ablkcipher_request_set_crypt(abreq, req->src, req->dst,
+				     cryptlen, req->iv);
+
+	err = crypto_ablkcipher_decrypt(abreq);
+
+out:
+	authenc_esn_request_complete(req, err);
+}
+
+
+static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq,
+					  int err)
+{
+	u8 *ihash;
+	unsigned int authsize;
+	struct ablkcipher_request *abreq;
+	struct aead_request *req = areq->data;
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+	unsigned int cryptlen = req->cryptlen;
+
+	if (err)
+		goto out;
+
+	authsize = crypto_aead_authsize(authenc_esn);
+	cryptlen -= authsize;
+	ihash = ahreq->result + authsize;
+	scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+				 authsize, 0);
+
+	err = memcmp(ihash, ahreq->result, authsize) ? -EBADMSG : 0;
+	if (err)
+		goto out;
+
+	abreq = aead_request_ctx(req);
+	ablkcipher_request_set_tfm(abreq, ctx->enc);
+	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+					req->base.complete, req->base.data);
+	ablkcipher_request_set_crypt(abreq, req->src, req->dst,
+				     cryptlen, req->iv);
+
+	err = crypto_ablkcipher_decrypt(abreq);
+
+out:
+	authenc_esn_request_complete(req, err);
+}
+
+static u8 *crypto_authenc_esn_ahash(struct aead_request *req,
+				    unsigned int flags)
+{
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct crypto_ahash *auth = ctx->auth;
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
+	u8 *hash = areq_ctx->tail;
+	int err;
+
+	hash = (u8 *)ALIGN((unsigned long)hash + crypto_ahash_alignmask(auth),
+			   crypto_ahash_alignmask(auth) + 1);
+
+	ahash_request_set_tfm(ahreq, auth);
+
+	err = crypto_ahash_init(ahreq);
+	if (err)
+		return ERR_PTR(err);
+
+	ahash_request_set_crypt(ahreq, areq_ctx->hsg, hash, areq_ctx->headlen);
+	ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+				   areq_ctx->update_complete, req);
+
+	err = crypto_ahash_update(ahreq);
+	if (err)
+		return ERR_PTR(err);
+
+	ahash_request_set_crypt(ahreq, areq_ctx->sg, hash, areq_ctx->cryptlen);
+	ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+				   areq_ctx->update_complete2, req);
+
+	err = crypto_ahash_update(ahreq);
+	if (err)
+		return ERR_PTR(err);
+
+	ahash_request_set_crypt(ahreq, areq_ctx->tsg, hash,
+				areq_ctx->trailen);
+	ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
+				   areq_ctx->complete, req);
+
+	err = crypto_ahash_finup(ahreq);
+	if (err)
+		return ERR_PTR(err);
+
+	return hash;
+}
+
+static int crypto_authenc_esn_genicv(struct aead_request *req, u8 *iv,
+				     unsigned int flags)
+{
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct scatterlist *dst = req->dst;
+	struct scatterlist *assoc = req->assoc;
+	struct scatterlist *cipher = areq_ctx->cipher;
+	struct scatterlist *hsg = areq_ctx->hsg;
+	struct scatterlist *tsg = areq_ctx->tsg;
+	struct scatterlist *assoc1;
+	struct scatterlist *assoc2;
+	unsigned int ivsize = crypto_aead_ivsize(authenc_esn);
+	unsigned int cryptlen = req->cryptlen;
+	struct page *dstp;
+	u8 *vdst;
+	u8 *hash;
+
+	dstp = sg_page(dst);
+	vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + dst->offset;
+
+	if (ivsize) {
+		sg_init_table(cipher, 2);
+		sg_set_buf(cipher, iv, ivsize);
+		scatterwalk_crypto_chain(cipher, dst, vdst == iv + ivsize, 2);
+		dst = cipher;
+		cryptlen += ivsize;
+	}
+
+	if (sg_is_last(assoc))
+		return -EINVAL;
+
+	assoc1 = assoc + 1;
+	if (sg_is_last(assoc1))
+		return -EINVAL;
+
+	assoc2 = assoc + 2;
+	if (!sg_is_last(assoc2))
+		return -EINVAL;
+
+	sg_init_table(hsg, 2);
+	sg_set_page(hsg, sg_page(assoc), assoc->length, assoc->offset);
+	sg_set_page(hsg + 1, sg_page(assoc2), assoc2->length, assoc2->offset);
+
+	sg_init_table(tsg, 1);
+	sg_set_page(tsg, sg_page(assoc1), assoc1->length, assoc1->offset);
+
+	areq_ctx->cryptlen = cryptlen;
+	areq_ctx->headlen = assoc->length + assoc2->length;
+	areq_ctx->trailen = assoc1->length;
+	areq_ctx->sg = dst;
+
+	areq_ctx->complete = authenc_esn_geniv_ahash_done;
+	areq_ctx->update_complete = authenc_esn_geniv_ahash_update_done;
+	areq_ctx->update_complete2 = authenc_esn_geniv_ahash_update_done2;
+
+	hash = crypto_authenc_esn_ahash(req, flags);
+	if (IS_ERR(hash))
+		return PTR_ERR(hash);
+
+	scatterwalk_map_and_copy(hash, dst, cryptlen,
+				 crypto_aead_authsize(authenc_esn), 1);
+	return 0;
+}
+
+
+static void crypto_authenc_esn_encrypt_done(struct crypto_async_request *req,
+					    int err)
+{
+	struct aead_request *areq = req->data;
+
+	if (!err) {
+		struct crypto_aead *authenc_esn = crypto_aead_reqtfm(areq);
+		struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+		struct ablkcipher_request *abreq = aead_request_ctx(areq);
+		u8 *iv = (u8 *)(abreq + 1) +
+			 crypto_ablkcipher_reqsize(ctx->enc);
+
+		err = crypto_authenc_esn_genicv(areq, iv, 0);
+	}
+
+	authenc_esn_request_complete(areq, err);
+}
+
+static int crypto_authenc_esn_encrypt(struct aead_request *req)
+{
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct crypto_ablkcipher *enc = ctx->enc;
+	struct scatterlist *dst = req->dst;
+	unsigned int cryptlen = req->cryptlen;
+	struct ablkcipher_request *abreq = (void *)(areq_ctx->tail
+						    + ctx->reqoff);
+	u8 *iv = (u8 *)abreq - crypto_ablkcipher_ivsize(enc);
+	int err;
+
+	ablkcipher_request_set_tfm(abreq, enc);
+	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+					crypto_authenc_esn_encrypt_done, req);
+	ablkcipher_request_set_crypt(abreq, req->src, dst, cryptlen, req->iv);
+
+	memcpy(iv, req->iv, crypto_aead_ivsize(authenc_esn));
+
+	err = crypto_ablkcipher_encrypt(abreq);
+	if (err)
+		return err;
+
+	return crypto_authenc_esn_genicv(req, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
+}
+
+static void crypto_authenc_esn_givencrypt_done(struct crypto_async_request *req,
+					       int err)
+{
+	struct aead_request *areq = req->data;
+
+	if (!err) {
+		struct skcipher_givcrypt_request *greq = aead_request_ctx(areq);
+
+		err = crypto_authenc_esn_genicv(areq, greq->giv, 0);
+	}
+
+	authenc_esn_request_complete(areq, err);
+}
+
+static int crypto_authenc_esn_givencrypt(struct aead_givcrypt_request *req)
+{
+	struct crypto_aead *authenc_esn = aead_givcrypt_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct aead_request *areq = &req->areq;
+	struct skcipher_givcrypt_request *greq = aead_request_ctx(areq);
+	u8 *iv = req->giv;
+	int err;
+
+	skcipher_givcrypt_set_tfm(greq, ctx->enc);
+	skcipher_givcrypt_set_callback(greq, aead_request_flags(areq),
+				       crypto_authenc_esn_givencrypt_done, areq);
+	skcipher_givcrypt_set_crypt(greq, areq->src, areq->dst, areq->cryptlen,
+				    areq->iv);
+	skcipher_givcrypt_set_giv(greq, iv, req->seq);
+
+	err = crypto_skcipher_givencrypt(greq);
+	if (err)
+		return err;
+
+	return crypto_authenc_esn_genicv(areq, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
+}
+
+static int crypto_authenc_esn_verify(struct aead_request *req)
+{
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	u8 *ohash;
+	u8 *ihash;
+	unsigned int authsize;
+
+	areq_ctx->complete = authenc_esn_verify_ahash_done;
+	areq_ctx->update_complete = authenc_esn_verify_ahash_update_done;
+
+	ohash = crypto_authenc_esn_ahash(req, CRYPTO_TFM_REQ_MAY_SLEEP);
+	if (IS_ERR(ohash))
+		return PTR_ERR(ohash);
+
+	authsize = crypto_aead_authsize(authenc_esn);
+	ihash = ohash + authsize;
+	scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen,
+				 authsize, 0);
+	return memcmp(ihash, ohash, authsize) ? -EBADMSG : 0;
+}
+
+static int crypto_authenc_esn_iverify(struct aead_request *req, u8 *iv,
+				      unsigned int cryptlen)
+{
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
+	struct scatterlist *src = req->src;
+	struct scatterlist *assoc = req->assoc;
+	struct scatterlist *cipher = areq_ctx->cipher;
+	struct scatterlist *hsg = areq_ctx->hsg;
+	struct scatterlist *tsg = areq_ctx->tsg;
+	struct scatterlist *assoc1;
+	struct scatterlist *assoc2;
+	unsigned int ivsize = crypto_aead_ivsize(authenc_esn);
+	struct page *srcp;
+	u8 *vsrc;
+
+	srcp = sg_page(src);
+	vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + src->offset;
+
+	if (ivsize) {
+		sg_init_table(cipher, 2);
+		sg_set_buf(cipher, iv, ivsize);
+		scatterwalk_crypto_chain(cipher, src, vsrc == iv + ivsize, 2);
+		src = cipher;
+		cryptlen += ivsize;
+	}
+
+	if (sg_is_last(assoc))
+		return -EINVAL;
+
+	assoc1 = assoc + 1;
+	if (sg_is_last(assoc1))
+		return -EINVAL;
+
+	assoc2 = assoc + 2;
+	if (!sg_is_last(assoc2))
+		return -EINVAL;
+
+	sg_init_table(hsg, 2);
+	sg_set_page(hsg, sg_page(assoc), assoc->length, assoc->offset);
+	sg_set_page(hsg + 1, sg_page(assoc2), assoc2->length, assoc2->offset);
+
+	sg_init_table(tsg, 1);
+	sg_set_page(tsg, sg_page(assoc1), assoc1->length, assoc1->offset);
+
+	areq_ctx->cryptlen = cryptlen;
+	areq_ctx->headlen = assoc->length + assoc2->length;
+	areq_ctx->trailen = assoc1->length;
+	areq_ctx->sg = src;
+
+	areq_ctx->complete = authenc_esn_verify_ahash_done;
+	areq_ctx->update_complete = authenc_esn_verify_ahash_update_done;
+	areq_ctx->update_complete2 = authenc_esn_verify_ahash_update_done2;
+
+	return crypto_authenc_esn_verify(req);
+}
+
+static int crypto_authenc_esn_decrypt(struct aead_request *req)
+{
+	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
+	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
+	struct ablkcipher_request *abreq = aead_request_ctx(req);
+	unsigned int cryptlen = req->cryptlen;
+	unsigned int authsize = crypto_aead_authsize(authenc_esn);
+	u8 *iv = req->iv;
+	int err;
+
+	if (cryptlen < authsize)
+		return -EINVAL;
+	cryptlen -= authsize;
+
+	err = crypto_authenc_esn_iverify(req, iv, cryptlen);
+	if (err)
+		return err;
+
+	ablkcipher_request_set_tfm(abreq, ctx->enc);
+	ablkcipher_request_set_callback(abreq, aead_request_flags(req),
+					req->base.complete, req->base.data);
+	ablkcipher_request_set_crypt(abreq, req->src, req->dst, cryptlen, iv);
+
+	return crypto_ablkcipher_decrypt(abreq);
+}
+
+static int crypto_authenc_esn_init_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
+	struct authenc_esn_instance_ctx *ictx = crypto_instance_ctx(inst);
+	struct crypto_authenc_esn_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct crypto_ahash *auth;
+	struct crypto_ablkcipher *enc;
+	int err;
+
+	auth = crypto_spawn_ahash(&ictx->auth);
+	if (IS_ERR(auth))
+		return PTR_ERR(auth);
+
+	enc = crypto_spawn_skcipher(&ictx->enc);
+	err = PTR_ERR(enc);
+	if (IS_ERR(enc))
+		goto err_free_ahash;
+
+	ctx->auth = auth;
+	ctx->enc = enc;
+
+	ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth) +
+			    crypto_ahash_alignmask(auth),
+			    crypto_ahash_alignmask(auth) + 1) +
+		      crypto_ablkcipher_ivsize(enc);
+
+	tfm->crt_aead.reqsize = sizeof(struct authenc_esn_request_ctx) +
+				ctx->reqoff +
+				max_t(unsigned int,
+				      crypto_ahash_reqsize(auth) +
+				      sizeof(struct ahash_request),
+				      sizeof(struct skcipher_givcrypt_request) +
+				      crypto_ablkcipher_reqsize(enc));
+
+	return 0;
+
+err_free_ahash:
+	crypto_free_ahash(auth);
+	return err;
+}
+
+static void crypto_authenc_esn_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_authenc_esn_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_ahash(ctx->auth);
+	crypto_free_ablkcipher(ctx->enc);
+}
+
+static struct crypto_instance *crypto_authenc_esn_alloc(struct rtattr **tb)
+{
+	struct crypto_attr_type *algt;
+	struct crypto_instance *inst;
+	struct hash_alg_common *auth;
+	struct crypto_alg *auth_base;
+	struct crypto_alg *enc;
+	struct authenc_esn_instance_ctx *ctx;
+	const char *enc_name;
+	int err;
+
+	algt = crypto_get_attr_type(tb);
+	err = PTR_ERR(algt);
+	if (IS_ERR(algt))
+		return ERR_PTR(err);
+
+	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
+		return ERR_PTR(-EINVAL);
+
+	auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
+			      CRYPTO_ALG_TYPE_AHASH_MASK);
+	if (IS_ERR(auth))
+		return ERR_CAST(auth);
+
+	auth_base = &auth->base;
+
+	enc_name = crypto_attr_alg_name(tb[2]);
+	err = PTR_ERR(enc_name);
+	if (IS_ERR(enc_name))
+		goto out_put_auth;
+
+	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+	err = -ENOMEM;
+	if (!inst)
+		goto out_put_auth;
+
+	ctx = crypto_instance_ctx(inst);
+
+	err = crypto_init_ahash_spawn(&ctx->auth, auth, inst);
+	if (err)
+		goto err_free_inst;
+
+	crypto_set_skcipher_spawn(&ctx->enc, inst);
+	err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
+				   crypto_requires_sync(algt->type,
+							algt->mask));
+	if (err)
+		goto err_drop_auth;
+
+	enc = crypto_skcipher_spawn_alg(&ctx->enc);
+
+	err = -ENAMETOOLONG;
+	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
+		     "authencesn(%s,%s)", auth_base->cra_name, enc->cra_name) >=
+	    CRYPTO_MAX_ALG_NAME)
+		goto err_drop_enc;
+
+	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
+		     "authencesn(%s,%s)", auth_base->cra_driver_name,
+		     enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
+		goto err_drop_enc;
+
+	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
+	inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC;
+	inst->alg.cra_priority = enc->cra_priority *
+				 10 + auth_base->cra_priority;
+	inst->alg.cra_blocksize = enc->cra_blocksize;
+	inst->alg.cra_alignmask = auth_base->cra_alignmask | enc->cra_alignmask;
+	inst->alg.cra_type = &crypto_aead_type;
+
+	inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
+	inst->alg.cra_aead.maxauthsize = auth->digestsize;
+
+	inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx);
+
+	inst->alg.cra_init = crypto_authenc_esn_init_tfm;
+	inst->alg.cra_exit = crypto_authenc_esn_exit_tfm;
+
+	inst->alg.cra_aead.setkey = crypto_authenc_esn_setkey;
+	inst->alg.cra_aead.encrypt = crypto_authenc_esn_encrypt;
+	inst->alg.cra_aead.decrypt = crypto_authenc_esn_decrypt;
+	inst->alg.cra_aead.givencrypt = crypto_authenc_esn_givencrypt;
+
+out:
+	crypto_mod_put(auth_base);
+	return inst;
+
+err_drop_enc:
+	crypto_drop_skcipher(&ctx->enc);
+err_drop_auth:
+	crypto_drop_ahash(&ctx->auth);
+err_free_inst:
+	kfree(inst);
+out_put_auth:
+	inst = ERR_PTR(err);
+	goto out;
+}
+
+static void crypto_authenc_esn_free(struct crypto_instance *inst)
+{
+	struct authenc_esn_instance_ctx *ctx = crypto_instance_ctx(inst);
+
+	crypto_drop_skcipher(&ctx->enc);
+	crypto_drop_ahash(&ctx->auth);
+	kfree(inst);
+}
+
+static struct crypto_template crypto_authenc_esn_tmpl = {
+	.name = "authencesn",
+	.alloc = crypto_authenc_esn_alloc,
+	.free = crypto_authenc_esn_free,
+	.module = THIS_MODULE,
+};
+
+static int __init crypto_authenc_esn_module_init(void)
+{
+	return crypto_register_template(&crypto_authenc_esn_tmpl);
+}
+
+static void __exit crypto_authenc_esn_module_exit(void)
+{
+	crypto_unregister_template(&crypto_authenc_esn_tmpl);
+}
+
+module_init(crypto_authenc_esn_module_init);
+module_exit(crypto_authenc_esn_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
+MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers");
diff --git a/crypto/deflate.c b/crypto/deflate.c
index cbc7a33a9600..b5ccae29be74 100644
--- a/crypto/deflate.c
+++ b/crypto/deflate.c
@@ -48,7 +48,8 @@ static int deflate_comp_init(struct deflate_ctx *ctx)
 	int ret = 0;
 	struct z_stream_s *stream = &ctx->comp_stream;
 
-	stream->workspace = vzalloc(zlib_deflate_workspacesize());
+	stream->workspace = vzalloc(zlib_deflate_workspacesize(
+				    -DEFLATE_DEF_WINBITS, DEFLATE_DEF_MEMLEVEL));
 	if (!stream->workspace) {
 		ret = -ENOMEM;
 		goto out;
diff --git a/crypto/gf128mul.c b/crypto/gf128mul.c
index a90d260528d4..df35e4ccd07e 100644
--- a/crypto/gf128mul.c
+++ b/crypto/gf128mul.c
@@ -89,7 +89,7 @@
 }
 
 /*  Given the value i in 0..255 as the byte overflow when a field element
-    in GHASH is multipled by x^8, this function will return the values that
+    in GHASH is multiplied by x^8, this function will return the values that
     are generated in the lo 16-bit word of the field value by applying the
     modular polynomial. The values lo_byte and hi_byte are returned via the
     macro xp_fun(lo_byte, hi_byte) so that the values can be assembled into
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 9aac5e58be94..e912ea5def3d 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -146,7 +146,8 @@ static void test_cipher_speed(const char *algo, int enc, unsigned int sec,
 			      unsigned int tcount, u8 *keysize)
 {
 	unsigned int ret, i, j, iv_len;
-	const char *key, iv[128];
+	const char *key;
+	char iv[128];
 	struct crypto_blkcipher *tfm;
 	struct blkcipher_desc desc;
 	const char *e;
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index 27ea9fe9476f..2854865f2434 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -2077,6 +2077,7 @@ static const struct alg_test_desc alg_test_descs[] = {
 	}, {
 		.alg = "ghash",
 		.test = alg_test_hash,
+		.fips_allowed = 1,
 		.suite = {
 			.hash = {
 				.vecs = ghash_tv_template,
@@ -2453,6 +2454,7 @@ static const struct alg_test_desc alg_test_descs[] = {
 	}, {
 		.alg = "xts(aes)",
 		.test = alg_test_skcipher,
+		.fips_allowed = 1,
 		.suite = {
 			.cipher = {
 				.enc = {
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index 834af7f2adee..aa6dac05f843 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -451,8 +451,9 @@ static struct hash_testvec rmd320_tv_template[] = {
 
 /*
  * SHA1 test vectors from from FIPS PUB 180-1
+ * Long vector from CAVS 5.0
  */
-#define SHA1_TEST_VECTORS	2
+#define SHA1_TEST_VECTORS	3
 
 static struct hash_testvec sha1_tv_template[] = {
 	{
@@ -467,6 +468,33 @@ static struct hash_testvec sha1_tv_template[] = {
 			  "\x4a\xa1\xf9\x51\x29\xe5\xe5\x46\x70\xf1",
 		.np	= 2,
 		.tap	= { 28, 28 }
+	}, {
+		.plaintext = "\xec\x29\x56\x12\x44\xed\xe7\x06"
+			     "\xb6\xeb\x30\xa1\xc3\x71\xd7\x44"
+			     "\x50\xa1\x05\xc3\xf9\x73\x5f\x7f"
+			     "\xa9\xfe\x38\xcf\x67\xf3\x04\xa5"
+			     "\x73\x6a\x10\x6e\x92\xe1\x71\x39"
+			     "\xa6\x81\x3b\x1c\x81\xa4\xf3\xd3"
+			     "\xfb\x95\x46\xab\x42\x96\xfa\x9f"
+			     "\x72\x28\x26\xc0\x66\x86\x9e\xda"
+			     "\xcd\x73\xb2\x54\x80\x35\x18\x58"
+			     "\x13\xe2\x26\x34\xa9\xda\x44\x00"
+			     "\x0d\x95\xa2\x81\xff\x9f\x26\x4e"
+			     "\xcc\xe0\xa9\x31\x22\x21\x62\xd0"
+			     "\x21\xcc\xa2\x8d\xb5\xf3\xc2\xaa"
+			     "\x24\x94\x5a\xb1\xe3\x1c\xb4\x13"
+			     "\xae\x29\x81\x0f\xd7\x94\xca\xd5"
+			     "\xdf\xaf\x29\xec\x43\xcb\x38\xd1"
+			     "\x98\xfe\x4a\xe1\xda\x23\x59\x78"
+			     "\x02\x21\x40\x5b\xd6\x71\x2a\x53"
+			     "\x05\xda\x4b\x1b\x73\x7f\xce\x7c"
+			     "\xd2\x1c\x0e\xb7\x72\x8d\x08\x23"
+			     "\x5a\x90\x11",
+		.psize	= 163,
+		.digest	= "\x97\x01\x11\xc4\xe7\x7b\xcc\x88\xcc\x20"
+			  "\x45\x9c\x02\xb6\x9b\x4a\xa8\xf5\x82\x17",
+		.np	= 4,
+		.tap	= { 63, 64, 31, 5 }
 	}
 };
 
diff --git a/crypto/vmac.c b/crypto/vmac.c
index 0999274a27ac..f35ff8a3926e 100644
--- a/crypto/vmac.c
+++ b/crypto/vmac.c
@@ -95,7 +95,7 @@ const u64 mpoly = UINT64_C(0x1fffffff1fffffff);	/* Poly key mask     */
 
 /*
  * For highest performance the L1 NH and L2 polynomial hashes should be
- * carefully implemented to take advantage of one's target architechture.
+ * carefully implemented to take advantage of one's target architecture.
  * Here these two hash functions are defined multiple time; once for
  * 64-bit architectures, once for 32-bit SSE2 architectures, and once
  * for the rest (32-bit) architectures.
diff --git a/crypto/xts.c b/crypto/xts.c
index 555ecaab1e54..851705446c82 100644
--- a/crypto/xts.c
+++ b/crypto/xts.c
@@ -45,7 +45,7 @@ static int setkey(struct crypto_tfm *parent, const u8 *key,
 		return -EINVAL;
 	}
 
-	/* we need two cipher instances: one to compute the inital 'tweak'
+	/* we need two cipher instances: one to compute the initial 'tweak'
 	 * by encrypting the IV (usually the 'plain' iv) and the other
 	 * one to encrypt and decrypt the data */
 
diff --git a/crypto/zlib.c b/crypto/zlib.c
index 739b8fca4cea..d11d761a5e41 100644
--- a/crypto/zlib.c
+++ b/crypto/zlib.c
@@ -85,6 +85,7 @@ static int zlib_compress_setup(struct crypto_pcomp *tfm, void *params,
 	struct zlib_ctx *ctx = crypto_tfm_ctx(crypto_pcomp_tfm(tfm));
 	struct z_stream_s *stream = &ctx->comp_stream;
 	struct nlattr *tb[ZLIB_COMP_MAX + 1];
+	int window_bits, mem_level;
 	size_t workspacesize;
 	int ret;
 
@@ -94,7 +95,14 @@ static int zlib_compress_setup(struct crypto_pcomp *tfm, void *params,
 
 	zlib_comp_exit(ctx);
 
-	workspacesize = zlib_deflate_workspacesize();
+	window_bits = tb[ZLIB_COMP_WINDOWBITS]
+					? nla_get_u32(tb[ZLIB_COMP_WINDOWBITS])
+					: MAX_WBITS;
+	mem_level = tb[ZLIB_COMP_MEMLEVEL]
+				? nla_get_u32(tb[ZLIB_COMP_MEMLEVEL])
+				: DEF_MEM_LEVEL;
+
+	workspacesize = zlib_deflate_workspacesize(window_bits, mem_level);
 	stream->workspace = vzalloc(workspacesize);
 	if (!stream->workspace)
 		return -ENOMEM;
@@ -106,12 +114,8 @@ static int zlib_compress_setup(struct crypto_pcomp *tfm, void *params,
 			    tb[ZLIB_COMP_METHOD]
 				? nla_get_u32(tb[ZLIB_COMP_METHOD])
 				: Z_DEFLATED,
-			    tb[ZLIB_COMP_WINDOWBITS]
-				? nla_get_u32(tb[ZLIB_COMP_WINDOWBITS])
-				: MAX_WBITS,
-			    tb[ZLIB_COMP_MEMLEVEL]
-				? nla_get_u32(tb[ZLIB_COMP_MEMLEVEL])
-				: DEF_MEM_LEVEL,
+			    window_bits,
+			    mem_level,
 			    tb[ZLIB_COMP_STRATEGY]
 				? nla_get_u32(tb[ZLIB_COMP_STRATEGY])
 				: Z_DEFAULT_STRATEGY);
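
The deflate.c and zlib.c hunks above depend on a zlib_deflate_workspacesize() signature change from the same series: the function now takes the window bits and memory level, letting callers size the workspace for their actual parameters instead of the worst case. A minimal sketch of the new calling convention, assuming the two-argument signature these hunks compile against:

	/* Sketch of the calling convention the deflate.c/zlib.c hunks rely on. */
	#include <linux/zlib.h>
	#include <linux/vmalloc.h>

	static void *alloc_deflate_workspace(int window_bits, int mem_level)
	{
		/* zlib_deflate_workspacesize() took no arguments before this series */
		size_t size = zlib_deflate_workspacesize(window_bits, mem_level);

		return vzalloc(size);	/* caller checks for NULL and vfree()s it */
	}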