| author | Atsushi Nemoto <anemo@mba.ocn.ne.jp> | 2006-04-10 02:42:35 +0400 |
|---|---|---|
| committer | Herbert Xu <herbert@gondor.apana.org.au> | 2006-06-26 11:34:38 +0400 |
| commit | e1147d8f47eb8fef93f98a30858192145137d2b2 (patch) | |
| tree | 7e1bc0ab3d263e5bd801900195ab310625d9ab59 /crypto/digest.c | |
| parent | d00e708cef16442cabaf23f653baf924f5d66e83 (diff) | |
| download | linux-e1147d8f47eb8fef93f98a30858192145137d2b2.tar.xz | |
[CRYPTO] digest: Add alignment handling
Some hash modules load/store data words directly. The digest layer
should pass a properly aligned buffer to the update()/final() methods. This
patch also adds cra_alignmask to some hash modules (a driver-side sketch of
that follows below).
Signed-off-by: Atsushi Nemoto <anemo@mba.ocn.ne.jp>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
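
For context, the driver-side half of the change looks roughly as follows. This is a minimal sketch assuming the 2.6-era digest interface whose field names (cra_alignmask, cra_digest, dia_update, dia_final) appear in the diff below; the algorithm name, context struct, and callbacks (`example_*`) are hypothetical and not taken from this patch:

```c
/* Sketch only: a hash module whose compression function loads input as
 * 32-bit words advertises a 4-byte alignment requirement via cra_alignmask,
 * so crypto/digest.c knows when it must re-align data before calling in. */
static struct crypto_alg example_alg = {
	.cra_name	= "example-hash",		/* hypothetical name */
	.cra_flags	= CRYPTO_ALG_TYPE_DIGEST,
	.cra_blocksize	= 64,
	.cra_ctxsize	= sizeof(struct example_ctx),	/* hypothetical ctx */
	.cra_alignmask	= 3,	/* dia_update()/dia_final() want 4-byte alignment */
	.cra_module	= THIS_MODULE,
	.cra_u		= { .digest = {
		.dia_digestsize	= 20,
		.dia_init	= example_init,		/* hypothetical callbacks */
		.dia_update	= example_update,
		.dia_final	= example_final } }
};
```

With cra_alignmask set, the generic digest code below takes responsibility for feeding the module only aligned pointers, so the module itself needs no unaligned-access handling.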
Diffstat (limited to 'crypto/digest.c')
-rw-r--r-- | crypto/digest.c | 42 |
1 file changed, 27 insertions, 15 deletions
```diff
diff --git a/crypto/digest.c b/crypto/digest.c
index d9b6ac9dbf8d..062d0a5a2c89 100644
--- a/crypto/digest.c
+++ b/crypto/digest.c
@@ -27,6 +27,7 @@ static void update(struct crypto_tfm *tfm,
                    struct scatterlist *sg, unsigned int nsg)
 {
 	unsigned int i;
+	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
 
 	for (i = 0; i < nsg; i++) {
 
@@ -38,12 +39,24 @@ static void update(struct crypto_tfm *tfm,
 			unsigned int bytes_from_page = min(l, ((unsigned int)
 							   (PAGE_SIZE)) - offset);
-			char *p = crypto_kmap(pg, 0) + offset;
+			char *src = crypto_kmap(pg, 0);
+			char *p = src + offset;
 
+			if (unlikely(offset & alignmask)) {
+				unsigned int bytes =
+					alignmask + 1 - (offset & alignmask);
+				bytes = min(bytes, bytes_from_page);
+				tfm->__crt_alg->cra_digest.dia_update
+						(crypto_tfm_ctx(tfm), p,
+						 bytes);
+				p += bytes;
+				bytes_from_page -= bytes;
+				l -= bytes;
+			}
 			tfm->__crt_alg->cra_digest.dia_update
 					(crypto_tfm_ctx(tfm), p,
 					 bytes_from_page);
-			crypto_kunmap(p, 0);
+			crypto_kunmap(src, 0);
 			crypto_yield(tfm);
 			offset = 0;
 			pg++;
@@ -54,7 +67,15 @@ static void update(struct crypto_tfm *tfm,
 
 static void final(struct crypto_tfm *tfm, u8 *out)
 {
-	tfm->__crt_alg->cra_digest.dia_final(crypto_tfm_ctx(tfm), out);
+	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
+	if (unlikely((unsigned long)out & alignmask)) {
+		unsigned int size = crypto_tfm_alg_digestsize(tfm);
+		u8 buffer[size + alignmask];
+		u8 *dst = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+		tfm->__crt_alg->cra_digest.dia_final(crypto_tfm_ctx(tfm), dst);
+		memcpy(out, dst, size);
+	} else
+		tfm->__crt_alg->cra_digest.dia_final(crypto_tfm_ctx(tfm), out);
 }
 
 static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
@@ -69,18 +90,9 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 static void digest(struct crypto_tfm *tfm,
                    struct scatterlist *sg, unsigned int nsg, u8 *out)
 {
-	unsigned int i;
-
-	tfm->crt_digest.dit_init(tfm);
-
-	for (i = 0; i < nsg; i++) {
-		char *p = crypto_kmap(sg[i].page, 0) + sg[i].offset;
-		tfm->__crt_alg->cra_digest.dia_update(crypto_tfm_ctx(tfm),
-						      p, sg[i].length);
-		crypto_kunmap(p, 0);
-		crypto_yield(tfm);
-	}
-	crypto_digest_final(tfm, out);
+	init(tfm);
+	update(tfm, sg, nsg);
+	final(tfm, out);
 }
 
 int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags)
```
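
As a standalone illustration of the two alignment computations used above (plain userspace C, not kernel code; ALIGN_UP is a local stand-in for the kernel's ALIGN() macro):

```c
/* Demonstrates the alignment arithmetic from the patch:
 * 1) the size of the head fragment fed to dia_update() so the rest is aligned,
 * 2) rounding a stack buffer address up to the next aligned boundary in final(). */
#include <stdio.h>

/* Round x up to a multiple of 'a' (a power of two). */
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((unsigned long)(a) - 1))

int main(void)
{
	unsigned int alignmask = 3;	/* e.g. cra_alignmask = 3 -> 4-byte words */
	unsigned int offset = 5;	/* misaligned start within the page */

	/* Head fragment: 4 - (5 & 3) = 3 bytes; after consuming it,
	 * offset + 3 = 8 is 4-byte aligned for the bulk of the update. */
	unsigned int head = alignmask + 1 - (offset & alignmask);
	printf("head fragment: %u bytes\n", head);

	/* final() path: over-allocate by alignmask bytes, then round the
	 * address up so dia_final() writes to an aligned buffer. */
	unsigned char buffer[20 + 3];
	unsigned long dst = ALIGN_UP((unsigned long)buffer, alignmask + 1);
	printf("buffer %p -> aligned %p\n", (void *)buffer, (void *)dst);
	return 0;
}
```

The over-allocation by alignmask bytes is what makes the round-up safe: the aligned pointer can move forward by at most alignmask bytes and still leave a full digest-sized region inside the buffer.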