author     Eric Biggers <ebiggers@google.com>       2019-02-14 11:03:55 +0300
committer  Herbert Xu <herbert@gondor.apana.org.au> 2019-02-22 07:47:27 +0300
commit     511306b2d075e3cb1e5dd805bffcf4041535f2b5 (patch)
tree       e46eb87d12257c17b38b9ee25fa7ea3478e6d87e /arch
parent     fa5fd3afc7e64473bd9d417726c046b322ec0696 (diff)
download   linux-511306b2d075e3cb1e5dd805bffcf4041535f2b5.tar.xz
crypto: arm/aes-ce - update IV after partial final CTR block
Make the arm ctr-aes-ce algorithm update the IV buffer to contain the next counter after processing a partial final block, rather than leave it as the last counter. This makes ctr-aes-ce pass the updated AES-CTR tests. This change also makes the code match the arm64 version in arch/arm64/crypto/aes-modes.S more closely.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
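[Editorial note] The contract being enforced here can be sketched in C. This is only an illustrative sketch of the AES-CTR convention the updated tests check, not the kernel's actual code: the names ctr_crypt_sketch, aes_encrypt_block and ctr128_inc are invented for the example. The point is that the caller-visible IV must be left holding the counter for the next block, even when the final block is partial.

#include <stddef.h>
#include <stdint.h>

/* Hypothetical single-block AES primitive; stands in for the CE code path. */
void aes_encrypt_block(const void *key, const uint8_t in[16], uint8_t out[16]);

/* Increment a 16-byte big-endian counter by one, propagating the carry. */
static void ctr128_inc(uint8_t ctr[16])
{
	for (int i = 15; i >= 0; i--)
		if (++ctr[i] != 0)
			break;
}

/*
 * Sketch of the CTR convention: 'iv' is read as the first counter and must
 * be left holding the *next* counter on return, even if 'len' is not a
 * multiple of 16.  Leaving it at the last-used counter (the behaviour this
 * patch fixes) would break callers that continue from the returned IV.
 */
static void ctr_crypt_sketch(const void *key, uint8_t iv[16],
			     const uint8_t *src, uint8_t *dst, size_t len)
{
	uint8_t ks[16];

	while (len) {
		size_t n = len < 16 ? len : 16;

		aes_encrypt_block(key, iv, ks);
		ctr128_inc(iv);		/* advance even for a partial block */
		for (size_t i = 0; i < n; i++)
			dst[i] = src[i] ^ ks[i];
		src += n;
		dst += n;
		len -= n;
	}
}

In the diff below, the same effect is achieved by incrementing the counter before the remaining-blocks check, so the tail-block path also branches to .Lctrout, which stores the advanced counter in q6 back to the IV buffer at [r5].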
Diffstat (limited to 'arch')
-rw-r--r--  arch/arm/crypto/aes-ce-core.S  26
1 file changed, 13 insertions, 13 deletions
diff --git a/arch/arm/crypto/aes-ce-core.S b/arch/arm/crypto/aes-ce-core.S
index ba8e6a32fdc9..bc53bcaa772e 100644
--- a/arch/arm/crypto/aes-ce-core.S
+++ b/arch/arm/crypto/aes-ce-core.S
@@ -317,25 +317,27 @@ ENTRY(ce_aes_ctr_encrypt)
 .Lctrloop:
 	vmov	q0, q6
 	bl	aes_encrypt
-	subs	r4, r4, #1
-	bmi	.Lctrtailblock		@ blocks < 0 means tail block
-	vld1.8	{q3}, [r1]!
-	veor	q3, q0, q3
-	vst1.8	{q3}, [r0]!
 	adds	r6, r6, #1		@ increment BE ctr
 	rev	ip, r6
 	vmov	s27, ip
 	bcs	.Lctrcarry
-	teq	r4, #0
+
+.Lctrcarrydone:
+	subs	r4, r4, #1
+	bmi	.Lctrtailblock		@ blocks < 0 means tail block
+	vld1.8	{q3}, [r1]!
+	veor	q3, q0, q3
+	vst1.8	{q3}, [r0]!
 	bne	.Lctrloop
+
 .Lctrout:
-	vst1.8	{q6}, [r5]
+	vst1.8	{q6}, [r5]		@ return next CTR value
 	pop	{r4-r6, pc}
 .Lctrtailblock:
-	vst1.8	{q0}, [r0, :64]		@ return just the key stream
-	pop	{r4-r6, pc}
+	vst1.8	{q0}, [r0, :64]		@ return the key stream
+	b	.Lctrout
 .Lctrcarry:
 	.irp	sreg, s26, s25, s24
@@ -344,11 +346,9 @@ ENTRY(ce_aes_ctr_encrypt)
 	adds	ip, ip, #1
 	rev	ip, ip
 	vmov	\sreg, ip
-	bcc	0f
+	bcc	.Lctrcarrydone
 	.endr
-0:	teq	r4, #0
-	beq	.Lctrout
-	b	.Lctrloop
+	b	.Lctrcarrydone
 ENDPROC(ce_aes_ctr_encrypt)
 /*
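[Editorial note] On the .Lctrcarry/.Lctrcarrydone flow above: the counter lives in q6 as four 32-bit words (s24..s27) holding the big-endian IV bytes, the low word (s27) is bumped for every block, and only when it wraps does the code walk the higher words. A rough C equivalent follows, written purely as an illustration; the function names are invented, a little-endian host is assumed, and the real driver additionally keeps the byte-swapped low word cached in r6.

#include <stdint.h>

/* Reverse the byte order of a 32-bit word, like the 'rev' instruction. */
static uint32_t swab32(uint32_t x)
{
	return (x >> 24) | ((x >> 8) & 0x0000ff00) |
	       ((x << 8) & 0x00ff0000) | (x << 24);
}

/*
 * ctr[0..3] hold the 16 IV bytes word-for-word, so on a little-endian
 * machine each word is byte-swapped before and after the arithmetic,
 * mirroring the rev instructions.  ctr[3] is the least significant word
 * (s27); the common case only touches it.  Continuing the loop matches the
 * .Lctrcarry path, and breaking out matches .Lctrcarrydone.
 */
static void ctr_words_inc(uint32_t ctr[4])
{
	for (int i = 3; i >= 0; i--) {
		uint32_t v = swab32(ctr[i]) + 1;

		ctr[i] = swab32(v);
		if (v != 0)		/* no carry into the next word */
			break;
	}
}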