author	Eli Cooper <elicooper@gmx.com>	2016-01-22 00:24:08 +0800
committer	Herbert Xu <herbert@gondor.apana.org.au>	2016-01-25 21:47:45 +0800
commit	cbe09bd51bf23b42c3a94c5fb6815e1397c5fc3f (patch)
tree	f1dc325e655350589e60bfe8217495f4dc371475 /arch/x86
parent	7ee7014d0eb6bcac679c0bd5fe9ce65bc4325648 (diff)
crypto: chacha20-ssse3 - Align stack pointer to 64 bytes
This aligns the stack pointer in chacha20_4block_xor_ssse3 to 64 bytes.
Fixes general protection faults and potential kernel panics.

Cc: stable@vger.kernel.org
Signed-off-by: Eli Cooper <elicooper@gmx.com>
Acked-by: Martin Willi <martin@strongswan.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
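Background on the fault mechanism: the 4-block routine spills xmm state to a scratch area on the stack with aligned SSE stores (movdqa/movaps), which raise #GP when the memory operand is not 16-byte aligned, and the kernel's x86-64 stack need not be 16-byte aligned at function entry. A minimal sketch of the save/over-allocate/align/restore idiom the patch adopts follows; the sizes and the use of %r11 are taken from the hunks below, while the movdqa store offset is illustrative and not a complete listing of the function:

	mov	%rsp,%r11		# save the caller's %rsp in a caller-saved scratch register
	sub	$0x80,%rsp		# over-allocate: 0x40 bytes of scratch plus alignment slack
	and	$~63,%rsp		# round %rsp down to a 64-byte boundary
	movdqa	%xmm0,0x00(%rsp)	# aligned 16-byte store; would #GP on an unaligned address
	# ... use 0x00(%rsp)..0x3f(%rsp) as aligned scratch space ...
	mov	%r11,%rsp		# restore the caller's %rsp before returning
	ret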
Diffstat (limited to 'arch/x86')
-rw-r--r--	arch/x86/crypto/chacha20-ssse3-x86_64.S	| 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/arch/x86/crypto/chacha20-ssse3-x86_64.S b/arch/x86/crypto/chacha20-ssse3-x86_64.S
index 712b13047b41..3a33124e9112 100644
--- a/arch/x86/crypto/chacha20-ssse3-x86_64.S
+++ b/arch/x86/crypto/chacha20-ssse3-x86_64.S
@@ -157,7 +157,9 @@ ENTRY(chacha20_4block_xor_ssse3)
 	# done with the slightly better performing SSSE3 byte shuffling,
 	# 7/12-bit word rotation uses traditional shift+OR.
 
-	sub		$0x40,%rsp
+	mov		%rsp,%r11
+	sub		$0x80,%rsp
+	and		$~63,%rsp
 
 	# x0..15[0-3] = s0..3[0..3]
 	movq		0x00(%rdi),%xmm1
@@ -620,6 +622,6 @@ ENTRY(chacha20_4block_xor_ssse3)
 	pxor		%xmm1,%xmm15
 	movdqu		%xmm15,0xf0(%rsi)
 
-	add		$0x40,%rsp
+	mov		%r11,%rsp
 	ret
 ENDPROC(chacha20_4block_xor_ssse3)
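A note on the sizes in the first hunk: the scratch area itself is still 0x40 bytes, but the `and $~63,%rsp` can move %rsp down by as much as 0x3f bytes, so reserving 0x80 keeps the aligned 0x40-byte window inside the allocation. %r11 is caller-saved in the SysV x86-64 ABI, so parking the original stack pointer there requires no extra save/restore.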