Browse Source

crypto: x86/chacha20 - Manually align stack buffer

The kernel on x86-64 cannot use gcc attribute align to align to
a 16-byte boundary.  This patch reverts to the old way of aligning
it by hand.

Fixes: 9ae433bc79f9 ("crypto: chacha20 - convert generic and...")
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Reviewed-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Herbert Xu 8 years ago
parent
commit
b8fbe71f75
1 changed file with 4 additions and 1 deletion
  1. 4 1
      arch/x86/crypto/chacha20_glue.c

+ 4 - 1
arch/x86/crypto/chacha20_glue.c

@@ -67,10 +67,13 @@ static int chacha20_simd(struct skcipher_request *req)
 {
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm);
 	struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm);
-	u32 state[16] __aligned(CHACHA20_STATE_ALIGN);
+	u32 *state, state_buf[16 + 2] __aligned(8);
 	struct skcipher_walk walk;
 	struct skcipher_walk walk;
 	int err;
 	int err;
 
 
+	BUILD_BUG_ON(CHACHA20_STATE_ALIGN != 16);
+	state = PTR_ALIGN(state_buf + 0, CHACHA20_STATE_ALIGN);
+
 	if (req->cryptlen <= CHACHA20_BLOCK_SIZE || !may_use_simd())
 	if (req->cryptlen <= CHACHA20_BLOCK_SIZE || !may_use_simd())
 		return crypto_chacha20_crypt(req);
 		return crypto_chacha20_crypt(req);