
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Pull crypto fixes from Herbert Xu:
 "This fixes a the implementation of CRC32 on arm64 where it incorrectly
  applied negation on the result.

  It also fixes the arm64 implementations of SHA1/SHA256, which in some
  cases may end up finalising the result twice"

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
  crypto: arm64/sha2-ce - prevent asm code finalization in final() path
  crypto: arm64/sha1-ce - prevent asm code finalization in final() path
  crypto: arm64/crc32 - bring in line with generic CRC32
Linus Torvalds 10 years ago
Parent
Current commit
9c922a55db
3 changed files with 25 additions and 3 deletions
  1. arch/arm64/crypto/crc32-arm64.c (+19 -3)
  2. arch/arm64/crypto/sha1-ce-glue.c (+3 -0)
  3. arch/arm64/crypto/sha2-ce-glue.c (+3 -0)

+ 19 - 3
arch/arm64/crypto/crc32-arm64.c

@@ -147,13 +147,21 @@ static int chksum_final(struct shash_desc *desc, u8 *out)
 {
 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
 
+	put_unaligned_le32(ctx->crc, out);
+	return 0;
+}
+
+static int chksumc_final(struct shash_desc *desc, u8 *out)
+{
+	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
 	put_unaligned_le32(~ctx->crc, out);
 	return 0;
 }
 
 static int __chksum_finup(u32 crc, const u8 *data, unsigned int len, u8 *out)
 {
-	put_unaligned_le32(~crc32_arm64_le_hw(crc, data, len), out);
+	put_unaligned_le32(crc32_arm64_le_hw(crc, data, len), out);
 	return 0;
 }
 
@@ -199,6 +207,14 @@ static int crc32_cra_init(struct crypto_tfm *tfm)
 {
 	struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
 
+	mctx->key = 0;
+	return 0;
+}
+
+static int crc32c_cra_init(struct crypto_tfm *tfm)
+{
+	struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
+
 	mctx->key = ~0;
 	return 0;
 }
@@ -229,7 +245,7 @@ static struct shash_alg crc32c_alg = {
 	.setkey			=	chksum_setkey,
 	.init			=	chksum_init,
 	.update			=	chksumc_update,
-	.final			=	chksum_final,
+	.final			=	chksumc_final,
 	.finup			=	chksumc_finup,
 	.digest			=	chksumc_digest,
 	.descsize		=	sizeof(struct chksum_desc_ctx),
@@ -241,7 +257,7 @@ static struct shash_alg crc32c_alg = {
 		.cra_alignmask		=	0,
 		.cra_ctxsize		=	sizeof(struct chksum_ctx),
 		.cra_module		=	THIS_MODULE,
-		.cra_init		=	crc32_cra_init,
+		.cra_init		=	crc32c_cra_init,
 	}
 };
 

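The crc32 hunks above split the shared helpers so that "crc32" seeds its state with 0 and writes the running value out unchanged, while "crc32c" seeds with ~0 and writes the bitwise complement. Below is a minimal userspace sketch of that output convention, not kernel code; crc_le_bitwise() is a hypothetical bit-at-a-time stand-in for the driver's hardware-accelerated routine.

/*
 * Userspace sketch of the output convention enforced by the patch:
 * "crc32" -> seed 0, no final inversion (chksum_final above);
 * "crc32c" -> seed ~0, final inversion (chksumc_final above).
 */
#include <inttypes.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Bit-at-a-time CRC over a reflected polynomial. */
static uint32_t crc_le_bitwise(uint32_t crc, const uint8_t *data,
			       size_t len, uint32_t poly)
{
	while (len--) {
		crc ^= *data++;
		for (int i = 0; i < 8; i++)
			crc = (crc & 1) ? (crc >> 1) ^ poly : crc >> 1;
	}
	return crc;
}

int main(void)
{
	const uint8_t msg[] = "123456789";
	size_t len = sizeof(msg) - 1;

	/* "crc32" path: seed 0, result stored as-is. */
	uint32_t crc32_val = crc_le_bitwise(0, msg, len, 0xEDB88320u);

	/* "crc32c" path: seed ~0, result complemented. */
	uint32_t crc32c_val = ~crc_le_bitwise(~0u, msg, len, 0x82F63B78u);

	printf("crc32  = 0x%08" PRIx32 "\n", crc32_val);
	printf("crc32c = 0x%08" PRIx32 "\n", crc32c_val);
	return 0;
}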
+ 3 - 0
arch/arm64/crypto/sha1-ce-glue.c

@@ -74,6 +74,9 @@ static int sha1_ce_finup(struct shash_desc *desc, const u8 *data,
 
 static int sha1_ce_final(struct shash_desc *desc, u8 *out)
 {
+	struct sha1_ce_state *sctx = shash_desc_ctx(desc);
+
+	sctx->finalize = 0;
 	kernel_neon_begin_partial(16);
 	sha1_base_do_finalize(desc, (sha1_block_fn *)sha1_ce_transform);
 	kernel_neon_end();

+ 3 - 0
arch/arm64/crypto/sha2-ce-glue.c

@@ -75,6 +75,9 @@ static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
 
 static int sha256_ce_final(struct shash_desc *desc, u8 *out)
 {
+	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
+
+	sctx->finalize = 0;
 	kernel_neon_begin_partial(28);
 	sha256_base_do_finalize(desc, (sha256_block_fn *)sha2_ce_transform);
 	kernel_neon_end();
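
Both SHA patches clear the finalize hint before handing control to the generic finalisation helper, so the accelerated transform cannot pad the message a second time. The toy program below illustrates that guard in isolation; every name in it (toy_ctx, toy_transform, toy_do_finalize, toy_final) is hypothetical, and only the shape of the fix, clearing the hint in final(), mirrors the sctx->finalize = 0 lines added above.

#include <stdio.h>

struct toy_ctx {
	int finalize;		/* hint: transform may pad/finalise itself */
	int pad_count;		/* how many times padding was applied */
};

/* Stands in for the accelerated block transform: when the finalize hint
 * is set it applies the padding block itself. */
static void toy_transform(struct toy_ctx *ctx)
{
	if (ctx->finalize)
		ctx->pad_count++;
}

/* Stands in for the generic do_finalize helper: it always runs the
 * software padding path and then invokes the transform once more. */
static void toy_do_finalize(struct toy_ctx *ctx)
{
	ctx->pad_count++;	/* software padding */
	toy_transform(ctx);	/* process the final block */
}

/* The fixed final(): clear the hint first, as the patches do. */
static void toy_final(struct toy_ctx *ctx)
{
	ctx->finalize = 0;
	toy_do_finalize(ctx);
}

int main(void)
{
	/* A previous finup() call may have left the hint set. */
	struct toy_ctx buggy = { .finalize = 1, .pad_count = 0 };
	struct toy_ctx fixed = { .finalize = 1, .pad_count = 0 };

	toy_do_finalize(&buggy);	/* old path: padding applied twice */
	toy_final(&fixed);		/* patched path: padding applied once */

	printf("without the fix: padded %d time(s)\n", buggy.pad_count);
	printf("with the fix:    padded %d time(s)\n", fixed.pad_count);
	return 0;
}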