@@ -113,6 +113,13 @@ struct qat_alg_aead_ctx {
 	struct crypto_shash *hash_tfm;
 	enum icp_qat_hw_auth_algo qat_hash_alg;
 	struct qat_crypto_instance *inst;
+	union {
+		struct sha1_state sha1;
+		struct sha256_state sha256;
+		struct sha512_state sha512;
+	};
+	char ipad[SHA512_BLOCK_SIZE]; /* sufficient for SHA-1/SHA-256 as well */
+	char opad[SHA512_BLOCK_SIZE];
 };
 
 struct qat_alg_ablkcipher_ctx {
@@ -148,41 +155,32 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 				  unsigned int auth_keylen)
 {
 	SHASH_DESC_ON_STACK(shash, ctx->hash_tfm);
-	struct sha1_state sha1;
-	struct sha256_state sha256;
-	struct sha512_state sha512;
 	int block_size = crypto_shash_blocksize(ctx->hash_tfm);
 	int digest_size = crypto_shash_digestsize(ctx->hash_tfm);
-	char ipad[MAX_ALGAPI_BLOCKSIZE];
-	char opad[MAX_ALGAPI_BLOCKSIZE];
 	__be32 *hash_state_out;
 	__be64 *hash512_state_out;
 	int i, offset;
 
-	memset(ipad, 0, block_size);
-	memset(opad, 0, block_size);
+	memset(ctx->ipad, 0, block_size);
+	memset(ctx->opad, 0, block_size);
 	shash->tfm = ctx->hash_tfm;
 	shash->flags = 0x0;
 
-	if (WARN_ON(block_size > sizeof(ipad) ||
-		    sizeof(ipad) != sizeof(opad)))
-		return -EINVAL;
-
 	if (auth_keylen > block_size) {
 		int ret = crypto_shash_digest(shash, auth_key,
-					      auth_keylen, ipad);
+					      auth_keylen, ctx->ipad);
 		if (ret)
 			return ret;
 
-		memcpy(opad, ipad, digest_size);
+		memcpy(ctx->opad, ctx->ipad, digest_size);
 	} else {
-		memcpy(ipad, auth_key, auth_keylen);
-		memcpy(opad, auth_key, auth_keylen);
+		memcpy(ctx->ipad, auth_key, auth_keylen);
+		memcpy(ctx->opad, auth_key, auth_keylen);
 	}
 
 	for (i = 0; i < block_size; i++) {
-		char *ipad_ptr = ipad + i;
-		char *opad_ptr = opad + i;
+		char *ipad_ptr = ctx->ipad + i;
+		char *opad_ptr = ctx->opad + i;
 		*ipad_ptr ^= HMAC_IPAD_VALUE;
 		*opad_ptr ^= HMAC_OPAD_VALUE;
 	}
@@ -190,7 +188,7 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 	if (crypto_shash_init(shash))
 		return -EFAULT;
 
-	if (crypto_shash_update(shash, ipad, block_size))
+	if (crypto_shash_update(shash, ctx->ipad, block_size))
 		return -EFAULT;
 
 	hash_state_out = (__be32 *)hash->sha.state1;
@@ -198,22 +196,22 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 
 	switch (ctx->qat_hash_alg) {
 	case ICP_QAT_HW_AUTH_ALGO_SHA1:
-		if (crypto_shash_export(shash, &sha1))
+		if (crypto_shash_export(shash, &ctx->sha1))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
-			*hash_state_out = cpu_to_be32(*(sha1.state + i));
+			*hash_state_out = cpu_to_be32(ctx->sha1.state[i]);
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA256:
-		if (crypto_shash_export(shash, &sha256))
+		if (crypto_shash_export(shash, &ctx->sha256))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
-			*hash_state_out = cpu_to_be32(*(sha256.state + i));
+			*hash_state_out = cpu_to_be32(ctx->sha256.state[i]);
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA512:
-		if (crypto_shash_export(shash, &sha512))
+		if (crypto_shash_export(shash, &ctx->sha512))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
-			*hash512_state_out = cpu_to_be64(*(sha512.state + i));
+			*hash512_state_out = cpu_to_be64(ctx->sha512.state[i]);
 		break;
 	default:
 		return -EFAULT;
@@ -222,7 +220,7 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 	if (crypto_shash_init(shash))
 		return -EFAULT;
 
-	if (crypto_shash_update(shash, opad, block_size))
+	if (crypto_shash_update(shash, ctx->opad, block_size))
 		return -EFAULT;
 
 	offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8);
@@ -231,28 +229,28 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 
 	switch (ctx->qat_hash_alg) {
 	case ICP_QAT_HW_AUTH_ALGO_SHA1:
-		if (crypto_shash_export(shash, &sha1))
+		if (crypto_shash_export(shash, &ctx->sha1))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
-			*hash_state_out = cpu_to_be32(*(sha1.state + i));
+			*hash_state_out = cpu_to_be32(ctx->sha1.state[i]);
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA256:
-		if (crypto_shash_export(shash, &sha256))
+		if (crypto_shash_export(shash, &ctx->sha256))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
-			*hash_state_out = cpu_to_be32(*(sha256.state + i));
+			*hash_state_out = cpu_to_be32(ctx->sha256.state[i]);
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA512:
-		if (crypto_shash_export(shash, &sha512))
+		if (crypto_shash_export(shash, &ctx->sha512))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
-			*hash512_state_out = cpu_to_be64(*(sha512.state + i));
+			*hash512_state_out = cpu_to_be64(ctx->sha512.state[i]);
 		break;
 	default:
 		return -EFAULT;
 	}
-	memzero_explicit(ipad, block_size);
-	memzero_explicit(opad, block_size);
+	memzero_explicit(ctx->ipad, block_size);
+	memzero_explicit(ctx->opad, block_size);
 	return 0;
 }
 
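The fixed worst-case sizing works because all three hashes this driver supports fit in it: SHA-1 and SHA-256 use a 64-byte block, SHA-512 a 128-byte one, so ipad/opad buffers of SHA512_BLOCK_SIZE cover every case and the old WARN_ON() bounds check on the stack arrays becomes unnecessary. Below is a minimal user-space sketch of the ipad/opad precomputation pattern with the pads living in a heap-allocated context, as the patch arranges; precompute_ctx and hmac_precompute_pads are illustrative names rather than the driver's, and the long-key path (auth_keylen > block_size, where the driver first hashes the key down to digest_size bytes with crypto_shash_digest()) is deliberately left out.

/*
 * Standalone model of the HMAC ipad/opad derivation done by
 * qat_alg_do_precomputes(). Names marked illustrative; the pad
 * constants match include/crypto/hmac.h.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define HMAC_IPAD_VALUE   0x36
#define HMAC_OPAD_VALUE   0x5c
#define SHA512_BLOCK_SIZE 128	/* largest block size the driver supports */

struct precompute_ctx {
	/* fixed worst-case buffers, as in the patched qat_alg_aead_ctx */
	char ipad[SHA512_BLOCK_SIZE];
	char opad[SHA512_BLOCK_SIZE];
};

static int hmac_precompute_pads(struct precompute_ctx *ctx,
				const char *key, size_t keylen,
				size_t block_size)
{
	size_t i;

	if (keylen > block_size || block_size > SHA512_BLOCK_SIZE)
		return -1;	/* long-key path not modelled here */

	/* zero-pad the key to a full block in both pads */
	memset(ctx->ipad, 0, block_size);
	memset(ctx->opad, 0, block_size);
	memcpy(ctx->ipad, key, keylen);
	memcpy(ctx->opad, key, keylen);

	/* XOR in the per-pad constants, as the driver's loop does */
	for (i = 0; i < block_size; i++) {
		ctx->ipad[i] ^= HMAC_IPAD_VALUE;
		ctx->opad[i] ^= HMAC_OPAD_VALUE;
	}
	return 0;
}

int main(void)
{
	/* context on the heap, so the pads never touch the stack frame */
	struct precompute_ctx *ctx = calloc(1, sizeof(*ctx));
	const char key[] = "example-key";

	if (!ctx || hmac_precompute_pads(ctx, key, sizeof(key) - 1, 64))
		return 1;
	/* 'e' (0x65) ^ 0x36 = 0x53; 'e' ^ 0x5c = 0x39 */
	printf("ipad[0]=%#x opad[0]=%#x\n", ctx->ipad[0], ctx->opad[0]);
	free(ctx);
	return 0;
}

One trade-off worth noting: keeping the pads and the exported sha1/sha256/sha512 states in the tfm context makes each context a few hundred bytes larger and assumes setkey is not invoked concurrently on the same tfm (the usual expectation for kernel crypto transforms), but it removes several large buffers from the qat_alg_do_precomputes() stack frame, which is presumably the motivation for the change.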