@@ -149,10 +149,7 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 				  unsigned int auth_keylen)
 {
 	struct qat_auth_state auth_state;
-	struct {
-		struct shash_desc shash;
-		char ctx[crypto_shash_descsize(ctx->hash_tfm)];
-	} desc;
+	SHASH_DESC_ON_STACK(shash, ctx->hash_tfm);
 	struct sha1_state sha1;
 	struct sha256_state sha256;
 	struct sha512_state sha512;
@@ -165,12 +162,12 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 	int i, offset;
 
 	memset(auth_state.data, '\0', MAX_AUTH_STATE_SIZE + 64);
-	desc.shash.tfm = ctx->hash_tfm;
-	desc.shash.flags = 0x0;
+	shash->tfm = ctx->hash_tfm;
+	shash->flags = 0x0;
 
 	if (auth_keylen > block_size) {
 		char buff[SHA512_BLOCK_SIZE];
-		int ret = crypto_shash_digest(&desc.shash, auth_key,
+		int ret = crypto_shash_digest(shash, auth_key,
 					      auth_keylen, buff);
 		if (ret)
 			return ret;
@@ -193,10 +190,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 		*opad_ptr ^= 0x5C;
 	}
 
-	if (crypto_shash_init(&desc.shash))
+	if (crypto_shash_init(shash))
 		return -EFAULT;
 
-	if (crypto_shash_update(&desc.shash, ipad, block_size))
+	if (crypto_shash_update(shash, ipad, block_size))
 		return -EFAULT;
 
 	hash_state_out = (__be32 *)hash->sha.state1;
@@ -204,19 +201,19 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 
 	switch (ctx->qat_hash_alg) {
 	case ICP_QAT_HW_AUTH_ALGO_SHA1:
-		if (crypto_shash_export(&desc.shash, &sha1))
+		if (crypto_shash_export(shash, &sha1))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha1.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA256:
-		if (crypto_shash_export(&desc.shash, &sha256))
+		if (crypto_shash_export(shash, &sha256))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha256.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA512:
-		if (crypto_shash_export(&desc.shash, &sha512))
+		if (crypto_shash_export(shash, &sha512))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
 			*hash512_state_out = cpu_to_be64(*(sha512.state + i));
@@ -225,10 +222,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 		return -EFAULT;
 	}
 
-	if (crypto_shash_init(&desc.shash))
+	if (crypto_shash_init(shash))
 		return -EFAULT;
 
-	if (crypto_shash_update(&desc.shash, opad, block_size))
+	if (crypto_shash_update(shash, opad, block_size))
 		return -EFAULT;
 
 	offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8);
@@ -237,19 +234,19 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 
 	switch (ctx->qat_hash_alg) {
 	case ICP_QAT_HW_AUTH_ALGO_SHA1:
-		if (crypto_shash_export(&desc.shash, &sha1))
+		if (crypto_shash_export(shash, &sha1))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha1.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA256:
-		if (crypto_shash_export(&desc.shash, &sha256))
+		if (crypto_shash_export(shash, &sha256))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha256.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA512:
-		if (crypto_shash_export(&desc.shash, &sha512))
+		if (crypto_shash_export(shash, &sha512))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
 			*hash512_state_out = cpu_to_be64(*(sha512.state + i));
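
For context: the removed code sized the shash descriptor with a
variable-length array inside a local struct (a VLAIS, a GCC-only
extension that clang rejects). SHASH_DESC_ON_STACK from <crypto/hash.h>
expresses the same thing portably: it declares a suitably sized char
buffer on the stack together with a struct shash_desc pointer into it.
A minimal sketch of the idiom outside this driver, assuming a one-off
synchronous digest; the helper name and the "sha256" algorithm choice
are illustrative, not part of the patch:

#include <crypto/hash.h>
#include <linux/err.h>

/* Hypothetical helper: hash a buffer with a throwaway sync transform. */
static int example_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int ret;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		/* Buffer sized via crypto_shash_descsize(tfm); no VLAIS,
		 * no kmalloc. */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0;	/* field exists in this kernel era */
		ret = crypto_shash_digest(desc, data, len, out);
	}

	crypto_free_shash(tfm);
	return ret;
}

The descriptor sits in an inner block so the macro can size its buffer
from the already-initialized tfm; in the patch itself ctx->hash_tfm is
set up beforehand, so shash can simply be declared at the top of
qat_alg_do_precomputes() and reused for the ipad and opad precomputes.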