@@ -671,7 +671,7 @@ static int chcr_sg_ent_in_wr(struct scatterlist *src,
 	return min(srclen, dstlen);
 }
 
-static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
+static int chcr_cipher_fallback(struct crypto_sync_skcipher *cipher,
 				u32 flags,
 				struct scatterlist *src,
 				struct scatterlist *dst,
@@ -681,9 +681,9 @@ static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
 {
 	int err;
 
-	SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
+	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
 
-	skcipher_request_set_tfm(subreq, cipher);
+	skcipher_request_set_sync_tfm(subreq, cipher);
 	skcipher_request_set_callback(subreq, flags, NULL, NULL);
 	skcipher_request_set_crypt(subreq, src, dst,
 				   nbytes, iv);
@@ -854,13 +854,14 @@ static int chcr_cipher_fallback_setkey(struct crypto_ablkcipher *cipher,
 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
 	int err = 0;
 
-	crypto_skcipher_clear_flags(ablkctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
-	crypto_skcipher_set_flags(ablkctx->sw_cipher, cipher->base.crt_flags &
-				  CRYPTO_TFM_REQ_MASK);
-	err = crypto_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
+	crypto_sync_skcipher_clear_flags(ablkctx->sw_cipher,
+					 CRYPTO_TFM_REQ_MASK);
+	crypto_sync_skcipher_set_flags(ablkctx->sw_cipher,
+				       cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK);
+	err = crypto_sync_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
 	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
 	tfm->crt_flags |=
-		crypto_skcipher_get_flags(ablkctx->sw_cipher) &
+		crypto_sync_skcipher_get_flags(ablkctx->sw_cipher) &
 		CRYPTO_TFM_RES_MASK;
 	return err;
 }
@@ -1360,8 +1361,8 @@ static int chcr_cra_init(struct crypto_tfm *tfm)
 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
-	ablkctx->sw_cipher = crypto_alloc_skcipher(alg->cra_name, 0,
-				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	ablkctx->sw_cipher = crypto_alloc_sync_skcipher(alg->cra_name, 0,
+				CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ablkctx->sw_cipher)) {
 		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
 		return PTR_ERR(ablkctx->sw_cipher);
@@ -1390,8 +1391,8 @@ static int chcr_rfc3686_init(struct crypto_tfm *tfm)
 	/*RFC3686 initialises IV counter value to 1, rfc3686(ctr(aes))
 	 * cannot be used as fallback in chcr_handle_cipher_response
 	 */
-	ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0,
-				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	ablkctx->sw_cipher = crypto_alloc_sync_skcipher("ctr(aes)", 0,
+				CRYPTO_ALG_NEED_FALLBACK);
 	if (IS_ERR(ablkctx->sw_cipher)) {
 		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
 		return PTR_ERR(ablkctx->sw_cipher);
@@ -1406,7 +1407,7 @@ static void chcr_cra_exit(struct crypto_tfm *tfm)
 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
-	crypto_free_skcipher(ablkctx->sw_cipher);
+	crypto_free_sync_skcipher(ablkctx->sw_cipher);
 	if (ablkctx->aes_generic)
 		crypto_free_cipher(ablkctx->aes_generic);
 }
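
For reference, a minimal standalone sketch of the on-stack sync-skcipher fallback pattern this diff converts to, assuming a caller that already holds a tfm from crypto_alloc_sync_skcipher(); the helper name sw_fallback_encrypt and its parameter list are illustrative, not driver code:

/*
 * Minimal sketch (not from this patch): invoke a synchronous software
 * fallback with a request allocated on the stack.  The helper name and
 * parameters are illustrative assumptions, not taken from the driver.
 */
#include <crypto/skcipher.h>

static int sw_fallback_encrypt(struct crypto_sync_skcipher *tfm,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       unsigned int nbytes, u8 *iv)
{
	/* Safe on the stack: sync tfms have a small, bounded reqsize. */
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, tfm);
	int err;

	skcipher_request_set_sync_tfm(subreq, tfm);
	skcipher_request_set_callback(subreq, 0, NULL, NULL);
	skcipher_request_set_crypt(subreq, src, dst, nbytes, iv);
	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);
	return err;
}

Note that crypto_alloc_sync_skcipher() already masks out asynchronous implementations, which is why the explicit CRYPTO_ALG_ASYNC mask is dropped at the two allocation sites above.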