@@ -332,7 +332,7 @@ static int crypt_iv_essiv_init(struct crypt_config *cc)
 	int err;
 
 	desc->tfm = essiv->hash_tfm;
-	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->flags = 0;
 
 	err = crypto_shash_digest(desc, cc->key, cc->key_size, essiv->salt);
 	shash_desc_zero(desc);
@@ -606,7 +606,7 @@ static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv,
 	int i, r;
 
 	desc->tfm = lmk->hash_tfm;
-	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->flags = 0;
 
 	r = crypto_shash_init(desc);
 	if (r)
@@ -768,7 +768,7 @@ static int crypt_iv_tcw_whitening(struct crypt_config *cc,
 
 	/* calculate crc32 for every 32bit part and xor it */
 	desc->tfm = tcw->crc32_tfm;
-	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;
+	desc->flags = 0;
 	for (i = 0; i < 4; i++) {
 		r = crypto_shash_init(desc);
 		if (r)
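
The three hunks above cover dm-crypt's synchronous hashing paths (ESSIV
initialization, LMK per-sector hashing, TCW whitening). Clearing
CRYPTO_TFM_REQ_MAY_SLEEP from desc->flags tells the shash implementation
it must not sleep (e.g. no GFP_KERNEL allocations), so these digests stay
safe when the caller cannot block. Below is a minimal sketch of the same
pattern, assuming a kernel of this patch's era in which struct shash_desc
still has a flags field; "sha256" and demo_digest() are illustrative
names, not dm-crypt code.

#include <crypto/hash.h>
#include <linux/err.h>

static int demo_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/*
		 * No CRYPTO_TFM_REQ_MAY_SLEEP: the implementation must
		 * not sleep while computing the digest, matching the
		 * dm-crypt hunks above.
		 */
		desc->flags = 0;

		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);	/* wipe hash state off the stack */
	}

	crypto_free_shash(tfm);
	return err;
}

The caller must size out to crypto_shash_digestsize(tfm), i.e. 32 bytes
for sha256.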
@@ -1251,7 +1251,7 @@ static void crypt_alloc_req_skcipher(struct crypt_config *cc,
 	 * requests if driver request queue is full.
 	 */
 	skcipher_request_set_callback(ctx->r.req,
-	    CRYPTO_TFM_REQ_MAY_BACKLOG | CRYPTO_TFM_REQ_MAY_SLEEP,
+	    CRYPTO_TFM_REQ_MAY_BACKLOG,
 	    kcryptd_async_done, dmreq_of_req(cc, ctx->r.req));
 }
 
@@ -1268,7 +1268,7 @@ static void crypt_alloc_req_aead(struct crypt_config *cc,
 	 * requests if driver request queue is full.
 	 */
 	aead_request_set_callback(ctx->r.req_aead,
-	    CRYPTO_TFM_REQ_MAY_BACKLOG | CRYPTO_TFM_REQ_MAY_SLEEP,
+	    CRYPTO_TFM_REQ_MAY_BACKLOG,
 	    kcryptd_async_done, dmreq_of_req(cc, ctx->r.req_aead));
 }
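
The last two hunks drop CRYPTO_TFM_REQ_MAY_SLEEP from the asynchronous
skcipher and AEAD requests while keeping CRYPTO_TFM_REQ_MAY_BACKLOG, so a
driver with a full queue still backlogs the request (returning -EBUSY)
instead of dropping it. A backlogged request signals its callback twice:
first with -EINPROGRESS once the driver accepts it, then with the final
status. The sketch below models that submission pattern on dm-crypt's
crypt_convert()/kcryptd_async_done(); the demo_* names are hypothetical,
and the callback signature is the one used by kernels of this patch's
generation.

#include <crypto/skcipher.h>
#include <linux/completion.h>

struct demo_ctx {
	struct completion restart;	/* submitter waits here on -EBUSY */
	int err;
};

static void demo_done(struct crypto_async_request *areq, int error)
{
	struct demo_ctx *ctx = areq->data;

	/* Backlog notification: the driver has accepted the request. */
	if (error == -EINPROGRESS) {
		complete(&ctx->restart);
		return;
	}

	ctx->err = error;
	/* ... finish processing here, e.g. complete the bio ... */
}

/* Assumes req already has its tfm, key, src/dst, and IV set up. */
static int demo_submit(struct skcipher_request *req, struct demo_ctx *ctx)
{
	int r;

	init_completion(&ctx->restart);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      demo_done, ctx);

	r = crypto_skcipher_encrypt(req);
	switch (r) {
	case -EBUSY:
		/* Queue was full; wait until the backlog accepts it. */
		wait_for_completion(&ctx->restart);
		return -EINPROGRESS;
	case -EINPROGRESS:	/* in flight; demo_done() finishes it */
	case 0:			/* completed synchronously */
	default:		/* hard error */
		return r;
	}
}

Note that only the submitter may sleep here (in wait_for_completion());
with MAY_SLEEP cleared, the cipher driver itself must stay non-blocking.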