@@ -107,24 +107,23 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
 		ret = crypto_skcipher_encrypt(req);
 		skcipher_request_zero(req);
 	} else {
-		preempt_disable();
-		pagefault_disable();
-		enable_kernel_vsx();
-
 		blkcipher_walk_init(&walk, dst, src, nbytes);
 		ret = blkcipher_walk_virt(desc, &walk);
 		while ((nbytes = walk.nbytes)) {
+			preempt_disable();
+			pagefault_disable();
+			enable_kernel_vsx();
 			aes_p8_cbc_encrypt(walk.src.virt.addr,
 					   walk.dst.virt.addr,
 					   nbytes & AES_BLOCK_MASK,
 					   &ctx->enc_key, walk.iv, 1);
+			disable_kernel_vsx();
+			pagefault_enable();
+			preempt_enable();
+
 			nbytes &= AES_BLOCK_SIZE - 1;
 			ret = blkcipher_walk_done(desc, &walk, nbytes);
 		}
-
-		disable_kernel_vsx();
-		pagefault_enable();
-		preempt_enable();
 	}
 
 	return ret;
@@ -147,24 +146,23 @@ static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
 		ret = crypto_skcipher_decrypt(req);
 		skcipher_request_zero(req);
 	} else {
-		preempt_disable();
-		pagefault_disable();
-		enable_kernel_vsx();
-
 		blkcipher_walk_init(&walk, dst, src, nbytes);
 		ret = blkcipher_walk_virt(desc, &walk);
 		while ((nbytes = walk.nbytes)) {
+			preempt_disable();
+			pagefault_disable();
+			enable_kernel_vsx();
 			aes_p8_cbc_encrypt(walk.src.virt.addr,
 					   walk.dst.virt.addr,
 					   nbytes & AES_BLOCK_MASK,
 					   &ctx->dec_key, walk.iv, 0);
+			disable_kernel_vsx();
+			pagefault_enable();
+			preempt_enable();
+
 			nbytes &= AES_BLOCK_SIZE - 1;
 			ret = blkcipher_walk_done(desc, &walk, nbytes);
 		}
-
-		disable_kernel_vsx();
-		pagefault_enable();
-		preempt_enable();
 	}
 
 	return ret;
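
In both hunks the change is the same: the preempt-disabled, pagefault-disabled,
VSX-enabled window now covers only the aes_p8_cbc_encrypt() call inside the
walk loop, rather than the whole loop, because the blkcipher walk helpers are
not safe to call in atomic context. A minimal commented sketch of the
resulting encrypt loop follows; all identifiers are taken from the patch
itself, and the comments are editorial annotations, not part of the patch:

	while ((nbytes = walk.nbytes)) {
		/* Atomic window: the VSX register state used by the
		 * assembly routine must not be lost to preemption or
		 * a page-fault handler while it is live. */
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		aes_p8_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
				   nbytes & AES_BLOCK_MASK,
				   &ctx->enc_key, walk.iv, 1);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		/* Outside the window: blkcipher_walk_done() may sleep,
		 * which is illegal with preemption disabled, so it must
		 * run after the atomic section is closed. */
		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, &walk, nbytes);
	}

The decrypt path is identical except that it passes &ctx->dec_key and a final
argument of 0 to select decryption.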