@@ -64,17 +64,17 @@ MODULE_LICENSE("GPL v2");
 
 /* defined in aes-modes.S */
 asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
-				int rounds, int blocks, int first);
+				int rounds, int blocks);
 asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
-				int rounds, int blocks, int first);
+				int rounds, int blocks);
 
 asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
-				int rounds, int blocks, u8 iv[], int first);
+				int rounds, int blocks, u8 iv[]);
 asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
-				int rounds, int blocks, u8 iv[], int first);
+				int rounds, int blocks, u8 iv[]);
 
 asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
-				int rounds, int blocks, u8 ctr[], int first);
+				int rounds, int blocks, u8 ctr[]);
 
 asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
 				int rounds, int blocks, u8 const rk2[], u8 iv[],
@@ -133,19 +133,19 @@ static int ecb_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
-	int err, first, rounds = 6 + ctx->key_length / 4;
+	int err, rounds = 6 + ctx->key_length / 4;
 	struct skcipher_walk walk;
 	unsigned int blocks;
 
-	err = skcipher_walk_virt(&walk, req, true);
+	err = skcipher_walk_virt(&walk, req, false);
 
-	kernel_neon_begin();
-	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
+	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
+		kernel_neon_begin();
 		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_enc, rounds, blocks, first);
+				(u8 *)ctx->key_enc, rounds, blocks);
+		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	kernel_neon_end();
 	return err;
 }
 
@@ -153,19 +153,19 @@ static int ecb_decrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
-	int err, first, rounds = 6 + ctx->key_length / 4;
+	int err, rounds = 6 + ctx->key_length / 4;
 	struct skcipher_walk walk;
 	unsigned int blocks;
 
-	err = skcipher_walk_virt(&walk, req, true);
+	err = skcipher_walk_virt(&walk, req, false);
 
-	kernel_neon_begin();
-	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
+	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
+		kernel_neon_begin();
 		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_dec, rounds, blocks, first);
+				(u8 *)ctx->key_dec, rounds, blocks);
+		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	kernel_neon_end();
 	return err;
 }
 
@@ -173,20 +173,19 @@ static int cbc_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
-	int err, first, rounds = 6 + ctx->key_length / 4;
+	int err, rounds = 6 + ctx->key_length / 4;
 	struct skcipher_walk walk;
 	unsigned int blocks;
 
-	err = skcipher_walk_virt(&walk, req, true);
+	err = skcipher_walk_virt(&walk, req, false);
 
-	kernel_neon_begin();
-	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
+	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
+		kernel_neon_begin();
 		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
-				first);
+				(u8 *)ctx->key_enc, rounds, blocks, walk.iv);
+		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	kernel_neon_end();
 	return err;
 }
 
@@ -194,20 +193,19 @@ static int cbc_decrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
-	int err, first, rounds = 6 + ctx->key_length / 4;
+	int err, rounds = 6 + ctx->key_length / 4;
 	struct skcipher_walk walk;
 	unsigned int blocks;
 
-	err = skcipher_walk_virt(&walk, req, true);
+	err = skcipher_walk_virt(&walk, req, false);
 
-	kernel_neon_begin();
-	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
+	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
+		kernel_neon_begin();
 		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
-				first);
+				(u8 *)ctx->key_dec, rounds, blocks, walk.iv);
+		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	kernel_neon_end();
 	return err;
 }
 
@@ -215,20 +213,18 @@ static int ctr_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
-	int err, first, rounds = 6 + ctx->key_length / 4;
+	int err, rounds = 6 + ctx->key_length / 4;
 	struct skcipher_walk walk;
 	int blocks;
 
-	err = skcipher_walk_virt(&walk, req, true);
+	err = skcipher_walk_virt(&walk, req, false);
 
-	first = 1;
-	kernel_neon_begin();
 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
+		kernel_neon_begin();
 		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
-				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
-				first);
+				(u8 *)ctx->key_enc, rounds, blocks, walk.iv);
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
-		first = 0;
+		kernel_neon_end();
 	}
 	if (walk.nbytes) {
 		u8 __aligned(8) tail[AES_BLOCK_SIZE];
@@ -241,12 +237,13 @@ static int ctr_encrypt(struct skcipher_request *req)
 		 */
 		blocks = -1;
 
+		kernel_neon_begin();
 		aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
-				blocks, walk.iv, first);
+				blocks, walk.iv);
+		kernel_neon_end();
 		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
 		err = skcipher_walk_done(&walk, 0);
 	}
-	kernel_neon_end();
 
 	return err;
 }
@@ -270,16 +267,16 @@ static int xts_encrypt(struct skcipher_request *req)
 	struct skcipher_walk walk;
 	unsigned int blocks;
 
-	err = skcipher_walk_virt(&walk, req, true);
+	err = skcipher_walk_virt(&walk, req, false);
 
-	kernel_neon_begin();
 	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
+		kernel_neon_begin();
 		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key1.key_enc, rounds, blocks,
 				(u8 *)ctx->key2.key_enc, walk.iv, first);
+		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	kernel_neon_end();
 
 	return err;
 }
@@ -292,16 +289,16 @@ static int xts_decrypt(struct skcipher_request *req)
 	struct skcipher_walk walk;
 	unsigned int blocks;
 
-	err = skcipher_walk_virt(&walk, req, true);
+	err = skcipher_walk_virt(&walk, req, false);
 
-	kernel_neon_begin();
 	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
+		kernel_neon_begin();
 		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				(u8 *)ctx->key1.key_dec, rounds, blocks,
 				(u8 *)ctx->key2.key_enc, walk.iv, first);
+		kernel_neon_end();
 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
 	}
-	kernel_neon_end();
 
 	return err;
 }
@@ -425,7 +422,7 @@ static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
 
 	/* encrypt the zero vector */
 	kernel_neon_begin();
-	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1, 1);
+	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1);
 	kernel_neon_end();
 
 	cmac_gf128_mul_by_x(consts, consts);
@@ -454,8 +451,8 @@ static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
 		return err;
 
 	kernel_neon_begin();
-	aes_ecb_encrypt(key, ks[0], rk, rounds, 1, 1);
-	aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2, 0);
+	aes_ecb_encrypt(key, ks[0], rk, rounds, 1);
+	aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2);
 	kernel_neon_end();
 
 	return cbcmac_setkey(tfm, key, sizeof(key));