@@ -107,11 +107,13 @@ static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
 }
 
 static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
-			   u32 abytes, u32 *macp, bool use_neon)
+			   u32 abytes, u32 *macp)
 {
-	if (likely(use_neon)) {
+	if (may_use_simd()) {
+		kernel_neon_begin();
 		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
 				     num_rounds(key));
+		kernel_neon_end();
 	} else {
 		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
 			int added = min(abytes, AES_BLOCK_SIZE - *macp);
@@ -143,8 +145,7 @@ static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
 	}
 }
 
-static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
-				   bool use_neon)
+static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
@@ -163,7 +164,7 @@ static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
 		ltag.len = 6;
 	}
 
-	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp, use_neon);
+	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
 	scatterwalk_start(&walk, req->src);
 
 	do {
@@ -175,7 +176,7 @@ static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
 			n = scatterwalk_clamp(&walk, len);
 		}
 		p = scatterwalk_map(&walk);
-		ccm_update_mac(ctx, mac, p, n, &macp, use_neon);
+		ccm_update_mac(ctx, mac, p, n, &macp);
 		len -= n;
 
 		scatterwalk_unmap(p);
@@ -242,43 +243,42 @@ static int ccm_encrypt(struct aead_request *req)
 	u8 __aligned(8) mac[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
 	u32 len = req->cryptlen;
-	bool use_neon = may_use_simd();
 	int err;
 
 	err = ccm_init_mac(req, mac, len);
 	if (err)
 		return err;
 
-	if (likely(use_neon))
-		kernel_neon_begin();
-
 	if (req->assoclen)
-		ccm_calculate_auth_mac(req, mac, use_neon);
+		ccm_calculate_auth_mac(req, mac);
 
 	/* preserve the original iv for the final round */
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
 	err = skcipher_walk_aead_encrypt(&walk, req, true);
 
-	if (likely(use_neon)) {
+	if (may_use_simd()) {
 		while (walk.nbytes) {
 			u32 tail = walk.nbytes % AES_BLOCK_SIZE;
 
 			if (walk.nbytes == walk.total)
 				tail = 0;
 
+			kernel_neon_begin();
 			ce_aes_ccm_encrypt(walk.dst.virt.addr,
 					   walk.src.virt.addr,
 					   walk.nbytes - tail, ctx->key_enc,
 					   num_rounds(ctx), mac, walk.iv);
+			kernel_neon_end();
 
 			err = skcipher_walk_done(&walk, tail);
 		}
-		if (!err)
+		if (!err) {
+			kernel_neon_begin();
 			ce_aes_ccm_final(mac, buf, ctx->key_enc,
 					 num_rounds(ctx));
-
-		kernel_neon_end();
+			kernel_neon_end();
+		}
 	} else {
 		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
 	}
@@ -301,43 +301,42 @@ static int ccm_decrypt(struct aead_request *req)
 	u8 __aligned(8) mac[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
 	u32 len = req->cryptlen - authsize;
-	bool use_neon = may_use_simd();
 	int err;
 
 	err = ccm_init_mac(req, mac, len);
 	if (err)
 		return err;
 
-	if (likely(use_neon))
-		kernel_neon_begin();
-
 	if (req->assoclen)
-		ccm_calculate_auth_mac(req, mac, use_neon);
+		ccm_calculate_auth_mac(req, mac);
 
 	/* preserve the original iv for the final round */
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
 	err = skcipher_walk_aead_decrypt(&walk, req, true);
 
-	if (likely(use_neon)) {
+	if (may_use_simd()) {
 		while (walk.nbytes) {
 			u32 tail = walk.nbytes % AES_BLOCK_SIZE;
 
 			if (walk.nbytes == walk.total)
 				tail = 0;
 
+			kernel_neon_begin();
 			ce_aes_ccm_decrypt(walk.dst.virt.addr,
 					   walk.src.virt.addr,
 					   walk.nbytes - tail, ctx->key_enc,
 					   num_rounds(ctx), mac, walk.iv);
+			kernel_neon_end();
 
 			err = skcipher_walk_done(&walk, tail);
 		}
-		if (!err)
+		if (!err) {
+			kernel_neon_begin();
 			ce_aes_ccm_final(mac, buf, ctx->key_enc,
 					 num_rounds(ctx));
-
-		kernel_neon_end();
+			kernel_neon_end();
+		}
 	} else {
 		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
 	}
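
The structure the hunks above converge on can be sketched in isolation as follows. This is a minimal illustration under stated assumptions, not the driver code: simd_chunk(), scalar_chunk(), crypt_all() and CHUNK are hypothetical stand-ins for the Crypto Extensions asm routines, the scalar fallback and the per-iteration bound. The point is that kernel_neon_begin()/kernel_neon_end() now bracket each bounded chunk of work rather than the whole request, so preemption is only held off for one chunk at a time, and may_use_simd() is consulted where the work is actually done.

#include <linux/types.h>	/* u8, u32 */
#include <linux/kernel.h>	/* min() */
#include <asm/neon.h>		/* kernel_neon_begin(), kernel_neon_end() */
#include <asm/simd.h>		/* may_use_simd() */

#define CHUNK	4096		/* assumed per-iteration bound */

/* Hypothetical helpers standing in for the CE asm and the scalar fallback. */
void simd_chunk(u8 *dst, u8 const *src, u32 n);
void scalar_chunk(u8 *dst, u8 const *src, u32 n);

static void crypt_all(u8 *dst, u8 const *src, u32 len)
{
	while (len) {
		u32 n = min(len, (u32)CHUNK);

		if (may_use_simd()) {
			/* claim the NEON unit only for this chunk */
			kernel_neon_begin();
			simd_chunk(dst, src, n);
			kernel_neon_end();
		} else {
			/* NEON not usable here (e.g. softirq context) */
			scalar_chunk(dst, src, n);
		}

		dst += n;
		src += n;
		len -= n;
	}
}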