@@ -124,7 +124,7 @@ static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
 	ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, &macp, ctx->key_enc,
 			     num_rounds(ctx));
 
-	scatterwalk_start(&walk, req->assoc);
+	scatterwalk_start(&walk, req->src);
 
 	do {
 		u32 n = scatterwalk_clamp(&walk, len);
@@ -151,6 +151,10 @@ static int ccm_encrypt(struct aead_request *req)
 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
 	struct blkcipher_desc desc = { .info = req->iv };
 	struct blkcipher_walk walk;
+	struct scatterlist srcbuf[2];
+	struct scatterlist dstbuf[2];
+	struct scatterlist *src;
+	struct scatterlist *dst;
 	u8 __aligned(8) mac[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
 	u32 len = req->cryptlen;
@@ -168,7 +172,12 @@ static int ccm_encrypt(struct aead_request *req)
 	/* preserve the original iv for the final round */
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
-	blkcipher_walk_init(&walk, req->dst, req->src, len);
+	src = scatterwalk_ffwd(srcbuf, req->src, req->assoclen);
+	dst = src;
+	if (req->src != req->dst)
+		dst = scatterwalk_ffwd(dstbuf, req->dst, req->assoclen);
+
+	blkcipher_walk_init(&walk, dst, src, len);
 	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
 					     AES_BLOCK_SIZE);
 
@@ -194,7 +203,7 @@ static int ccm_encrypt(struct aead_request *req)
 		return err;
 
 	/* copy authtag to end of dst */
-	scatterwalk_map_and_copy(mac, req->dst, req->cryptlen,
+	scatterwalk_map_and_copy(mac, dst, req->cryptlen,
 				 crypto_aead_authsize(aead), 1);
 
 	return 0;
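
Note on the hunks above: the new AEAD interface carries the associated data in the same scatterlist as the payload, i.e. req->src (and req->dst) start with req->assoclen bytes of AAD followed by the plaintext/ciphertext. That is why the MAC walk now starts at req->src and the cipher walk fast-forwards past the AAD with scatterwalk_ffwd(); the two-entry srcbuf/dstbuf arrays are the scratch scatterlists scatterwalk_ffwd() may use to describe the forwarded region. Below is a minimal caller sketch against the generic AEAD API showing that layout; it assumes synchronous use, elides proper CCM nonce formatting, and all buffer/variable names are illustrative only, not taken from this patch.

#include <linux/err.h>
#include <linux/scatterlist.h>
#include <crypto/aead.h>

/*
 * Hypothetical helper: buf holds [ AAD | plaintext | room for 16-byte tag ]
 * back to back, which is the layout the new AEAD interface expects.
 */
static int ccm_encrypt_example(u8 *buf, unsigned int assoclen,
			       unsigned int ptlen,
			       const u8 *key, unsigned int keylen)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	u8 iv[16] = { 0 };		/* CCM flags + nonce in a real caller */
	int err;

	tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;
	err = crypto_aead_setauthsize(tfm, 16);
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* AAD, payload and tag all live in the same scatterlist */
	sg_init_one(&sg, buf, assoclen + ptlen + 16);

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
	aead_request_set_ad(req, assoclen);	/* becomes req->assoclen above */

	/* synchronous use assumed; an async tfm may return -EINPROGRESS */
	err = crypto_aead_encrypt(req);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}
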
@@ -207,6 +216,10 @@ static int ccm_decrypt(struct aead_request *req)
 	unsigned int authsize = crypto_aead_authsize(aead);
 	struct blkcipher_desc desc = { .info = req->iv };
 	struct blkcipher_walk walk;
+	struct scatterlist srcbuf[2];
+	struct scatterlist dstbuf[2];
+	struct scatterlist *src;
+	struct scatterlist *dst;
 	u8 __aligned(8) mac[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
 	u32 len = req->cryptlen - authsize;
@@ -224,7 +237,12 @@ static int ccm_decrypt(struct aead_request *req)
 	/* preserve the original iv for the final round */
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
-	blkcipher_walk_init(&walk, req->dst, req->src, len);
+	src = scatterwalk_ffwd(srcbuf, req->src, req->assoclen);
+	dst = src;
+	if (req->src != req->dst)
+		dst = scatterwalk_ffwd(dstbuf, req->dst, req->assoclen);
+
+	blkcipher_walk_init(&walk, dst, src, len);
 	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
 					     AES_BLOCK_SIZE);
 
@@ -250,44 +268,43 @@ static int ccm_decrypt(struct aead_request *req)
 		return err;
 
 	/* compare calculated auth tag with the stored one */
-	scatterwalk_map_and_copy(buf, req->src, req->cryptlen - authsize,
+	scatterwalk_map_and_copy(buf, src, req->cryptlen - authsize,
 				 authsize, 0);
 
-	if (memcmp(mac, buf, authsize))
+	if (crypto_memneq(mac, buf, authsize))
 		return -EBADMSG;
 	return 0;
 }
 
-static struct crypto_alg ccm_aes_alg = {
-	.cra_name		= "ccm(aes)",
-	.cra_driver_name	= "ccm-aes-ce",
-	.cra_priority		= 300,
-	.cra_flags		= CRYPTO_ALG_TYPE_AEAD,
-	.cra_blocksize		= 1,
-	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
-	.cra_alignmask		= 7,
-	.cra_type		= &crypto_aead_type,
-	.cra_module		= THIS_MODULE,
-	.cra_aead = {
-		.ivsize		= AES_BLOCK_SIZE,
-		.maxauthsize	= AES_BLOCK_SIZE,
-		.setkey		= ccm_setkey,
-		.setauthsize	= ccm_setauthsize,
-		.encrypt	= ccm_encrypt,
-		.decrypt	= ccm_decrypt,
-	}
+static struct aead_alg ccm_aes_alg = {
+	.base = {
+		.cra_name		= "ccm(aes)",
+		.cra_driver_name	= "ccm-aes-ce",
+		.cra_flags		= CRYPTO_ALG_AEAD_NEW,
+		.cra_priority		= 300,
+		.cra_blocksize		= 1,
+		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
+		.cra_alignmask		= 7,
+		.cra_module		= THIS_MODULE,
+	},
+	.ivsize		= AES_BLOCK_SIZE,
+	.maxauthsize	= AES_BLOCK_SIZE,
+	.setkey		= ccm_setkey,
+	.setauthsize	= ccm_setauthsize,
+	.encrypt	= ccm_encrypt,
+	.decrypt	= ccm_decrypt,
 };
 
 static int __init aes_mod_init(void)
 {
 	if (!(elf_hwcap & HWCAP_AES))
 		return -ENODEV;
-	return crypto_register_alg(&ccm_aes_alg);
+	return crypto_register_aead(&ccm_aes_alg);
 }
 
 static void __exit aes_mod_exit(void)
 {
-	crypto_unregister_alg(&ccm_aes_alg);
+	crypto_unregister_aead(&ccm_aes_alg);
 }
 
 module_init(aes_mod_init);
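
Note on the tag check in ccm_decrypt(): switching from memcmp() to crypto_memneq() removes the data-dependent early exit, so verifying the authentication tag takes the same time whether the tags differ in the first byte or the last, and the result only says equal/not-equal. A rough sketch of the idea follows; it is illustrative only and not the kernel's actual implementation (which lives in crypto/memneq.c and additionally uses word-sized accesses and compiler barriers).

/* Illustrative only: accumulate the XOR of every byte, never return early. */
static unsigned long memneq_sketch(const void *a, const void *b, size_t n)
{
	const unsigned char *pa = a, *pb = b;
	unsigned long neq = 0;

	while (n--)
		neq |= *pa++ ^ *pb++;	/* stays zero only if all bytes match */

	return neq;			/* 0 == equal, non-zero == different */
}
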