@@ -70,6 +70,10 @@
 #define DESC_AEAD_DEC_LEN		(DESC_AEAD_BASE + 18 * CAAM_CMD_SZ)
 #define DESC_AEAD_GIVENC_LEN		(DESC_AEAD_ENC_LEN + 7 * CAAM_CMD_SZ)

+#define DESC_AEAD_NULL_BASE		(3 * CAAM_CMD_SZ)
+#define DESC_AEAD_NULL_ENC_LEN		(DESC_AEAD_NULL_BASE + 14 * CAAM_CMD_SZ)
+#define DESC_AEAD_NULL_DEC_LEN		(DESC_AEAD_NULL_BASE + 17 * CAAM_CMD_SZ)
+
 #define DESC_ABLKCIPHER_BASE		(3 * CAAM_CMD_SZ)
 #define DESC_ABLKCIPHER_ENC_LEN	(DESC_ABLKCIPHER_BASE + \
 					 20 * CAAM_CMD_SZ)
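The new DESC_AEAD_NULL_*_LEN bounds exist so the driver can decide whether the split HMAC key may be inlined into the shared descriptor. A minimal sketch of that decision, reusing the driver's existing DESC_JOB_IO_LEN and CAAM_DESC_BYTES_MAX limits; the helper name is illustrative only, the real check is open-coded in aead_null_set_sh_desc() below:

/*
 * Sketch only, not part of the patch: the inline-key check performed with
 * the new length bounds.  split_key_pad_len is the padded MDHA split key
 * length kept in struct caam_ctx.
 */
static bool null_enc_keys_fit_inline(unsigned int split_key_pad_len)
{
	/* worst-case shared descriptor + job descriptor I/O + inlined key */
	return DESC_AEAD_NULL_ENC_LEN + DESC_JOB_IO_LEN + split_key_pad_len
	       <= CAAM_DESC_BYTES_MAX;
}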
@@ -109,9 +113,9 @@ static inline void append_dec_op1(u32 *desc, u32 type)
  */
 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
 {
+	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
 			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
-	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
 }

 /*
@@ -200,6 +204,196 @@ static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
 	set_jump_tgt_here(desc, key_jump_cmd);
 }

+static int aead_null_set_sh_desc(struct crypto_aead *aead)
+{
+	struct aead_tfm *tfm = &aead->base.crt_aead;
+	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct device *jrdev = ctx->jrdev;
+	bool keys_fit_inline = false;
+	u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
+	u32 *desc;
+
+	/*
+	 * Job Descriptor and Shared Descriptors
+	 * must all fit into the 64-word Descriptor h/w Buffer
+	 */
+	if (DESC_AEAD_NULL_ENC_LEN + DESC_JOB_IO_LEN +
+	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
+		keys_fit_inline = true;
+
+	/* aead_encrypt shared descriptor */
+	desc = ctx->sh_desc_enc;
+
+	init_sh_desc(desc, HDR_SHARE_SERIAL);
+
+	/* Skip if already shared */
+	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+				   JUMP_COND_SHRD);
+	if (keys_fit_inline)
+		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
+				  ctx->split_key_len, CLASS_2 |
+				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
+	else
+		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
+			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
+	set_jump_tgt_here(desc, key_jump_cmd);
+
+	/* cryptlen = seqoutlen - authsize */
+	append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);
+
+	/*
+	 * NULL encryption; IV is zero
+	 * assoclen = (assoclen + cryptlen) - cryptlen
+	 */
+	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
+
+	/* read assoc before reading payload */
+	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
+			     KEY_VLF);
+
+	/* Prepare to read and write cryptlen bytes */
+	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
+	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
+
+	/*
+	 * MOVE_LEN opcode is not available in all SEC HW revisions,
+	 * thus need to do some magic, i.e. self-patch the descriptor
+	 * buffer.
+	 */
+	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
+				    MOVE_DEST_MATH3 |
+				    (0x6 << MOVE_LEN_SHIFT));
+	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
+				     MOVE_DEST_DESCBUF |
+				     MOVE_WAITCOMP |
+				     (0x8 << MOVE_LEN_SHIFT));
+
+	/* Class 2 operation */
+	append_operation(desc, ctx->class2_alg_type |
+			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
+
+	/* Read and write cryptlen bytes */
+	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
+
+	set_move_tgt_here(desc, read_move_cmd);
+	set_move_tgt_here(desc, write_move_cmd);
+	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
+	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
+		    MOVE_AUX_LS);
+
+	/* Write ICV */
+	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
+			 LDST_SRCDST_BYTE_CONTEXT);
+
+	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
+					      desc_bytes(desc),
+					      DMA_TO_DEVICE);
+	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
+		dev_err(jrdev, "unable to map shared descriptor\n");
+		return -ENOMEM;
+	}
+#ifdef DEBUG
+	print_hex_dump(KERN_ERR,
+		       "aead null enc shdesc@"__stringify(__LINE__)": ",
+		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
+		       desc_bytes(desc), 1);
+#endif
+
+	/*
+	 * Job Descriptor and Shared Descriptors
+	 * must all fit into the 64-word Descriptor h/w Buffer
+	 */
+	if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
+	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
+		keys_fit_inline = true;
+
+	desc = ctx->sh_desc_dec;
+
+	/* aead_decrypt shared descriptor */
+	init_sh_desc(desc, HDR_SHARE_SERIAL);
+
+	/* Skip if already shared */
+	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
+				   JUMP_COND_SHRD);
+	if (keys_fit_inline)
+		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
+				  ctx->split_key_len, CLASS_2 |
+				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
+	else
+		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
+			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
+	set_jump_tgt_here(desc, key_jump_cmd);
+
+	/* Class 2 operation */
+	append_operation(desc, ctx->class2_alg_type |
+			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
+
+	/* assoclen + cryptlen = seqinlen - ivsize - authsize */
+	append_math_sub_imm_u32(desc, REG3, SEQINLEN, IMM,
+				ctx->authsize + tfm->ivsize);
+	/* assoclen = (assoclen + cryptlen) - cryptlen */
+	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
+	append_math_sub(desc, VARSEQINLEN, REG3, REG2, CAAM_CMD_SZ);
+
+	/* read assoc before reading payload */
+	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
+			     KEY_VLF);
+
+	/* Prepare to read and write cryptlen bytes */
+	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
+	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
+
+	/*
+	 * MOVE_LEN opcode is not available in all SEC HW revisions,
+	 * thus need to do some magic, i.e. self-patch the descriptor
+	 * buffer.
+	 */
+	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
+				    MOVE_DEST_MATH2 |
+				    (0x6 << MOVE_LEN_SHIFT));
+	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
+				     MOVE_DEST_DESCBUF |
+				     MOVE_WAITCOMP |
+				     (0x8 << MOVE_LEN_SHIFT));
+
+	/* Read and write cryptlen bytes */
+	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
+
+	/*
+	 * Insert a NOP here, since we need at least 4 instructions between
+	 * code patching the descriptor buffer and the location being patched.
+	 */
+	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
+	set_jump_tgt_here(desc, jump_cmd);
+
+	set_move_tgt_here(desc, read_move_cmd);
+	set_move_tgt_here(desc, write_move_cmd);
+	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
+	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
+		    MOVE_AUX_LS);
+	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
+
+	/* Load ICV */
+	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
+			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
+
+	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
+					      desc_bytes(desc),
+					      DMA_TO_DEVICE);
+	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
+		dev_err(jrdev, "unable to map shared descriptor\n");
+		return -ENOMEM;
+	}
+#ifdef DEBUG
+	print_hex_dump(KERN_ERR,
+		       "aead null dec shdesc@"__stringify(__LINE__)": ",
+		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
+		       desc_bytes(desc), 1);
+#endif
+
+	return 0;
+}
+
 static int aead_set_sh_desc(struct crypto_aead *aead)
 {
 	struct aead_tfm *tfm = &aead->base.crt_aead;
@@ -209,9 +403,13 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
 	u32 geniv, moveiv;
 	u32 *desc;

-	if (!ctx->enckeylen || !ctx->authsize)
+	if (!ctx->authsize)
 		return 0;

+	/* NULL encryption / decryption */
+	if (!ctx->enckeylen)
+		return aead_null_set_sh_desc(aead);
+
 	/*
 	 * Job Descriptor and Shared Descriptors
 	 * must all fit into the 64-word Descriptor h/w Buffer
@@ -290,7 +488,7 @@ static int aead_set_sh_desc(struct crypto_aead *aead)

 	/* assoclen + cryptlen = seqinlen - ivsize - authsize */
 	append_math_sub_imm_u32(desc, REG3, SEQINLEN, IMM,
-				ctx->authsize + tfm->ivsize)
+				ctx->authsize + tfm->ivsize);
 	/* assoclen = (assoclen + cryptlen) - cryptlen */
 	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
 	append_math_sub(desc, VARSEQINLEN, REG3, REG2, CAAM_CMD_SZ);
@@ -1419,6 +1617,11 @@ static int aead_givencrypt(struct aead_givcrypt_request *areq)
 	return ret;
 }

+static int aead_null_givencrypt(struct aead_givcrypt_request *areq)
+{
+	return aead_encrypt(&areq->areq);
+}
+
 /*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 */
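Since cipher_null takes no IV, aead_null_givencrypt() has nothing to generate and simply forwards to aead_encrypt(). A hedged usage sketch of the transforms registered by the hunk below, using the AEAD API of this kernel generation; the function name and the 12-byte ICV value are assumptions for illustration, not part of the patch:

/*
 * Illustrative sketch: allocating one of the new null-encryption AEAD
 * transforms through the crypto API.  Key setup (authenc() key blob) and
 * request submission would follow and are omitted here.
 */
#include <linux/crypto.h>
#include <linux/err.h>

static int try_caam_null_aead(void)
{
	struct crypto_aead *tfm;
	int err;

	tfm = crypto_alloc_aead("authenc(hmac(sha1),ecb(cipher_null))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* truncate the HMAC-SHA1 digest to a 12-byte ICV, as ESP commonly does */
	err = crypto_aead_setauthsize(tfm, 12);

	crypto_free_aead(tfm);
	return err;
}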
@@ -1607,6 +1810,124 @@ struct caam_alg_template {

 static struct caam_alg_template driver_algs[] = {
 	/* single-pass ipsec_esp descriptor */
+	{
+		.name = "authenc(hmac(md5),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-md5-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = MD5_DIGEST_SIZE,
+		},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha1),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha1-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA1_DIGEST_SIZE,
+		},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha224),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha224-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA224_DIGEST_SIZE,
+		},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha256),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha256-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA256_DIGEST_SIZE,
+		},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha384),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha384-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA384_DIGEST_SIZE,
+		},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
+	},
+	{
+		.name = "authenc(hmac(sha512),ecb(cipher_null))",
+		.driver_name = "authenc-hmac-sha512-ecb-cipher_null-caam",
+		.blocksize = NULL_BLOCK_SIZE,
+		.type = CRYPTO_ALG_TYPE_AEAD,
+		.template_aead = {
+			.setkey = aead_setkey,
+			.setauthsize = aead_setauthsize,
+			.encrypt = aead_encrypt,
+			.decrypt = aead_decrypt,
+			.givencrypt = aead_null_givencrypt,
+			.geniv = "<built-in>",
+			.ivsize = NULL_IV_SIZE,
+			.maxauthsize = SHA512_DIGEST_SIZE,
+		},
+		.class1_alg_type = 0,
+		.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
+				   OP_ALG_AAI_HMAC_PRECOMP,
+		.alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
+	},
 	{
 		.name = "authenc(hmac(md5),cbc(aes))",
 		.driver_name = "authenc-hmac-md5-cbc-aes-caam",