@@ -406,8 +406,8 @@ ssi_aead_handle_config_buf(struct device *dev,
 	sg_init_one(&areq_ctx->ccm_adata_sg, config_data, AES_BLOCK_SIZE + areq_ctx->ccm_hdr_size);
 	if (unlikely(dma_map_sg(dev, &areq_ctx->ccm_adata_sg, 1,
 				DMA_TO_DEVICE) != 1)) {
-                dev_err(dev, "dma_map_sg() config buffer failed\n");
-                return -ENOMEM;
+		dev_err(dev, "dma_map_sg() config buffer failed\n");
+		return -ENOMEM;
 	}
 	dev_dbg(dev, "Mapped curr_buff: dma_address=%pad page=%p addr=%pK offset=%u length=%u\n",
 		&sg_dma_address(&areq_ctx->ccm_adata_sg),
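
The hunk above changes only formatting; the logic it touches is the kernel's single-entry scatterlist mapping idiom: wrap one contiguous buffer with sg_init_one(), then hand it to dma_map_sg(), which returns the number of entries it actually mapped, so anything other than 1 for a one-entry list is a failure. A minimal sketch of that idiom under stated assumptions (the helper name map_one_buf and its parameters are illustrative, not part of the driver):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Illustrative helper, not from the driver: map one buffer for device reads. */
static int map_one_buf(struct device *dev, struct scatterlist *sg,
		       void *buf, unsigned int len)
{
	/* Describe the single contiguous buffer as a one-entry scatterlist. */
	sg_init_one(sg, buf, len);

	/* dma_map_sg() returns the number of entries mapped; 0 means failure. */
	if (dma_map_sg(dev, sg, 1, DMA_TO_DEVICE) != 1) {
		dev_err(dev, "dma_map_sg() failed\n");
		return -ENOMEM;
	}

	return 0;
}
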
@@ -435,8 +435,8 @@ static inline int ssi_ahash_handle_curr_buf(struct device *dev,
 	sg_init_one(areq_ctx->buff_sg, curr_buff, curr_buff_cnt);
 	if (unlikely(dma_map_sg(dev, areq_ctx->buff_sg, 1,
 				DMA_TO_DEVICE) != 1)) {
-                dev_err(dev, "dma_map_sg() src buffer failed\n");
-                return -ENOMEM;
+		dev_err(dev, "dma_map_sg() src buffer failed\n");
+		return -ENOMEM;
 	}
 	dev_dbg(dev, "Mapped curr_buff: dma_address=%pad page=%p addr=%pK offset=%u length=%u\n",
 		&sg_dma_address(areq_ctx->buff_sg), sg_page(areq_ctx->buff_sg),
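
The hash-path hunk is the same formatting-only change over the same idiom, with curr_buff as the mapped buffer. One rule worth keeping in view around this pattern: a successful dma_map_sg() must later be undone with dma_unmap_sg() using the same direction and the nents value originally passed in, not the count dma_map_sg() returned. The driver's actual unmap site is outside this diff; a sketch of what the completion path is expected to do:

	/* Undo the mapping: pass the original nents (1), not the return value. */
	dma_unmap_sg(dev, areq_ctx->buff_sg, 1, DMA_TO_DEVICE);
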
@@ -1029,12 +1029,12 @@ static inline int ssi_buffer_mgr_prepare_aead_data_mlli(
 	 * verification is made by CPU compare in order to simplify
 	 * MAC verification upon request completion
 	 */
-        u32 size_to_skip = req->assoclen;
+	u32 size_to_skip = req->assoclen;
 
-        if (areq_ctx->is_gcm4543)
-                size_to_skip += crypto_aead_ivsize(tfm);
+	if (areq_ctx->is_gcm4543)
+		size_to_skip += crypto_aead_ivsize(tfm);
 
-        ssi_buffer_mgr_copy_scatterlist_portion(
+	ssi_buffer_mgr_copy_scatterlist_portion(
 		dev, areq_ctx->backup_mac, req->src,
 		size_to_skip + req->cryptlen - areq_ctx->req_authsize,
 		size_to_skip + req->cryptlen, SSI_SG_TO_BUF);
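
The third hunk is also indentation-only; the arithmetic it reindents decides which source bytes are saved into backup_mac so the MAC can be verified by a CPU compare when the request completes. The copy skips the associated data (plus the IV in the RFC 4543 / is_gcm4543 case, where the IV travels in the data buffer) and then takes exactly the last req_authsize bytes of the ciphertext. A worked example with illustrative numbers, not taken from the patch:

	/* assoclen = 16, ivsize = 8 (RFC 4543), cryptlen = 64, req_authsize = 16 */
	u32 size_to_skip = 16 + 8;	/* assoc data + IV to skip = 24        */
	u32 from = 24 + 64 - 16;	/* = 72: first byte of the MAC         */
	u32 to   = 24 + 64;		/* = 88: end of the source ciphertext  */
	/* The range [72, 88) is exactly req_authsize (16) bytes: the MAC. */
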