@@ -653,7 +653,7 @@ static void qat_alg_free_bufl(struct qat_crypto_instance *inst,
 }
 
 static int qat_alg_sgl_to_bufl(struct qat_crypto_instance *inst,
-			       struct scatterlist *assoc,
+			       struct scatterlist *assoc, int assoclen,
 			       struct scatterlist *sgl,
 			       struct scatterlist *sglout, uint8_t *iv,
 			       uint8_t ivlen,
@@ -685,15 +685,21 @@ static int qat_alg_sgl_to_bufl(struct qat_crypto_instance *inst,
 	for_each_sg(assoc, sg, assoc_n, i) {
 		if (!sg->length)
 			continue;
-		bufl->bufers[bufs].addr = dma_map_single(dev,
-						sg_virt(sg),
-						sg->length,
-						DMA_BIDIRECTIONAL);
-		bufl->bufers[bufs].len = sg->length;
+
+		if (!(assoclen > 0))
+			break;
+
+		bufl->bufers[bufs].addr =
+			dma_map_single(dev, sg_virt(sg),
+				       min_t(int, assoclen, sg->length),
+				       DMA_BIDIRECTIONAL);
+		bufl->bufers[bufs].len = min_t(int, assoclen, sg->length);
 		if (unlikely(dma_mapping_error(dev, bufl->bufers[bufs].addr)))
 			goto err;
 		bufs++;
+		assoclen -= sg->length;
 	}
+
 	if (ivlen) {
 		bufl->bufers[bufs].addr = dma_map_single(dev, iv, ivlen,
 						 DMA_BIDIRECTIONAL);
@@ -845,8 +851,9 @@ static int qat_alg_aead_dec(struct aead_request *areq)
 	int digst_size = crypto_aead_crt(aead_tfm)->authsize;
 	int ret, ctr = 0;
 
-	ret = qat_alg_sgl_to_bufl(ctx->inst, areq->assoc, areq->src, areq->dst,
-				  areq->iv, AES_BLOCK_SIZE, qat_req);
+	ret = qat_alg_sgl_to_bufl(ctx->inst, areq->assoc, areq->assoclen,
+				  areq->src, areq->dst, areq->iv,
+				  AES_BLOCK_SIZE, qat_req);
 	if (unlikely(ret))
 		return ret;
 
@@ -889,8 +896,9 @@ static int qat_alg_aead_enc_internal(struct aead_request *areq, uint8_t *iv,
 	struct icp_qat_fw_la_bulk_req *msg;
 	int ret, ctr = 0;
 
-	ret = qat_alg_sgl_to_bufl(ctx->inst, areq->assoc, areq->src, areq->dst,
-				  iv, AES_BLOCK_SIZE, qat_req);
+	ret = qat_alg_sgl_to_bufl(ctx->inst, areq->assoc, areq->assoclen,
+				  areq->src, areq->dst, iv, AES_BLOCK_SIZE,
+				  qat_req);
 	if (unlikely(ret))
 		return ret;
 
@@ -1017,7 +1025,7 @@ static int qat_alg_ablkcipher_encrypt(struct ablkcipher_request *req)
 	struct icp_qat_fw_la_bulk_req *msg;
 	int ret, ctr = 0;
 
-	ret = qat_alg_sgl_to_bufl(ctx->inst, NULL, req->src, req->dst,
+	ret = qat_alg_sgl_to_bufl(ctx->inst, NULL, 0, req->src, req->dst,
 				  NULL, 0, qat_req);
 	if (unlikely(ret))
 		return ret;
@@ -1055,7 +1063,7 @@ static int qat_alg_ablkcipher_decrypt(struct ablkcipher_request *req)
 	struct icp_qat_fw_la_bulk_req *msg;
 	int ret, ctr = 0;
 
-	ret = qat_alg_sgl_to_bufl(ctx->inst, NULL, req->src, req->dst,
+	ret = qat_alg_sgl_to_bufl(ctx->inst, NULL, 0, req->src, req->dst,
 				  NULL, 0, qat_req);
 	if (unlikely(ret))
 		return ret;