@@ -32,6 +32,7 @@ struct safexcel_ahash_req {
 	bool last_req;
 	bool finish;
 	bool hmac;
+	bool needs_inv;
 
 	u8 state_sz;    /* expected sate size, only set once */
 	u32 state[SHA256_DIGEST_SIZE / sizeof(u32)];
@@ -119,9 +120,9 @@ static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
 	}
 }
 
-static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
-				  struct crypto_async_request *async,
-				  bool *should_complete, int *ret)
+static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
+				      struct crypto_async_request *async,
+				      bool *should_complete, int *ret)
 {
 	struct safexcel_result_desc *rdesc;
 	struct ahash_request *areq = ahash_request_cast(async);
@@ -165,9 +166,9 @@ static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
 	return 1;
 }
 
-static int safexcel_ahash_send(struct crypto_async_request *async, int ring,
-			       struct safexcel_request *request, int *commands,
-			       int *results)
+static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
+				   struct safexcel_request *request,
+				   int *commands, int *results)
 {
 	struct ahash_request *areq = ahash_request_cast(async);
 	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
@@ -292,7 +293,6 @@ send_command:
 
 	req->processed += len;
 	request->req = &areq->base;
-	ctx->base.handle_result = safexcel_handle_result;
 
 	*commands = n_cdesc;
 	*results = 1;
@@ -374,8 +374,6 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 
 	ring = safexcel_select_ring(priv);
 	ctx->base.ring = ring;
-	ctx->base.needs_inv = false;
-	ctx->base.send = safexcel_ahash_send;
 
 	spin_lock_bh(&priv->ring[ring].queue_lock);
 	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
@@ -392,6 +390,26 @@ static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 	return 1;
 }
 
+static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
+				  struct crypto_async_request *async,
+				  bool *should_complete, int *ret)
+{
+	struct ahash_request *areq = ahash_request_cast(async);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+	int err;
+
+	if (req->needs_inv) {
+		req->needs_inv = false;
+		err = safexcel_handle_inv_result(priv, ring, async,
+						 should_complete, ret);
+	} else {
+		err = safexcel_handle_req_result(priv, ring, async,
+						 should_complete, ret);
+	}
+
+	return err;
+}
+
 static int safexcel_ahash_send_inv(struct crypto_async_request *async,
 				   int ring, struct safexcel_request *request,
 				   int *commands, int *results)
@@ -400,7 +418,6 @@ static int safexcel_ahash_send_inv(struct crypto_async_request *async,
 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 	int ret;
 
-	ctx->base.handle_result = safexcel_handle_inv_result;
 	ret = safexcel_invalidate_cache(async, &ctx->base, ctx->priv,
 					ctx->base.ctxr_dma, ring, request);
 	if (unlikely(ret))
@@ -412,11 +429,29 @@ static int safexcel_ahash_send_inv(struct crypto_async_request *async,
 	return 0;
 }
 
+static int safexcel_ahash_send(struct crypto_async_request *async,
+			       int ring, struct safexcel_request *request,
+			       int *commands, int *results)
+{
+	struct ahash_request *areq = ahash_request_cast(async);
+	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
+	int ret;
+
+	if (req->needs_inv)
+		ret = safexcel_ahash_send_inv(async, ring, request,
+					      commands, results);
+	else
+		ret = safexcel_ahash_send_req(async, ring, request,
+					      commands, results);
+	return ret;
+}
+
 static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
 {
 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	struct ahash_request req;
+	struct safexcel_ahash_req *rctx = ahash_request_ctx(&req);
 	struct safexcel_inv_result result = {};
 	int ring = ctx->base.ring;
 
@@ -430,7 +465,7 @@ static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
 	ahash_request_set_tfm(&req, __crypto_ahash_cast(tfm));
 	ctx = crypto_tfm_ctx(req.base.tfm);
 	ctx->base.exit_inv = true;
-	ctx->base.send = safexcel_ahash_send_inv;
+	rctx->needs_inv = true;
 
 	spin_lock_bh(&priv->ring[ring].queue_lock);
 	crypto_enqueue_request(&priv->ring[ring].queue, &req.base);
@@ -481,14 +516,16 @@ static int safexcel_ahash_enqueue(struct ahash_request *areq)
 	struct safexcel_crypto_priv *priv = ctx->priv;
 	int ret, ring;
 
-	ctx->base.send = safexcel_ahash_send;
+	req->needs_inv = false;
 
 	if (req->processed && ctx->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
		ctx->base.needs_inv = safexcel_ahash_needs_inv_get(areq);
 
 	if (ctx->base.ctxr) {
-		if (ctx->base.needs_inv)
-			ctx->base.send = safexcel_ahash_send_inv;
+		if (ctx->base.needs_inv) {
+			ctx->base.needs_inv = false;
+			req->needs_inv = true;
+		}
 	} else {
 		ctx->base.ring = safexcel_select_ring(priv);
 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
@@ -622,6 +659,8 @@ static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
 					 struct safexcel_alg_template, alg.ahash);
 
 	ctx->priv = tmpl->priv;
+	ctx->base.send = safexcel_ahash_send;
+	ctx->base.handle_result = safexcel_handle_result;
 
 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
 				 sizeof(struct safexcel_ahash_req));
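
Taken together, these hunks replace per-operation rewriting of the shared context's .send/.handle_result pointers with a per-request needs_inv flag: both handlers are installed once in safexcel_ahash_cra_init() and dispatch per request, so requests already sitting in a ring queue can no longer be routed through the wrong handler. A minimal standalone sketch of that dispatch pattern follows; it is illustrative only (all names are invented for the sketch), not driver code:

/*
 * Minimal sketch of the per-request dispatch pattern introduced above:
 * one handler is installed once, and each request carries its own
 * needs_inv flag, mirroring safexcel_ahash_send() in the patch.
 */
#include <stdbool.h>
#include <stdio.h>

struct sketch_req {
	bool needs_inv;		/* set per request, never on shared state */
};

static int send_inv(struct sketch_req *req)
{
	(void)req;		/* unused in this sketch */
	printf("sending invalidation command\n");
	return 0;
}

static int send_data(struct sketch_req *req)
{
	(void)req;		/* unused in this sketch */
	printf("sending regular hash request\n");
	return 0;
}

/* Installed once at init time, like ctx->base.send in the patch. */
static int send_dispatch(struct sketch_req *req)
{
	return req->needs_inv ? send_inv(req) : send_data(req);
}

int main(void)
{
	struct sketch_req normal = { .needs_inv = false };
	struct sketch_req inval = { .needs_inv = true };

	send_dispatch(&normal);	/* regular path */
	send_dispatch(&inval);	/* invalidation path */
	return 0;
}

Keeping the dispatch decision in the request rather than in the transform context is what makes the pattern safe under concurrent queuing: each queued request carries its own flag, so flipping state for one request cannot redirect another.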