@@ -27,10 +27,10 @@
 
 #include <linux/module.h>
 #include <crypto/b128ops.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/lrw.h>
 #include <crypto/xts.h>
 #include <asm/crypto/glue_helper.h>
-#include <crypto/scatterwalk.h>
 
 static int __glue_ecb_crypt_128bit(const struct common_glue_ctx *gctx,
 				   struct blkcipher_desc *desc,
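This hunk drops the now-unneeded <crypto/scatterwalk.h> include and pulls in
<crypto/internal/skcipher.h> for the skcipher_walk interface
(skcipher_walk_virt()/skcipher_walk_done()) that the request-based helpers
added below are built on.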
@@ -339,6 +339,41 @@ done:
 	return nbytes;
 }
 
+static unsigned int __glue_xts_req_128bit(const struct common_glue_ctx *gctx,
+					  void *ctx,
+					  struct skcipher_walk *walk)
+{
+	const unsigned int bsize = 128 / 8;
+	unsigned int nbytes = walk->nbytes;
+	u128 *src = walk->src.virt.addr;
+	u128 *dst = walk->dst.virt.addr;
+	unsigned int num_blocks, func_bytes;
+	unsigned int i;
+
+	/* Process multi-block batch */
+	for (i = 0; i < gctx->num_funcs; i++) {
+		num_blocks = gctx->funcs[i].num_blocks;
+		func_bytes = bsize * num_blocks;
+
+		if (nbytes >= func_bytes) {
+			do {
+				gctx->funcs[i].fn_u.xts(ctx, dst, src,
+							walk->iv);
+
+				src += num_blocks;
+				dst += num_blocks;
+				nbytes -= func_bytes;
+			} while (nbytes >= func_bytes);
+
+			if (nbytes < bsize)
+				goto done;
+		}
+	}
+
+done:
+	return nbytes;
+}
+
 /* for implementations implementing faster XTS IV generator */
 int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
 			  struct blkcipher_desc *desc, struct scatterlist *dst,
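__glue_xts_req_128bit() walks the per-width function table in struct
common_glue_ctx, trying the widest (most parallel) implementation first and
falling back to narrower entries until fewer than bsize bytes remain, so the
entries are expected widest-first, as with the existing ECB/CBC/CTR glue.
For reference, a glue context for a hypothetical AVX cipher would look
roughly like the sketch below; my_xts_enc_8way and my_xts_enc_one stand in
for real assembler entry points and are not part of this patch.

static const struct common_glue_ctx my_enc_xts = {
	.num_funcs = 2,			/* entries in funcs[] below */
	.fpu_blocks_limit = 8,		/* FPU pays off at >= 8 blocks */

	.funcs = { {
		/* tried first: processes 8 blocks per call */
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(my_xts_enc_8way) }
	}, {
		/* fallback: one block at a time */
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(my_xts_enc_one) }
	} }
};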
@@ -379,6 +414,43 @@ int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
 }
 EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit);
 
+int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
+			struct skcipher_request *req,
+			common_glue_func_t tweak_fn, void *tweak_ctx,
+			void *crypt_ctx)
+{
+	const unsigned int bsize = 128 / 8;
+	struct skcipher_walk walk;
+	bool fpu_enabled = false;
+	unsigned int nbytes;
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, false);
+	nbytes = walk.nbytes;
+	if (!nbytes)
+		return err;
+
+	/* set minimum length to bsize, for tweak_fn */
+	fpu_enabled = glue_skwalk_fpu_begin(bsize, gctx->fpu_blocks_limit,
+					    &walk, fpu_enabled,
+					    nbytes < bsize ? bsize : nbytes);
+
+	/* calculate first value of T */
+	tweak_fn(tweak_ctx, walk.iv, walk.iv);
+
+	while (nbytes) {
+		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);
+
+		err = skcipher_walk_done(&walk, nbytes);
+		nbytes = walk.nbytes;
+	}
+
+	glue_fpu_end(fpu_enabled);
+
+	return err;
+}
+EXPORT_SYMBOL_GPL(glue_xts_req_128bit);
+
 void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
 			       common_glue_func_t fn)
 {
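With the blkcipher entry point above left in place, a driver converted to
the skcipher API can implement its encrypt hook as a thin wrapper around the
new glue_xts_req_128bit(). A minimal sketch, assuming a hypothetical struct
my_xts_ctx holding separate tweak and crypt cipher contexts and reusing the
my_enc_xts table sketched earlier (all my_* names are illustrative, not part
of this patch; XTS_TWEAK_CAST is the cast helper from <crypto/xts.h>):

static int my_xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* tweak_fn encrypts the IV once to form the first tweak T */
	return glue_xts_req_128bit(&my_enc_xts, req,
				   XTS_TWEAK_CAST(my_cipher_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx);
}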