/* XTS: as defined in IEEE1619/D16
 *	http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *	(sector sizes which are not a multiple of 16 bytes are,
 *	however, currently unsupported)
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

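/*
 * Per-transform context: 'child' is the underlying ecb(cipher) skcipher
 * that does the bulk encryption with Key1; 'tweak' is a single-block
 * cipher keyed with Key2, used only to encrypt the IV into the initial
 * tweak value.
 */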
struct priv {
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
};

struct xts_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	char name[CRYPTO_MAX_ALG_NAME];
};

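/*
 * Per-request context: 't' holds the first tweak value for this request
 * (derived from the IV in init_crypt()); 'subreq' is the request handed
 * down to the ecb(cipher) child, which is why init_tfm() sets the request
 * size to the child's reqsize plus sizeof(struct rctx).
 */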
struct rctx {
	le128 t;
	struct skcipher_request subreq;
};

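/*
 * The supplied key is the IEEE 1619 Key1 || Key2 concatenation.
 * xts_verify_key() rejects odd lengths (and, in FIPS mode, identical key
 * halves); the per-cipher key-size checks happen in the child setkey
 * calls below.
 */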
static int setkey(struct crypto_skcipher *parent, const u8 *key,
		  unsigned int keylen)
{
	struct priv *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
	int err;

	err = xts_verify_key(parent, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	tweak = ctx->tweak;
	crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tweak, key + keylen, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(tweak) &
					  CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	/* data cipher, uses Key1 i.e. the first half of *key */
	child = ctx->child;
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}

/*
 * We compute the tweak masks twice (both before and after the ECB encryption
 * or decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower
 * than just doing the gf128mul_x_ble() calls again.
 */
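/*
 * Within each pass, the tweak for block i+1 is derived from the tweak for
 * block i by a multiplication by x in GF(2^128) (gf128mul_x_ble() on the
 * little-endian representation), as IEEE 1619 specifies.
 */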
static int xor_tweak(struct skcipher_request *req, bool second_pass)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	le128 t = rctx->t;
	int err;

	if (second_pass) {
		req = &rctx->subreq;
		/* set to our TFM to enforce correct alignment: */
		skcipher_request_set_tfm(req, tfm);
	}
	err = skcipher_walk_virt(&w, req, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wsrc;
		le128 *wdst;

		wsrc = w.src.virt.addr;
		wdst = w.dst.virt.addr;

		do {
			le128_xor(wdst++, &t, wsrc++);
			gf128mul_x_ble(&t, &t);
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	return err;
}

static int xor_tweak_pre(struct skcipher_request *req)
{
	return xor_tweak(req, false);
}

static int xor_tweak_post(struct skcipher_request *req)
{
	return xor_tweak(req, true);
}

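/*
 * Completion callback for the asynchronous case: once the child ecb
 * request has finished, run the second xor_tweak() pass before completing
 * the original request.
 */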
static void crypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err)
		err = xor_tweak_post(req);

	skcipher_request_complete(req, err);
}

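/*
 * Both tweak passes and the child request operate in place on req->dst:
 * the first xor_tweak() pass reads req->src and writes the masked blocks
 * to req->dst, so the subrequest below can run dst -> dst.
 */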
static void init_crypt(struct skcipher_request *req)
{
	struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, crypt_done, req);
	skcipher_request_set_crypt(subreq, req->dst, req->dst,
				   req->cryptlen, NULL);

	/* calculate first value of T */
	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);
}

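/*
 * encrypt()/decrypt() implement the XTS composition
 *	C = E_Key1(P xor T) xor T,	T_0 = E_Key2(IV)
 * as three steps: xor_tweak_pre(), a single ecb(cipher) call over the
 * whole request, then xor_tweak_post(). The '?:' chain stops at the
 * first non-zero error code.
 */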
static int encrypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	init_crypt(req);
	return xor_tweak_pre(req) ?:
		crypto_skcipher_encrypt(subreq) ?:
		xor_tweak_post(req);
}

static int decrypt(struct skcipher_request *req)
{
	struct rctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	init_crypt(req);
	return xor_tweak_pre(req) ?:
		crypto_skcipher_decrypt(subreq) ?:
		xor_tweak_post(req);
}

static int init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct priv *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;

	child = crypto_spawn_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	tweak = crypto_alloc_cipher(ictx->name, 0, 0);
	if (IS_ERR(tweak)) {
		crypto_free_skcipher(ctx->child);
		return PTR_ERR(tweak);
	}

	ctx->tweak = tweak;

	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct rctx));

	return 0;
}

static void exit_tfm(struct crypto_skcipher *tfm)
{
	struct priv *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}

static void free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

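/*
 * Template instantiation: "xts(aes)" is built from an "ecb(aes)" spawn
 * for the data path plus a bare "aes" crypto_cipher for the tweak.
 * create() recovers the bare cipher name from the child's "ecb(...)"
 * cra_name so init_tfm() can allocate the tweak cipher.
 */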
static int create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct xts_instance_ctx *ctx;
	struct skcipher_alg *alg;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));

	mask = crypto_requires_off(algt->type, algt->mask,
				   CRYPTO_ALG_NEED_FALLBACK |
				   CRYPTO_ALG_ASYNC);

	err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0, mask);
	if (err == -ENOENT) {
		err = -ENAMETOOLONG;
		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	alg = crypto_skcipher_spawn_alg(&ctx->spawn);

	err = -EINVAL;
	if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
		goto err_drop_spawn;

	if (crypto_skcipher_alg_ivsize(alg))
		goto err_drop_spawn;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
				  &alg->base);
	if (err)
		goto err_drop_spawn;

	err = -EINVAL;
	cipher_name = alg->base.cra_name;

	/* Alas we screwed up the naming so we have to mangle the
	 * cipher name.
	 */
	if (!strncmp(cipher_name, "ecb(", 4)) {
		unsigned len;

		len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
		if (len < 2 || len >= sizeof(ctx->name))
			goto err_drop_spawn;

		if (ctx->name[len - 1] != ')')
			goto err_drop_spawn;

		ctx->name[len - 1] = 0;

		if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
			err = -ENAMETOOLONG;
			goto err_drop_spawn;
		}
	} else
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
				       (__alignof__(u64) - 1);

	inst->alg.ivsize = XTS_BLOCK_SIZE;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

	inst->alg.base.cra_ctxsize = sizeof(struct priv);

	inst->alg.init = init_tfm;
	inst->alg.exit = exit_tfm;

	inst->alg.setkey = setkey;
	inst->alg.encrypt = encrypt;
	inst->alg.decrypt = decrypt;

	inst->free = free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(&ctx->spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_tmpl = {
	.name = "xts",
	.create = create,
	.module = THIS_MODULE,
};

static int __init crypto_module_init(void)
{
	return crypto_register_template(&crypto_tmpl);
}

static void __exit crypto_module_exit(void)
{
	crypto_unregister_template(&crypto_tmpl);
}

module_init(crypto_module_init);
module_exit(crypto_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");
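
/*
 * Usage sketch (not part of this file): a kernel user would typically
 * reach this template through the generic skcipher API, e.g.
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_skcipher_setkey(tfm, key, 64);	// 2 x 32 bytes for AES-256
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * The 16-byte IV passed with each request is the per-sector tweak (the
 * "plain" IV in dm-crypt terms) that init_crypt() encrypts into T_0.
 */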