/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
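
/*
 * Per-tfm and per-request state.  The plain "ctr" template wraps a single
 * block cipher.  The "rfc3686" template additionally carries the 4-byte
 * nonce split off the end of the key, plus a per-request context holding
 * the assembled counter block and the child skcipher request.
 */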
struct crypto_ctr_ctx {
	struct crypto_cipher *child;
};

struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};
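
/*
 * Key setup just forwards the key to the underlying block cipher,
 * propagating request flags down and result flags back up to the parent.
 */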
static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);

	return err;
}
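
/*
 * Handle the final, partial block: encrypt the counter block into an
 * aligned keystream buffer on the stack, XOR only the remaining bytes
 * into the destination, then bump the counter.
 */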
static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[bsize + alignmask];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);

	crypto_inc(ctrblk, bsize);
}
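
/*
 * Out-of-place path for full blocks: the keystream is generated directly
 * into the destination buffer and then XORed with the source, one block
 * at a time.  Returns the number of bytes left over (less than a block).
 */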
static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
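
/*
 * In-place path: source and destination overlap, so the keystream has to
 * go through an aligned temporary buffer before being XORed into the data.
 */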
static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[bsize + alignmask];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
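
/*
 * Main entry point.  Encrypt and decrypt are the same operation in CTR
 * mode: walk the scatterlists, handle full blocks in or out of place, and
 * finish with a partial block when the length is not block-aligned.
 */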
static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child);

		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, child);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}
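
/*
 * Instantiate and tear down the child block cipher when a ctr(...) tfm
 * is created and destroyed.
 */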
static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}
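
/*
 * Build a ctr(cipher) instance from the template parameters.  crypto_inc()
 * operates on 32-bit words, so the underlying block size must be at least
 * 4 bytes and a multiple of 4.  The resulting mode is a stream cipher, so
 * the instance advertises a block size of 1 and uses the underlying block
 * size as its IV (counter block) size.
 */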
static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	mask = CRYPTO_ALG_TYPE_MASK |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, mask);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_put_alg;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_put_alg;

	inst = crypto_alloc_instance("ctr", alg);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

	inst->alg.cra_blkcipher.geniv = "chainiv";

out:
	crypto_mod_put(alg);
	return inst;

out_put_alg:
	inst = ERR_PTR(err);
	goto out;
}

static void crypto_ctr_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};
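
/*
 * rfc3686(...) implements the CTR variant used by IPsec ESP (RFC 3686).
 * The last 4 bytes of the key are a per-SA nonce, and the 16-byte counter
 * block is laid out as:
 *
 *	nonce (4 bytes) || per-packet IV (8 bytes) || counter (4 bytes, BE)
 *
 * with the counter starting at 1 for each request.  A user would
 * typically allocate this as, e.g.,
 * crypto_alloc_skcipher("rfc3686(ctr(aes))", 0, 0).
 */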
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}
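
/*
 * Assemble the counter block in the aligned per-request context and hand
 * the actual CTR work to the child skcipher.  Encrypt and decrypt are the
 * same operation here as well.
 */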
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}
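
/*
 * The per-request context embeds the child request, so the advertised
 * reqsize must cover the alignment padding, our own context and the
 * child's reqsize.
 */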
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}
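
/*
 * Build an rfc3686(...) instance around an existing CTR-mode skcipher.
 * The wrapped algorithm must behave as a stream cipher (block size 1)
 * with a 16-byte IV; the wrapper then exposes the 8-byte RFC 3686 IV and
 * grows both key size limits by the 4-byte nonce.
 */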
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_drop_spawn;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_drop_spawn;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_rfc3686_tmpl = {
	.name = "rfc3686",
	.create = crypto_rfc3686_create,
	.module = THIS_MODULE,
};
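
/*
 * Register the "ctr" template first, then "rfc3686"; if the second
 * registration fails, the first is unregistered again.
 */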
static int __init crypto_ctr_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_ctr_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_rfc3686_tmpl);
	if (err)
		goto out_drop_ctr;

out:
	return err;

out_drop_ctr:
	crypto_unregister_template(&crypto_ctr_tmpl);
	goto out;
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc3686_tmpl);
	crypto_unregister_template(&crypto_ctr_tmpl);
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");