ccm.c

/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"

struct ccm_instance_ctx {
        struct crypto_skcipher_spawn ctr;
        struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
        struct crypto_ahash *mac;
        struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
        struct crypto_aead *child;
        u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
        struct scatterlist src[3];
        struct scatterlist dst[3];
        struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
        u8 odata[16];
        u8 idata[16];
        u8 auth_tag[16];
        u32 flags;
        struct scatterlist src[3];
        struct scatterlist dst[3];
        union {
                struct ahash_request ahreq;
                struct skcipher_request skreq;
        };
};

struct cbcmac_tfm_ctx {
        struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
        unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
        struct aead_request *req)
{
        unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

        return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
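
/*
 * Encode the message length into the trailing csize bytes of the B_0 block,
 * most significant byte first, as required by RFC 3610 / NIST SP 800-38C.
 * csize is the CCM length field size L; a message that does not fit into
 * L bytes is rejected with -EOVERFLOW.
 */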
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
        __be32 data;

        memset(block, 0, csize);
        block += csize;

        if (csize >= 4)
                csize = 4;
        else if (msglen > (1 << (8 * csize)))
                return -EOVERFLOW;

        data = cpu_to_be32(msglen);
        memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

        return 0;
}

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_skcipher *ctr = ctx->ctr;
        struct crypto_ahash *mac = ctx->mac;
        int err = 0;

        crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_skcipher_setkey(ctr, key, keylen);
        crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
                                    CRYPTO_TFM_RES_MASK);
        if (err)
                goto out;

        crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
        crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
                                    CRYPTO_TFM_REQ_MASK);
        err = crypto_ahash_setkey(mac, key, keylen);
        crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
                                    CRYPTO_TFM_RES_MASK);

out:
        return err;
}

static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
                                  unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 6:
        case 8:
        case 10:
        case 12:
        case 14:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}
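
/*
 * Build the B_0 block defined in RFC 3610 section 2.2 / NIST SP 800-38C:
 * req->iv already carries L' in byte 0 and the nonce after it; this function
 * ORs the Adata flag and the encoded tag length M into the flags octet and
 * writes the message length into the trailing L bytes.
 */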
static int format_input(u8 *info, struct aead_request *req,
                        unsigned int cryptlen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int lp = req->iv[0];
        unsigned int l = lp + 1;
        unsigned int m;

        m = crypto_aead_authsize(aead);

        memcpy(info, req->iv, 16);

        /* format control info per RFC 3610 and
         * NIST Special Publication 800-38C
         */
        *info |= (8 * ((m - 2) / 2));
        if (req->assoclen)
                *info |= 64;

        return set_msg_len(info + 16 - l, cryptlen, l);
}
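
/*
 * Encode the associated-data length in front of the associated data:
 * a 2-byte big-endian value for lengths below 0xff00, otherwise the
 * 0xfffe marker followed by a 4-byte length (RFC 3610 section 2.2).
 */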
static int format_adata(u8 *adata, unsigned int a)
{
        int len = 0;

        /* add control info for associated data
         * RFC 3610 and NIST Special Publication 800-38C
         */
        if (a < 65280) {
                *(__be16 *)adata = cpu_to_be16(a);
                len = 2;
        } else {
                *(__be16 *)adata = cpu_to_be16(0xfffe);
                *(__be32 *)&adata[2] = cpu_to_be32(a);
                len = 6;
        }

        return len;
}
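
/*
 * Compute the CBC-MAC over B_0, the length-prefixed associated data (padded
 * with zeroes to a block boundary) and the message, leaving the 16-byte
 * unencrypted tag in pctx->odata.
 */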
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
                           unsigned int cryptlen)
{
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct ahash_request *ahreq = &pctx->ahreq;
        unsigned int assoclen = req->assoclen;
        struct scatterlist sg[3];
        u8 *odata = pctx->odata;
        u8 *idata = pctx->idata;
        int ilen, err;

        /* format control data for input */
        err = format_input(odata, req, cryptlen);
        if (err)
                goto out;

        sg_init_table(sg, 3);
        sg_set_buf(&sg[0], odata, 16);

        /* format associated data and compute into mac */
        if (assoclen) {
                ilen = format_adata(idata, assoclen);
                sg_set_buf(&sg[1], idata, ilen);
                sg_chain(sg, 3, req->src);
        } else {
                ilen = 0;
                sg_chain(sg, 2, req->src);
        }

        ahash_request_set_tfm(ahreq, ctx->mac);
        ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
        ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
        err = crypto_ahash_init(ahreq);
        if (err)
                goto out;
        err = crypto_ahash_update(ahreq);
        if (err)
                goto out;

        /* we need to pad the MAC input to a round multiple of the block size */
        ilen = 16 - (assoclen + ilen) % 16;
        if (ilen < 16) {
                memset(idata, 0, ilen);

                sg_init_table(sg, 2);
                sg_set_buf(&sg[0], idata, ilen);
                if (plain)
                        sg_chain(sg, 2, plain);
                plain = sg;
                cryptlen += ilen;
        }

        ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
        err = crypto_ahash_finup(ahreq);
out:
        return err;
}

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
        struct aead_request *req = areq->data;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        u8 *odata = pctx->odata;

        if (!err)
                scatterwalk_map_and_copy(odata, req->dst,
                                         req->assoclen + req->cryptlen,
                                         crypto_aead_authsize(aead), 1);
        aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
        /* 2 <= L <= 8, so 1 <= L' <= 7. */
        if (1 > iv[0] || iv[0] > 7)
                return -EINVAL;

        return 0;
}
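
/*
 * Prepare for CTR processing: the counter portion of the IV is zeroed so that
 * the keystream block for counter zero encrypts the authentication tag, and a
 * 16-byte tag slot is chained in front of the payload in the source (and, if
 * distinct, destination) scatterlists.
 */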
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct scatterlist *sg;
        u8 *iv = req->iv;
        int err;

        err = crypto_ccm_check_iv(iv);
        if (err)
                return err;

        pctx->flags = aead_request_flags(req);

        /* Note: rfc 3610 and NIST 800-38C require counter of
         * zero to encrypt auth tag.
         */
        memset(iv + 15 - iv[0], 0, iv[0] + 1);

        sg_init_table(pctx->src, 3);
        sg_set_buf(pctx->src, tag, 16);
        sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
        if (sg != pctx->src + 1)
                sg_chain(pctx->src, 2, sg);

        if (req->src != req->dst) {
                sg_init_table(pctx->dst, 3);
                sg_set_buf(pctx->dst, tag, 16);
                sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
                if (sg != pctx->dst + 1)
                        sg_chain(pctx->dst, 2, sg);
        }

        return 0;
}
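
/*
 * Encrypt: compute the CBC-MAC over the plaintext first, then run CTR over
 * the tag slot and the plaintext in one pass (counter 0 encrypts the tag,
 * counters 1..n the message) and append the encrypted tag to the destination.
 */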
static int crypto_ccm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct skcipher_request *skreq = &pctx->skreq;
        struct scatterlist *dst;
        unsigned int cryptlen = req->cryptlen;
        u8 *odata = pctx->odata;
        u8 *iv = req->iv;
        int err;

        err = crypto_ccm_init_crypt(req, odata);
        if (err)
                return err;

        err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
        if (err)
                return err;

        dst = pctx->src;
        if (req->src != req->dst)
                dst = pctx->dst;

        skcipher_request_set_tfm(skreq, ctx->ctr);
        skcipher_request_set_callback(skreq, pctx->flags,
                                      crypto_ccm_encrypt_done, req);
        skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
        err = crypto_skcipher_encrypt(skreq);
        if (err)
                return err;

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
                                 crypto_aead_authsize(aead), 1);
        return err;
}

static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
                                    int err)
{
        struct aead_request *req = areq->data;
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen - authsize;
        struct scatterlist *dst;

        pctx->flags = 0;

        dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

        if (!err) {
                err = crypto_ccm_auth(req, dst, cryptlen);
                if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
                        err = -EBADMSG;
        }
        aead_request_complete(req, err);
}
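
/*
 * Decrypt: run CTR over the received tag and ciphertext, recompute the
 * CBC-MAC over the recovered plaintext and compare it with the decrypted tag
 * using crypto_memneq() to avoid leaking timing information, returning
 * -EBADMSG on mismatch.
 */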
static int crypto_ccm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct skcipher_request *skreq = &pctx->skreq;
        struct scatterlist *dst;
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen;
        u8 *authtag = pctx->auth_tag;
        u8 *odata = pctx->odata;
        u8 *iv = pctx->idata;
        int err;

        cryptlen -= authsize;

        err = crypto_ccm_init_crypt(req, authtag);
        if (err)
                return err;

        scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
                                 authsize, 0);

        dst = pctx->src;
        if (req->src != req->dst)
                dst = pctx->dst;

        memcpy(iv, req->iv, 16);

        skcipher_request_set_tfm(skreq, ctx->ctr);
        skcipher_request_set_callback(skreq, pctx->flags,
                                      crypto_ccm_decrypt_done, req);
        skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
        err = crypto_skcipher_decrypt(skreq);
        if (err)
                return err;

        err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
        if (err)
                return err;

        /* verify */
        if (crypto_memneq(authtag, odata, authsize))
                return -EBADMSG;

        return err;
}

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_ahash *mac;
        struct crypto_skcipher *ctr;
        unsigned long align;
        int err;

        mac = crypto_spawn_ahash(&ictx->mac);
        if (IS_ERR(mac))
                return PTR_ERR(mac);

        ctr = crypto_spawn_skcipher(&ictx->ctr);
        err = PTR_ERR(ctr);
        if (IS_ERR(ctr))
                goto err_free_mac;

        ctx->mac = mac;
        ctx->ctr = ctr;

        align = crypto_aead_alignmask(tfm);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        crypto_aead_set_reqsize(
                tfm,
                align + sizeof(struct crypto_ccm_req_priv_ctx) +
                max(crypto_ahash_reqsize(mac), crypto_skcipher_reqsize(ctr)));

        return 0;

err_free_mac:
        crypto_free_ahash(mac);
        return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_ahash(ctx->mac);
        crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
        struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

        crypto_drop_ahash(&ctx->mac);
        crypto_drop_skcipher(&ctx->ctr);
        kfree(inst);
}

static int crypto_ccm_create_common(struct crypto_template *tmpl,
                                    struct rtattr **tb,
                                    const char *full_name,
                                    const char *ctr_name,
                                    const char *mac_name)
{
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct skcipher_alg *ctr;
        struct crypto_alg *mac_alg;
        struct hash_alg_common *mac;
        struct ccm_instance_ctx *ictx;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
                return -EINVAL;

        mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
                                  CRYPTO_ALG_TYPE_HASH,
                                  CRYPTO_ALG_TYPE_AHASH_MASK |
                                  CRYPTO_ALG_ASYNC);
        if (IS_ERR(mac_alg))
                return PTR_ERR(mac_alg);

        mac = __crypto_hash_alg_common(mac_alg);
        err = -EINVAL;
        if (mac->digestsize != 16)
                goto out_put_mac;

        inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
        err = -ENOMEM;
        if (!inst)
                goto out_put_mac;

        ictx = aead_instance_ctx(inst);
        err = crypto_init_ahash_spawn(&ictx->mac, mac,
                                      aead_crypto_instance(inst));
        if (err)
                goto err_free_inst;

        crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
        err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
                                   crypto_requires_sync(algt->type,
                                                        algt->mask));
        if (err)
                goto err_drop_mac;

        ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

        /* Not a stream cipher? */
        err = -EINVAL;
        if (ctr->base.cra_blocksize != 1)
                goto err_drop_ctr;

        /* We want the real thing! */
        if (crypto_skcipher_alg_ivsize(ctr) != 16)
                goto err_drop_ctr;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
                     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                goto err_drop_ctr;

        memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

        inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = (mac->base.cra_priority +
                                       ctr->base.cra_priority) / 2;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
                                       ctr->base.cra_alignmask;
        inst->alg.ivsize = 16;
        inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
        inst->alg.maxauthsize = 16;
        inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
        inst->alg.init = crypto_ccm_init_tfm;
        inst->alg.exit = crypto_ccm_exit_tfm;
        inst->alg.setkey = crypto_ccm_setkey;
        inst->alg.setauthsize = crypto_ccm_setauthsize;
        inst->alg.encrypt = crypto_ccm_encrypt;
        inst->alg.decrypt = crypto_ccm_decrypt;

        inst->free = crypto_ccm_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto err_drop_ctr;

out_put_mac:
        crypto_mod_put(mac_alg);
        return err;

err_drop_ctr:
        crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
        crypto_drop_ahash(&ictx->mac);
err_free_inst:
        kfree(inst);
        goto out_put_mac;
}

static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        const char *cipher_name;
        char ctr_name[CRYPTO_MAX_ALG_NAME];
        char mac_name[CRYPTO_MAX_ALG_NAME];
        char full_name[CRYPTO_MAX_ALG_NAME];

        cipher_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);

        if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
                     cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
                     cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
            CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
                                        mac_name);
}

static struct crypto_template crypto_ccm_tmpl = {
        .name = "ccm",
        .create = crypto_ccm_create,
        .module = THIS_MODULE,
};

static int crypto_ccm_base_create(struct crypto_template *tmpl,
                                  struct rtattr **tb)
{
        const char *ctr_name;
        const char *cipher_name;
        char full_name[CRYPTO_MAX_ALG_NAME];

        ctr_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(ctr_name))
                return PTR_ERR(ctr_name);

        cipher_name = crypto_attr_alg_name(tb[2]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);

        if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
                     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
                                        cipher_name);
}

static struct crypto_template crypto_ccm_base_tmpl = {
        .name = "ccm_base",
        .create = crypto_ccm_base_create,
        .module = THIS_MODULE,
};
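
/*
 * RFC 4309 (CCM for IPsec ESP): the last three bytes of the key are the
 * implicit nonce (salt) and are stripped off before keying the inner CCM
 * transform.
 */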
static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
                                 unsigned int keylen)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
        struct crypto_aead *child = ctx->child;
        int err;

        if (keylen < 3)
                return -EINVAL;

        keylen -= 3;
        memcpy(ctx->nonce, key + keylen, 3);

        crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
                                     CRYPTO_TFM_REQ_MASK);
        err = crypto_aead_setkey(child, key, keylen);
        crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
                                      CRYPTO_TFM_RES_MASK);

        return err;
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
                                      unsigned int authsize)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

        switch (authsize) {
        case 8:
        case 12:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return crypto_aead_setauthsize(ctx->child, authsize);
}
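
/*
 * Build the inner CCM request: the 11-byte nonce is the 3-byte salt followed
 * by the 8-byte per-request IV (L' = 3, hence L = 4), and the trailing 8
 * bytes of the associated data (the IV itself) are dropped from the AAD
 * passed to the inner transform.
 */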
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
        struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
        struct aead_request *subreq = &rctx->subreq;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_aead *child = ctx->child;
        struct scatterlist *sg;
        u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
                           crypto_aead_alignmask(child) + 1);

        /* L' */
        iv[0] = 3;

        memcpy(iv + 1, ctx->nonce, 3);
        memcpy(iv + 4, req->iv, 8);

        scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

        sg_init_table(rctx->src, 3);
        sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
        sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
        if (sg != rctx->src + 1)
                sg_chain(rctx->src, 2, sg);

        if (req->src != req->dst) {
                sg_init_table(rctx->dst, 3);
                sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
                sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
                if (sg != rctx->dst + 1)
                        sg_chain(rctx->dst, 2, sg);
        }

        aead_request_set_tfm(subreq, child);
        aead_request_set_callback(subreq, req->base.flags, req->base.complete,
                                  req->base.data);
        aead_request_set_crypt(subreq, rctx->src,
                               req->src == req->dst ? rctx->src : rctx->dst,
                               req->cryptlen, iv);
        aead_request_set_ad(subreq, req->assoclen - 8);

        return subreq;
}

static int crypto_rfc4309_encrypt(struct aead_request *req)
{
        if (req->assoclen != 16 && req->assoclen != 20)
                return -EINVAL;

        req = crypto_rfc4309_crypt(req);

        return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
        if (req->assoclen != 16 && req->assoclen != 20)
                return -EINVAL;

        req = crypto_rfc4309_crypt(req);

        return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_aead *aead;
        unsigned long align;

        aead = crypto_spawn_aead(spawn);
        if (IS_ERR(aead))
                return PTR_ERR(aead);

        ctx->child = aead;

        align = crypto_aead_alignmask(aead);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        crypto_aead_set_reqsize(
                tfm,
                sizeof(struct crypto_rfc4309_req_ctx) +
                ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
                align + 32);

        return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
        crypto_drop_aead(aead_instance_ctx(inst));
        kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
                                 struct rtattr **tb)
{
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct crypto_aead_spawn *spawn;
        struct aead_alg *alg;
        const char *ccm_name;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
                return -EINVAL;

        ccm_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(ccm_name))
                return PTR_ERR(ccm_name);

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        spawn = aead_instance_ctx(inst);
        crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
        err = crypto_grab_aead(spawn, ccm_name, 0,
                               crypto_requires_sync(algt->type, algt->mask));
        if (err)
                goto out_free_inst;

        alg = crypto_spawn_aead_alg(spawn);

        err = -EINVAL;

        /* We only support 16-byte blocks. */
        if (crypto_aead_alg_ivsize(alg) != 16)
                goto out_drop_alg;

        /* Not a stream cipher? */
        if (alg->base.cra_blocksize != 1)
                goto out_drop_alg;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
                     "rfc4309(%s)", alg->base.cra_name) >=
            CRYPTO_MAX_ALG_NAME ||
            snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "rfc4309(%s)", alg->base.cra_driver_name) >=
            CRYPTO_MAX_ALG_NAME)
                goto out_drop_alg;

        inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = alg->base.cra_priority;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

        inst->alg.ivsize = 8;
        inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
        inst->alg.maxauthsize = 16;

        inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

        inst->alg.init = crypto_rfc4309_init_tfm;
        inst->alg.exit = crypto_rfc4309_exit_tfm;

        inst->alg.setkey = crypto_rfc4309_setkey;
        inst->alg.setauthsize = crypto_rfc4309_setauthsize;
        inst->alg.encrypt = crypto_rfc4309_encrypt;
        inst->alg.decrypt = crypto_rfc4309_decrypt;

        inst->free = crypto_rfc4309_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto out_drop_alg;

out:
        return err;

out_drop_alg:
        crypto_drop_aead(spawn);
out_free_inst:
        kfree(inst);
        goto out;
}

static struct crypto_template crypto_rfc4309_tmpl = {
        .name = "rfc4309",
        .create = crypto_rfc4309_create,
        .module = THIS_MODULE,
};
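
/*
 * "cbcmac" keyed hash: a plain CBC-MAC built on the underlying block cipher,
 * used as the authentication component of ccm(). The digest size equals the
 * cipher block size and the running CBC state is kept at the end of the
 * shash descriptor context.
 */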
static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
                                       const u8 *inkey, unsigned int keylen)
{
        struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

        return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        int bs = crypto_shash_digestsize(pdesc->tfm);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

        ctx->len = 0;
        memset(dg, 0, bs);

        return 0;
}

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
                                       unsigned int len)
{
        struct crypto_shash *parent = pdesc->tfm;
        struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        struct crypto_cipher *tfm = tctx->child;
        int bs = crypto_shash_digestsize(parent);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

        while (len > 0) {
                unsigned int l = min(len, bs - ctx->len);

                crypto_xor(dg + ctx->len, p, l);
                ctx->len += l;
                len -= l;
                p += l;

                if (ctx->len == bs) {
                        crypto_cipher_encrypt_one(tfm, dg, dg);
                        ctx->len = 0;
                }
        }

        return 0;
}

static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
        struct crypto_shash *parent = pdesc->tfm;
        struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        struct crypto_cipher *tfm = tctx->child;
        int bs = crypto_shash_digestsize(parent);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

        if (ctx->len)
                crypto_cipher_encrypt_one(tfm, dg, dg);

        memcpy(out, dg, bs);

        return 0;
}

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_cipher *cipher;
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;

        return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
        struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct shash_instance *inst;
        struct crypto_alg *alg;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
        if (err)
                return err;

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
                                  CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        inst = shash_alloc_instance("cbcmac", alg);
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto out_put_alg;

        err = crypto_init_spawn(shash_instance_ctx(inst), alg,
                                shash_crypto_instance(inst),
                                CRYPTO_ALG_TYPE_MASK);
        if (err)
                goto out_free_inst;

        inst->alg.base.cra_priority = alg->cra_priority;
        inst->alg.base.cra_blocksize = 1;

        inst->alg.digestsize = alg->cra_blocksize;
        inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
                                   alg->cra_alignmask + 1) +
                             alg->cra_blocksize;

        inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
        inst->alg.base.cra_init = cbcmac_init_tfm;
        inst->alg.base.cra_exit = cbcmac_exit_tfm;

        inst->alg.init = crypto_cbcmac_digest_init;
        inst->alg.update = crypto_cbcmac_digest_update;
        inst->alg.final = crypto_cbcmac_digest_final;
        inst->alg.setkey = crypto_cbcmac_digest_setkey;

        err = shash_register_instance(tmpl, inst);

out_free_inst:
        if (err)
                shash_free_instance(shash_crypto_instance(inst));

out_put_alg:
        crypto_mod_put(alg);
        return err;
}

static struct crypto_template crypto_cbcmac_tmpl = {
        .name = "cbcmac",
        .create = cbcmac_create,
        .free = shash_free_instance,
        .module = THIS_MODULE,
};

static int __init crypto_ccm_module_init(void)
{
        int err;

        err = crypto_register_template(&crypto_cbcmac_tmpl);
        if (err)
                goto out;

        err = crypto_register_template(&crypto_ccm_base_tmpl);
        if (err)
                goto out_undo_cbcmac;

        err = crypto_register_template(&crypto_ccm_tmpl);
        if (err)
                goto out_undo_base;

        err = crypto_register_template(&crypto_rfc4309_tmpl);
        if (err)
                goto out_undo_ccm;

out:
        return err;

out_undo_ccm:
        crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
        crypto_unregister_template(&crypto_ccm_base_tmpl);
out_undo_cbcmac:
        crypto_unregister_template(&crypto_cbcmac_tmpl);
        goto out;
}

static void __exit crypto_ccm_module_exit(void)
{
        crypto_unregister_template(&crypto_rfc4309_tmpl);
        crypto_unregister_template(&crypto_ccm_tmpl);
        crypto_unregister_template(&crypto_ccm_base_tmpl);
        crypto_unregister_template(&crypto_cbcmac_tmpl);
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");
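
/*
 * Illustrative usage sketch (not part of the original file): encrypting with
 * the "ccm(aes)" instance through the in-kernel AEAD API.  Error handling is
 * omitted, and "key", "nonce", "sg", "assoclen" and "ptlen" are assumed to be
 * set up by the caller; the destination scatterlist must leave room for the
 * tag behind the plaintext.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *	u8 iv[16] = { 3 };			// iv[0] = L' = 3, i.e. L = 4
 *
 *	memcpy(iv + 1, nonce, 11);		// 11-byte nonce, counter = 0
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 8);
 *	aead_request_set_callback(req, 0, NULL, NULL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, sg, sg, ptlen, iv);
 *	crypto_aead_encrypt(req);
 */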