/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"

struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 idata[16];
	u8 auth_tag[16];
	u32 flags;
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct skcipher_request skreq;
};

struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
	unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
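
/*
 * Write msglen big-endian into a csize-byte field (the trailing L bytes
 * of the B_0 block).  Lengths that do not fit into csize bytes are
 * rejected with -EOVERFLOW.
 */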
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}
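
/*
 * CCM uses a single key: the same key is programmed into both the
 * CTR skcipher and the CBC-MAC hash.
 */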
static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_skcipher *ctr = ctx->ctr;
	struct crypto_ahash *mac = ctx->mac;
	int err = 0;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
			      CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(mac, key, keylen);
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
			      CRYPTO_TFM_RES_MASK);
out:
	return err;
}
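
/* CCM allows only even tag lengths from 4 to 16 bytes (NIST SP 800-38C). */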
static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
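
/*
 * Build the B_0 block in info[]: the flags byte carries the Adata bit
 * (bit 6), the tag length encoded as (M - 2) / 2 in bits 5..3, and
 * L' = L - 1 in bits 2..0 (taken from iv[0]); the nonce follows and the
 * trailing L bytes hold the message length.
 */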
static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];
	unsigned int l = lp + 1;
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));
	if (req->assoclen)
		*info |= 64;

	return set_msg_len(info + 16 - l, cryptlen, l);
}
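
/*
 * Encode the associated data length: two big-endian bytes if it is below
 * 0xff00, otherwise the marker 0xfffe followed by a 32-bit length
 * (RFC 3610, section 2.2).
 */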
static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}
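
/*
 * Compute the CBC-MAC over B_0, the length-prefixed associated data
 * (explicitly zero-padded to the block size below), and the plaintext,
 * whose trailing partial block is zero-padded implicitly by the cbcmac
 * final step.  The unencrypted 16-byte tag ends up in pctx->odata.
 */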
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	AHASH_REQUEST_ON_STACK(ahreq, ctx->mac);
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int ilen, err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* we need to pad the MAC input to a round multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		if (plain)
			sg_chain(sg, 2, plain);
		plain = sg;
		cryptlen += ilen;
	}

	ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	u8 *odata = pctx->odata;

	if (!err)
		scatterwalk_map_and_copy(odata, req->dst,
					 req->assoclen + req->cryptlen,
					 crypto_aead_authsize(aead), 1);
	aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (1 > iv[0] || iv[0] > 7)
		return -EINVAL;

	return 0;
}
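
/*
 * Prepend the 16-byte tag buffer to the source (and, if different, the
 * destination) scatterlist, so that the CTR pass transforms the MAC with
 * keystream block S_0 before handling the payload with counters 1, 2, ...
 */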
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}
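
/*
 * Encrypt: compute the CBC-MAC over the plaintext first, then run CTR
 * over the tag plus the plaintext and append the (now encrypted) tag to
 * the end of the destination.
 */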
static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}

static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}
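
/*
 * Decrypt: run CTR over the received tag plus the ciphertext, recompute
 * the CBC-MAC over the recovered plaintext and compare the two tags with
 * crypto_memneq().  The CTR pass is given a scratch copy of the IV
 * (pctx->idata) so that req->iv stays intact for the MAC recomputation.
 */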
static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = pctx->idata;
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	memcpy(iv, req->iv, 16);

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	mac = crypto_spawn_ahash(&ictx->mac);
	if (IS_ERR(mac))
		return PTR_ERR(mac);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_mac;

	ctx->mac = mac;
	ctx->ctr = ctr;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		crypto_skcipher_reqsize(ctr));

	return 0;

err_free_mac:
	crypto_free_ahash(mac);
	return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->mac);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_ahash(&ctx->mac);
	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}
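
/*
 * Instantiate the CCM AEAD from a CTR skcipher and a CBC-MAC ahash
 * (named directly via ccm_base, or derived from the cipher name via ccm).
 * The MAC must produce a 16-byte digest and the CTR transform must be a
 * stream cipher with a 16-byte IV.
 */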
static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *full_name,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *mac_alg;
	struct hash_alg_common *mac;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
				  CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_AHASH_MASK |
				  CRYPTO_ALG_ASYNC);
	if (IS_ERR(mac_alg))
		return PTR_ERR(mac_alg);

	mac = __crypto_hash_alg_common(mac_alg);
	err = -EINVAL;
	if (mac->digestsize != 16)
		goto out_put_mac;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_mac;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ictx->mac, mac,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_mac;

	ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

	/* Not a stream cipher? */
	err = -EINVAL;
	if (ctr->base.cra_blocksize != 1)
		goto err_drop_ctr;

	/* We want the real thing! */
	if (crypto_skcipher_alg_ivsize(ctr) != 16)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;

	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_mac:
	crypto_mod_put(mac_alg);
	return err;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
	crypto_drop_ahash(&ictx->mac);
err_free_inst:
	kfree(inst);
	goto out_put_mac;
}

static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char mac_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
					mac_name);
}

static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.create = crypto_ccm_create,
	.module = THIS_MODULE,
};

static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *cipher_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	cipher_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
		     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
					cipher_name);
}

static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.create = crypto_ccm_base_create,
	.module = THIS_MODULE,
};
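
/*
 * rfc4309 (CCM for IPsec ESP): the last three bytes of the key are the
 * implicit nonce (salt) and the remainder is handed to the inner CCM
 * transform.
 */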
static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 3)
		return -EINVAL;

	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}
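
/*
 * Build the 16-byte CCM IV for the inner request: flags/L' = 3 (a 4-byte
 * counter), the 3-byte salt saved at setkey time, then the 8-byte
 * per-packet IV.  The trailing 8 bytes of the AAD region in req->src are
 * skipped and not authenticated, so only assoclen - 8 bytes are passed on
 * as associated data.
 */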
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
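
/*
 * An assoclen of 16 or 20 corresponds to the ESP SPI and sequence number
 * (8 bytes, or 12 with an extended sequence number) plus the 8-byte IV.
 */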
static int crypto_rfc4309_encrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.create = crypto_rfc4309_create,
	.module = THIS_MODULE,
};
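
/*
 * cbcmac(cipher): a plain CBC-MAC shash over the underlying block cipher.
 * The running digest is kept in the final block-size bytes of the shash
 * descriptor context; a partial final block is handled by encrypting
 * whatever has been XORed in so far, which is equivalent to zero padding.
 */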
static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
				       const u8 *inkey, unsigned int keylen)
{
	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

	return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_digestsize(pdesc->tfm);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

	ctx->len = 0;
	memset(dg, 0, bs);

	return 0;
}

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				       unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	while (len > 0) {
		unsigned int l = min(len, bs - ctx->len);

		crypto_xor(dg + ctx->len, p, l);
		ctx->len += l;
		len -= l;
		p += l;

		if (ctx->len == bs) {
			crypto_cipher_encrypt_one(tfm, dg, dg);
			ctx->len = 0;
		}
	}

	return 0;
}

static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	if (ctx->len)
		crypto_cipher_encrypt_one(tfm, dg, dg);

	memcpy(out, dg, bs);

	return 0;
}

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = shash_alloc_instance("cbcmac", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = 1;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
				   alg->cra_alignmask + 1) +
			     alg->cra_blocksize;

	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.final = crypto_cbcmac_digest_final;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	err = shash_register_instance(tmpl, inst);

out_free_inst:
	if (err)
		shash_free_instance(shash_crypto_instance(inst));

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_cbcmac_tmpl = {
	.name = "cbcmac",
	.create = cbcmac_create,
	.free = shash_free_instance,
	.module = THIS_MODULE,
};

static int __init crypto_ccm_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_cbcmac_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_ccm_base_tmpl);
	if (err)
		goto out_undo_cbcmac;

	err = crypto_register_template(&crypto_ccm_tmpl);
	if (err)
		goto out_undo_base;

	err = crypto_register_template(&crypto_rfc4309_tmpl);
	if (err)
		goto out_undo_ccm;

out:
	return err;

out_undo_ccm:
	crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
	crypto_unregister_template(&crypto_ccm_base_tmpl);
out_undo_cbcmac:
	crypto_unregister_template(&crypto_cbcmac_tmpl);
	goto out;
}

static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc4309_tmpl);
	crypto_unregister_template(&crypto_ccm_tmpl);
	crypto_unregister_template(&crypto_ccm_base_tmpl);
	crypto_unregister_template(&crypto_cbcmac_tmpl);
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");