/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"

struct ccm_instance_ctx {
        struct crypto_skcipher_spawn ctr;
        struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
        struct crypto_ahash *mac;
        struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
        struct crypto_aead *child;
        u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
        struct scatterlist src[3];
        struct scatterlist dst[3];
        struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
        u8 odata[16];
        u8 idata[16];
        u8 auth_tag[16];
        u32 flags;
        struct scatterlist src[3];
        struct scatterlist dst[3];
        struct skcipher_request skreq;
};

struct cbcmac_tfm_ctx {
        struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
        unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
        struct aead_request *req)
{
        unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

        return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

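/*
 * Write the message length into the last csize bytes of the B_0 block,
 * big-endian, as the CCM formatting function requires (RFC 3610,
 * NIST SP 800-38C).  csize is L, the size of the length field; a message
 * that does not fit into L bytes is rejected with -EOVERFLOW.
 */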
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
        __be32 data;

        memset(block, 0, csize);
        block += csize;

        if (csize >= 4)
                csize = 4;
        else if (msglen > (1 << (8 * csize)))
                return -EOVERFLOW;

        data = cpu_to_be32(msglen);
        memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

        return 0;
}

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_skcipher *ctr = ctx->ctr;
        struct crypto_ahash *mac = ctx->mac;
        int err = 0;

        crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_skcipher_setkey(ctr, key, keylen);
        crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
                                    CRYPTO_TFM_RES_MASK);
        if (err)
                goto out;

        crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
        crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
                                    CRYPTO_TFM_REQ_MASK);
        err = crypto_ahash_setkey(mac, key, keylen);
        crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
                                    CRYPTO_TFM_RES_MASK);
out:
        return err;
}

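/*
 * CCM only permits the even tag lengths 4 through 16 defined by
 * NIST SP 800-38C; anything else is rejected here.
 */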
static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
                                  unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 6:
        case 8:
        case 10:
        case 12:
        case 14:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

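/*
 * Build the B_0 block from the IV.  req->iv already carries the flags
 * byte position (iv[0] holds L' = L - 1) followed by the nonce, so only
 * the flag bits and the message length need to be filled in here:
 * bits 0-2 encode L - 1, bits 3-5 encode (M - 2) / 2 for an M-byte tag,
 * and bit 6 is set when associated data is present.
 */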
static int format_input(u8 *info, struct aead_request *req,
                        unsigned int cryptlen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int lp = req->iv[0];
        unsigned int l = lp + 1;
        unsigned int m;

        m = crypto_aead_authsize(aead);

        memcpy(info, req->iv, 16);

        /* format control info per RFC 3610 and
         * NIST Special Publication 800-38C
         */
        *info |= (8 * ((m - 2) / 2));
        if (req->assoclen)
                *info |= 64;

        return set_msg_len(info + 16 - l, cryptlen, l);
}

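/*
 * Encode the associated data length in front of the data itself: a two
 * byte big-endian count for lengths below 0xff00, otherwise the 0xfffe
 * marker followed by a four byte count, as specified in RFC 3610.
 */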
static int format_adata(u8 *adata, unsigned int a)
{
        int len = 0;

        /* add control info for associated data
         * RFC 3610 and NIST Special Publication 800-38C
         */
        if (a < 65280) {
                *(__be16 *)adata = cpu_to_be16(a);
                len = 2;
        } else {
                *(__be16 *)adata = cpu_to_be16(0xfffe);
                *(__be32 *)&adata[2] = cpu_to_be32(a);
                len = 6;
        }

        return len;
}

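/*
 * Compute the CBC-MAC over B_0, the length-prefixed associated data and
 * the plaintext.  The associated data section is zero-padded up to the
 * next 16-byte boundary before the payload is appended; a trailing
 * partial plaintext block is absorbed by the cbcmac transform itself.
 * The scatterlists built here stitch the on-stack headers to the
 * caller's buffers so one hash walk covers the whole input, and the
 * resulting tag lands in pctx->odata.
 */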
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
                           unsigned int cryptlen)
{
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        AHASH_REQUEST_ON_STACK(ahreq, ctx->mac);
        unsigned int assoclen = req->assoclen;
        struct scatterlist sg[3];
        u8 *odata = pctx->odata;
        u8 *idata = pctx->idata;
        int ilen, err;

        /* format control data for input */
        err = format_input(odata, req, cryptlen);
        if (err)
                goto out;

        sg_init_table(sg, 3);
        sg_set_buf(&sg[0], odata, 16);

        /* format associated data and compute into mac */
        if (assoclen) {
                ilen = format_adata(idata, assoclen);
                sg_set_buf(&sg[1], idata, ilen);
                sg_chain(sg, 3, req->src);
        } else {
                ilen = 0;
                sg_chain(sg, 2, req->src);
        }

        ahash_request_set_tfm(ahreq, ctx->mac);
        ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
        ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
        err = crypto_ahash_init(ahreq);
        if (err)
                goto out;
        err = crypto_ahash_update(ahreq);
        if (err)
                goto out;

        /* we need to pad the MAC input to a round multiple of the block size */
        ilen = 16 - (assoclen + ilen) % 16;
        if (ilen < 16) {
                memset(idata, 0, ilen);
                sg_init_table(sg, 2);
                sg_set_buf(&sg[0], idata, ilen);
                if (plain)
                        sg_chain(sg, 2, plain);
                plain = sg;
                cryptlen += ilen;
        }

        ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
        err = crypto_ahash_finup(ahreq);
out:
        return err;
}

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
        struct aead_request *req = areq->data;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        u8 *odata = pctx->odata;

        if (!err)
                scatterwalk_map_and_copy(odata, req->dst,
                                         req->assoclen + req->cryptlen,
                                         crypto_aead_authsize(aead), 1);
        aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
        /* 2 <= L <= 8, so 1 <= L' <= 7. */
        if (1 > iv[0] || iv[0] > 7)
                return -EINVAL;

        return 0;
}

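/*
 * Prepare the counter block and the scatterlists shared by encryption and
 * decryption: clear the trailing L = iv[0] + 1 counter bytes of the IV and
 * place the 16-byte tag buffer in front of the payload scatterlist, so a
 * single CTR pass covers both the tag (counter block zero) and the data.
 */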
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct scatterlist *sg;
        u8 *iv = req->iv;
        int err;

        err = crypto_ccm_check_iv(iv);
        if (err)
                return err;

        pctx->flags = aead_request_flags(req);

        /* Note: rfc 3610 and NIST 800-38C require counter of
         * zero to encrypt auth tag.
         */
        memset(iv + 15 - iv[0], 0, iv[0] + 1);

        sg_init_table(pctx->src, 3);
        sg_set_buf(pctx->src, tag, 16);
        sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
        if (sg != pctx->src + 1)
                sg_chain(pctx->src, 2, sg);

        if (req->src != req->dst) {
                sg_init_table(pctx->dst, 3);
                sg_set_buf(pctx->dst, tag, 16);
                sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
                if (sg != pctx->dst + 1)
                        sg_chain(pctx->dst, 2, sg);
        }

        return 0;
}

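/*
 * Encryption is MAC-then-encrypt: the CBC-MAC of the plaintext is written
 * into odata, and the subsequent CTR pass encrypts that tag together with
 * the payload (the tag occupies the first block of the scatterlist built
 * by crypto_ccm_init_crypt).  The encrypted tag is then appended to dst.
 */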
static int crypto_ccm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct skcipher_request *skreq = &pctx->skreq;
        struct scatterlist *dst;
        unsigned int cryptlen = req->cryptlen;
        u8 *odata = pctx->odata;
        u8 *iv = req->iv;
        int err;

        err = crypto_ccm_init_crypt(req, odata);
        if (err)
                return err;

        err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
        if (err)
                return err;

        dst = pctx->src;
        if (req->src != req->dst)
                dst = pctx->dst;

        skcipher_request_set_tfm(skreq, ctx->ctr);
        skcipher_request_set_callback(skreq, pctx->flags,
                                      crypto_ccm_encrypt_done, req);
        skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
        err = crypto_skcipher_encrypt(skreq);
        if (err)
                return err;

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
                                 crypto_aead_authsize(aead), 1);
        return err;
}

static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
                                    int err)
{
        struct aead_request *req = areq->data;
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen - authsize;
        struct scatterlist *dst;

        pctx->flags = 0;

        dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

        if (!err) {
                err = crypto_ccm_auth(req, dst, cryptlen);
                if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
                        err = -EBADMSG;
        }
        aead_request_complete(req, err);
}

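/*
 * Decryption runs the CTR pass first, which decrypts the received tag
 * (copied into auth_tag beforehand) along with the payload, then
 * recomputes the CBC-MAC over the recovered plaintext and compares the
 * two tags with crypto_memneq() to avoid a timing side channel.
 */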
static int crypto_ccm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
        struct skcipher_request *skreq = &pctx->skreq;
        struct scatterlist *dst;
        unsigned int authsize = crypto_aead_authsize(aead);
        unsigned int cryptlen = req->cryptlen;
        u8 *authtag = pctx->auth_tag;
        u8 *odata = pctx->odata;
        u8 *iv = req->iv;
        int err;

        cryptlen -= authsize;

        err = crypto_ccm_init_crypt(req, authtag);
        if (err)
                return err;

        scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
                                 authsize, 0);

        dst = pctx->src;
        if (req->src != req->dst)
                dst = pctx->dst;

        skcipher_request_set_tfm(skreq, ctx->ctr);
        skcipher_request_set_callback(skreq, pctx->flags,
                                      crypto_ccm_decrypt_done, req);
        skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
        err = crypto_skcipher_decrypt(skreq);
        if (err)
                return err;

        err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
        if (err)
                return err;

        /* verify */
        if (crypto_memneq(authtag, odata, authsize))
                return -EBADMSG;

        return err;
}

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_ahash *mac;
        struct crypto_skcipher *ctr;
        unsigned long align;
        int err;

        mac = crypto_spawn_ahash(&ictx->mac);
        if (IS_ERR(mac))
                return PTR_ERR(mac);

        ctr = crypto_spawn_skcipher(&ictx->ctr);
        err = PTR_ERR(ctr);
        if (IS_ERR(ctr))
                goto err_free_mac;

        ctx->mac = mac;
        ctx->ctr = ctr;

        align = crypto_aead_alignmask(tfm);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        crypto_aead_set_reqsize(
                tfm,
                align + sizeof(struct crypto_ccm_req_priv_ctx) +
                crypto_skcipher_reqsize(ctr));

        return 0;

err_free_mac:
        crypto_free_ahash(mac);
        return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
        struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_ahash(ctx->mac);
        crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
        struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

        crypto_drop_ahash(&ctx->mac);
        crypto_drop_skcipher(&ctx->ctr);
        kfree(inst);
}

static int crypto_ccm_create_common(struct crypto_template *tmpl,
                                    struct rtattr **tb,
                                    const char *full_name,
                                    const char *ctr_name,
                                    const char *mac_name)
{
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct skcipher_alg *ctr;
        struct crypto_alg *mac_alg;
        struct hash_alg_common *mac;
        struct ccm_instance_ctx *ictx;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
                return -EINVAL;

        mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
                                  CRYPTO_ALG_TYPE_HASH,
                                  CRYPTO_ALG_TYPE_AHASH_MASK |
                                  CRYPTO_ALG_ASYNC);
        if (IS_ERR(mac_alg))
                return PTR_ERR(mac_alg);

        mac = __crypto_hash_alg_common(mac_alg);
        err = -EINVAL;
        if (mac->digestsize != 16)
                goto out_put_mac;

        inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
        err = -ENOMEM;
        if (!inst)
                goto out_put_mac;

        ictx = aead_instance_ctx(inst);
        err = crypto_init_ahash_spawn(&ictx->mac, mac,
                                      aead_crypto_instance(inst));
        if (err)
                goto err_free_inst;

        crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
        err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
                                   crypto_requires_sync(algt->type,
                                                        algt->mask));
        if (err)
                goto err_drop_mac;

        ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

        /* Not a stream cipher? */
        err = -EINVAL;
        if (ctr->base.cra_blocksize != 1)
                goto err_drop_ctr;

        /* We want the real thing! */
        if (crypto_skcipher_alg_ivsize(ctr) != 16)
                goto err_drop_ctr;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
                     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                goto err_drop_ctr;

        memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

        inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = (mac->base.cra_priority +
                                       ctr->base.cra_priority) / 2;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
                                       ctr->base.cra_alignmask;
        inst->alg.ivsize = 16;
        inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
        inst->alg.maxauthsize = 16;
        inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
        inst->alg.init = crypto_ccm_init_tfm;
        inst->alg.exit = crypto_ccm_exit_tfm;
        inst->alg.setkey = crypto_ccm_setkey;
        inst->alg.setauthsize = crypto_ccm_setauthsize;
        inst->alg.encrypt = crypto_ccm_encrypt;
        inst->alg.decrypt = crypto_ccm_decrypt;

        inst->free = crypto_ccm_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto err_drop_ctr;

out_put_mac:
        crypto_mod_put(mac_alg);
        return err;

err_drop_ctr:
        crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
        crypto_drop_ahash(&ictx->mac);
err_free_inst:
        kfree(inst);
        goto out_put_mac;
}

static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        const char *cipher_name;
        char ctr_name[CRYPTO_MAX_ALG_NAME];
        char mac_name[CRYPTO_MAX_ALG_NAME];
        char full_name[CRYPTO_MAX_ALG_NAME];

        cipher_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);

        if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
                     cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
                     cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
            CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
                                        mac_name);
}

static struct crypto_template crypto_ccm_tmpl = {
        .name = "ccm",
        .create = crypto_ccm_create,
        .module = THIS_MODULE,
};

static int crypto_ccm_base_create(struct crypto_template *tmpl,
                                  struct rtattr **tb)
{
        const char *ctr_name;
        const char *cipher_name;
        char full_name[CRYPTO_MAX_ALG_NAME];

        ctr_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(ctr_name))
                return PTR_ERR(ctr_name);

        cipher_name = crypto_attr_alg_name(tb[2]);
        if (IS_ERR(cipher_name))
                return PTR_ERR(cipher_name);

        if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
                     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
                                        cipher_name);
}

static struct crypto_template crypto_ccm_base_tmpl = {
        .name = "ccm_base",
        .create = crypto_ccm_base_create,
        .module = THIS_MODULE,
};

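/*
 * RFC 4309 (ESP with AES-CCM) keys carry a 3-byte nonce salt appended to
 * the cipher key proper.  The salt is split off here and later combined
 * with the 8-byte per-packet IV to form the 11-byte CCM nonce.
 */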
static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
                                 unsigned int keylen)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
        struct crypto_aead *child = ctx->child;
        int err;

        if (keylen < 3)
                return -EINVAL;

        keylen -= 3;
        memcpy(ctx->nonce, key + keylen, 3);

        crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
                                     CRYPTO_TFM_REQ_MASK);
        err = crypto_aead_setkey(child, key, keylen);
        crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
                                      CRYPTO_TFM_RES_MASK);

        return err;
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
                                      unsigned int authsize)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

        switch (authsize) {
        case 8:
        case 12:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return crypto_aead_setauthsize(ctx->child, authsize);
}

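/*
 * Convert an RFC 4309 request into one for the inner ccm() instance.
 * The CCM IV is rebuilt as L' = 3, the 3-byte salt from the key and the
 * 8-byte IV supplied with the request.  The associated data handed in by
 * IPsec has the IV appended to it, so the first assoclen - 8 bytes are
 * copied into a contiguous buffer behind the IV and the trailing 8 bytes
 * are dropped before the request is passed to the child transform.
 */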
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
        struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
        struct aead_request *subreq = &rctx->subreq;
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
        struct crypto_aead *child = ctx->child;
        struct scatterlist *sg;
        u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
                           crypto_aead_alignmask(child) + 1);

        /* L' */
        iv[0] = 3;

        memcpy(iv + 1, ctx->nonce, 3);
        memcpy(iv + 4, req->iv, 8);

        scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

        sg_init_table(rctx->src, 3);
        sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
        sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
        if (sg != rctx->src + 1)
                sg_chain(rctx->src, 2, sg);

        if (req->src != req->dst) {
                sg_init_table(rctx->dst, 3);
                sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
                sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
                if (sg != rctx->dst + 1)
                        sg_chain(rctx->dst, 2, sg);
        }

        aead_request_set_tfm(subreq, child);
        aead_request_set_callback(subreq, req->base.flags, req->base.complete,
                                  req->base.data);
        aead_request_set_crypt(subreq, rctx->src,
                               req->src == req->dst ? rctx->src : rctx->dst,
                               req->cryptlen, iv);
        aead_request_set_ad(subreq, req->assoclen - 8);

        return subreq;
}

static int crypto_rfc4309_encrypt(struct aead_request *req)
{
        if (req->assoclen != 16 && req->assoclen != 20)
                return -EINVAL;

        req = crypto_rfc4309_crypt(req);

        return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
        if (req->assoclen != 16 && req->assoclen != 20)
                return -EINVAL;

        req = crypto_rfc4309_crypt(req);

        return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_aead *aead;
        unsigned long align;

        aead = crypto_spawn_aead(spawn);
        if (IS_ERR(aead))
                return PTR_ERR(aead);

        ctx->child = aead;

        align = crypto_aead_alignmask(aead);
        align &= ~(crypto_tfm_ctx_alignment() - 1);
        crypto_aead_set_reqsize(
                tfm,
                sizeof(struct crypto_rfc4309_req_ctx) +
                ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
                align + 32);

        return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
        struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
        crypto_drop_aead(aead_instance_ctx(inst));
        kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
                                 struct rtattr **tb)
{
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct crypto_aead_spawn *spawn;
        struct aead_alg *alg;
        const char *ccm_name;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
                return -EINVAL;

        ccm_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(ccm_name))
                return PTR_ERR(ccm_name);

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        spawn = aead_instance_ctx(inst);
        crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
        err = crypto_grab_aead(spawn, ccm_name, 0,
                               crypto_requires_sync(algt->type, algt->mask));
        if (err)
                goto out_free_inst;

        alg = crypto_spawn_aead_alg(spawn);

        err = -EINVAL;

        /* We only support 16-byte blocks. */
        if (crypto_aead_alg_ivsize(alg) != 16)
                goto out_drop_alg;

        /* Not a stream cipher? */
        if (alg->base.cra_blocksize != 1)
                goto out_drop_alg;

        err = -ENAMETOOLONG;
        if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
                     "rfc4309(%s)", alg->base.cra_name) >=
            CRYPTO_MAX_ALG_NAME ||
            snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "rfc4309(%s)", alg->base.cra_driver_name) >=
            CRYPTO_MAX_ALG_NAME)
                goto out_drop_alg;

        inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
        inst->alg.base.cra_priority = alg->base.cra_priority;
        inst->alg.base.cra_blocksize = 1;
        inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

        inst->alg.ivsize = 8;
        inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
        inst->alg.maxauthsize = 16;

        inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

        inst->alg.init = crypto_rfc4309_init_tfm;
        inst->alg.exit = crypto_rfc4309_exit_tfm;

        inst->alg.setkey = crypto_rfc4309_setkey;
        inst->alg.setauthsize = crypto_rfc4309_setauthsize;
        inst->alg.encrypt = crypto_rfc4309_encrypt;
        inst->alg.decrypt = crypto_rfc4309_decrypt;

        inst->free = crypto_rfc4309_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto out_drop_alg;

out:
        return err;

out_drop_alg:
        crypto_drop_aead(spawn);
out_free_inst:
        kfree(inst);
        goto out;
}

static struct crypto_template crypto_rfc4309_tmpl = {
        .name = "rfc4309",
        .create = crypto_rfc4309_create,
        .module = THIS_MODULE,
};

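/*
 * cbcmac() is the raw CBC-MAC used as CCM's authentication primitive:
 * a keyed shash wrapped around a block cipher, with the running CBC
 * state kept in the last blocksize bytes of the descriptor context.
 * Plain CBC-MAC is only safe here because CCM's formatting function
 * length-prefixes the input.
 */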
static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
                                       const u8 *inkey, unsigned int keylen)
{
        struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

        return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        int bs = crypto_shash_digestsize(pdesc->tfm);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

        ctx->len = 0;
        memset(dg, 0, bs);

        return 0;
}

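/*
 * Classic CBC-MAC update: XOR the input into the running block and run
 * the block cipher whenever a full block has been accumulated.  A partial
 * final block is left pending and encrypted by crypto_cbcmac_digest_final(),
 * which is equivalent to processing a zero-padded block since the
 * untouched bytes still hold the previous cipher output.
 */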
static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
                                       unsigned int len)
{
        struct crypto_shash *parent = pdesc->tfm;
        struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        struct crypto_cipher *tfm = tctx->child;
        int bs = crypto_shash_digestsize(parent);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

        while (len > 0) {
                unsigned int l = min(len, bs - ctx->len);

                crypto_xor(dg + ctx->len, p, l);
                ctx->len += l;
                len -= l;
                p += l;

                if (ctx->len == bs) {
                        crypto_cipher_encrypt_one(tfm, dg, dg);
                        ctx->len = 0;
                }
        }

        return 0;
}

static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
        struct crypto_shash *parent = pdesc->tfm;
        struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
        struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
        struct crypto_cipher *tfm = tctx->child;
        int bs = crypto_shash_digestsize(parent);
        u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

        if (ctx->len)
                crypto_cipher_encrypt_one(tfm, dg, dg);

        memcpy(out, dg, bs);

        return 0;
}

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_cipher *cipher;
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;

        return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
        struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct shash_instance *inst;
        struct crypto_alg *alg;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
        if (err)
                return err;

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
                                  CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        inst = shash_alloc_instance("cbcmac", alg);
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto out_put_alg;

        err = crypto_init_spawn(shash_instance_ctx(inst), alg,
                                shash_crypto_instance(inst),
                                CRYPTO_ALG_TYPE_MASK);
        if (err)
                goto out_free_inst;

        inst->alg.base.cra_priority = alg->cra_priority;
        inst->alg.base.cra_blocksize = 1;

        inst->alg.digestsize = alg->cra_blocksize;
        inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
                                   alg->cra_alignmask + 1) +
                             alg->cra_blocksize;

        inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
        inst->alg.base.cra_init = cbcmac_init_tfm;
        inst->alg.base.cra_exit = cbcmac_exit_tfm;

        inst->alg.init = crypto_cbcmac_digest_init;
        inst->alg.update = crypto_cbcmac_digest_update;
        inst->alg.final = crypto_cbcmac_digest_final;
        inst->alg.setkey = crypto_cbcmac_digest_setkey;

        err = shash_register_instance(tmpl, inst);

out_free_inst:
        if (err)
                shash_free_instance(shash_crypto_instance(inst));

out_put_alg:
        crypto_mod_put(alg);
        return err;
}

static struct crypto_template crypto_cbcmac_tmpl = {
        .name = "cbcmac",
        .create = cbcmac_create,
        .free = shash_free_instance,
        .module = THIS_MODULE,
};

static int __init crypto_ccm_module_init(void)
{
        int err;

        err = crypto_register_template(&crypto_cbcmac_tmpl);
        if (err)
                goto out;

        err = crypto_register_template(&crypto_ccm_base_tmpl);
        if (err)
                goto out_undo_cbcmac;

        err = crypto_register_template(&crypto_ccm_tmpl);
        if (err)
                goto out_undo_base;

        err = crypto_register_template(&crypto_rfc4309_tmpl);
        if (err)
                goto out_undo_ccm;

out:
        return err;

out_undo_ccm:
        crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
        crypto_unregister_template(&crypto_ccm_base_tmpl);
out_undo_cbcmac:
        crypto_unregister_template(&crypto_cbcmac_tmpl);
        goto out;
}

static void __exit crypto_ccm_module_exit(void)
{
        crypto_unregister_template(&crypto_rfc4309_tmpl);
        crypto_unregister_template(&crypto_ccm_tmpl);
        crypto_unregister_template(&crypto_ccm_base_tmpl);
        crypto_unregister_template(&crypto_cbcmac_tmpl);
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");