safexcel_cipher.c

/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	bool aead;

	__le32 key[8];
	unsigned int key_len;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	bool needs_inv;
};

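/*
 * Build the command token for a plain skcipher request: in CBC mode the IV
 * is first copied into the command descriptor, then a single direction
 * instruction crypts the whole payload and sends it to the output.
 */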
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
	struct safexcel_token *token;
	unsigned offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			offset = DES_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, DES_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			offset = DES3_EDE_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, DES3_EDE_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			offset = AES_BLOCK_SIZE / sizeof(u32);
			memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		}
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = length;
	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
			EIP197_TOKEN_STAT_LAST_HASH;
	token[0].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_OUTPUT;
}

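/*
 * Build the command token for an AEAD request: hash the associated data,
 * then crypt+hash the payload. On encryption the computed digest is
 * appended to the output; on decryption it is retrieved from the input
 * and verified by the engine.
 */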
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;
	unsigned offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		offset = AES_BLOCK_SIZE / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);

		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = assoclen;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[1].packet_length = cryptlen;
	token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
	token[1].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	if (direction == SAFEXCEL_ENCRYPT) {
		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[3].packet_length = digestsize |
					 EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}

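/*
 * Expand the AES key and store it in the transform context. With the
 * engine record cache enabled, changing the key of an already programmed
 * context flags it for invalidation.
 */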
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = crypto_aes_expand_key(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

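/*
 * authenc() setkey: split the key blob into encryption and authentication
 * keys, precompute the HMAC ipad/opad states with safexcel_hmac_setkey(),
 * and flag the context record for invalidation if any of them changed.
 */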
static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->key))
		goto badkey;

	/* Encryption key */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
				    CRYPTO_TFM_RES_MASK);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

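/*
 * Fill in the control words of the first command descriptor: operation
 * type (crypto only, or combined crypto+hash for AEADs), cipher algorithm
 * and key size, HMAC digest algorithm and the context record size.
 */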
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size;

	if (ctx->aead) {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

		/* The decryption control type is a combination of the
		 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
		 * types.
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
	}

	cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
	cdesc->control_data.control1 |= ctx->mode;

	if (ctx->aead)
		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
						ctx->hash_alg;

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len);
			return -EINVAL;
		}
	}

	ctrl_size = ctx->key_len / sizeof(u32);
	if (ctx->aead)
		/* Take into account the ipad+opad digests */
		ctrl_size += ctx->state_sz / sizeof(u32) * 2;
	cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

	return 0;
}

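/*
 * Collect and check the result descriptors of a completed cipher request,
 * acknowledge the ring and unmap the source/destination scatterlists
 * before letting the request complete.
 */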
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	do {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	} while (!rdesc->last_seg);

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, cryptlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, cryptlen),
			     DMA_FROM_DEVICE);
	}

	*should_complete = true;

	return ndesc;
}

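/*
 * Turn a request into ring descriptors: map src/dst for DMA, copy the key
 * (plus ipad/opad for AEADs) into the context record, emit one command
 * descriptor per source segment (the first one carries the context control
 * words and the token) and one result descriptor per destination segment.
 * Everything is rolled back if one of the rings runs full.
 */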
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_result_desc *rdesc, *first_rdesc;
	struct scatterlist *sg;
	unsigned int totlen = cryptlen + assoclen;
	int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
	int i, ret = 0;

	if (src == dst) {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_BIDIRECTIONAL);
		nr_dst = nr_src;
		if (!nr_src)
			return -EINVAL;
	} else {
		nr_src = dma_map_sg(priv->dev, src,
				    sg_nents_for_len(src, totlen),
				    DMA_TO_DEVICE);
		if (!nr_src)
			return -EINVAL;

		nr_dst = dma_map_sg(priv->dev, dst,
				    sg_nents_for_len(dst, totlen),
				    DMA_FROM_DEVICE);
		if (!nr_dst) {
			dma_unmap_sg(priv->dev, src,
				     sg_nents_for_len(src, totlen),
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (ctx->aead) {
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
		       ctx->opad, ctx->state_sz);
	}

	/* command descriptors */
	for_each_sg(src, sg, nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1) {
			safexcel_context_control(ctx, base, sreq, cdesc);
			if (ctx->aead)
				safexcel_aead_token(ctx, iv, cdesc,
						    sreq->direction, cryptlen,
						    assoclen, digestsize);
			else
				safexcel_skcipher_token(ctx, iv, cdesc,
							cryptlen);
		}

		queued -= len;
		if (!queued)
			break;
	}

	/* result descriptors */
	for_each_sg(dst, sg, nr_dst, i) {
		bool first = !i, last = (i == nr_dst - 1);
		u32 len = sg_dma_len(sg);

		rdesc = safexcel_add_rdesc(priv, ring, first, last,
					   sg_dma_address(sg), len);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first)
			first_rdesc = rdesc;
		n_rdesc++;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src,
			     sg_nents_for_len(src, totlen),
			     DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst,
			     sg_nents_for_len(dst, totlen),
			     DMA_FROM_DEVICE);
	}

	return ret;
}

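/*
 * Handle the result of a context invalidation: if the transform is being
 * torn down the context record is freed, otherwise the original request is
 * requeued on a newly selected ring and processed again.
 */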
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	do {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	} while (!rdesc->last_seg);

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, req->iv,
					commands, results);
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	return ret;
}

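/*
 * Synchronously invalidate a context record: mark the context for final
 * invalidation, queue the prepared request, kick the ring worker and wait
 * for the completion callback to report the result.
 */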
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

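/*
 * Common entry point for all encrypt/decrypt calls: allocate the
 * per-transform context record on first use, turn a pending context
 * invalidation into a needs_inv request, then enqueue the request on the
 * ring's crypto queue and schedule the ring worker.
 */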
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir, u32 mode,
			      enum safexcel_cipher_alg alg)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->alg = alg;
	ctx->mode = mode;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}

static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;

	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));

	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_ecb_aes_encrypt,
		.decrypt = safexcel_ecb_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}

static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_cbc_aes_encrypt,
		.decrypt = safexcel_cbc_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}

static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];
	int ret;

	if (len != DES_KEY_SIZE) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ret = des_ekey(tmp, key);
	if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_cbc_des_encrypt,
		.decrypt = safexcel_cbc_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}

static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_ecb_des_encrypt,
		.decrypt = safexcel_ecb_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}

static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				    const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (len != DES3_EDE_KEY_SIZE) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma) {
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_cbc_des3_ede_encrypt,
		.decrypt = safexcel_cbc_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}

static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_ecb_des3_ede_encrypt,
		.decrypt = safexcel_ecb_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
				  CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
				  CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;

	return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};