/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * Some ideas are from the marvell-cesa.c and s5p-sss.c drivers.
 */
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC			BIT(0)

static void rk_crypto_complete(struct crypto_async_request *base, int err)
{
        if (base->complete)
                base->complete(base, err);
}
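
/*
 * Common entry path for every encrypt/decrypt call below: reject requests
 * whose byte count is not a multiple of dev->align_size (set from the
 * algorithm's alignmask in rk_ablk_cra_init(), so effectively the block
 * size), otherwise hand the request to the driver's queue.
 */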
static int rk_handle_req(struct rk_crypto_info *dev,
                         struct ablkcipher_request *req)
{
        if (!IS_ALIGNED(req->nbytes, dev->align_size))
                return -EINVAL;
        else
                return dev->enqueue(dev, &req->base);
}
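
/*
 * Validate the AES key length (128, 192 or 256 bits) and write the key
 * straight into the engine's key registers; only the length is kept in
 * the transform context.
 */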
static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
                         const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
        return 0;
}
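
/*
 * Shared setkey for DES and 3DES: reject bad key lengths, honour
 * CRYPTO_TFM_REQ_WEAK_KEY for single DES (des_ekey() returns 0 for a
 * weak key), then program the key registers.
 */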
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
                          const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 tmp[DES_EXPKEY_WORDS];

        if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        if (keylen == DES_KEY_SIZE) {
                if (!des_ekey(tmp, key) &&
                    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                        tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
                        return -EINVAL;
                }
        }

        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
        return 0;
}
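
/*
 * The twelve entry points below only record the control-register bits for
 * the requested mode in the transform context; the actual transfer is set
 * up later by rk_ablk_start() once the request is dequeued. DES ECB
 * encryption is mode 0: the absence of the 3DES-select, CBC-chaining and
 * decrypt bits.
 */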
static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_ECB_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_CBC_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = 0;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
                    RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}
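
/*
 * Program the engine for the current request: pick the TDES or AES
 * register block based on the cipher's block size, load the IV, enable
 * FIFO mode and key/IV byte swapping, and unmask the block-cipher DMA
 * done/error interrupts.
 */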
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 ivsize, block, conf_reg = 0;

        block = crypto_tfm_alg_blocksize(tfm);
        ivsize = crypto_ablkcipher_ivsize(cipher);

        if (block == DES_BLOCK_SIZE) {
                ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
                             RK_CRYPTO_TDES_BYTESWAP_KEY |
                             RK_CRYPTO_TDES_BYTESWAP_IV;
                CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->info, ivsize);
                conf_reg = RK_CRYPTO_DESSEL;
        } else {
                ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
                             RK_CRYPTO_AES_KEY_CHANGE |
                             RK_CRYPTO_AES_BYTESWAP_KEY |
                             RK_CRYPTO_AES_BYTESWAP_IV;
                if (ctx->keylen == AES_KEYSIZE_192)
                        ctx->mode |= RK_CRYPTO_AES_192BIT_key;
                else if (ctx->keylen == AES_KEYSIZE_256)
                        ctx->mode |= RK_CRYPTO_AES_256BIT_key;
                CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->info, ivsize);
        }
        conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
                    RK_CRYPTO_BYTESWAP_BRFIFO;
        CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
        CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
                     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}
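
/*
 * Load the DMA source/destination addresses and the transfer length in
 * 32-bit words, then start one block-cipher DMA run.
 */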
static void crypto_dma_start(struct rk_crypto_info *dev)
{
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
        CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
                     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

static int rk_set_data_start(struct rk_crypto_info *dev)
{
        int err;

        err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
        if (!err)
                crypto_dma_start(dev);
        return err;
}
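
/*
 * dev->start callback, invoked once a request has been taken off the
 * queue: capture the scatterlist walk state in the device structure and
 * kick off the first DMA chunk with the register lock held.
 */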
static int rk_ablk_start(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        unsigned long flags;
        int err = 0;

        dev->left_bytes = req->nbytes;
        dev->total = req->nbytes;
        dev->sg_src = req->src;
        dev->first = req->src;
        dev->nents = sg_nents(req->src);
        dev->sg_dst = req->dst;
        dev->aligned = 1;

        spin_lock_irqsave(&dev->lock, flags);
        rk_ablk_hw_init(dev);
        err = rk_set_data_start(dev);
        spin_unlock_irqrestore(&dev->lock, flags);
        return err;
}
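
/*
 * After the final chunk, read the updated IV back from the engine into
 * req->info so that CBC chaining across consecutive requests behaves like
 * the software implementations.
 */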
static void rk_iv_copyback(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        u32 ivsize = crypto_ablkcipher_ivsize(tfm);

        if (ivsize == DES_BLOCK_SIZE)
                memcpy_fromio(req->info, dev->reg + RK_CRYPTO_TDES_IV_0,
                              ivsize);
        else if (ivsize == AES_BLOCK_SIZE)
                memcpy_fromio(req->info, dev->reg + RK_CRYPTO_AES_IV_0, ivsize);
}

/*
 * Return:
 *      negative error code     an error occurred, abort the request
 *      0                       no error, processing continues
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
        int err = 0;
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);

        dev->unload_data(dev);
        if (!dev->aligned) {
                /* copy the bounce buffer back into the destination sglist */
                if (!sg_pcopy_from_buffer(req->dst, dev->nents,
                                          dev->addr_vir, dev->count,
                                          dev->total - dev->left_bytes -
                                          dev->count)) {
                        err = -EINVAL;
                        goto out_rx;
                }
        }
        if (dev->left_bytes) {
                if (dev->aligned) {
                        if (sg_is_last(dev->sg_src)) {
                                dev_err(dev->dev, "[%s:%d] Lack of data\n",
                                        __func__, __LINE__);
                                err = -ENOMEM;
                                goto out_rx;
                        }
                        dev->sg_src = sg_next(dev->sg_src);
                        dev->sg_dst = sg_next(dev->sg_dst);
                }
                err = rk_set_data_start(dev);
        } else {
                rk_iv_copyback(dev);
                /* the whole request finished without any error */
                dev->complete(dev->async_req, 0);
                tasklet_schedule(&dev->queue_task);
        }
out_rx:
        return err;
}
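
/*
 * Per-transform init/exit: bind the transform to its device, hook up the
 * driver callbacks, allocate one page as a bounce buffer for misaligned
 * scatterlists, and keep the clocks enabled for the transform's lifetime.
 */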
static int rk_ablk_cra_init(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_alg *alg = tfm->__crt_alg;
        struct rk_crypto_tmp *algt;

        algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);

        ctx->dev = algt->dev;
        ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
        ctx->dev->start = rk_ablk_start;
        ctx->dev->update = rk_ablk_rx;
        ctx->dev->complete = rk_crypto_complete;
        ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

        return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}

static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        free_page((unsigned long)ctx->dev->addr_vir);
        ctx->dev->disable_clk(ctx->dev);
}
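
/*
 * Algorithm templates registered with the crypto API by the core driver
 * (rk3288_crypto.c). All six are asynchronous block ciphers backed by the
 * same machinery above; only the mode bits, key sizes and IV sizes differ.
 */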
struct rk_crypto_tmp rk_ecb_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "ecb(aes)",
                .cra_driver_name = "ecb-aes-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x0f,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .setkey = rk_aes_setkey,
                        .encrypt = rk_aes_ecb_encrypt,
                        .decrypt = rk_aes_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "cbc(aes)",
                .cra_driver_name = "cbc-aes-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x0f,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = rk_aes_setkey,
                        .encrypt = rk_aes_cbc_encrypt,
                        .decrypt = rk_aes_cbc_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_ecb_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "ecb(des)",
                .cra_driver_name = "ecb-des-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = DES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x07,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .setkey = rk_tdes_setkey,
                        .encrypt = rk_des_ecb_encrypt,
                        .decrypt = rk_des_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "cbc(des)",
                .cra_driver_name = "cbc-des-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = DES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x07,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = rk_tdes_setkey,
                        .encrypt = rk_des_cbc_encrypt,
                        .decrypt = rk_des_cbc_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "ecb(des3_ede)",
                .cra_driver_name = "ecb-des3-ede-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = DES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x07,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = DES3_EDE_KEY_SIZE,
                        .max_keysize = DES3_EDE_KEY_SIZE,
                        /* ECB takes no IV, so no .ivsize here */
                        .setkey = rk_tdes_setkey,
                        .encrypt = rk_des3_ede_ecb_encrypt,
                        .decrypt = rk_des3_ede_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name = "cbc(des3_ede)",
                .cra_driver_name = "cbc-des3-ede-rk",
                .cra_priority = 300,
                .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                             CRYPTO_ALG_ASYNC,
                .cra_blocksize = DES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .cra_alignmask = 0x07,
                .cra_type = &crypto_ablkcipher_type,
                .cra_module = THIS_MODULE,
                .cra_init = rk_ablk_cra_init,
                .cra_exit = rk_ablk_cra_exit,
                .cra_u.ablkcipher = {
                        .min_keysize = DES3_EDE_KEY_SIZE,
                        .max_keysize = DES3_EDE_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        .setkey = rk_tdes_setkey,
                        .encrypt = rk_des3_ede_cbc_encrypt,
                        .decrypt = rk_des3_ede_cbc_decrypt,
                }
        }
};
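
/*
 * Illustrative sketch, not part of the original driver: how a kernel user
 * of this era's ablkcipher API (since removed in favour of skcipher) could
 * drive the "cbc(aes)" template above. The function and variable names
 * below are hypothetical, and the sketch assumes the usual
 * <linux/crypto.h>, <linux/scatterlist.h> and <linux/completion.h>
 * helpers. Since rk_handle_req() rejects lengths that are not
 * block-aligned, @len must be a multiple of AES_BLOCK_SIZE, and @buf must
 * be linearly mapped (e.g. kmalloc'd) so it can sit in a scatterlist.
 */
static void rk_example_cb(struct crypto_async_request *req, int err)
{
        if (err == -EINPROGRESS)
                return;         /* request left the backlog, keep waiting */
        complete(req->data);    /* wake up the submitter */
}

static int __maybe_unused rk_example_cbc_aes_encrypt(u8 *buf,
                                                     unsigned int len,
                                                     const u8 *key, u8 *iv)
{
        struct crypto_ablkcipher *tfm;
        struct ablkcipher_request *req;
        struct scatterlist sg;
        DECLARE_COMPLETION_ONSTACK(done);
        int ret;

        /*
         * The crypto core picks the highest-priority "cbc(aes)" provider;
         * whether that is "cbc-aes-rk" depends on what else is registered.
         */
        tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
        if (ret)
                goto out_free_tfm;

        req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                ret = -ENOMEM;
                goto out_free_tfm;
        }

        sg_init_one(&sg, buf, len);     /* in-place: src == dst */
        ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                        rk_example_cb, &done);
        ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

        ret = crypto_ablkcipher_encrypt(req);
        if (ret == -EINPROGRESS || ret == -EBUSY) {
                /* asynchronous path: wait for rk_example_cb() to fire */
                wait_for_completion(&done);
                ret = 0;
        }

        ablkcipher_request_free(req);
out_free_tfm:
        crypto_free_ablkcipher(tfm);
        return ret;
}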