aesbs-glue.c

/*
 * linux/arch/arm/crypto/aesbs-glue.c - glue code for NEON bit sliced AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/cbc.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <crypto/xts.h>

#include "aes_glue.h"

#define BIT_SLICED_KEY_MAXSIZE	(128 * (AES_MAXNR - 1) + 2 * AES_BLOCK_SIZE)
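
/*
 * A BS_KEY carries the regular AES key schedule in 'rk' plus a buffer for
 * the bit-sliced round keys in 'bs'; 'converted' records whether the
 * bit-sliced form has been generated yet (the setkey routines below reset
 * it to zero whenever a new key is installed).
 */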
struct BS_KEY {
	struct AES_KEY rk;
	int converted;
	u8 __aligned(8) bs[BIT_SLICED_KEY_MAXSIZE];
} __aligned(8);
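
/* prototypes of the NEON bit-sliced AES assembly routines */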
asmlinkage void bsaes_enc_key_convert(u8 out[], struct AES_KEY const *in);
asmlinkage void bsaes_dec_key_convert(u8 out[], struct AES_KEY const *in);

asmlinkage void bsaes_cbc_encrypt(u8 const in[], u8 out[], u32 bytes,
		struct BS_KEY *key, u8 iv[]);

asmlinkage void bsaes_ctr32_encrypt_blocks(u8 const in[], u8 out[], u32 blocks,
		struct BS_KEY *key, u8 const iv[]);

asmlinkage void bsaes_xts_encrypt(u8 const in[], u8 out[], u32 bytes,
		struct BS_KEY *key, u8 tweak[]);

asmlinkage void bsaes_xts_decrypt(u8 const in[], u8 out[], u32 bytes,
		struct BS_KEY *key, u8 tweak[]);
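
/*
 * CBC encryption is inherently sequential, so it uses the scalar
 * AES_encrypt() routine on a regular key schedule; only CBC decryption,
 * which can process blocks in parallel, keeps a bit-sliced key.
 */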
struct aesbs_cbc_ctx {
	struct AES_KEY enc;
	struct BS_KEY dec;
};

struct aesbs_ctr_ctx {
	struct BS_KEY enc;
};

struct aesbs_xts_ctx {
	struct BS_KEY enc;
	struct BS_KEY dec;
	struct AES_KEY twkey;
};

static int aesbs_cbc_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int bits = key_len * 8;

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc)) {
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	ctx->dec.rk = ctx->enc;
	private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
	ctx->dec.converted = 0;
	return 0;
}

static int aesbs_ctr_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	int bits = key_len * 8;

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	ctx->enc.converted = 0;
	return 0;
}

static int aesbs_xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int bits = key_len * 4;
	int err;

	err = xts_verify_key(tfm, in_key, key_len);
	if (err)
		return err;

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	ctx->dec.rk = ctx->enc.rk;
	private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
	private_AES_set_encrypt_key(in_key + key_len / 2, bits, &ctx->twkey);
	ctx->enc.converted = ctx->dec.converted = 0;
	return 0;
}

static inline void aesbs_encrypt_one(struct crypto_skcipher *tfm,
		const u8 *src, u8 *dst)
{
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	AES_encrypt(src, dst, &ctx->enc);
}

static int aesbs_cbc_encrypt(struct skcipher_request *req)
{
	return crypto_cbc_encrypt_walk(req, aesbs_encrypt_one);
}

static inline void aesbs_decrypt_one(struct crypto_skcipher *tfm,
		const u8 *src, u8 *dst)
{
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	AES_decrypt(src, dst, &ctx->dec.rk);
}

static int aesbs_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	for (err = skcipher_walk_virt(&walk, req, false);
	     (nbytes = walk.nbytes); err = skcipher_walk_done(&walk, nbytes)) {
		u32 blocks = nbytes / AES_BLOCK_SIZE;
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		u8 *iv = walk.iv;
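
		/*
		 * The bit-sliced NEON code interleaves several blocks, so it
		 * is only used when at least eight full blocks are available;
		 * shorter inputs fall back to the generic per-block helper.
		 */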
		if (blocks >= 8) {
			kernel_neon_begin();
			bsaes_cbc_encrypt(src, dst, nbytes, &ctx->dec, iv);
			kernel_neon_end();
			nbytes %= AES_BLOCK_SIZE;
			continue;
		}

		nbytes = crypto_cbc_decrypt_blocks(&walk, tfm,
				aesbs_decrypt_one);
	}
	return err;
}
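
/* add 'addend' to the 128-bit big-endian counter, propagating any carries */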
static void inc_be128_ctr(__be32 ctr[], u32 addend)
{
	int i;

	for (i = 3; i >= 0; i--, addend = 1) {
		u32 n = be32_to_cpu(ctr[i]) + addend;

		ctr[i] = cpu_to_be32(n);
		if (n >= addend)
			break;
	}
}

static int aesbs_ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	u32 blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		__be32 *ctr = (__be32 *)walk.iv;
		u32 headroom = UINT_MAX - be32_to_cpu(ctr[3]);

		/* avoid 32 bit counter overflow in the NEON code */
		if (unlikely(headroom < blocks)) {
			blocks = headroom + 1;
			tail = walk.nbytes - blocks * AES_BLOCK_SIZE;
		}

		kernel_neon_begin();
		bsaes_ctr32_encrypt_blocks(walk.src.virt.addr,
				walk.dst.virt.addr, blocks,
				&ctx->enc, walk.iv);
		kernel_neon_end();
		inc_be128_ctr(ctr, blocks);

		err = skcipher_walk_done(&walk, tail);
	}
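	/*
	 * Handle a trailing partial block: encrypt the counter to produce a
	 * block of keystream and xor it into the copied tail.
	 */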
	if (walk.nbytes) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 ks[AES_BLOCK_SIZE];

		AES_encrypt(walk.iv, ks, &ctx->enc.rk);
		if (tdst != tsrc)
			memcpy(tdst, tsrc, walk.nbytes);
		crypto_xor(tdst, ks, walk.nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	return err;
}

static int aesbs_xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	/* generate the initial tweak */
	AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

	while (walk.nbytes) {
		kernel_neon_begin();
		bsaes_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
				walk.nbytes, &ctx->enc, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int aesbs_xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	/* generate the initial tweak */
	AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

	while (walk.nbytes) {
		kernel_neon_begin();
		bsaes_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
				walk.nbytes, &ctx->dec, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
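
/*
 * The algorithms below are registered as internal-only implementations
 * (note the "__" prefix and CRYPTO_ALG_INTERNAL); aesbs_mod_init() wraps
 * each of them in a simd helper that is what users actually see.
 */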
static struct skcipher_alg aesbs_algs[] = { {
	.base = {
		.cra_name = "__cbc(aes)",
		.cra_driver_name = "__cbc-aes-neonbs",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_INTERNAL,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct aesbs_cbc_ctx),
		.cra_alignmask = 7,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = aesbs_cbc_set_key,
	.encrypt = aesbs_cbc_encrypt,
	.decrypt = aesbs_cbc_decrypt,
}, {
	.base = {
		.cra_name = "__ctr(aes)",
		.cra_driver_name = "__ctr-aes-neonbs",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_INTERNAL,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aesbs_ctr_ctx),
		.cra_alignmask = 7,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.chunksize = AES_BLOCK_SIZE,
	.setkey = aesbs_ctr_set_key,
	.encrypt = aesbs_ctr_encrypt,
	.decrypt = aesbs_ctr_encrypt,
}, {
	.base = {
		.cra_name = "__xts(aes)",
		.cra_driver_name = "__xts-aes-neonbs",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_INTERNAL,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct aesbs_xts_ctx),
		.cra_alignmask = 7,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = aesbs_xts_set_key,
	.encrypt = aesbs_xts_encrypt,
	.decrypt = aesbs_xts_decrypt,
} };

struct simd_skcipher_alg *aesbs_simd_algs[ARRAY_SIZE(aesbs_algs)];

static void aesbs_mod_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aesbs_simd_algs) && aesbs_simd_algs[i]; i++)
		simd_skcipher_free(aesbs_simd_algs[i]);

	crypto_unregister_skciphers(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}

static int __init aesbs_mod_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!cpu_has_neon())
		return -ENODEV;

	err = crypto_register_skciphers(aesbs_algs, ARRAY_SIZE(aesbs_algs));
	if (err)
		return err;
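
	/*
	 * Create a simd wrapper for each internal algorithm, e.g. "cbc(aes)"
	 * on top of "__cbc-aes-neonbs", by stripping the "__" prefix from the
	 * names registered above.
	 */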
	for (i = 0; i < ARRAY_SIZE(aesbs_algs); i++) {
		algname = aesbs_algs[i].base.cra_name + 2;
		drvname = aesbs_algs[i].base.cra_driver_name + 2;
		basename = aesbs_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aesbs_simd_algs[i] = simd;
	}
	return 0;

unregister_simds:
	aesbs_mod_exit();
	return err;
}

module_init(aesbs_mod_init);
module_exit(aesbs_mod_exit);

MODULE_DESCRIPTION("Bit sliced AES in CBC/CTR/XTS modes using NEON");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL");