sha2-ce-glue.c
  1. /*
  2. * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
  3. *
  4. * Copyright (C) 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
  5. *
  6. * This program is free software; you can redistribute it and/or modify
  7. * it under the terms of the GNU General Public License version 2 as
  8. * published by the Free Software Foundation.
  9. */
  10. #include <asm/neon.h>
  11. #include <asm/unaligned.h>
  12. #include <crypto/internal/hash.h>
  13. #include <crypto/sha.h>
  14. #include <linux/cpufeature.h>
  15. #include <linux/crypto.h>
  16. #include <linux/module.h>
  17. MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
  18. MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
  19. MODULE_LICENSE("GPL v2");
/*
 * Core SHA-256 block transform, implemented in assembly using the ARMv8
 * Crypto Extensions (asmlinkage: called from C with the asm calling
 * convention).
 *
 * @blocks: number of whole 64-byte blocks at @src to process
 * @src:    input data, @blocks * SHA256_BLOCK_SIZE bytes
 * @state:  the eight 32-bit SHA-256 chaining words, updated in place
 * @head:   optional buffered partial block to consume before @src
 *          (NULL when no data is buffered)
 * @bytes:  NOTE(review): passed as 0 from the update path and as the
 *          total input length from the finup fast path — presumably the
 *          asm uses it to append padding itself; confirm against the
 *          .S implementation.
 */
asmlinkage int sha2_ce_transform(int blocks, u8 const *src, u32 *state,
				 u8 *head, long bytes);
  22. static int sha224_init(struct shash_desc *desc)
  23. {
  24. struct sha256_state *sctx = shash_desc_ctx(desc);
  25. *sctx = (struct sha256_state){
  26. .state = {
  27. SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
  28. SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
  29. }
  30. };
  31. return 0;
  32. }
  33. static int sha256_init(struct shash_desc *desc)
  34. {
  35. struct sha256_state *sctx = shash_desc_ctx(desc);
  36. *sctx = (struct sha256_state){
  37. .state = {
  38. SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
  39. SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
  40. }
  41. };
  42. return 0;
  43. }
/*
 * Feed @len bytes of @data into the hash. Data is accumulated in
 * sctx->buf until at least one full 64-byte block is available; all
 * complete blocks are then hashed in one call to the CE transform, and
 * any trailing remainder is left buffered for the next call.
 */
static int sha2_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	/* bytes already buffered from a previous call (0..63) */
	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

	sctx->count += len;

	if ((partial + len) >= SHA256_BLOCK_SIZE) {
		int blocks;

		if (partial) {
			/* top up the buffered partial block to 64 bytes */
			int p = SHA256_BLOCK_SIZE - partial;

			memcpy(sctx->buf + partial, data, p);
			data += p;
			len -= p;
		}

		blocks = len / SHA256_BLOCK_SIZE;
		len %= SHA256_BLOCK_SIZE;

		/*
		 * Preserve only 28 NEON registers across the call — the
		 * asm routine's working set.
		 */
		kernel_neon_begin_partial(28);
		/* hash the (possibly) completed head block, then @blocks
		 * full blocks straight out of @data; bytes == 0 means no
		 * final padding is performed here. */
		sha2_ce_transform(blocks, data, sctx->state,
				  partial ? sctx->buf : NULL, 0);
		kernel_neon_end();

		data += blocks * SHA256_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		/* stash the remainder for the next update/final */
		memcpy(sctx->buf + partial, data, len);
	return 0;
}
  71. static void sha2_final(struct shash_desc *desc)
  72. {
  73. static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };
  74. struct sha256_state *sctx = shash_desc_ctx(desc);
  75. __be64 bits = cpu_to_be64(sctx->count << 3);
  76. u32 padlen = SHA256_BLOCK_SIZE
  77. - ((sctx->count + sizeof(bits)) % SHA256_BLOCK_SIZE);
  78. sha2_update(desc, padding, padlen);
  79. sha2_update(desc, (const u8 *)&bits, sizeof(bits));
  80. }
  81. static int sha224_final(struct shash_desc *desc, u8 *out)
  82. {
  83. struct sha256_state *sctx = shash_desc_ctx(desc);
  84. __be32 *dst = (__be32 *)out;
  85. int i;
  86. sha2_final(desc);
  87. for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
  88. put_unaligned_be32(sctx->state[i], dst++);
  89. *sctx = (struct sha256_state){};
  90. return 0;
  91. }
  92. static int sha256_final(struct shash_desc *desc, u8 *out)
  93. {
  94. struct sha256_state *sctx = shash_desc_ctx(desc);
  95. __be32 *dst = (__be32 *)out;
  96. int i;
  97. sha2_final(desc);
  98. for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
  99. put_unaligned_be32(sctx->state[i], dst++);
  100. *sctx = (struct sha256_state){};
  101. return 0;
  102. }
  103. static void sha2_finup(struct shash_desc *desc, const u8 *data,
  104. unsigned int len)
  105. {
  106. struct sha256_state *sctx = shash_desc_ctx(desc);
  107. int blocks;
  108. if (sctx->count || !len || (len % SHA256_BLOCK_SIZE)) {
  109. sha2_update(desc, data, len);
  110. sha2_final(desc);
  111. return;
  112. }
  113. /*
  114. * Use a fast path if the input is a multiple of 64 bytes. In
  115. * this case, there is no need to copy data around, and we can
  116. * perform the entire digest calculation in a single invocation
  117. * of sha2_ce_transform()
  118. */
  119. blocks = len / SHA256_BLOCK_SIZE;
  120. kernel_neon_begin_partial(28);
  121. sha2_ce_transform(blocks, data, sctx->state, NULL, len);
  122. kernel_neon_end();
  123. data += blocks * SHA256_BLOCK_SIZE;
  124. }
  125. static int sha224_finup(struct shash_desc *desc, const u8 *data,
  126. unsigned int len, u8 *out)
  127. {
  128. struct sha256_state *sctx = shash_desc_ctx(desc);
  129. __be32 *dst = (__be32 *)out;
  130. int i;
  131. sha2_finup(desc, data, len);
  132. for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
  133. put_unaligned_be32(sctx->state[i], dst++);
  134. *sctx = (struct sha256_state){};
  135. return 0;
  136. }
  137. static int sha256_finup(struct shash_desc *desc, const u8 *data,
  138. unsigned int len, u8 *out)
  139. {
  140. struct sha256_state *sctx = shash_desc_ctx(desc);
  141. __be32 *dst = (__be32 *)out;
  142. int i;
  143. sha2_finup(desc, data, len);
  144. for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
  145. put_unaligned_be32(sctx->state[i], dst++);
  146. *sctx = (struct sha256_state){};
  147. return 0;
  148. }
  149. static int sha2_export(struct shash_desc *desc, void *out)
  150. {
  151. struct sha256_state *sctx = shash_desc_ctx(desc);
  152. struct sha256_state *dst = out;
  153. *dst = *sctx;
  154. return 0;
  155. }
  156. static int sha2_import(struct shash_desc *desc, const void *in)
  157. {
  158. struct sha256_state *sctx = shash_desc_ctx(desc);
  159. struct sha256_state const *src = in;
  160. *sctx = *src;
  161. return 0;
  162. }
/*
 * The two shash algorithm descriptors registered by this module.
 * SHA-224 and SHA-256 share the update/export/import helpers and the
 * full struct sha256_state context; they differ only in init values,
 * digest size, and final/finup truncation. Priority 200 lets these
 * CE-accelerated versions win over the generic C implementations.
 */
static struct shash_alg algs[] = { {
	.init			= sha224_init,
	.update			= sha2_update,
	.final			= sha224_final,
	.finup			= sha224_finup,
	.export			= sha2_export,
	.import			= sha2_import,
	.descsize		= sizeof(struct sha256_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.statesize		= sizeof(struct sha256_state),
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_init,
	.update			= sha2_update,
	.final			= sha256_final,
	.finup			= sha256_finup,
	.export			= sha2_export,
	.import			= sha2_import,
	.descsize		= sizeof(struct sha256_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.statesize		= sizeof(struct sha256_state),
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };
  200. static int __init sha2_ce_mod_init(void)
  201. {
  202. return crypto_register_shashes(algs, ARRAY_SIZE(algs));
  203. }
  204. static void __exit sha2_ce_mod_fini(void)
  205. {
  206. crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
  207. }
/*
 * Auto-load (and run init) only on CPUs that advertise the SHA2
 * Crypto Extensions capability.
 */
module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);