sha2-ce-glue.c

/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
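
/*
 * The actual SHA-256 rounds live in sha2-ce-core.S. As the calls below
 * imply: 'blocks' full 64-byte blocks are consumed from 'src'; 'state'
 * holds the eight 32-bit hash words; 'head', when non-NULL, points at a
 * buffered block that is hashed before 'src'; and a nonzero 'bytes'
 * (used only on the finup fast path) tells the assembly to append the
 * final padding itself, encoding 'bytes' as the message length.
 */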

asmlinkage int sha2_ce_transform(int blocks, u8 const *src, u32 *state,
                                 u8 *head, long bytes);

static int sha224_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        *sctx = (struct sha256_state){
                .state = {
                        SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
                        SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
                }
        };
        return 0;
}

static int sha256_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        *sctx = (struct sha256_state){
                .state = {
                        SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
                        SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
                }
        };
        return 0;
}

static int sha2_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->count += len;

        if ((partial + len) >= SHA256_BLOCK_SIZE) {
                int blocks;

                if (partial) {
                        int p = SHA256_BLOCK_SIZE - partial;

                        memcpy(sctx->buf + partial, data, p);
                        data += p;
                        len -= p;
                }

                blocks = len / SHA256_BLOCK_SIZE;
                len %= SHA256_BLOCK_SIZE;

                kernel_neon_begin_partial(28);
                sha2_ce_transform(blocks, data, sctx->state,
                                  partial ? sctx->buf : NULL, 0);
                kernel_neon_end();

                data += blocks * SHA256_BLOCK_SIZE;
                partial = 0;
        }
        if (len)
                memcpy(sctx->buf + partial, data, len);
        return 0;
}
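
/*
 * A worked example of the bookkeeping above (not from the original
 * source): two successive 40-byte updates. The first call sees
 * partial + len == 40 < 64 and simply buffers all 40 bytes. The second
 * sees partial + len == 80 >= 64: p == 24 bytes top up sctx->buf, the
 * completed block is handed to sha2_ce_transform() via 'head' (with
 * blocks == 0 further blocks taken from 'data'), and the 16 leftover
 * bytes are buffered for the next update or the final padding.
 */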

static void sha2_final(struct shash_desc *desc)
{
        static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };

        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be64 bits = cpu_to_be64(sctx->count << 3);
        u32 padlen = SHA256_BLOCK_SIZE
                     - ((sctx->count + sizeof(bits)) % SHA256_BLOCK_SIZE);

        sha2_update(desc, padding, padlen);
        sha2_update(desc, (const u8 *)&bits, sizeof(bits));
}
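
/*
 * Example of the padding arithmetic (illustrative): hashing "abc" gives
 * sctx->count == 3, so bits == cpu_to_be64(24) and
 * padlen == 64 - ((3 + 8) % 64) == 53. The two updates append 0x80,
 * 52 zero bytes and the 8-byte length, for 3 + 53 + 8 == 64 bytes, one
 * full block. If (count + 8) is already a multiple of 64, padlen comes
 * out as 64, i.e. a whole extra block, which keeps the mandatory 0x80
 * marker in place.
 */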

static int sha224_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_final(desc);

        for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static int sha256_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_final(desc);

        for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static void sha2_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        int blocks;

        if (sctx->count || !len || (len % SHA256_BLOCK_SIZE)) {
                sha2_update(desc, data, len);
                sha2_final(desc);
                return;
        }

        /*
         * Use a fast path if the input is a multiple of 64 bytes. In
         * this case, there is no need to copy data around, and we can
         * perform the entire digest calculation in a single invocation
         * of sha2_ce_transform()
         */
        blocks = len / SHA256_BLOCK_SIZE;

        kernel_neon_begin_partial(28);
        sha2_ce_transform(blocks, data, sctx->state, NULL, len);
        kernel_neon_end();
}
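
/*
 * Example (not part of the original file): a one-shot digest of a
 * 128-byte buffer arrives here with sctx->count == 0 and
 * len % 64 == 0, so both blocks are hashed by a single
 * sha2_ce_transform() call. Since sha2_final() is never invoked on
 * this path, passing the nonzero 'len' is what makes the assembly
 * append the final padding itself.
 */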

static int sha224_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_finup(desc, data, len);

        for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_finup(desc, data, len);

        for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static int sha2_export(struct shash_desc *desc, void *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        struct sha256_state *dst = out;

        *dst = *sctx;
        return 0;
}

static int sha2_import(struct shash_desc *desc, const void *in)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        struct sha256_state const *src = in;

        *sctx = *src;
        return 0;
}
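
/*
 * export/import let a caller snapshot an in-progress hash and resume it
 * later; for this driver the exported state is simply the whole
 * sha256_state. A minimal sketch of such a caller (hypothetical code,
 * not part of this file; 'desc', 'part1', 'part2' and 'digest' are
 * placeholders, and 'desc' is assumed to wrap a "sha256" shash tfm):
 */
#if 0
        struct sha256_state saved;

        crypto_shash_init(desc);
        crypto_shash_update(desc, part1, part1_len);
        crypto_shash_export(desc, &saved);      /* snapshot after part1 */
        /* ... the descriptor may be reused for other work here ... */
        crypto_shash_import(desc, &saved);      /* resume the saved hash */
        crypto_shash_update(desc, part2, part2_len);
        crypto_shash_final(desc, digest);
#endif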

static struct shash_alg algs[] = { {
        .init                   = sha224_init,
        .update                 = sha2_update,
        .final                  = sha224_final,
        .finup                  = sha224_finup,
        .export                 = sha2_export,
        .import                 = sha2_import,
        .descsize               = sizeof(struct sha256_state),
        .digestsize             = SHA224_DIGEST_SIZE,
        .statesize              = sizeof(struct sha256_state),
        .base                   = {
                .cra_name               = "sha224",
                .cra_driver_name        = "sha224-ce",
                .cra_priority           = 200,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
}, {
        .init                   = sha256_init,
        .update                 = sha2_update,
        .final                  = sha256_final,
        .finup                  = sha256_finup,
        .export                 = sha2_export,
        .import                 = sha2_import,
        .descsize               = sizeof(struct sha256_state),
        .digestsize             = SHA256_DIGEST_SIZE,
        .statesize              = sizeof(struct sha256_state),
        .base                   = {
                .cra_name               = "sha256",
                .cra_driver_name        = "sha256-ce",
                .cra_priority           = 200,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
} };
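
/*
 * With cra_priority 200, these implementations take precedence over the
 * generic C "sha224"/"sha256", which register with a lower priority, so
 * callers just request the algorithm by name. A minimal sketch of such
 * a caller (hypothetical code, not part of this file; 'data' and
 * 'data_len' are placeholders, error handling trimmed):
 */
#if 0
        struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
        struct shash_desc *desc;
        u8 digest[SHA256_DIGEST_SIZE];

        /* a shash_desc carries descsize bytes of per-request state */
        desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
                       GFP_KERNEL);
        desc->tfm = tfm;

        crypto_shash_digest(desc, data, data_len, digest); /* one-shot */

        kfree(desc);
        crypto_free_shash(tfm);
#endif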

static int __init sha2_ce_mod_init(void)
{
        return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
        crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}
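
/*
 * module_cpu_feature_match() wires up sha2_ce_mod_init() as the module
 * init function and emits a CPU-feature device table entry for the SHA2
 * capability, so the module is autoloaded only on CPUs that actually
 * implement the SHA-256 instructions.
 */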

module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);