/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Shared glue code for 128bit block ciphers
 */

#ifndef _CRYPTO_GLUE_HELPER_H
#define _CRYPTO_GLUE_HELPER_H

#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <asm/fpu/api.h>
#include <crypto/b128ops.h>

/* Process block(s) with no chaining state (ECB-style): ctx, dst, src. */
typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src);
/* CBC variant: operates on 128-bit blocks; chaining handled by the caller. */
typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src);
/* CTR variant: @iv is the 128-bit counter (little-endian, see le128_inc). */
typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src,
				       le128 *iv);
/* XTS variant: @iv is the per-block tweak value. */
typedef void (*common_glue_xts_func_t)(void *ctx, u128 *dst, const u128 *src,
				       le128 *iv);

/*
 * NOTE(review): these casts erase the real function pointer type so that
 * cipher-specific implementations fit the common_glue_*_t slots.  Calling
 * through a mismatched function pointer type is technically undefined
 * behavior and defeats CFI checking — confirm against the tree's current
 * convention before adding new users.
 */
#define GLUE_FUNC_CAST(fn) ((common_glue_func_t)(fn))
#define GLUE_CBC_FUNC_CAST(fn) ((common_glue_cbc_func_t)(fn))
#define GLUE_CTR_FUNC_CAST(fn) ((common_glue_ctr_func_t)(fn))
#define GLUE_XTS_FUNC_CAST(fn) ((common_glue_xts_func_t)(fn))
/*
 * One mode implementation in a glue dispatch table: a function pointer
 * (selected from fn_u by the mode being driven) plus the fixed number of
 * 128-bit blocks that a single call to it consumes.
 */
struct common_glue_func_entry {
	unsigned int num_blocks; /* number of blocks that @fn will process */
	union {
		common_glue_func_t ecb;
		common_glue_cbc_func_t cbc;
		common_glue_ctr_func_t ctr;
		common_glue_xts_func_t xts;
	} fn_u;
};
/*
 * Per-cipher dispatch table handed to the glue_*_req_128bit() helpers:
 * @num_funcs entries ordered from widest (most blocks per call) to a
 * final single-block fallback.
 */
struct common_glue_ctx {
	unsigned int num_funcs;
	int fpu_blocks_limit; /* -1 means fpu not needed at all */

	/*
	 * First funcs entry must have largest num_blocks and last funcs entry
	 * must have num_blocks == 1!
	 */
	struct common_glue_func_entry funcs[];
};
  39. static inline bool glue_skwalk_fpu_begin(unsigned int bsize,
  40. int fpu_blocks_limit,
  41. struct skcipher_walk *walk,
  42. bool fpu_enabled, unsigned int nbytes)
  43. {
  44. if (likely(fpu_blocks_limit < 0))
  45. return false;
  46. if (fpu_enabled)
  47. return true;
  48. /*
  49. * Vector-registers are only used when chunk to be processed is large
  50. * enough, so do not enable FPU until it is necessary.
  51. */
  52. if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
  53. return false;
  54. /* prevent sleeping if FPU is in use */
  55. skcipher_walk_atomise(walk);
  56. kernel_fpu_begin();
  57. return true;
  58. }
  59. static inline void glue_fpu_end(bool fpu_enabled)
  60. {
  61. if (fpu_enabled)
  62. kernel_fpu_end();
  63. }
  64. static inline void le128_to_be128(be128 *dst, const le128 *src)
  65. {
  66. dst->a = cpu_to_be64(le64_to_cpu(src->a));
  67. dst->b = cpu_to_be64(le64_to_cpu(src->b));
  68. }
  69. static inline void be128_to_le128(le128 *dst, const be128 *src)
  70. {
  71. dst->a = cpu_to_le64(be64_to_cpu(src->a));
  72. dst->b = cpu_to_le64(be64_to_cpu(src->b));
  73. }
  74. static inline void le128_inc(le128 *i)
  75. {
  76. u64 a = le64_to_cpu(i->a);
  77. u64 b = le64_to_cpu(i->b);
  78. b++;
  79. if (!b)
  80. a++;
  81. i->a = cpu_to_le64(a);
  82. i->b = cpu_to_le64(b);
  83. }
/* ECB: walk @req and dispatch via the gctx fn_u.ecb entries. */
extern int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
			       struct skcipher_request *req);

/*
 * CBC encryption is inherently serial (each block chains on the previous
 * ciphertext), so it takes a single one-block function rather than a
 * dispatch table.
 */
extern int glue_cbc_encrypt_req_128bit(const common_glue_func_t fn,
				       struct skcipher_request *req);

/* CBC decryption can run blocks in parallel, hence the dispatch table. */
extern int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
				       struct skcipher_request *req);

/* CTR: walk @req, passing the counter IV to the fn_u.ctr entries. */
extern int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
			       struct skcipher_request *req);

/*
 * XTS: @tweak_fn/@tweak_ctx encrypt the initial tweak, @crypt_ctx is
 * handed to the fn_u.xts data-path entries.
 */
extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
			       struct skcipher_request *req,
			       common_glue_func_t tweak_fn, void *tweak_ctx,
			       void *crypt_ctx);

/* Helper for one XTS block: xor tweak, run @fn, xor tweak again (see .c). */
extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src,
				      le128 *iv, common_glue_func_t fn);

#endif /* _CRYPTO_GLUE_HELPER_H */