/* crypto/skcipher.c */
/*
 * Symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks. In user context,
 * the kernel is given a chance to schedule us once per page.
 *
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/internal/skcipher.h>
#include <linux/bug.h>
#include <linux/module.h>

#include "internal.h"
  20. static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
  21. {
  22. if (alg->cra_type == &crypto_blkcipher_type)
  23. return sizeof(struct crypto_blkcipher *);
  24. BUG_ON(alg->cra_type != &crypto_ablkcipher_type &&
  25. alg->cra_type != &crypto_givcipher_type);
  26. return sizeof(struct crypto_ablkcipher *);
  27. }
  28. static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm,
  29. const u8 *key, unsigned int keylen)
  30. {
  31. struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
  32. struct crypto_blkcipher *blkcipher = *ctx;
  33. int err;
  34. crypto_blkcipher_clear_flags(blkcipher, ~0);
  35. crypto_blkcipher_set_flags(blkcipher, crypto_skcipher_get_flags(tfm) &
  36. CRYPTO_TFM_REQ_MASK);
  37. err = crypto_blkcipher_setkey(blkcipher, key, keylen);
  38. crypto_skcipher_set_flags(tfm, crypto_blkcipher_get_flags(blkcipher) &
  39. CRYPTO_TFM_RES_MASK);
  40. return err;
  41. }
  42. static int skcipher_crypt_blkcipher(struct skcipher_request *req,
  43. int (*crypt)(struct blkcipher_desc *,
  44. struct scatterlist *,
  45. struct scatterlist *,
  46. unsigned int))
  47. {
  48. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  49. struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
  50. struct blkcipher_desc desc = {
  51. .tfm = *ctx,
  52. .info = req->iv,
  53. .flags = req->base.flags,
  54. };
  55. return crypt(&desc, req->dst, req->src, req->cryptlen);
  56. }
  57. static int skcipher_encrypt_blkcipher(struct skcipher_request *req)
  58. {
  59. struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
  60. struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
  61. struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
  62. return skcipher_crypt_blkcipher(req, alg->encrypt);
  63. }
  64. static int skcipher_decrypt_blkcipher(struct skcipher_request *req)
  65. {
  66. struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
  67. struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
  68. struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
  69. return skcipher_crypt_blkcipher(req, alg->decrypt);
  70. }
  71. static void crypto_exit_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
  72. {
  73. struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
  74. crypto_free_blkcipher(*ctx);
  75. }
  76. static int crypto_init_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
  77. {
  78. struct crypto_alg *calg = tfm->__crt_alg;
  79. struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
  80. struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
  81. struct crypto_blkcipher *blkcipher;
  82. struct crypto_tfm *btfm;
  83. if (!crypto_mod_get(calg))
  84. return -EAGAIN;
  85. btfm = __crypto_alloc_tfm(calg, CRYPTO_ALG_TYPE_BLKCIPHER,
  86. CRYPTO_ALG_TYPE_MASK);
  87. if (IS_ERR(btfm)) {
  88. crypto_mod_put(calg);
  89. return PTR_ERR(btfm);
  90. }
  91. blkcipher = __crypto_blkcipher_cast(btfm);
  92. *ctx = blkcipher;
  93. tfm->exit = crypto_exit_skcipher_ops_blkcipher;
  94. skcipher->setkey = skcipher_setkey_blkcipher;
  95. skcipher->encrypt = skcipher_encrypt_blkcipher;
  96. skcipher->decrypt = skcipher_decrypt_blkcipher;
  97. skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher);
  98. return 0;
  99. }
  100. static int skcipher_setkey_ablkcipher(struct crypto_skcipher *tfm,
  101. const u8 *key, unsigned int keylen)
  102. {
  103. struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
  104. struct crypto_ablkcipher *ablkcipher = *ctx;
  105. int err;
  106. crypto_ablkcipher_clear_flags(ablkcipher, ~0);
  107. crypto_ablkcipher_set_flags(ablkcipher,
  108. crypto_skcipher_get_flags(tfm) &
  109. CRYPTO_TFM_REQ_MASK);
  110. err = crypto_ablkcipher_setkey(ablkcipher, key, keylen);
  111. crypto_skcipher_set_flags(tfm,
  112. crypto_ablkcipher_get_flags(ablkcipher) &
  113. CRYPTO_TFM_RES_MASK);
  114. return err;
  115. }
  116. static int skcipher_crypt_ablkcipher(struct skcipher_request *req,
  117. int (*crypt)(struct ablkcipher_request *))
  118. {
  119. struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
  120. struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
  121. struct ablkcipher_request *subreq = skcipher_request_ctx(req);
  122. ablkcipher_request_set_tfm(subreq, *ctx);
  123. ablkcipher_request_set_callback(subreq, skcipher_request_flags(req),
  124. req->base.complete, req->base.data);
  125. ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
  126. req->iv);
  127. return crypt(subreq);
  128. }
  129. static int skcipher_encrypt_ablkcipher(struct skcipher_request *req)
  130. {
  131. struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
  132. struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
  133. struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
  134. return skcipher_crypt_ablkcipher(req, alg->encrypt);
  135. }
  136. static int skcipher_decrypt_ablkcipher(struct skcipher_request *req)
  137. {
  138. struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
  139. struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
  140. struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
  141. return skcipher_crypt_ablkcipher(req, alg->decrypt);
  142. }
  143. static void crypto_exit_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
  144. {
  145. struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
  146. crypto_free_ablkcipher(*ctx);
  147. }
  148. static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
  149. {
  150. struct crypto_alg *calg = tfm->__crt_alg;
  151. struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
  152. struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
  153. struct crypto_ablkcipher *ablkcipher;
  154. struct crypto_tfm *abtfm;
  155. if (!crypto_mod_get(calg))
  156. return -EAGAIN;
  157. abtfm = __crypto_alloc_tfm(calg, 0, 0);
  158. if (IS_ERR(abtfm)) {
  159. crypto_mod_put(calg);
  160. return PTR_ERR(abtfm);
  161. }
  162. ablkcipher = __crypto_ablkcipher_cast(abtfm);
  163. *ctx = ablkcipher;
  164. tfm->exit = crypto_exit_skcipher_ops_ablkcipher;
  165. skcipher->setkey = skcipher_setkey_ablkcipher;
  166. skcipher->encrypt = skcipher_encrypt_ablkcipher;
  167. skcipher->decrypt = skcipher_decrypt_ablkcipher;
  168. skcipher->ivsize = crypto_ablkcipher_ivsize(ablkcipher);
  169. skcipher->reqsize = crypto_ablkcipher_reqsize(ablkcipher) +
  170. sizeof(struct ablkcipher_request);
  171. return 0;
  172. }
  173. static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
  174. {
  175. if (tfm->__crt_alg->cra_type == &crypto_blkcipher_type)
  176. return crypto_init_skcipher_ops_blkcipher(tfm);
  177. BUG_ON(tfm->__crt_alg->cra_type != &crypto_ablkcipher_type &&
  178. tfm->__crt_alg->cra_type != &crypto_givcipher_type);
  179. return crypto_init_skcipher_ops_ablkcipher(tfm);
  180. }
/* Frontend type used by crypto_alloc_skcipher() below. */
static const struct crypto_type crypto_skcipher_type2 = {
	.extsize = crypto_skcipher_extsize,
	.init_tfm = crypto_skcipher_init_tfm,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_BLKCIPHER_MASK,
	.type = CRYPTO_ALG_TYPE_BLKCIPHER,
	.tfmsize = offsetof(struct crypto_skcipher, base),
};
/**
 * crypto_alloc_skcipher() - allocate a symmetric key cipher handle
 * @alg_name: name of the cipher algorithm to look up
 * @type: algorithm type bits
 * @mask: algorithm type mask bits
 *
 * Return: an skcipher handle on success; ERR_PTR() on failure.
 */
struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Symmetric key cipher type");