/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE	160
#define MAX_ALGAPI_ALIGNMASK	63
#define MAX_CIPHER_BLOCKSIZE	16
#define MAX_CIPHER_ALIGNMASK	15
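
/*
 * Illustrative sketch (not part of this API): code that must hold one
 * block of any cipher plus worst-case alignment padding can size a
 * static buffer from the constants above, e.g.:
 *
 *	u8 buf[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
 *	u8 *aligned = PTR_ALIGN(buf, MAX_CIPHER_ALIGNMASK + 1);
 */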

struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};
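
/*
 * Illustrative sketch (not from this header): a minimal template fills
 * in ->create and ->module and registers itself from its module init
 * hook. Identifiers such as example_create() are hypothetical.
 *
 *	static struct crypto_template example_tmpl = {
 *		.name	= "example",
 *		.create	= example_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 */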

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}
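
/*
 * Illustrative sketch of the spawn lifecycle (identifiers are
 * hypothetical): a template instance takes a reference on an
 * underlying algorithm with crypto_init_spawn(), materialises a tfm
 * from it in its init handler with crypto_spawn_tfm(), and releases
 * the reference with crypto_drop_spawn() when the instance is freed.
 *
 *	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *
 *	err = crypto_init_spawn(spawn, alg, inst, CRYPTO_ALG_TYPE_MASK);
 *	...
 *	tfm = crypto_spawn_tfm(spawn, CRYPTO_ALG_TYPE_CIPHER,
 *			       CRYPTO_ALG_TYPE_MASK);
 *	...
 *	crypto_drop_spawn(spawn);
 */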

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s = (unsigned long *)src;

		while (size > 0) {
			*d++ ^= *s++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, dst, src, size);
	}
}

static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
				  unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s1 = (unsigned long *)src1;
		unsigned long *s2 = (unsigned long *)src2;

		while (size > 0) {
			*d++ = *s1++ ^ *s2++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, src1, src2, size);
	}
}
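
/*
 * Illustrative sketch (not from this header): crypto_xor() XORs src
 * into dst in place, while crypto_xor_cpy() writes src1 ^ src2 to a
 * separate destination. A CBC-style construction might use them as
 * follows; identifiers are hypothetical.
 *
 *	crypto_xor(block, iv, bsize);		 // block ^= iv
 *	crypto_xor_cpy(out, in, mask, bsize);	 // out = in ^ mask
 *
 * When size is a compile-time constant multiple of sizeof(long) and
 * unaligned accesses are cheap, both expand to word-wide XOR loops;
 * otherwise they fall back to the byte-safe __crypto_xor().
 */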

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);

int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}
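
/*
 * Illustrative sketch of the walk pattern (encrypt_segment() is
 * hypothetical, and each pass is assumed to consume all of
 * walk.nbytes): initialise the walk over the request's scatterlists,
 * process one contiguous chunk per iteration, then report progress
 * back via blkcipher_walk_done().
 *
 *	struct blkcipher_walk walk;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *
 *	while (walk.nbytes) {
 *		encrypt_segment(ctx, walk.dst.virt.addr,
 *				walk.src.virt.addr, walk.nbytes);
 *		err = blkcipher_walk_done(desc, &walk, 0);
 *	}
 */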

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;

	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
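
/*
 * Illustrative sketch of the request-queue pattern used by async
 * drivers (engine and handle_request() are hypothetical): pull the
 * oldest backlogged request, if any, notify its owner that processing
 * has started, then dequeue and handle the next request.
 *
 *	struct crypto_async_request *async_req, *backlog;
 *
 *	spin_lock_bh(&engine->lock);
 *	backlog = crypto_get_backlog(&engine->queue);
 *	async_req = crypto_dequeue_request(&engine->queue);
 *	spin_unlock_bh(&engine->lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (async_req)
 *		handle_request(engine, async_req);
 */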

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
{
	return (type ^ off) & mask & off;
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}
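
/*
 * Illustrative sketch (not from this header): a template typically
 * folds the caller's sync requirement into the mask it uses to grab
 * its inner algorithm, so a sync-only request never lands on an async
 * implementation. algt and spawn are hypothetical locals.
 *
 *	mask = crypto_requires_sync(algt->type, algt->mask);
 *	err = crypto_grab_spawn(spawn, name, 0, mask);
 */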

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
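
/*
 * Illustrative sketch (identifiers are hypothetical): authentication
 * tags must be compared with crypto_memneq() rather than memcmp(),
 * since memcmp() may return early at the first differing byte and so
 * leak, via timing, how much of the tag matched.
 *
 *	if (crypto_memneq(computed_tag, received_tag, taglen))
 *		return -EBADMSG;
 */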

static inline void crypto_yield(u32 flags)
{
#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
#endif
}

int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

#endif	/* _CRYPTO_ALGAPI_H */