crypto.h
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)
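/*
 * Usage sketch (illustrative, not part of the original header): a module
 * implementing a hypothetical algorithm "foo" declares the alias once and
 * gets both the unprefixed and the prefixed form:
 *
 *	MODULE_ALIAS_CRYPTO("foo");
 *
 * which expands to the module aliases "foo" (for userspace requests) and
 * "crypto-foo" (for autoloading through the crypto API).
 */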
/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
#define CRYPTO_ALG_TYPE_KPP		0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS	0x0000000a
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER	0x0000000d
#define CRYPTO_ALG_TYPE_DIGEST		0x0000000e
#define CRYPTO_ALG_TYPE_HASH		0x0000000e
#define CRYPTO_ALG_TYPE_SHASH		0x0000000e
#define CRYPTO_ALG_TYPE_AHASH		0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * This bit is set for symmetric key ciphers that have already been wrapped
 * with a generic IV generator to prevent them from being wrapped again.
 */
#define CRYPTO_ALG_GENIV		0x00000200

/*
 * Set if the algorithm has passed automated run-time testing. Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */
#define CRYPTO_ALG_TESTED		0x00000400
/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/*
 * Set this bit if the algorithm provided is hardware accelerated but
 * not available to userspace via an instruction set or similar means.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API.
 */
#define CRYPTO_ALG_INTERNAL		0x00002000
/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		64

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types. In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_blkcipher;
struct crypto_tfm;
struct crypto_type;
struct skcipher_givcrypt_request;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;
	u32 flags;
};

struct ablkcipher_request {
	struct crypto_async_request base;
	unsigned int nbytes;
	void *info;
	struct scatterlist *src;
	struct scatterlist *dst;
	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
	struct crypto_blkcipher *tfm;
	void *info;
	u32 flags;
};

struct cipher_desc {
	struct crypto_tfm *tfm;
	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
			     const u8 *src, unsigned int nbytes);
	void *info;
};
/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */

/**
 * struct ablkcipher_alg - asynchronous block cipher definition
 * @min_keysize: Minimum key size supported by the transformation. This is the
 *	smallest key length supported by this transformation algorithm.
 *	This must be set to one of the pre-defined values as this is
 *	not hardware specific. Possible values for this field can be
 *	found via git grep "_MIN_KEY_SIZE" include/crypto/
 * @max_keysize: Maximum key size supported by the transformation. This is the
 *	largest key length supported by this transformation algorithm.
 *	This must be set to one of the pre-defined values as this is
 *	not hardware specific. Possible values for this field can be
 *	found via git grep "_MAX_KEY_SIZE" include/crypto/
 * @setkey: Set key for the transformation. This function is used to either
 *	program a supplied key into the hardware or store the key in the
 *	transformation context for programming it later. Note that this
 *	function does modify the transformation context. This function can
 *	be called multiple times during the existence of the transformation
 *	object, so one must make sure the key is properly reprogrammed into
 *	the hardware. This function is also responsible for checking the key
 *	length for validity. In case a software fallback was put in place in
 *	the @cra_init call, this function might need to use the fallback if
 *	the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
 *	the supplied scatterlist containing the blocks of data. The crypto
 *	API consumer is responsible for aligning the entries of the
 *	scatterlist properly and making sure the chunks are correctly
 *	sized. In case a software fallback was put in place in the
 *	@cra_init call, this function might need to use the fallback if
 *	the algorithm doesn't support all of the key sizes. In case the
 *	key was stored in the transformation context, the key might need to
 *	be re-programmed into the hardware in this function. This function
 *	shall not modify the transformation context, as this function may
 *	be called in parallel with the same transformation object.
 * @decrypt: Decrypt a scatterlist of blocks. This is the reverse counterpart
 *	to @encrypt and the conditions are exactly the same.
 * @givencrypt: Update the IV for encryption. With this function, a cipher
 *	implementation may provide the function on how to update the IV
 *	for encryption.
 * @givdecrypt: Update the IV for decryption. This is the reverse of
 *	@givencrypt.
 * @geniv: The transformation implementation may use an "IV generator" provided
 *	by the kernel crypto API. Several use cases have a predefined
 *	approach for how IVs are to be updated. For such use cases, the kernel
 *	crypto API provides ready-to-use implementations that can be
 *	referenced with this variable.
 * @ivsize: IV size applicable for the transformation. The consumer must
 *	provide an IV of exactly that size to perform the encrypt or decrypt
 *	operation.
 *
 * All fields except @givencrypt, @givdecrypt, @geniv and @ivsize are
 * mandatory and must be filled.
 */
struct ablkcipher_alg {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};
/**
 * struct blkcipher_alg - synchronous block cipher definition
 * @min_keysize: see struct ablkcipher_alg
 * @max_keysize: see struct ablkcipher_alg
 * @setkey: see struct ablkcipher_alg
 * @encrypt: see struct ablkcipher_alg
 * @decrypt: see struct ablkcipher_alg
 * @geniv: see struct ablkcipher_alg
 * @ivsize: see struct ablkcipher_alg
 *
 * All fields except @geniv and @ivsize are mandatory and must be filled.
 */
struct blkcipher_alg {
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};
/**
 * struct cipher_alg - single-block symmetric ciphers definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *	the smallest key length supported by this transformation
 *	algorithm. This must be set to one of the pre-defined
 *	values as this is not hardware specific. Possible values
 *	for this field can be found via git grep "_MIN_KEY_SIZE"
 *	include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *	the largest key length supported by this transformation
 *	algorithm. This must be set to one of the pre-defined values
 *	as this is not hardware specific. Possible values for this
 *	field can be found via git grep "_MAX_KEY_SIZE"
 *	include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *	program a supplied key into the hardware or store the key in the
 *	transformation context for programming it later. Note that this
 *	function does modify the transformation context. This function
 *	can be called multiple times during the existence of the
 *	transformation object, so one must make sure the key is properly
 *	reprogrammed into the hardware. This function is also
 *	responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *	single block of data, which must be @cra_blocksize big. This
 *	always operates on a full @cra_blocksize and it is not possible
 *	to encrypt a block of smaller size. The supplied buffers must
 *	therefore also be at least of @cra_blocksize size. Both the
 *	input and output buffers are always aligned to @cra_alignmask.
 *	In case either the input or the output buffer supplied by the
 *	user of the crypto API is not aligned to @cra_alignmask, the
 *	crypto API will re-align the buffers. The re-alignment means
 *	that a new buffer will be allocated, the data will be copied
 *	into the new buffer, then the processing will happen on the new
 *	buffer, then the data will be copied back into the original
 *	buffer and finally the new buffer will be freed. In case a
 *	software fallback was put in place in the @cra_init call, this
 *	function might need to use the fallback if the algorithm
 *	doesn't support all of the key sizes. In case the key was
 *	stored in the transformation context, the key might need to be
 *	re-programmed into the hardware in this function. This function
 *	shall not modify the transformation context, as this function
 *	may be called in parallel with the same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *	@cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#define cra_ablkcipher	cra_u.ablkcipher
#define cra_blkcipher	cra_u.blkcipher
#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress
/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *	CRYPTO_ALG_* flags for the flags which go in here. Those are
 *	used for fine-tuning the description of the transformation
 *	algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *	of the smallest possible unit which can be transformed with
 *	this algorithm. The users must respect this value.
 *	In case of a HASH transformation, it is possible for a smaller
 *	block than @cra_blocksize to be passed to the crypto API for
 *	transformation; in case of any other transformation type, an
 *	error will be returned upon any attempt to transform smaller
 *	than @cra_blocksize chunks.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *	value informs the kernel crypto API about the memory size
 *	needed to be allocated for the transformation context.
 * @cra_alignmask: Alignment mask for the input and output data buffer. The
 *	data buffer containing the input data for the algorithm must be
 *	aligned to this alignment mask. The data buffer for the
 *	output data must be aligned to this alignment mask. Note that
 *	the Crypto API will do the re-alignment in software, but
 *	only under special conditions and there is a performance hit.
 *	The re-alignment happens at these occasions for different
 *	@cra_u types: cipher -- For both input data and output data
 *	buffer; ahash -- For output hash destination buf; shash --
 *	For output hash destination buf.
 *	This is needed on hardware which is flawed by design and
 *	cannot pick data from arbitrary addresses.
 * @cra_priority: Priority of this transformation implementation. In case
 *	multiple transformations with the same @cra_name are available to
 *	the Crypto API, the kernel will use the one with the highest
 *	@cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *	transformation algorithm. This is the name of the transformation
 *	itself. This field is used by the kernel when looking up the
 *	providers of a particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *	name of the provider of the transformation. This can be any
 *	arbitrary value, but in the usual case, this contains the
 *	name of the chip or provider and the name of the
 *	transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *	struct crypto_type, which implements callbacks common for all
 *	transformation types. There are multiple options:
 *	&crypto_blkcipher_type, &crypto_ablkcipher_type,
 *	&crypto_ahash_type, &crypto_rng_type.
 *	This field might be empty. In that case, there are no common
 *	callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *	multiple structures. Depending on the type of transformation selected
 *	by @cra_type and @cra_flags above, the associated structure must be
 *	filled with callbacks. This field might be empty. This is the case
 *	for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *	is used to initialize the cryptographic transformation object.
 *	This function is called only once at the instantiation time, right
 *	after the transformation context was allocated. In case the
 *	cryptographic hardware has some special requirements which need to
 *	be handled by software, this function shall check for the precise
 *	requirement of the transformation and put any software fallbacks
 *	in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *	counterpart to @cra_init, used to remove various changes set in
 *	@cra_init.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE.
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	atomic_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct ablkcipher_alg ablkcipher;
		struct blkcipher_alg blkcipher;
		struct cipher_alg cipher;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;
} CRYPTO_MINALIGN_ATTR;

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
int crypto_unregister_algs(struct crypto_alg *algs, int count);
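/*
 * Registration sketch (illustrative only; all "foo" names are hypothetical):
 * a minimal single-block cipher provider fills struct crypto_alg, selects
 * struct cipher_alg in @cra_u, and registers the algorithm from module init:
 *
 *	static struct crypto_alg foo_alg = {
 *		.cra_name		= "foo",
 *		.cra_driver_name	= "foo-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct foo_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u			= { .cipher = {
 *			.cia_min_keysize	= 16,
 *			.cia_max_keysize	= 32,
 *			.cia_setkey		= foo_setkey,
 *			.cia_encrypt		= foo_encrypt,
 *			.cia_decrypt		= foo_decrypt,
 *		} },
 *	};
 *
 *	static int __init foo_mod_init(void)
 *	{
 *		return crypto_register_alg(&foo_alg);
 *	}
 *
 *	static void __exit foo_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&foo_alg);
 *	}
 */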
/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic. Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);

	struct crypto_ablkcipher *base;

	unsigned int ivsize;
	unsigned int reqsize;
};

struct blkcipher_tfm {
	void *iv;
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
	int (*cit_setkey)(struct crypto_tfm *tfm,
			  const u8 *key, unsigned int keylen);
	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
			    const u8 *src, unsigned int slen,
			    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
			      const u8 *src, unsigned int slen,
			      u8 *dst, unsigned int *dlen);
};

#define crt_ablkcipher	crt_u.ablkcipher
#define crt_blkcipher	crt_u.blkcipher
#define crt_cipher	crt_u.cipher
#define crt_compress	crt_u.compress
struct crypto_tfm {
	u32 crt_flags;

	union {
		struct ablkcipher_tfm ablkcipher;
		struct blkcipher_tfm blkcipher;
		struct cipher_tfm cipher;
		struct compress_tfm compress;
	} crt_u;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_ablkcipher {
	struct crypto_tfm base;
};

struct crypto_blkcipher {
	struct crypto_tfm base;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	CRYPTOA_U32,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

struct crypto_attr_u32 {
	u32 num;
};
/*
 * Transform user interface.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;

	return __alignof__(tfm->__crt_ctx);
}
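/*
 * Context usage sketch (hypothetical driver code): the per-tfm context whose
 * size a provider announces in @cra_ctxsize is retrieved with
 * crypto_tfm_ctx(). A typical setkey implementation validates the length,
 * flags bad keys via the CRYPTO_TFM_RES_* bits, and caches the key:
 *
 *	struct foo_ctx {
 *		u8 key[32];
 *		unsigned int keylen;
 *	};
 *
 *	static int foo_setkey(struct crypto_tfm *tfm, const u8 *key,
 *			      unsigned int keylen)
 *	{
 *		struct foo_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		if (keylen > sizeof(ctx->key)) {
 *			crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 *			return -EINVAL;
 *		}
 *		memcpy(ctx->key, key, keylen);
 *		ctx->keylen = keylen;
 *		return 0;
 *	}
 */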
/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_ablkcipher *)tfm;
}

static inline u32 crypto_skcipher_type(u32 type)
{
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	return type;
}

static inline u32 crypto_skcipher_mask(u32 mask)
{
	mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
	return mask;
}
/**
 * DOC: Asynchronous Block Cipher API
 *
 * The asynchronous block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_ABLKCIPHER (listed as type "ablkcipher" in /proc/crypto).
 *
 * Asynchronous cipher operations imply that the function invocation for a
 * cipher request returns immediately, before the operation has completed.
 * The cipher request is scheduled as a separate kernel thread and therefore
 * load-balanced on the different CPUs via the process scheduler. To allow
 * the kernel crypto API to inform the caller about the completion of a cipher
 * request, the caller must provide a callback function. That function is
 * invoked with the cipher handle when the request completes.
 *
 * To support the asynchronous operation, more information than just the
 * cipher handle must be supplied to the kernel crypto API. That additional
 * information is given by filling in the ablkcipher_request data structure.
 *
 * For the asynchronous block cipher API, the state is maintained with the tfm
 * cipher handle. A single tfm can be used across multiple calls and in
 * parallel. For asynchronous block cipher calls, context data supplied and
 * only used by the caller can be referenced in the request data structure in
 * addition to the IV used for the cipher request. The maintenance of such
 * state information would be important for a crypto driver implementer to
 * have, because when calling the callback function upon completion of the
 * cipher operation, that callback function may need some information about
 * which operation just finished if it invoked multiple requests in parallel.
 * This state information is unused by the kernel crypto API.
 */
static inline struct crypto_tfm *crypto_ablkcipher_tfm(
	struct crypto_ablkcipher *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_ablkcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
	crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

/**
 * crypto_has_ablkcipher() - Search for the availability of an ablkcipher.
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      ablkcipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the ablkcipher is known to the kernel crypto API; false
 *	   otherwise
 */
static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
			      crypto_skcipher_mask(mask));
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

/**
 * crypto_ablkcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the ablkcipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_ablkcipher_ivsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->ivsize;
}

/**
 * crypto_ablkcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the ablkcipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_ablkcipher_blocksize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
					       u32 flags)
{
	crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
						 u32 flags)
{
	crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}
/**
 * crypto_ablkcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the ablkcipher referenced by the cipher
 * handle.
 *
 * Note, the key length determines the cipher type. Many block ciphers implement
 * different cipher modes depending on the key size, such as AES-128 vs AES-192
 * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
 * is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
					   const u8 *key, unsigned int keylen)
{
	struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

/**
 * crypto_ablkcipher_reqtfm() - obtain cipher handle from request
 * @req: ablkcipher_request out of which the cipher handle is to be obtained
 *
 * Return the crypto_ablkcipher handle when furnishing an ablkcipher_request
 * data structure.
 *
 * Return: crypto_ablkcipher handle
 */
static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
	struct ablkcipher_request *req)
{
	return __crypto_ablkcipher_cast(req->base.tfm);
}

/**
 * crypto_ablkcipher_encrypt() - encrypt plaintext
 * @req: reference to the ablkcipher_request handle that holds all information
 *	 needed to perform the cipher operation
 *
 * Encrypt plaintext data using the ablkcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * ablkcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));

	return crt->encrypt(req);
}

/**
 * crypto_ablkcipher_decrypt() - decrypt ciphertext
 * @req: reference to the ablkcipher_request handle that holds all information
 *	 needed to perform the cipher operation
 *
 * Decrypt ciphertext data using the ablkcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * ablkcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));

	return crt->decrypt(req);
}
/**
 * DOC: Asynchronous Cipher Request Handle
 *
 * The ablkcipher_request data structure contains all pointers to data
 * required for the asynchronous cipher operation. This includes the cipher
 * handle (which can be used by multiple ablkcipher_request instances), pointers
 * to plaintext and ciphertext, the asynchronous callback function, etc. It acts
 * as a handle to the ablkcipher_request_* API calls in a similar way as the
 * ablkcipher handle to the crypto_ablkcipher_* API calls.
 */
/**
 * crypto_ablkcipher_reqsize() - obtain size of the request data structure
 * @tfm: cipher handle
 *
 * Return: number of bytes
 */
static inline unsigned int crypto_ablkcipher_reqsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->reqsize;
}

/**
 * ablkcipher_request_set_tfm() - update cipher handle reference in request
 * @req: request handle to be modified
 * @tfm: cipher handle that shall be added to the request handle
 *
 * Allow the caller to replace the existing ablkcipher handle in the request
 * data structure with a different one.
 */
static inline void ablkcipher_request_set_tfm(
	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
	req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct ablkcipher_request, base);
}

/**
 * ablkcipher_request_alloc() - allocate request data structure
 * @tfm: cipher handle to be registered with the request
 * @gfp: memory allocation flag that is handed to kmalloc by the API call.
 *
 * Allocate the request data structure that must be used with the ablkcipher
 * encrypt and decrypt API calls. During the allocation, the provided ablkcipher
 * handle is registered in the request data structure.
 *
 * Return: allocated request handle in case of success, or NULL if out of memory
 */
static inline struct ablkcipher_request *ablkcipher_request_alloc(
	struct crypto_ablkcipher *tfm, gfp_t gfp)
{
	struct ablkcipher_request *req;

	req = kmalloc(sizeof(struct ablkcipher_request) +
		      crypto_ablkcipher_reqsize(tfm), gfp);

	if (likely(req))
		ablkcipher_request_set_tfm(req, tfm);

	return req;
}

/**
 * ablkcipher_request_free() - zeroize and free request data structure
 * @req: request data structure cipher handle to be freed
 */
static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
	kzfree(req);
}

/**
 * ablkcipher_request_set_callback() - set asynchronous callback function
 * @req: request handle
 * @flags: specify zero or an ORing of the flags
 *	   CRYPTO_TFM_REQ_MAY_BACKLOG the request queue may back log and
 *	   increase the wait queue beyond the initial maximum size;
 *	   CRYPTO_TFM_REQ_MAY_SLEEP the request processing may sleep
 * @compl: callback function pointer to be registered with the request handle
 * @data: The data pointer refers to memory that is not used by the kernel
 *	  crypto API, but provided to the callback function for it to use. Here,
 *	  the caller can provide a reference to memory the callback function can
 *	  operate on. As the callback function is invoked asynchronously to the
 *	  related functionality, it may need to access data structures of the
 *	  related functionality which can be referenced using this pointer. The
 *	  callback function can access the memory via the "data" field in the
 *	  crypto_async_request data structure provided to the callback function.
 *
 * This function allows setting the callback function that is triggered once the
 * cipher operation completes.
 *
 * The callback function is registered with the ablkcipher_request handle and
 * must comply with the following template:
 *
 *	void callback_function(struct crypto_async_request *req, int error)
 */
static inline void ablkcipher_request_set_callback(
	struct ablkcipher_request *req,
	u32 flags, crypto_completion_t compl, void *data)
{
	req->base.complete = compl;
	req->base.data = data;
	req->base.flags = flags;
}

/**
 * ablkcipher_request_set_crypt() - set data buffers
 * @req: request handle
 * @src: source scatter / gather list
 * @dst: destination scatter / gather list
 * @nbytes: number of bytes to process from @src
 * @iv: IV for the cipher operation which must comply with the IV size defined
 *	by crypto_ablkcipher_ivsize
 *
 * This function allows setting of the source data and destination data
 * scatter / gather lists.
 *
 * For encryption, the source is treated as the plaintext and the
 * destination is the ciphertext. For a decryption operation, the use is
 * reversed - the source is the ciphertext and the destination is the plaintext.
 */
static inline void ablkcipher_request_set_crypt(
	struct ablkcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int nbytes, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->nbytes = nbytes;
	req->info = iv;
}
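/*
 * Consumer sketch (illustrative; error handling abbreviated, and "cbc(aes)",
 * key, buf, my_complete and my_wait are placeholders): a typical asynchronous
 * encryption using the helpers above, with crypto_alloc_ablkcipher() as
 * declared elsewhere in this header. The IV buffer must match
 * crypto_ablkcipher_ivsize(), and passing the same scatterlist as src and
 * dst requests an in-place operation:
 *
 *	struct crypto_ablkcipher *tfm;
 *	struct ablkcipher_request *req;
 *	struct scatterlist sg;
 *	u8 iv[16] = { 0 };
 *	int err;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_ablkcipher_setkey(tfm, key, 16);
 *
 *	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *					my_complete, &my_wait);
 *	sg_init_one(&sg, buf, 16);
 *	ablkcipher_request_set_crypt(req, &sg, &sg, 16, iv);
 *
 *	err = crypto_ablkcipher_encrypt(req);
 *
 * A return value of -EINPROGRESS or -EBUSY means the operation completes
 * asynchronously and the registered callback reports the final status.
 * Release the resources with ablkcipher_request_free() and
 * crypto_free_ablkcipher() afterwards.
 */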
/**
 * DOC: Synchronous Block Cipher API
 *
 * The synchronous block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_BLKCIPHER (listed as type "blkcipher" in /proc/crypto).
 *
 * Synchronous calls have a context in the tfm. But since a single tfm can be
 * used by multiple calls and in parallel, this info should not be changeable
 * (unless a lock is used). This applies, for example, to the symmetric key.
 * However, the IV is changeable, so there is an iv field in the blkcipher_tfm
 * structure for the synchronous blkcipher API. It is thus the only state
 * information that can be kept for synchronous calls without using a big lock
 * across a tfm.
 *
 * The block cipher API allows the use of a complete cipher, i.e. a cipher
 * consisting of a template (a block chaining mode) and a single block cipher
 * primitive (e.g. AES).
 *
 * The plaintext data buffer and the ciphertext data buffer are pointed to
 * by using scatter/gather lists. The cipher operation is performed
 * on all segments of the provided scatter/gather lists.
 *
 * The kernel crypto API supports a cipher operation "in-place" which means that
 * the caller may provide the same scatter/gather list for the plaintext and
 * cipher text. After the completion of the cipher operation, the plaintext
 * data is replaced with the ciphertext data in case of an encryption and vice
 * versa for a decryption. The caller must ensure that the scatter/gather lists
 * for the output data point to sufficiently large buffers, i.e. multiples of
 * the block size of the cipher.
 */
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
	return __crypto_blkcipher_cast(tfm);
}

/**
 * crypto_alloc_blkcipher() - allocate synchronous block cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      blkcipher cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for a block cipher. The returned struct
 * crypto_blkcipher is the cipher handle that is required for any subsequent
 * API invocation for that block cipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
	const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
	struct crypto_blkcipher *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_blkcipher() - zeroize and free the block cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

/**
 * crypto_has_blkcipher() - Search for the availability of a block cipher
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the block cipher is known to the kernel crypto API; false
 *	   otherwise
 */
static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}
/**
 * crypto_blkcipher_name() - return the name / cra_name from the cipher handle
 * @tfm: cipher handle
 *
 * Return: The character string holding the name of the cipher
 */
static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

/**
 * crypto_blkcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the block cipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
	return crypto_blkcipher_alg(tfm)->ivsize;
}

/**
 * crypto_blkcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the block cipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_blkcipher_blocksize(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

/**
 * crypto_blkcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the block cipher referenced by the cipher
 * handle.
 *
 * Note, the key length determines the cipher type. Many block ciphers implement
 * different cipher modes depending on the key size, such as AES-128 vs AES-192
 * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
 * is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
					  const u8 *key, unsigned int keylen)
{
	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
						 key, keylen);
}
  1082. /**
  1083. * crypto_blkcipher_encrypt() - encrypt plaintext
  1084. * @desc: reference to the block cipher handle with meta data
  1085. * @dst: scatter/gather list that is filled by the cipher operation with the
  1086. * ciphertext
  1087. * @src: scatter/gather list that holds the plaintext
  1088. * @nbytes: number of bytes of the plaintext to encrypt.
  1089. *
  1090. * Encrypt plaintext data using the IV set by the caller with a preceding
  1091. * call of crypto_blkcipher_set_iv.
  1092. *
  1093. * The blkcipher_desc data structure must be filled by the caller and can
  1094. * reside on the stack. The caller must fill desc as follows: desc.tfm is filled
  1095. * with the block cipher handle; desc.flags is filled with either
  1096. * CRYPTO_TFM_REQ_MAY_SLEEP or 0.
  1097. *
  1098. * Return: 0 if the cipher operation was successful; < 0 if an error occurred
  1099. */
  1100. static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
  1101. struct scatterlist *dst,
  1102. struct scatterlist *src,
  1103. unsigned int nbytes)
  1104. {
  1105. desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
  1106. return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
  1107. }
  1108. /**
  1109. * crypto_blkcipher_encrypt_iv() - encrypt plaintext with dedicated IV
  1110. * @desc: reference to the block cipher handle with meta data
  1111. * @dst: scatter/gather list that is filled by the cipher operation with the
  1112. * ciphertext
  1113. * @src: scatter/gather list that holds the plaintext
  1114. * @nbytes: number of bytes of the plaintext to encrypt.
  1115. *
  1116. * Encrypt plaintext data with the use of an IV that is solely used for this
  1117. * cipher operation. Any previously set IV is not used.
  1118. *
  1119. * The blkcipher_desc data structure must be filled by the caller and can
  1120. * reside on the stack. The caller must fill desc as follows: desc.tfm is filled
  1121. * with the block cipher handle; desc.info is filled with the IV to be used for
  1122. * the current operation; desc.flags is filled with either
  1123. * CRYPTO_TFM_REQ_MAY_SLEEP or 0.
  1124. *
  1125. * Return: 0 if the cipher operation was successful; < 0 if an error occurred
  1126. */
  1127. static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
  1128. struct scatterlist *dst,
  1129. struct scatterlist *src,
  1130. unsigned int nbytes)
  1131. {
  1132. return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
  1133. }

/**
 * crypto_blkcipher_decrypt() - decrypt ciphertext
 * @desc: reference to the block cipher handle with metadata
 * @dst: scatter/gather list that is filled by the cipher operation with the
 *	plaintext
 * @src: scatter/gather list that holds the ciphertext
 * @nbytes: number of bytes of the ciphertext to decrypt
 *
 * Decrypt ciphertext data using the IV set by the caller with a preceding
 * call to crypto_blkcipher_set_iv().
 *
 * The blkcipher_desc data structure must be filled by the caller as documented
 * for the crypto_blkcipher_encrypt() call above.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

/**
 * crypto_blkcipher_decrypt_iv() - decrypt ciphertext with dedicated IV
 * @desc: reference to the block cipher handle with metadata
 * @dst: scatter/gather list that is filled by the cipher operation with the
 *	plaintext
 * @src: scatter/gather list that holds the ciphertext
 * @nbytes: number of bytes of the ciphertext to decrypt
 *
 * Decrypt ciphertext data with the use of an IV that is solely used for this
 * cipher operation. Any previously set IV is not used.
 *
 * The blkcipher_desc data structure must be filled by the caller as documented
 * for the crypto_blkcipher_encrypt_iv() call above.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}
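
/*
 * Example: an illustrative decryption mirroring the encryption sketches
 * above; reversing the operation only requires the identical key and IV.
 * "sg_plain" and "sg_cipher" are placeholder scatterlists.
 *
 *	struct blkcipher_desc desc = {
 *		.tfm	= tfm,
 *		.info	= iv,
 *	};
 *
 *	if (crypto_blkcipher_decrypt_iv(&desc, sg_plain, sg_cipher, len) < 0)
 *		pr_err("decryption failed\n");
 */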

/**
 * crypto_blkcipher_set_iv() - set IV for cipher
 * @tfm: cipher handle
 * @src: buffer holding the IV
 * @len: length of the IV in bytes
 *
 * The caller-provided IV is set for the block cipher referenced by the cipher
 * handle.
 */
static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

/**
 * crypto_blkcipher_get_iv() - obtain IV from cipher
 * @tfm: cipher handle
 * @dst: buffer filled with the IV
 * @len: length of the buffer dst in bytes
 *
 * The caller can obtain the IV set for the block cipher referenced by the
 * cipher handle and store it into the user-provided buffer. If the buffer
 * has insufficient space, the IV is truncated to fit the buffer.
 */
static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}
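
/*
 * Example: an illustrative sketch of installing a random IV and reading it
 * back, assuming a cipher with a 16-byte IV such as cbc(aes):
 *
 *	u8 iv[16], iv_copy[16];
 *
 *	get_random_bytes(iv, sizeof(iv));
 *	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *	crypto_blkcipher_get_iv(tfm, iv_copy, crypto_blkcipher_ivsize(tfm));
 */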

/**
 * DOC: Single Block Cipher API
 *
 * The single block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_CIPHER (listed as type "cipher" in /proc/crypto).
 *
 * The single block cipher API operates on the bare cipher primitive: it
 * excludes any block chaining operations, including IV handling.
 *
 * The purpose of this single block cipher API is to support the
 * implementation of templates or other concepts that only need to perform
 * the cipher operation on one block at a time. Templates invoke the
 * underlying cipher primitive block-wise and process either the input or
 * the output data of these cipher operations.
 */
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return __crypto_cipher_cast(tfm);
}

/**
 * crypto_alloc_cipher() - allocate single block cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      single block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for a single block cipher. The returned struct
 * crypto_cipher is the cipher handle that is required for any subsequent API
 * invocation for that single block cipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;
	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}
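
/*
 * Example: an illustrative sketch of the full single block cipher lifecycle:
 * allocation with IS_ERR() checking, key setup, a one-block operation and
 * release. The "aes" choice, key material and buffers are placeholders.
 *
 *	struct crypto_cipher *tfm;
 *	u8 key[16] = { 0 };
 *	u8 in[16] = { 0 }, out[16];
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	if (crypto_cipher_setkey(tfm, key, sizeof(key)) == 0)
 *		crypto_cipher_encrypt_one(tfm, out, in);
 *	crypto_free_cipher(tfm);
 */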
static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_cipher() - zeroize and free the single block cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

/**
 * crypto_has_cipher() - Search for the availability of a single block cipher
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      single block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the single block cipher is known to the kernel crypto API;
 *	   false otherwise
 */
static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;
	return crypto_has_alg(alg_name, type, mask);
}
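
/*
 * Example: an illustrative availability probe before attempting an
 * allocation; the algorithm name is a placeholder:
 *
 *	if (!crypto_has_cipher("aes", 0, 0))
 *		return -ENOENT;
 */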
static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->crt_cipher;
}

/**
 * crypto_cipher_blocksize() - obtain block size for cipher
 * @tfm: cipher handle
 *
 * The block size for the single block cipher referenced with the cipher handle
 * tfm is returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}
static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

/**
 * crypto_cipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller-provided key is set for the single block cipher referenced by
 * the cipher handle.
 *
 * Note: the key length selects the cipher variant. Many block ciphers support
 * several key sizes, such as AES-128 vs. AES-192 vs. AES-256; providing a
 * 16-byte key to an AES cipher handle therefore selects AES-128.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
						  key, keylen);
}

/**
 * crypto_cipher_encrypt_one() - encrypt one block of plaintext
 * @tfm: cipher handle
 * @dst: points to the buffer that will be filled with the ciphertext
 * @src: buffer holding the plaintext to be encrypted
 *
 * Invoke the encryption operation of one block. The caller must ensure that
 * the plaintext and ciphertext buffers are at least one block in size.
 */
static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

/**
 * crypto_cipher_decrypt_one() - decrypt one block of ciphertext
 * @tfm: cipher handle
 * @dst: points to the buffer that will be filled with the plaintext
 * @src: buffer holding the ciphertext to be decrypted
 *
 * Invoke the decryption operation of one block. The caller must ensure that
 * the plaintext and ciphertext buffers are at least one block in size.
 */
static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}
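
/*
 * Example: an illustrative sketch of the block-wise processing described in
 * the DOC section above: a bare ECB-style walk over a linear buffer, with
 * "tfm", "dst", "src" and a block-multiple "len" assumed to be set up by
 * the caller.
 *
 *	unsigned int bs = crypto_cipher_blocksize(tfm);
 *	unsigned int i;
 *
 *	for (i = 0; i + bs <= len; i += bs)
 *		crypto_cipher_encrypt_one(tfm, dst + i, src + i);
 */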
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
	       CRYPTO_ALG_TYPE_MASK);
	return __crypto_comp_cast(tfm);
}

/**
 * crypto_alloc_comp() - allocate compression algorithm handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      compression algorithm
 * @type: specifies the type of the algorithm
 * @mask: specifies the mask for the algorithm
 *
 * Return: allocated handle in case of success; IS_ERR() is true in case of
 *	   an error, PTR_ERR() returns the error code.
 */
static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;
	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_comp() - zeroize and free the compression algorithm handle
 * @tfm: handle to be freed
 */
static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

/**
 * crypto_has_comp() - Search for the availability of a compression algorithm
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      compression algorithm
 * @type: specifies the type of the algorithm
 * @mask: specifies the mask for the algorithm
 *
 * Return: true when the compression algorithm is known to the kernel crypto
 *	   API; false otherwise
 */
static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;
	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
	return &crypto_comp_tfm(tfm)->crt_compress;
}

/**
 * crypto_comp_compress() - compress a buffer
 * @tfm: compression algorithm handle
 * @src: buffer holding the data to be compressed
 * @slen: length of the source buffer in bytes
 * @dst: buffer that receives the compressed output
 * @dlen: on entry, the size of the destination buffer in bytes; on return,
 *	  the number of bytes written to @dst
 *
 * Return: 0 if the compression was successful; < 0 if an error occurred
 */
static inline int crypto_comp_compress(struct crypto_comp *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
						  src, slen, dst, dlen);
}

/**
 * crypto_comp_decompress() - decompress a buffer
 * @tfm: compression algorithm handle
 * @src: buffer holding the data to be decompressed
 * @slen: length of the source buffer in bytes
 * @dst: buffer that receives the decompressed output
 * @dlen: on entry, the size of the destination buffer in bytes; on return,
 *	  the number of bytes written to @dst
 *
 * Return: 0 if the decompression was successful; < 0 if an error occurred
 */
static inline int crypto_comp_decompress(struct crypto_comp *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
						    src, slen, dst, dlen);
}
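
/*
 * Example: an illustrative compress/decompress round trip, assuming "tfm"
 * was allocated via crypto_alloc_comp("deflate", 0, 0); "plain", "plain_len"
 * and the buffer-size macros are placeholders provided by the caller:
 *
 *	u8 comp_buf[COMP_BUF_SIZE], out_buf[PLAIN_BUF_SIZE];
 *	unsigned int comp_len = sizeof(comp_buf);
 *	unsigned int out_len = sizeof(out_buf);
 *
 *	if (crypto_comp_compress(tfm, plain, plain_len, comp_buf, &comp_len))
 *		pr_err("compression failed\n");
 *	else if (crypto_comp_decompress(tfm, comp_buf, comp_len,
 *					out_buf, &out_len))
 *		pr_err("decompression failed\n");
 */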

#endif /* _LINUX_CRYPTO_H */