/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>
#include <linux/completion.h>

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so this retains those aliases as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name) \
        __MODULE_INFO(alias, alias_userspace, name); \
        __MODULE_INFO(alias, alias_crypto, "crypto-" name)
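
/*
 * Example (illustrative, not part of the original header): a module that
 * implements the "aes" algorithm would typically advertise itself with
 *
 *      MODULE_ALIAS_CRYPTO("aes");
 *
 * which emits both the plain "aes" alias and the prefixed "crypto-aes"
 * alias, so the module can be autoloaded via the safe prefixed name while
 * userspace lookups by the plain name keep working.
 */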
/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK            0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER          0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS        0x00000002
#define CRYPTO_ALG_TYPE_AEAD            0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER       0x00000004
#define CRYPTO_ALG_TYPE_ABLKCIPHER      0x00000005
#define CRYPTO_ALG_TYPE_SKCIPHER        0x00000005
#define CRYPTO_ALG_TYPE_GIVCIPHER       0x00000006
#define CRYPTO_ALG_TYPE_KPP             0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS       0x0000000a
#define CRYPTO_ALG_TYPE_SCOMPRESS       0x0000000b
#define CRYPTO_ALG_TYPE_RNG             0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER        0x0000000d
#define CRYPTO_ALG_TYPE_DIGEST          0x0000000e
#define CRYPTO_ALG_TYPE_HASH            0x0000000e
#define CRYPTO_ALG_TYPE_SHASH           0x0000000e
#define CRYPTO_ALG_TYPE_AHASH           0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK       0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK      0x0000000e
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK  0x0000000c
#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK  0x0000000e

#define CRYPTO_ALG_LARVAL               0x00000010
#define CRYPTO_ALG_DEAD                 0x00000020
#define CRYPTO_ALG_DYING                0x00000040
#define CRYPTO_ALG_ASYNC                0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK        0x00000100

/*
 * This bit is set for symmetric key ciphers that have already been wrapped
 * with a generic IV generator to prevent them from being wrapped again.
 */
#define CRYPTO_ALG_GENIV                0x00000200

/*
 * Set if the algorithm has passed automated run-time testing. Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */
#define CRYPTO_ALG_TESTED               0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE             0x00000800

/*
 * Set this bit if the algorithm provided is hardware accelerated but
 * not available to userspace via instruction set or so.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY     0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API.
 */
#define CRYPTO_ALG_INTERNAL             0x00002000

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK             0x000fff00
#define CRYPTO_TFM_RES_MASK             0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY         0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP        0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG      0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY         0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN      0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED    0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN    0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS        0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME             128

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types. In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_blkcipher;
struct crypto_tfm;
struct crypto_type;
struct skcipher_givcrypt_request;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
        struct list_head list;
        crypto_completion_t complete;
        void *data;
        struct crypto_tfm *tfm;

        u32 flags;
};

struct ablkcipher_request {
        struct crypto_async_request base;

        unsigned int nbytes;

        void *info;

        struct scatterlist *src;
        struct scatterlist *dst;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
        struct crypto_blkcipher *tfm;
        void *info;
        u32 flags;
};

struct cipher_desc {
        struct crypto_tfm *tfm;
        void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
                             const u8 *src, unsigned int nbytes);
        void *info;
};
/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */

/**
 * struct ablkcipher_alg - asynchronous block cipher definition
 * @min_keysize: Minimum key size supported by the transformation. This is the
 *               smallest key length supported by this transformation algorithm.
 *               This must be set to one of the pre-defined values as this is
 *               not hardware specific. Possible values for this field can be
 *               found via git grep "_MIN_KEY_SIZE" include/crypto/
 * @max_keysize: Maximum key size supported by the transformation. This is the
 *               largest key length supported by this transformation algorithm.
 *               This must be set to one of the pre-defined values as this is
 *               not hardware specific. Possible values for this field can be
 *               found via git grep "_MAX_KEY_SIZE" include/crypto/
 * @setkey: Set key for the transformation. This function is used to either
 *          program a supplied key into the hardware or store the key in the
 *          transformation context for programming it later. Note that this
 *          function does modify the transformation context. This function can
 *          be called multiple times during the existence of the transformation
 *          object, so one must make sure the key is properly reprogrammed into
 *          the hardware. This function is also responsible for checking the key
 *          length for validity. In case a software fallback was put in place in
 *          the @cra_init call, this function might need to use the fallback if
 *          the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
 *           the supplied scatterlist containing the blocks of data. The crypto
 *           API consumer is responsible for aligning the entries of the
 *           scatterlist properly and making sure the chunks are correctly
 *           sized. In case a software fallback was put in place in the
 *           @cra_init call, this function might need to use the fallback if
 *           the algorithm doesn't support all of the key sizes. In case the
 *           key was stored in transformation context, the key might need to be
 *           re-programmed into the hardware in this function. This function
 *           shall not modify the transformation context, as this function may
 *           be called in parallel with the same transformation object.
 * @decrypt: Decrypt a scatterlist of blocks. This is a reverse counterpart to
 *           @encrypt and the conditions are exactly the same.
 * @givencrypt: Update the IV for encryption. With this function, a cipher
 *              implementation may provide the function on how to update the IV
 *              for encryption.
 * @givdecrypt: Update the IV for decryption. This is the reverse of
 *              @givencrypt.
 * @geniv: The transformation implementation may use an "IV generator" provided
 *         by the kernel crypto API. Several use cases have a predefined
 *         approach how IVs are to be updated. For such use cases, the kernel
 *         crypto API provides ready-to-use implementations that can be
 *         referenced with this variable.
 * @ivsize: IV size applicable for transformation. The consumer must provide an
 *          IV of exactly that size to perform the encrypt or decrypt operation.
 *
 * All fields except @givencrypt, @givdecrypt, @geniv and @ivsize are
 * mandatory and must be filled.
 */
struct ablkcipher_alg {
        int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);
        int (*givencrypt)(struct skcipher_givcrypt_request *req);
        int (*givdecrypt)(struct skcipher_givcrypt_request *req);

        const char *geniv;

        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
};
/**
 * struct blkcipher_alg - synchronous block cipher definition
 * @min_keysize: see struct ablkcipher_alg
 * @max_keysize: see struct ablkcipher_alg
 * @setkey: see struct ablkcipher_alg
 * @encrypt: see struct ablkcipher_alg
 * @decrypt: see struct ablkcipher_alg
 * @geniv: see struct ablkcipher_alg
 * @ivsize: see struct ablkcipher_alg
 *
 * All fields except @geniv and @ivsize are mandatory and must be filled.
 */
struct blkcipher_alg {
        int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes);
        int (*decrypt)(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes);

        const char *geniv;

        unsigned int min_keysize;
        unsigned int max_keysize;
        unsigned int ivsize;
};
/**
 * struct cipher_alg - single-block symmetric cipher definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *                   the smallest key length supported by this transformation
 *                   algorithm. This must be set to one of the pre-defined
 *                   values as this is not hardware specific. Possible values
 *                   for this field can be found via git grep "_MIN_KEY_SIZE"
 *                   include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *                   the largest key length supported by this transformation
 *                   algorithm. This must be set to one of the pre-defined values
 *                   as this is not hardware specific. Possible values for this
 *                   field can be found via git grep "_MAX_KEY_SIZE"
 *                   include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *              program a supplied key into the hardware or store the key in the
 *              transformation context for programming it later. Note that this
 *              function does modify the transformation context. This function
 *              can be called multiple times during the existence of the
 *              transformation object, so one must make sure the key is properly
 *              reprogrammed into the hardware. This function is also
 *              responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *               single block of data, which must be @cra_blocksize big. This
 *               always operates on a full @cra_blocksize and it is not possible
 *               to encrypt a block of smaller size. The supplied buffers must
 *               therefore also be at least of @cra_blocksize size. Both the
 *               input and output buffers are always aligned to @cra_alignmask.
 *               In case either of the input or output buffer supplied by user
 *               of the crypto API is not aligned to @cra_alignmask, the crypto
 *               API will re-align the buffers. The re-alignment means that a
 *               new buffer will be allocated, the data will be copied into the
 *               new buffer, then the processing will happen on the new buffer,
 *               then the data will be copied back into the original buffer and
 *               finally the new buffer will be freed. In case a software
 *               fallback was put in place in the @cra_init call, this function
 *               might need to use the fallback if the algorithm doesn't support
 *               all of the key sizes. In case the key was stored in
 *               transformation context, the key might need to be re-programmed
 *               into the hardware in this function. This function shall not
 *               modify the transformation context, as this function may be
 *               called in parallel with the same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *               @cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
        unsigned int cia_min_keysize;
        unsigned int cia_max_keysize;
        int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen);
        void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct compress_alg {
        int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
                            unsigned int slen, u8 *dst, unsigned int *dlen);
        int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
                              unsigned int slen, u8 *dst, unsigned int *dlen);
};
#define cra_ablkcipher  cra_u.ablkcipher
#define cra_blkcipher   cra_u.blkcipher
#define cra_cipher      cra_u.cipher
#define cra_compress    cra_u.compress

/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *             CRYPTO_ALG_* flags for the flags which go in here. Those are
 *             used for fine-tuning the description of the transformation
 *             algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *                 of the smallest possible unit which can be transformed with
 *                 this algorithm. The users must respect this value.
 *                 In case of HASH transformation, it is possible for a smaller
 *                 block than @cra_blocksize to be passed to the crypto API for
 *                 transformation; in case of any other transformation type, an
 *                 error will be returned upon any attempt to transform smaller
 *                 than @cra_blocksize chunks.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *               value informs the kernel crypto API about the memory size
 *               needed to be allocated for the transformation context.
 * @cra_alignmask: Alignment mask for the input and output data buffer. The data
 *                 buffer containing the input data for the algorithm must be
 *                 aligned to this alignment mask. The data buffer for the
 *                 output data must be aligned to this alignment mask. Note that
 *                 the Crypto API will do the re-alignment in software, but
 *                 only under special conditions and there is a performance hit.
 *                 The re-alignment happens at these occasions for different
 *                 @cra_u types: cipher -- For both input data and output data
 *                 buffer; ahash -- For output hash destination buf; shash --
 *                 For output hash destination buf.
 *                 This is needed on hardware which is flawed by design and
 *                 cannot pick data from arbitrary addresses.
 * @cra_priority: Priority of this transformation implementation. In case
 *                multiple transformations with same @cra_name are available to
 *                the Crypto API, the kernel will use the one with highest
 *                @cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *            transformation algorithm. This is the name of the transformation
 *            itself. This field is used by the kernel when looking up the
 *            providers of a particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *                   name of the provider of the transformation. This can be any
 *                   arbitrary value, but in the usual case, this contains the
 *                   name of the chip or provider and the name of the
 *                   transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *            struct crypto_type, which implements callbacks common for all
 *            transformation types. There are multiple options:
 *            &crypto_blkcipher_type, &crypto_ablkcipher_type,
 *            &crypto_ahash_type, &crypto_rng_type.
 *            This field might be empty. In that case, there are no common
 *            callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *         multiple structures. Depending on the type of transformation selected
 *         by @cra_type and @cra_flags above, the associated structure must be
 *         filled with callbacks. This field might be empty. This is the case
 *         for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *            is used to initialize the cryptographic transformation object.
 *            This function is called only once at the instantiation time, right
 *            after the transformation context was allocated. In case the
 *            cryptographic hardware has some special requirements which need to
 *            be handled by software, this function shall check for the precise
 *            requirement of the transformation and put any software fallbacks
 *            in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *            counterpart to @cra_init, used to remove various changes set in
 *            @cra_init.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE.
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
        struct list_head cra_list;
        struct list_head cra_users;

        u32 cra_flags;
        unsigned int cra_blocksize;
        unsigned int cra_ctxsize;
        unsigned int cra_alignmask;

        int cra_priority;
        atomic_t cra_refcnt;

        char cra_name[CRYPTO_MAX_ALG_NAME];
        char cra_driver_name[CRYPTO_MAX_ALG_NAME];

        const struct crypto_type *cra_type;

        union {
                struct ablkcipher_alg ablkcipher;
                struct blkcipher_alg blkcipher;
                struct cipher_alg cipher;
                struct compress_alg compress;
        } cra_u;

        int (*cra_init)(struct crypto_tfm *tfm);
        void (*cra_exit)(struct crypto_tfm *tfm);
        void (*cra_destroy)(struct crypto_alg *alg);

        struct module *cra_module;
} CRYPTO_MINALIGN_ATTR;
/*
 * A helper struct for waiting for completion of async crypto ops.
 */
struct crypto_wait {
        struct completion completion;
        int err;
};

/*
 * Macro for declaring a crypto op async wait object on stack.
 */
#define DECLARE_CRYPTO_WAIT(_wait) \
        struct crypto_wait _wait = { \
                COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }

/*
 * Async ops completion helper functions.
 */
void crypto_req_done(struct crypto_async_request *req, int err);

static inline int crypto_wait_req(int err, struct crypto_wait *wait)
{
        switch (err) {
        case -EINPROGRESS:
        case -EBUSY:
                wait_for_completion(&wait->completion);
                reinit_completion(&wait->completion);
                err = wait->err;
                break;
        }

        return err;
}

static inline void crypto_init_wait(struct crypto_wait *wait)
{
        init_completion(&wait->completion);
}
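
/*
 * Example (illustrative sketch, not part of the original header): driving an
 * asynchronous operation synchronously with the wait helpers above.
 * crypto_req_done() is installed as the request's completion callback and
 * crypto_wait_req() turns -EINPROGRESS/-EBUSY into a blocking wait:
 *
 *      DECLARE_CRYPTO_WAIT(wait);
 *
 *      ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *                                      CRYPTO_TFM_REQ_MAY_SLEEP,
 *                                      crypto_req_done, &wait);
 *      err = crypto_wait_req(crypto_ablkcipher_encrypt(req), &wait);
 *
 * After crypto_wait_req() returns, err holds the final status of the
 * operation rather than -EINPROGRESS.
 */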
/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
int crypto_unregister_algs(struct crypto_alg *algs, int count);
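
/*
 * Example (illustrative sketch, not from the original header): a minimal
 * single-block cipher registration. All names and callbacks here (my_*,
 * "mycipher") are hypothetical placeholders for a real driver's code.
 *
 *      static struct crypto_alg my_alg = {
 *              .cra_name               = "mycipher",
 *              .cra_driver_name        = "mycipher-generic",
 *              .cra_priority           = 100,
 *              .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
 *              .cra_blocksize          = 16,
 *              .cra_ctxsize            = sizeof(struct my_ctx),
 *              .cra_module             = THIS_MODULE,
 *              .cra_u.cipher           = {
 *                      .cia_min_keysize        = 16,
 *                      .cia_max_keysize        = 32,
 *                      .cia_setkey             = my_setkey,
 *                      .cia_encrypt            = my_encrypt,
 *                      .cia_decrypt            = my_decrypt,
 *              },
 *      };
 *
 *      static int __init my_module_init(void)
 *      {
 *              return crypto_register_alg(&my_alg);
 *      }
 */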
/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic. Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */
struct ablkcipher_tfm {
        int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct ablkcipher_request *req);
        int (*decrypt)(struct ablkcipher_request *req);

        struct crypto_ablkcipher *base;

        unsigned int ivsize;
        unsigned int reqsize;
};

struct blkcipher_tfm {
        void *iv;
        int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int keylen);
        int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes);
        int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
        int (*cit_setkey)(struct crypto_tfm *tfm,
                          const u8 *key, unsigned int keylen);
        void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct compress_tfm {
        int (*cot_compress)(struct crypto_tfm *tfm,
                            const u8 *src, unsigned int slen,
                            u8 *dst, unsigned int *dlen);
        int (*cot_decompress)(struct crypto_tfm *tfm,
                              const u8 *src, unsigned int slen,
                              u8 *dst, unsigned int *dlen);
};

#define crt_ablkcipher  crt_u.ablkcipher
#define crt_blkcipher   crt_u.blkcipher
#define crt_cipher      crt_u.cipher
#define crt_compress    crt_u.compress

struct crypto_tfm {
        u32 crt_flags;

        union {
                struct ablkcipher_tfm ablkcipher;
                struct blkcipher_tfm blkcipher;
                struct cipher_tfm cipher;
                struct compress_tfm compress;
        } crt_u;

        void (*exit)(struct crypto_tfm *tfm);

        struct crypto_alg *__crt_alg;

        void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_ablkcipher {
        struct crypto_tfm base;
};

struct crypto_blkcipher {
        struct crypto_tfm base;
};

struct crypto_cipher {
        struct crypto_tfm base;
};

struct crypto_comp {
        struct crypto_tfm base;
};

enum {
        CRYPTOA_UNSPEC,
        CRYPTOA_ALG,
        CRYPTOA_TYPE,
        CRYPTOA_U32,
        __CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
        char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
        u32 type;
        u32 mask;
};

struct crypto_attr_u32 {
        u32 num;
};

/*
 * Transform user interface.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
        return crypto_destroy_tfm(tfm, tfm);
}
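
/*
 * Example (illustrative, not part of the original header): allocating a bare
 * transform by name. crypto_alloc_base() returns an ERR_PTR() on failure, so
 * the result must be checked with IS_ERR()/PTR_ERR(), never against NULL:
 *
 *      struct crypto_tfm *tfm = crypto_alloc_base("sha1", 0, 0);
 *
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ...
 *      crypto_free_tfm(tfm);
 */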
int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
        return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
        return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
        struct crypto_tfm *tfm;
        return __alignof__(tfm->__crt_ctx);
}
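
/*
 * Example (illustrative sketch): a driver-private context embedded in the
 * tfm. The kernel crypto API allocates @cra_ctxsize bytes that are reachable
 * via crypto_tfm_ctx(); struct my_ctx and my_setkey() are hypothetical.
 *
 *      struct my_ctx {
 *              u32 key_schedule[60];
 *      };
 *
 *      static int my_setkey(struct crypto_tfm *tfm, const u8 *key,
 *                           unsigned int keylen)
 *      {
 *              struct my_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *              ... expand @key into ctx->key_schedule here ...
 *              return 0;
 *      }
 */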
/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
        struct crypto_tfm *tfm)
{
        return (struct crypto_ablkcipher *)tfm;
}

static inline u32 crypto_skcipher_type(u32 type)
{
        type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        return type;
}

static inline u32 crypto_skcipher_mask(u32 mask)
{
        mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
        mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
        return mask;
}

/**
 * DOC: Asynchronous Block Cipher API
 *
 * The asynchronous block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_ABLKCIPHER (listed as type "ablkcipher" in /proc/crypto).
 *
 * Asynchronous cipher operations imply that the function invocation for a
 * cipher request returns immediately before the completion of the operation.
 * The cipher request is scheduled as a separate kernel thread and therefore
 * load-balanced on the different CPUs via the process scheduler. To allow
 * the kernel crypto API to inform the caller about the completion of a cipher
 * request, the caller must provide a callback function. That function is
 * invoked with the cipher handle when the request completes.
 *
 * To support the asynchronous operation, more information than just the
 * cipher handle must be supplied to the kernel crypto API. That additional
 * information is given by filling in the ablkcipher_request data structure.
 *
 * For the asynchronous block cipher API, the state is maintained with the tfm
 * cipher handle. A single tfm can be used across multiple calls and in
 * parallel. For asynchronous block cipher calls, context data supplied and
 * only used by the caller can be referenced in the request data structure in
 * addition to the IV used for the cipher request. The maintenance of such
 * state information would be important for a crypto driver implementer to
 * have, because when calling the callback function upon completion of the
 * cipher operation, that callback function may need some information about
 * which operation just finished if it invoked multiple requests in parallel.
 * This state information is unused by the kernel crypto API.
 */
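
/*
 * Example (illustrative, not part of the original header): the shape of a
 * completion callback matching crypto_completion_t; the body is hypothetical.
 * A callback may be invoked with -EINPROGRESS for a backlogged request that
 * has now started processing; the final invocation carries the real status.
 *
 *      static void my_complete(struct crypto_async_request *req, int error)
 *      {
 *              if (error == -EINPROGRESS)
 *                      return;
 *              ... error holds the final status, req->data the pointer the
 *                  caller registered via the request_set_callback helper ...
 *      }
 */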
static inline struct crypto_tfm *crypto_ablkcipher_tfm(
        struct crypto_ablkcipher *tfm)
{
        return &tfm->base;
}

/**
 * crypto_free_ablkcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
        crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

/**
 * crypto_has_ablkcipher() - Search for the availability of an ablkcipher.
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *            ablkcipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the ablkcipher is known to the kernel crypto API; false
 *         otherwise
 */
static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_has_alg(alg_name, crypto_skcipher_type(type),
                              crypto_skcipher_mask(mask));
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
        struct crypto_ablkcipher *tfm)
{
        return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

/**
 * crypto_ablkcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the ablkcipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_ablkcipher_ivsize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_ablkcipher_crt(tfm)->ivsize;
}

/**
 * crypto_ablkcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the ablkcipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_ablkcipher_blocksize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
        struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
                                               u32 flags)
{
        crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
                                                 u32 flags)
{
        crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

/**
 * crypto_ablkcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the ablkcipher referenced by the cipher
 * handle.
 *
 * Note, the key length determines the cipher type. Many block ciphers implement
 * different cipher modes depending on the key size, such as AES-128 vs AES-192
 * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
 * is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
                                           const u8 *key, unsigned int keylen)
{
        struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);

        return crt->setkey(crt->base, key, keylen);
}
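
/*
 * Example (illustrative): reacting to a setkey failure. On error, drivers
 * typically set CRYPTO_TFM_RES_* result flags describing what was wrong:
 *
 *      err = crypto_ablkcipher_setkey(tfm, key, keylen);
 *      if (err) {
 *              if (crypto_ablkcipher_get_flags(tfm) &
 *                  CRYPTO_TFM_RES_BAD_KEY_LEN)
 *                      pr_err("invalid key length %u\n", keylen);
 *              return err;
 *      }
 */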
/**
 * crypto_ablkcipher_reqtfm() - obtain cipher handle from request
 * @req: ablkcipher_request out of which the cipher handle is to be obtained
 *
 * Return the crypto_ablkcipher handle when furnishing an ablkcipher_request
 * data structure.
 *
 * Return: crypto_ablkcipher handle
 */
static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
        struct ablkcipher_request *req)
{
        return __crypto_ablkcipher_cast(req->base.tfm);
}

/**
 * crypto_ablkcipher_encrypt() - encrypt plaintext
 * @req: reference to the ablkcipher_request handle that holds all information
 *       needed to perform the cipher operation
 *
 * Encrypt plaintext data using the ablkcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * ablkcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
        struct ablkcipher_tfm *crt =
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));

        return crt->encrypt(req);
}

/**
 * crypto_ablkcipher_decrypt() - decrypt ciphertext
 * @req: reference to the ablkcipher_request handle that holds all information
 *       needed to perform the cipher operation
 *
 * Decrypt ciphertext data using the ablkcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * ablkcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
        struct ablkcipher_tfm *crt =
                crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));

        return crt->decrypt(req);
}

/**
 * DOC: Asynchronous Cipher Request Handle
 *
 * The ablkcipher_request data structure contains all pointers to data
 * required for the asynchronous cipher operation. This includes the cipher
 * handle (which can be used by multiple ablkcipher_request instances), pointer
 * to plaintext and ciphertext, asynchronous callback function, etc. It acts
 * as a handle to the ablkcipher_request_* API calls in a similar way as
 * ablkcipher handle to the crypto_ablkcipher_* API calls.
 */

/**
 * crypto_ablkcipher_reqsize() - obtain size of the request data structure
 * @tfm: cipher handle
 *
 * Return: number of bytes
 */
static inline unsigned int crypto_ablkcipher_reqsize(
        struct crypto_ablkcipher *tfm)
{
        return crypto_ablkcipher_crt(tfm)->reqsize;
}

/**
 * ablkcipher_request_set_tfm() - update cipher handle reference in request
 * @req: request handle to be modified
 * @tfm: cipher handle that shall be added to the request handle
 *
 * Allow the caller to replace the existing ablkcipher handle in the request
 * data structure with a different one.
 */
static inline void ablkcipher_request_set_tfm(
        struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
        req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
        struct crypto_async_request *req)
{
        return container_of(req, struct ablkcipher_request, base);
}

/**
 * ablkcipher_request_alloc() - allocate request data structure
 * @tfm: cipher handle to be registered with the request
 * @gfp: memory allocation flag that is handed to kmalloc by the API call.
 *
 * Allocate the request data structure that must be used with the ablkcipher
 * encrypt and decrypt API calls. During the allocation, the provided ablkcipher
 * handle is registered in the request data structure.
 *
 * Return: allocated request handle in case of success, or NULL if out of memory
 */
static inline struct ablkcipher_request *ablkcipher_request_alloc(
        struct crypto_ablkcipher *tfm, gfp_t gfp)
{
        struct ablkcipher_request *req;

        req = kmalloc(sizeof(struct ablkcipher_request) +
                      crypto_ablkcipher_reqsize(tfm), gfp);

        if (likely(req))
                ablkcipher_request_set_tfm(req, tfm);

        return req;
}

/**
 * ablkcipher_request_free() - zeroize and free request data structure
 * @req: request data structure cipher handle to be freed
 */
static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
        kzfree(req);
}

/**
 * ablkcipher_request_set_callback() - set asynchronous callback function
 * @req: request handle
 * @flags: specify zero or an ORing of the flags
 *         CRYPTO_TFM_REQ_MAY_BACKLOG the request queue may back log and
 *         increase the wait queue beyond the initial maximum size;
 *         CRYPTO_TFM_REQ_MAY_SLEEP the request processing may sleep
 * @compl: callback function pointer to be registered with the request handle
 * @data: The data pointer refers to memory that is not used by the kernel
 *        crypto API, but provided to the callback function for it to use. Here,
 *        the caller can provide a reference to memory the callback function can
 *        operate on. As the callback function is invoked asynchronously to the
 *        related functionality, it may need to access data structures of the
 *        related functionality which can be referenced using this pointer. The
 *        callback function can access the memory via the "data" field in the
 *        crypto_async_request data structure provided to the callback function.
 *
 * This function allows setting the callback function that is triggered once the
 * cipher operation completes.
 *
 * The callback function is registered with the ablkcipher_request handle and
 * must comply with the following template::
 *
 *      void callback_function(struct crypto_async_request *req, int error)
 */
static inline void ablkcipher_request_set_callback(
        struct ablkcipher_request *req,
        u32 flags, crypto_completion_t compl, void *data)
{
        req->base.complete = compl;
        req->base.data = data;
        req->base.flags = flags;
}

/**
 * ablkcipher_request_set_crypt() - set data buffers
 * @req: request handle
 * @src: source scatter / gather list
 * @dst: destination scatter / gather list
 * @nbytes: number of bytes to process from @src
 * @iv: IV for the cipher operation which must comply with the IV size defined
 *      by crypto_ablkcipher_ivsize
 *
 * This function allows setting of the source data and destination data
 * scatter / gather lists.
 *
 * For encryption, the source is treated as the plaintext and the
 * destination is the ciphertext. For a decryption operation, the use is
 * reversed - the source is the ciphertext and the destination is the plaintext.
 */
static inline void ablkcipher_request_set_crypt(
        struct ablkcipher_request *req,
        struct scatterlist *src, struct scatterlist *dst,
        unsigned int nbytes, void *iv)
{
        req->src = src;
        req->dst = dst;
        req->nbytes = nbytes;
        req->info = iv;
}
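
/*
 * Example (illustrative sketch, not part of the original header): one complete
 * encryption pass over a single scatterlist entry. The cipher name "cbc(aes)",
 * key, iv and buf are assumed for illustration; crypto_alloc_ablkcipher() is
 * declared elsewhere in this header, and error handling is abbreviated.
 *
 *      struct crypto_ablkcipher *tfm;
 *      struct ablkcipher_request *req;
 *      struct scatterlist sg;
 *      DECLARE_CRYPTO_WAIT(wait);
 *
 *      tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      err = crypto_ablkcipher_setkey(tfm, key, 16);
 *
 *      req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *      sg_init_one(&sg, buf, buflen);
 *      ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *                                      crypto_req_done, &wait);
 *      ablkcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *      err = crypto_wait_req(crypto_ablkcipher_encrypt(req), &wait);
 *
 *      ablkcipher_request_free(req);
 *      crypto_free_ablkcipher(tfm);
 */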
/**
 * DOC: Synchronous Block Cipher API
 *
 * The synchronous block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_BLKCIPHER (listed as type "blkcipher" in /proc/crypto).
 *
 * Synchronous calls have a context in the tfm. But since a single tfm can be
 * used in multiple calls and in parallel, this info should not be changeable
 * (unless a lock is used). This applies, for example, to the symmetric key.
 * However, the IV is changeable, so there is an iv field in the blkcipher_tfm
 * structure for the synchronous blkcipher API. It is the only state info that
 * can be kept for synchronous calls without using a big lock across a tfm.
 *
 * The block cipher API allows the use of a complete cipher, i.e. a cipher
 * consisting of a template (a block chaining mode) and a single block cipher
 * primitive (e.g. AES).
 *
 * The plaintext data buffer and the ciphertext data buffer are pointed to
 * by using scatter/gather lists. The cipher operation is performed
 * on all segments of the provided scatter/gather lists.
 *
 * The kernel crypto API supports a cipher operation "in-place" which means that
 * the caller may provide the same scatter/gather list for the plaintext and
 * cipher text. After the completion of the cipher operation, the plaintext
 * data is replaced with the ciphertext data in case of an encryption and vice
 * versa for a decryption. The caller must ensure that the scatter/gather lists
 * for the output data point to sufficiently large buffers, i.e. multiples of
 * the block size of the cipher.
 */

static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
        struct crypto_tfm *tfm)
{
        return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
        struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
        return __crypto_blkcipher_cast(tfm);
}

/**
 * crypto_alloc_blkcipher() - allocate synchronous block cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *            blkcipher cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for a block cipher. The returned struct
 * crypto_blkcipher is the cipher handle that is required for any subsequent
 * API invocation for that block cipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *         of an error, PTR_ERR() returns the error code.
 */
static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
        const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}
static inline struct crypto_tfm *crypto_blkcipher_tfm(
        struct crypto_blkcipher *tfm)
{
        return &tfm->base;
}

/**
 * crypto_free_blkcipher() - zeroize and free the block cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
        crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

/**
 * crypto_has_blkcipher() - Search for the availability of a block cipher
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *            block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the block cipher is known to the kernel crypto API; false
 *         otherwise
 */
static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_BLKCIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}
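
/*
 * Example (illustrative): probing for an algorithm before committing to it;
 * the name "cbc(aes)" is just an example.
 *
 *      if (!crypto_has_blkcipher("cbc(aes)", 0, 0))
 *              return -ENOENT;
 */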
/**
 * crypto_blkcipher_name() - return the name / cra_name from the cipher handle
 * @tfm: cipher handle
 *
 * Return: The character string holding the name of the cipher
 */
static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
        struct crypto_blkcipher *tfm)
{
        return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
        struct crypto_blkcipher *tfm)
{
        return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

/**
 * crypto_blkcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the block cipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
        return crypto_blkcipher_alg(tfm)->ivsize;
}

/**
 * crypto_blkcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the block cipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_blkcipher_blocksize(
        struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
        struct crypto_blkcipher *tfm)
{
        return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
                                              u32 flags)
{
        crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
                                                u32 flags)
{
        crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

/**
 * crypto_blkcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the block cipher referenced by the cipher
 * handle.
 *
 * Note, the key length determines the cipher type. Many block ciphers implement
 * different cipher modes depending on the key size, such as AES-128 vs AES-192
 * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
 * is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
                                          const u8 *key, unsigned int keylen)
{
        return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
                                                 key, keylen);
}

/**
 * crypto_blkcipher_encrypt() - encrypt plaintext
 * @desc: reference to the block cipher handle with meta data
 * @dst: scatter/gather list that is filled by the cipher operation with the
 *       ciphertext
 * @src: scatter/gather list that holds the plaintext
 * @nbytes: number of bytes of the plaintext to encrypt.
 *
 * Encrypt plaintext data using the IV set by the caller with a preceding
 * call of crypto_blkcipher_set_iv.
 *
 * The blkcipher_desc data structure must be filled by the caller and can
 * reside on the stack. The caller must fill desc as follows: desc.tfm is filled
 * with the block cipher handle; desc.flags is filled with either
 * CRYPTO_TFM_REQ_MAY_SLEEP or 0.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes)
{
        desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
        return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}
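
/*
 * Example (illustrative sketch, not part of the original header): a complete
 * synchronous encryption. The cipher name "cbc(aes)", key, iv and buf are
 * assumed for illustration; error handling is abbreviated.
 *
 *      struct crypto_blkcipher *tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
 *      struct blkcipher_desc desc;
 *      struct scatterlist sg;
 *
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      err = crypto_blkcipher_setkey(tfm, key, 16);
 *      crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *      sg_init_one(&sg, buf, buflen);
 *      desc.tfm = tfm;
 *      desc.flags = 0;
 *      err = crypto_blkcipher_encrypt(&desc, &sg, &sg, buflen);
 *
 *      crypto_free_blkcipher(tfm);
 */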
/**
 * crypto_blkcipher_encrypt_iv() - encrypt plaintext with dedicated IV
 * @desc: reference to the block cipher handle with meta data
 * @dst: scatter/gather list that is filled by the cipher operation with the
 *	ciphertext
 * @src: scatter/gather list that holds the plaintext
 * @nbytes: number of bytes of the plaintext to encrypt
 *
 * Encrypt plaintext data with the use of an IV that is solely used for this
 * cipher operation. Any previously set IV is not used.
 *
 * The blkcipher_desc data structure must be filled by the caller and can
 * reside on the stack. The caller must fill desc as follows: desc.tfm is
 * filled with the block cipher handle; desc.info is filled with the IV to be
 * used for the current operation; desc.flags is filled with either
 * CRYPTO_TFM_REQ_MAY_SLEEP or 0.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}
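
/*
 * Example (sketch): the per-operation IV variant. Instead of a preceding
 * crypto_blkcipher_set_iv() call, the IV is passed through desc.info and is
 * used for this call only; "desc", "iv", "sg" and "buflen" are the same
 * illustrative assumptions as in the sketch above.
 *
 *	desc.info = iv;
 *	err = crypto_blkcipher_encrypt_iv(&desc, &sg, &sg, buflen);
 */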
/**
 * crypto_blkcipher_decrypt() - decrypt ciphertext
 * @desc: reference to the block cipher handle with meta data
 * @dst: scatter/gather list that is filled by the cipher operation with the
 *	plaintext
 * @src: scatter/gather list that holds the ciphertext
 * @nbytes: number of bytes of the ciphertext to decrypt
 *
 * Decrypt ciphertext data using the IV set by the caller with a preceding
 * call of crypto_blkcipher_set_iv.
 *
 * The blkcipher_desc data structure must be filled by the caller as documented
 * for the crypto_blkcipher_encrypt call above.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}
/**
 * crypto_blkcipher_decrypt_iv() - decrypt ciphertext with dedicated IV
 * @desc: reference to the block cipher handle with meta data
 * @dst: scatter/gather list that is filled by the cipher operation with the
 *	plaintext
 * @src: scatter/gather list that holds the ciphertext
 * @nbytes: number of bytes of the ciphertext to decrypt
 *
 * Decrypt ciphertext data with the use of an IV that is solely used for this
 * cipher operation. Any previously set IV is not used.
 *
 * The blkcipher_desc data structure must be filled by the caller as documented
 * for the crypto_blkcipher_encrypt_iv call above.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}
/**
 * crypto_blkcipher_set_iv() - set IV for cipher
 * @tfm: cipher handle
 * @src: buffer holding the IV
 * @len: length of the IV in bytes
 *
 * The caller-provided IV is set for the block cipher referenced by the
 * cipher handle.
 */
static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

/**
 * crypto_blkcipher_get_iv() - obtain IV from cipher
 * @tfm: cipher handle
 * @dst: buffer filled with the IV
 * @len: length of the buffer dst in bytes
 *
 * The caller can obtain the IV set for the block cipher referenced by the
 * cipher handle and store it into the user-provided buffer. If the buffer
 * is smaller than the IV, the copy is truncated to len bytes.
 */
static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}
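
/*
 * Example (sketch): an IV round trip. The 16-byte size matches the AES block
 * size and is an assumption for illustration; after both calls iv_out holds
 * the same bytes as iv_in.
 *
 *	u8 iv_in[16] = { 0 }, iv_out[16];
 *
 *	crypto_blkcipher_set_iv(tfm, iv_in, sizeof(iv_in));
 *	crypto_blkcipher_get_iv(tfm, iv_out, sizeof(iv_out));
 */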
/**
 * DOC: Single Block Cipher API
 *
 * The single block cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_CIPHER (listed as type "cipher" in /proc/crypto).
 *
 * Using the single block cipher API calls, operations with the basic cipher
 * primitive can be implemented. These cipher primitives exclude any block
 * chaining operations, including IV handling.
 *
 * The purpose of this single block cipher API is to support the implementation
 * of templates or other concepts that only need to perform the cipher
 * operation on one block at a time. Templates invoke the underlying cipher
 * primitive block-wise and process either the input or the output data of
 * these cipher operations.
 */
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return __crypto_cipher_cast(tfm);
}

/**
 * crypto_alloc_cipher() - allocate single block cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	single block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for a single block cipher. The returned struct
 * crypto_cipher is the cipher handle that is required for any subsequent API
 * invocation for that single block cipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	of an error, PTR_ERR() returns the error code.
 */
static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}
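
/*
 * Example (sketch): allocating and releasing a single block cipher handle,
 * using the IS_ERR()/PTR_ERR() convention named above. The algorithm name
 * "aes" is an illustrative choice.
 *
 *	struct crypto_cipher *tfm;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_cipher(tfm);
 */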
static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_cipher() - zeroize and free the single block cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

/**
 * crypto_has_cipher() - Search for the availability of a single block cipher
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	single block cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the single block cipher is known to the kernel crypto API;
 *	false otherwise
 */
static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}
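
/*
 * Example (sketch): probing for availability before attempting allocation;
 * the error code chosen here is an assumption.
 *
 *	if (!crypto_has_cipher("aes", 0, 0))
 *		return -ENOENT;
 */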
static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->crt_cipher;
}

/**
 * crypto_cipher_blocksize() - obtain block size for cipher
 * @tfm: cipher handle
 *
 * The block size for the single block cipher referenced with the cipher handle
 * tfm is returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}
static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}
/**
 * crypto_cipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller-provided key is set for the single block cipher referenced by the
 * cipher handle.
 *
 * Note that the key length selects the cipher variant. Many block ciphers
 * support several key sizes, such as AES-128 vs. AES-192 vs. AES-256.
 * Providing a 16-byte key to an AES cipher handle therefore selects AES-128.
 *
 * Return: 0 if the key was set successfully; < 0 if an error occurred
 */
static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
						  key, keylen);
}
/**
 * crypto_cipher_encrypt_one() - encrypt one block of plaintext
 * @tfm: cipher handle
 * @dst: points to the buffer that will be filled with the ciphertext
 * @src: buffer holding the plaintext to be encrypted
 *
 * Invoke the encryption operation on one block. The caller must ensure that
 * the plaintext and ciphertext buffers are at least one block in size.
 */
static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

/**
 * crypto_cipher_decrypt_one() - decrypt one block of ciphertext
 * @tfm: cipher handle
 * @dst: points to the buffer that will be filled with the plaintext
 * @src: buffer holding the ciphertext to be decrypted
 *
 * Invoke the decryption operation on one block. The caller must ensure that
 * the plaintext and ciphertext buffers are at least one block in size.
 */
static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}
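
/*
 * Example (a hedged sketch of the block-wise use described in the DOC
 * section above): encrypting a buffer one block at a time, ECB-fashion.
 * The names "tfm", "key", "keylen", "buf" and "buflen" are assumptions;
 * buflen must be a multiple of the block size.
 *
 *	unsigned int bs = crypto_cipher_blocksize(tfm);
 *	unsigned int i;
 *	int err;
 *
 *	err = crypto_cipher_setkey(tfm, key, keylen);
 *	if (err)
 *		return err;
 *	for (i = 0; i < buflen; i += bs)
 *		crypto_cipher_encrypt_one(tfm, buf + i, buf + i);
 */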
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
	       CRYPTO_ALG_TYPE_MASK);
	return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
	return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
						  src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
						    src, slen, dst, dlen);
}
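
/*
 * Example (sketch): compressing a buffer. The algorithm name "deflate" and
 * the buffer names are illustrative assumptions; dlen is an in/out parameter
 * that carries the destination capacity in and the bytes produced out.
 *
 *	struct crypto_comp *tfm = crypto_alloc_comp("deflate", 0, 0);
 *	unsigned int dlen = sizeof(dst);
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	...
 *	crypto_free_comp(tfm);
 */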
#endif	/* _LINUX_CRYPTO_H */