/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#ifndef _CRYPTO_SKCIPHER_H
#define _CRYPTO_SKCIPHER_H

#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/slab.h>
/**
 * struct skcipher_request - Symmetric key cipher request
 * @cryptlen: Number of bytes to encrypt or decrypt
 * @iv: Initialisation Vector
 * @src: Source SG list
 * @dst: Destination SG list
 * @base: Underlying async request
 * @__ctx: Start of private context data
 */
struct skcipher_request {
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *src;
	struct scatterlist *dst;

	struct crypto_async_request base;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};
/**
 * struct skcipher_givcrypt_request - Crypto request with IV generation
 * @seq: Sequence number for IV generation
 * @giv: Space for generated IV
 * @creq: The crypto request itself
 */
struct skcipher_givcrypt_request {
	u64 seq;
	u8 *giv;

	struct ablkcipher_request creq;
};
struct crypto_skcipher {
	int (*setkey)(struct crypto_skcipher *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct skcipher_request *req);
	int (*decrypt)(struct skcipher_request *req);

	unsigned int ivsize;
	unsigned int reqsize;
	unsigned int keysize;

	struct crypto_tfm base;
};

struct crypto_sync_skcipher {
	struct crypto_skcipher base;
};
/**
 * struct skcipher_alg - symmetric key cipher definition
 * @min_keysize: Minimum key size supported by the transformation. This is the
 *		 smallest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MIN_KEY_SIZE" include/crypto/
 * @max_keysize: Maximum key size supported by the transformation. This is the
 *		 largest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MAX_KEY_SIZE" include/crypto/
 * @setkey: Set key for the transformation. This function is used to either
 *	    program a supplied key into the hardware or store the key in the
 *	    transformation context for programming it later. Note that this
 *	    function does modify the transformation context. This function can
 *	    be called multiple times during the existence of the transformation
 *	    object, so one must make sure the key is properly reprogrammed into
 *	    the hardware. This function is also responsible for checking the key
 *	    length for validity. In case a software fallback was put in place in
 *	    the @cra_init call, this function might need to use the fallback if
 *	    the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
 *	     the supplied scatterlist containing the blocks of data. The crypto
 *	     API consumer is responsible for aligning the entries of the
 *	     scatterlist properly and making sure the chunks are correctly
 *	     sized. In case a software fallback was put in place in the
 *	     @cra_init call, this function might need to use the fallback if
 *	     the algorithm doesn't support all of the key sizes. In case the
 *	     key was stored in the transformation context, the key might need
 *	     to be re-programmed into the hardware in this function. This
 *	     function shall not modify the transformation context, as this
 *	     function may be called in parallel with the same transformation
 *	     object.
 * @decrypt: Decrypt a scatterlist of blocks. This is a reverse counterpart to
 *	     @encrypt and the conditions are exactly the same.
 * @init: Initialize the cryptographic transformation object. This function
 *	  is used to initialize the cryptographic transformation object.
 *	  This function is called only once at the instantiation time, right
 *	  after the transformation context was allocated. In case the
 *	  cryptographic hardware has some special requirements which need to
 *	  be handled by software, this function shall check for the precise
 *	  requirement of the transformation and put any software fallbacks
 *	  in place.
 * @exit: Deinitialize the cryptographic transformation object. This is a
 *	  counterpart to @init, used to remove various changes set in
 *	  @init.
 * @ivsize: IV size applicable for transformation. The consumer must provide an
 *	    IV of exactly that size to perform the encrypt or decrypt operation.
 * @chunksize: Equal to the block size except for stream ciphers such as
 *	       CTR, where it is set to the block size of the underlying cipher.
 * @walksize: Equal to the chunk size except in cases where the algorithm is
 *	      considerably more efficient if it can operate on multiple chunks
 *	      in parallel. Should be a multiple of chunksize.
 * @base: Definition of a generic crypto algorithm.
 *
 * All fields except @ivsize are mandatory and must be filled.
 */
struct skcipher_alg {
	int (*setkey)(struct crypto_skcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct skcipher_request *req);
	int (*decrypt)(struct skcipher_request *req);
	int (*init)(struct crypto_skcipher *tfm);
	void (*exit)(struct crypto_skcipher *tfm);

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
	unsigned int chunksize;
	unsigned int walksize;

	struct crypto_alg base;
};
#define MAX_SYNC_SKCIPHER_REQSIZE      384

/*
 * This performs a type-check against the "tfm" argument to make sure
 * all users have the correct skcipher tfm for doing on-stack requests.
 */
#define SYNC_SKCIPHER_REQUEST_ON_STACK(name, tfm) \
	char __##name##_desc[sizeof(struct skcipher_request) + \
			     MAX_SYNC_SKCIPHER_REQSIZE + \
			     (!(sizeof((struct crypto_sync_skcipher *)1 == \
				       (typeof(tfm))1))) \
			    ] CRYPTO_MINALIGN_ATTR; \
	struct skcipher_request *name = (void *)__##name##_desc
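
/*
 * Illustrative sketch (not part of the original header): how
 * SYNC_SKCIPHER_REQUEST_ON_STACK is typically paired with
 * crypto_alloc_sync_skcipher(). The algorithm name "cbc(aes)" and the error
 * handling are assumptions made for the example only.
 *
 *	struct crypto_sync_skcipher *tfm;
 *
 *	tfm = crypto_alloc_sync_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	{
 *		SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);
 *
 *		skcipher_request_set_sync_tfm(req, tfm);
 *		skcipher_request_set_callback(req, 0, NULL, NULL);
 *		// ... skcipher_request_set_crypt() + crypto_skcipher_encrypt() ...
 *		skcipher_request_zero(req);
 *	}
 *	crypto_free_sync_skcipher(tfm);
 */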
/**
 * DOC: Symmetric Key Cipher API
 *
 * Symmetric key cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_SKCIPHER (listed as type "skcipher" in /proc/crypto).
 *
 * Asynchronous cipher operations imply that the function invocation for a
 * cipher request returns immediately before the completion of the operation.
 * The cipher request is scheduled as a separate kernel thread and therefore
 * load-balanced on the different CPUs via the process scheduler. To allow
 * the kernel crypto API to inform the caller about the completion of a cipher
 * request, the caller must provide a callback function. That function is
 * invoked with the cipher handle when the request completes.
 *
 * To support the asynchronous operation, more information than just the
 * cipher handle must be supplied to the kernel crypto API. That additional
 * information is given by filling in the skcipher_request data structure.
 *
 * For the symmetric key cipher API, the state is maintained with the tfm
 * cipher handle. A single tfm can be used across multiple calls and in
 * parallel. For asynchronous block cipher calls, context data supplied and
 * only used by the caller can be referenced via the request data structure in
 * addition to the IV used for the cipher request. The maintenance of such
 * state information would be important for a crypto driver implementer to
 * have, because when calling the callback function upon completion of the
 * cipher operation, that callback function may need some information about
 * which operation just finished if it invoked multiple requests in parallel.
 * This state information is unused by the kernel crypto API.
 */
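
/*
 * Illustrative end-to-end sketch (not part of the original header): allocate
 * a tfm, set a key, run a single encryption and wait for it using the
 * crypto_wait_req() helpers from <linux/crypto.h> and sg_init_one() from
 * <linux/scatterlist.h>. The algorithm name "cbc(aes)", key/IV contents and
 * buffer handling are assumptions made for the example only; data referenced
 * by a scatterlist must not live on the stack.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 key[16], iv[16];		// filled in by the caller
 *	u8 *buf;			// kmalloc()ed data buffer, 64 bytes
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
 *	if (err)
 *		goto out_free_tfm;
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out_free_tfm;
 *	}
 *
 *	sg_init_one(&sg, buf, 64);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *					   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, 64, iv);
 *
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 * out_free_tfm:
 *	crypto_free_skcipher(tfm);
 *	return err;
 */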
static inline struct crypto_skcipher *__crypto_skcipher_cast(
	struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_skcipher, base);
}
/**
 * crypto_alloc_skcipher() - allocate symmetric key cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      skcipher cipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for an skcipher. The returned struct
 * crypto_skcipher is the cipher handle that is required for any subsequent
 * API invocation for that skcipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);

struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(const char *alg_name,
							u32 type, u32 mask);
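
/*
 * Illustrative sketch (not part of the original header): allocating a handle
 * and restricting the lookup to synchronous implementations by putting
 * CRYPTO_ALG_ASYNC into the mask while leaving the type bits clear. The
 * algorithm name "xts(aes)" is an assumption for the example.
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	// use the tfm ...
 *	crypto_free_skcipher(tfm);
 */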
static inline struct crypto_tfm *crypto_skcipher_tfm(
	struct crypto_skcipher *tfm)
{
	return &tfm->base;
}

/**
 * crypto_free_skcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 */
static inline void crypto_free_skcipher(struct crypto_skcipher *tfm)
{
	crypto_destroy_tfm(tfm, crypto_skcipher_tfm(tfm));
}

static inline void crypto_free_sync_skcipher(struct crypto_sync_skcipher *tfm)
{
	crypto_free_skcipher(&tfm->base);
}
/**
 * crypto_has_skcipher() - Search for the availability of an skcipher.
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      skcipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Return: true when the skcipher is known to the kernel crypto API; false
 *	   otherwise
 */
static inline int crypto_has_skcipher(const char *alg_name, u32 type,
				      u32 mask)
{
	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
			      crypto_skcipher_mask(mask));
}
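
/*
 * Illustrative sketch (not part of the original header): probing for an
 * algorithm before committing to it, e.g. to select a fallback. The
 * algorithm names are assumptions for the example.
 *
 *	const char *name = crypto_has_skcipher("ctr(aes)", 0, 0) ?
 *				"ctr(aes)" : "cbc(aes)";
 *
 *	tfm = crypto_alloc_skcipher(name, 0, 0);
 */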
/**
 * crypto_has_skcipher2() - Search for the availability of an skcipher.
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      skcipher
 * @type: specifies the type of the skcipher
 * @mask: specifies the mask for the skcipher
 *
 * Return: true when the skcipher is known to the kernel crypto API; false
 *	   otherwise
 */
int crypto_has_skcipher2(const char *alg_name, u32 type, u32 mask);

static inline const char *crypto_skcipher_driver_name(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
}

static inline struct skcipher_alg *crypto_skcipher_alg(
	struct crypto_skcipher *tfm)
{
	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
			    struct skcipher_alg, base);
}

static inline unsigned int crypto_skcipher_alg_ivsize(struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.ivsize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.ivsize;

	return alg->ivsize;
}
/**
 * crypto_skcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the skcipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_skcipher_ivsize(struct crypto_skcipher *tfm)
{
	return tfm->ivsize;
}

static inline unsigned int crypto_sync_skcipher_ivsize(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_ivsize(&tfm->base);
}
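
/*
 * Illustrative sketch (not part of the original header): sizing an IV buffer
 * from the tfm instead of hard-coding it. The variable names and the use of
 * get_random_bytes() are assumptions for the example.
 *
 *	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
 *	u8 *iv = kmalloc(ivsize, GFP_KERNEL);
 *
 *	if (ivsize && !iv)
 *		return -ENOMEM;
 *	get_random_bytes(iv, ivsize);
 */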
static inline unsigned int crypto_skcipher_alg_chunksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->chunksize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->walksize;
}
/**
 * crypto_skcipher_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CTR. However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity. This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_skcipher_chunksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_chunksize(crypto_skcipher_alg(tfm));
}
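
/*
 * Illustrative sketch (not part of the original header): when a long message
 * is split across several requests for a stream cipher such as "ctr(aes)",
 * every request except the last should cover a multiple of the chunk size so
 * the IV stays aligned to the underlying block size. The variable names are
 * assumptions for the example.
 *
 *	unsigned int chunk = crypto_skcipher_chunksize(tfm);
 *	unsigned int part = rounddown(remaining, chunk);
 *
 *	// submit "part" bytes now; only the final request may be shorter
 */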
/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}
/**
 * crypto_skcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the skcipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_skcipher_blocksize(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_skcipher_tfm(tfm));
}

static inline unsigned int crypto_sync_skcipher_blocksize(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_blocksize(&tfm->base);
}

static inline unsigned int crypto_skcipher_alignmask(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_skcipher_tfm(tfm));
}
static inline u32 crypto_skcipher_get_flags(struct crypto_skcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_skcipher_tfm(tfm));
}

static inline void crypto_skcipher_set_flags(struct crypto_skcipher *tfm,
					     u32 flags)
{
	crypto_tfm_set_flags(crypto_skcipher_tfm(tfm), flags);
}

static inline void crypto_skcipher_clear_flags(struct crypto_skcipher *tfm,
					       u32 flags)
{
	crypto_tfm_clear_flags(crypto_skcipher_tfm(tfm), flags);
}

static inline u32 crypto_sync_skcipher_get_flags(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_get_flags(&tfm->base);
}

static inline void crypto_sync_skcipher_set_flags(
	struct crypto_sync_skcipher *tfm, u32 flags)
{
	crypto_skcipher_set_flags(&tfm->base, flags);
}

static inline void crypto_sync_skcipher_clear_flags(
	struct crypto_sync_skcipher *tfm, u32 flags)
{
	crypto_skcipher_clear_flags(&tfm->base, flags);
}
/**
 * crypto_skcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the skcipher referenced by the cipher
 * handle.
 *
 * Note, the key length determines the cipher type. Many block ciphers implement
 * different cipher variants depending on the key size, such as AES-128 vs
 * AES-192 vs. AES-256. When providing a 16 byte key for an AES cipher handle,
 * AES-128 is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
static inline int crypto_skcipher_setkey(struct crypto_skcipher *tfm,
					 const u8 *key, unsigned int keylen)
{
	return tfm->setkey(tfm, key, keylen);
}

static inline int crypto_sync_skcipher_setkey(struct crypto_sync_skcipher *tfm,
					      const u8 *key, unsigned int keylen)
{
	return crypto_skcipher_setkey(&tfm->base, key, keylen);
}
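
/*
 * Illustrative sketch (not part of the original header): setting a 256-bit
 * key and reporting failure, e.g. for a key length the algorithm does not
 * support. The key buffer is an assumption for the example.
 *
 *	err = crypto_skcipher_setkey(tfm, key, 32);
 *	if (err) {
 *		pr_err("skcipher setkey failed: %d\n", err);
 *		crypto_free_skcipher(tfm);
 *		return err;
 *	}
 */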
static inline unsigned int crypto_skcipher_default_keysize(
	struct crypto_skcipher *tfm)
{
	return tfm->keysize;
}

/**
 * crypto_skcipher_reqtfm() - obtain cipher handle from request
 * @req: skcipher_request out of which the cipher handle is to be obtained
 *
 * Return the crypto_skcipher handle when furnishing an skcipher_request
 * data structure.
 *
 * Return: crypto_skcipher handle
 */
static inline struct crypto_skcipher *crypto_skcipher_reqtfm(
	struct skcipher_request *req)
{
	return __crypto_skcipher_cast(req->base.tfm);
}

static inline struct crypto_sync_skcipher *crypto_sync_skcipher_reqtfm(
	struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	return container_of(tfm, struct crypto_sync_skcipher, base);
}
static inline void crypto_stat_skcipher_encrypt(struct skcipher_request *req,
						int ret, struct crypto_alg *alg)
{
#ifdef CONFIG_CRYPTO_STATS
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic_inc(&alg->cipher_err_cnt);
	} else {
		atomic_inc(&alg->encrypt_cnt);
		atomic64_add(req->cryptlen, &alg->encrypt_tlen);
	}
#endif
}

static inline void crypto_stat_skcipher_decrypt(struct skcipher_request *req,
						int ret, struct crypto_alg *alg)
{
#ifdef CONFIG_CRYPTO_STATS
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic_inc(&alg->cipher_err_cnt);
	} else {
		atomic_inc(&alg->decrypt_cnt);
		atomic64_add(req->cryptlen, &alg->decrypt_tlen);
	}
#endif
}
/**
 * crypto_skcipher_encrypt() - encrypt plaintext
 * @req: reference to the skcipher_request handle that holds all information
 *	 needed to perform the cipher operation
 *
 * Encrypt plaintext data using the skcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * skcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int ret;

	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else
		ret = tfm->encrypt(req);
	crypto_stat_skcipher_encrypt(req, ret, tfm->base.__crt_alg);
	return ret;
}
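
/*
 * Illustrative sketch (not part of the original header): submitting the
 * request and waiting for completion with the crypto_wait_req() helper from
 * <linux/crypto.h>, which folds the -EINPROGRESS/-EBUSY cases of an
 * asynchronous implementation into a single synchronous return value. The
 * surrounding setup is assumed to have been done as in the examples above.
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *					   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	if (err)
 *		pr_err("skcipher encrypt failed: %d\n", err);
 */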
/**
 * crypto_skcipher_decrypt() - decrypt ciphertext
 * @req: reference to the skcipher_request handle that holds all information
 *	 needed to perform the cipher operation
 *
 * Decrypt ciphertext data using the skcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * skcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
static inline int crypto_skcipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int ret;

	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else
		ret = tfm->decrypt(req);
	crypto_stat_skcipher_decrypt(req, ret, tfm->base.__crt_alg);
	return ret;
}
/**
 * DOC: Symmetric Key Cipher Request Handle
 *
 * The skcipher_request data structure contains all pointers to data
 * required for the symmetric key cipher operation. This includes the cipher
 * handle (which can be used by multiple skcipher_request instances), pointer
 * to plaintext and ciphertext, asynchronous callback function, etc. It acts
 * as a handle to the skcipher_request_* API calls in a similar way as
 * skcipher handle to the crypto_skcipher_* API calls.
 */

/**
 * crypto_skcipher_reqsize() - obtain size of the request data structure
 * @tfm: cipher handle
 *
 * Return: number of bytes
 */
static inline unsigned int crypto_skcipher_reqsize(struct crypto_skcipher *tfm)
{
	return tfm->reqsize;
}
/**
 * skcipher_request_set_tfm() - update cipher handle reference in request
 * @req: request handle to be modified
 * @tfm: cipher handle that shall be added to the request handle
 *
 * Allow the caller to replace the existing skcipher handle in the request
 * data structure with a different one.
 */
static inline void skcipher_request_set_tfm(struct skcipher_request *req,
					    struct crypto_skcipher *tfm)
{
	req->base.tfm = crypto_skcipher_tfm(tfm);
}

static inline void skcipher_request_set_sync_tfm(struct skcipher_request *req,
					    struct crypto_sync_skcipher *tfm)
{
	skcipher_request_set_tfm(req, &tfm->base);
}

static inline struct skcipher_request *skcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct skcipher_request, base);
}
/**
 * skcipher_request_alloc() - allocate request data structure
 * @tfm: cipher handle to be registered with the request
 * @gfp: memory allocation flag that is handed to kmalloc by the API call.
 *
 * Allocate the request data structure that must be used with the skcipher
 * encrypt and decrypt API calls. During the allocation, the provided skcipher
 * handle is registered in the request data structure.
 *
 * Return: allocated request handle in case of success, or NULL if out of memory
 */
static inline struct skcipher_request *skcipher_request_alloc(
	struct crypto_skcipher *tfm, gfp_t gfp)
{
	struct skcipher_request *req;

	req = kmalloc(sizeof(struct skcipher_request) +
		      crypto_skcipher_reqsize(tfm), gfp);

	if (likely(req))
		skcipher_request_set_tfm(req, tfm);

	return req;
}
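
/*
 * Illustrative sketch (not part of the original header): pairing
 * skcipher_request_alloc() with skcipher_request_free() once the operation
 * has completed. GFP_KERNEL is an assumption; atomic contexts would need
 * GFP_ATOMIC instead.
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	if (!req)
 *		return -ENOMEM;
 *	// ... set callback and buffers, run the operation ...
 *	skcipher_request_free(req);
 */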
/**
 * skcipher_request_free() - zeroize and free request data structure
 * @req: request data structure cipher handle to be freed
 */
static inline void skcipher_request_free(struct skcipher_request *req)
{
	kzfree(req);
}

static inline void skcipher_request_zero(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	memzero_explicit(req, sizeof(*req) + crypto_skcipher_reqsize(tfm));
}
/**
 * skcipher_request_set_callback() - set asynchronous callback function
 * @req: request handle
 * @flags: specify zero or an ORing of the flags
 *	   CRYPTO_TFM_REQ_MAY_BACKLOG the request queue may back log and
 *	   increase the wait queue beyond the initial maximum size;
 *	   CRYPTO_TFM_REQ_MAY_SLEEP the request processing may sleep
 * @compl: callback function pointer to be registered with the request handle
 * @data: The data pointer refers to memory that is not used by the kernel
 *	  crypto API, but provided to the callback function for it to use. Here,
 *	  the caller can provide a reference to memory the callback function can
 *	  operate on. As the callback function is invoked asynchronously to the
 *	  related functionality, it may need to access data structures of the
 *	  related functionality which can be referenced using this pointer. The
 *	  callback function can access the memory via the "data" field in the
 *	  crypto_async_request data structure provided to the callback function.
 *
 * This function allows setting the callback function that is triggered once the
 * cipher operation completes.
 *
 * The callback function is registered with the skcipher_request handle and
 * must comply with the following template::
 *
 *	void callback_function(struct crypto_async_request *req, int error)
 */
static inline void skcipher_request_set_callback(struct skcipher_request *req,
						 u32 flags,
						 crypto_completion_t compl,
						 void *data)
{
	req->base.complete = compl;
	req->base.data = data;
	req->base.flags = flags;
}
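
/*
 * Illustrative sketch (not part of the original header): a minimal completion
 * callback that wakes a sleeping caller. The "my_result" structure and the
 * function names are assumptions for the example; crypto_req_done() from
 * <linux/crypto.h> already implements this pattern for the common case.
 *
 *	struct my_result {
 *		struct completion completion;
 *		int err;
 *	};
 *
 *	static void my_skcipher_done(struct crypto_async_request *req, int error)
 *	{
 *		struct my_result *res = req->data;
 *
 *		if (error == -EINPROGRESS)
 *			return;		// backlogged request is now in progress
 *		res->err = error;
 *		complete(&res->completion);
 *	}
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      my_skcipher_done, &res);
 */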
/**
 * skcipher_request_set_crypt() - set data buffers
 * @req: request handle
 * @src: source scatter / gather list
 * @dst: destination scatter / gather list
 * @cryptlen: number of bytes to process from @src
 * @iv: IV for the cipher operation which must comply with the IV size defined
 *	by crypto_skcipher_ivsize
 *
 * This function allows setting of the source data and destination data
 * scatter / gather lists.
 *
 * For encryption, the source is treated as the plaintext and the
 * destination is the ciphertext. For a decryption operation, the use is
 * reversed - the source is the ciphertext and the destination is the plaintext.
 */
static inline void skcipher_request_set_crypt(
	struct skcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int cryptlen, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->cryptlen = cryptlen;
	req->iv = iv;
}
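
/*
 * Illustrative sketch (not part of the original header): in-place encryption
 * of a single kmalloc()ed buffer, using the same scatterlist as source and
 * destination. The buffer name and length are assumptions for the example;
 * the buffer must not be on the stack and "len" should respect the cipher's
 * block/chunk size requirements.
 *
 *	struct scatterlist sg;
 *
 *	sg_init_one(&sg, buf, len);
 *	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */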

#endif	/* _CRYPTO_SKCIPHER_H */