  1. // SPDX-License-Identifier: GPL-2.0+
  2. /*
  3. * Shared descriptors for aead, skcipher algorithms
  4. *
  5. * Copyright 2016-2018 NXP
  6. */
  7. #include "compat.h"
  8. #include "desc_constr.h"
  9. #include "caamalg_desc.h"
/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	/* Write the variable-length payload to the output sequence... */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	/*
	 * ...and read it into both class FIFOs (cipher + authentication),
	 * tagged with the caller-supplied message type and marked as the
	 * last data for both classes.
	 */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}
  20. /* Set DK bit in class 1 operation if shared */
  21. static inline void append_dec_op1(u32 *desc, u32 type)
  22. {
  23. u32 *jump_cmd, *uncond_jump_cmd;
  24. /* DK bit is valid only for AES */
  25. if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
  26. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  27. OP_ALG_DECRYPT);
  28. return;
  29. }
  30. jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
  31. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  32. OP_ALG_DECRYPT);
  33. uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
  34. set_jump_tgt_here(desc, jump_cmd);
  35. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  36. OP_ALG_DECRYPT | OP_ALG_AAI_DK);
  37. set_jump_tgt_here(desc, uncond_jump_cmd);
  38. }
  39. /**
  40. * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
  41. * (non-protocol) with no (null) encryption.
  42. * @desc: pointer to buffer used for descriptor construction
  43. * @adata: pointer to authentication transform definitions.
  44. * A split key is required for SEC Era < 6; the size of the split key
  45. * is specified in this case. Valid algorithm values - one of
  46. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  47. * with OP_ALG_AAI_HMAC_PRECOMP.
  48. * @icvsize: integrity check value (ICV) size (truncated or full)
  49. * @era: SEC Era
  50. */
  51. void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
  52. unsigned int icvsize, int era)
  53. {
  54. u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
  55. init_sh_desc(desc, HDR_SHARE_SERIAL);
  56. /* Skip if already shared */
  57. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  58. JUMP_COND_SHRD);
  59. if (era < 6) {
  60. if (adata->key_inline)
  61. append_key_as_imm(desc, adata->key_virt,
  62. adata->keylen_pad, adata->keylen,
  63. CLASS_2 | KEY_DEST_MDHA_SPLIT |
  64. KEY_ENC);
  65. else
  66. append_key(desc, adata->key_dma, adata->keylen,
  67. CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
  68. } else {
  69. append_proto_dkp(desc, adata);
  70. }
  71. set_jump_tgt_here(desc, key_jump_cmd);
  72. /* assoclen + cryptlen = seqinlen */
  73. append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
  74. /* Prepare to read and write cryptlen + assoclen bytes */
  75. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  76. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
  77. /*
  78. * MOVE_LEN opcode is not available in all SEC HW revisions,
  79. * thus need to do some magic, i.e. self-patch the descriptor
  80. * buffer.
  81. */
  82. read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
  83. MOVE_DEST_MATH3 |
  84. (0x6 << MOVE_LEN_SHIFT));
  85. write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
  86. MOVE_DEST_DESCBUF |
  87. MOVE_WAITCOMP |
  88. (0x8 << MOVE_LEN_SHIFT));
  89. /* Class 2 operation */
  90. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  91. OP_ALG_ENCRYPT);
  92. /* Read and write cryptlen bytes */
  93. aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
  94. set_move_tgt_here(desc, read_move_cmd);
  95. set_move_tgt_here(desc, write_move_cmd);
  96. append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
  97. append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
  98. MOVE_AUX_LS);
  99. /* Write ICV */
  100. append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
  101. LDST_SRCDST_BYTE_CONTEXT);
  102. #ifdef DEBUG
  103. print_hex_dump(KERN_ERR,
  104. "aead null enc shdesc@" __stringify(__LINE__)": ",
  105. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  106. #endif
  107. }
  108. EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
  109. /**
  110. * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
  111. * (non-protocol) with no (null) decryption.
  112. * @desc: pointer to buffer used for descriptor construction
  113. * @adata: pointer to authentication transform definitions.
  114. * A split key is required for SEC Era < 6; the size of the split key
  115. * is specified in this case. Valid algorithm values - one of
  116. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  117. * with OP_ALG_AAI_HMAC_PRECOMP.
  118. * @icvsize: integrity check value (ICV) size (truncated or full)
  119. * @era: SEC Era
  120. */
  121. void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
  122. unsigned int icvsize, int era)
  123. {
  124. u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
  125. init_sh_desc(desc, HDR_SHARE_SERIAL);
  126. /* Skip if already shared */
  127. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  128. JUMP_COND_SHRD);
  129. if (era < 6) {
  130. if (adata->key_inline)
  131. append_key_as_imm(desc, adata->key_virt,
  132. adata->keylen_pad, adata->keylen,
  133. CLASS_2 | KEY_DEST_MDHA_SPLIT |
  134. KEY_ENC);
  135. else
  136. append_key(desc, adata->key_dma, adata->keylen,
  137. CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
  138. } else {
  139. append_proto_dkp(desc, adata);
  140. }
  141. set_jump_tgt_here(desc, key_jump_cmd);
  142. /* Class 2 operation */
  143. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  144. OP_ALG_DECRYPT | OP_ALG_ICV_ON);
  145. /* assoclen + cryptlen = seqoutlen */
  146. append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
  147. /* Prepare to read and write cryptlen + assoclen bytes */
  148. append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
  149. append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
  150. /*
  151. * MOVE_LEN opcode is not available in all SEC HW revisions,
  152. * thus need to do some magic, i.e. self-patch the descriptor
  153. * buffer.
  154. */
  155. read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
  156. MOVE_DEST_MATH2 |
  157. (0x6 << MOVE_LEN_SHIFT));
  158. write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
  159. MOVE_DEST_DESCBUF |
  160. MOVE_WAITCOMP |
  161. (0x8 << MOVE_LEN_SHIFT));
  162. /* Read and write cryptlen bytes */
  163. aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
  164. /*
  165. * Insert a NOP here, since we need at least 4 instructions between
  166. * code patching the descriptor buffer and the location being patched.
  167. */
  168. jump_cmd = append_jump(desc, JUMP_TEST_ALL);
  169. set_jump_tgt_here(desc, jump_cmd);
  170. set_move_tgt_here(desc, read_move_cmd);
  171. set_move_tgt_here(desc, write_move_cmd);
  172. append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
  173. append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
  174. MOVE_AUX_LS);
  175. append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
  176. /* Load ICV */
  177. append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
  178. FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
  179. #ifdef DEBUG
  180. print_hex_dump(KERN_ERR,
  181. "aead null dec shdesc@" __stringify(__LINE__)": ",
  182. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  183. #endif
  184. }
  185. EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
/*
 * init_sh_desc_key_aead - shared-descriptor prologue for AEAD algorithms:
 * initialize the header (context registers saved) and, unless the
 * descriptor is entered in the shared state, load the class 2 (split
 * authentication) key and class 1 (cipher) key.
 *
 * For rfc3686, the nonce is carried at the tail of the cipher key; it is
 * stripped from the key load and staged into the CONTEXT1 register
 * (offset 16) via the output FIFO.
 */
static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce, int era)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		/* SEC Era >= 6: derive the split key via the DKP protocol */
		append_proto_dkp(desc, adata);
	}

	/* Cipher key: immediate data when it fits, by reference otherwise */
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}
  236. /**
  237. * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
  238. * (non-protocol).
  239. * @desc: pointer to buffer used for descriptor construction
  240. * @cdata: pointer to block cipher transform definitions
  241. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  242. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  243. * @adata: pointer to authentication transform definitions.
  244. * A split key is required for SEC Era < 6; the size of the split key
  245. * is specified in this case. Valid algorithm values - one of
  246. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  247. * with OP_ALG_AAI_HMAC_PRECOMP.
  248. * @ivsize: initialization vector size
  249. * @icvsize: integrity check value (ICV) size (truncated or full)
  250. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  251. * @nonce: pointer to rfc3686 nonce
  252. * @ctx1_iv_off: IV offset in CONTEXT1 register
  253. * @is_qi: true when called from caam/qi
  254. * @era: SEC Era
  255. */
  256. void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
  257. struct alginfo *adata, unsigned int ivsize,
  258. unsigned int icvsize, const bool is_rfc3686,
  259. u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
  260. int era)
  261. {
  262. /* Note: Context registers are saved. */
  263. init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
  264. /* Class 2 operation */
  265. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  266. OP_ALG_ENCRYPT);
  267. if (is_qi) {
  268. u32 *wait_load_cmd;
  269. /* REG3 = assoclen */
  270. append_seq_load(desc, 4, LDST_CLASS_DECO |
  271. LDST_SRCDST_WORD_DECO_MATH3 |
  272. (4 << LDST_OFFSET_SHIFT));
  273. wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  274. JUMP_COND_CALM | JUMP_COND_NCP |
  275. JUMP_COND_NOP | JUMP_COND_NIP |
  276. JUMP_COND_NIFP);
  277. set_jump_tgt_here(desc, wait_load_cmd);
  278. append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
  279. LDST_SRCDST_BYTE_CONTEXT |
  280. (ctx1_iv_off << LDST_OFFSET_SHIFT));
  281. }
  282. /* Read and write assoclen bytes */
  283. if (is_qi || era < 3) {
  284. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  285. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
  286. } else {
  287. append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  288. append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  289. }
  290. /* Skip assoc data */
  291. append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
  292. /* read assoc before reading payload */
  293. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
  294. FIFOLDST_VLF);
  295. /* Load Counter into CONTEXT1 reg */
  296. if (is_rfc3686)
  297. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  298. LDST_SRCDST_BYTE_CONTEXT |
  299. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  300. LDST_OFFSET_SHIFT));
  301. /* Class 1 operation */
  302. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  303. OP_ALG_ENCRYPT);
  304. /* Read and write cryptlen bytes */
  305. append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  306. append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  307. aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
  308. /* Write ICV */
  309. append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
  310. LDST_SRCDST_BYTE_CONTEXT);
  311. #ifdef DEBUG
  312. print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
  313. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  314. #endif
  315. }
  316. EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
  317. /**
  318. * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
  319. * (non-protocol).
  320. * @desc: pointer to buffer used for descriptor construction
  321. * @cdata: pointer to block cipher transform definitions
  322. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  323. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  324. * @adata: pointer to authentication transform definitions.
  325. * A split key is required for SEC Era < 6; the size of the split key
  326. * is specified in this case. Valid algorithm values - one of
  327. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  328. * with OP_ALG_AAI_HMAC_PRECOMP.
  329. * @ivsize: initialization vector size
  330. * @icvsize: integrity check value (ICV) size (truncated or full)
  331. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  332. * @nonce: pointer to rfc3686 nonce
  333. * @ctx1_iv_off: IV offset in CONTEXT1 register
  334. * @is_qi: true when called from caam/qi
  335. * @era: SEC Era
  336. */
  337. void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
  338. struct alginfo *adata, unsigned int ivsize,
  339. unsigned int icvsize, const bool geniv,
  340. const bool is_rfc3686, u32 *nonce,
  341. const u32 ctx1_iv_off, const bool is_qi, int era)
  342. {
  343. /* Note: Context registers are saved. */
  344. init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
  345. /* Class 2 operation */
  346. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  347. OP_ALG_DECRYPT | OP_ALG_ICV_ON);
  348. if (is_qi) {
  349. u32 *wait_load_cmd;
  350. /* REG3 = assoclen */
  351. append_seq_load(desc, 4, LDST_CLASS_DECO |
  352. LDST_SRCDST_WORD_DECO_MATH3 |
  353. (4 << LDST_OFFSET_SHIFT));
  354. wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  355. JUMP_COND_CALM | JUMP_COND_NCP |
  356. JUMP_COND_NOP | JUMP_COND_NIP |
  357. JUMP_COND_NIFP);
  358. set_jump_tgt_here(desc, wait_load_cmd);
  359. if (!geniv)
  360. append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
  361. LDST_SRCDST_BYTE_CONTEXT |
  362. (ctx1_iv_off << LDST_OFFSET_SHIFT));
  363. }
  364. /* Read and write assoclen bytes */
  365. if (is_qi || era < 3) {
  366. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  367. if (geniv)
  368. append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
  369. ivsize);
  370. else
  371. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
  372. CAAM_CMD_SZ);
  373. } else {
  374. append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  375. if (geniv)
  376. append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
  377. ivsize);
  378. else
  379. append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
  380. CAAM_CMD_SZ);
  381. }
  382. /* Skip assoc data */
  383. append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
  384. /* read assoc before reading payload */
  385. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
  386. KEY_VLF);
  387. if (geniv) {
  388. append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
  389. LDST_SRCDST_BYTE_CONTEXT |
  390. (ctx1_iv_off << LDST_OFFSET_SHIFT));
  391. append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
  392. (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
  393. }
  394. /* Load Counter into CONTEXT1 reg */
  395. if (is_rfc3686)
  396. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  397. LDST_SRCDST_BYTE_CONTEXT |
  398. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  399. LDST_OFFSET_SHIFT));
  400. /* Choose operation */
  401. if (ctx1_iv_off)
  402. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  403. OP_ALG_DECRYPT);
  404. else
  405. append_dec_op1(desc, cdata->algtype);
  406. /* Read and write cryptlen bytes */
  407. append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
  408. append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
  409. aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
  410. /* Load ICV */
  411. append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
  412. FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
  413. #ifdef DEBUG
  414. print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
  415. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  416. #endif
  417. }
  418. EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
  419. /**
  420. * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
  421. * (non-protocol) with HW-generated initialization
  422. * vector.
  423. * @desc: pointer to buffer used for descriptor construction
  424. * @cdata: pointer to block cipher transform definitions
  425. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  426. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  427. * @adata: pointer to authentication transform definitions.
  428. * A split key is required for SEC Era < 6; the size of the split key
  429. * is specified in this case. Valid algorithm values - one of
  430. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  431. * with OP_ALG_AAI_HMAC_PRECOMP.
  432. * @ivsize: initialization vector size
  433. * @icvsize: integrity check value (ICV) size (truncated or full)
  434. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  435. * @nonce: pointer to rfc3686 nonce
  436. * @ctx1_iv_off: IV offset in CONTEXT1 register
  437. * @is_qi: true when called from caam/qi
  438. * @era: SEC Era
  439. */
  440. void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
  441. struct alginfo *adata, unsigned int ivsize,
  442. unsigned int icvsize, const bool is_rfc3686,
  443. u32 *nonce, const u32 ctx1_iv_off,
  444. const bool is_qi, int era)
  445. {
  446. u32 geniv, moveiv;
  447. /* Note: Context registers are saved. */
  448. init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
  449. if (is_qi) {
  450. u32 *wait_load_cmd;
  451. /* REG3 = assoclen */
  452. append_seq_load(desc, 4, LDST_CLASS_DECO |
  453. LDST_SRCDST_WORD_DECO_MATH3 |
  454. (4 << LDST_OFFSET_SHIFT));
  455. wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  456. JUMP_COND_CALM | JUMP_COND_NCP |
  457. JUMP_COND_NOP | JUMP_COND_NIP |
  458. JUMP_COND_NIFP);
  459. set_jump_tgt_here(desc, wait_load_cmd);
  460. }
  461. if (is_rfc3686) {
  462. if (is_qi)
  463. append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
  464. LDST_SRCDST_BYTE_CONTEXT |
  465. (ctx1_iv_off << LDST_OFFSET_SHIFT));
  466. goto copy_iv;
  467. }
  468. /* Generate IV */
  469. geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
  470. NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
  471. NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
  472. append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
  473. LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
  474. append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
  475. append_move(desc, MOVE_WAITCOMP |
  476. MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
  477. (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
  478. (ivsize << MOVE_LEN_SHIFT));
  479. append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
  480. copy_iv:
  481. /* Copy IV to class 1 context */
  482. append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
  483. (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
  484. (ivsize << MOVE_LEN_SHIFT));
  485. /* Return to encryption */
  486. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  487. OP_ALG_ENCRYPT);
  488. /* Read and write assoclen bytes */
  489. if (is_qi || era < 3) {
  490. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  491. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
  492. } else {
  493. append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  494. append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  495. }
  496. /* Skip assoc data */
  497. append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
  498. /* read assoc before reading payload */
  499. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
  500. KEY_VLF);
  501. /* Copy iv from outfifo to class 2 fifo */
  502. moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
  503. NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
  504. append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
  505. LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
  506. append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
  507. LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
  508. /* Load Counter into CONTEXT1 reg */
  509. if (is_rfc3686)
  510. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  511. LDST_SRCDST_BYTE_CONTEXT |
  512. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  513. LDST_OFFSET_SHIFT));
  514. /* Class 1 operation */
  515. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  516. OP_ALG_ENCRYPT);
  517. /* Will write ivsize + cryptlen */
  518. append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  519. /* Not need to reload iv */
  520. append_seq_fifo_load(desc, ivsize,
  521. FIFOLD_CLASS_SKIP);
  522. /* Will read cryptlen */
  523. append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  524. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
  525. FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
  526. append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
  527. /* Write ICV */
  528. append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
  529. LDST_SRCDST_BYTE_CONTEXT);
  530. #ifdef DEBUG
  531. print_hex_dump(KERN_ERR,
  532. "aead givenc shdesc@" __stringify(__LINE__)": ",
  533. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  534. #endif
  535. }
  536. EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
  537. /**
  538. * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
  539. * @desc: pointer to buffer used for descriptor construction
  540. * @cdata: pointer to block cipher transform definitions
  541. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
  542. * @ivsize: initialization vector size
  543. * @icvsize: integrity check value (ICV) size (truncated or full)
  544. * @is_qi: true when called from caam/qi
  545. */
  546. void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
  547. unsigned int ivsize, unsigned int icvsize,
  548. const bool is_qi)
  549. {
  550. u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
  551. *zero_assoc_jump_cmd2;
  552. init_sh_desc(desc, HDR_SHARE_SERIAL);
  553. /* skip key loading if they are loaded due to sharing */
  554. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  555. JUMP_COND_SHRD);
  556. if (cdata->key_inline)
  557. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  558. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  559. else
  560. append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
  561. KEY_DEST_CLASS_REG);
  562. set_jump_tgt_here(desc, key_jump_cmd);
  563. /* class 1 operation */
  564. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  565. OP_ALG_ENCRYPT);
  566. if (is_qi) {
  567. u32 *wait_load_cmd;
  568. /* REG3 = assoclen */
  569. append_seq_load(desc, 4, LDST_CLASS_DECO |
  570. LDST_SRCDST_WORD_DECO_MATH3 |
  571. (4 << LDST_OFFSET_SHIFT));
  572. wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  573. JUMP_COND_CALM | JUMP_COND_NCP |
  574. JUMP_COND_NOP | JUMP_COND_NIP |
  575. JUMP_COND_NIFP);
  576. set_jump_tgt_here(desc, wait_load_cmd);
  577. append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
  578. ivsize);
  579. } else {
  580. append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
  581. CAAM_CMD_SZ);
  582. }
  583. /* if assoclen + cryptlen is ZERO, skip to ICV write */
  584. zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
  585. JUMP_COND_MATH_Z);
  586. if (is_qi)
  587. append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
  588. FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
  589. /* if assoclen is ZERO, skip reading the assoc data */
  590. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  591. zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
  592. JUMP_COND_MATH_Z);
  593. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
  594. /* skip assoc data */
  595. append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
  596. /* cryptlen = seqinlen - assoclen */
  597. append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
  598. /* if cryptlen is ZERO jump to zero-payload commands */
  599. zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
  600. JUMP_COND_MATH_Z);
  601. /* read assoc data */
  602. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
  603. FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
  604. set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
  605. append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  606. /* write encrypted data */
  607. append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
  608. /* read payload data */
  609. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
  610. FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
  611. /* jump to ICV writing */
  612. if (is_qi)
  613. append_jump(desc, JUMP_TEST_ALL | 4);
  614. else
  615. append_jump(desc, JUMP_TEST_ALL | 2);
  616. /* zero-payload commands */
  617. set_jump_tgt_here(desc, zero_payload_jump_cmd);
  618. /* read assoc data */
  619. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
  620. FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
  621. if (is_qi)
  622. /* jump to ICV writing */
  623. append_jump(desc, JUMP_TEST_ALL | 2);
  624. /* There is no input data */
  625. set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
  626. if (is_qi)
  627. append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
  628. FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
  629. FIFOLD_TYPE_LAST1);
  630. /* write ICV */
  631. append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
  632. LDST_SRCDST_BYTE_CONTEXT);
  633. #ifdef DEBUG
  634. print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
  635. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  636. #endif
  637. }
  638. EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds a CAAM shared descriptor that decrypts GCM payload data and
 * verifies the ICV (OP_ALG_ICV_ON). Zero-length AAD and zero-length
 * payload are both handled via conditional jumps over the corresponding
 * SEQ FIFO commands.
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
                           unsigned int ivsize, unsigned int icvsize,
                           const bool is_qi)
{
        u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* skip key loading if they are loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL |
                                   JUMP_TEST_ALL | JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                           KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                         OP_ALG_DECRYPT | OP_ALG_ICV_ON);

        if (is_qi) {
                u32 *wait_load_cmd;

                /* REG3 = assoclen */
                append_seq_load(desc, 4, LDST_CLASS_DECO |
                                LDST_SRCDST_WORD_DECO_MATH3 |
                                (4 << LDST_OFFSET_SHIFT));

                /*
                 * Wait until the assoclen load above has settled before
                 * reading the IV (CALM/NCP/NOP/NIP/NIFP condition set).
                 */
                wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                                            JUMP_COND_CALM | JUMP_COND_NCP |
                                            JUMP_COND_NOP | JUMP_COND_NIP |
                                            JUMP_COND_NIFP);
                set_jump_tgt_here(desc, wait_load_cmd);

                /* Read IV into class 1 context */
                append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
                                     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
        }

        /* if assoclen is ZERO, skip reading the assoc data */
        append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
        zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
                                           JUMP_COND_MATH_Z);

        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* read assoc data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                             FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

        set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

        /* cryptlen = seqoutlen - assoclen */
        append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* jump to zero-payload command if cryptlen is zero */
        zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
                                            JUMP_COND_MATH_Z);

        append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* store encrypted data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* read payload data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                             FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

        /* zero-payload command */
        set_jump_tgt_here(desc, zero_payload_jump_cmd);

        /* read ICV - FIFOLD_TYPE_LAST1 flushes/finalizes class 1 */
        append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
                             FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
        print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
                       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds a CAAM shared descriptor for rfc4106 (GCM with a 4-byte salt kept
 * immediately after the key in @cdata->key_virt) encryption + ICV generation.
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
                               unsigned int ivsize, unsigned int icvsize,
                               const bool is_qi)
{
        u32 *key_jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                                   JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                           KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                         OP_ALG_ENCRYPT);

        if (is_qi) {
                u32 *wait_load_cmd;

                /* REG3 = assoclen */
                append_seq_load(desc, 4, LDST_CLASS_DECO |
                                LDST_SRCDST_WORD_DECO_MATH3 |
                                (4 << LDST_OFFSET_SHIFT));

                /* Wait for the assoclen load to settle before reading IV */
                wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                                            JUMP_COND_CALM | JUMP_COND_NCP |
                                            JUMP_COND_NOP | JUMP_COND_NIP |
                                            JUMP_COND_NIFP);
                set_jump_tgt_here(desc, wait_load_cmd);

                /* Read salt and IV - salt is the 4 bytes after the key */
                append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
                                        cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
                                        FIFOLD_TYPE_IV);
                append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
                                     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
        }

        /*
         * VSIL = assoclen - ivsize; REG3 presumably holds assoclen
         * including the IV - NOTE(review): confirm against callers.
         */
        append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* Read assoc data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                             FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

        /* Skip IV */
        append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

        /* Will read cryptlen bytes */
        append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

        /* Skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* cryptlen = seqoutlen - assoclen */
        append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

        /* Write encrypted data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* Read payload data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                             FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

        /* Write ICV from class 1 context */
        append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
                         LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
                       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds a CAAM shared descriptor for rfc4106 decryption with ICV
 * verification (OP_ALG_ICV_ON). Mirrors cnstr_shdsc_rfc4106_encap but
 * sizes sequences from SEQOUTLEN and ends with an ICV read.
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
                               unsigned int ivsize, unsigned int icvsize,
                               const bool is_qi)
{
        u32 *key_jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                                   JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                  cdata->keylen, CLASS_1 |
                                  KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                           KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                         OP_ALG_DECRYPT | OP_ALG_ICV_ON);

        if (is_qi) {
                u32 *wait_load_cmd;

                /* REG3 = assoclen */
                append_seq_load(desc, 4, LDST_CLASS_DECO |
                                LDST_SRCDST_WORD_DECO_MATH3 |
                                (4 << LDST_OFFSET_SHIFT));

                /* Wait for the assoclen load to settle before reading IV */
                wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                                            JUMP_COND_CALM | JUMP_COND_NCP |
                                            JUMP_COND_NOP | JUMP_COND_NIP |
                                            JUMP_COND_NIFP);
                set_jump_tgt_here(desc, wait_load_cmd);

                /* Read salt and IV - salt is the 4 bytes after the key */
                append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
                                        cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
                                        FIFOLD_TYPE_IV);
                append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
                                     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
        }

        /*
         * VSIL = assoclen - ivsize; REG3 presumably holds assoclen
         * including the IV - NOTE(review): confirm against callers.
         */
        append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* Read assoc data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                             FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

        /* Skip IV */
        append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

        /* Will read cryptlen bytes */
        append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

        /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

        /* Skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* Will write cryptlen bytes */
        append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* Store payload data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* Read encrypted data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                             FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

        /* Read ICV - FIFOLD_TYPE_LAST1 finalizes class 1 */
        append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
                             FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
                       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds a CAAM shared descriptor for rfc4543 (GMAC) encapsulation. The
 * whole input (assoclen + cryptlen) is authenticated as AAD; the payload
 * is additionally copied to the output via a self-patched MOVE command.
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
                               unsigned int ivsize, unsigned int icvsize,
                               const bool is_qi)
{
        u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                                   JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                           KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                         OP_ALG_ENCRYPT);

        if (is_qi) {
                /* assoclen is not needed, skip it */
                append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

                /* Read salt and IV - salt is the 4 bytes after the key */
                append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
                                        cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
                                        FIFOLD_TYPE_IV);
                append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
                                     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
        }

        /* assoclen + cryptlen = seqinlen */
        append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

        /*
         * MOVE_LEN opcode is not available in all SEC HW revisions,
         * thus need to do some magic, i.e. self-patch the descriptor
         * buffer.
         */
        read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
                                    (0x6 << MOVE_LEN_SHIFT));
        write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
                                     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

        /* Will read assoclen + cryptlen bytes */
        append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* Will write assoclen + cryptlen bytes */
        append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* Read and write assoclen + cryptlen bytes */
        aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

        /* Patch the two MOVE commands now that the target is known */
        set_move_tgt_here(desc, read_move_cmd);
        set_move_tgt_here(desc, write_move_cmd);
        append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

        /* Move payload data to OFIFO */
        append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

        /* Write ICV from class 1 context */
        append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
                         LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
                       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds a CAAM shared descriptor for rfc4543 (GMAC) decapsulation with
 * ICV verification. Input is in-snooped into both classes; the payload is
 * forwarded to the output via a self-patched MOVE command.
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
                               unsigned int ivsize, unsigned int icvsize,
                               const bool is_qi)
{
        u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                                   JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                           KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                         OP_ALG_DECRYPT | OP_ALG_ICV_ON);

        if (is_qi) {
                /* assoclen is not needed, skip it */
                append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

                /* Read salt and IV - salt is the 4 bytes after the key */
                append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
                                        cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
                                        FIFOLD_TYPE_IV);
                append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
                                     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
        }

        /* assoclen + cryptlen = seqoutlen */
        append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /*
         * MOVE_LEN opcode is not available in all SEC HW revisions,
         * thus need to do some magic, i.e. self-patch the descriptor
         * buffer.
         */
        read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
                                    (0x6 << MOVE_LEN_SHIFT));
        write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
                                     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

        /* Will read assoclen + cryptlen bytes */
        append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* Will write assoclen + cryptlen bytes */
        append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* Store payload data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* In-snoop assoclen + cryptlen data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
                             FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

        /* Patch the two MOVE commands now that the target is known */
        set_move_tgt_here(desc, read_move_cmd);
        set_move_tgt_here(desc, write_move_cmd);
        append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

        /* Move payload data to OFIFO */
        append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

        /* Re-enable automatic info FIFO handling before the ICV read */
        append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

        /* Read ICV */
        append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
                             FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
                       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
/* For skcipher encrypt and decrypt, read from req->src and write to req->dst */
static inline void skcipher_append_src_dst(u32 *desc)
{
        /* Variable output/input sequence lengths both track seqinlen */
        append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
        append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
        /* Read the whole input as class 1 message data (LAST1 finalizes) */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
                             KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
        /* Write the same number of bytes to the output sequence */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
  1024. /**
  1025. * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
  1026. * @desc: pointer to buffer used for descriptor construction
  1027. * @cdata: pointer to block cipher transform definitions
  1028. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1029. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  1030. * @ivsize: initialization vector size
  1031. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1032. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1033. */
  1034. void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
  1035. unsigned int ivsize, const bool is_rfc3686,
  1036. const u32 ctx1_iv_off)
  1037. {
  1038. u32 *key_jump_cmd;
  1039. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1040. /* Skip if already shared */
  1041. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1042. JUMP_COND_SHRD);
  1043. /* Load class1 key only */
  1044. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1045. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1046. /* Load nonce into CONTEXT1 reg */
  1047. if (is_rfc3686) {
  1048. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1049. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1050. LDST_CLASS_IND_CCB |
  1051. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1052. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1053. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1054. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1055. }
  1056. set_jump_tgt_here(desc, key_jump_cmd);
  1057. /* Load iv */
  1058. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1059. LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
  1060. /* Load counter into CONTEXT1 reg */
  1061. if (is_rfc3686)
  1062. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1063. LDST_SRCDST_BYTE_CONTEXT |
  1064. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1065. LDST_OFFSET_SHIFT));
  1066. /* Load operation */
  1067. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1068. OP_ALG_ENCRYPT);
  1069. /* Perform operation */
  1070. skcipher_append_src_dst(desc);
  1071. #ifdef DEBUG
  1072. print_hex_dump(KERN_ERR,
  1073. "skcipher enc shdesc@" __stringify(__LINE__)": ",
  1074. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1075. #endif
  1076. }
  1077. EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
  1078. /**
  1079. * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
  1080. * @desc: pointer to buffer used for descriptor construction
  1081. * @cdata: pointer to block cipher transform definitions
  1082. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1083. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  1084. * @ivsize: initialization vector size
  1085. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1086. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1087. */
  1088. void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
  1089. unsigned int ivsize, const bool is_rfc3686,
  1090. const u32 ctx1_iv_off)
  1091. {
  1092. u32 *key_jump_cmd;
  1093. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1094. /* Skip if already shared */
  1095. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1096. JUMP_COND_SHRD);
  1097. /* Load class1 key only */
  1098. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1099. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1100. /* Load nonce into CONTEXT1 reg */
  1101. if (is_rfc3686) {
  1102. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1103. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1104. LDST_CLASS_IND_CCB |
  1105. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1106. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1107. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1108. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1109. }
  1110. set_jump_tgt_here(desc, key_jump_cmd);
  1111. /* load IV */
  1112. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1113. LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
  1114. /* Load counter into CONTEXT1 reg */
  1115. if (is_rfc3686)
  1116. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1117. LDST_SRCDST_BYTE_CONTEXT |
  1118. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1119. LDST_OFFSET_SHIFT));
  1120. /* Choose operation */
  1121. if (ctx1_iv_off)
  1122. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1123. OP_ALG_DECRYPT);
  1124. else
  1125. append_dec_op1(desc, cdata->algtype);
  1126. /* Perform operation */
  1127. skcipher_append_src_dst(desc);
  1128. #ifdef DEBUG
  1129. print_hex_dump(KERN_ERR,
  1130. "skcipher dec shdesc@" __stringify(__LINE__)": ",
  1131. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1132. #endif
  1133. }
  1134. EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
  1135. /**
  1136. * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
  1137. * @desc: pointer to buffer used for descriptor construction
  1138. * @cdata: pointer to block cipher transform definitions
  1139. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  1140. */
  1141. void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
  1142. {
  1143. __be64 sector_size = cpu_to_be64(512);
  1144. u32 *key_jump_cmd;
  1145. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1146. /* Skip if already shared */
  1147. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1148. JUMP_COND_SHRD);
  1149. /* Load class1 keys only */
  1150. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1151. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1152. /* Load sector size with index 40 bytes (0x28) */
  1153. append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
  1154. LDST_SRCDST_BYTE_CONTEXT |
  1155. (0x28 << LDST_OFFSET_SHIFT));
  1156. set_jump_tgt_here(desc, key_jump_cmd);
  1157. /*
  1158. * create sequence for loading the sector index
  1159. * Upper 8B of IV - will be used as sector index
  1160. * Lower 8B of IV - will be discarded
  1161. */
  1162. append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1163. (0x20 << LDST_OFFSET_SHIFT));
  1164. append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
  1165. /* Load operation */
  1166. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1167. OP_ALG_ENCRYPT);
  1168. /* Perform operation */
  1169. skcipher_append_src_dst(desc);
  1170. #ifdef DEBUG
  1171. print_hex_dump(KERN_ERR,
  1172. "xts skcipher enc shdesc@" __stringify(__LINE__) ": ",
  1173. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1174. #endif
  1175. }
  1176. EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
  1177. /**
  1178. * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
  1179. * @desc: pointer to buffer used for descriptor construction
  1180. * @cdata: pointer to block cipher transform definitions
  1181. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  1182. */
  1183. void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
  1184. {
  1185. __be64 sector_size = cpu_to_be64(512);
  1186. u32 *key_jump_cmd;
  1187. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1188. /* Skip if already shared */
  1189. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1190. JUMP_COND_SHRD);
  1191. /* Load class1 key only */
  1192. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1193. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1194. /* Load sector size with index 40 bytes (0x28) */
  1195. append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
  1196. LDST_SRCDST_BYTE_CONTEXT |
  1197. (0x28 << LDST_OFFSET_SHIFT));
  1198. set_jump_tgt_here(desc, key_jump_cmd);
  1199. /*
  1200. * create sequence for loading the sector index
  1201. * Upper 8B of IV - will be used as sector index
  1202. * Lower 8B of IV - will be discarded
  1203. */
  1204. append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1205. (0x20 << LDST_OFFSET_SHIFT));
  1206. append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
  1207. /* Load operation */
  1208. append_dec_op1(desc, cdata->algtype);
  1209. /* Perform operation */
  1210. skcipher_append_src_dst(desc);
  1211. #ifdef DEBUG
  1212. print_hex_dump(KERN_ERR,
  1213. "xts skcipher dec shdesc@" __stringify(__LINE__) ": ",
  1214. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1215. #endif
  1216. }
  1217. EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
/* Module metadata */
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM descriptor support");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");