caamalg_desc.c

/*
 * Shared descriptors for aead, ablkcipher algorithms
 *
 * Copyright 2016 NXP
 */

#include "compat.h"
#include "desc_constr.h"
#include "caamalg_desc.h"

/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
                        KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}

/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
        u32 *jump_cmd, *uncond_jump_cmd;

        /* DK bit is valid only for AES */
        if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
                append_operation(desc, type | OP_ALG_AS_INITFINAL |
                                OP_ALG_DECRYPT);
                return;
        }

        jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
        append_operation(desc, type | OP_ALG_AS_INITFINAL |
                        OP_ALG_DECRYPT);
        uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
        set_jump_tgt_here(desc, jump_cmd);
        append_operation(desc, type | OP_ALG_AS_INITFINAL |
                        OP_ALG_DECRYPT | OP_ALG_AAI_DK);
        set_jump_tgt_here(desc, uncond_jump_cmd);
}

/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
                unsigned int icvsize)
{
        u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip if already shared */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);
        if (adata->key_inline)
                append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
                                adata->keylen, CLASS_2 | KEY_DEST_MDHA_SPLIT |
                                KEY_ENC);
        else
                append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
                                KEY_DEST_MDHA_SPLIT | KEY_ENC);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* assoclen + cryptlen = seqinlen */
        append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* Prepare to read and write cryptlen + assoclen bytes */
        append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /*
         * MOVE_LEN opcode is not available in all SEC HW revisions,
         * thus need to do some magic, i.e. self-patch the descriptor
         * buffer.
         */
        read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
                        MOVE_DEST_MATH3 |
                        (0x6 << MOVE_LEN_SHIFT));
        write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
                        MOVE_DEST_DESCBUF |
                        MOVE_WAITCOMP |
                        (0x8 << MOVE_LEN_SHIFT));

        /* Class 2 operation */
        append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* Read and write cryptlen bytes */
        aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

        set_move_tgt_here(desc, read_move_cmd);
        set_move_tgt_here(desc, write_move_cmd);
        append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
        append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
                        MOVE_AUX_LS);

        /* Write ICV */
        append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
                        LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "aead null enc shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
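
/*
 * Illustrative caller-side sketch (not part of this file; sh_desc, split_key,
 * split_key_len, split_key_pad_len and authsize are assumed names): a driver
 * typically fills a struct alginfo with its MDHA split key and builds the
 * shared descriptor in a driver-owned buffer before DMA-mapping it.
 *
 *	struct alginfo adata = {
 *		.algtype    = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP,
 *		.keylen     = split_key_len,		// split key length
 *		.keylen_pad = split_key_pad_len,	// padded split key length
 *		.key_virt   = split_key,		// kernel VA of the split key
 *		.key_inline = true,			// inline the key into the descriptor
 *	};
 *
 *	cnstr_shdsc_aead_null_encap(sh_desc, &adata, authsize);
 *	// sh_desc now holds desc_bytes(sh_desc) bytes, ready to be DMA mapped
 */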

/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *                               (non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
                unsigned int icvsize)
{
        u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip if already shared */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);
        if (adata->key_inline)
                append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
                                adata->keylen, CLASS_2 |
                                KEY_DEST_MDHA_SPLIT | KEY_ENC);
        else
                append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
                                KEY_DEST_MDHA_SPLIT | KEY_ENC);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 2 operation */
        append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_DECRYPT | OP_ALG_ICV_ON);

        /* assoclen + cryptlen = seqoutlen */
        append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* Prepare to read and write cryptlen + assoclen bytes */
        append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
        append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

        /*
         * MOVE_LEN opcode is not available in all SEC HW revisions,
         * thus need to do some magic, i.e. self-patch the descriptor
         * buffer.
         */
        read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
                        MOVE_DEST_MATH2 |
                        (0x6 << MOVE_LEN_SHIFT));
        write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
                        MOVE_DEST_DESCBUF |
                        MOVE_WAITCOMP |
                        (0x8 << MOVE_LEN_SHIFT));

        /* Read and write cryptlen bytes */
        aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

        /*
         * Insert a NOP here, since we need at least 4 instructions between
         * code patching the descriptor buffer and the location being patched.
         */
        jump_cmd = append_jump(desc, JUMP_TEST_ALL);
        set_jump_tgt_here(desc, jump_cmd);

        set_move_tgt_here(desc, read_move_cmd);
        set_move_tgt_here(desc, write_move_cmd);
        append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
        append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
                        MOVE_AUX_LS);
        append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

        /* Load ICV */
        append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
                        FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "aead null dec shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);

static void init_sh_desc_key_aead(u32 * const desc,
                struct alginfo * const cdata,
                struct alginfo * const adata,
                const bool is_rfc3686, u32 *nonce)
{
        u32 *key_jump_cmd;
        unsigned int enckeylen = cdata->keylen;

        /* Note: Context registers are saved. */
        init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

        /* Skip if already shared */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);

        /*
         * RFC3686 specific:
         *	| key = {AUTH_KEY, ENC_KEY, NONCE}
         *	| enckeylen = encryption key size + nonce size
         */
        if (is_rfc3686)
                enckeylen -= CTR_RFC3686_NONCE_SIZE;

        if (adata->key_inline)
                append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
                                adata->keylen, CLASS_2 |
                                KEY_DEST_MDHA_SPLIT | KEY_ENC);
        else
                append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
                                KEY_DEST_MDHA_SPLIT | KEY_ENC);

        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, enckeylen,
                                enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
                                KEY_DEST_CLASS_REG);

        /* Load nonce into CONTEXT1 reg */
        if (is_rfc3686) {
                append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
                                LDST_CLASS_IND_CCB |
                                LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
                append_move(desc,
                                MOVE_SRC_OUTFIFO |
                                MOVE_DEST_CLASS1CTX |
                                (16 << MOVE_OFFSET_SHIFT) |
                                (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
        }

        set_jump_tgt_here(desc, key_jump_cmd);
}
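
/*
 * Caller-side note for the rfc3686 case above (an assumption about the
 * caller, not something defined in this file): from cdata's point of view the
 * cipher key material appears to be laid out as {ENC_KEY, NONCE}, with
 * cdata->keylen covering both, so the nonce pointer handed to the
 * cnstr_shdsc_aead_* constructors would typically be derived as
 *
 *	u32 *nonce = (u32 *)(cdata->key_virt + cdata->keylen -
 *			     CTR_RFC3686_NONCE_SIZE);
 */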

/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
                struct alginfo *adata, unsigned int icvsize,
                const bool is_rfc3686, u32 *nonce,
                const u32 ctx1_iv_off)
{
        /* Note: Context registers are saved. */
        init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

        /* Class 2 operation */
        append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* Read and write assoclen bytes */
        append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* Skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* read assoc before reading payload */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
                        FIFOLDST_VLF);

        /* Load Counter into CONTEXT1 reg */
        if (is_rfc3686)
                append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
                                LDST_SRCDST_BYTE_CONTEXT |
                                ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
                                LDST_OFFSET_SHIFT));

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* Read and write cryptlen bytes */
        append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
        append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
        aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

        /* Write ICV */
        append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
                        LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
        print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
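
/*
 * Note on ctx1_iv_off (a caller-side convention assumed here, not defined in
 * this file): for CBC modes the IV sits at the start of CONTEXT1, so 0 is
 * passed; for CTR modes the IV lives in the upper half of CONTEXT1, and for
 * rfc3686 it is additionally preceded by the nonce that
 * init_sh_desc_key_aead() moves to offset 16, e.g. something like
 *
 *	u32 ctx1_iv_off = 0;			// e.g. cbc(aes)
 *	if (ctr_mode)
 *		ctx1_iv_off = 16;		// IV in upper half of CONTEXT1
 *	if (is_rfc3686)
 *		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;	// nonce precedes IV
 */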

/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @geniv: whether to generate Encrypted Chain IV
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
                struct alginfo *adata, unsigned int ivsize,
                unsigned int icvsize, const bool geniv,
                const bool is_rfc3686, u32 *nonce,
                const u32 ctx1_iv_off)
{
        /* Note: Context registers are saved. */
        init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

        /* Class 2 operation */
        append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_DECRYPT | OP_ALG_ICV_ON);

        /* Read and write assoclen bytes */
        append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
        if (geniv)
                append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
        else
                append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* Skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* read assoc before reading payload */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
                        KEY_VLF);

        if (geniv) {
                append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
                                LDST_SRCDST_BYTE_CONTEXT |
                                (ctx1_iv_off << LDST_OFFSET_SHIFT));
                append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
                                (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
        }

        /* Load Counter into CONTEXT1 reg */
        if (is_rfc3686)
                append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
                                LDST_SRCDST_BYTE_CONTEXT |
                                ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
                                LDST_OFFSET_SHIFT));

        /* Choose operation */
        if (ctx1_iv_off)
                append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                                OP_ALG_DECRYPT);
        else
                append_dec_op1(desc, cdata->algtype);

        /* Read and write cryptlen bytes */
        append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
        append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
        aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

        /* Load ICV */
        append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
                        FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
        print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);

/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
                struct alginfo *adata, unsigned int ivsize,
                unsigned int icvsize, const bool is_rfc3686,
                u32 *nonce, const u32 ctx1_iv_off)
{
        u32 geniv, moveiv;

        /* Note: Context registers are saved. */
        init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

        if (is_rfc3686)
                goto copy_iv;

        /* Generate IV */
        geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
                        NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
                        NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
        append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
                        LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
        append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
        append_move(desc, MOVE_WAITCOMP |
                        MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
                        (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
                        (ivsize << MOVE_LEN_SHIFT));
        append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
        /* Copy IV from class 1 context to OFIFO */
        append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
                        (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
                        (ivsize << MOVE_LEN_SHIFT));

        /* Return to encryption */
        append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* Read and write assoclen bytes */
        append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* Skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* read assoc before reading payload */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
                        KEY_VLF);

        /* Copy iv from outfifo to class 2 fifo */
        moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
                        NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
        append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
                        LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
        append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
                        LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

        /* Load Counter into CONTEXT1 reg */
        if (is_rfc3686)
                append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
                                LDST_SRCDST_BYTE_CONTEXT |
                                ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
                                LDST_OFFSET_SHIFT));

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* Will write ivsize + cryptlen */
        append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* No need to reload iv */
        append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

        /* Will read cryptlen */
        append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
                        FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

        /* Write ICV */
        append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
                        LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "aead givenc shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);

/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
                unsigned int icvsize)
{
        u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
                *zero_assoc_jump_cmd2;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD | JUMP_COND_SELF);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                                KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* if assoclen + cryptlen is ZERO, skip to ICV write */
        append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
        zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
                        JUMP_COND_MATH_Z);

        /* if assoclen is ZERO, skip reading the assoc data */
        append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
        zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
                        JUMP_COND_MATH_Z);

        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* cryptlen = seqinlen - assoclen */
        append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

        /* if cryptlen is ZERO jump to zero-payload commands */
        zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
                        JUMP_COND_MATH_Z);

        /* read assoc data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
        set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

        append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* write encrypted data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* read payload data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

        /* jump over the zero-payload commands */
        append_jump(desc, JUMP_TEST_ALL | 2);

        /* zero-payload commands */
        set_jump_tgt_here(desc, zero_payload_jump_cmd);

        /* read assoc data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

        /* There is no input data */
        set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

        /* write ICV */
        append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
                        LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
        print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);

/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
                unsigned int icvsize)
{
        u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL |
                        JUMP_TEST_ALL | JUMP_COND_SHRD |
                        JUMP_COND_SELF);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                                KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_DECRYPT | OP_ALG_ICV_ON);

        /* if assoclen is ZERO, skip reading the assoc data */
        append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
        zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
                        JUMP_COND_MATH_Z);

        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* read assoc data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

        set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

        /* cryptlen = seqoutlen - assoclen */
        append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* jump to zero-payload command if cryptlen is zero */
        zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
                        JUMP_COND_MATH_Z);

        append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* store encrypted data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* read payload data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

        /* zero-payload command */
        set_jump_tgt_here(desc, zero_payload_jump_cmd);

        /* read ICV */
        append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
                        FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
        print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
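
/*
 * Minimal caller-side sketch for the GCM constructors (hypothetical; key,
 * keylen, authsize and the sh_desc_* buffers are assumed names): only the
 * cipher transform is needed here, there is no split key.
 *
 *	struct alginfo cdata = {
 *		.algtype    = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
 *		.keylen     = keylen,		// AES key length in bytes
 *		.key_virt   = key,		// kernel VA of the AES key
 *		.key_inline = true,
 *	};
 *
 *	cnstr_shdsc_gcm_encap(sh_desc_enc, &cdata, authsize);
 *	cnstr_shdsc_gcm_decap(sh_desc_dec, &cdata, authsize);
 */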

/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
                unsigned int icvsize)
{
        u32 *key_jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                                KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* Read assoc data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

        /* Skip IV */
        append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

        /* Will read cryptlen bytes */
        append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

        /* Skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* Will write cryptlen bytes */
        append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

        /* Write encrypted data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* Read payload data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

        /* Write ICV */
        append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
                        LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);

/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
                unsigned int icvsize)
{
        u32 *key_jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                cdata->keylen, CLASS_1 |
                                KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                                KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_DECRYPT | OP_ALG_ICV_ON);

        append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
        append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

        /* Read assoc data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

        /* Skip IV */
        append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

        /* Will read cryptlen bytes */
        append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

        /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

        /* Skip assoc data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

        /* Will write cryptlen bytes */
        append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* Store payload data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* Read encrypted data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
                        FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

        /* Read ICV */
        append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
                        FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);

/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
                unsigned int icvsize)
{
        u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                                KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* assoclen + cryptlen = seqinlen */
        append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

        /*
         * MOVE_LEN opcode is not available in all SEC HW revisions,
         * thus need to do some magic, i.e. self-patch the descriptor
         * buffer.
         */
        read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
                        (0x6 << MOVE_LEN_SHIFT));
        write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
                        (0x8 << MOVE_LEN_SHIFT));

        /* Will read assoclen + cryptlen bytes */
        append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* Will write assoclen + cryptlen bytes */
        append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

        /* Read and write assoclen + cryptlen bytes */
        aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

        set_move_tgt_here(desc, read_move_cmd);
        set_move_tgt_here(desc, write_move_cmd);
        append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

        /* Move payload data to OFIFO */
        append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

        /* Write ICV */
        append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
                        LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);

/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
                unsigned int icvsize)
{
        u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL);

        /* Skip key loading if it is loaded due to sharing */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);
        if (cdata->key_inline)
                append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                                cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
        else
                append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
                                KEY_DEST_CLASS_REG);
        set_jump_tgt_here(desc, key_jump_cmd);

        /* Class 1 operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_DECRYPT | OP_ALG_ICV_ON);

        /* assoclen + cryptlen = seqoutlen */
        append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /*
         * MOVE_LEN opcode is not available in all SEC HW revisions,
         * thus need to do some magic, i.e. self-patch the descriptor
         * buffer.
         */
        read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
                        (0x6 << MOVE_LEN_SHIFT));
        write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
                        (0x8 << MOVE_LEN_SHIFT));

        /* Will read assoclen + cryptlen bytes */
        append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* Will write assoclen + cryptlen bytes */
        append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

        /* Store payload data */
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

        /* In-snoop assoclen + cryptlen data */
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
                        FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

        set_move_tgt_here(desc, read_move_cmd);
        set_move_tgt_here(desc, write_move_cmd);
        append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

        /* Move payload data to OFIFO */
        append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
        append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

        /* Read ICV */
        append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
                        FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);

/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
        append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
        append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
        append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
                        KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
        append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}

/**
 * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
                unsigned int ivsize, const bool is_rfc3686,
                const u32 ctx1_iv_off)
{
        u32 *key_jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

        /* Skip if already shared */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);

        /* Load class1 key only */
        append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                        cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

        /* Load nonce into CONTEXT1 reg */
        if (is_rfc3686) {
                u8 *nonce = cdata->key_virt + cdata->keylen;

                append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
                                LDST_CLASS_IND_CCB |
                                LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
                append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
                                MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
                                (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
        }

        set_jump_tgt_here(desc, key_jump_cmd);

        /* Load iv */
        append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
                        LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

        /* Load counter into CONTEXT1 reg */
        if (is_rfc3686)
                append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
                                LDST_SRCDST_BYTE_CONTEXT |
                                ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
                                LDST_OFFSET_SHIFT));

        /* Load operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* Perform operation */
        ablkcipher_append_src_dst(desc);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
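
/*
 * Usage note (an assumption about the caller, not enforced here): unlike the
 * AEAD constructors, the ablkcipher constructors always inline the key, so
 * cdata->key_virt must point at valid key material whenever the descriptor is
 * (re)built.  For rfc3686, the nonce is expected to sit right after the key
 * in the same buffer, as the nonce pointer computed above
 * (cdata->key_virt + cdata->keylen) shows, e.g.
 *
 *	// hypothetical caller: "key" holds the enc key followed by the 4-byte nonce
 *	cdata.key_virt = key;
 *	cdata.keylen   = keylen - CTR_RFC3686_NONCE_SIZE;
 */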

/**
 * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
                unsigned int ivsize, const bool is_rfc3686,
                const u32 ctx1_iv_off)
{
        u32 *key_jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

        /* Skip if already shared */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);

        /* Load class1 key only */
        append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                        cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

        /* Load nonce into CONTEXT1 reg */
        if (is_rfc3686) {
                u8 *nonce = cdata->key_virt + cdata->keylen;

                append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
                                LDST_CLASS_IND_CCB |
                                LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
                append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
                                MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
                                (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
        }

        set_jump_tgt_here(desc, key_jump_cmd);

        /* load IV */
        append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
                        LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

        /* Load counter into CONTEXT1 reg */
        if (is_rfc3686)
                append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
                                LDST_SRCDST_BYTE_CONTEXT |
                                ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
                                LDST_OFFSET_SHIFT));

        /* Choose operation */
        if (ctx1_iv_off)
                append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                                OP_ALG_DECRYPT);
        else
                append_dec_op1(desc, cdata->algtype);

        /* Perform operation */
        ablkcipher_append_src_dst(desc);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);

/**
 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
 *                                   with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
                unsigned int ivsize, const bool is_rfc3686,
                const u32 ctx1_iv_off)
{
        u32 *key_jump_cmd, geniv;

        init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

        /* Skip if already shared */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);

        /* Load class1 key only */
        append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                        cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

        /* Load Nonce into CONTEXT1 reg */
        if (is_rfc3686) {
                u8 *nonce = cdata->key_virt + cdata->keylen;

                append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
                                LDST_CLASS_IND_CCB |
                                LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
                append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
                                MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
                                (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
        }

        set_jump_tgt_here(desc, key_jump_cmd);

        /* Generate IV */
        geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
                        NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
                        (ivsize << NFIFOENTRY_DLEN_SHIFT);
        append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
                        LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
        append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
        append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
                        MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
                        (ctx1_iv_off << MOVE_OFFSET_SHIFT));
        append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

        /* Copy generated IV to memory */
        append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
                        LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

        /* Load Counter into CONTEXT1 reg */
        if (is_rfc3686)
                append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
                                LDST_SRCDST_BYTE_CONTEXT |
                                ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
                                LDST_OFFSET_SHIFT));

        if (ctx1_iv_off)
                append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
                                (1 << JUMP_OFFSET_SHIFT));

        /* Load operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* Perform operation */
        ablkcipher_append_src_dst(desc);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);

/**
 * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
 *                                    descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
{
        __be64 sector_size = cpu_to_be64(512);
        u32 *key_jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

        /* Skip if already shared */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);

        /* Load class1 keys only */
        append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                        cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

        /* Load sector size with index 40 bytes (0x28) */
        append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
                        LDST_SRCDST_BYTE_CONTEXT |
                        (0x28 << LDST_OFFSET_SHIFT));
        set_jump_tgt_here(desc, key_jump_cmd);

        /*
         * create sequence for loading the sector index
         * Upper 8B of IV - will be used as sector index
         * Lower 8B of IV - will be discarded
         */
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x20 << LDST_OFFSET_SHIFT));
        append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

        /* Load operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
                        OP_ALG_ENCRYPT);

        /* Perform operation */
        ablkcipher_append_src_dst(desc);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);

/**
 * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
 *                                    descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
{
        __be64 sector_size = cpu_to_be64(512);
        u32 *key_jump_cmd;

        init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

        /* Skip if already shared */
        key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
                        JUMP_COND_SHRD);

        /* Load class1 key only */
        append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
                        cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

        /* Load sector size with index 40 bytes (0x28) */
        append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
                        LDST_SRCDST_BYTE_CONTEXT |
                        (0x28 << LDST_OFFSET_SHIFT));
        set_jump_tgt_here(desc, key_jump_cmd);

        /*
         * create sequence for loading the sector index
         * Upper 8B of IV - will be used as sector index
         * Lower 8B of IV - will be discarded
         */
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x20 << LDST_OFFSET_SHIFT));
        append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

        /* Load operation */
        append_dec_op1(desc, cdata->algtype);

        /* Perform operation */
        ablkcipher_append_src_dst(desc);

#ifdef DEBUG
        print_hex_dump(KERN_ERR,
                        "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);
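
/*
 * Caller-side sketch for the XTS constructors (hypothetical; key, keylen and
 * the sh_desc_* buffers are assumed names): cdata carries the double-length
 * XTS key, and the 512-byte sector size above is baked into the shared
 * descriptor as an immediate, so a different sector size would require
 * rebuilding the descriptor.
 *
 *	struct alginfo cdata = {
 *		.algtype  = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
 *		.keylen   = keylen,		// covers both XTS key halves
 *		.key_virt = key,
 *	};
 *
 *	cnstr_shdsc_xts_ablkcipher_encap(sh_desc_enc, &cdata);
 *	cnstr_shdsc_xts_ablkcipher_decap(sh_desc_dec, &cdata);
 */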

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM descriptor support");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");