/*
 * Shared descriptors for aead, ablkcipher algorithms
 *
 * Copyright 2016 NXP
 */

#include "compat.h"
#include "desc_constr.h"
#include "caamalg_desc.h"
  9. /*
  10. * For aead functions, read payload and write payload,
  11. * both of which are specified in req->src and req->dst
  12. */
  13. static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
  14. {
  15. append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
  16. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
  17. KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
  18. }
  19. /* Set DK bit in class 1 operation if shared */
  20. static inline void append_dec_op1(u32 *desc, u32 type)
  21. {
  22. u32 *jump_cmd, *uncond_jump_cmd;
  23. /* DK bit is valid only for AES */
  24. if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
  25. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  26. OP_ALG_DECRYPT);
  27. return;
  28. }
  29. jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
  30. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  31. OP_ALG_DECRYPT);
  32. uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
  33. set_jump_tgt_here(desc, jump_cmd);
  34. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  35. OP_ALG_DECRYPT | OP_ALG_AAI_DK);
  36. set_jump_tgt_here(desc, uncond_jump_cmd);
  37. }
  38. /**
  39. * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
  40. * (non-protocol) with no (null) encryption.
  41. * @desc: pointer to buffer used for descriptor construction
  42. * @adata: pointer to authentication transform definitions.
  43. * A split key is required for SEC Era < 6; the size of the split key
  44. * is specified in this case. Valid algorithm values - one of
  45. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  46. * with OP_ALG_AAI_HMAC_PRECOMP.
  47. * @icvsize: integrity check value (ICV) size (truncated or full)
  48. * @era: SEC Era
  49. */
  50. void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
  51. unsigned int icvsize, int era)
  52. {
  53. u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
  54. init_sh_desc(desc, HDR_SHARE_SERIAL);
  55. /* Skip if already shared */
  56. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  57. JUMP_COND_SHRD);
  58. if (era < 6) {
  59. if (adata->key_inline)
  60. append_key_as_imm(desc, adata->key_virt,
  61. adata->keylen_pad, adata->keylen,
  62. CLASS_2 | KEY_DEST_MDHA_SPLIT |
  63. KEY_ENC);
  64. else
  65. append_key(desc, adata->key_dma, adata->keylen,
  66. CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
  67. } else {
  68. append_proto_dkp(desc, adata);
  69. }
  70. set_jump_tgt_here(desc, key_jump_cmd);
  71. /* assoclen + cryptlen = seqinlen */
  72. append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
  73. /* Prepare to read and write cryptlen + assoclen bytes */
  74. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  75. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
  76. /*
  77. * MOVE_LEN opcode is not available in all SEC HW revisions,
  78. * thus need to do some magic, i.e. self-patch the descriptor
  79. * buffer.
  80. */
  81. read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
  82. MOVE_DEST_MATH3 |
  83. (0x6 << MOVE_LEN_SHIFT));
  84. write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
  85. MOVE_DEST_DESCBUF |
  86. MOVE_WAITCOMP |
  87. (0x8 << MOVE_LEN_SHIFT));
  88. /* Class 2 operation */
  89. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  90. OP_ALG_ENCRYPT);
  91. /* Read and write cryptlen bytes */
  92. aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
  93. set_move_tgt_here(desc, read_move_cmd);
  94. set_move_tgt_here(desc, write_move_cmd);
  95. append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
  96. append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
  97. MOVE_AUX_LS);
  98. /* Write ICV */
  99. append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
  100. LDST_SRCDST_BYTE_CONTEXT);
  101. #ifdef DEBUG
  102. print_hex_dump(KERN_ERR,
  103. "aead null enc shdesc@" __stringify(__LINE__)": ",
  104. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  105. #endif
  106. }
  107. EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
  108. /**
  109. * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
  110. * (non-protocol) with no (null) decryption.
  111. * @desc: pointer to buffer used for descriptor construction
  112. * @adata: pointer to authentication transform definitions.
  113. * A split key is required for SEC Era < 6; the size of the split key
  114. * is specified in this case. Valid algorithm values - one of
  115. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  116. * with OP_ALG_AAI_HMAC_PRECOMP.
  117. * @icvsize: integrity check value (ICV) size (truncated or full)
  118. * @era: SEC Era
  119. */
  120. void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
  121. unsigned int icvsize, int era)
  122. {
  123. u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
  124. init_sh_desc(desc, HDR_SHARE_SERIAL);
  125. /* Skip if already shared */
  126. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  127. JUMP_COND_SHRD);
  128. if (era < 6) {
  129. if (adata->key_inline)
  130. append_key_as_imm(desc, adata->key_virt,
  131. adata->keylen_pad, adata->keylen,
  132. CLASS_2 | KEY_DEST_MDHA_SPLIT |
  133. KEY_ENC);
  134. else
  135. append_key(desc, adata->key_dma, adata->keylen,
  136. CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
  137. } else {
  138. append_proto_dkp(desc, adata);
  139. }
  140. set_jump_tgt_here(desc, key_jump_cmd);
  141. /* Class 2 operation */
  142. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  143. OP_ALG_DECRYPT | OP_ALG_ICV_ON);
  144. /* assoclen + cryptlen = seqoutlen */
  145. append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
  146. /* Prepare to read and write cryptlen + assoclen bytes */
  147. append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
  148. append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
  149. /*
  150. * MOVE_LEN opcode is not available in all SEC HW revisions,
  151. * thus need to do some magic, i.e. self-patch the descriptor
  152. * buffer.
  153. */
  154. read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
  155. MOVE_DEST_MATH2 |
  156. (0x6 << MOVE_LEN_SHIFT));
  157. write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
  158. MOVE_DEST_DESCBUF |
  159. MOVE_WAITCOMP |
  160. (0x8 << MOVE_LEN_SHIFT));
  161. /* Read and write cryptlen bytes */
  162. aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
  163. /*
  164. * Insert a NOP here, since we need at least 4 instructions between
  165. * code patching the descriptor buffer and the location being patched.
  166. */
  167. jump_cmd = append_jump(desc, JUMP_TEST_ALL);
  168. set_jump_tgt_here(desc, jump_cmd);
  169. set_move_tgt_here(desc, read_move_cmd);
  170. set_move_tgt_here(desc, write_move_cmd);
  171. append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
  172. append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
  173. MOVE_AUX_LS);
  174. append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
  175. /* Load ICV */
  176. append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
  177. FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
  178. #ifdef DEBUG
  179. print_hex_dump(KERN_ERR,
  180. "aead null dec shdesc@" __stringify(__LINE__)": ",
  181. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  182. #endif
  183. }
  184. EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
  185. static void init_sh_desc_key_aead(u32 * const desc,
  186. struct alginfo * const cdata,
  187. struct alginfo * const adata,
  188. const bool is_rfc3686, u32 *nonce, int era)
  189. {
  190. u32 *key_jump_cmd;
  191. unsigned int enckeylen = cdata->keylen;
  192. /* Note: Context registers are saved. */
  193. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  194. /* Skip if already shared */
  195. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  196. JUMP_COND_SHRD);
  197. /*
  198. * RFC3686 specific:
  199. * | key = {AUTH_KEY, ENC_KEY, NONCE}
  200. * | enckeylen = encryption key size + nonce size
  201. */
  202. if (is_rfc3686)
  203. enckeylen -= CTR_RFC3686_NONCE_SIZE;
  204. if (era < 6) {
  205. if (adata->key_inline)
  206. append_key_as_imm(desc, adata->key_virt,
  207. adata->keylen_pad, adata->keylen,
  208. CLASS_2 | KEY_DEST_MDHA_SPLIT |
  209. KEY_ENC);
  210. else
  211. append_key(desc, adata->key_dma, adata->keylen,
  212. CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
  213. } else {
  214. append_proto_dkp(desc, adata);
  215. }
  216. if (cdata->key_inline)
  217. append_key_as_imm(desc, cdata->key_virt, enckeylen,
  218. enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
  219. else
  220. append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
  221. KEY_DEST_CLASS_REG);
  222. /* Load Counter into CONTEXT1 reg */
  223. if (is_rfc3686) {
  224. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  225. LDST_CLASS_IND_CCB |
  226. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  227. append_move(desc,
  228. MOVE_SRC_OUTFIFO |
  229. MOVE_DEST_CLASS1CTX |
  230. (16 << MOVE_OFFSET_SHIFT) |
  231. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  232. }
  233. set_jump_tgt_here(desc, key_jump_cmd);
  234. }
  235. /**
  236. * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
  237. * (non-protocol).
  238. * @desc: pointer to buffer used for descriptor construction
  239. * @cdata: pointer to block cipher transform definitions
  240. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  241. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  242. * @adata: pointer to authentication transform definitions.
  243. * A split key is required for SEC Era < 6; the size of the split key
  244. * is specified in this case. Valid algorithm values - one of
  245. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  246. * with OP_ALG_AAI_HMAC_PRECOMP.
  247. * @ivsize: initialization vector size
  248. * @icvsize: integrity check value (ICV) size (truncated or full)
  249. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  250. * @nonce: pointer to rfc3686 nonce
  251. * @ctx1_iv_off: IV offset in CONTEXT1 register
  252. * @is_qi: true when called from caam/qi
  253. * @era: SEC Era
  254. */
  255. void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
  256. struct alginfo *adata, unsigned int ivsize,
  257. unsigned int icvsize, const bool is_rfc3686,
  258. u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
  259. int era)
  260. {
  261. /* Note: Context registers are saved. */
  262. init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
  263. /* Class 2 operation */
  264. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  265. OP_ALG_ENCRYPT);
  266. if (is_qi) {
  267. u32 *wait_load_cmd;
  268. /* REG3 = assoclen */
  269. append_seq_load(desc, 4, LDST_CLASS_DECO |
  270. LDST_SRCDST_WORD_DECO_MATH3 |
  271. (4 << LDST_OFFSET_SHIFT));
  272. wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  273. JUMP_COND_CALM | JUMP_COND_NCP |
  274. JUMP_COND_NOP | JUMP_COND_NIP |
  275. JUMP_COND_NIFP);
  276. set_jump_tgt_here(desc, wait_load_cmd);
  277. append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
  278. LDST_SRCDST_BYTE_CONTEXT |
  279. (ctx1_iv_off << LDST_OFFSET_SHIFT));
  280. }
  281. /* Read and write assoclen bytes */
  282. if (is_qi || era < 3) {
  283. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  284. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
  285. } else {
  286. append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  287. append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  288. }
  289. /* Skip assoc data */
  290. append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
  291. /* read assoc before reading payload */
  292. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
  293. FIFOLDST_VLF);
  294. /* Load Counter into CONTEXT1 reg */
  295. if (is_rfc3686)
  296. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  297. LDST_SRCDST_BYTE_CONTEXT |
  298. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  299. LDST_OFFSET_SHIFT));
  300. /* Class 1 operation */
  301. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  302. OP_ALG_ENCRYPT);
  303. /* Read and write cryptlen bytes */
  304. append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  305. append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  306. aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
  307. /* Write ICV */
  308. append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
  309. LDST_SRCDST_BYTE_CONTEXT);
  310. #ifdef DEBUG
  311. print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
  312. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  313. #endif
  314. }
  315. EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
  316. /**
  317. * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
  318. * (non-protocol).
  319. * @desc: pointer to buffer used for descriptor construction
  320. * @cdata: pointer to block cipher transform definitions
  321. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  322. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  323. * @adata: pointer to authentication transform definitions.
  324. * A split key is required for SEC Era < 6; the size of the split key
  325. * is specified in this case. Valid algorithm values - one of
  326. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  327. * with OP_ALG_AAI_HMAC_PRECOMP.
  328. * @ivsize: initialization vector size
  329. * @icvsize: integrity check value (ICV) size (truncated or full)
  330. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  331. * @nonce: pointer to rfc3686 nonce
  332. * @ctx1_iv_off: IV offset in CONTEXT1 register
  333. * @is_qi: true when called from caam/qi
  334. * @era: SEC Era
  335. */
  336. void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
  337. struct alginfo *adata, unsigned int ivsize,
  338. unsigned int icvsize, const bool geniv,
  339. const bool is_rfc3686, u32 *nonce,
  340. const u32 ctx1_iv_off, const bool is_qi, int era)
  341. {
  342. /* Note: Context registers are saved. */
  343. init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
  344. /* Class 2 operation */
  345. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  346. OP_ALG_DECRYPT | OP_ALG_ICV_ON);
  347. if (is_qi) {
  348. u32 *wait_load_cmd;
  349. /* REG3 = assoclen */
  350. append_seq_load(desc, 4, LDST_CLASS_DECO |
  351. LDST_SRCDST_WORD_DECO_MATH3 |
  352. (4 << LDST_OFFSET_SHIFT));
  353. wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  354. JUMP_COND_CALM | JUMP_COND_NCP |
  355. JUMP_COND_NOP | JUMP_COND_NIP |
  356. JUMP_COND_NIFP);
  357. set_jump_tgt_here(desc, wait_load_cmd);
  358. if (!geniv)
  359. append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
  360. LDST_SRCDST_BYTE_CONTEXT |
  361. (ctx1_iv_off << LDST_OFFSET_SHIFT));
  362. }
  363. /* Read and write assoclen bytes */
  364. if (is_qi || era < 3) {
  365. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  366. if (geniv)
  367. append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
  368. ivsize);
  369. else
  370. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
  371. CAAM_CMD_SZ);
  372. } else {
  373. append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  374. if (geniv)
  375. append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
  376. ivsize);
  377. else
  378. append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
  379. CAAM_CMD_SZ);
  380. }
  381. /* Skip assoc data */
  382. append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
  383. /* read assoc before reading payload */
  384. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
  385. KEY_VLF);
  386. if (geniv) {
  387. append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
  388. LDST_SRCDST_BYTE_CONTEXT |
  389. (ctx1_iv_off << LDST_OFFSET_SHIFT));
  390. append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
  391. (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
  392. }
  393. /* Load Counter into CONTEXT1 reg */
  394. if (is_rfc3686)
  395. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  396. LDST_SRCDST_BYTE_CONTEXT |
  397. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  398. LDST_OFFSET_SHIFT));
  399. /* Choose operation */
  400. if (ctx1_iv_off)
  401. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  402. OP_ALG_DECRYPT);
  403. else
  404. append_dec_op1(desc, cdata->algtype);
  405. /* Read and write cryptlen bytes */
  406. append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
  407. append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
  408. aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
  409. /* Load ICV */
  410. append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
  411. FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
  412. #ifdef DEBUG
  413. print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
  414. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  415. #endif
  416. }
  417. EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
  418. /**
  419. * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
  420. * (non-protocol) with HW-generated initialization
  421. * vector.
  422. * @desc: pointer to buffer used for descriptor construction
  423. * @cdata: pointer to block cipher transform definitions
  424. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  425. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  426. * @adata: pointer to authentication transform definitions.
  427. * A split key is required for SEC Era < 6; the size of the split key
  428. * is specified in this case. Valid algorithm values - one of
  429. * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
  430. * with OP_ALG_AAI_HMAC_PRECOMP.
  431. * @ivsize: initialization vector size
  432. * @icvsize: integrity check value (ICV) size (truncated or full)
  433. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  434. * @nonce: pointer to rfc3686 nonce
  435. * @ctx1_iv_off: IV offset in CONTEXT1 register
  436. * @is_qi: true when called from caam/qi
  437. * @era: SEC Era
  438. */
  439. void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
  440. struct alginfo *adata, unsigned int ivsize,
  441. unsigned int icvsize, const bool is_rfc3686,
  442. u32 *nonce, const u32 ctx1_iv_off,
  443. const bool is_qi, int era)
  444. {
  445. u32 geniv, moveiv;
  446. /* Note: Context registers are saved. */
  447. init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
  448. if (is_qi) {
  449. u32 *wait_load_cmd;
  450. /* REG3 = assoclen */
  451. append_seq_load(desc, 4, LDST_CLASS_DECO |
  452. LDST_SRCDST_WORD_DECO_MATH3 |
  453. (4 << LDST_OFFSET_SHIFT));
  454. wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  455. JUMP_COND_CALM | JUMP_COND_NCP |
  456. JUMP_COND_NOP | JUMP_COND_NIP |
  457. JUMP_COND_NIFP);
  458. set_jump_tgt_here(desc, wait_load_cmd);
  459. }
  460. if (is_rfc3686) {
  461. if (is_qi)
  462. append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
  463. LDST_SRCDST_BYTE_CONTEXT |
  464. (ctx1_iv_off << LDST_OFFSET_SHIFT));
  465. goto copy_iv;
  466. }
  467. /* Generate IV */
  468. geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
  469. NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
  470. NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
  471. append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
  472. LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
  473. append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
  474. append_move(desc, MOVE_WAITCOMP |
  475. MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
  476. (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
  477. (ivsize << MOVE_LEN_SHIFT));
  478. append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
  479. copy_iv:
  480. /* Copy IV to class 1 context */
  481. append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
  482. (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
  483. (ivsize << MOVE_LEN_SHIFT));
  484. /* Return to encryption */
  485. append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
  486. OP_ALG_ENCRYPT);
  487. /* Read and write assoclen bytes */
  488. if (is_qi || era < 3) {
  489. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  490. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
  491. } else {
  492. append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  493. append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
  494. }
  495. /* Skip assoc data */
  496. append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
  497. /* read assoc before reading payload */
  498. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
  499. KEY_VLF);
  500. /* Copy iv from outfifo to class 2 fifo */
  501. moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
  502. NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
  503. append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
  504. LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
  505. append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
  506. LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
  507. /* Load Counter into CONTEXT1 reg */
  508. if (is_rfc3686)
  509. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  510. LDST_SRCDST_BYTE_CONTEXT |
  511. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  512. LDST_OFFSET_SHIFT));
  513. /* Class 1 operation */
  514. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  515. OP_ALG_ENCRYPT);
  516. /* Will write ivsize + cryptlen */
  517. append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  518. /* Not need to reload iv */
  519. append_seq_fifo_load(desc, ivsize,
  520. FIFOLD_CLASS_SKIP);
  521. /* Will read cryptlen */
  522. append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  523. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
  524. FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
  525. append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
  526. /* Write ICV */
  527. append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
  528. LDST_SRCDST_BYTE_CONTEXT);
  529. #ifdef DEBUG
  530. print_hex_dump(KERN_ERR,
  531. "aead givenc shdesc@" __stringify(__LINE__)": ",
  532. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  533. #endif
  534. }
  535. EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
  536. /**
  537. * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
  538. * @desc: pointer to buffer used for descriptor construction
  539. * @cdata: pointer to block cipher transform definitions
  540. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
  541. * @ivsize: initialization vector size
  542. * @icvsize: integrity check value (ICV) size (truncated or full)
  543. * @is_qi: true when called from caam/qi
  544. */
  545. void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
  546. unsigned int ivsize, unsigned int icvsize,
  547. const bool is_qi)
  548. {
  549. u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
  550. *zero_assoc_jump_cmd2;
  551. init_sh_desc(desc, HDR_SHARE_SERIAL);
  552. /* skip key loading if they are loaded due to sharing */
  553. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  554. JUMP_COND_SHRD);
  555. if (cdata->key_inline)
  556. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  557. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  558. else
  559. append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
  560. KEY_DEST_CLASS_REG);
  561. set_jump_tgt_here(desc, key_jump_cmd);
  562. /* class 1 operation */
  563. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  564. OP_ALG_ENCRYPT);
  565. if (is_qi) {
  566. u32 *wait_load_cmd;
  567. /* REG3 = assoclen */
  568. append_seq_load(desc, 4, LDST_CLASS_DECO |
  569. LDST_SRCDST_WORD_DECO_MATH3 |
  570. (4 << LDST_OFFSET_SHIFT));
  571. wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  572. JUMP_COND_CALM | JUMP_COND_NCP |
  573. JUMP_COND_NOP | JUMP_COND_NIP |
  574. JUMP_COND_NIFP);
  575. set_jump_tgt_here(desc, wait_load_cmd);
  576. append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
  577. ivsize);
  578. } else {
  579. append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
  580. CAAM_CMD_SZ);
  581. }
  582. /* if assoclen + cryptlen is ZERO, skip to ICV write */
  583. zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
  584. JUMP_COND_MATH_Z);
  585. if (is_qi)
  586. append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
  587. FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
  588. /* if assoclen is ZERO, skip reading the assoc data */
  589. append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
  590. zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
  591. JUMP_COND_MATH_Z);
  592. append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
  593. /* skip assoc data */
  594. append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
  595. /* cryptlen = seqinlen - assoclen */
  596. append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
  597. /* if cryptlen is ZERO jump to zero-payload commands */
  598. zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
  599. JUMP_COND_MATH_Z);
  600. /* read assoc data */
  601. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
  602. FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
  603. set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
  604. append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
  605. /* write encrypted data */
  606. append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
  607. /* read payload data */
  608. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
  609. FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
  610. /* jump to ICV writing */
  611. if (is_qi)
  612. append_jump(desc, JUMP_TEST_ALL | 4);
  613. else
  614. append_jump(desc, JUMP_TEST_ALL | 2);
  615. /* zero-payload commands */
  616. set_jump_tgt_here(desc, zero_payload_jump_cmd);
  617. /* read assoc data */
  618. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
  619. FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
  620. if (is_qi)
  621. /* jump to ICV writing */
  622. append_jump(desc, JUMP_TEST_ALL | 2);
  623. /* There is no input data */
  624. set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
  625. if (is_qi)
  626. append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
  627. FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
  628. FIFOLD_TYPE_LAST1);
  629. /* write ICV */
  630. append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
  631. LDST_SRCDST_BYTE_CONTEXT);
  632. #ifdef DEBUG
  633. print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
  634. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  635. #endif
  636. }
  637. EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Builds the GCM decrypt shared descriptor: load key (unless shared),
 * start the class 1 decrypt+ICV-check operation, consume the assoc data
 * (skipping it when assoclen is zero), decrypt the payload and finally
 * read the ICV for hardware verification.
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/*
		 * Jump to the immediately following command; the JSL
		 * conditions make this act as a barrier until the DECO
		 * load above has settled (NOTE(review): per CAAM RM jump
		 * condition semantics — confirm against the manual).
		 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* VSOL = assoclen, used by the skip-store below */
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV; hardware verifies it because OP_ALG_ICV_ON was set */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * RFC4106 carries the IV inside the input sequence (after the assoc data);
 * the 4-byte salt is stored right after the key in cdata->key_virt.
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Jump-to-next acting as a wait until the load above lands */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt and IV; salt sits just past the key material */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* Will read assoclen - ivsize bytes of assoc data (REG3 = assoclen) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	/* VSOL = assoclen, consumed by the skip-store below */
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * Workaround for erratum A-005473 (simultaneous SEQ FIFO skips):
	 * a zero-length MSG load is inserted between the input-side skip
	 * above and the output-side skip below.
	 */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Mirror of cnstr_shdsc_rfc4106_encap for the decrypt direction: the ICV
 * is read at the end and checked by hardware (OP_ALG_ICV_ON).
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/* Jump-to-next acting as a wait until the load above lands */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt and IV; salt sits just past the key material */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* Will read assoclen - ivsize bytes of assoc data (REG3 = assoclen) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	/* VSOL = assoclen, consumed by the skip-store below */
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV; hardware verifies it because OP_ALG_ICV_ON was set */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * GMAC authenticates assoc data + payload; all input is fed to the engine
 * as AAD and the payload is additionally copied to the output via a
 * self-patched MOVE (see comment below about MOVE_LEN availability).
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV; salt sits just past the key material */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVEs below get their targets patched by the
	 * set_move_tgt_here() calls further down.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	/* Patch the MOVE commands appended earlier */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 *
 * Decrypt-side mirror of cnstr_shdsc_rfc4543_encap: the whole input is
 * in-snooped as AAD, the payload is copied to the output via a
 * self-patched MOVE, and the ICV is read for hardware verification.
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV; salt sits just past the key material */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVEs below get their targets patched by the
	 * set_move_tgt_here() calls further down.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	/* Patch the MOVE commands appended earlier */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV; hardware verifies it because OP_ALG_ICV_ON was set */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	/* Set both variable lengths from the remaining input sequence */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	/* Feed the whole input to class 1 as the final message chunk */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	/* Write the processed data back out */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
  1026. /**
  1027. * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
  1028. * @desc: pointer to buffer used for descriptor construction
  1029. * @cdata: pointer to block cipher transform definitions
  1030. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1031. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  1032. * @ivsize: initialization vector size
  1033. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1034. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1035. */
  1036. void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
  1037. unsigned int ivsize, const bool is_rfc3686,
  1038. const u32 ctx1_iv_off)
  1039. {
  1040. u32 *key_jump_cmd;
  1041. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1042. /* Skip if already shared */
  1043. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1044. JUMP_COND_SHRD);
  1045. /* Load class1 key only */
  1046. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1047. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1048. /* Load nonce into CONTEXT1 reg */
  1049. if (is_rfc3686) {
  1050. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1051. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1052. LDST_CLASS_IND_CCB |
  1053. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1054. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1055. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1056. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1057. }
  1058. set_jump_tgt_here(desc, key_jump_cmd);
  1059. /* Load iv */
  1060. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1061. LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
  1062. /* Load counter into CONTEXT1 reg */
  1063. if (is_rfc3686)
  1064. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1065. LDST_SRCDST_BYTE_CONTEXT |
  1066. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1067. LDST_OFFSET_SHIFT));
  1068. /* Load operation */
  1069. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1070. OP_ALG_ENCRYPT);
  1071. /* Perform operation */
  1072. ablkcipher_append_src_dst(desc);
  1073. #ifdef DEBUG
  1074. print_hex_dump(KERN_ERR,
  1075. "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
  1076. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1077. #endif
  1078. }
  1079. EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
  1080. /**
  1081. * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
  1082. * @desc: pointer to buffer used for descriptor construction
  1083. * @cdata: pointer to block cipher transform definitions
  1084. * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
  1085. * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
  1086. * @ivsize: initialization vector size
  1087. * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
  1088. * @ctx1_iv_off: IV offset in CONTEXT1 register
  1089. */
  1090. void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
  1091. unsigned int ivsize, const bool is_rfc3686,
  1092. const u32 ctx1_iv_off)
  1093. {
  1094. u32 *key_jump_cmd;
  1095. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1096. /* Skip if already shared */
  1097. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1098. JUMP_COND_SHRD);
  1099. /* Load class1 key only */
  1100. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1101. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1102. /* Load nonce into CONTEXT1 reg */
  1103. if (is_rfc3686) {
  1104. const u8 *nonce = cdata->key_virt + cdata->keylen;
  1105. append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
  1106. LDST_CLASS_IND_CCB |
  1107. LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
  1108. append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
  1109. MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
  1110. (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
  1111. }
  1112. set_jump_tgt_here(desc, key_jump_cmd);
  1113. /* load IV */
  1114. append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
  1115. LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
  1116. /* Load counter into CONTEXT1 reg */
  1117. if (is_rfc3686)
  1118. append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
  1119. LDST_SRCDST_BYTE_CONTEXT |
  1120. ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
  1121. LDST_OFFSET_SHIFT));
  1122. /* Choose operation */
  1123. if (ctx1_iv_off)
  1124. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1125. OP_ALG_DECRYPT);
  1126. else
  1127. append_dec_op1(desc, cdata->algtype);
  1128. /* Perform operation */
  1129. ablkcipher_append_src_dst(desc);
  1130. #ifdef DEBUG
  1131. print_hex_dump(KERN_ERR,
  1132. "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
  1133. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1134. #endif
  1135. }
  1136. EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);
/**
 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
 *                                   with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Like cnstr_shdsc_ablkcipher_encap, but the IV is produced by the
 * hardware RNG (via an info-FIFO pad entry), copied into CONTEXT1 and
 * also stored to the output sequence for the caller.
 */
void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
				     unsigned int ivsize, const bool is_rfc3686,
				     const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd, geniv;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg (nonce follows the key material) */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * Generate IV: inject an info-FIFO entry requesting ivsize bytes
	 * of RNG pad data, then move it from the input FIFO into CONTEXT1.
	 * Auto-info-FIFO is disabled around the move so the pad entry is
	 * consumed manually.
	 */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
		(ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg, just after the IV */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/*
	 * Wait (jump with NCP condition to the next command) before
	 * starting the operation (NOTE(review): presumably lets the IV
	 * store above complete — confirm against the CAAM RM).
	 */
	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);
  1206. /**
  1207. * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
  1208. * descriptor
  1209. * @desc: pointer to buffer used for descriptor construction
  1210. * @cdata: pointer to block cipher transform definitions
  1211. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  1212. */
  1213. void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
  1214. {
  1215. __be64 sector_size = cpu_to_be64(512);
  1216. u32 *key_jump_cmd;
  1217. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1218. /* Skip if already shared */
  1219. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1220. JUMP_COND_SHRD);
  1221. /* Load class1 keys only */
  1222. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1223. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1224. /* Load sector size with index 40 bytes (0x28) */
  1225. append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
  1226. LDST_SRCDST_BYTE_CONTEXT |
  1227. (0x28 << LDST_OFFSET_SHIFT));
  1228. set_jump_tgt_here(desc, key_jump_cmd);
  1229. /*
  1230. * create sequence for loading the sector index
  1231. * Upper 8B of IV - will be used as sector index
  1232. * Lower 8B of IV - will be discarded
  1233. */
  1234. append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1235. (0x20 << LDST_OFFSET_SHIFT));
  1236. append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
  1237. /* Load operation */
  1238. append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
  1239. OP_ALG_ENCRYPT);
  1240. /* Perform operation */
  1241. ablkcipher_append_src_dst(desc);
  1242. #ifdef DEBUG
  1243. print_hex_dump(KERN_ERR,
  1244. "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
  1245. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1246. #endif
  1247. }
  1248. EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);
  1249. /**
  1250. * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
  1251. * descriptor
  1252. * @desc: pointer to buffer used for descriptor construction
  1253. * @cdata: pointer to block cipher transform definitions
  1254. * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
  1255. */
  1256. void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
  1257. {
  1258. __be64 sector_size = cpu_to_be64(512);
  1259. u32 *key_jump_cmd;
  1260. init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
  1261. /* Skip if already shared */
  1262. key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
  1263. JUMP_COND_SHRD);
  1264. /* Load class1 key only */
  1265. append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
  1266. cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
  1267. /* Load sector size with index 40 bytes (0x28) */
  1268. append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
  1269. LDST_SRCDST_BYTE_CONTEXT |
  1270. (0x28 << LDST_OFFSET_SHIFT));
  1271. set_jump_tgt_here(desc, key_jump_cmd);
  1272. /*
  1273. * create sequence for loading the sector index
  1274. * Upper 8B of IV - will be used as sector index
  1275. * Lower 8B of IV - will be discarded
  1276. */
  1277. append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
  1278. (0x20 << LDST_OFFSET_SHIFT));
  1279. append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
  1280. /* Load operation */
  1281. append_dec_op1(desc, cdata->algtype);
  1282. /* Perform operation */
  1283. ablkcipher_append_src_dst(desc);
  1284. #ifdef DEBUG
  1285. print_hex_dump(KERN_ERR,
  1286. "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
  1287. DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
  1288. #endif
  1289. }
  1290. EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);
  1291. MODULE_LICENSE("GPL");
  1292. MODULE_DESCRIPTION("FSL CAAM descriptor support");
  1293. MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");