/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |       |             |  (PDB)      |
 * ---------------       |------------>|  (hashKey)  |
 *       .               |             | (cipherKey) |
 *       .               |    |------->| (operation) |
 * ---------------       |    |        ---------------
 * | JobDesc #2  |-------|    |
 * | *(packet 2) |            |
 * ---------------            |
 *       .                    |
 *       .                    |
 * ---------------            |
 * | JobDesc #3  |------------|
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
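
/*
 * A minimal, compiled-out sketch (not part of this driver) of how the job
 * descriptor layout above maps onto the desc_constr.h helpers used
 * throughout this file. All addresses and lengths are placeholders.
 */
#if 0
static void example_build_job_desc(u32 *desc, dma_addr_t shdesc_dma,
				   int shdesc_len, dma_addr_t out_dma,
				   u32 out_len, dma_addr_t in_dma, u32 in_len)
{
	/* Header + ShareDesc pointer */
	init_job_desc_shared(desc, shdesc_dma, shdesc_len,
			     HDR_SHARE_DEFER | HDR_REVERSE);
	/* SEQ_OUT_PTR: output buffer and length */
	append_seq_out_ptr(desc, out_dma, out_len, 0);
	/* SEQ_IN_PTR: input buffer and length */
	append_seq_in_ptr(desc, in_dma, in_len, 0);
}
#endif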
#include "compat.h"
#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

#ifdef DEBUG
#include <linux/highmem.h>

static void dbg_dump_sg(const char *level, const char *prefix_str,
			int prefix_type, int rowsize, int groupsize,
			struct scatterlist *sg, size_t tlen, bool ascii)
{
	struct scatterlist *it;
	void *it_page;
	size_t len;
	void *buf;

	/* advance via sg_next(it), not sg_next(sg), so the walk makes progress */
	for (it = sg; it != NULL && tlen > 0; it = sg_next(it)) {
		/*
		 * make sure the scatterlist's page
		 * has a valid virtual memory mapping
		 */
		it_page = kmap_atomic(sg_page(it));
		if (unlikely(!it_page)) {
			printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
			return;
		}

		buf = it_page + it->offset;
		len = min_t(size_t, tlen, it->length);
		print_hex_dump(level, prefix_str, prefix_type, rowsize,
			       groupsize, buf, len, ascii);
		tlen -= len;

		kunmap_atomic(it_page);
	}
}
#endif
static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
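
/*
 * The key-inlining choice above is pure arithmetic: one CAAM descriptor
 * buffer holds CAAM_DESC_BYTES_MAX bytes, shared between the shared
 * descriptor and the job descriptor I/O commands. A compiled-out sketch
 * of the same test in isolation, using this file's constants:
 */
#if 0
static bool example_key_fits_inline(unsigned int keylen_pad, int shdesc_len)
{
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			keylen_pad;

	/* true: embed the key bytes in the descriptor; false: reference by DMA */
	return rem_bytes >= shdesc_len;
}
#endif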
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ctx->authsize,
			       is_rfc3686, nonce, ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_givenc:
	return 0;
}
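
/*
 * Compiled-out sketch of the CONTEXT1 IV offset rule applied above (and
 * again in ablkcipher_setkey() and init_authenc_job() below): plain
 * AES-CTR loads the IV at byte offset 16, i.e. CONTEXT1[255:128], and
 * RFC3686 additionally reserves CTR_RFC3686_NONCE_SIZE bytes for the
 * nonce in front of the IV.
 */
#if 0
static u32 example_ctx1_iv_off(bool ctr_mode, bool is_rfc3686)
{
	u32 off = 0;

	if (ctr_mode)
		off = 16;				/* CONTEXT1[255:128] = IV */
	if (is_rfc3686)
		off = 16 + CTR_RFC3686_NONCE_SIZE;	/* {NONCE, IV, COUNTER} */

	return off;
}
#endif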
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, DMA_TO_DEVICE);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

	ctx->cdata.keylen = keys.enckeylen;

	return aead_set_sh_desc(aead);

badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
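
/*
 * Layout of ctx->key after aead_setkey(), as a compiled-out sketch: the
 * split (ipad/opad) authentication key produced by gen_split_key() comes
 * first, padded to adata.keylen_pad, with the raw encryption key appended
 * behind it:
 *
 *	ctx->key: [ split auth key (keylen_pad bytes) | enc key (enckeylen) ]
 */
#if 0
static const u8 *example_enc_key(const struct caam_ctx *ctx)
{
	/* the encryption key lives right after the padded split key */
	return ctx->key + ctx->adata.keylen_pad;
}
#endif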
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);

	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);

	return rfc4543_set_sh_desc(aead);
}
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

	memcpy(ctx->key, key, keylen);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
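
/*
 * Compiled-out sketch of the rfc3686 key-material convention handled
 * above: the caller's key blob is {KEY, NONCE}, so after keylen is
 * trimmed by CTR_RFC3686_NONCE_SIZE the nonce still sits in ctx->key
 * immediately behind the AES key.
 */
#if 0
static const u8 *example_rfc3686_nonce(const struct caam_ctx *ctx)
{
	/* nonce follows the AES key in the copied key material */
	return ctx->key + ctx->cdata.keylen;
}
#endif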
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
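
/*
 * Both extended descriptors are carved out of a single contiguous
 * allocation; a compiled-out sketch of the size arithmetic used by the
 * *_edesc_alloc() helpers below:
 *
 *	[ struct *_edesc | h/w job descriptor (desc_bytes) | sec4 S/G table ]
 */
#if 0
static size_t example_aead_edesc_size(int desc_bytes, int sec4_sg_ents)
{
	return sizeof(struct aead_edesc) + desc_bytes +
	       sec4_sg_ents * sizeof(struct sec4_sg_entry);
}
#endif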
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
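
/*
 * Compiled-out sketch of the status mapping above: a CCB ICV-check
 * failure from the h/w is reported to the crypto API as -EBADMSG, like
 * any other AEAD authentication failure. The -EIO fallback below is an
 * illustrative assumption, not what this driver does (it passes other
 * statuses through unchanged).
 */
#if 0
static int example_map_jr_status(u32 err)
{
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		return -EBADMSG;

	return err ? -EIO : 0;	/* -EIO stands in for other h/w errors */
}
#endif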
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;

#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;

#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}
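
/*
 * Compiled-out sketch of the sequence lengths programmed above: input is
 * always assoclen + cryptlen, while output grows by authsize on encrypt
 * (the ICV is appended) and shrinks by authsize on decrypt (the ICV is
 * consumed by the h/w check).
 */
#if 0
static u32 example_aead_out_len(struct aead_request *req, int authsize,
				bool encrypt)
{
	return req->assoclen + req->cryptlen +
	       (encrypt ? authsize : -authsize);
}
#endif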
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes=%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
	dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (edesc->src_nents == 1 && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
		       CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
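
/*
 * Compiled-out sketch of the submission contract used by the request
 * functions in this file: caam_jr_enqueue() returning 0 means the job
 * was accepted and the done callback will complete the request, so the
 * crypto API sees -EINPROGRESS; any other return means the job never
 * reached the ring and the edesc must be unmapped and freed here.
 */
#if 0
static int example_submit(struct device *jrdev, struct aead_edesc *edesc,
			  struct aead_request *req)
{
	int ret = caam_jr_enqueue(jrdev, edesc->hw_desc, aead_encrypt_done,
				  req);

	if (!ret)
		return -EINPROGRESS;	/* completion arrives via the callback */

	aead_unmap(jrdev, edesc, req);
	kfree(edesc);
	return ret;
}
#endif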
static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}
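
/*
 * aead_decrypt() mirrors aead_encrypt() but seeds the job from the authenc
 * decrypt shared descriptor and completes through aead_decrypt_done(),
 * where authentication (ICV) failures are reported.
 */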
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

#ifdef DEBUG
	dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    req->assoclen + req->cryptlen, 1);
#endif

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		      GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}
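
	/*
	 * If the IV buffer happens to sit immediately before a single,
	 * contiguous mapped source segment, the hardware can consume
	 * IV + payload as one flat input and the input S/G table can be
	 * skipped entirely; otherwise one extra table entry is reserved
	 * for the IV ahead of the source entries.
	 */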
	if (mapped_src_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->src)) {
		in_contig = true;
		sec4_sg_ents = 0;
	} else {
		in_contig = false;
		sec4_sg_ents = 1 + mapped_src_nents;
	}
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (!in_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + 1, 0);
	}

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = in_contig;
	return edesc;
}
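
/*
 * ablkcipher_encrypt() and ablkcipher_decrypt() differ only in which shared
 * descriptor (sh_desc_enc vs. sh_desc_dec) seeds the job descriptor and in
 * the completion callback; the edesc construction above is common to both.
 */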
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_dec,
			    ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		      GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool out_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = sec4_sg_ents;
	if (mapped_dst_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->dst)) {
		out_contig = true;
	} else {
		out_contig = false;
		sec4_sg_ents += 1 + mapped_dst_nents;
	}

	/* allocate space for base edesc and hw desc commands, link tables */
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (mapped_src_nents > 1)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
				   0);

	if (!out_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
				   iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx + 1, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = out_contig;
	return edesc;
}
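
/*
 * ablkcipher_givencrypt() runs the givencrypt shared descriptor, which
 * produces the IV as part of the job; the IV is returned through
 * greq->giv, which is linked in ahead of the destination data above.
 */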
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig = false;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
					   CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
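
/*
 * Template describing each legacy (ablkcipher/givcipher) algorithm exposed
 * by this driver; a crypto_alg wrapper is instantiated per entry when the
 * module registers its algorithms.
 */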
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher

struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
};
static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
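
/*
 * AEAD algorithms use the modern aead_alg interface and are registered
 * directly from this table; the .caam member carries the CAAM-specific
 * class 1/2 algorithm selectors used when building the shared descriptors.
 */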
  1705. static struct caam_aead_alg driver_aeads[] = {
  1706. {
  1707. .aead = {
  1708. .base = {
  1709. .cra_name = "rfc4106(gcm(aes))",
  1710. .cra_driver_name = "rfc4106-gcm-aes-caam",
  1711. .cra_blocksize = 1,
  1712. },
  1713. .setkey = rfc4106_setkey,
  1714. .setauthsize = rfc4106_setauthsize,
  1715. .encrypt = ipsec_gcm_encrypt,
  1716. .decrypt = ipsec_gcm_decrypt,
  1717. .ivsize = 8,
  1718. .maxauthsize = AES_BLOCK_SIZE,
  1719. },
  1720. .caam = {
  1721. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1722. },
  1723. },
  1724. {
  1725. .aead = {
  1726. .base = {
  1727. .cra_name = "rfc4543(gcm(aes))",
  1728. .cra_driver_name = "rfc4543-gcm-aes-caam",
  1729. .cra_blocksize = 1,
  1730. },
  1731. .setkey = rfc4543_setkey,
  1732. .setauthsize = rfc4543_setauthsize,
  1733. .encrypt = ipsec_gcm_encrypt,
  1734. .decrypt = ipsec_gcm_decrypt,
  1735. .ivsize = 8,
  1736. .maxauthsize = AES_BLOCK_SIZE,
  1737. },
  1738. .caam = {
  1739. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1740. },
  1741. },
  1742. /* Galois Counter Mode */
  1743. {
  1744. .aead = {
  1745. .base = {
  1746. .cra_name = "gcm(aes)",
  1747. .cra_driver_name = "gcm-aes-caam",
  1748. .cra_blocksize = 1,
  1749. },
  1750. .setkey = gcm_setkey,
  1751. .setauthsize = gcm_setauthsize,
  1752. .encrypt = gcm_encrypt,
  1753. .decrypt = gcm_decrypt,
  1754. .ivsize = 12,
  1755. .maxauthsize = AES_BLOCK_SIZE,
  1756. },
  1757. .caam = {
  1758. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1759. },
  1760. },
  1761. /* single-pass ipsec_esp descriptor */
  1762. {
  1763. .aead = {
  1764. .base = {
  1765. .cra_name = "authenc(hmac(md5),"
  1766. "ecb(cipher_null))",
  1767. .cra_driver_name = "authenc-hmac-md5-"
  1768. "ecb-cipher_null-caam",
  1769. .cra_blocksize = NULL_BLOCK_SIZE,
  1770. },
  1771. .setkey = aead_setkey,
  1772. .setauthsize = aead_setauthsize,
  1773. .encrypt = aead_encrypt,
  1774. .decrypt = aead_decrypt,
  1775. .ivsize = NULL_IV_SIZE,
  1776. .maxauthsize = MD5_DIGEST_SIZE,
  1777. },
  1778. .caam = {
  1779. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1780. OP_ALG_AAI_HMAC_PRECOMP,
  1781. },
  1782. },
  1783. {
  1784. .aead = {
  1785. .base = {
  1786. .cra_name = "authenc(hmac(sha1),"
  1787. "ecb(cipher_null))",
  1788. .cra_driver_name = "authenc-hmac-sha1-"
  1789. "ecb-cipher_null-caam",
  1790. .cra_blocksize = NULL_BLOCK_SIZE,
  1791. },
  1792. .setkey = aead_setkey,
  1793. .setauthsize = aead_setauthsize,
  1794. .encrypt = aead_encrypt,
  1795. .decrypt = aead_decrypt,
  1796. .ivsize = NULL_IV_SIZE,
  1797. .maxauthsize = SHA1_DIGEST_SIZE,
  1798. },
  1799. .caam = {
  1800. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1801. OP_ALG_AAI_HMAC_PRECOMP,
  1802. },
  1803. },
  1804. {
  1805. .aead = {
  1806. .base = {
  1807. .cra_name = "authenc(hmac(sha224),"
  1808. "ecb(cipher_null))",
  1809. .cra_driver_name = "authenc-hmac-sha224-"
  1810. "ecb-cipher_null-caam",
  1811. .cra_blocksize = NULL_BLOCK_SIZE,
  1812. },
  1813. .setkey = aead_setkey,
  1814. .setauthsize = aead_setauthsize,
  1815. .encrypt = aead_encrypt,
  1816. .decrypt = aead_decrypt,
  1817. .ivsize = NULL_IV_SIZE,
  1818. .maxauthsize = SHA224_DIGEST_SIZE,
  1819. },
  1820. .caam = {
  1821. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1822. OP_ALG_AAI_HMAC_PRECOMP,
  1823. },
  1824. },
  1825. {
  1826. .aead = {
  1827. .base = {
  1828. .cra_name = "authenc(hmac(sha256),"
  1829. "ecb(cipher_null))",
  1830. .cra_driver_name = "authenc-hmac-sha256-"
  1831. "ecb-cipher_null-caam",
  1832. .cra_blocksize = NULL_BLOCK_SIZE,
  1833. },
  1834. .setkey = aead_setkey,
  1835. .setauthsize = aead_setauthsize,
  1836. .encrypt = aead_encrypt,
  1837. .decrypt = aead_decrypt,
  1838. .ivsize = NULL_IV_SIZE,
  1839. .maxauthsize = SHA256_DIGEST_SIZE,
  1840. },
  1841. .caam = {
  1842. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  1843. OP_ALG_AAI_HMAC_PRECOMP,
  1844. },
  1845. },
  1846. {
  1847. .aead = {
  1848. .base = {
  1849. .cra_name = "authenc(hmac(sha384),"
  1850. "ecb(cipher_null))",
  1851. .cra_driver_name = "authenc-hmac-sha384-"
  1852. "ecb-cipher_null-caam",
  1853. .cra_blocksize = NULL_BLOCK_SIZE,
  1854. },
  1855. .setkey = aead_setkey,
  1856. .setauthsize = aead_setauthsize,
  1857. .encrypt = aead_encrypt,
  1858. .decrypt = aead_decrypt,
  1859. .ivsize = NULL_IV_SIZE,
  1860. .maxauthsize = SHA384_DIGEST_SIZE,
  1861. },
  1862. .caam = {
  1863. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  1864. OP_ALG_AAI_HMAC_PRECOMP,
  1865. },
  1866. },
  1867. {
  1868. .aead = {
  1869. .base = {
  1870. .cra_name = "authenc(hmac(sha512),"
  1871. "ecb(cipher_null))",
  1872. .cra_driver_name = "authenc-hmac-sha512-"
  1873. "ecb-cipher_null-caam",
  1874. .cra_blocksize = NULL_BLOCK_SIZE,
  1875. },
  1876. .setkey = aead_setkey,
  1877. .setauthsize = aead_setauthsize,
  1878. .encrypt = aead_encrypt,
  1879. .decrypt = aead_decrypt,
  1880. .ivsize = NULL_IV_SIZE,
  1881. .maxauthsize = SHA512_DIGEST_SIZE,
  1882. },
  1883. .caam = {
  1884. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  1885. OP_ALG_AAI_HMAC_PRECOMP,
  1886. },
  1887. },
  1888. {
  1889. .aead = {
  1890. .base = {
  1891. .cra_name = "authenc(hmac(md5),cbc(aes))",
  1892. .cra_driver_name = "authenc-hmac-md5-"
  1893. "cbc-aes-caam",
  1894. .cra_blocksize = AES_BLOCK_SIZE,
  1895. },
  1896. .setkey = aead_setkey,
  1897. .setauthsize = aead_setauthsize,
  1898. .encrypt = aead_encrypt,
  1899. .decrypt = aead_decrypt,
  1900. .ivsize = AES_BLOCK_SIZE,
  1901. .maxauthsize = MD5_DIGEST_SIZE,
  1902. },
  1903. .caam = {
  1904. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1905. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1906. OP_ALG_AAI_HMAC_PRECOMP,
  1907. },
  1908. },
  1909. {
  1910. .aead = {
  1911. .base = {
  1912. .cra_name = "echainiv(authenc(hmac(md5),"
  1913. "cbc(aes)))",
  1914. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  1915. "cbc-aes-caam",
  1916. .cra_blocksize = AES_BLOCK_SIZE,
  1917. },
  1918. .setkey = aead_setkey,
  1919. .setauthsize = aead_setauthsize,
  1920. .encrypt = aead_encrypt,
  1921. .decrypt = aead_decrypt,
  1922. .ivsize = AES_BLOCK_SIZE,
  1923. .maxauthsize = MD5_DIGEST_SIZE,
  1924. },
  1925. .caam = {
  1926. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1927. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1928. OP_ALG_AAI_HMAC_PRECOMP,
  1929. .geniv = true,
  1930. },
  1931. },
  1932. {
  1933. .aead = {
  1934. .base = {
  1935. .cra_name = "authenc(hmac(sha1),cbc(aes))",
  1936. .cra_driver_name = "authenc-hmac-sha1-"
  1937. "cbc-aes-caam",
  1938. .cra_blocksize = AES_BLOCK_SIZE,
  1939. },
  1940. .setkey = aead_setkey,
  1941. .setauthsize = aead_setauthsize,
  1942. .encrypt = aead_encrypt,
  1943. .decrypt = aead_decrypt,
  1944. .ivsize = AES_BLOCK_SIZE,
  1945. .maxauthsize = SHA1_DIGEST_SIZE,
  1946. },
  1947. .caam = {
  1948. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1949. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1950. OP_ALG_AAI_HMAC_PRECOMP,
  1951. },
  1952. },
  1953. {
  1954. .aead = {
  1955. .base = {
  1956. .cra_name = "echainiv(authenc(hmac(sha1),"
  1957. "cbc(aes)))",
  1958. .cra_driver_name = "echainiv-authenc-"
  1959. "hmac-sha1-cbc-aes-caam",
  1960. .cra_blocksize = AES_BLOCK_SIZE,
  1961. },
  1962. .setkey = aead_setkey,
  1963. .setauthsize = aead_setauthsize,
  1964. .encrypt = aead_encrypt,
  1965. .decrypt = aead_decrypt,
  1966. .ivsize = AES_BLOCK_SIZE,
  1967. .maxauthsize = SHA1_DIGEST_SIZE,
  1968. },
  1969. .caam = {
  1970. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1971. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1972. OP_ALG_AAI_HMAC_PRECOMP,
  1973. .geniv = true,
  1974. },
  1975. },
  1976. {
  1977. .aead = {
  1978. .base = {
  1979. .cra_name = "authenc(hmac(sha224),cbc(aes))",
  1980. .cra_driver_name = "authenc-hmac-sha224-"
  1981. "cbc-aes-caam",
  1982. .cra_blocksize = AES_BLOCK_SIZE,
  1983. },
  1984. .setkey = aead_setkey,
  1985. .setauthsize = aead_setauthsize,
  1986. .encrypt = aead_encrypt,
  1987. .decrypt = aead_decrypt,
  1988. .ivsize = AES_BLOCK_SIZE,
  1989. .maxauthsize = SHA224_DIGEST_SIZE,
  1990. },
  1991. .caam = {
  1992. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1993. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1994. OP_ALG_AAI_HMAC_PRECOMP,
  1995. },
  1996. },
  1997. {
  1998. .aead = {
  1999. .base = {
  2000. .cra_name = "echainiv(authenc(hmac(sha224),"
  2001. "cbc(aes)))",
  2002. .cra_driver_name = "echainiv-authenc-"
  2003. "hmac-sha224-cbc-aes-caam",
  2004. .cra_blocksize = AES_BLOCK_SIZE,
  2005. },
  2006. .setkey = aead_setkey,
  2007. .setauthsize = aead_setauthsize,
  2008. .encrypt = aead_encrypt,
  2009. .decrypt = aead_decrypt,
  2010. .ivsize = AES_BLOCK_SIZE,
  2011. .maxauthsize = SHA224_DIGEST_SIZE,
  2012. },
  2013. .caam = {
  2014. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2015. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2016. OP_ALG_AAI_HMAC_PRECOMP,
  2017. .geniv = true,
  2018. },
  2019. },
  2020. {
  2021. .aead = {
  2022. .base = {
  2023. .cra_name = "authenc(hmac(sha256),cbc(aes))",
  2024. .cra_driver_name = "authenc-hmac-sha256-"
  2025. "cbc-aes-caam",
  2026. .cra_blocksize = AES_BLOCK_SIZE,
  2027. },
  2028. .setkey = aead_setkey,
  2029. .setauthsize = aead_setauthsize,
  2030. .encrypt = aead_encrypt,
  2031. .decrypt = aead_decrypt,
  2032. .ivsize = AES_BLOCK_SIZE,
  2033. .maxauthsize = SHA256_DIGEST_SIZE,
  2034. },
  2035. .caam = {
  2036. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2037. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2038. OP_ALG_AAI_HMAC_PRECOMP,
  2039. },
  2040. },
  2041. {
  2042. .aead = {
  2043. .base = {
  2044. .cra_name = "echainiv(authenc(hmac(sha256),"
  2045. "cbc(aes)))",
  2046. .cra_driver_name = "echainiv-authenc-"
  2047. "hmac-sha256-cbc-aes-caam",
  2048. .cra_blocksize = AES_BLOCK_SIZE,
  2049. },
  2050. .setkey = aead_setkey,
  2051. .setauthsize = aead_setauthsize,
  2052. .encrypt = aead_encrypt,
  2053. .decrypt = aead_decrypt,
  2054. .ivsize = AES_BLOCK_SIZE,
  2055. .maxauthsize = SHA256_DIGEST_SIZE,
  2056. },
  2057. .caam = {
  2058. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2059. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2060. OP_ALG_AAI_HMAC_PRECOMP,
  2061. .geniv = true,
  2062. },
  2063. },
  2064. {
  2065. .aead = {
  2066. .base = {
  2067. .cra_name = "authenc(hmac(sha384),cbc(aes))",
  2068. .cra_driver_name = "authenc-hmac-sha384-"
  2069. "cbc-aes-caam",
  2070. .cra_blocksize = AES_BLOCK_SIZE,
  2071. },
  2072. .setkey = aead_setkey,
  2073. .setauthsize = aead_setauthsize,
  2074. .encrypt = aead_encrypt,
  2075. .decrypt = aead_decrypt,
  2076. .ivsize = AES_BLOCK_SIZE,
  2077. .maxauthsize = SHA384_DIGEST_SIZE,
  2078. },
  2079. .caam = {
  2080. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2081. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2082. OP_ALG_AAI_HMAC_PRECOMP,
  2083. },
  2084. },
  2085. {
  2086. .aead = {
  2087. .base = {
  2088. .cra_name = "echainiv(authenc(hmac(sha384),"
  2089. "cbc(aes)))",
  2090. .cra_driver_name = "echainiv-authenc-"
  2091. "hmac-sha384-cbc-aes-caam",
  2092. .cra_blocksize = AES_BLOCK_SIZE,
  2093. },
  2094. .setkey = aead_setkey,
  2095. .setauthsize = aead_setauthsize,
  2096. .encrypt = aead_encrypt,
  2097. .decrypt = aead_decrypt,
  2098. .ivsize = AES_BLOCK_SIZE,
  2099. .maxauthsize = SHA384_DIGEST_SIZE,
  2100. },
  2101. .caam = {
  2102. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2103. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2104. OP_ALG_AAI_HMAC_PRECOMP,
  2105. .geniv = true,
  2106. },
  2107. },
  2108. {
  2109. .aead = {
  2110. .base = {
  2111. .cra_name = "authenc(hmac(sha512),cbc(aes))",
  2112. .cra_driver_name = "authenc-hmac-sha512-"
  2113. "cbc-aes-caam",
  2114. .cra_blocksize = AES_BLOCK_SIZE,
  2115. },
  2116. .setkey = aead_setkey,
  2117. .setauthsize = aead_setauthsize,
  2118. .encrypt = aead_encrypt,
  2119. .decrypt = aead_decrypt,
  2120. .ivsize = AES_BLOCK_SIZE,
  2121. .maxauthsize = SHA512_DIGEST_SIZE,
  2122. },
  2123. .caam = {
  2124. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2125. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2126. OP_ALG_AAI_HMAC_PRECOMP,
  2127. },
  2128. },
  2129. {
  2130. .aead = {
  2131. .base = {
  2132. .cra_name = "echainiv(authenc(hmac(sha512),"
  2133. "cbc(aes)))",
  2134. .cra_driver_name = "echainiv-authenc-"
  2135. "hmac-sha512-cbc-aes-caam",
  2136. .cra_blocksize = AES_BLOCK_SIZE,
  2137. },
  2138. .setkey = aead_setkey,
  2139. .setauthsize = aead_setauthsize,
  2140. .encrypt = aead_encrypt,
  2141. .decrypt = aead_decrypt,
  2142. .ivsize = AES_BLOCK_SIZE,
  2143. .maxauthsize = SHA512_DIGEST_SIZE,
  2144. },
  2145. .caam = {
  2146. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2147. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2148. OP_ALG_AAI_HMAC_PRECOMP,
  2149. .geniv = true,
  2150. },
  2151. },
  2152. {
  2153. .aead = {
  2154. .base = {
  2155. .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
  2156. .cra_driver_name = "authenc-hmac-md5-"
  2157. "cbc-des3_ede-caam",
  2158. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2159. },
  2160. .setkey = aead_setkey,
  2161. .setauthsize = aead_setauthsize,
  2162. .encrypt = aead_encrypt,
  2163. .decrypt = aead_decrypt,
  2164. .ivsize = DES3_EDE_BLOCK_SIZE,
  2165. .maxauthsize = MD5_DIGEST_SIZE,
  2166. },
  2167. .caam = {
  2168. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2169. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2170. OP_ALG_AAI_HMAC_PRECOMP,
  2171. }
  2172. },
  2173. {
  2174. .aead = {
  2175. .base = {
  2176. .cra_name = "echainiv(authenc(hmac(md5),"
  2177. "cbc(des3_ede)))",
  2178. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  2179. "cbc-des3_ede-caam",
  2180. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2181. },
  2182. .setkey = aead_setkey,
  2183. .setauthsize = aead_setauthsize,
  2184. .encrypt = aead_encrypt,
  2185. .decrypt = aead_decrypt,
  2186. .ivsize = DES3_EDE_BLOCK_SIZE,
  2187. .maxauthsize = MD5_DIGEST_SIZE,
  2188. },
  2189. .caam = {
  2190. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2191. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2192. OP_ALG_AAI_HMAC_PRECOMP,
  2193. .geniv = true,
  2194. }
  2195. },
  2196. {
  2197. .aead = {
  2198. .base = {
  2199. .cra_name = "authenc(hmac(sha1),"
  2200. "cbc(des3_ede))",
  2201. .cra_driver_name = "authenc-hmac-sha1-"
  2202. "cbc-des3_ede-caam",
  2203. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2204. },
  2205. .setkey = aead_setkey,
  2206. .setauthsize = aead_setauthsize,
  2207. .encrypt = aead_encrypt,
  2208. .decrypt = aead_decrypt,
  2209. .ivsize = DES3_EDE_BLOCK_SIZE,
  2210. .maxauthsize = SHA1_DIGEST_SIZE,
  2211. },
  2212. .caam = {
  2213. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2214. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2215. OP_ALG_AAI_HMAC_PRECOMP,
  2216. },
  2217. },
  2218. {
  2219. .aead = {
  2220. .base = {
  2221. .cra_name = "echainiv(authenc(hmac(sha1),"
  2222. "cbc(des3_ede)))",
  2223. .cra_driver_name = "echainiv-authenc-"
  2224. "hmac-sha1-"
  2225. "cbc-des3_ede-caam",
  2226. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2227. },
  2228. .setkey = aead_setkey,
  2229. .setauthsize = aead_setauthsize,
  2230. .encrypt = aead_encrypt,
  2231. .decrypt = aead_decrypt,
  2232. .ivsize = DES3_EDE_BLOCK_SIZE,
  2233. .maxauthsize = SHA1_DIGEST_SIZE,
  2234. },
  2235. .caam = {
  2236. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2237. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2238. OP_ALG_AAI_HMAC_PRECOMP,
  2239. .geniv = true,
  2240. },
  2241. },
  2242. {
  2243. .aead = {
  2244. .base = {
  2245. .cra_name = "authenc(hmac(sha224),"
  2246. "cbc(des3_ede))",
  2247. .cra_driver_name = "authenc-hmac-sha224-"
  2248. "cbc-des3_ede-caam",
  2249. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2250. },
  2251. .setkey = aead_setkey,
  2252. .setauthsize = aead_setauthsize,
  2253. .encrypt = aead_encrypt,
  2254. .decrypt = aead_decrypt,
  2255. .ivsize = DES3_EDE_BLOCK_SIZE,
  2256. .maxauthsize = SHA224_DIGEST_SIZE,
  2257. },
  2258. .caam = {
  2259. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2260. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2261. OP_ALG_AAI_HMAC_PRECOMP,
  2262. },
  2263. },
  2264. {
  2265. .aead = {
  2266. .base = {
  2267. .cra_name = "echainiv(authenc(hmac(sha224),"
  2268. "cbc(des3_ede)))",
  2269. .cra_driver_name = "echainiv-authenc-"
  2270. "hmac-sha224-"
  2271. "cbc-des3_ede-caam",
  2272. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2273. },
  2274. .setkey = aead_setkey,
  2275. .setauthsize = aead_setauthsize,
  2276. .encrypt = aead_encrypt,
  2277. .decrypt = aead_decrypt,
  2278. .ivsize = DES3_EDE_BLOCK_SIZE,
  2279. .maxauthsize = SHA224_DIGEST_SIZE,
  2280. },
  2281. .caam = {
  2282. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2283. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2284. OP_ALG_AAI_HMAC_PRECOMP,
  2285. .geniv = true,
  2286. },
  2287. },
  2288. {
  2289. .aead = {
  2290. .base = {
  2291. .cra_name = "authenc(hmac(sha256),"
  2292. "cbc(des3_ede))",
  2293. .cra_driver_name = "authenc-hmac-sha256-"
  2294. "cbc-des3_ede-caam",
  2295. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2296. },
  2297. .setkey = aead_setkey,
  2298. .setauthsize = aead_setauthsize,
  2299. .encrypt = aead_encrypt,
  2300. .decrypt = aead_decrypt,
  2301. .ivsize = DES3_EDE_BLOCK_SIZE,
  2302. .maxauthsize = SHA256_DIGEST_SIZE,
  2303. },
  2304. .caam = {
  2305. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2306. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2307. OP_ALG_AAI_HMAC_PRECOMP,
  2308. },
  2309. },
  2310. {
  2311. .aead = {
  2312. .base = {
  2313. .cra_name = "echainiv(authenc(hmac(sha256),"
  2314. "cbc(des3_ede)))",
  2315. .cra_driver_name = "echainiv-authenc-"
  2316. "hmac-sha256-"
  2317. "cbc-des3_ede-caam",
  2318. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2319. },
  2320. .setkey = aead_setkey,
  2321. .setauthsize = aead_setauthsize,
  2322. .encrypt = aead_encrypt,
  2323. .decrypt = aead_decrypt,
  2324. .ivsize = DES3_EDE_BLOCK_SIZE,
  2325. .maxauthsize = SHA256_DIGEST_SIZE,
  2326. },
  2327. .caam = {
  2328. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2329. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2330. OP_ALG_AAI_HMAC_PRECOMP,
  2331. .geniv = true,
  2332. },
  2333. },
  2334. {
  2335. .aead = {
  2336. .base = {
  2337. .cra_name = "authenc(hmac(sha384),"
  2338. "cbc(des3_ede))",
  2339. .cra_driver_name = "authenc-hmac-sha384-"
  2340. "cbc-des3_ede-caam",
  2341. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2342. },
  2343. .setkey = aead_setkey,
  2344. .setauthsize = aead_setauthsize,
  2345. .encrypt = aead_encrypt,
  2346. .decrypt = aead_decrypt,
  2347. .ivsize = DES3_EDE_BLOCK_SIZE,
  2348. .maxauthsize = SHA384_DIGEST_SIZE,
  2349. },
  2350. .caam = {
  2351. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2352. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2353. OP_ALG_AAI_HMAC_PRECOMP,
  2354. },
  2355. },
  2356. {
  2357. .aead = {
  2358. .base = {
  2359. .cra_name = "echainiv(authenc(hmac(sha384),"
  2360. "cbc(des3_ede)))",
  2361. .cra_driver_name = "echainiv-authenc-"
  2362. "hmac-sha384-"
  2363. "cbc-des3_ede-caam",
  2364. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2365. },
  2366. .setkey = aead_setkey,
  2367. .setauthsize = aead_setauthsize,
  2368. .encrypt = aead_encrypt,
  2369. .decrypt = aead_decrypt,
  2370. .ivsize = DES3_EDE_BLOCK_SIZE,
  2371. .maxauthsize = SHA384_DIGEST_SIZE,
  2372. },
  2373. .caam = {
  2374. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2375. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2376. OP_ALG_AAI_HMAC_PRECOMP,
  2377. .geniv = true,
  2378. },
  2379. },
  2380. {
  2381. .aead = {
  2382. .base = {
  2383. .cra_name = "authenc(hmac(sha512),"
  2384. "cbc(des3_ede))",
  2385. .cra_driver_name = "authenc-hmac-sha512-"
  2386. "cbc-des3_ede-caam",
  2387. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2388. },
  2389. .setkey = aead_setkey,
  2390. .setauthsize = aead_setauthsize,
  2391. .encrypt = aead_encrypt,
  2392. .decrypt = aead_decrypt,
  2393. .ivsize = DES3_EDE_BLOCK_SIZE,
  2394. .maxauthsize = SHA512_DIGEST_SIZE,
  2395. },
  2396. .caam = {
  2397. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2398. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2399. OP_ALG_AAI_HMAC_PRECOMP,
  2400. },
  2401. },
  2402. {
  2403. .aead = {
  2404. .base = {
  2405. .cra_name = "echainiv(authenc(hmac(sha512),"
  2406. "cbc(des3_ede)))",
  2407. .cra_driver_name = "echainiv-authenc-"
  2408. "hmac-sha512-"
  2409. "cbc-des3_ede-caam",
  2410. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2411. },
  2412. .setkey = aead_setkey,
  2413. .setauthsize = aead_setauthsize,
  2414. .encrypt = aead_encrypt,
  2415. .decrypt = aead_decrypt,
  2416. .ivsize = DES3_EDE_BLOCK_SIZE,
  2417. .maxauthsize = SHA512_DIGEST_SIZE,
  2418. },
  2419. .caam = {
  2420. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2421. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2422. OP_ALG_AAI_HMAC_PRECOMP,
  2423. .geniv = true,
  2424. },
  2425. },
  2426. {
  2427. .aead = {
  2428. .base = {
  2429. .cra_name = "authenc(hmac(md5),cbc(des))",
  2430. .cra_driver_name = "authenc-hmac-md5-"
  2431. "cbc-des-caam",
  2432. .cra_blocksize = DES_BLOCK_SIZE,
  2433. },
  2434. .setkey = aead_setkey,
  2435. .setauthsize = aead_setauthsize,
  2436. .encrypt = aead_encrypt,
  2437. .decrypt = aead_decrypt,
  2438. .ivsize = DES_BLOCK_SIZE,
  2439. .maxauthsize = MD5_DIGEST_SIZE,
  2440. },
  2441. .caam = {
  2442. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2443. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2444. OP_ALG_AAI_HMAC_PRECOMP,
  2445. },
  2446. },
  2447. {
  2448. .aead = {
  2449. .base = {
  2450. .cra_name = "echainiv(authenc(hmac(md5),"
  2451. "cbc(des)))",
  2452. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  2453. "cbc-des-caam",
  2454. .cra_blocksize = DES_BLOCK_SIZE,
  2455. },
  2456. .setkey = aead_setkey,
  2457. .setauthsize = aead_setauthsize,
  2458. .encrypt = aead_encrypt,
  2459. .decrypt = aead_decrypt,
  2460. .ivsize = DES_BLOCK_SIZE,
  2461. .maxauthsize = MD5_DIGEST_SIZE,
  2462. },
  2463. .caam = {
  2464. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2465. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2466. OP_ALG_AAI_HMAC_PRECOMP,
  2467. .geniv = true,
  2468. },
  2469. },
  2470. {
  2471. .aead = {
  2472. .base = {
  2473. .cra_name = "authenc(hmac(sha1),cbc(des))",
  2474. .cra_driver_name = "authenc-hmac-sha1-"
  2475. "cbc-des-caam",
  2476. .cra_blocksize = DES_BLOCK_SIZE,
  2477. },
  2478. .setkey = aead_setkey,
  2479. .setauthsize = aead_setauthsize,
  2480. .encrypt = aead_encrypt,
  2481. .decrypt = aead_decrypt,
  2482. .ivsize = DES_BLOCK_SIZE,
  2483. .maxauthsize = SHA1_DIGEST_SIZE,
  2484. },
  2485. .caam = {
  2486. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2487. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2488. OP_ALG_AAI_HMAC_PRECOMP,
  2489. },
  2490. },
  2491. {
  2492. .aead = {
  2493. .base = {
  2494. .cra_name = "echainiv(authenc(hmac(sha1),"
  2495. "cbc(des)))",
  2496. .cra_driver_name = "echainiv-authenc-"
  2497. "hmac-sha1-cbc-des-caam",
  2498. .cra_blocksize = DES_BLOCK_SIZE,
  2499. },
  2500. .setkey = aead_setkey,
  2501. .setauthsize = aead_setauthsize,
  2502. .encrypt = aead_encrypt,
  2503. .decrypt = aead_decrypt,
  2504. .ivsize = DES_BLOCK_SIZE,
  2505. .maxauthsize = SHA1_DIGEST_SIZE,
  2506. },
  2507. .caam = {
  2508. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2509. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2510. OP_ALG_AAI_HMAC_PRECOMP,
  2511. .geniv = true,
  2512. },
  2513. },
  2514. {
  2515. .aead = {
  2516. .base = {
  2517. .cra_name = "authenc(hmac(sha224),cbc(des))",
  2518. .cra_driver_name = "authenc-hmac-sha224-"
  2519. "cbc-des-caam",
  2520. .cra_blocksize = DES_BLOCK_SIZE,
  2521. },
  2522. .setkey = aead_setkey,
  2523. .setauthsize = aead_setauthsize,
  2524. .encrypt = aead_encrypt,
  2525. .decrypt = aead_decrypt,
  2526. .ivsize = DES_BLOCK_SIZE,
  2527. .maxauthsize = SHA224_DIGEST_SIZE,
  2528. },
  2529. .caam = {
  2530. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2531. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2532. OP_ALG_AAI_HMAC_PRECOMP,
  2533. },
  2534. },
  2535. {
  2536. .aead = {
  2537. .base = {
  2538. .cra_name = "echainiv(authenc(hmac(sha224),"
  2539. "cbc(des)))",
  2540. .cra_driver_name = "echainiv-authenc-"
  2541. "hmac-sha224-cbc-des-caam",
  2542. .cra_blocksize = DES_BLOCK_SIZE,
  2543. },
  2544. .setkey = aead_setkey,
  2545. .setauthsize = aead_setauthsize,
  2546. .encrypt = aead_encrypt,
  2547. .decrypt = aead_decrypt,
  2548. .ivsize = DES_BLOCK_SIZE,
  2549. .maxauthsize = SHA224_DIGEST_SIZE,
  2550. },
  2551. .caam = {
  2552. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2553. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2554. OP_ALG_AAI_HMAC_PRECOMP,
  2555. .geniv = true,
  2556. },
  2557. },
  2558. {
  2559. .aead = {
  2560. .base = {
  2561. .cra_name = "authenc(hmac(sha256),cbc(des))",
  2562. .cra_driver_name = "authenc-hmac-sha256-"
  2563. "cbc-des-caam",
  2564. .cra_blocksize = DES_BLOCK_SIZE,
  2565. },
  2566. .setkey = aead_setkey,
  2567. .setauthsize = aead_setauthsize,
  2568. .encrypt = aead_encrypt,
  2569. .decrypt = aead_decrypt,
  2570. .ivsize = DES_BLOCK_SIZE,
  2571. .maxauthsize = SHA256_DIGEST_SIZE,
  2572. },
  2573. .caam = {
  2574. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2575. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2576. OP_ALG_AAI_HMAC_PRECOMP,
  2577. },
  2578. },
  2579. {
  2580. .aead = {
  2581. .base = {
  2582. .cra_name = "echainiv(authenc(hmac(sha256),"
  2583. "cbc(des)))",
  2584. .cra_driver_name = "echainiv-authenc-"
  2585. "hmac-sha256-cbc-des-caam",
  2586. .cra_blocksize = DES_BLOCK_SIZE,
  2587. },
  2588. .setkey = aead_setkey,
  2589. .setauthsize = aead_setauthsize,
  2590. .encrypt = aead_encrypt,
  2591. .decrypt = aead_decrypt,
  2592. .ivsize = DES_BLOCK_SIZE,
  2593. .maxauthsize = SHA256_DIGEST_SIZE,
  2594. },
  2595. .caam = {
  2596. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2597. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2598. OP_ALG_AAI_HMAC_PRECOMP,
  2599. .geniv = true,
  2600. },
  2601. },
  2602. {
  2603. .aead = {
  2604. .base = {
  2605. .cra_name = "authenc(hmac(sha384),cbc(des))",
  2606. .cra_driver_name = "authenc-hmac-sha384-"
  2607. "cbc-des-caam",
  2608. .cra_blocksize = DES_BLOCK_SIZE,
  2609. },
  2610. .setkey = aead_setkey,
  2611. .setauthsize = aead_setauthsize,
  2612. .encrypt = aead_encrypt,
  2613. .decrypt = aead_decrypt,
  2614. .ivsize = DES_BLOCK_SIZE,
  2615. .maxauthsize = SHA384_DIGEST_SIZE,
  2616. },
  2617. .caam = {
  2618. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2619. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2620. OP_ALG_AAI_HMAC_PRECOMP,
  2621. },
  2622. },
  2623. {
  2624. .aead = {
  2625. .base = {
  2626. .cra_name = "echainiv(authenc(hmac(sha384),"
  2627. "cbc(des)))",
  2628. .cra_driver_name = "echainiv-authenc-"
  2629. "hmac-sha384-cbc-des-caam",
  2630. .cra_blocksize = DES_BLOCK_SIZE,
  2631. },
  2632. .setkey = aead_setkey,
  2633. .setauthsize = aead_setauthsize,
  2634. .encrypt = aead_encrypt,
  2635. .decrypt = aead_decrypt,
  2636. .ivsize = DES_BLOCK_SIZE,
  2637. .maxauthsize = SHA384_DIGEST_SIZE,
  2638. },
  2639. .caam = {
  2640. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2641. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2642. OP_ALG_AAI_HMAC_PRECOMP,
  2643. .geniv = true,
  2644. },
  2645. },
  2646. {
  2647. .aead = {
  2648. .base = {
  2649. .cra_name = "authenc(hmac(sha512),cbc(des))",
  2650. .cra_driver_name = "authenc-hmac-sha512-"
  2651. "cbc-des-caam",
  2652. .cra_blocksize = DES_BLOCK_SIZE,
  2653. },
  2654. .setkey = aead_setkey,
  2655. .setauthsize = aead_setauthsize,
  2656. .encrypt = aead_encrypt,
  2657. .decrypt = aead_decrypt,
  2658. .ivsize = DES_BLOCK_SIZE,
  2659. .maxauthsize = SHA512_DIGEST_SIZE,
  2660. },
  2661. .caam = {
  2662. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2663. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2664. OP_ALG_AAI_HMAC_PRECOMP,
  2665. },
  2666. },
  2667. {
  2668. .aead = {
  2669. .base = {
  2670. .cra_name = "echainiv(authenc(hmac(sha512),"
  2671. "cbc(des)))",
  2672. .cra_driver_name = "echainiv-authenc-"
  2673. "hmac-sha512-cbc-des-caam",
  2674. .cra_blocksize = DES_BLOCK_SIZE,
  2675. },
  2676. .setkey = aead_setkey,
  2677. .setauthsize = aead_setauthsize,
  2678. .encrypt = aead_encrypt,
  2679. .decrypt = aead_decrypt,
  2680. .ivsize = DES_BLOCK_SIZE,
  2681. .maxauthsize = SHA512_DIGEST_SIZE,
  2682. },
  2683. .caam = {
  2684. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2685. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2686. OP_ALG_AAI_HMAC_PRECOMP,
  2687. .geniv = true,
  2688. },
  2689. },
  2690. {
  2691. .aead = {
  2692. .base = {
  2693. .cra_name = "authenc(hmac(md5),"
  2694. "rfc3686(ctr(aes)))",
  2695. .cra_driver_name = "authenc-hmac-md5-"
  2696. "rfc3686-ctr-aes-caam",
  2697. .cra_blocksize = 1,
  2698. },
  2699. .setkey = aead_setkey,
  2700. .setauthsize = aead_setauthsize,
  2701. .encrypt = aead_encrypt,
  2702. .decrypt = aead_decrypt,
  2703. .ivsize = CTR_RFC3686_IV_SIZE,
  2704. .maxauthsize = MD5_DIGEST_SIZE,
  2705. },
  2706. .caam = {
  2707. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2708. OP_ALG_AAI_CTR_MOD128,
  2709. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2710. OP_ALG_AAI_HMAC_PRECOMP,
  2711. .rfc3686 = true,
  2712. },
  2713. },
  2714. {
  2715. .aead = {
  2716. .base = {
  2717. .cra_name = "seqiv(authenc("
  2718. "hmac(md5),rfc3686(ctr(aes))))",
  2719. .cra_driver_name = "seqiv-authenc-hmac-md5-"
  2720. "rfc3686-ctr-aes-caam",
  2721. .cra_blocksize = 1,
  2722. },
  2723. .setkey = aead_setkey,
  2724. .setauthsize = aead_setauthsize,
  2725. .encrypt = aead_encrypt,
  2726. .decrypt = aead_decrypt,
  2727. .ivsize = CTR_RFC3686_IV_SIZE,
  2728. .maxauthsize = MD5_DIGEST_SIZE,
  2729. },
  2730. .caam = {
  2731. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2732. OP_ALG_AAI_CTR_MOD128,
  2733. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2734. OP_ALG_AAI_HMAC_PRECOMP,
  2735. .rfc3686 = true,
  2736. .geniv = true,
  2737. },
  2738. },
  2739. {
  2740. .aead = {
  2741. .base = {
  2742. .cra_name = "authenc(hmac(sha1),"
  2743. "rfc3686(ctr(aes)))",
  2744. .cra_driver_name = "authenc-hmac-sha1-"
  2745. "rfc3686-ctr-aes-caam",
  2746. .cra_blocksize = 1,
  2747. },
  2748. .setkey = aead_setkey,
  2749. .setauthsize = aead_setauthsize,
  2750. .encrypt = aead_encrypt,
  2751. .decrypt = aead_decrypt,
  2752. .ivsize = CTR_RFC3686_IV_SIZE,
  2753. .maxauthsize = SHA1_DIGEST_SIZE,
  2754. },
  2755. .caam = {
  2756. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2757. OP_ALG_AAI_CTR_MOD128,
  2758. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2759. OP_ALG_AAI_HMAC_PRECOMP,
  2760. .rfc3686 = true,
  2761. },
  2762. },
  2763. {
  2764. .aead = {
  2765. .base = {
  2766. .cra_name = "seqiv(authenc("
  2767. "hmac(sha1),rfc3686(ctr(aes))))",
  2768. .cra_driver_name = "seqiv-authenc-hmac-sha1-"
  2769. "rfc3686-ctr-aes-caam",
  2770. .cra_blocksize = 1,
  2771. },
  2772. .setkey = aead_setkey,
  2773. .setauthsize = aead_setauthsize,
  2774. .encrypt = aead_encrypt,
  2775. .decrypt = aead_decrypt,
  2776. .ivsize = CTR_RFC3686_IV_SIZE,
  2777. .maxauthsize = SHA1_DIGEST_SIZE,
  2778. },
  2779. .caam = {
  2780. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2781. OP_ALG_AAI_CTR_MOD128,
  2782. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2783. OP_ALG_AAI_HMAC_PRECOMP,
  2784. .rfc3686 = true,
  2785. .geniv = true,
  2786. },
  2787. },
  2788. {
  2789. .aead = {
  2790. .base = {
  2791. .cra_name = "authenc(hmac(sha224),"
  2792. "rfc3686(ctr(aes)))",
  2793. .cra_driver_name = "authenc-hmac-sha224-"
  2794. "rfc3686-ctr-aes-caam",
  2795. .cra_blocksize = 1,
  2796. },
  2797. .setkey = aead_setkey,
  2798. .setauthsize = aead_setauthsize,
  2799. .encrypt = aead_encrypt,
  2800. .decrypt = aead_decrypt,
  2801. .ivsize = CTR_RFC3686_IV_SIZE,
  2802. .maxauthsize = SHA224_DIGEST_SIZE,
  2803. },
  2804. .caam = {
  2805. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2806. OP_ALG_AAI_CTR_MOD128,
  2807. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2808. OP_ALG_AAI_HMAC_PRECOMP,
  2809. .rfc3686 = true,
  2810. },
  2811. },
  2812. {
  2813. .aead = {
  2814. .base = {
  2815. .cra_name = "seqiv(authenc("
  2816. "hmac(sha224),rfc3686(ctr(aes))))",
  2817. .cra_driver_name = "seqiv-authenc-hmac-sha224-"
  2818. "rfc3686-ctr-aes-caam",
  2819. .cra_blocksize = 1,
  2820. },
  2821. .setkey = aead_setkey,
  2822. .setauthsize = aead_setauthsize,
  2823. .encrypt = aead_encrypt,
  2824. .decrypt = aead_decrypt,
  2825. .ivsize = CTR_RFC3686_IV_SIZE,
  2826. .maxauthsize = SHA224_DIGEST_SIZE,
  2827. },
  2828. .caam = {
  2829. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2830. OP_ALG_AAI_CTR_MOD128,
  2831. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2832. OP_ALG_AAI_HMAC_PRECOMP,
  2833. .rfc3686 = true,
  2834. .geniv = true,
  2835. },
  2836. },
  2837. {
  2838. .aead = {
  2839. .base = {
  2840. .cra_name = "authenc(hmac(sha256),"
  2841. "rfc3686(ctr(aes)))",
  2842. .cra_driver_name = "authenc-hmac-sha256-"
  2843. "rfc3686-ctr-aes-caam",
  2844. .cra_blocksize = 1,
  2845. },
  2846. .setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
};

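/*
 * Wrapper tying a dynamically built crypto_alg to its CAAM-specific
 * parameters (class 1/2 algorithm selectors) and to the list used for
 * registration bookkeeping.
 */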
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};

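/*
 * Common per-transform setup: grab a job ring and DMA-map the context
 * region holding the shared descriptors and key in a single mapping, so
 * the individual sh_desc_*_dma/key_dma handles can be derived by offset
 * from one base address.
 */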
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
	dma_addr_t dma_addr;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
						      sh_desc_givenc);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}

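/* crypto_alg (ablkcipher/givcipher) init hook */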
static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg =
		container_of(alg, struct caam_crypto_alg, crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}

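/* aead_alg init hook */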
static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		 container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}

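/*
 * Undo caam_init_common(): unmap the descriptor/key region (same size as
 * was mapped, i.e. everything up to the sh_desc_enc_dma member) and
 * release the job ring.
 */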
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

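/*
 * Module teardown: unregister every AEAD that made it through
 * registration, then drain and free the ablkcipher/givcipher list
 * (if it was ever initialized).
 */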
static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}

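/*
 * Build a crypto_alg from a caam_alg_template: copy the names, fill in
 * the boilerplate (module, priority, ctxsize, ASYNC flags) and hook up
 * the type-specific ops for the givcipher/ablkcipher cases.
 */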
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;

	return t_alg;
}

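/* Fill in the boilerplate common to all AEAD entries in driver_aeads[] */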
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

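/*
 * Module init: locate the CAAM controller via the device tree, read the
 * CHA instantiation/version registers, and register only the algorithms
 * the detected hardware can actually run (skipping DES/AES/MD variants
 * the device lacks, and modes unavailable on low-power parts).
 */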
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			     OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n",
				alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
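
/*
 * Illustrative only, not part of this driver: a minimal sketch of how a
 * kernel consumer might exercise one of the AEADs registered above via
 * the generic crypto API. The name resolves to the CAAM implementation
 * when its cra_priority wins. done_cb, key, keylen, src_sg, dst_sg,
 * assoclen, cryptlen and iv are placeholders; error handling is
 * abbreviated; and the authenc() key is an rtattr-encoded blob (see
 * crypto/authenc.c), not a raw concatenation, so treat this as a sketch
 * rather than reference code:
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha256),rfc3686(ctr(aes)))",
 *				0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, keylen);
 *	err = crypto_aead_setauthsize(tfm, SHA256_DIGEST_SIZE);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  done_cb, NULL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *
 *	err = crypto_aead_encrypt(req);
 *	(-EINPROGRESS here means the job was queued to the CAAM job ring;
 *	 done_cb fires on completion.)
 */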