caamalg.c 85 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
7277827792780278127822783278427852786278727882789279027912792279327942795279627972798279928002801280228032804280528062807280828092810281128122813281428152816281728182819282028212822282328242825282628272828282928302831283228332834283528362837283828392840284128422843284428452846284728482849285028512852285328542855285628572858285928602861286228632864286528662867286828692870287128722873287428752876287728782879288028812882288328842885288628872888288928902891289228932894289528962897289828992900290129022903290429052906290729082909291029112912291329142915291629172918291929202921292229232924292529262927292829292930293129322933293429352936293729382939294029412942294329442945294629472948294929502951295229532954295529562957295829592960296129622963296429652966296729682969297029712972297329742975297629772978297929802981298229832984298529862987298829892990299129922993299429952996299729982999300030013002300330043005300630073008300930103011301230133014301530163017301830193020302130223023302430253026302730283029303030313032303330343035303630373038303930403041304230433044304530463047304830493050305130523053305430553056305730583059306030613062306330643065306630673068306930703071307230733074307530763077307830793080308130823083308430853086308730883089309030913092309330943095309630973098309931003101310231033104310531063107310831093110311131123113311431153116311731183119312031213122312331243125312631273128312931303131313231333134313531363137313831393140314131423143314431453146314731483149315031513152315331543155315631573158315931603161316231633164316531663167316831693170317131723173317431753176317731783179318031813182318331843185318631873188318931903191319231933194319531963197319831993200320132023203320432053206320732083209321032113212321332143215321632173218321932203221322232233224322532263227322832293230323132323233323432353236323732383239324032413242324332443245324632473248324932503251325232533254325532563257325832593260326132623263326432653266326732683269327032713272327332743275
  1. // SPDX-License-Identifier: GPL-2.0+
  2. /*
  3. * caam - Freescale FSL CAAM support for crypto API
  4. *
  5. * Copyright 2008-2011 Freescale Semiconductor, Inc.
  6. * Copyright 2016-2018 NXP
  7. *
  8. * Based on talitos crypto API driver.
  9. *
  10. * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
  11. *
  12. * --------------- ---------------
  13. * | JobDesc #1 |-------------------->| ShareDesc |
  14. * | *(packet 1) | | (PDB) |
  15. * --------------- |------------->| (hashKey) |
  16. * . | | (cipherKey) |
  17. * . | |-------->| (operation) |
  18. * --------------- | | ---------------
  19. * | JobDesc #2 |------| |
  20. * | *(packet 2) | |
  21. * --------------- |
  22. * . |
  23. * . |
  24. * --------------- |
  25. * | JobDesc #3 |------------
  26. * | *(packet 3) |
  27. * ---------------
  28. *
  29. * The SharedDesc never changes for a connection unless rekeyed, but
  30. * each packet will likely be in a different place. So all we need
  31. * to know to process the packet is where the input is, where the
  32. * output goes, and what context we want to process with. Context is
  33. * in the SharedDesc, packet references in the JobDesc.
  34. *
  35. * So, a job desc looks like:
  36. *
  37. * ---------------------
  38. * | Header |
  39. * | ShareDesc Pointer |
  40. * | SEQ_OUT_PTR |
  41. * | (output buffer) |
  42. * | (output length) |
  43. * | SEQ_IN_PTR |
  44. * | (input buffer) |
  45. * | (input length) |
  46. * ---------------------
  47. */
  48. #include "compat.h"
  49. #include "regs.h"
  50. #include "intern.h"
  51. #include "desc_constr.h"
  52. #include "jr.h"
  53. #include "error.h"
  54. #include "sg_sw_sec4.h"
  55. #include "key_gen.h"
  56. #include "caamalg_desc.h"
  57. /*
  58. * crypto alg
  59. */
  60. #define CAAM_CRA_PRIORITY 3000
  61. /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
  62. #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
  63. CTR_RFC3686_NONCE_SIZE + \
  64. SHA512_DIGEST_SIZE * 2)
  65. #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
  66. #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
  67. CAAM_CMD_SZ * 4)
  68. #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
  69. CAAM_CMD_SZ * 5)
  70. #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
  71. #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
  72. #ifdef DEBUG
  73. /* for print_hex_dumps with line references */
  74. #define debug(format, arg...) printk(format, arg)
  75. #else
  76. #define debug(format, arg...)
  77. #endif
/*
 * caam_alg_entry - per-algorithm template data attached to each
 * registered alg (see struct caam_aead_alg / struct caam_skcipher_alg)
 * @class1_alg_type: CLASS1 (cipher) OP_ALG_* type for the CAAM descriptor
 * @class2_alg_type: CLASS2 (authentication) OP_ALG_* type
 * @rfc3686: true when the CTR cipher is wrapped per RFC3686 (nonce + IV)
 * @geniv: true when the givencrypt shared descriptor is used — i.e. the
 *	   IV is produced as part of encryption rather than caller-supplied
 */
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};
/*
 * caam_aead_alg - AEAD algorithm definition plus its CAAM template
 * @aead: the crypto API aead_alg this driver registers
 * @caam: CAAM-specific template (alg types, rfc3686/geniv flags)
 * @registered: set once the alg has been registered with the crypto API,
 *		so teardown only unregisters what was actually registered
 */
struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};
/*
 * caam_skcipher_alg - skcipher algorithm definition plus its CAAM template
 * @skcipher: the crypto API skcipher_alg this driver registers
 * @caam: CAAM-specific template (alg types, rfc3686 flag)
 * @registered: set once the alg has been registered with the crypto API
 */
struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};
/*
 * per-session context
 */
struct caam_ctx {
	/* Pre-built shared descriptors (encrypt / decrypt) */
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	/* Key material: auth (split) key, then cipher key appended after
	 * adata.keylen_pad bytes */
	u8 key[CAAM_MAX_KEY_SIZE];
	/* DMA addresses of the above buffers; the buffers are synced with
	 * dma_sync_single_for_device() after every rewrite */
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	/* DMA direction used for the sync operations above */
	enum dma_data_direction dir;
	/* Job ring device servicing this session */
	struct device *jrdev;
	/* Authentication (class 2) and cipher (class 1) algorithm info */
	struct alginfo adata;
	struct alginfo cdata;
	/* ICV length in bytes, set via setauthsize */
	unsigned int authsize;
};
/*
 * aead_null_set_sh_desc - (re)build shared descriptors for authenc
 * algorithms with NULL encryption (authentication only).
 *
 * Decides whether the split auth key can be inlined into each shared
 * descriptor, constructs the encap/decap descriptors and syncs them to
 * the device.  Returns 0.
 */
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	/* descriptor space left after the job I/O overhead and the key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		/* key does not fit inline - reference it by DMA address */
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	/* make the freshly written descriptor visible to the device */
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
/*
 * aead_set_sh_desc - (re)build shared descriptors for authenc (AEAD)
 * algorithms: encrypt, decrypt and — for geniv algs — givencrypt.
 *
 * For each descriptor, desc_inline_query() decides per-key (auth in bit 0,
 * cipher in bit 1 of inl_mask) whether the key is inlined or referenced by
 * DMA address.  Returns 0, or -EINVAL if a descriptor cannot fit in the
 * 64-word buffer even with both keys referenced by pointer.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* descriptors depend on the ICV length; wait until it is known */
	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* nonce is stored at the tail of the cipher key material */
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/* key sizes queried for inlining: [0] = auth, [1] = cipher */
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	/* geniv algs use the givencrypt descriptor instead of encrypt */
	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	/* bit 0: inline the auth key; bit 1: inline the cipher key */
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
  278. static int aead_setauthsize(struct crypto_aead *authenc,
  279. unsigned int authsize)
  280. {
  281. struct caam_ctx *ctx = crypto_aead_ctx(authenc);
  282. ctx->authsize = authsize;
  283. aead_set_sh_desc(authenc);
  284. return 0;
  285. }
/*
 * gcm_set_sh_desc - (re)build AES-GCM shared descriptors (encrypt and
 * decrypt), deciding per-descriptor whether the AES key is inlined or
 * referenced by DMA address, then sync them to the device.  Returns 0;
 * does nothing until both key and ICV length are known.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	/* descriptor space left after the job I/O overhead and the key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	/* make the freshly written descriptor visible to the device */
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
  329. static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
  330. {
  331. struct caam_ctx *ctx = crypto_aead_ctx(authenc);
  332. ctx->authsize = authsize;
  333. gcm_set_sh_desc(authenc);
  334. return 0;
  335. }
  336. static int rfc4106_set_sh_desc(struct crypto_aead *aead)
  337. {
  338. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  339. struct device *jrdev = ctx->jrdev;
  340. unsigned int ivsize = crypto_aead_ivsize(aead);
  341. u32 *desc;
  342. int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
  343. ctx->cdata.keylen;
  344. if (!ctx->cdata.keylen || !ctx->authsize)
  345. return 0;
  346. /*
  347. * RFC4106 encrypt shared descriptor
  348. * Job Descriptor and Shared Descriptor
  349. * must fit into the 64-word Descriptor h/w Buffer
  350. */
  351. if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
  352. ctx->cdata.key_inline = true;
  353. ctx->cdata.key_virt = ctx->key;
  354. } else {
  355. ctx->cdata.key_inline = false;
  356. ctx->cdata.key_dma = ctx->key_dma;
  357. }
  358. desc = ctx->sh_desc_enc;
  359. cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
  360. false);
  361. dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
  362. desc_bytes(desc), ctx->dir);
  363. /*
  364. * Job Descriptor and Shared Descriptors
  365. * must all fit into the 64-word Descriptor h/w Buffer
  366. */
  367. if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
  368. ctx->cdata.key_inline = true;
  369. ctx->cdata.key_virt = ctx->key;
  370. } else {
  371. ctx->cdata.key_inline = false;
  372. ctx->cdata.key_dma = ctx->key_dma;
  373. }
  374. desc = ctx->sh_desc_dec;
  375. cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
  376. false);
  377. dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
  378. desc_bytes(desc), ctx->dir);
  379. return 0;
  380. }
  381. static int rfc4106_setauthsize(struct crypto_aead *authenc,
  382. unsigned int authsize)
  383. {
  384. struct caam_ctx *ctx = crypto_aead_ctx(authenc);
  385. ctx->authsize = authsize;
  386. rfc4106_set_sh_desc(authenc);
  387. return 0;
  388. }
  389. static int rfc4543_set_sh_desc(struct crypto_aead *aead)
  390. {
  391. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  392. struct device *jrdev = ctx->jrdev;
  393. unsigned int ivsize = crypto_aead_ivsize(aead);
  394. u32 *desc;
  395. int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
  396. ctx->cdata.keylen;
  397. if (!ctx->cdata.keylen || !ctx->authsize)
  398. return 0;
  399. /*
  400. * RFC4543 encrypt shared descriptor
  401. * Job Descriptor and Shared Descriptor
  402. * must fit into the 64-word Descriptor h/w Buffer
  403. */
  404. if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
  405. ctx->cdata.key_inline = true;
  406. ctx->cdata.key_virt = ctx->key;
  407. } else {
  408. ctx->cdata.key_inline = false;
  409. ctx->cdata.key_dma = ctx->key_dma;
  410. }
  411. desc = ctx->sh_desc_enc;
  412. cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
  413. false);
  414. dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
  415. desc_bytes(desc), ctx->dir);
  416. /*
  417. * Job Descriptor and Shared Descriptors
  418. * must all fit into the 64-word Descriptor h/w Buffer
  419. */
  420. if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
  421. ctx->cdata.key_inline = true;
  422. ctx->cdata.key_virt = ctx->key;
  423. } else {
  424. ctx->cdata.key_inline = false;
  425. ctx->cdata.key_dma = ctx->key_dma;
  426. }
  427. desc = ctx->sh_desc_dec;
  428. cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
  429. false);
  430. dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
  431. desc_bytes(desc), ctx->dir);
  432. return 0;
  433. }
  434. static int rfc4543_setauthsize(struct crypto_aead *authenc,
  435. unsigned int authsize)
  436. {
  437. struct caam_ctx *ctx = crypto_aead_ctx(authenc);
  438. ctx->authsize = authsize;
  439. rfc4543_set_sh_desc(authenc);
  440. return 0;
  441. }
/*
 * aead_setkey - split the authenc-formatted key blob into auth and cipher
 * keys, derive/store them in ctx->key, and rebuild the shared descriptors.
 *
 * Layout of ctx->key afterwards: [split auth key, padded to
 * adata.keylen_pad][cipher key].  On era >= 6 hardware the split key is
 * derived by the Derived Key Protocol inside the shared descriptor, so the
 * plain auth key is stored; otherwise gen_split_key() derives it here.
 * Returns 0 or -EINVAL (with CRYPTO_TFM_RES_BAD_KEY_LEN set) on bad keys.
 */
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		/* plain auth key first, cipher key after the padded length */
		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	/* pre-era-6: derive the split key on the device via a job ring */
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	/* wipe key material from the stack before returning */
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
  501. static int gcm_setkey(struct crypto_aead *aead,
  502. const u8 *key, unsigned int keylen)
  503. {
  504. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  505. struct device *jrdev = ctx->jrdev;
  506. #ifdef DEBUG
  507. print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
  508. DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
  509. #endif
  510. memcpy(ctx->key, key, keylen);
  511. dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
  512. ctx->cdata.keylen = keylen;
  513. return gcm_set_sh_desc(aead);
  514. }
  515. static int rfc4106_setkey(struct crypto_aead *aead,
  516. const u8 *key, unsigned int keylen)
  517. {
  518. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  519. struct device *jrdev = ctx->jrdev;
  520. if (keylen < 4)
  521. return -EINVAL;
  522. #ifdef DEBUG
  523. print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
  524. DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
  525. #endif
  526. memcpy(ctx->key, key, keylen);
  527. /*
  528. * The last four bytes of the key material are used as the salt value
  529. * in the nonce. Update the AES key length.
  530. */
  531. ctx->cdata.keylen = keylen - 4;
  532. dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
  533. ctx->dir);
  534. return rfc4106_set_sh_desc(aead);
  535. }
/*
 * rfc4543_setkey - set key for RFC4543 (GMAC)
 *
 * Same key layout as rfc4106_setkey(): AES key followed by a 4-byte salt.
 * The salt remains in the CPU copy of ctx->key; only the AES part is
 * synced to the device.
 */
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}
/*
 * skcipher_setkey - load a cipher key and rebuild both shared descriptors
 *
 * The key is referenced in place (cdata.key_virt) and inlined into the
 * encrypt and decrypt shared descriptors, which are then synced to the
 * device.  CTR-mode and RFC3686 variants place the IV at an offset inside
 * CONTEXT1, and RFC3686 keys carry a trailing nonce that is not part of
 * the AES key proper.
 */
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* strip the nonce off the tail of the key blob */
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
/*
 * xts_skcipher_setkey - load an XTS key pair and rebuild shared descriptors
 *
 * XTS keys are twice the AES key size (cipher key + tweak key); only
 * 2x128-bit and 2x256-bit blobs are accepted here, so 2x192-bit keys are
 * rejected.  The key is referenced in place and inlined into both shared
 * descriptors.
 */
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 *	       (0 for in-place operations, where req->dst == req->src)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space (0 if no link table)
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
  651. /*
  652. * skcipher_edesc - s/w-extended skcipher descriptor
  653. * @src_nents: number of segments in input s/w scatterlist
  654. * @dst_nents: number of segments in output s/w scatterlist
  655. * @iv_dma: dma address of iv for checking continuity and link table
  656. * @sec4_sg_bytes: length of dma mapped sec4_sg space
  657. * @sec4_sg_dma: bus physical mapped address of h/w link table
  658. * @sec4_sg: pointer to h/w link table
  659. * @hw_desc: the h/w job descriptor followed by any referenced link tables
  660. * and IV
  661. */
  662. struct skcipher_edesc {
  663. int src_nents;
  664. int dst_nents;
  665. dma_addr_t iv_dma;
  666. int sec4_sg_bytes;
  667. dma_addr_t sec4_sg_dma;
  668. struct sec4_sg_entry *sec4_sg;
  669. u32 hw_desc[0];
  670. };
  671. static void caam_unmap(struct device *dev, struct scatterlist *src,
  672. struct scatterlist *dst, int src_nents,
  673. int dst_nents,
  674. dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
  675. int sec4_sg_bytes)
  676. {
  677. if (dst != src) {
  678. if (src_nents)
  679. dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
  680. dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
  681. } else {
  682. dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
  683. }
  684. if (iv_dma)
  685. dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
  686. if (sec4_sg_bytes)
  687. dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
  688. DMA_TO_DEVICE);
  689. }
  690. static void aead_unmap(struct device *dev,
  691. struct aead_edesc *edesc,
  692. struct aead_request *req)
  693. {
  694. caam_unmap(dev, req->src, req->dst,
  695. edesc->src_nents, edesc->dst_nents, 0, 0,
  696. edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
  697. }
  698. static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
  699. struct skcipher_request *req)
  700. {
  701. struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
  702. int ivsize = crypto_skcipher_ivsize(skcipher);
  703. caam_unmap(dev, req->src, req->dst,
  704. edesc->src_nents, edesc->dst_nents,
  705. edesc->iv_dma, ivsize,
  706. edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
  707. }
/* Job ring completion callback for AEAD encryption requests. */
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* hw_desc is the first member after the s/w bookkeeping fields */
	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	/*
	 * NOTE(review): a non-zero err is the raw CAAM status word, not an
	 * errno; caam_jr_strstatus() only logs it — confirm callers expect
	 * this.
	 */
	aead_request_complete(req, err);
}
/*
 * Job ring completion callback for AEAD decryption requests.
 *
 * On an ICV mismatch reported by the CCB, the hardware status is
 * translated to -EBADMSG as the crypto API expects.
 */
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* hw_desc is the first member after the s/w bookkeeping fields */
	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
/*
 * Job ring completion callback for skcipher encryption requests.
 *
 * After unmapping, the last ciphertext block is copied from req->dst into
 * req->iv so chaining modes (e.g. CTS) can continue from it.
 */
static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* hw_desc is the first member after the s/w bookkeeping fields */
	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	/* unmap before touching req->dst from the CPU */
	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen - ivsize,
				 ivsize, 0);

	kfree(edesc);

	skcipher_request_complete(req, err);
}
/*
 * Job ring completion callback for skcipher decryption requests.
 *
 * The IV copy-back for chaining was already done at submission time (see
 * skcipher_decrypt), so this path only unmaps and completes.
 */
static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* hw_desc is the first member after the s/w bookkeeping fields */
	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	skcipher_request_complete(req, err);
}
/*
 * Fill in aead job descriptor
 *
 * Builds the common part of every AEAD job: job header pointing at the
 * per-direction shared descriptor, then SEQ IN/OUT pointers describing
 * input (assoclen + cryptlen) and output (input length plus the ICV on
 * encrypt, minus it on decrypt).  Scattered buffers are referenced via
 * the sec4 link table with the SGF flag set.
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	/* pick the shared descriptor for the requested direction */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/* single segment (or zero-length input): point at it directly */
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* default: operate in place on the input buffer */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst link-table entries follow the src entries */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	/* output carries the ICV on encrypt; loses it on decrypt */
	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
/*
 * Fill in a GCM job descriptor: common AEAD setup, assoclen in REG3, then
 * the IV (prefixed by the salt for RFC4106/4543-style 8-byte IVs) loaded
 * into the class 1 FIFO as immediate data.
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* shared descriptor reads the AAD length from REG3 */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;

	/* zero AAD and zero payload: the IV is the final FIFO entry */
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt (stored right after the AES key in ctx->key) */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
/*
 * Fill in an authenc (cipher + HMAC) job descriptor: common AEAD setup,
 * assoclen passed via REG3 or DPOVRD depending on CAAM era, and the IV
 * loaded into CONTEXT1 at the mode-dependent offset.
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	/* load the IV unless the shared descriptor generates it (geniv) */
	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
/*
 * Fill in skcipher job descriptor
 *
 * The input sequence always goes through the link table (IV entry first,
 * then the source segments), hence length cryptlen + ivsize with SGF set.
 * The output skips the IV entry for in-place operation, or points at the
 * destination (directly or via its link-table entries) otherwise.
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 out_options = 0;
	dma_addr_t dst_dma, ptr;
	int len;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	pr_err("asked=%d, cryptlen%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
#endif
	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	/* pick the shared descriptor for the requested direction */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	/* input = IV + source data, always via the link table */
	append_seq_in_ptr(desc, edesc->sec4_sg_dma, req->cryptlen + ivsize,
			  LDST_SGF);

	if (likely(req->src == req->dst)) {
		/* in place: reuse the src entries, skipping the IV entry */
		dst_dma = edesc->sec4_sg_dma + sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst entries follow the IV + src entries */
			dst_dma = edesc->sec4_sg_dma + (edesc->src_nents + 1) *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
}
/*
 * allocate and map the aead extended descriptor
 *
 * Counts and DMA-maps the source (and, for out-of-place requests, the
 * destination) scatterlists, allocates the edesc with room for the job
 * descriptor and the sec4 link table, fills the table, and maps it.
 * Returns the edesc or an ERR_PTR; on error all mappings are undone.
 * *all_contig_ptr tells the caller whether the mapped source is a single
 * segment (no S/G indirection needed on the input side).
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		/*
		 * Out-of-place: src covers assoclen + cryptlen; dst also
		 * gains (encrypt) or loses (decrypt) the authsize-byte ICV.
		 */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
						(encrypt ? authsize :
							   (-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		/* In-place: one list must also hold the ICV on encrypt */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* link table needed only for multi-segment src and/or dst */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	/* link table lives right after the job descriptor area */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	/* no table needed: sec4_sg_bytes stays 0 from kzalloc */
	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
  1073. static int gcm_encrypt(struct aead_request *req)
  1074. {
  1075. struct aead_edesc *edesc;
  1076. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1077. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1078. struct device *jrdev = ctx->jrdev;
  1079. bool all_contig;
  1080. u32 *desc;
  1081. int ret = 0;
  1082. /* allocate extended descriptor */
  1083. edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
  1084. if (IS_ERR(edesc))
  1085. return PTR_ERR(edesc);
  1086. /* Create and submit job descriptor */
  1087. init_gcm_job(req, edesc, all_contig, true);
  1088. #ifdef DEBUG
  1089. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1090. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1091. desc_bytes(edesc->hw_desc), 1);
  1092. #endif
  1093. desc = edesc->hw_desc;
  1094. ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
  1095. if (!ret) {
  1096. ret = -EINPROGRESS;
  1097. } else {
  1098. aead_unmap(jrdev, edesc, req);
  1099. kfree(edesc);
  1100. }
  1101. return ret;
  1102. }
  1103. static int ipsec_gcm_encrypt(struct aead_request *req)
  1104. {
  1105. if (req->assoclen < 8)
  1106. return -EINVAL;
  1107. return gcm_encrypt(req);
  1108. }
  1109. static int aead_encrypt(struct aead_request *req)
  1110. {
  1111. struct aead_edesc *edesc;
  1112. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1113. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1114. struct device *jrdev = ctx->jrdev;
  1115. bool all_contig;
  1116. u32 *desc;
  1117. int ret = 0;
  1118. /* allocate extended descriptor */
  1119. edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
  1120. &all_contig, true);
  1121. if (IS_ERR(edesc))
  1122. return PTR_ERR(edesc);
  1123. /* Create and submit job descriptor */
  1124. init_authenc_job(req, edesc, all_contig, true);
  1125. #ifdef DEBUG
  1126. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1127. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1128. desc_bytes(edesc->hw_desc), 1);
  1129. #endif
  1130. desc = edesc->hw_desc;
  1131. ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
  1132. if (!ret) {
  1133. ret = -EINPROGRESS;
  1134. } else {
  1135. aead_unmap(jrdev, edesc, req);
  1136. kfree(edesc);
  1137. }
  1138. return ret;
  1139. }
  1140. static int gcm_decrypt(struct aead_request *req)
  1141. {
  1142. struct aead_edesc *edesc;
  1143. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1144. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1145. struct device *jrdev = ctx->jrdev;
  1146. bool all_contig;
  1147. u32 *desc;
  1148. int ret = 0;
  1149. /* allocate extended descriptor */
  1150. edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
  1151. if (IS_ERR(edesc))
  1152. return PTR_ERR(edesc);
  1153. /* Create and submit job descriptor*/
  1154. init_gcm_job(req, edesc, all_contig, false);
  1155. #ifdef DEBUG
  1156. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1157. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1158. desc_bytes(edesc->hw_desc), 1);
  1159. #endif
  1160. desc = edesc->hw_desc;
  1161. ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
  1162. if (!ret) {
  1163. ret = -EINPROGRESS;
  1164. } else {
  1165. aead_unmap(jrdev, edesc, req);
  1166. kfree(edesc);
  1167. }
  1168. return ret;
  1169. }
  1170. static int ipsec_gcm_decrypt(struct aead_request *req)
  1171. {
  1172. if (req->assoclen < 8)
  1173. return -EINVAL;
  1174. return gcm_decrypt(req);
  1175. }
  1176. static int aead_decrypt(struct aead_request *req)
  1177. {
  1178. struct aead_edesc *edesc;
  1179. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1180. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1181. struct device *jrdev = ctx->jrdev;
  1182. bool all_contig;
  1183. u32 *desc;
  1184. int ret = 0;
  1185. caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
  1186. DUMP_PREFIX_ADDRESS, 16, 4, req->src,
  1187. req->assoclen + req->cryptlen, 1);
  1188. /* allocate extended descriptor */
  1189. edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
  1190. &all_contig, false);
  1191. if (IS_ERR(edesc))
  1192. return PTR_ERR(edesc);
  1193. /* Create and submit job descriptor*/
  1194. init_authenc_job(req, edesc, all_contig, false);
  1195. #ifdef DEBUG
  1196. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1197. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1198. desc_bytes(edesc->hw_desc), 1);
  1199. #endif
  1200. desc = edesc->hw_desc;
  1201. ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
  1202. if (!ret) {
  1203. ret = -EINPROGRESS;
  1204. } else {
  1205. aead_unmap(jrdev, edesc, req);
  1206. kfree(edesc);
  1207. }
  1208. return ret;
  1209. }
/*
 * allocate and map the skcipher extended descriptor for skcipher
 *
 * Counts and DMA-maps the scatterlists, allocates the edesc with room for
 * the job descriptor, the sec4 link table and a DMA-able IV copy, then
 * builds the table (IV entry first, then src segments, then dst segments
 * if scattered) and maps it.  Returns the edesc or an ERR_PTR; on error
 * all mappings taken so far are undone.
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* table always starts with the IV entry, followed by src */
	sec4_sg_ents = 1 + mapped_src_nents;
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);

	/* Make sure IV is located in a DMAable area */
	iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
	memcpy(iv, req->iv, ivsize);

	iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	/* entry 0: IV; entries 1..: source segments */
	dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1, 0);

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	return edesc;
}
  1317. static int skcipher_encrypt(struct skcipher_request *req)
  1318. {
  1319. struct skcipher_edesc *edesc;
  1320. struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
  1321. struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
  1322. struct device *jrdev = ctx->jrdev;
  1323. u32 *desc;
  1324. int ret = 0;
  1325. /* allocate extended descriptor */
  1326. edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
  1327. if (IS_ERR(edesc))
  1328. return PTR_ERR(edesc);
  1329. /* Create and submit job descriptor*/
  1330. init_skcipher_job(req, edesc, true);
  1331. #ifdef DEBUG
  1332. print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
  1333. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1334. desc_bytes(edesc->hw_desc), 1);
  1335. #endif
  1336. desc = edesc->hw_desc;
  1337. ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
  1338. if (!ret) {
  1339. ret = -EINPROGRESS;
  1340. } else {
  1341. skcipher_unmap(jrdev, edesc, req);
  1342. kfree(edesc);
  1343. }
  1344. return ret;
  1345. }
/*
 * skcipher_decrypt - build and enqueue an skcipher decryption job
 *
 * The chaining IV (last ciphertext block) is snapshotted from req->src
 * into req->iv BEFORE the job is submitted, since an in-place decryption
 * would overwrite that block in the buffer.
 *
 * Returns -EINPROGRESS on successful submission (completion arrives via
 * skcipher_decrypt_done), or an error after cleaning up the descriptor.
 */
static int skcipher_decrypt(struct skcipher_request *req)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block.
	 */
	scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen - ivsize,
				 ivsize, 0);

	/* Create and submit job descriptor*/
	init_skcipher_job(req, edesc, false);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
  1382. static struct caam_skcipher_alg driver_algs[] = {
  1383. {
  1384. .skcipher = {
  1385. .base = {
  1386. .cra_name = "cbc(aes)",
  1387. .cra_driver_name = "cbc-aes-caam",
  1388. .cra_blocksize = AES_BLOCK_SIZE,
  1389. },
  1390. .setkey = skcipher_setkey,
  1391. .encrypt = skcipher_encrypt,
  1392. .decrypt = skcipher_decrypt,
  1393. .min_keysize = AES_MIN_KEY_SIZE,
  1394. .max_keysize = AES_MAX_KEY_SIZE,
  1395. .ivsize = AES_BLOCK_SIZE,
  1396. },
  1397. .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1398. },
  1399. {
  1400. .skcipher = {
  1401. .base = {
  1402. .cra_name = "cbc(des3_ede)",
  1403. .cra_driver_name = "cbc-3des-caam",
  1404. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  1405. },
  1406. .setkey = skcipher_setkey,
  1407. .encrypt = skcipher_encrypt,
  1408. .decrypt = skcipher_decrypt,
  1409. .min_keysize = DES3_EDE_KEY_SIZE,
  1410. .max_keysize = DES3_EDE_KEY_SIZE,
  1411. .ivsize = DES3_EDE_BLOCK_SIZE,
  1412. },
  1413. .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  1414. },
  1415. {
  1416. .skcipher = {
  1417. .base = {
  1418. .cra_name = "cbc(des)",
  1419. .cra_driver_name = "cbc-des-caam",
  1420. .cra_blocksize = DES_BLOCK_SIZE,
  1421. },
  1422. .setkey = skcipher_setkey,
  1423. .encrypt = skcipher_encrypt,
  1424. .decrypt = skcipher_decrypt,
  1425. .min_keysize = DES_KEY_SIZE,
  1426. .max_keysize = DES_KEY_SIZE,
  1427. .ivsize = DES_BLOCK_SIZE,
  1428. },
  1429. .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  1430. },
  1431. {
  1432. .skcipher = {
  1433. .base = {
  1434. .cra_name = "ctr(aes)",
  1435. .cra_driver_name = "ctr-aes-caam",
  1436. .cra_blocksize = 1,
  1437. },
  1438. .setkey = skcipher_setkey,
  1439. .encrypt = skcipher_encrypt,
  1440. .decrypt = skcipher_decrypt,
  1441. .min_keysize = AES_MIN_KEY_SIZE,
  1442. .max_keysize = AES_MAX_KEY_SIZE,
  1443. .ivsize = AES_BLOCK_SIZE,
  1444. .chunksize = AES_BLOCK_SIZE,
  1445. },
  1446. .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
  1447. OP_ALG_AAI_CTR_MOD128,
  1448. },
  1449. {
  1450. .skcipher = {
  1451. .base = {
  1452. .cra_name = "rfc3686(ctr(aes))",
  1453. .cra_driver_name = "rfc3686-ctr-aes-caam",
  1454. .cra_blocksize = 1,
  1455. },
  1456. .setkey = skcipher_setkey,
  1457. .encrypt = skcipher_encrypt,
  1458. .decrypt = skcipher_decrypt,
  1459. .min_keysize = AES_MIN_KEY_SIZE +
  1460. CTR_RFC3686_NONCE_SIZE,
  1461. .max_keysize = AES_MAX_KEY_SIZE +
  1462. CTR_RFC3686_NONCE_SIZE,
  1463. .ivsize = CTR_RFC3686_IV_SIZE,
  1464. .chunksize = AES_BLOCK_SIZE,
  1465. },
  1466. .caam = {
  1467. .class1_alg_type = OP_ALG_ALGSEL_AES |
  1468. OP_ALG_AAI_CTR_MOD128,
  1469. .rfc3686 = true,
  1470. },
  1471. },
  1472. {
  1473. .skcipher = {
  1474. .base = {
  1475. .cra_name = "xts(aes)",
  1476. .cra_driver_name = "xts-aes-caam",
  1477. .cra_blocksize = AES_BLOCK_SIZE,
  1478. },
  1479. .setkey = xts_skcipher_setkey,
  1480. .encrypt = skcipher_encrypt,
  1481. .decrypt = skcipher_decrypt,
  1482. .min_keysize = 2 * AES_MIN_KEY_SIZE,
  1483. .max_keysize = 2 * AES_MAX_KEY_SIZE,
  1484. .ivsize = AES_BLOCK_SIZE,
  1485. },
  1486. .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
  1487. },
  1488. };
  1489. static struct caam_aead_alg driver_aeads[] = {
  1490. {
  1491. .aead = {
  1492. .base = {
  1493. .cra_name = "rfc4106(gcm(aes))",
  1494. .cra_driver_name = "rfc4106-gcm-aes-caam",
  1495. .cra_blocksize = 1,
  1496. },
  1497. .setkey = rfc4106_setkey,
  1498. .setauthsize = rfc4106_setauthsize,
  1499. .encrypt = ipsec_gcm_encrypt,
  1500. .decrypt = ipsec_gcm_decrypt,
  1501. .ivsize = GCM_RFC4106_IV_SIZE,
  1502. .maxauthsize = AES_BLOCK_SIZE,
  1503. },
  1504. .caam = {
  1505. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1506. },
  1507. },
  1508. {
  1509. .aead = {
  1510. .base = {
  1511. .cra_name = "rfc4543(gcm(aes))",
  1512. .cra_driver_name = "rfc4543-gcm-aes-caam",
  1513. .cra_blocksize = 1,
  1514. },
  1515. .setkey = rfc4543_setkey,
  1516. .setauthsize = rfc4543_setauthsize,
  1517. .encrypt = ipsec_gcm_encrypt,
  1518. .decrypt = ipsec_gcm_decrypt,
  1519. .ivsize = GCM_RFC4543_IV_SIZE,
  1520. .maxauthsize = AES_BLOCK_SIZE,
  1521. },
  1522. .caam = {
  1523. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1524. },
  1525. },
  1526. /* Galois Counter Mode */
  1527. {
  1528. .aead = {
  1529. .base = {
  1530. .cra_name = "gcm(aes)",
  1531. .cra_driver_name = "gcm-aes-caam",
  1532. .cra_blocksize = 1,
  1533. },
  1534. .setkey = gcm_setkey,
  1535. .setauthsize = gcm_setauthsize,
  1536. .encrypt = gcm_encrypt,
  1537. .decrypt = gcm_decrypt,
  1538. .ivsize = GCM_AES_IV_SIZE,
  1539. .maxauthsize = AES_BLOCK_SIZE,
  1540. },
  1541. .caam = {
  1542. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1543. },
  1544. },
  1545. /* single-pass ipsec_esp descriptor */
  1546. {
  1547. .aead = {
  1548. .base = {
  1549. .cra_name = "authenc(hmac(md5),"
  1550. "ecb(cipher_null))",
  1551. .cra_driver_name = "authenc-hmac-md5-"
  1552. "ecb-cipher_null-caam",
  1553. .cra_blocksize = NULL_BLOCK_SIZE,
  1554. },
  1555. .setkey = aead_setkey,
  1556. .setauthsize = aead_setauthsize,
  1557. .encrypt = aead_encrypt,
  1558. .decrypt = aead_decrypt,
  1559. .ivsize = NULL_IV_SIZE,
  1560. .maxauthsize = MD5_DIGEST_SIZE,
  1561. },
  1562. .caam = {
  1563. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1564. OP_ALG_AAI_HMAC_PRECOMP,
  1565. },
  1566. },
  1567. {
  1568. .aead = {
  1569. .base = {
  1570. .cra_name = "authenc(hmac(sha1),"
  1571. "ecb(cipher_null))",
  1572. .cra_driver_name = "authenc-hmac-sha1-"
  1573. "ecb-cipher_null-caam",
  1574. .cra_blocksize = NULL_BLOCK_SIZE,
  1575. },
  1576. .setkey = aead_setkey,
  1577. .setauthsize = aead_setauthsize,
  1578. .encrypt = aead_encrypt,
  1579. .decrypt = aead_decrypt,
  1580. .ivsize = NULL_IV_SIZE,
  1581. .maxauthsize = SHA1_DIGEST_SIZE,
  1582. },
  1583. .caam = {
  1584. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1585. OP_ALG_AAI_HMAC_PRECOMP,
  1586. },
  1587. },
  1588. {
  1589. .aead = {
  1590. .base = {
  1591. .cra_name = "authenc(hmac(sha224),"
  1592. "ecb(cipher_null))",
  1593. .cra_driver_name = "authenc-hmac-sha224-"
  1594. "ecb-cipher_null-caam",
  1595. .cra_blocksize = NULL_BLOCK_SIZE,
  1596. },
  1597. .setkey = aead_setkey,
  1598. .setauthsize = aead_setauthsize,
  1599. .encrypt = aead_encrypt,
  1600. .decrypt = aead_decrypt,
  1601. .ivsize = NULL_IV_SIZE,
  1602. .maxauthsize = SHA224_DIGEST_SIZE,
  1603. },
  1604. .caam = {
  1605. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1606. OP_ALG_AAI_HMAC_PRECOMP,
  1607. },
  1608. },
  1609. {
  1610. .aead = {
  1611. .base = {
  1612. .cra_name = "authenc(hmac(sha256),"
  1613. "ecb(cipher_null))",
  1614. .cra_driver_name = "authenc-hmac-sha256-"
  1615. "ecb-cipher_null-caam",
  1616. .cra_blocksize = NULL_BLOCK_SIZE,
  1617. },
  1618. .setkey = aead_setkey,
  1619. .setauthsize = aead_setauthsize,
  1620. .encrypt = aead_encrypt,
  1621. .decrypt = aead_decrypt,
  1622. .ivsize = NULL_IV_SIZE,
  1623. .maxauthsize = SHA256_DIGEST_SIZE,
  1624. },
  1625. .caam = {
  1626. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  1627. OP_ALG_AAI_HMAC_PRECOMP,
  1628. },
  1629. },
  1630. {
  1631. .aead = {
  1632. .base = {
  1633. .cra_name = "authenc(hmac(sha384),"
  1634. "ecb(cipher_null))",
  1635. .cra_driver_name = "authenc-hmac-sha384-"
  1636. "ecb-cipher_null-caam",
  1637. .cra_blocksize = NULL_BLOCK_SIZE,
  1638. },
  1639. .setkey = aead_setkey,
  1640. .setauthsize = aead_setauthsize,
  1641. .encrypt = aead_encrypt,
  1642. .decrypt = aead_decrypt,
  1643. .ivsize = NULL_IV_SIZE,
  1644. .maxauthsize = SHA384_DIGEST_SIZE,
  1645. },
  1646. .caam = {
  1647. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  1648. OP_ALG_AAI_HMAC_PRECOMP,
  1649. },
  1650. },
  1651. {
  1652. .aead = {
  1653. .base = {
  1654. .cra_name = "authenc(hmac(sha512),"
  1655. "ecb(cipher_null))",
  1656. .cra_driver_name = "authenc-hmac-sha512-"
  1657. "ecb-cipher_null-caam",
  1658. .cra_blocksize = NULL_BLOCK_SIZE,
  1659. },
  1660. .setkey = aead_setkey,
  1661. .setauthsize = aead_setauthsize,
  1662. .encrypt = aead_encrypt,
  1663. .decrypt = aead_decrypt,
  1664. .ivsize = NULL_IV_SIZE,
  1665. .maxauthsize = SHA512_DIGEST_SIZE,
  1666. },
  1667. .caam = {
  1668. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  1669. OP_ALG_AAI_HMAC_PRECOMP,
  1670. },
  1671. },
  1672. {
  1673. .aead = {
  1674. .base = {
  1675. .cra_name = "authenc(hmac(md5),cbc(aes))",
  1676. .cra_driver_name = "authenc-hmac-md5-"
  1677. "cbc-aes-caam",
  1678. .cra_blocksize = AES_BLOCK_SIZE,
  1679. },
  1680. .setkey = aead_setkey,
  1681. .setauthsize = aead_setauthsize,
  1682. .encrypt = aead_encrypt,
  1683. .decrypt = aead_decrypt,
  1684. .ivsize = AES_BLOCK_SIZE,
  1685. .maxauthsize = MD5_DIGEST_SIZE,
  1686. },
  1687. .caam = {
  1688. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1689. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1690. OP_ALG_AAI_HMAC_PRECOMP,
  1691. },
  1692. },
  1693. {
  1694. .aead = {
  1695. .base = {
  1696. .cra_name = "echainiv(authenc(hmac(md5),"
  1697. "cbc(aes)))",
  1698. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  1699. "cbc-aes-caam",
  1700. .cra_blocksize = AES_BLOCK_SIZE,
  1701. },
  1702. .setkey = aead_setkey,
  1703. .setauthsize = aead_setauthsize,
  1704. .encrypt = aead_encrypt,
  1705. .decrypt = aead_decrypt,
  1706. .ivsize = AES_BLOCK_SIZE,
  1707. .maxauthsize = MD5_DIGEST_SIZE,
  1708. },
  1709. .caam = {
  1710. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1711. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1712. OP_ALG_AAI_HMAC_PRECOMP,
  1713. .geniv = true,
  1714. },
  1715. },
  1716. {
  1717. .aead = {
  1718. .base = {
  1719. .cra_name = "authenc(hmac(sha1),cbc(aes))",
  1720. .cra_driver_name = "authenc-hmac-sha1-"
  1721. "cbc-aes-caam",
  1722. .cra_blocksize = AES_BLOCK_SIZE,
  1723. },
  1724. .setkey = aead_setkey,
  1725. .setauthsize = aead_setauthsize,
  1726. .encrypt = aead_encrypt,
  1727. .decrypt = aead_decrypt,
  1728. .ivsize = AES_BLOCK_SIZE,
  1729. .maxauthsize = SHA1_DIGEST_SIZE,
  1730. },
  1731. .caam = {
  1732. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1733. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1734. OP_ALG_AAI_HMAC_PRECOMP,
  1735. },
  1736. },
  1737. {
  1738. .aead = {
  1739. .base = {
  1740. .cra_name = "echainiv(authenc(hmac(sha1),"
  1741. "cbc(aes)))",
  1742. .cra_driver_name = "echainiv-authenc-"
  1743. "hmac-sha1-cbc-aes-caam",
  1744. .cra_blocksize = AES_BLOCK_SIZE,
  1745. },
  1746. .setkey = aead_setkey,
  1747. .setauthsize = aead_setauthsize,
  1748. .encrypt = aead_encrypt,
  1749. .decrypt = aead_decrypt,
  1750. .ivsize = AES_BLOCK_SIZE,
  1751. .maxauthsize = SHA1_DIGEST_SIZE,
  1752. },
  1753. .caam = {
  1754. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1755. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1756. OP_ALG_AAI_HMAC_PRECOMP,
  1757. .geniv = true,
  1758. },
  1759. },
  1760. {
  1761. .aead = {
  1762. .base = {
  1763. .cra_name = "authenc(hmac(sha224),cbc(aes))",
  1764. .cra_driver_name = "authenc-hmac-sha224-"
  1765. "cbc-aes-caam",
  1766. .cra_blocksize = AES_BLOCK_SIZE,
  1767. },
  1768. .setkey = aead_setkey,
  1769. .setauthsize = aead_setauthsize,
  1770. .encrypt = aead_encrypt,
  1771. .decrypt = aead_decrypt,
  1772. .ivsize = AES_BLOCK_SIZE,
  1773. .maxauthsize = SHA224_DIGEST_SIZE,
  1774. },
  1775. .caam = {
  1776. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1777. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1778. OP_ALG_AAI_HMAC_PRECOMP,
  1779. },
  1780. },
  1781. {
  1782. .aead = {
  1783. .base = {
  1784. .cra_name = "echainiv(authenc(hmac(sha224),"
  1785. "cbc(aes)))",
  1786. .cra_driver_name = "echainiv-authenc-"
  1787. "hmac-sha224-cbc-aes-caam",
  1788. .cra_blocksize = AES_BLOCK_SIZE,
  1789. },
  1790. .setkey = aead_setkey,
  1791. .setauthsize = aead_setauthsize,
  1792. .encrypt = aead_encrypt,
  1793. .decrypt = aead_decrypt,
  1794. .ivsize = AES_BLOCK_SIZE,
  1795. .maxauthsize = SHA224_DIGEST_SIZE,
  1796. },
  1797. .caam = {
  1798. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1799. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1800. OP_ALG_AAI_HMAC_PRECOMP,
  1801. .geniv = true,
  1802. },
  1803. },
  1804. {
  1805. .aead = {
  1806. .base = {
  1807. .cra_name = "authenc(hmac(sha256),cbc(aes))",
  1808. .cra_driver_name = "authenc-hmac-sha256-"
  1809. "cbc-aes-caam",
  1810. .cra_blocksize = AES_BLOCK_SIZE,
  1811. },
  1812. .setkey = aead_setkey,
  1813. .setauthsize = aead_setauthsize,
  1814. .encrypt = aead_encrypt,
  1815. .decrypt = aead_decrypt,
  1816. .ivsize = AES_BLOCK_SIZE,
  1817. .maxauthsize = SHA256_DIGEST_SIZE,
  1818. },
  1819. .caam = {
  1820. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1821. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  1822. OP_ALG_AAI_HMAC_PRECOMP,
  1823. },
  1824. },
  1825. {
  1826. .aead = {
  1827. .base = {
  1828. .cra_name = "echainiv(authenc(hmac(sha256),"
  1829. "cbc(aes)))",
  1830. .cra_driver_name = "echainiv-authenc-"
  1831. "hmac-sha256-cbc-aes-caam",
  1832. .cra_blocksize = AES_BLOCK_SIZE,
  1833. },
  1834. .setkey = aead_setkey,
  1835. .setauthsize = aead_setauthsize,
  1836. .encrypt = aead_encrypt,
  1837. .decrypt = aead_decrypt,
  1838. .ivsize = AES_BLOCK_SIZE,
  1839. .maxauthsize = SHA256_DIGEST_SIZE,
  1840. },
  1841. .caam = {
  1842. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1843. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  1844. OP_ALG_AAI_HMAC_PRECOMP,
  1845. .geniv = true,
  1846. },
  1847. },
  1848. {
  1849. .aead = {
  1850. .base = {
  1851. .cra_name = "authenc(hmac(sha384),cbc(aes))",
  1852. .cra_driver_name = "authenc-hmac-sha384-"
  1853. "cbc-aes-caam",
  1854. .cra_blocksize = AES_BLOCK_SIZE,
  1855. },
  1856. .setkey = aead_setkey,
  1857. .setauthsize = aead_setauthsize,
  1858. .encrypt = aead_encrypt,
  1859. .decrypt = aead_decrypt,
  1860. .ivsize = AES_BLOCK_SIZE,
  1861. .maxauthsize = SHA384_DIGEST_SIZE,
  1862. },
  1863. .caam = {
  1864. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1865. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  1866. OP_ALG_AAI_HMAC_PRECOMP,
  1867. },
  1868. },
  1869. {
  1870. .aead = {
  1871. .base = {
  1872. .cra_name = "echainiv(authenc(hmac(sha384),"
  1873. "cbc(aes)))",
  1874. .cra_driver_name = "echainiv-authenc-"
  1875. "hmac-sha384-cbc-aes-caam",
  1876. .cra_blocksize = AES_BLOCK_SIZE,
  1877. },
  1878. .setkey = aead_setkey,
  1879. .setauthsize = aead_setauthsize,
  1880. .encrypt = aead_encrypt,
  1881. .decrypt = aead_decrypt,
  1882. .ivsize = AES_BLOCK_SIZE,
  1883. .maxauthsize = SHA384_DIGEST_SIZE,
  1884. },
  1885. .caam = {
  1886. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1887. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  1888. OP_ALG_AAI_HMAC_PRECOMP,
  1889. .geniv = true,
  1890. },
  1891. },
  1892. {
  1893. .aead = {
  1894. .base = {
  1895. .cra_name = "authenc(hmac(sha512),cbc(aes))",
  1896. .cra_driver_name = "authenc-hmac-sha512-"
  1897. "cbc-aes-caam",
  1898. .cra_blocksize = AES_BLOCK_SIZE,
  1899. },
  1900. .setkey = aead_setkey,
  1901. .setauthsize = aead_setauthsize,
  1902. .encrypt = aead_encrypt,
  1903. .decrypt = aead_decrypt,
  1904. .ivsize = AES_BLOCK_SIZE,
  1905. .maxauthsize = SHA512_DIGEST_SIZE,
  1906. },
  1907. .caam = {
  1908. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1909. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  1910. OP_ALG_AAI_HMAC_PRECOMP,
  1911. },
  1912. },
  1913. {
  1914. .aead = {
  1915. .base = {
  1916. .cra_name = "echainiv(authenc(hmac(sha512),"
  1917. "cbc(aes)))",
  1918. .cra_driver_name = "echainiv-authenc-"
  1919. "hmac-sha512-cbc-aes-caam",
  1920. .cra_blocksize = AES_BLOCK_SIZE,
  1921. },
  1922. .setkey = aead_setkey,
  1923. .setauthsize = aead_setauthsize,
  1924. .encrypt = aead_encrypt,
  1925. .decrypt = aead_decrypt,
  1926. .ivsize = AES_BLOCK_SIZE,
  1927. .maxauthsize = SHA512_DIGEST_SIZE,
  1928. },
  1929. .caam = {
  1930. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1931. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  1932. OP_ALG_AAI_HMAC_PRECOMP,
  1933. .geniv = true,
  1934. },
  1935. },
  1936. {
  1937. .aead = {
  1938. .base = {
  1939. .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
  1940. .cra_driver_name = "authenc-hmac-md5-"
  1941. "cbc-des3_ede-caam",
  1942. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  1943. },
  1944. .setkey = aead_setkey,
  1945. .setauthsize = aead_setauthsize,
  1946. .encrypt = aead_encrypt,
  1947. .decrypt = aead_decrypt,
  1948. .ivsize = DES3_EDE_BLOCK_SIZE,
  1949. .maxauthsize = MD5_DIGEST_SIZE,
  1950. },
  1951. .caam = {
  1952. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  1953. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1954. OP_ALG_AAI_HMAC_PRECOMP,
  1955. }
  1956. },
  1957. {
  1958. .aead = {
  1959. .base = {
  1960. .cra_name = "echainiv(authenc(hmac(md5),"
  1961. "cbc(des3_ede)))",
  1962. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  1963. "cbc-des3_ede-caam",
  1964. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  1965. },
  1966. .setkey = aead_setkey,
  1967. .setauthsize = aead_setauthsize,
  1968. .encrypt = aead_encrypt,
  1969. .decrypt = aead_decrypt,
  1970. .ivsize = DES3_EDE_BLOCK_SIZE,
  1971. .maxauthsize = MD5_DIGEST_SIZE,
  1972. },
  1973. .caam = {
  1974. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  1975. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1976. OP_ALG_AAI_HMAC_PRECOMP,
  1977. .geniv = true,
  1978. }
  1979. },
  1980. {
  1981. .aead = {
  1982. .base = {
  1983. .cra_name = "authenc(hmac(sha1),"
  1984. "cbc(des3_ede))",
  1985. .cra_driver_name = "authenc-hmac-sha1-"
  1986. "cbc-des3_ede-caam",
  1987. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  1988. },
  1989. .setkey = aead_setkey,
  1990. .setauthsize = aead_setauthsize,
  1991. .encrypt = aead_encrypt,
  1992. .decrypt = aead_decrypt,
  1993. .ivsize = DES3_EDE_BLOCK_SIZE,
  1994. .maxauthsize = SHA1_DIGEST_SIZE,
  1995. },
  1996. .caam = {
  1997. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  1998. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1999. OP_ALG_AAI_HMAC_PRECOMP,
  2000. },
  2001. },
  2002. {
  2003. .aead = {
  2004. .base = {
  2005. .cra_name = "echainiv(authenc(hmac(sha1),"
  2006. "cbc(des3_ede)))",
  2007. .cra_driver_name = "echainiv-authenc-"
  2008. "hmac-sha1-"
  2009. "cbc-des3_ede-caam",
  2010. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2011. },
  2012. .setkey = aead_setkey,
  2013. .setauthsize = aead_setauthsize,
  2014. .encrypt = aead_encrypt,
  2015. .decrypt = aead_decrypt,
  2016. .ivsize = DES3_EDE_BLOCK_SIZE,
  2017. .maxauthsize = SHA1_DIGEST_SIZE,
  2018. },
  2019. .caam = {
  2020. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2021. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2022. OP_ALG_AAI_HMAC_PRECOMP,
  2023. .geniv = true,
  2024. },
  2025. },
  2026. {
  2027. .aead = {
  2028. .base = {
  2029. .cra_name = "authenc(hmac(sha224),"
  2030. "cbc(des3_ede))",
  2031. .cra_driver_name = "authenc-hmac-sha224-"
  2032. "cbc-des3_ede-caam",
  2033. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2034. },
  2035. .setkey = aead_setkey,
  2036. .setauthsize = aead_setauthsize,
  2037. .encrypt = aead_encrypt,
  2038. .decrypt = aead_decrypt,
  2039. .ivsize = DES3_EDE_BLOCK_SIZE,
  2040. .maxauthsize = SHA224_DIGEST_SIZE,
  2041. },
  2042. .caam = {
  2043. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2044. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2045. OP_ALG_AAI_HMAC_PRECOMP,
  2046. },
  2047. },
  2048. {
  2049. .aead = {
  2050. .base = {
  2051. .cra_name = "echainiv(authenc(hmac(sha224),"
  2052. "cbc(des3_ede)))",
  2053. .cra_driver_name = "echainiv-authenc-"
  2054. "hmac-sha224-"
  2055. "cbc-des3_ede-caam",
  2056. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2057. },
  2058. .setkey = aead_setkey,
  2059. .setauthsize = aead_setauthsize,
  2060. .encrypt = aead_encrypt,
  2061. .decrypt = aead_decrypt,
  2062. .ivsize = DES3_EDE_BLOCK_SIZE,
  2063. .maxauthsize = SHA224_DIGEST_SIZE,
  2064. },
  2065. .caam = {
  2066. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2067. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2068. OP_ALG_AAI_HMAC_PRECOMP,
  2069. .geniv = true,
  2070. },
  2071. },
  2072. {
  2073. .aead = {
  2074. .base = {
  2075. .cra_name = "authenc(hmac(sha256),"
  2076. "cbc(des3_ede))",
  2077. .cra_driver_name = "authenc-hmac-sha256-"
  2078. "cbc-des3_ede-caam",
  2079. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2080. },
  2081. .setkey = aead_setkey,
  2082. .setauthsize = aead_setauthsize,
  2083. .encrypt = aead_encrypt,
  2084. .decrypt = aead_decrypt,
  2085. .ivsize = DES3_EDE_BLOCK_SIZE,
  2086. .maxauthsize = SHA256_DIGEST_SIZE,
  2087. },
  2088. .caam = {
  2089. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2090. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2091. OP_ALG_AAI_HMAC_PRECOMP,
  2092. },
  2093. },
  2094. {
  2095. .aead = {
  2096. .base = {
  2097. .cra_name = "echainiv(authenc(hmac(sha256),"
  2098. "cbc(des3_ede)))",
  2099. .cra_driver_name = "echainiv-authenc-"
  2100. "hmac-sha256-"
  2101. "cbc-des3_ede-caam",
  2102. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2103. },
  2104. .setkey = aead_setkey,
  2105. .setauthsize = aead_setauthsize,
  2106. .encrypt = aead_encrypt,
  2107. .decrypt = aead_decrypt,
  2108. .ivsize = DES3_EDE_BLOCK_SIZE,
  2109. .maxauthsize = SHA256_DIGEST_SIZE,
  2110. },
  2111. .caam = {
  2112. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2113. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2114. OP_ALG_AAI_HMAC_PRECOMP,
  2115. .geniv = true,
  2116. },
  2117. },
  2118. {
  2119. .aead = {
  2120. .base = {
  2121. .cra_name = "authenc(hmac(sha384),"
  2122. "cbc(des3_ede))",
  2123. .cra_driver_name = "authenc-hmac-sha384-"
  2124. "cbc-des3_ede-caam",
  2125. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2126. },
  2127. .setkey = aead_setkey,
  2128. .setauthsize = aead_setauthsize,
  2129. .encrypt = aead_encrypt,
  2130. .decrypt = aead_decrypt,
  2131. .ivsize = DES3_EDE_BLOCK_SIZE,
  2132. .maxauthsize = SHA384_DIGEST_SIZE,
  2133. },
  2134. .caam = {
  2135. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2136. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2137. OP_ALG_AAI_HMAC_PRECOMP,
  2138. },
  2139. },
  2140. {
  2141. .aead = {
  2142. .base = {
  2143. .cra_name = "echainiv(authenc(hmac(sha384),"
  2144. "cbc(des3_ede)))",
  2145. .cra_driver_name = "echainiv-authenc-"
  2146. "hmac-sha384-"
  2147. "cbc-des3_ede-caam",
  2148. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2149. },
  2150. .setkey = aead_setkey,
  2151. .setauthsize = aead_setauthsize,
  2152. .encrypt = aead_encrypt,
  2153. .decrypt = aead_decrypt,
  2154. .ivsize = DES3_EDE_BLOCK_SIZE,
  2155. .maxauthsize = SHA384_DIGEST_SIZE,
  2156. },
  2157. .caam = {
  2158. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2159. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2160. OP_ALG_AAI_HMAC_PRECOMP,
  2161. .geniv = true,
  2162. },
  2163. },
  2164. {
  2165. .aead = {
  2166. .base = {
  2167. .cra_name = "authenc(hmac(sha512),"
  2168. "cbc(des3_ede))",
  2169. .cra_driver_name = "authenc-hmac-sha512-"
  2170. "cbc-des3_ede-caam",
  2171. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2172. },
  2173. .setkey = aead_setkey,
  2174. .setauthsize = aead_setauthsize,
  2175. .encrypt = aead_encrypt,
  2176. .decrypt = aead_decrypt,
  2177. .ivsize = DES3_EDE_BLOCK_SIZE,
  2178. .maxauthsize = SHA512_DIGEST_SIZE,
  2179. },
  2180. .caam = {
  2181. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2182. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2183. OP_ALG_AAI_HMAC_PRECOMP,
  2184. },
  2185. },
  2186. {
  2187. .aead = {
  2188. .base = {
  2189. .cra_name = "echainiv(authenc(hmac(sha512),"
  2190. "cbc(des3_ede)))",
  2191. .cra_driver_name = "echainiv-authenc-"
  2192. "hmac-sha512-"
  2193. "cbc-des3_ede-caam",
  2194. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2195. },
  2196. .setkey = aead_setkey,
  2197. .setauthsize = aead_setauthsize,
  2198. .encrypt = aead_encrypt,
  2199. .decrypt = aead_decrypt,
  2200. .ivsize = DES3_EDE_BLOCK_SIZE,
  2201. .maxauthsize = SHA512_DIGEST_SIZE,
  2202. },
  2203. .caam = {
  2204. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2205. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2206. OP_ALG_AAI_HMAC_PRECOMP,
  2207. .geniv = true,
  2208. },
  2209. },
  2210. {
  2211. .aead = {
  2212. .base = {
  2213. .cra_name = "authenc(hmac(md5),cbc(des))",
  2214. .cra_driver_name = "authenc-hmac-md5-"
  2215. "cbc-des-caam",
  2216. .cra_blocksize = DES_BLOCK_SIZE,
  2217. },
  2218. .setkey = aead_setkey,
  2219. .setauthsize = aead_setauthsize,
  2220. .encrypt = aead_encrypt,
  2221. .decrypt = aead_decrypt,
  2222. .ivsize = DES_BLOCK_SIZE,
  2223. .maxauthsize = MD5_DIGEST_SIZE,
  2224. },
  2225. .caam = {
  2226. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2227. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2228. OP_ALG_AAI_HMAC_PRECOMP,
  2229. },
  2230. },
  2231. {
  2232. .aead = {
  2233. .base = {
  2234. .cra_name = "echainiv(authenc(hmac(md5),"
  2235. "cbc(des)))",
  2236. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  2237. "cbc-des-caam",
  2238. .cra_blocksize = DES_BLOCK_SIZE,
  2239. },
  2240. .setkey = aead_setkey,
  2241. .setauthsize = aead_setauthsize,
  2242. .encrypt = aead_encrypt,
  2243. .decrypt = aead_decrypt,
  2244. .ivsize = DES_BLOCK_SIZE,
  2245. .maxauthsize = MD5_DIGEST_SIZE,
  2246. },
  2247. .caam = {
  2248. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2249. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2250. OP_ALG_AAI_HMAC_PRECOMP,
  2251. .geniv = true,
  2252. },
  2253. },
  2254. {
  2255. .aead = {
  2256. .base = {
  2257. .cra_name = "authenc(hmac(sha1),cbc(des))",
  2258. .cra_driver_name = "authenc-hmac-sha1-"
  2259. "cbc-des-caam",
  2260. .cra_blocksize = DES_BLOCK_SIZE,
  2261. },
  2262. .setkey = aead_setkey,
  2263. .setauthsize = aead_setauthsize,
  2264. .encrypt = aead_encrypt,
  2265. .decrypt = aead_decrypt,
  2266. .ivsize = DES_BLOCK_SIZE,
  2267. .maxauthsize = SHA1_DIGEST_SIZE,
  2268. },
  2269. .caam = {
  2270. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2271. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2272. OP_ALG_AAI_HMAC_PRECOMP,
  2273. },
  2274. },
  2275. {
  2276. .aead = {
  2277. .base = {
  2278. .cra_name = "echainiv(authenc(hmac(sha1),"
  2279. "cbc(des)))",
  2280. .cra_driver_name = "echainiv-authenc-"
  2281. "hmac-sha1-cbc-des-caam",
  2282. .cra_blocksize = DES_BLOCK_SIZE,
  2283. },
  2284. .setkey = aead_setkey,
  2285. .setauthsize = aead_setauthsize,
  2286. .encrypt = aead_encrypt,
  2287. .decrypt = aead_decrypt,
  2288. .ivsize = DES_BLOCK_SIZE,
  2289. .maxauthsize = SHA1_DIGEST_SIZE,
  2290. },
  2291. .caam = {
  2292. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2293. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2294. OP_ALG_AAI_HMAC_PRECOMP,
  2295. .geniv = true,
  2296. },
  2297. },
  2298. {
  2299. .aead = {
  2300. .base = {
  2301. .cra_name = "authenc(hmac(sha224),cbc(des))",
  2302. .cra_driver_name = "authenc-hmac-sha224-"
  2303. "cbc-des-caam",
  2304. .cra_blocksize = DES_BLOCK_SIZE,
  2305. },
  2306. .setkey = aead_setkey,
  2307. .setauthsize = aead_setauthsize,
  2308. .encrypt = aead_encrypt,
  2309. .decrypt = aead_decrypt,
  2310. .ivsize = DES_BLOCK_SIZE,
  2311. .maxauthsize = SHA224_DIGEST_SIZE,
  2312. },
  2313. .caam = {
  2314. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2315. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2316. OP_ALG_AAI_HMAC_PRECOMP,
  2317. },
  2318. },
  2319. {
  2320. .aead = {
  2321. .base = {
  2322. .cra_name = "echainiv(authenc(hmac(sha224),"
  2323. "cbc(des)))",
  2324. .cra_driver_name = "echainiv-authenc-"
  2325. "hmac-sha224-cbc-des-caam",
  2326. .cra_blocksize = DES_BLOCK_SIZE,
  2327. },
  2328. .setkey = aead_setkey,
  2329. .setauthsize = aead_setauthsize,
  2330. .encrypt = aead_encrypt,
  2331. .decrypt = aead_decrypt,
  2332. .ivsize = DES_BLOCK_SIZE,
  2333. .maxauthsize = SHA224_DIGEST_SIZE,
  2334. },
  2335. .caam = {
  2336. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2337. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2338. OP_ALG_AAI_HMAC_PRECOMP,
  2339. .geniv = true,
  2340. },
  2341. },
  2342. {
  2343. .aead = {
  2344. .base = {
  2345. .cra_name = "authenc(hmac(sha256),cbc(des))",
  2346. .cra_driver_name = "authenc-hmac-sha256-"
  2347. "cbc-des-caam",
  2348. .cra_blocksize = DES_BLOCK_SIZE,
  2349. },
  2350. .setkey = aead_setkey,
  2351. .setauthsize = aead_setauthsize,
  2352. .encrypt = aead_encrypt,
  2353. .decrypt = aead_decrypt,
  2354. .ivsize = DES_BLOCK_SIZE,
  2355. .maxauthsize = SHA256_DIGEST_SIZE,
  2356. },
  2357. .caam = {
  2358. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2359. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2360. OP_ALG_AAI_HMAC_PRECOMP,
  2361. },
  2362. },
  2363. {
  2364. .aead = {
  2365. .base = {
  2366. .cra_name = "echainiv(authenc(hmac(sha256),"
  2367. "cbc(des)))",
  2368. .cra_driver_name = "echainiv-authenc-"
  2369. "hmac-sha256-cbc-des-caam",
  2370. .cra_blocksize = DES_BLOCK_SIZE,
  2371. },
  2372. .setkey = aead_setkey,
  2373. .setauthsize = aead_setauthsize,
  2374. .encrypt = aead_encrypt,
  2375. .decrypt = aead_decrypt,
  2376. .ivsize = DES_BLOCK_SIZE,
  2377. .maxauthsize = SHA256_DIGEST_SIZE,
  2378. },
  2379. .caam = {
  2380. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2381. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2382. OP_ALG_AAI_HMAC_PRECOMP,
  2383. .geniv = true,
  2384. },
  2385. },
  2386. {
  2387. .aead = {
  2388. .base = {
  2389. .cra_name = "authenc(hmac(sha384),cbc(des))",
  2390. .cra_driver_name = "authenc-hmac-sha384-"
  2391. "cbc-des-caam",
  2392. .cra_blocksize = DES_BLOCK_SIZE,
  2393. },
  2394. .setkey = aead_setkey,
  2395. .setauthsize = aead_setauthsize,
  2396. .encrypt = aead_encrypt,
  2397. .decrypt = aead_decrypt,
  2398. .ivsize = DES_BLOCK_SIZE,
  2399. .maxauthsize = SHA384_DIGEST_SIZE,
  2400. },
  2401. .caam = {
  2402. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2403. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2404. OP_ALG_AAI_HMAC_PRECOMP,
  2405. },
  2406. },
  2407. {
  2408. .aead = {
  2409. .base = {
  2410. .cra_name = "echainiv(authenc(hmac(sha384),"
  2411. "cbc(des)))",
  2412. .cra_driver_name = "echainiv-authenc-"
  2413. "hmac-sha384-cbc-des-caam",
  2414. .cra_blocksize = DES_BLOCK_SIZE,
  2415. },
  2416. .setkey = aead_setkey,
  2417. .setauthsize = aead_setauthsize,
  2418. .encrypt = aead_encrypt,
  2419. .decrypt = aead_decrypt,
  2420. .ivsize = DES_BLOCK_SIZE,
  2421. .maxauthsize = SHA384_DIGEST_SIZE,
  2422. },
  2423. .caam = {
  2424. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2425. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2426. OP_ALG_AAI_HMAC_PRECOMP,
  2427. .geniv = true,
  2428. },
  2429. },
  2430. {
  2431. .aead = {
  2432. .base = {
  2433. .cra_name = "authenc(hmac(sha512),cbc(des))",
  2434. .cra_driver_name = "authenc-hmac-sha512-"
  2435. "cbc-des-caam",
  2436. .cra_blocksize = DES_BLOCK_SIZE,
  2437. },
  2438. .setkey = aead_setkey,
  2439. .setauthsize = aead_setauthsize,
  2440. .encrypt = aead_encrypt,
  2441. .decrypt = aead_decrypt,
  2442. .ivsize = DES_BLOCK_SIZE,
  2443. .maxauthsize = SHA512_DIGEST_SIZE,
  2444. },
  2445. .caam = {
  2446. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2447. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2448. OP_ALG_AAI_HMAC_PRECOMP,
  2449. },
  2450. },
  2451. {
  2452. .aead = {
  2453. .base = {
  2454. .cra_name = "echainiv(authenc(hmac(sha512),"
  2455. "cbc(des)))",
  2456. .cra_driver_name = "echainiv-authenc-"
  2457. "hmac-sha512-cbc-des-caam",
  2458. .cra_blocksize = DES_BLOCK_SIZE,
  2459. },
  2460. .setkey = aead_setkey,
  2461. .setauthsize = aead_setauthsize,
  2462. .encrypt = aead_encrypt,
  2463. .decrypt = aead_decrypt,
  2464. .ivsize = DES_BLOCK_SIZE,
  2465. .maxauthsize = SHA512_DIGEST_SIZE,
  2466. },
  2467. .caam = {
  2468. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2469. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2470. OP_ALG_AAI_HMAC_PRECOMP,
  2471. .geniv = true,
  2472. },
  2473. },
  2474. {
  2475. .aead = {
  2476. .base = {
  2477. .cra_name = "authenc(hmac(md5),"
  2478. "rfc3686(ctr(aes)))",
  2479. .cra_driver_name = "authenc-hmac-md5-"
  2480. "rfc3686-ctr-aes-caam",
  2481. .cra_blocksize = 1,
  2482. },
  2483. .setkey = aead_setkey,
  2484. .setauthsize = aead_setauthsize,
  2485. .encrypt = aead_encrypt,
  2486. .decrypt = aead_decrypt,
  2487. .ivsize = CTR_RFC3686_IV_SIZE,
  2488. .maxauthsize = MD5_DIGEST_SIZE,
  2489. },
  2490. .caam = {
  2491. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2492. OP_ALG_AAI_CTR_MOD128,
  2493. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2494. OP_ALG_AAI_HMAC_PRECOMP,
  2495. .rfc3686 = true,
  2496. },
  2497. },
  2498. {
  2499. .aead = {
  2500. .base = {
  2501. .cra_name = "seqiv(authenc("
  2502. "hmac(md5),rfc3686(ctr(aes))))",
  2503. .cra_driver_name = "seqiv-authenc-hmac-md5-"
  2504. "rfc3686-ctr-aes-caam",
  2505. .cra_blocksize = 1,
  2506. },
  2507. .setkey = aead_setkey,
  2508. .setauthsize = aead_setauthsize,
  2509. .encrypt = aead_encrypt,
  2510. .decrypt = aead_decrypt,
  2511. .ivsize = CTR_RFC3686_IV_SIZE,
  2512. .maxauthsize = MD5_DIGEST_SIZE,
  2513. },
  2514. .caam = {
  2515. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2516. OP_ALG_AAI_CTR_MOD128,
  2517. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2518. OP_ALG_AAI_HMAC_PRECOMP,
  2519. .rfc3686 = true,
  2520. .geniv = true,
  2521. },
  2522. },
  2523. {
  2524. .aead = {
  2525. .base = {
  2526. .cra_name = "authenc(hmac(sha1),"
  2527. "rfc3686(ctr(aes)))",
  2528. .cra_driver_name = "authenc-hmac-sha1-"
  2529. "rfc3686-ctr-aes-caam",
  2530. .cra_blocksize = 1,
  2531. },
  2532. .setkey = aead_setkey,
  2533. .setauthsize = aead_setauthsize,
  2534. .encrypt = aead_encrypt,
  2535. .decrypt = aead_decrypt,
  2536. .ivsize = CTR_RFC3686_IV_SIZE,
  2537. .maxauthsize = SHA1_DIGEST_SIZE,
  2538. },
  2539. .caam = {
  2540. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2541. OP_ALG_AAI_CTR_MOD128,
  2542. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2543. OP_ALG_AAI_HMAC_PRECOMP,
  2544. .rfc3686 = true,
  2545. },
  2546. },
  2547. {
  2548. .aead = {
  2549. .base = {
  2550. .cra_name = "seqiv(authenc("
  2551. "hmac(sha1),rfc3686(ctr(aes))))",
  2552. .cra_driver_name = "seqiv-authenc-hmac-sha1-"
  2553. "rfc3686-ctr-aes-caam",
  2554. .cra_blocksize = 1,
  2555. },
  2556. .setkey = aead_setkey,
  2557. .setauthsize = aead_setauthsize,
  2558. .encrypt = aead_encrypt,
  2559. .decrypt = aead_decrypt,
  2560. .ivsize = CTR_RFC3686_IV_SIZE,
  2561. .maxauthsize = SHA1_DIGEST_SIZE,
  2562. },
  2563. .caam = {
  2564. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2565. OP_ALG_AAI_CTR_MOD128,
  2566. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2567. OP_ALG_AAI_HMAC_PRECOMP,
  2568. .rfc3686 = true,
  2569. .geniv = true,
  2570. },
  2571. },
  2572. {
  2573. .aead = {
  2574. .base = {
  2575. .cra_name = "authenc(hmac(sha224),"
  2576. "rfc3686(ctr(aes)))",
  2577. .cra_driver_name = "authenc-hmac-sha224-"
  2578. "rfc3686-ctr-aes-caam",
  2579. .cra_blocksize = 1,
  2580. },
  2581. .setkey = aead_setkey,
  2582. .setauthsize = aead_setauthsize,
  2583. .encrypt = aead_encrypt,
  2584. .decrypt = aead_decrypt,
  2585. .ivsize = CTR_RFC3686_IV_SIZE,
  2586. .maxauthsize = SHA224_DIGEST_SIZE,
  2587. },
  2588. .caam = {
  2589. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2590. OP_ALG_AAI_CTR_MOD128,
  2591. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2592. OP_ALG_AAI_HMAC_PRECOMP,
  2593. .rfc3686 = true,
  2594. },
  2595. },
  2596. {
  2597. .aead = {
  2598. .base = {
  2599. .cra_name = "seqiv(authenc("
  2600. "hmac(sha224),rfc3686(ctr(aes))))",
  2601. .cra_driver_name = "seqiv-authenc-hmac-sha224-"
  2602. "rfc3686-ctr-aes-caam",
  2603. .cra_blocksize = 1,
  2604. },
  2605. .setkey = aead_setkey,
  2606. .setauthsize = aead_setauthsize,
  2607. .encrypt = aead_encrypt,
  2608. .decrypt = aead_decrypt,
  2609. .ivsize = CTR_RFC3686_IV_SIZE,
  2610. .maxauthsize = SHA224_DIGEST_SIZE,
  2611. },
  2612. .caam = {
  2613. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2614. OP_ALG_AAI_CTR_MOD128,
  2615. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2616. OP_ALG_AAI_HMAC_PRECOMP,
  2617. .rfc3686 = true,
  2618. .geniv = true,
  2619. },
  2620. },
  2621. {
  2622. .aead = {
  2623. .base = {
  2624. .cra_name = "authenc(hmac(sha256),"
  2625. "rfc3686(ctr(aes)))",
  2626. .cra_driver_name = "authenc-hmac-sha256-"
  2627. "rfc3686-ctr-aes-caam",
  2628. .cra_blocksize = 1,
  2629. },
  2630. .setkey = aead_setkey,
  2631. .setauthsize = aead_setauthsize,
  2632. .encrypt = aead_encrypt,
  2633. .decrypt = aead_decrypt,
  2634. .ivsize = CTR_RFC3686_IV_SIZE,
  2635. .maxauthsize = SHA256_DIGEST_SIZE,
  2636. },
  2637. .caam = {
  2638. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2639. OP_ALG_AAI_CTR_MOD128,
  2640. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2641. OP_ALG_AAI_HMAC_PRECOMP,
  2642. .rfc3686 = true,
  2643. },
  2644. },
  2645. {
  2646. .aead = {
  2647. .base = {
  2648. .cra_name = "seqiv(authenc(hmac(sha256),"
  2649. "rfc3686(ctr(aes))))",
  2650. .cra_driver_name = "seqiv-authenc-hmac-sha256-"
  2651. "rfc3686-ctr-aes-caam",
  2652. .cra_blocksize = 1,
  2653. },
  2654. .setkey = aead_setkey,
  2655. .setauthsize = aead_setauthsize,
  2656. .encrypt = aead_encrypt,
  2657. .decrypt = aead_decrypt,
  2658. .ivsize = CTR_RFC3686_IV_SIZE,
  2659. .maxauthsize = SHA256_DIGEST_SIZE,
  2660. },
  2661. .caam = {
  2662. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2663. OP_ALG_AAI_CTR_MOD128,
  2664. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2665. OP_ALG_AAI_HMAC_PRECOMP,
  2666. .rfc3686 = true,
  2667. .geniv = true,
  2668. },
  2669. },
  2670. {
  2671. .aead = {
  2672. .base = {
  2673. .cra_name = "authenc(hmac(sha384),"
  2674. "rfc3686(ctr(aes)))",
  2675. .cra_driver_name = "authenc-hmac-sha384-"
  2676. "rfc3686-ctr-aes-caam",
  2677. .cra_blocksize = 1,
  2678. },
  2679. .setkey = aead_setkey,
  2680. .setauthsize = aead_setauthsize,
  2681. .encrypt = aead_encrypt,
  2682. .decrypt = aead_decrypt,
  2683. .ivsize = CTR_RFC3686_IV_SIZE,
  2684. .maxauthsize = SHA384_DIGEST_SIZE,
  2685. },
  2686. .caam = {
  2687. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2688. OP_ALG_AAI_CTR_MOD128,
  2689. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2690. OP_ALG_AAI_HMAC_PRECOMP,
  2691. .rfc3686 = true,
  2692. },
  2693. },
  2694. {
  2695. .aead = {
  2696. .base = {
  2697. .cra_name = "seqiv(authenc(hmac(sha384),"
  2698. "rfc3686(ctr(aes))))",
  2699. .cra_driver_name = "seqiv-authenc-hmac-sha384-"
  2700. "rfc3686-ctr-aes-caam",
  2701. .cra_blocksize = 1,
  2702. },
  2703. .setkey = aead_setkey,
  2704. .setauthsize = aead_setauthsize,
  2705. .encrypt = aead_encrypt,
  2706. .decrypt = aead_decrypt,
  2707. .ivsize = CTR_RFC3686_IV_SIZE,
  2708. .maxauthsize = SHA384_DIGEST_SIZE,
  2709. },
  2710. .caam = {
  2711. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2712. OP_ALG_AAI_CTR_MOD128,
  2713. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2714. OP_ALG_AAI_HMAC_PRECOMP,
  2715. .rfc3686 = true,
  2716. .geniv = true,
  2717. },
  2718. },
  2719. {
  2720. .aead = {
  2721. .base = {
  2722. .cra_name = "authenc(hmac(sha512),"
  2723. "rfc3686(ctr(aes)))",
  2724. .cra_driver_name = "authenc-hmac-sha512-"
  2725. "rfc3686-ctr-aes-caam",
  2726. .cra_blocksize = 1,
  2727. },
  2728. .setkey = aead_setkey,
  2729. .setauthsize = aead_setauthsize,
  2730. .encrypt = aead_encrypt,
  2731. .decrypt = aead_decrypt,
  2732. .ivsize = CTR_RFC3686_IV_SIZE,
  2733. .maxauthsize = SHA512_DIGEST_SIZE,
  2734. },
  2735. .caam = {
  2736. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2737. OP_ALG_AAI_CTR_MOD128,
  2738. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2739. OP_ALG_AAI_HMAC_PRECOMP,
  2740. .rfc3686 = true,
  2741. },
  2742. },
  2743. {
  2744. .aead = {
  2745. .base = {
  2746. .cra_name = "seqiv(authenc(hmac(sha512),"
  2747. "rfc3686(ctr(aes))))",
  2748. .cra_driver_name = "seqiv-authenc-hmac-sha512-"
  2749. "rfc3686-ctr-aes-caam",
  2750. .cra_blocksize = 1,
  2751. },
  2752. .setkey = aead_setkey,
  2753. .setauthsize = aead_setauthsize,
  2754. .encrypt = aead_encrypt,
  2755. .decrypt = aead_decrypt,
  2756. .ivsize = CTR_RFC3686_IV_SIZE,
  2757. .maxauthsize = SHA512_DIGEST_SIZE,
  2758. },
  2759. .caam = {
  2760. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2761. OP_ALG_AAI_CTR_MOD128,
  2762. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2763. OP_ALG_AAI_HMAC_PRECOMP,
  2764. .rfc3686 = true,
  2765. .geniv = true,
  2766. },
  2767. },
  2768. };
/*
 * caam_init_common - common transform-context setup for skcipher and AEAD
 * @ctx:      per-tfm CAAM context to initialise
 * @caam:     per-algorithm entry holding the class1/class2 descriptor bits
 * @uses_dkp: true when the algorithm's setkey uses the DKP (split-key)
 *            protocol, which makes the device write back into the context
 *
 * Allocates a job ring for this transform and DMA-maps the context's shared
 * descriptors and key area in a single mapping.  Returns 0 on success or a
 * negative errno (job ring allocation or DMA mapping failure).
 */
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	/*
	 * On era >= 6 hardware with DKP the device generates the split key
	 * directly into the context, so the mapping must be bidirectional;
	 * otherwise data only flows CPU -> device.
	 */
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	/*
	 * One mapping covers everything from sh_desc_enc up to (not
	 * including) the sh_desc_enc_dma member - i.e. all shared
	 * descriptors plus the key (assumes that struct caam_ctx layout;
	 * the offsetof() below encodes it).  CPU sync is skipped here
	 * because descriptors are synced explicitly when (re)written.
	 */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* Derive the per-area bus addresses from the single mapping */
	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
  2802. static int caam_cra_init(struct crypto_skcipher *tfm)
  2803. {
  2804. struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
  2805. struct caam_skcipher_alg *caam_alg =
  2806. container_of(alg, typeof(*caam_alg), skcipher);
  2807. return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
  2808. false);
  2809. }
  2810. static int caam_aead_init(struct crypto_aead *tfm)
  2811. {
  2812. struct aead_alg *alg = crypto_aead_alg(tfm);
  2813. struct caam_aead_alg *caam_alg =
  2814. container_of(alg, struct caam_aead_alg, aead);
  2815. struct caam_ctx *ctx = crypto_aead_ctx(tfm);
  2816. return caam_init_common(ctx, &caam_alg->caam,
  2817. alg->setkey == aead_setkey);
  2818. }
/*
 * caam_exit_common - undo caam_init_common(): unmap the shared-descriptor/key
 * mapping and release the job ring.
 */
static void caam_exit_common(struct caam_ctx *ctx)
{
	/* Size/attrs must mirror the dma_map_single_attrs() done at init */
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}
  2826. static void caam_cra_exit(struct crypto_skcipher *tfm)
  2827. {
  2828. caam_exit_common(crypto_skcipher_ctx(tfm));
  2829. }
  2830. static void caam_aead_exit(struct crypto_aead *tfm)
  2831. {
  2832. caam_exit_common(crypto_aead_ctx(tfm));
  2833. }
  2834. static void __exit caam_algapi_exit(void)
  2835. {
  2836. int i;
  2837. for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
  2838. struct caam_aead_alg *t_alg = driver_aeads + i;
  2839. if (t_alg->registered)
  2840. crypto_unregister_aead(&t_alg->aead);
  2841. }
  2842. for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
  2843. struct caam_skcipher_alg *t_alg = driver_algs + i;
  2844. if (t_alg->registered)
  2845. crypto_unregister_skcipher(&t_alg->skcipher);
  2846. }
  2847. }
  2848. static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
  2849. {
  2850. struct skcipher_alg *alg = &t_alg->skcipher;
  2851. alg->base.cra_module = THIS_MODULE;
  2852. alg->base.cra_priority = CAAM_CRA_PRIORITY;
  2853. alg->base.cra_ctxsize = sizeof(struct caam_ctx);
  2854. alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
  2855. alg->init = caam_cra_init;
  2856. alg->exit = caam_cra_exit;
  2857. }
  2858. static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
  2859. {
  2860. struct aead_alg *alg = &t_alg->aead;
  2861. alg->base.cra_module = THIS_MODULE;
  2862. alg->base.cra_priority = CAAM_CRA_PRIORITY;
  2863. alg->base.cra_ctxsize = sizeof(struct caam_ctx);
  2864. alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
  2865. alg->init = caam_aead_init;
  2866. alg->exit = caam_aead_exit;
  2867. }
/*
 * caam_algapi_init - module init: find the CAAM controller, probe which
 * crypto engines (DES/AES/MD) it instantiates, and register every supported
 * skcipher and AEAD algorithm with the crypto API.
 *
 * Returns 0 on success, -ENODEV when no (initialised) controller is found.
 * If some registrations fail the function logs a warning per failure and
 * returns the error code of the last attempted registration.
 */
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	/* Two compatible strings exist for the controller node; try both */
	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);
	/*
	 * NOTE(review): of_find_device_by_node() takes a reference on pdev
	 * that is never dropped on any path here - presumably benign for a
	 * never-removed built-in controller, but worth confirming whether a
	 * put_device() is needed.
	 */

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
			    OP_ALG_AAI_XTS)
				continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			/* best-effort: warn and try the remaining algs */
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			/* best-effort: warn and try the remaining algs */
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
  2982. module_init(caam_algapi_init);
  2983. module_exit(caam_algapi_exit);
  2984. MODULE_LICENSE("GPL");
  2985. MODULE_DESCRIPTION("FSL CAAM support for crypto API");
  2986. MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");