/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |      |------------->|  (PDB)      |
 * ---------------      |              |  (hashKey)  |
 *       .              |              |  (cipherKey)|
 *       .              |    |-------->|  (operation)|
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
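
/*
 * Illustrative sketch only (not driver code): with the desc_constr.h
 * helpers used throughout this file, a job descriptor with the layout
 * above is assembled roughly as follows (dst_dma, src_dma and the two
 * lengths are hypothetical, already DMA-mapped values):
 *
 *	init_job_desc_shared(desc, sh_desc_dma, desc_len(sh_desc),
 *			     HDR_SHARE_DEFER | HDR_REVERSE);
 *	append_seq_out_ptr(desc, dst_dma, out_len, 0);
 *	append_seq_in_ptr(desc, src_dma, in_len, 0);
 *
 * init_aead_job() and init_ablkcipher_job() below build the real
 * descriptors this way, adding LDST_SGF to the options word whenever a
 * buffer is described by a scatter/gather table instead of being
 * contiguous.
 */
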
#include "compat.h"
#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)
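
/*
 * The split key produced by gen_split_key() is the pair of HMAC
 * IPAD/OPAD hash states, each padded to the digest size, so its upper
 * bound is two SHA-512 digests; the CTR_RFC3686_NONCE_SIZE term leaves
 * room for the nonce stored alongside the keys.
 */
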
#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

#ifdef DEBUG
#include <linux/highmem.h>

static void dbg_dump_sg(const char *level, const char *prefix_str,
			int prefix_type, int rowsize, int groupsize,
			struct scatterlist *sg, size_t tlen, bool ascii)
{
	struct scatterlist *it;
	void *it_page;
	size_t len;
	void *buf;

	/* advance via sg_next(it), not sg_next(sg), so the walk makes progress */
	for (it = sg; it != NULL && tlen > 0; it = sg_next(it)) {
		/*
		 * make sure the scatterlist's page
		 * has a valid virtual memory mapping
		 */
		it_page = kmap_atomic(sg_page(it));
		if (unlikely(!it_page)) {
			printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
			return;
		}

		buf = it_page + it->offset;
		len = min_t(size_t, tlen, it->length);
		print_hex_dump(level, prefix_str, prefix_type, rowsize,
			       groupsize, buf, len, ascii);
		tlen -= len;

		kunmap_atomic(it_page);
	}
}
#endif

static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
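	/*
	 * desc_inline_query() sets one bit per data_len[] entry for each
	 * key that still fits inline: bit 0 covers the (split)
	 * authentication key, bit 1 the cipher key. Keys that do not fit
	 * are referenced through their DMA address instead.
	 */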
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append the encryption key after the auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, DMA_TO_DEVICE);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

	ctx->cdata.keylen = keys.enckeylen;

	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4543_set_sh_desc(aead);
}

static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

	memcpy(ctx->key, key, keylen);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
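
/*
 * For reference, a kernel-side consumer reaches the setkey above through
 * the generic ablkcipher API; a minimal sketch (error handling omitted,
 * the algorithm name, key and keylen here are hypothetical):
 *
 *	struct crypto_ablkcipher *tfm;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	crypto_ablkcipher_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_ablkcipher(tfm);
 */
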
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
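
	/* Header and ShareDesc pointer, per the layout at the top of this file */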
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
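
	/*
	 * The fixed 12 in the FIFO LOAD above is the total IV length:
	 * generic GCM uses a 12-byte IV, while the RFC4106/RFC4543 cases
	 * reach 12 bytes as the 4-byte salt plus the 8-byte per-request
	 * IV appended below.
	 */
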
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes=%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
	dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (edesc->src_nents == 1 && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}

/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
		       CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
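	/*
	 * Resulting layout of the allocation:
	 * [aead_edesc | hw job descriptor (desc_bytes) | sec4_sg link table]
	 */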
  1081. edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
  1082. GFP_DMA | flags);
  1083. if (!edesc) {
  1084. caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
  1085. 0, 0, 0);
  1086. return ERR_PTR(-ENOMEM);
  1087. }
  1088. edesc->src_nents = src_nents;
  1089. edesc->dst_nents = dst_nents;
  1090. edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
  1091. desc_bytes;
  1092. *all_contig_ptr = !(mapped_src_nents > 1);
  1093. sec4_sg_index = 0;
  1094. if (mapped_src_nents > 1) {
  1095. sg_to_sec4_sg_last(req->src, mapped_src_nents,
  1096. edesc->sec4_sg + sec4_sg_index, 0);
  1097. sec4_sg_index += mapped_src_nents;
  1098. }
  1099. if (mapped_dst_nents > 1) {
  1100. sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
  1101. edesc->sec4_sg + sec4_sg_index, 0);
  1102. }
  1103. if (!sec4_sg_bytes)
  1104. return edesc;
  1105. edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
  1106. sec4_sg_bytes, DMA_TO_DEVICE);
  1107. if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
  1108. dev_err(jrdev, "unable to map S/G table\n");
  1109. aead_unmap(jrdev, edesc, req);
  1110. kfree(edesc);
  1111. return ERR_PTR(-ENOMEM);
  1112. }
  1113. edesc->sec4_sg_bytes = sec4_sg_bytes;
  1114. return edesc;
  1115. }
  1116. static int gcm_encrypt(struct aead_request *req)
  1117. {
  1118. struct aead_edesc *edesc;
  1119. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1120. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1121. struct device *jrdev = ctx->jrdev;
  1122. bool all_contig;
  1123. u32 *desc;
  1124. int ret = 0;
  1125. /* allocate extended descriptor */
  1126. edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
  1127. if (IS_ERR(edesc))
  1128. return PTR_ERR(edesc);
  1129. /* Create and submit job descriptor */
  1130. init_gcm_job(req, edesc, all_contig, true);
  1131. #ifdef DEBUG
  1132. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1133. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1134. desc_bytes(edesc->hw_desc), 1);
  1135. #endif
  1136. desc = edesc->hw_desc;
  1137. ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
  1138. if (!ret) {
  1139. ret = -EINPROGRESS;
  1140. } else {
  1141. aead_unmap(jrdev, edesc, req);
  1142. kfree(edesc);
  1143. }
  1144. return ret;
  1145. }
  1146. static int ipsec_gcm_encrypt(struct aead_request *req)
  1147. {
  1148. if (req->assoclen < 8)
  1149. return -EINVAL;
  1150. return gcm_encrypt(req);
  1151. }
  1152. static int aead_encrypt(struct aead_request *req)
  1153. {
  1154. struct aead_edesc *edesc;
  1155. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1156. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1157. struct device *jrdev = ctx->jrdev;
  1158. bool all_contig;
  1159. u32 *desc;
  1160. int ret = 0;
  1161. /* allocate extended descriptor */
  1162. edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
  1163. &all_contig, true);
  1164. if (IS_ERR(edesc))
  1165. return PTR_ERR(edesc);
  1166. /* Create and submit job descriptor */
  1167. init_authenc_job(req, edesc, all_contig, true);
  1168. #ifdef DEBUG
  1169. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1170. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1171. desc_bytes(edesc->hw_desc), 1);
  1172. #endif
  1173. desc = edesc->hw_desc;
  1174. ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
  1175. if (!ret) {
  1176. ret = -EINPROGRESS;
  1177. } else {
  1178. aead_unmap(jrdev, edesc, req);
  1179. kfree(edesc);
  1180. }
  1181. return ret;
  1182. }

static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
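
/* Same minimum-assoclen sanity check as on the rfc4106 encrypt path. */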
static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

#ifdef DEBUG
	dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    req->assoclen + req->cryptlen, 1);
#endif

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the ablkcipher extended descriptor
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	if (mapped_src_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->src)) {
		in_contig = true;
		sec4_sg_ents = 0;
	} else {
		in_contig = false;
		sec4_sg_ents = 1 + mapped_src_nents;
	}
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (!in_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + 1, 0);
	}

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = in_contig;
	return edesc;
}
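
/*
 * ablkcipher encrypt/decrypt entry points: identical flow except for the
 * shared descriptor used (sh_desc_enc vs. sh_desc_dec) and the completion
 * callback passed to caam_jr_enqueue().
 */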
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_enc,
			    ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_dec,
			    ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool out_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Check if iv can be contiguous with source and destination.
	 * If so, include it. If not, create scatterlist.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = sec4_sg_ents;
	if (mapped_dst_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->dst)) {
		out_contig = true;
	} else {
		out_contig = false;
		sec4_sg_ents += 1 + mapped_dst_nents;
	}

	/* allocate space for base edesc and hw desc commands, link tables */
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (mapped_src_nents > 1)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
				   0);

	if (!out_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
				   iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx + 1, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = out_contig;
	return edesc;
}
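
/* givencrypt: encrypt using the IV-generating (givenc) shared descriptor */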
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig = false;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
					   CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher

struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
};
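
/*
 * Legacy (ablkcipher) algorithm templates; each entry is instantiated as
 * a struct crypto_alg and registered when the module loads.
 */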
static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
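
/*
 * AEAD algorithms, registered through the aead_alg interface. The .caam
 * data selects the class 1/2 algorithm types and flags whether the IV is
 * driver-generated (geniv) and whether RFC3686 CTR wrapping is in use.
 */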
  1706. static struct caam_aead_alg driver_aeads[] = {
  1707. {
  1708. .aead = {
  1709. .base = {
  1710. .cra_name = "rfc4106(gcm(aes))",
  1711. .cra_driver_name = "rfc4106-gcm-aes-caam",
  1712. .cra_blocksize = 1,
  1713. },
  1714. .setkey = rfc4106_setkey,
  1715. .setauthsize = rfc4106_setauthsize,
  1716. .encrypt = ipsec_gcm_encrypt,
  1717. .decrypt = ipsec_gcm_decrypt,
  1718. .ivsize = 8,
  1719. .maxauthsize = AES_BLOCK_SIZE,
  1720. },
  1721. .caam = {
  1722. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1723. },
  1724. },
  1725. {
  1726. .aead = {
  1727. .base = {
  1728. .cra_name = "rfc4543(gcm(aes))",
  1729. .cra_driver_name = "rfc4543-gcm-aes-caam",
  1730. .cra_blocksize = 1,
  1731. },
  1732. .setkey = rfc4543_setkey,
  1733. .setauthsize = rfc4543_setauthsize,
  1734. .encrypt = ipsec_gcm_encrypt,
  1735. .decrypt = ipsec_gcm_decrypt,
  1736. .ivsize = 8,
  1737. .maxauthsize = AES_BLOCK_SIZE,
  1738. },
  1739. .caam = {
  1740. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1741. },
  1742. },
  1743. /* Galois Counter Mode */
  1744. {
  1745. .aead = {
  1746. .base = {
  1747. .cra_name = "gcm(aes)",
  1748. .cra_driver_name = "gcm-aes-caam",
  1749. .cra_blocksize = 1,
  1750. },
  1751. .setkey = gcm_setkey,
  1752. .setauthsize = gcm_setauthsize,
  1753. .encrypt = gcm_encrypt,
  1754. .decrypt = gcm_decrypt,
  1755. .ivsize = 12,
  1756. .maxauthsize = AES_BLOCK_SIZE,
  1757. },
  1758. .caam = {
  1759. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1760. },
  1761. },
  1762. /* single-pass ipsec_esp descriptor */
  1763. {
  1764. .aead = {
  1765. .base = {
  1766. .cra_name = "authenc(hmac(md5),"
  1767. "ecb(cipher_null))",
  1768. .cra_driver_name = "authenc-hmac-md5-"
  1769. "ecb-cipher_null-caam",
  1770. .cra_blocksize = NULL_BLOCK_SIZE,
  1771. },
  1772. .setkey = aead_setkey,
  1773. .setauthsize = aead_setauthsize,
  1774. .encrypt = aead_encrypt,
  1775. .decrypt = aead_decrypt,
  1776. .ivsize = NULL_IV_SIZE,
  1777. .maxauthsize = MD5_DIGEST_SIZE,
  1778. },
  1779. .caam = {
  1780. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1781. OP_ALG_AAI_HMAC_PRECOMP,
  1782. },
  1783. },
  1784. {
  1785. .aead = {
  1786. .base = {
  1787. .cra_name = "authenc(hmac(sha1),"
  1788. "ecb(cipher_null))",
  1789. .cra_driver_name = "authenc-hmac-sha1-"
  1790. "ecb-cipher_null-caam",
  1791. .cra_blocksize = NULL_BLOCK_SIZE,
  1792. },
  1793. .setkey = aead_setkey,
  1794. .setauthsize = aead_setauthsize,
  1795. .encrypt = aead_encrypt,
  1796. .decrypt = aead_decrypt,
  1797. .ivsize = NULL_IV_SIZE,
  1798. .maxauthsize = SHA1_DIGEST_SIZE,
  1799. },
  1800. .caam = {
  1801. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1802. OP_ALG_AAI_HMAC_PRECOMP,
  1803. },
  1804. },
  1805. {
  1806. .aead = {
  1807. .base = {
  1808. .cra_name = "authenc(hmac(sha224),"
  1809. "ecb(cipher_null))",
  1810. .cra_driver_name = "authenc-hmac-sha224-"
  1811. "ecb-cipher_null-caam",
  1812. .cra_blocksize = NULL_BLOCK_SIZE,
  1813. },
  1814. .setkey = aead_setkey,
  1815. .setauthsize = aead_setauthsize,
  1816. .encrypt = aead_encrypt,
  1817. .decrypt = aead_decrypt,
  1818. .ivsize = NULL_IV_SIZE,
  1819. .maxauthsize = SHA224_DIGEST_SIZE,
  1820. },
  1821. .caam = {
  1822. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1823. OP_ALG_AAI_HMAC_PRECOMP,
  1824. },
  1825. },
  1826. {
  1827. .aead = {
  1828. .base = {
  1829. .cra_name = "authenc(hmac(sha256),"
  1830. "ecb(cipher_null))",
  1831. .cra_driver_name = "authenc-hmac-sha256-"
  1832. "ecb-cipher_null-caam",
  1833. .cra_blocksize = NULL_BLOCK_SIZE,
  1834. },
  1835. .setkey = aead_setkey,
  1836. .setauthsize = aead_setauthsize,
  1837. .encrypt = aead_encrypt,
  1838. .decrypt = aead_decrypt,
  1839. .ivsize = NULL_IV_SIZE,
  1840. .maxauthsize = SHA256_DIGEST_SIZE,
  1841. },
  1842. .caam = {
  1843. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  1844. OP_ALG_AAI_HMAC_PRECOMP,
  1845. },
  1846. },
  1847. {
  1848. .aead = {
  1849. .base = {
  1850. .cra_name = "authenc(hmac(sha384),"
  1851. "ecb(cipher_null))",
  1852. .cra_driver_name = "authenc-hmac-sha384-"
  1853. "ecb-cipher_null-caam",
  1854. .cra_blocksize = NULL_BLOCK_SIZE,
  1855. },
  1856. .setkey = aead_setkey,
  1857. .setauthsize = aead_setauthsize,
  1858. .encrypt = aead_encrypt,
  1859. .decrypt = aead_decrypt,
  1860. .ivsize = NULL_IV_SIZE,
  1861. .maxauthsize = SHA384_DIGEST_SIZE,
  1862. },
  1863. .caam = {
  1864. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  1865. OP_ALG_AAI_HMAC_PRECOMP,
  1866. },
  1867. },
  1868. {
  1869. .aead = {
  1870. .base = {
  1871. .cra_name = "authenc(hmac(sha512),"
  1872. "ecb(cipher_null))",
  1873. .cra_driver_name = "authenc-hmac-sha512-"
  1874. "ecb-cipher_null-caam",
  1875. .cra_blocksize = NULL_BLOCK_SIZE,
  1876. },
  1877. .setkey = aead_setkey,
  1878. .setauthsize = aead_setauthsize,
  1879. .encrypt = aead_encrypt,
  1880. .decrypt = aead_decrypt,
  1881. .ivsize = NULL_IV_SIZE,
  1882. .maxauthsize = SHA512_DIGEST_SIZE,
  1883. },
  1884. .caam = {
  1885. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  1886. OP_ALG_AAI_HMAC_PRECOMP,
  1887. },
  1888. },
  1889. {
  1890. .aead = {
  1891. .base = {
  1892. .cra_name = "authenc(hmac(md5),cbc(aes))",
  1893. .cra_driver_name = "authenc-hmac-md5-"
  1894. "cbc-aes-caam",
  1895. .cra_blocksize = AES_BLOCK_SIZE,
  1896. },
  1897. .setkey = aead_setkey,
  1898. .setauthsize = aead_setauthsize,
  1899. .encrypt = aead_encrypt,
  1900. .decrypt = aead_decrypt,
  1901. .ivsize = AES_BLOCK_SIZE,
  1902. .maxauthsize = MD5_DIGEST_SIZE,
  1903. },
  1904. .caam = {
  1905. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1906. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1907. OP_ALG_AAI_HMAC_PRECOMP,
  1908. },
  1909. },
  1910. {
  1911. .aead = {
  1912. .base = {
  1913. .cra_name = "echainiv(authenc(hmac(md5),"
  1914. "cbc(aes)))",
  1915. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  1916. "cbc-aes-caam",
  1917. .cra_blocksize = AES_BLOCK_SIZE,
  1918. },
  1919. .setkey = aead_setkey,
  1920. .setauthsize = aead_setauthsize,
  1921. .encrypt = aead_encrypt,
  1922. .decrypt = aead_decrypt,
  1923. .ivsize = AES_BLOCK_SIZE,
  1924. .maxauthsize = MD5_DIGEST_SIZE,
  1925. },
  1926. .caam = {
  1927. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1928. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1929. OP_ALG_AAI_HMAC_PRECOMP,
  1930. .geniv = true,
  1931. },
  1932. },
  1933. {
  1934. .aead = {
  1935. .base = {
  1936. .cra_name = "authenc(hmac(sha1),cbc(aes))",
  1937. .cra_driver_name = "authenc-hmac-sha1-"
  1938. "cbc-aes-caam",
  1939. .cra_blocksize = AES_BLOCK_SIZE,
  1940. },
  1941. .setkey = aead_setkey,
  1942. .setauthsize = aead_setauthsize,
  1943. .encrypt = aead_encrypt,
  1944. .decrypt = aead_decrypt,
  1945. .ivsize = AES_BLOCK_SIZE,
  1946. .maxauthsize = SHA1_DIGEST_SIZE,
  1947. },
  1948. .caam = {
  1949. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1950. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1951. OP_ALG_AAI_HMAC_PRECOMP,
  1952. },
  1953. },
  1954. {
  1955. .aead = {
  1956. .base = {
  1957. .cra_name = "echainiv(authenc(hmac(sha1),"
  1958. "cbc(aes)))",
  1959. .cra_driver_name = "echainiv-authenc-"
  1960. "hmac-sha1-cbc-aes-caam",
  1961. .cra_blocksize = AES_BLOCK_SIZE,
  1962. },
  1963. .setkey = aead_setkey,
  1964. .setauthsize = aead_setauthsize,
  1965. .encrypt = aead_encrypt,
  1966. .decrypt = aead_decrypt,
  1967. .ivsize = AES_BLOCK_SIZE,
  1968. .maxauthsize = SHA1_DIGEST_SIZE,
  1969. },
  1970. .caam = {
  1971. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1972. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1973. OP_ALG_AAI_HMAC_PRECOMP,
  1974. .geniv = true,
  1975. },
  1976. },
  1977. {
  1978. .aead = {
  1979. .base = {
  1980. .cra_name = "authenc(hmac(sha224),cbc(aes))",
  1981. .cra_driver_name = "authenc-hmac-sha224-"
  1982. "cbc-aes-caam",
  1983. .cra_blocksize = AES_BLOCK_SIZE,
  1984. },
  1985. .setkey = aead_setkey,
  1986. .setauthsize = aead_setauthsize,
  1987. .encrypt = aead_encrypt,
  1988. .decrypt = aead_decrypt,
  1989. .ivsize = AES_BLOCK_SIZE,
  1990. .maxauthsize = SHA224_DIGEST_SIZE,
  1991. },
  1992. .caam = {
  1993. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1994. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1995. OP_ALG_AAI_HMAC_PRECOMP,
  1996. },
  1997. },
  1998. {
  1999. .aead = {
  2000. .base = {
  2001. .cra_name = "echainiv(authenc(hmac(sha224),"
  2002. "cbc(aes)))",
  2003. .cra_driver_name = "echainiv-authenc-"
  2004. "hmac-sha224-cbc-aes-caam",
  2005. .cra_blocksize = AES_BLOCK_SIZE,
  2006. },
  2007. .setkey = aead_setkey,
  2008. .setauthsize = aead_setauthsize,
  2009. .encrypt = aead_encrypt,
  2010. .decrypt = aead_decrypt,
  2011. .ivsize = AES_BLOCK_SIZE,
  2012. .maxauthsize = SHA224_DIGEST_SIZE,
  2013. },
  2014. .caam = {
  2015. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2016. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2017. OP_ALG_AAI_HMAC_PRECOMP,
  2018. .geniv = true,
  2019. },
  2020. },
  2021. {
  2022. .aead = {
  2023. .base = {
  2024. .cra_name = "authenc(hmac(sha256),cbc(aes))",
  2025. .cra_driver_name = "authenc-hmac-sha256-"
  2026. "cbc-aes-caam",
  2027. .cra_blocksize = AES_BLOCK_SIZE,
  2028. },
  2029. .setkey = aead_setkey,
  2030. .setauthsize = aead_setauthsize,
  2031. .encrypt = aead_encrypt,
  2032. .decrypt = aead_decrypt,
  2033. .ivsize = AES_BLOCK_SIZE,
  2034. .maxauthsize = SHA256_DIGEST_SIZE,
  2035. },
  2036. .caam = {
  2037. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2038. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2039. OP_ALG_AAI_HMAC_PRECOMP,
  2040. },
  2041. },
  2042. {
  2043. .aead = {
  2044. .base = {
  2045. .cra_name = "echainiv(authenc(hmac(sha256),"
  2046. "cbc(aes)))",
  2047. .cra_driver_name = "echainiv-authenc-"
  2048. "hmac-sha256-cbc-aes-caam",
  2049. .cra_blocksize = AES_BLOCK_SIZE,
  2050. },
  2051. .setkey = aead_setkey,
  2052. .setauthsize = aead_setauthsize,
  2053. .encrypt = aead_encrypt,
  2054. .decrypt = aead_decrypt,
  2055. .ivsize = AES_BLOCK_SIZE,
  2056. .maxauthsize = SHA256_DIGEST_SIZE,
  2057. },
  2058. .caam = {
  2059. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2060. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2061. OP_ALG_AAI_HMAC_PRECOMP,
  2062. .geniv = true,
  2063. },
  2064. },
  2065. {
  2066. .aead = {
  2067. .base = {
  2068. .cra_name = "authenc(hmac(sha384),cbc(aes))",
  2069. .cra_driver_name = "authenc-hmac-sha384-"
  2070. "cbc-aes-caam",
  2071. .cra_blocksize = AES_BLOCK_SIZE,
  2072. },
  2073. .setkey = aead_setkey,
  2074. .setauthsize = aead_setauthsize,
  2075. .encrypt = aead_encrypt,
  2076. .decrypt = aead_decrypt,
  2077. .ivsize = AES_BLOCK_SIZE,
  2078. .maxauthsize = SHA384_DIGEST_SIZE,
  2079. },
  2080. .caam = {
  2081. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2082. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2083. OP_ALG_AAI_HMAC_PRECOMP,
  2084. },
  2085. },
  2086. {
  2087. .aead = {
  2088. .base = {
  2089. .cra_name = "echainiv(authenc(hmac(sha384),"
  2090. "cbc(aes)))",
  2091. .cra_driver_name = "echainiv-authenc-"
  2092. "hmac-sha384-cbc-aes-caam",
  2093. .cra_blocksize = AES_BLOCK_SIZE,
  2094. },
  2095. .setkey = aead_setkey,
  2096. .setauthsize = aead_setauthsize,
  2097. .encrypt = aead_encrypt,
  2098. .decrypt = aead_decrypt,
  2099. .ivsize = AES_BLOCK_SIZE,
  2100. .maxauthsize = SHA384_DIGEST_SIZE,
  2101. },
  2102. .caam = {
  2103. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2104. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2105. OP_ALG_AAI_HMAC_PRECOMP,
  2106. .geniv = true,
  2107. },
  2108. },
  2109. {
  2110. .aead = {
  2111. .base = {
  2112. .cra_name = "authenc(hmac(sha512),cbc(aes))",
  2113. .cra_driver_name = "authenc-hmac-sha512-"
  2114. "cbc-aes-caam",
  2115. .cra_blocksize = AES_BLOCK_SIZE,
  2116. },
  2117. .setkey = aead_setkey,
  2118. .setauthsize = aead_setauthsize,
  2119. .encrypt = aead_encrypt,
  2120. .decrypt = aead_decrypt,
  2121. .ivsize = AES_BLOCK_SIZE,
  2122. .maxauthsize = SHA512_DIGEST_SIZE,
  2123. },
  2124. .caam = {
  2125. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2126. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2127. OP_ALG_AAI_HMAC_PRECOMP,
  2128. },
  2129. },
  2130. {
  2131. .aead = {
  2132. .base = {
  2133. .cra_name = "echainiv(authenc(hmac(sha512),"
  2134. "cbc(aes)))",
  2135. .cra_driver_name = "echainiv-authenc-"
  2136. "hmac-sha512-cbc-aes-caam",
  2137. .cra_blocksize = AES_BLOCK_SIZE,
  2138. },
  2139. .setkey = aead_setkey,
  2140. .setauthsize = aead_setauthsize,
  2141. .encrypt = aead_encrypt,
  2142. .decrypt = aead_decrypt,
  2143. .ivsize = AES_BLOCK_SIZE,
  2144. .maxauthsize = SHA512_DIGEST_SIZE,
  2145. },
  2146. .caam = {
  2147. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2148. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2149. OP_ALG_AAI_HMAC_PRECOMP,
  2150. .geniv = true,
  2151. },
  2152. },
  2153. {
  2154. .aead = {
  2155. .base = {
  2156. .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
  2157. .cra_driver_name = "authenc-hmac-md5-"
  2158. "cbc-des3_ede-caam",
  2159. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2160. },
  2161. .setkey = aead_setkey,
  2162. .setauthsize = aead_setauthsize,
  2163. .encrypt = aead_encrypt,
  2164. .decrypt = aead_decrypt,
  2165. .ivsize = DES3_EDE_BLOCK_SIZE,
  2166. .maxauthsize = MD5_DIGEST_SIZE,
  2167. },
  2168. .caam = {
  2169. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2170. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2171. OP_ALG_AAI_HMAC_PRECOMP,
  2172. }
  2173. },
  2174. {
  2175. .aead = {
  2176. .base = {
  2177. .cra_name = "echainiv(authenc(hmac(md5),"
  2178. "cbc(des3_ede)))",
  2179. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  2180. "cbc-des3_ede-caam",
  2181. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2182. },
  2183. .setkey = aead_setkey,
  2184. .setauthsize = aead_setauthsize,
  2185. .encrypt = aead_encrypt,
  2186. .decrypt = aead_decrypt,
  2187. .ivsize = DES3_EDE_BLOCK_SIZE,
  2188. .maxauthsize = MD5_DIGEST_SIZE,
  2189. },
  2190. .caam = {
  2191. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2192. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2193. OP_ALG_AAI_HMAC_PRECOMP,
  2194. .geniv = true,
  2195. }
  2196. },
  2197. {
  2198. .aead = {
  2199. .base = {
  2200. .cra_name = "authenc(hmac(sha1),"
  2201. "cbc(des3_ede))",
  2202. .cra_driver_name = "authenc-hmac-sha1-"
  2203. "cbc-des3_ede-caam",
  2204. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2205. },
  2206. .setkey = aead_setkey,
  2207. .setauthsize = aead_setauthsize,
  2208. .encrypt = aead_encrypt,
  2209. .decrypt = aead_decrypt,
  2210. .ivsize = DES3_EDE_BLOCK_SIZE,
  2211. .maxauthsize = SHA1_DIGEST_SIZE,
  2212. },
  2213. .caam = {
  2214. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2215. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2216. OP_ALG_AAI_HMAC_PRECOMP,
  2217. },
  2218. },
  2219. {
  2220. .aead = {
  2221. .base = {
  2222. .cra_name = "echainiv(authenc(hmac(sha1),"
  2223. "cbc(des3_ede)))",
  2224. .cra_driver_name = "echainiv-authenc-"
  2225. "hmac-sha1-"
  2226. "cbc-des3_ede-caam",
  2227. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2228. },
  2229. .setkey = aead_setkey,
  2230. .setauthsize = aead_setauthsize,
  2231. .encrypt = aead_encrypt,
  2232. .decrypt = aead_decrypt,
  2233. .ivsize = DES3_EDE_BLOCK_SIZE,
  2234. .maxauthsize = SHA1_DIGEST_SIZE,
  2235. },
  2236. .caam = {
  2237. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2238. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2239. OP_ALG_AAI_HMAC_PRECOMP,
  2240. .geniv = true,
  2241. },
  2242. },
  2243. {
  2244. .aead = {
  2245. .base = {
  2246. .cra_name = "authenc(hmac(sha224),"
  2247. "cbc(des3_ede))",
  2248. .cra_driver_name = "authenc-hmac-sha224-"
  2249. "cbc-des3_ede-caam",
  2250. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2251. },
  2252. .setkey = aead_setkey,
  2253. .setauthsize = aead_setauthsize,
  2254. .encrypt = aead_encrypt,
  2255. .decrypt = aead_decrypt,
  2256. .ivsize = DES3_EDE_BLOCK_SIZE,
  2257. .maxauthsize = SHA224_DIGEST_SIZE,
  2258. },
  2259. .caam = {
  2260. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2261. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2262. OP_ALG_AAI_HMAC_PRECOMP,
  2263. },
  2264. },
  2265. {
  2266. .aead = {
  2267. .base = {
  2268. .cra_name = "echainiv(authenc(hmac(sha224),"
  2269. "cbc(des3_ede)))",
  2270. .cra_driver_name = "echainiv-authenc-"
  2271. "hmac-sha224-"
  2272. "cbc-des3_ede-caam",
  2273. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2274. },
  2275. .setkey = aead_setkey,
  2276. .setauthsize = aead_setauthsize,
  2277. .encrypt = aead_encrypt,
  2278. .decrypt = aead_decrypt,
  2279. .ivsize = DES3_EDE_BLOCK_SIZE,
  2280. .maxauthsize = SHA224_DIGEST_SIZE,
  2281. },
  2282. .caam = {
  2283. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2284. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2285. OP_ALG_AAI_HMAC_PRECOMP,
  2286. .geniv = true,
  2287. },
  2288. },
  2289. {
  2290. .aead = {
  2291. .base = {
  2292. .cra_name = "authenc(hmac(sha256),"
  2293. "cbc(des3_ede))",
  2294. .cra_driver_name = "authenc-hmac-sha256-"
  2295. "cbc-des3_ede-caam",
  2296. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2297. },
  2298. .setkey = aead_setkey,
  2299. .setauthsize = aead_setauthsize,
  2300. .encrypt = aead_encrypt,
  2301. .decrypt = aead_decrypt,
  2302. .ivsize = DES3_EDE_BLOCK_SIZE,
  2303. .maxauthsize = SHA256_DIGEST_SIZE,
  2304. },
  2305. .caam = {
  2306. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2307. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2308. OP_ALG_AAI_HMAC_PRECOMP,
  2309. },
  2310. },
  2311. {
  2312. .aead = {
  2313. .base = {
  2314. .cra_name = "echainiv(authenc(hmac(sha256),"
  2315. "cbc(des3_ede)))",
  2316. .cra_driver_name = "echainiv-authenc-"
  2317. "hmac-sha256-"
  2318. "cbc-des3_ede-caam",
  2319. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2320. },
  2321. .setkey = aead_setkey,
  2322. .setauthsize = aead_setauthsize,
  2323. .encrypt = aead_encrypt,
  2324. .decrypt = aead_decrypt,
  2325. .ivsize = DES3_EDE_BLOCK_SIZE,
  2326. .maxauthsize = SHA256_DIGEST_SIZE,
  2327. },
  2328. .caam = {
  2329. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2330. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2331. OP_ALG_AAI_HMAC_PRECOMP,
  2332. .geniv = true,
  2333. },
  2334. },
  2335. {
  2336. .aead = {
  2337. .base = {
  2338. .cra_name = "authenc(hmac(sha384),"
  2339. "cbc(des3_ede))",
  2340. .cra_driver_name = "authenc-hmac-sha384-"
  2341. "cbc-des3_ede-caam",
  2342. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2343. },
  2344. .setkey = aead_setkey,
  2345. .setauthsize = aead_setauthsize,
  2346. .encrypt = aead_encrypt,
  2347. .decrypt = aead_decrypt,
  2348. .ivsize = DES3_EDE_BLOCK_SIZE,
  2349. .maxauthsize = SHA384_DIGEST_SIZE,
  2350. },
  2351. .caam = {
  2352. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2353. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2354. OP_ALG_AAI_HMAC_PRECOMP,
  2355. },
  2356. },
  2357. {
  2358. .aead = {
  2359. .base = {
  2360. .cra_name = "echainiv(authenc(hmac(sha384),"
  2361. "cbc(des3_ede)))",
  2362. .cra_driver_name = "echainiv-authenc-"
  2363. "hmac-sha384-"
  2364. "cbc-des3_ede-caam",
  2365. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2366. },
  2367. .setkey = aead_setkey,
  2368. .setauthsize = aead_setauthsize,
  2369. .encrypt = aead_encrypt,
  2370. .decrypt = aead_decrypt,
  2371. .ivsize = DES3_EDE_BLOCK_SIZE,
  2372. .maxauthsize = SHA384_DIGEST_SIZE,
  2373. },
  2374. .caam = {
  2375. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2376. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2377. OP_ALG_AAI_HMAC_PRECOMP,
  2378. .geniv = true,
  2379. },
  2380. },
  2381. {
  2382. .aead = {
  2383. .base = {
  2384. .cra_name = "authenc(hmac(sha512),"
  2385. "cbc(des3_ede))",
  2386. .cra_driver_name = "authenc-hmac-sha512-"
  2387. "cbc-des3_ede-caam",
  2388. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2389. },
  2390. .setkey = aead_setkey,
  2391. .setauthsize = aead_setauthsize,
  2392. .encrypt = aead_encrypt,
  2393. .decrypt = aead_decrypt,
  2394. .ivsize = DES3_EDE_BLOCK_SIZE,
  2395. .maxauthsize = SHA512_DIGEST_SIZE,
  2396. },
  2397. .caam = {
  2398. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2399. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2400. OP_ALG_AAI_HMAC_PRECOMP,
  2401. },
  2402. },
  2403. {
  2404. .aead = {
  2405. .base = {
  2406. .cra_name = "echainiv(authenc(hmac(sha512),"
  2407. "cbc(des3_ede)))",
  2408. .cra_driver_name = "echainiv-authenc-"
  2409. "hmac-sha512-"
  2410. "cbc-des3_ede-caam",
  2411. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2412. },
  2413. .setkey = aead_setkey,
  2414. .setauthsize = aead_setauthsize,
  2415. .encrypt = aead_encrypt,
  2416. .decrypt = aead_decrypt,
  2417. .ivsize = DES3_EDE_BLOCK_SIZE,
  2418. .maxauthsize = SHA512_DIGEST_SIZE,
  2419. },
  2420. .caam = {
  2421. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2422. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2423. OP_ALG_AAI_HMAC_PRECOMP,
  2424. .geniv = true,
  2425. },
  2426. },
  2427. {
  2428. .aead = {
  2429. .base = {
  2430. .cra_name = "authenc(hmac(md5),cbc(des))",
  2431. .cra_driver_name = "authenc-hmac-md5-"
  2432. "cbc-des-caam",
  2433. .cra_blocksize = DES_BLOCK_SIZE,
  2434. },
  2435. .setkey = aead_setkey,
  2436. .setauthsize = aead_setauthsize,
  2437. .encrypt = aead_encrypt,
  2438. .decrypt = aead_decrypt,
  2439. .ivsize = DES_BLOCK_SIZE,
  2440. .maxauthsize = MD5_DIGEST_SIZE,
  2441. },
  2442. .caam = {
  2443. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2444. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2445. OP_ALG_AAI_HMAC_PRECOMP,
  2446. },
  2447. },
  2448. {
  2449. .aead = {
  2450. .base = {
  2451. .cra_name = "echainiv(authenc(hmac(md5),"
  2452. "cbc(des)))",
  2453. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  2454. "cbc-des-caam",
  2455. .cra_blocksize = DES_BLOCK_SIZE,
  2456. },
  2457. .setkey = aead_setkey,
  2458. .setauthsize = aead_setauthsize,
  2459. .encrypt = aead_encrypt,
  2460. .decrypt = aead_decrypt,
  2461. .ivsize = DES_BLOCK_SIZE,
  2462. .maxauthsize = MD5_DIGEST_SIZE,
  2463. },
  2464. .caam = {
  2465. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2466. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2467. OP_ALG_AAI_HMAC_PRECOMP,
  2468. .geniv = true,
  2469. },
  2470. },
  2471. {
  2472. .aead = {
  2473. .base = {
  2474. .cra_name = "authenc(hmac(sha1),cbc(des))",
  2475. .cra_driver_name = "authenc-hmac-sha1-"
  2476. "cbc-des-caam",
  2477. .cra_blocksize = DES_BLOCK_SIZE,
  2478. },
  2479. .setkey = aead_setkey,
  2480. .setauthsize = aead_setauthsize,
  2481. .encrypt = aead_encrypt,
  2482. .decrypt = aead_decrypt,
  2483. .ivsize = DES_BLOCK_SIZE,
  2484. .maxauthsize = SHA1_DIGEST_SIZE,
  2485. },
  2486. .caam = {
  2487. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2488. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2489. OP_ALG_AAI_HMAC_PRECOMP,
  2490. },
  2491. },
  2492. {
  2493. .aead = {
  2494. .base = {
  2495. .cra_name = "echainiv(authenc(hmac(sha1),"
  2496. "cbc(des)))",
  2497. .cra_driver_name = "echainiv-authenc-"
  2498. "hmac-sha1-cbc-des-caam",
  2499. .cra_blocksize = DES_BLOCK_SIZE,
  2500. },
  2501. .setkey = aead_setkey,
  2502. .setauthsize = aead_setauthsize,
  2503. .encrypt = aead_encrypt,
  2504. .decrypt = aead_decrypt,
  2505. .ivsize = DES_BLOCK_SIZE,
  2506. .maxauthsize = SHA1_DIGEST_SIZE,
  2507. },
  2508. .caam = {
  2509. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2510. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2511. OP_ALG_AAI_HMAC_PRECOMP,
  2512. .geniv = true,
  2513. },
  2514. },
  2515. {
  2516. .aead = {
  2517. .base = {
  2518. .cra_name = "authenc(hmac(sha224),cbc(des))",
  2519. .cra_driver_name = "authenc-hmac-sha224-"
  2520. "cbc-des-caam",
  2521. .cra_blocksize = DES_BLOCK_SIZE,
  2522. },
  2523. .setkey = aead_setkey,
  2524. .setauthsize = aead_setauthsize,
  2525. .encrypt = aead_encrypt,
  2526. .decrypt = aead_decrypt,
  2527. .ivsize = DES_BLOCK_SIZE,
  2528. .maxauthsize = SHA224_DIGEST_SIZE,
  2529. },
  2530. .caam = {
  2531. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2532. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2533. OP_ALG_AAI_HMAC_PRECOMP,
  2534. },
  2535. },
  2536. {
  2537. .aead = {
  2538. .base = {
  2539. .cra_name = "echainiv(authenc(hmac(sha224),"
  2540. "cbc(des)))",
  2541. .cra_driver_name = "echainiv-authenc-"
  2542. "hmac-sha224-cbc-des-caam",
  2543. .cra_blocksize = DES_BLOCK_SIZE,
  2544. },
  2545. .setkey = aead_setkey,
  2546. .setauthsize = aead_setauthsize,
  2547. .encrypt = aead_encrypt,
  2548. .decrypt = aead_decrypt,
  2549. .ivsize = DES_BLOCK_SIZE,
  2550. .maxauthsize = SHA224_DIGEST_SIZE,
  2551. },
  2552. .caam = {
  2553. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2554. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2555. OP_ALG_AAI_HMAC_PRECOMP,
  2556. .geniv = true,
  2557. },
  2558. },
  2559. {
  2560. .aead = {
  2561. .base = {
  2562. .cra_name = "authenc(hmac(sha256),cbc(des))",
  2563. .cra_driver_name = "authenc-hmac-sha256-"
  2564. "cbc-des-caam",
  2565. .cra_blocksize = DES_BLOCK_SIZE,
  2566. },
  2567. .setkey = aead_setkey,
  2568. .setauthsize = aead_setauthsize,
  2569. .encrypt = aead_encrypt,
  2570. .decrypt = aead_decrypt,
  2571. .ivsize = DES_BLOCK_SIZE,
  2572. .maxauthsize = SHA256_DIGEST_SIZE,
  2573. },
  2574. .caam = {
  2575. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2576. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2577. OP_ALG_AAI_HMAC_PRECOMP,
  2578. },
  2579. },
  2580. {
  2581. .aead = {
  2582. .base = {
  2583. .cra_name = "echainiv(authenc(hmac(sha256),"
  2584. "cbc(des)))",
  2585. .cra_driver_name = "echainiv-authenc-"
  2586. "hmac-sha256-cbc-des-caam",
  2587. .cra_blocksize = DES_BLOCK_SIZE,
  2588. },
  2589. .setkey = aead_setkey,
  2590. .setauthsize = aead_setauthsize,
  2591. .encrypt = aead_encrypt,
  2592. .decrypt = aead_decrypt,
  2593. .ivsize = DES_BLOCK_SIZE,
  2594. .maxauthsize = SHA256_DIGEST_SIZE,
  2595. },
  2596. .caam = {
  2597. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2598. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2599. OP_ALG_AAI_HMAC_PRECOMP,
  2600. .geniv = true,
  2601. },
  2602. },
  2603. {
  2604. .aead = {
  2605. .base = {
  2606. .cra_name = "authenc(hmac(sha384),cbc(des))",
  2607. .cra_driver_name = "authenc-hmac-sha384-"
  2608. "cbc-des-caam",
  2609. .cra_blocksize = DES_BLOCK_SIZE,
  2610. },
  2611. .setkey = aead_setkey,
  2612. .setauthsize = aead_setauthsize,
  2613. .encrypt = aead_encrypt,
  2614. .decrypt = aead_decrypt,
  2615. .ivsize = DES_BLOCK_SIZE,
  2616. .maxauthsize = SHA384_DIGEST_SIZE,
  2617. },
  2618. .caam = {
  2619. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2620. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2621. OP_ALG_AAI_HMAC_PRECOMP,
  2622. },
  2623. },
  2624. {
  2625. .aead = {
  2626. .base = {
  2627. .cra_name = "echainiv(authenc(hmac(sha384),"
  2628. "cbc(des)))",
  2629. .cra_driver_name = "echainiv-authenc-"
  2630. "hmac-sha384-cbc-des-caam",
  2631. .cra_blocksize = DES_BLOCK_SIZE,
  2632. },
  2633. .setkey = aead_setkey,
  2634. .setauthsize = aead_setauthsize,
  2635. .encrypt = aead_encrypt,
  2636. .decrypt = aead_decrypt,
  2637. .ivsize = DES_BLOCK_SIZE,
  2638. .maxauthsize = SHA384_DIGEST_SIZE,
  2639. },
  2640. .caam = {
  2641. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2642. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2643. OP_ALG_AAI_HMAC_PRECOMP,
  2644. .geniv = true,
  2645. },
  2646. },
  2647. {
  2648. .aead = {
  2649. .base = {
  2650. .cra_name = "authenc(hmac(sha512),cbc(des))",
  2651. .cra_driver_name = "authenc-hmac-sha512-"
  2652. "cbc-des-caam",
  2653. .cra_blocksize = DES_BLOCK_SIZE,
  2654. },
  2655. .setkey = aead_setkey,
  2656. .setauthsize = aead_setauthsize,
  2657. .encrypt = aead_encrypt,
  2658. .decrypt = aead_decrypt,
  2659. .ivsize = DES_BLOCK_SIZE,
  2660. .maxauthsize = SHA512_DIGEST_SIZE,
  2661. },
  2662. .caam = {
  2663. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2664. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2665. OP_ALG_AAI_HMAC_PRECOMP,
  2666. },
  2667. },
  2668. {
  2669. .aead = {
  2670. .base = {
  2671. .cra_name = "echainiv(authenc(hmac(sha512),"
  2672. "cbc(des)))",
  2673. .cra_driver_name = "echainiv-authenc-"
  2674. "hmac-sha512-cbc-des-caam",
  2675. .cra_blocksize = DES_BLOCK_SIZE,
  2676. },
  2677. .setkey = aead_setkey,
  2678. .setauthsize = aead_setauthsize,
  2679. .encrypt = aead_encrypt,
  2680. .decrypt = aead_decrypt,
  2681. .ivsize = DES_BLOCK_SIZE,
  2682. .maxauthsize = SHA512_DIGEST_SIZE,
  2683. },
  2684. .caam = {
  2685. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2686. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2687. OP_ALG_AAI_HMAC_PRECOMP,
  2688. .geniv = true,
  2689. },
  2690. },
  2691. {
  2692. .aead = {
  2693. .base = {
  2694. .cra_name = "authenc(hmac(md5),"
  2695. "rfc3686(ctr(aes)))",
  2696. .cra_driver_name = "authenc-hmac-md5-"
  2697. "rfc3686-ctr-aes-caam",
  2698. .cra_blocksize = 1,
  2699. },
  2700. .setkey = aead_setkey,
  2701. .setauthsize = aead_setauthsize,
  2702. .encrypt = aead_encrypt,
  2703. .decrypt = aead_decrypt,
  2704. .ivsize = CTR_RFC3686_IV_SIZE,
  2705. .maxauthsize = MD5_DIGEST_SIZE,
  2706. },
  2707. .caam = {
  2708. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2709. OP_ALG_AAI_CTR_MOD128,
  2710. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2711. OP_ALG_AAI_HMAC_PRECOMP,
  2712. .rfc3686 = true,
  2713. },
  2714. },
  2715. {
  2716. .aead = {
  2717. .base = {
  2718. .cra_name = "seqiv(authenc("
  2719. "hmac(md5),rfc3686(ctr(aes))))",
  2720. .cra_driver_name = "seqiv-authenc-hmac-md5-"
  2721. "rfc3686-ctr-aes-caam",
  2722. .cra_blocksize = 1,
  2723. },
  2724. .setkey = aead_setkey,
  2725. .setauthsize = aead_setauthsize,
  2726. .encrypt = aead_encrypt,
  2727. .decrypt = aead_decrypt,
  2728. .ivsize = CTR_RFC3686_IV_SIZE,
  2729. .maxauthsize = MD5_DIGEST_SIZE,
  2730. },
  2731. .caam = {
  2732. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2733. OP_ALG_AAI_CTR_MOD128,
  2734. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2735. OP_ALG_AAI_HMAC_PRECOMP,
  2736. .rfc3686 = true,
  2737. .geniv = true,
  2738. },
  2739. },
  2740. {
  2741. .aead = {
  2742. .base = {
  2743. .cra_name = "authenc(hmac(sha1),"
  2744. "rfc3686(ctr(aes)))",
  2745. .cra_driver_name = "authenc-hmac-sha1-"
  2746. "rfc3686-ctr-aes-caam",
  2747. .cra_blocksize = 1,
  2748. },
  2749. .setkey = aead_setkey,
  2750. .setauthsize = aead_setauthsize,
  2751. .encrypt = aead_encrypt,
  2752. .decrypt = aead_decrypt,
  2753. .ivsize = CTR_RFC3686_IV_SIZE,
  2754. .maxauthsize = SHA1_DIGEST_SIZE,
  2755. },
  2756. .caam = {
  2757. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2758. OP_ALG_AAI_CTR_MOD128,
  2759. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2760. OP_ALG_AAI_HMAC_PRECOMP,
  2761. .rfc3686 = true,
  2762. },
  2763. },
  2764. {
  2765. .aead = {
  2766. .base = {
  2767. .cra_name = "seqiv(authenc("
  2768. "hmac(sha1),rfc3686(ctr(aes))))",
  2769. .cra_driver_name = "seqiv-authenc-hmac-sha1-"
  2770. "rfc3686-ctr-aes-caam",
  2771. .cra_blocksize = 1,
  2772. },
  2773. .setkey = aead_setkey,
  2774. .setauthsize = aead_setauthsize,
  2775. .encrypt = aead_encrypt,
  2776. .decrypt = aead_decrypt,
  2777. .ivsize = CTR_RFC3686_IV_SIZE,
  2778. .maxauthsize = SHA1_DIGEST_SIZE,
  2779. },
  2780. .caam = {
  2781. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2782. OP_ALG_AAI_CTR_MOD128,
  2783. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2784. OP_ALG_AAI_HMAC_PRECOMP,
  2785. .rfc3686 = true,
  2786. .geniv = true,
  2787. },
  2788. },
  2789. {
  2790. .aead = {
  2791. .base = {
  2792. .cra_name = "authenc(hmac(sha224),"
  2793. "rfc3686(ctr(aes)))",
  2794. .cra_driver_name = "authenc-hmac-sha224-"
  2795. "rfc3686-ctr-aes-caam",
  2796. .cra_blocksize = 1,
  2797. },
  2798. .setkey = aead_setkey,
  2799. .setauthsize = aead_setauthsize,
  2800. .encrypt = aead_encrypt,
  2801. .decrypt = aead_decrypt,
  2802. .ivsize = CTR_RFC3686_IV_SIZE,
  2803. .maxauthsize = SHA224_DIGEST_SIZE,
  2804. },
  2805. .caam = {
  2806. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2807. OP_ALG_AAI_CTR_MOD128,
  2808. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2809. OP_ALG_AAI_HMAC_PRECOMP,
  2810. .rfc3686 = true,
  2811. },
  2812. },
  2813. {
  2814. .aead = {
  2815. .base = {
  2816. .cra_name = "seqiv(authenc("
  2817. "hmac(sha224),rfc3686(ctr(aes))))",
  2818. .cra_driver_name = "seqiv-authenc-hmac-sha224-"
  2819. "rfc3686-ctr-aes-caam",
  2820. .cra_blocksize = 1,
  2821. },
  2822. .setkey = aead_setkey,
  2823. .setauthsize = aead_setauthsize,
  2824. .encrypt = aead_encrypt,
  2825. .decrypt = aead_decrypt,
  2826. .ivsize = CTR_RFC3686_IV_SIZE,
  2827. .maxauthsize = SHA224_DIGEST_SIZE,
  2828. },
  2829. .caam = {
  2830. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2831. OP_ALG_AAI_CTR_MOD128,
  2832. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2833. OP_ALG_AAI_HMAC_PRECOMP,
  2834. .rfc3686 = true,
  2835. .geniv = true,
  2836. },
  2837. },
  2838. {
  2839. .aead = {
  2840. .base = {
  2841. .cra_name = "authenc(hmac(sha256),"
  2842. "rfc3686(ctr(aes)))",
  2843. .cra_driver_name = "authenc-hmac-sha256-"
  2844. "rfc3686-ctr-aes-caam",
  2845. .cra_blocksize = 1,
  2846. },
  2847. .setkey = aead_setkey,
  2848. .setauthsize = aead_setauthsize,
  2849. .encrypt = aead_encrypt,
  2850. .decrypt = aead_decrypt,
  2851. .ivsize = CTR_RFC3686_IV_SIZE,
  2852. .maxauthsize = SHA256_DIGEST_SIZE,
  2853. },
  2854. .caam = {
  2855. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2856. OP_ALG_AAI_CTR_MOD128,
  2857. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2858. OP_ALG_AAI_HMAC_PRECOMP,
  2859. .rfc3686 = true,
  2860. },
  2861. },
  2862. {
  2863. .aead = {
  2864. .base = {
  2865. .cra_name = "seqiv(authenc(hmac(sha256),"
  2866. "rfc3686(ctr(aes))))",
  2867. .cra_driver_name = "seqiv-authenc-hmac-sha256-"
  2868. "rfc3686-ctr-aes-caam",
  2869. .cra_blocksize = 1,
  2870. },
  2871. .setkey = aead_setkey,
  2872. .setauthsize = aead_setauthsize,
  2873. .encrypt = aead_encrypt,
  2874. .decrypt = aead_decrypt,
  2875. .ivsize = CTR_RFC3686_IV_SIZE,
  2876. .maxauthsize = SHA256_DIGEST_SIZE,
  2877. },
  2878. .caam = {
  2879. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2880. OP_ALG_AAI_CTR_MOD128,
  2881. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2882. OP_ALG_AAI_HMAC_PRECOMP,
  2883. .rfc3686 = true,
  2884. .geniv = true,
  2885. },
  2886. },
  2887. {
  2888. .aead = {
  2889. .base = {
  2890. .cra_name = "authenc(hmac(sha384),"
  2891. "rfc3686(ctr(aes)))",
  2892. .cra_driver_name = "authenc-hmac-sha384-"
  2893. "rfc3686-ctr-aes-caam",
  2894. .cra_blocksize = 1,
  2895. },
  2896. .setkey = aead_setkey,
  2897. .setauthsize = aead_setauthsize,
  2898. .encrypt = aead_encrypt,
  2899. .decrypt = aead_decrypt,
  2900. .ivsize = CTR_RFC3686_IV_SIZE,
  2901. .maxauthsize = SHA384_DIGEST_SIZE,
  2902. },
  2903. .caam = {
  2904. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2905. OP_ALG_AAI_CTR_MOD128,
  2906. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2907. OP_ALG_AAI_HMAC_PRECOMP,
  2908. .rfc3686 = true,
  2909. },
  2910. },
  2911. {
  2912. .aead = {
  2913. .base = {
  2914. .cra_name = "seqiv(authenc(hmac(sha384),"
  2915. "rfc3686(ctr(aes))))",
  2916. .cra_driver_name = "seqiv-authenc-hmac-sha384-"
  2917. "rfc3686-ctr-aes-caam",
  2918. .cra_blocksize = 1,
  2919. },
  2920. .setkey = aead_setkey,
  2921. .setauthsize = aead_setauthsize,
  2922. .encrypt = aead_encrypt,
  2923. .decrypt = aead_decrypt,
  2924. .ivsize = CTR_RFC3686_IV_SIZE,
  2925. .maxauthsize = SHA384_DIGEST_SIZE,
  2926. },
  2927. .caam = {
  2928. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2929. OP_ALG_AAI_CTR_MOD128,
  2930. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2931. OP_ALG_AAI_HMAC_PRECOMP,
  2932. .rfc3686 = true,
  2933. .geniv = true,
  2934. },
  2935. },
  2936. {
  2937. .aead = {
  2938. .base = {
  2939. .cra_name = "authenc(hmac(sha512),"
  2940. "rfc3686(ctr(aes)))",
  2941. .cra_driver_name = "authenc-hmac-sha512-"
  2942. "rfc3686-ctr-aes-caam",
  2943. .cra_blocksize = 1,
  2944. },
  2945. .setkey = aead_setkey,
  2946. .setauthsize = aead_setauthsize,
  2947. .encrypt = aead_encrypt,
  2948. .decrypt = aead_decrypt,
  2949. .ivsize = CTR_RFC3686_IV_SIZE,
  2950. .maxauthsize = SHA512_DIGEST_SIZE,
  2951. },
  2952. .caam = {
  2953. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2954. OP_ALG_AAI_CTR_MOD128,
  2955. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2956. OP_ALG_AAI_HMAC_PRECOMP,
  2957. .rfc3686 = true,
  2958. },
  2959. },
  2960. {
  2961. .aead = {
  2962. .base = {
  2963. .cra_name = "seqiv(authenc(hmac(sha512),"
  2964. "rfc3686(ctr(aes))))",
  2965. .cra_driver_name = "seqiv-authenc-hmac-sha512-"
  2966. "rfc3686-ctr-aes-caam",
  2967. .cra_blocksize = 1,
  2968. },
  2969. .setkey = aead_setkey,
  2970. .setauthsize = aead_setauthsize,
  2971. .encrypt = aead_encrypt,
  2972. .decrypt = aead_decrypt,
  2973. .ivsize = CTR_RFC3686_IV_SIZE,
  2974. .maxauthsize = SHA512_DIGEST_SIZE,
  2975. },
  2976. .caam = {
  2977. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2978. OP_ALG_AAI_CTR_MOD128,
  2979. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2980. OP_ALG_AAI_HMAC_PRECOMP,
  2981. .rfc3686 = true,
  2982. .geniv = true,
  2983. },
  2984. },
  2985. };
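
/*
 * Illustrative sketch, not part of the driver: once caam_algapi_init() has
 * run, the AEADs above are reachable through the generic crypto API by
 * cra_name. A kernel-side user could bind to the SHA-256/rfc3686 variant
 * roughly as follows (key/keylen are assumed here and, for authenc
 * algorithms, must be packed in the crypto_authenc key format):
 *
 *	struct crypto_aead *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha256),rfc3686(ctr(aes)))",
 *				0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_aead_setkey(tfm, key, keylen);
 *	if (!err)
 *		err = crypto_aead_setauthsize(tfm, SHA256_DIGEST_SIZE);
 *
 *	crypto_free_aead(tfm);
 */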

struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};

static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
	dma_addr_t dma_addr;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
						      sh_desc_givenc);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
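
/*
 * Layout note (sketch only; the authoritative definition of struct caam_ctx
 * lives earlier in the driver): the single mapping above relies on the
 * DMA-visible members sitting contiguously ahead of sh_desc_enc_dma,
 * i.e. roughly:
 *
 *	u32 sh_desc_enc[...];
 *	u32 sh_desc_dec[...];
 *	u32 sh_desc_givenc[...];
 *	u8 key[...];
 *	dma_addr_t sh_desc_enc_dma;	<-- mapping ends here
 *
 * so one offsetof(struct caam_ctx, sh_desc_enc_dma)-sized mapping covers
 * all three shared descriptors plus the key material, and the per-field
 * DMA handles are derived by adding offsetof() deltas to the base address.
 */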

static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg =
		container_of(alg, struct caam_crypto_alg, crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}

static void caam_exit_common(struct caam_ctx *ctx)
{
	/* undo the single mapping set up in caam_init_common() */
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}

static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;

	return t_alg;
}
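
/*
 * For reference, caam_alg_alloc() consumes caam_alg_template entries shaped
 * roughly like the hypothetical sketch below; the real entries live in the
 * driver_algs[] table earlier in this file:
 *
 *	{
 *		.name = "cbc(aes)",
 *		.driver_name = "cbc-aes-caam",
 *		.blocksize = AES_BLOCK_SIZE,
 *		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
 *		.template_ablkcipher = {
 *			.setkey = ablkcipher_setkey,
 *			.encrypt = ablkcipher_encrypt,
 *			.decrypt = ablkcipher_decrypt,
 *			.min_keysize = AES_MIN_KEY_SIZE,
 *			.max_keysize = AES_MAX_KEY_SIZE,
 *			.ivsize = AES_BLOCK_SIZE,
 *		},
 *		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
 *	},
 */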

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			     OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
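
/*
 * Illustrative sketch, not part of the driver: with the module loaded, the
 * registered transforms show up in /proc/crypto, and a userspace program can
 * bind to one through AF_ALG. A minimal setup/teardown, assuming the
 * gcm(aes) AEAD registered earlier in this file is available:
 *
 *	#include <stdio.h>
 *	#include <unistd.h>
 *	#include <sys/socket.h>
 *	#include <linux/if_alg.h>
 *
 *	int main(void)
 *	{
 *		struct sockaddr_alg sa = {
 *			.salg_family = AF_ALG,
 *			.salg_type   = "aead",
 *			.salg_name   = "gcm(aes)",
 *		};
 *		unsigned char key[16] = { 0 };
 *		int tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
 *
 *		if (tfmfd < 0 ||
 *		    bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa)) ||
 *		    setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, sizeof(key)) ||
 *		    setsockopt(tfmfd, SOL_ALG, ALG_SET_AEAD_AUTHSIZE, NULL, 16)) {
 *			perror("AF_ALG setup");
 *			return 1;
 *		}
 *		puts("gcm(aes) AEAD bound via AF_ALG");
 *		close(tfmfd);
 *		return 0;
 *	}
 *
 * The actual encrypt/decrypt step would follow via accept() on tfmfd plus
 * sendmsg()/read() on the operation socket, omitted here for brevity.
 */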