caamalg.c 94 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561
  1. /*
  2. * caam - Freescale FSL CAAM support for crypto API
  3. *
  4. * Copyright 2008-2011 Freescale Semiconductor, Inc.
  5. * Copyright 2016 NXP
  6. *
  7. * Based on talitos crypto API driver.
  8. *
  9. * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
  10. *
  11. * --------------- ---------------
  12. * | JobDesc #1 |-------------------->| ShareDesc |
  13. * | *(packet 1) | | (PDB) |
  14. * --------------- |------------->| (hashKey) |
  15. * . | | (cipherKey) |
  16. * . | |-------->| (operation) |
  17. * --------------- | | ---------------
  18. * | JobDesc #2 |------| |
  19. * | *(packet 2) | |
  20. * --------------- |
  21. * . |
  22. * . |
  23. * --------------- |
  24. * | JobDesc #3 |------------
  25. * | *(packet 3) |
  26. * ---------------
  27. *
  28. * The SharedDesc never changes for a connection unless rekeyed, but
  29. * each packet will likely be in a different place. So all we need
  30. * to know to process the packet is where the input is, where the
  31. * output goes, and what context we want to process with. Context is
  32. * in the SharedDesc, packet references in the JobDesc.
  33. *
  34. * So, a job desc looks like:
  35. *
  36. * ---------------------
  37. * | Header |
  38. * | ShareDesc Pointer |
  39. * | SEQ_OUT_PTR |
  40. * | (output buffer) |
  41. * | (output length) |
  42. * | SEQ_IN_PTR |
  43. * | (input buffer) |
  44. * | (input length) |
  45. * ---------------------
  46. */
  47. #include "compat.h"
  48. #include "regs.h"
  49. #include "intern.h"
  50. #include "desc_constr.h"
  51. #include "jr.h"
  52. #include "error.h"
  53. #include "sg_sw_sec4.h"
  54. #include "key_gen.h"
  55. #include "caamalg_desc.h"
/*
 * crypto alg
 */
/* Registration priority for all algorithms in this driver */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

/*
 * Job-descriptor I/O overhead per flavor: these are subtracted from
 * CAAM_DESC_BYTES_MAX to decide whether keys can be inlined in the
 * shared descriptor (see the rem_bytes / desc_inline_query logic below).
 */
#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

/* Largest shared descriptor that still leaves room for a job descriptor */
#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

/* List of caam_crypto_alg instances registered with the crypto API */
static struct list_head alg_list;
/* Per-algorithm template data used when constructing descriptors */
struct caam_alg_entry {
	int class1_alg_type;	/* CAAM class 1 (cipher) OP_ALG type/mode */
	int class2_alg_type;	/* CAAM class 2 (auth/hash) OP_ALG type/mode */
	bool rfc3686;		/* true for RFC3686 (CTR + nonce) variants */
	bool geniv;		/* true when the IV is generated by hardware */
};
/* AEAD algorithm wrapper: crypto API alg plus CAAM-specific template */
struct caam_aead_alg {
	struct aead_alg aead;		/* crypto API algorithm definition */
	struct caam_alg_entry caam;	/* CAAM descriptor parameters */
	bool registered;		/* set once registered with crypto API */
};
/*
 * per-session context
 */
struct caam_ctx {
	/* Prebuilt shared descriptors, one per direction (+ givencrypt) */
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	/*
	 * Key material: split (IPAD/OPAD) auth key at offset 0, encryption
	 * key appended at adata.keylen_pad (see aead_setkey()).
	 */
	u8 key[CAAM_MAX_KEY_SIZE];
	/* DMA addresses of the above buffers; presumably mapped at tfm
	 * init time — mapping code is outside this chunk, confirm there. */
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;	/* direction used for dma_sync calls */
	struct device *jrdev;		/* job ring device servicing this tfm */
	struct alginfo adata;		/* class 2 (authentication) key info */
	struct alginfo cdata;		/* class 1 (cipher) key info */
	unsigned int authsize;		/* ICV / tag length in bytes */
};
  107. static int aead_null_set_sh_desc(struct crypto_aead *aead)
  108. {
  109. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  110. struct device *jrdev = ctx->jrdev;
  111. struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
  112. u32 *desc;
  113. int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
  114. ctx->adata.keylen_pad;
  115. /*
  116. * Job Descriptor and Shared Descriptors
  117. * must all fit into the 64-word Descriptor h/w Buffer
  118. */
  119. if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
  120. ctx->adata.key_inline = true;
  121. ctx->adata.key_virt = ctx->key;
  122. } else {
  123. ctx->adata.key_inline = false;
  124. ctx->adata.key_dma = ctx->key_dma;
  125. }
  126. /* aead_encrypt shared descriptor */
  127. desc = ctx->sh_desc_enc;
  128. cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
  129. ctrlpriv->era);
  130. dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
  131. desc_bytes(desc), ctx->dir);
  132. /*
  133. * Job Descriptor and Shared Descriptors
  134. * must all fit into the 64-word Descriptor h/w Buffer
  135. */
  136. if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
  137. ctx->adata.key_inline = true;
  138. ctx->adata.key_virt = ctx->key;
  139. } else {
  140. ctx->adata.key_inline = false;
  141. ctx->adata.key_dma = ctx->key_dma;
  142. }
  143. /* aead_decrypt shared descriptor */
  144. desc = ctx->sh_desc_dec;
  145. cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
  146. ctrlpriv->era);
  147. dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
  148. desc_bytes(desc), ctx->dir);
  149. return 0;
  150. }
/*
 * Rebuild the shared descriptors (encrypt, decrypt and - for geniv
 * algorithms - givencrypt) for an authenc-style AEAD session, deciding
 * per descriptor whether the auth and/or cipher keys can be inlined.
 *
 * Returns 0 on success, -EINVAL if the key material cannot fit the
 * 64-word descriptor buffer in any arrangement.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* Nothing to build until setauthsize() has been called */
	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* nonce is stored right after the two keys in ctx->key */
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/* data_len[0]: auth (split) key, data_len[1]: cipher key */
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	/* geniv algorithms use the givencrypt descriptor instead */
	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	/* bit 0 of inl_mask: auth key can be inlined */
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	/* bit 1 of inl_mask: cipher key can be inlined */
	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor (reuses the enc buffer) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
  275. static int aead_setauthsize(struct crypto_aead *authenc,
  276. unsigned int authsize)
  277. {
  278. struct caam_ctx *ctx = crypto_aead_ctx(authenc);
  279. ctx->authsize = authsize;
  280. aead_set_sh_desc(authenc);
  281. return 0;
  282. }
/*
 * Build the AES-GCM encrypt and decrypt shared descriptors. The key is
 * inlined when job + shared descriptor fit the 64-word buffer,
 * otherwise referenced via ctx->key_dma.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	/* need both key and authsize before descriptors can be built */
	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
  326. static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
  327. {
  328. struct caam_ctx *ctx = crypto_aead_ctx(authenc);
  329. ctx->authsize = authsize;
  330. gcm_set_sh_desc(authenc);
  331. return 0;
  332. }
/*
 * Build the RFC4106 (GCM for IPsec ESP) encrypt and decrypt shared
 * descriptors, inlining the key when it fits the 64-word buffer.
 */
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	/* need both key and authsize before descriptors can be built */
	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
  378. static int rfc4106_setauthsize(struct crypto_aead *authenc,
  379. unsigned int authsize)
  380. {
  381. struct caam_ctx *ctx = crypto_aead_ctx(authenc);
  382. ctx->authsize = authsize;
  383. rfc4106_set_sh_desc(authenc);
  384. return 0;
  385. }
/*
 * Build the RFC4543 (GMAC / GCM with no encryption) encrypt and decrypt
 * shared descriptors, inlining the key when it fits the 64-word buffer.
 */
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	/* need both key and authsize before descriptors can be built */
	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
  431. static int rfc4543_setauthsize(struct crypto_aead *authenc,
  432. unsigned int authsize)
  433. {
  434. struct caam_ctx *ctx = crypto_aead_ctx(authenc);
  435. ctx->authsize = authsize;
  436. rfc4543_set_sh_desc(authenc);
  437. return 0;
  438. }
/*
 * Install the combined authentication + encryption key for an authenc
 * AEAD. On CAAM era >= 6 the raw auth key is stored and the split key
 * is derived in-descriptor (DKP); on older parts gen_split_key()
 * derives it via the job ring. Layout in ctx->key: split/raw auth key
 * at offset 0, encryption key at adata.keylen_pad.
 *
 * Returns 0 or the result of rebuilding the shared descriptors;
 * -EINVAL (with CRYPTO_TFM_RES_BAD_KEY_LEN set) on malformed keys.
 */
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	/* split the authenc blob into auth and enc components */
	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		/* padded length of the split key the hardware will derive */
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	/* older hardware: derive the IPAD/OPAD split key via the job ring */
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	/* wipe the stack copy of the key material */
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
  498. static int gcm_setkey(struct crypto_aead *aead,
  499. const u8 *key, unsigned int keylen)
  500. {
  501. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  502. struct device *jrdev = ctx->jrdev;
  503. #ifdef DEBUG
  504. print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
  505. DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
  506. #endif
  507. memcpy(ctx->key, key, keylen);
  508. dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
  509. ctx->cdata.keylen = keylen;
  510. return gcm_set_sh_desc(aead);
  511. }
  512. static int rfc4106_setkey(struct crypto_aead *aead,
  513. const u8 *key, unsigned int keylen)
  514. {
  515. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  516. struct device *jrdev = ctx->jrdev;
  517. if (keylen < 4)
  518. return -EINVAL;
  519. #ifdef DEBUG
  520. print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
  521. DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
  522. #endif
  523. memcpy(ctx->key, key, keylen);
  524. /*
  525. * The last four bytes of the key material are used as the salt value
  526. * in the nonce. Update the AES key length.
  527. */
  528. ctx->cdata.keylen = keylen - 4;
  529. dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
  530. ctx->dir);
  531. return rfc4106_set_sh_desc(aead);
  532. }
  533. static int rfc4543_setkey(struct crypto_aead *aead,
  534. const u8 *key, unsigned int keylen)
  535. {
  536. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  537. struct device *jrdev = ctx->jrdev;
  538. if (keylen < 4)
  539. return -EINVAL;
  540. #ifdef DEBUG
  541. print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
  542. DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
  543. #endif
  544. memcpy(ctx->key, key, keylen);
  545. /*
  546. * The last four bytes of the key material are used as the salt value
  547. * in the nonce. Update the AES key length.
  548. */
  549. ctx->cdata.keylen = keylen - 4;
  550. dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
  551. ctx->dir);
  552. return rfc4543_set_sh_desc(aead);
  553. }
/*
 * ablkcipher_setkey - set a symmetric-cipher key and rebuild the encrypt,
 * decrypt and givencrypt shared descriptors.
 *
 * The key is referenced by virtual address and inlined into the shared
 * descriptors (key_inline = true), so only the descriptors themselves are
 * synced to the device here.
 */
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	/* rfc3686 is recognized from the algorithm name (CTR mode only) */
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* the trailing nonce is not part of the cipher key proper */
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
/*
 * xts_ablkcipher_setkey - set an AES-XTS key (two AES keys of equal size)
 * and rebuild the encrypt/decrypt shared descriptors.
 *
 * Only AES-128-XTS and AES-256-XTS key sizes are accepted; the key is
 * inlined into the shared descriptors, hence no key DMA sync.
 */
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	/* XTS keys are two concatenated AES keys of identical length */
	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space (0 means no h/w link
 *                 table was mapped and @sec4_sg_dma is not valid)
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
  655. /*
  656. * ablkcipher_edesc - s/w-extended ablkcipher descriptor
  657. * @src_nents: number of segments in input s/w scatterlist
  658. * @dst_nents: number of segments in output s/w scatterlist
  659. * @iv_dma: dma address of iv for checking continuity and link table
  660. * @iv_dir: DMA mapping direction for IV
  661. * @sec4_sg_bytes: length of dma mapped sec4_sg space
  662. * @sec4_sg_dma: bus physical mapped address of h/w link table
  663. * @sec4_sg: pointer to h/w link table
  664. * @hw_desc: the h/w job descriptor followed by any referenced link tables
  665. * and IV
  666. */
  667. struct ablkcipher_edesc {
  668. int src_nents;
  669. int dst_nents;
  670. dma_addr_t iv_dma;
  671. enum dma_data_direction iv_dir;
  672. int sec4_sg_bytes;
  673. dma_addr_t sec4_sg_dma;
  674. struct sec4_sg_entry *sec4_sg;
  675. u32 hw_desc[0];
  676. };
/*
 * caam_unmap - release every DMA mapping made for a request.
 *
 * Unmaps the source/destination scatterlists (a single bidirectional
 * mapping when they alias, separate to/from-device mappings otherwise),
 * the optional IV single mapping (iv_dma != 0) and the optional h/w link
 * table (sec4_sg_bytes != 0).
 */
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize,
		       enum dma_data_direction iv_dir, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		/* src may be a zero-length (unmapped) input */
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, iv_dir);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
/*
 * aead_unmap - caam_unmap() wrapper for AEAD requests.
 * AEAD extended descriptors carry no separately mapped IV, hence the
 * zero iv_dma/ivsize and DMA_NONE direction.
 */
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0, DMA_NONE,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
/*
 * ablkcipher_unmap - caam_unmap() wrapper for ablkcipher requests,
 * including the IV mapping recorded in the extended descriptor.
 */
static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->iv_dir,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
/*
 * aead_encrypt_done - job ring completion callback for AEAD encryption.
 *
 * Recovers the extended descriptor from the job descriptor address,
 * logs any hardware status, releases DMA mappings and completes the
 * crypto API request with the raw status code.
 */
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* hw_desc is the first member after the s/w bookkeeping fields */
	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}
/*
 * aead_decrypt_done - job ring completion callback for AEAD decryption.
 *
 * Same as aead_encrypt_done(), plus translation of the hardware ICV
 * (integrity check) failure status into -EBADMSG, the error the crypto
 * API defines for authentication failure.
 */
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
/*
 * ablkcipher_encrypt_done - job ring completion callback for ablkcipher
 * encryption (including givencrypt).
 *
 * After unmapping, copies the last ciphertext block back into req->info
 * (the crypto API's chained-IV convention) and, for givencrypt requests,
 * copies the hardware-generated IV out of the extended descriptor.
 */
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);

	/* unmap before the CPU touches any DMA'd buffers below */
	ablkcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	scatterwalk_map_and_copy(req->info, req->dst, req->nbytes - ivsize,
				 ivsize, 0);

	/* In case initial IV was generated, copy it in GIVCIPHER request */
	if (edesc->iv_dir == DMA_FROM_DEVICE) {
		u8 *iv;
		struct skcipher_givcrypt_request *greq;

		greq = container_of(req, struct skcipher_givcrypt_request,
				    creq);
		/* IV was laid out after the job descriptor and link table
		 * (see the edesc allocation); recompute that address */
		iv = (u8 *)edesc->hw_desc + desc_bytes(edesc->hw_desc) +
		     edesc->sec4_sg_bytes;
		memcpy(greq->giv, iv, ivsize);
	}

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
/*
 * ablkcipher_decrypt_done - job ring completion callback for ablkcipher
 * decryption.
 *
 * NOTE(review): unlike the encrypt path, req->info is NOT refreshed with
 * the last ciphertext block here — confirm whether IV chaining across
 * decrypt calls is expected by callers of this driver.
 */
static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
/*
 * Fill in aead job descriptor
 *
 * Builds the job descriptor header pointing at the appropriate shared
 * descriptor (encrypt or decrypt), then appends the SEQ IN/OUT pointers.
 * When data is not contiguous, pointers reference the sec4 h/w link
 * table with the SGF (scatter/gather) flag set.  The output sequence is
 * authsize bytes longer (encrypt: ICV appended) or shorter (decrypt: ICV
 * consumed) than the input.
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	/* pick the shared descriptor matching the requested direction */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/* src_nents == 0 means zero-length (unmapped) input */
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* in-place operation by default: output shares the input pointer */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst link-table entries follow the src entries */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
/*
 * init_gcm_job - extend the generic AEAD job descriptor with GCM
 * specifics: assoclen in REG3, the IV FIFO-load, and (for rfc4106-style
 * transforms) the salt stored after the AES key in ctx->key.
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	/* a 12-byte IV marks plain (non-rfc4106) GCM */
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
/*
 * init_authenc_job - extend the generic AEAD job descriptor for
 * authenc (AES-CBC/CTR + HMAC style) transforms: publish assoclen to the
 * shared descriptor and load the IV into CONTEXT1 at the mode-dependent
 * offset.
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	/* geniv decrypt descriptors fetch the IV themselves; skip the load */
	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
/*
 * Fill in ablkcipher job descriptor
 *
 * The input sequence always goes through the sec4 link table, whose first
 * entry is the IV (hence length req->nbytes + ivsize).  The output is the
 * same table minus the IV entry for in-place operation, or req->dst
 * (direct or via the dst portion of the table) otherwise.
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0;
	dma_addr_t dst_dma;
	int len;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
#endif
	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	/* input = {IV, src}; always via the link table */
	append_seq_in_ptr(desc, edesc->sec4_sg_dma, req->nbytes + ivsize,
			  LDST_SGF);

	if (likely(req->src == req->dst)) {
		/* skip the IV entry at the head of the table */
		dst_dma = edesc->sec4_sg_dma + sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst entries follow {IV, src} in the table */
			dst_dma = edesc->sec4_sg_dma + (edesc->src_nents + 1) *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
/*
 * Fill in ablkcipher givencrypt job descriptor
 *
 * The output sequence is {generated IV, ciphertext} (req->nbytes + ivsize)
 * and always goes through the sec4 link table; the input is plain
 * req->nbytes of source data.
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__) ": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	/* output entries start after any src entries in the table */
	dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
		  sizeof(struct sec4_sg_entry);
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, LDST_SGF);
}
/*
 * allocate and map the aead extended descriptor
 *
 * Counts and DMA-maps the source/destination scatterlists, allocates the
 * edesc (s/w bookkeeping + h/w job descriptor + link table in one
 * DMA-able kzalloc), fills the link table and maps it.  On any failure
 * all mappings made so far are undone and an ERR_PTR is returned.
 * *all_contig_ptr tells the caller whether the input fits in a single
 * mapped segment (no SGF needed on the input side).
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		/* out-of-place: src holds assoclen + cryptlen; dst gains the
		 * ICV on encrypt and loses it on decrypt */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
						(encrypt ? authsize :
							   (-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		/* in-place: the single list must also hold the ICV written
		 * during encryption */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* single-segment sides are addressed directly, no table entries */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	/* link table lives right after the job descriptor commands */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	/* no table needed: sec4_sg_bytes stays 0 from kzalloc, so
	 * aead_unmap() will skip the (unset) sec4_sg_dma */
	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
  1125. static int gcm_encrypt(struct aead_request *req)
  1126. {
  1127. struct aead_edesc *edesc;
  1128. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1129. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1130. struct device *jrdev = ctx->jrdev;
  1131. bool all_contig;
  1132. u32 *desc;
  1133. int ret = 0;
  1134. /* allocate extended descriptor */
  1135. edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
  1136. if (IS_ERR(edesc))
  1137. return PTR_ERR(edesc);
  1138. /* Create and submit job descriptor */
  1139. init_gcm_job(req, edesc, all_contig, true);
  1140. #ifdef DEBUG
  1141. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1142. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1143. desc_bytes(edesc->hw_desc), 1);
  1144. #endif
  1145. desc = edesc->hw_desc;
  1146. ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
  1147. if (!ret) {
  1148. ret = -EINPROGRESS;
  1149. } else {
  1150. aead_unmap(jrdev, edesc, req);
  1151. kfree(edesc);
  1152. }
  1153. return ret;
  1154. }
  1155. static int ipsec_gcm_encrypt(struct aead_request *req)
  1156. {
  1157. if (req->assoclen < 8)
  1158. return -EINVAL;
  1159. return gcm_encrypt(req);
  1160. }
  1161. static int aead_encrypt(struct aead_request *req)
  1162. {
  1163. struct aead_edesc *edesc;
  1164. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1165. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1166. struct device *jrdev = ctx->jrdev;
  1167. bool all_contig;
  1168. u32 *desc;
  1169. int ret = 0;
  1170. /* allocate extended descriptor */
  1171. edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
  1172. &all_contig, true);
  1173. if (IS_ERR(edesc))
  1174. return PTR_ERR(edesc);
  1175. /* Create and submit job descriptor */
  1176. init_authenc_job(req, edesc, all_contig, true);
  1177. #ifdef DEBUG
  1178. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1179. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1180. desc_bytes(edesc->hw_desc), 1);
  1181. #endif
  1182. desc = edesc->hw_desc;
  1183. ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
  1184. if (!ret) {
  1185. ret = -EINPROGRESS;
  1186. } else {
  1187. aead_unmap(jrdev, edesc, req);
  1188. kfree(edesc);
  1189. }
  1190. return ret;
  1191. }
  1192. static int gcm_decrypt(struct aead_request *req)
  1193. {
  1194. struct aead_edesc *edesc;
  1195. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1196. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1197. struct device *jrdev = ctx->jrdev;
  1198. bool all_contig;
  1199. u32 *desc;
  1200. int ret = 0;
  1201. /* allocate extended descriptor */
  1202. edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
  1203. if (IS_ERR(edesc))
  1204. return PTR_ERR(edesc);
  1205. /* Create and submit job descriptor*/
  1206. init_gcm_job(req, edesc, all_contig, false);
  1207. #ifdef DEBUG
  1208. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1209. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1210. desc_bytes(edesc->hw_desc), 1);
  1211. #endif
  1212. desc = edesc->hw_desc;
  1213. ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
  1214. if (!ret) {
  1215. ret = -EINPROGRESS;
  1216. } else {
  1217. aead_unmap(jrdev, edesc, req);
  1218. kfree(edesc);
  1219. }
  1220. return ret;
  1221. }
  1222. static int ipsec_gcm_decrypt(struct aead_request *req)
  1223. {
  1224. if (req->assoclen < 8)
  1225. return -EINVAL;
  1226. return gcm_decrypt(req);
  1227. }
  1228. static int aead_decrypt(struct aead_request *req)
  1229. {
  1230. struct aead_edesc *edesc;
  1231. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1232. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1233. struct device *jrdev = ctx->jrdev;
  1234. bool all_contig;
  1235. u32 *desc;
  1236. int ret = 0;
  1237. caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
  1238. DUMP_PREFIX_ADDRESS, 16, 4, req->src,
  1239. req->assoclen + req->cryptlen, 1);
  1240. /* allocate extended descriptor */
  1241. edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
  1242. &all_contig, false);
  1243. if (IS_ERR(edesc))
  1244. return PTR_ERR(edesc);
  1245. /* Create and submit job descriptor*/
  1246. init_authenc_job(req, edesc, all_contig, false);
  1247. #ifdef DEBUG
  1248. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1249. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1250. desc_bytes(edesc->hw_desc), 1);
  1251. #endif
  1252. desc = edesc->hw_desc;
  1253. ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
  1254. if (!ret) {
  1255. ret = -EINPROGRESS;
  1256. } else {
  1257. aead_unmap(jrdev, edesc, req);
  1258. kfree(edesc);
  1259. }
  1260. return ret;
  1261. }
/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 *
 * Besides mapping src/dst, the IV is copied out of req->info into the
 * tail of the edesc allocation (so it is guaranteed DMA-able) and mapped
 * separately; it becomes the first entry of the sec4 link table, ahead
 * of the source entries.  On any failure all mappings made so far are
 * undone and an ERR_PTR is returned.
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* link table layout: [IV][src entries...][dst entries if >1] */
	sec4_sg_ents = 1 + mapped_src_nents;
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;
	edesc->iv_dir = DMA_TO_DEVICE;

	/* Make sure IV is located in a DMAable area */
	iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
	memcpy(iv, req->info, ivsize);

	iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	/* IV is the first link-table entry, followed by the source */
	dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1, 0);

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	return edesc;
}
  1370. static int ablkcipher_encrypt(struct ablkcipher_request *req)
  1371. {
  1372. struct ablkcipher_edesc *edesc;
  1373. struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
  1374. struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
  1375. struct device *jrdev = ctx->jrdev;
  1376. u32 *desc;
  1377. int ret = 0;
  1378. /* allocate extended descriptor */
  1379. edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
  1380. if (IS_ERR(edesc))
  1381. return PTR_ERR(edesc);
  1382. /* Create and submit job descriptor*/
  1383. init_ablkcipher_job(ctx->sh_desc_enc, ctx->sh_desc_enc_dma, edesc, req);
  1384. #ifdef DEBUG
  1385. print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
  1386. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1387. desc_bytes(edesc->hw_desc), 1);
  1388. #endif
  1389. desc = edesc->hw_desc;
  1390. ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
  1391. if (!ret) {
  1392. ret = -EINPROGRESS;
  1393. } else {
  1394. ablkcipher_unmap(jrdev, edesc, req);
  1395. kfree(edesc);
  1396. }
  1397. return ret;
  1398. }
/*
 * ablkcipher_decrypt - submit an ablkcipher decryption job to CAAM
 * @req: ablkcipher request (source, destination, IV in req->info)
 *
 * Returns -EINPROGRESS on successful submission (completion via
 * ablkcipher_decrypt_done) or a negative error code, in which case the
 * DMA mappings created by ablkcipher_edesc_alloc() are released here.
 */
static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor (also DMA-maps src/dst and the IV) */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block.
	 *
	 * NOTE(review): req->src was already dma_map_sg()'d inside
	 * ablkcipher_edesc_alloc(); this CPU read of the mapped buffer
	 * precedes device access, but strictly the DMA API says the device
	 * owns the buffer between map and unmap — confirm ordering is safe
	 * on non-coherent platforms (upstream later moved this copy before
	 * the mapping).
	 */
	scatterwalk_map_and_copy(req->info, req->src, req->nbytes - ivsize,
				 ivsize, 0);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_dec, ctx->sh_desc_dec_dma, edesc, req);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* submission failed: unwind all DMA mappings */
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 *
 * Layout of the sec4 S/G table built here (IV is hardware-generated, so
 * it sits immediately before the destination entries and is mapped
 * DMA_FROM_DEVICE):
 *	[ src entries (only if mapped_src_nents > 1) ]
 *	[ IV entry ]
 *	[ dst entries ]
 * The edesc, hw descriptor, S/G table and IV buffer are carved out of a
 * single kzalloc'd region; on any failure everything mapped so far is
 * unwound before returning ERR_PTR().
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	/* may sleep only if the caller allows it */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		/* in-place operation: one bidirectional mapping suffices */
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		/*
		 * NOTE(review): this uses src_nents, not mapped_src_nents;
		 * if dma_map_sg() coalesced entries this over-counts the
		 * S/G table entries needed — confirm intentional.
		 */
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* src entries only needed in the table when src is scattered */
	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = sec4_sg_ents;
	/* IV entry + dst entries always go in the table */
	sec4_sg_ents += 1 + mapped_dst_nents;

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	/* S/G table lives right after the hw descriptor commands */
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;
	/* IV is produced by the device for givencrypt */
	edesc->iv_dir = DMA_FROM_DEVICE;

	/* Make sure IV is located in a DMAable area */
	iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
	iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_FROM_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	/* populate table: [src (optional)] [IV] [dst] */
	if (mapped_src_nents > 1)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
				   0);
	dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx, iv_dma, ivsize, 0);
	sg_to_sec4_sg_last(req->dst, mapped_dst_nents, edesc->sec4_sg +
			   dst_sg_idx + 1, 0);

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, DMA_FROM_DEVICE, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	return edesc;
}
  1546. static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
  1547. {
  1548. struct ablkcipher_request *req = &creq->creq;
  1549. struct ablkcipher_edesc *edesc;
  1550. struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
  1551. struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
  1552. struct device *jrdev = ctx->jrdev;
  1553. u32 *desc;
  1554. int ret = 0;
  1555. /* allocate extended descriptor */
  1556. edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
  1557. if (IS_ERR(edesc))
  1558. return PTR_ERR(edesc);
  1559. /* Create and submit job descriptor*/
  1560. init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
  1561. edesc, req);
  1562. #ifdef DEBUG
  1563. print_hex_dump(KERN_ERR,
  1564. "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
  1565. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1566. desc_bytes(edesc->hw_desc), 1);
  1567. #endif
  1568. desc = edesc->hw_desc;
  1569. ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
  1570. if (!ret) {
  1571. ret = -EINPROGRESS;
  1572. } else {
  1573. ablkcipher_unmap(jrdev, edesc, req);
  1574. kfree(edesc);
  1575. }
  1576. return ret;
  1577. }
/* Shorthand accessors for the union below */
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher

/*
 * caam_alg_template - static description of one algorithm this driver
 * registers with the crypto API (used by the driver_algs[] table).
 */
struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];		/* crypto API algorithm name */
	char driver_name[CRYPTO_MAX_ALG_NAME];	/* driver-specific cra name */
	unsigned int blocksize;			/* cipher block size in bytes */
	u32 type;				/* CRYPTO_ALG_TYPE_* flag */
	union {
		struct ablkcipher_alg ablkcipher;	/* ops + key/IV sizes */
	} template_u;
	u32 class1_alg_type;	/* CAAM class 1 (cipher) OP_ALG_* selector */
	u32 class2_alg_type;	/* CAAM class 2 (auth) OP_ALG_* selector */
};
/*
 * Table of ablkcipher algorithms registered by this driver.  Entries of
 * type CRYPTO_ALG_TYPE_GIVCIPHER provide a built-in IV generator via
 * ablkcipher_givencrypt(); CRYPTO_ALG_TYPE_ABLKCIPHER entries rely on a
 * generic geniv template instead.
 */
static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		/* stream-cipher mode: blocksize 1, external IV chaining */
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		/* RFC3686: CTR with a 4-byte nonce appended to the key */
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		/* XTS takes two AES keys, hence the doubled key sizes */
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
  1696. static struct caam_aead_alg driver_aeads[] = {
  1697. {
  1698. .aead = {
  1699. .base = {
  1700. .cra_name = "rfc4106(gcm(aes))",
  1701. .cra_driver_name = "rfc4106-gcm-aes-caam",
  1702. .cra_blocksize = 1,
  1703. },
  1704. .setkey = rfc4106_setkey,
  1705. .setauthsize = rfc4106_setauthsize,
  1706. .encrypt = ipsec_gcm_encrypt,
  1707. .decrypt = ipsec_gcm_decrypt,
  1708. .ivsize = GCM_RFC4106_IV_SIZE,
  1709. .maxauthsize = AES_BLOCK_SIZE,
  1710. },
  1711. .caam = {
  1712. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1713. },
  1714. },
  1715. {
  1716. .aead = {
  1717. .base = {
  1718. .cra_name = "rfc4543(gcm(aes))",
  1719. .cra_driver_name = "rfc4543-gcm-aes-caam",
  1720. .cra_blocksize = 1,
  1721. },
  1722. .setkey = rfc4543_setkey,
  1723. .setauthsize = rfc4543_setauthsize,
  1724. .encrypt = ipsec_gcm_encrypt,
  1725. .decrypt = ipsec_gcm_decrypt,
  1726. .ivsize = GCM_RFC4543_IV_SIZE,
  1727. .maxauthsize = AES_BLOCK_SIZE,
  1728. },
  1729. .caam = {
  1730. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1731. },
  1732. },
  1733. /* Galois Counter Mode */
  1734. {
  1735. .aead = {
  1736. .base = {
  1737. .cra_name = "gcm(aes)",
  1738. .cra_driver_name = "gcm-aes-caam",
  1739. .cra_blocksize = 1,
  1740. },
  1741. .setkey = gcm_setkey,
  1742. .setauthsize = gcm_setauthsize,
  1743. .encrypt = gcm_encrypt,
  1744. .decrypt = gcm_decrypt,
  1745. .ivsize = GCM_AES_IV_SIZE,
  1746. .maxauthsize = AES_BLOCK_SIZE,
  1747. },
  1748. .caam = {
  1749. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
  1750. },
  1751. },
  1752. /* single-pass ipsec_esp descriptor */
  1753. {
  1754. .aead = {
  1755. .base = {
  1756. .cra_name = "authenc(hmac(md5),"
  1757. "ecb(cipher_null))",
  1758. .cra_driver_name = "authenc-hmac-md5-"
  1759. "ecb-cipher_null-caam",
  1760. .cra_blocksize = NULL_BLOCK_SIZE,
  1761. },
  1762. .setkey = aead_setkey,
  1763. .setauthsize = aead_setauthsize,
  1764. .encrypt = aead_encrypt,
  1765. .decrypt = aead_decrypt,
  1766. .ivsize = NULL_IV_SIZE,
  1767. .maxauthsize = MD5_DIGEST_SIZE,
  1768. },
  1769. .caam = {
  1770. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1771. OP_ALG_AAI_HMAC_PRECOMP,
  1772. },
  1773. },
  1774. {
  1775. .aead = {
  1776. .base = {
  1777. .cra_name = "authenc(hmac(sha1),"
  1778. "ecb(cipher_null))",
  1779. .cra_driver_name = "authenc-hmac-sha1-"
  1780. "ecb-cipher_null-caam",
  1781. .cra_blocksize = NULL_BLOCK_SIZE,
  1782. },
  1783. .setkey = aead_setkey,
  1784. .setauthsize = aead_setauthsize,
  1785. .encrypt = aead_encrypt,
  1786. .decrypt = aead_decrypt,
  1787. .ivsize = NULL_IV_SIZE,
  1788. .maxauthsize = SHA1_DIGEST_SIZE,
  1789. },
  1790. .caam = {
  1791. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1792. OP_ALG_AAI_HMAC_PRECOMP,
  1793. },
  1794. },
  1795. {
  1796. .aead = {
  1797. .base = {
  1798. .cra_name = "authenc(hmac(sha224),"
  1799. "ecb(cipher_null))",
  1800. .cra_driver_name = "authenc-hmac-sha224-"
  1801. "ecb-cipher_null-caam",
  1802. .cra_blocksize = NULL_BLOCK_SIZE,
  1803. },
  1804. .setkey = aead_setkey,
  1805. .setauthsize = aead_setauthsize,
  1806. .encrypt = aead_encrypt,
  1807. .decrypt = aead_decrypt,
  1808. .ivsize = NULL_IV_SIZE,
  1809. .maxauthsize = SHA224_DIGEST_SIZE,
  1810. },
  1811. .caam = {
  1812. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1813. OP_ALG_AAI_HMAC_PRECOMP,
  1814. },
  1815. },
  1816. {
  1817. .aead = {
  1818. .base = {
  1819. .cra_name = "authenc(hmac(sha256),"
  1820. "ecb(cipher_null))",
  1821. .cra_driver_name = "authenc-hmac-sha256-"
  1822. "ecb-cipher_null-caam",
  1823. .cra_blocksize = NULL_BLOCK_SIZE,
  1824. },
  1825. .setkey = aead_setkey,
  1826. .setauthsize = aead_setauthsize,
  1827. .encrypt = aead_encrypt,
  1828. .decrypt = aead_decrypt,
  1829. .ivsize = NULL_IV_SIZE,
  1830. .maxauthsize = SHA256_DIGEST_SIZE,
  1831. },
  1832. .caam = {
  1833. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  1834. OP_ALG_AAI_HMAC_PRECOMP,
  1835. },
  1836. },
  1837. {
  1838. .aead = {
  1839. .base = {
  1840. .cra_name = "authenc(hmac(sha384),"
  1841. "ecb(cipher_null))",
  1842. .cra_driver_name = "authenc-hmac-sha384-"
  1843. "ecb-cipher_null-caam",
  1844. .cra_blocksize = NULL_BLOCK_SIZE,
  1845. },
  1846. .setkey = aead_setkey,
  1847. .setauthsize = aead_setauthsize,
  1848. .encrypt = aead_encrypt,
  1849. .decrypt = aead_decrypt,
  1850. .ivsize = NULL_IV_SIZE,
  1851. .maxauthsize = SHA384_DIGEST_SIZE,
  1852. },
  1853. .caam = {
  1854. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  1855. OP_ALG_AAI_HMAC_PRECOMP,
  1856. },
  1857. },
  1858. {
  1859. .aead = {
  1860. .base = {
  1861. .cra_name = "authenc(hmac(sha512),"
  1862. "ecb(cipher_null))",
  1863. .cra_driver_name = "authenc-hmac-sha512-"
  1864. "ecb-cipher_null-caam",
  1865. .cra_blocksize = NULL_BLOCK_SIZE,
  1866. },
  1867. .setkey = aead_setkey,
  1868. .setauthsize = aead_setauthsize,
  1869. .encrypt = aead_encrypt,
  1870. .decrypt = aead_decrypt,
  1871. .ivsize = NULL_IV_SIZE,
  1872. .maxauthsize = SHA512_DIGEST_SIZE,
  1873. },
  1874. .caam = {
  1875. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  1876. OP_ALG_AAI_HMAC_PRECOMP,
  1877. },
  1878. },
  1879. {
  1880. .aead = {
  1881. .base = {
  1882. .cra_name = "authenc(hmac(md5),cbc(aes))",
  1883. .cra_driver_name = "authenc-hmac-md5-"
  1884. "cbc-aes-caam",
  1885. .cra_blocksize = AES_BLOCK_SIZE,
  1886. },
  1887. .setkey = aead_setkey,
  1888. .setauthsize = aead_setauthsize,
  1889. .encrypt = aead_encrypt,
  1890. .decrypt = aead_decrypt,
  1891. .ivsize = AES_BLOCK_SIZE,
  1892. .maxauthsize = MD5_DIGEST_SIZE,
  1893. },
  1894. .caam = {
  1895. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1896. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1897. OP_ALG_AAI_HMAC_PRECOMP,
  1898. },
  1899. },
  1900. {
  1901. .aead = {
  1902. .base = {
  1903. .cra_name = "echainiv(authenc(hmac(md5),"
  1904. "cbc(aes)))",
  1905. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  1906. "cbc-aes-caam",
  1907. .cra_blocksize = AES_BLOCK_SIZE,
  1908. },
  1909. .setkey = aead_setkey,
  1910. .setauthsize = aead_setauthsize,
  1911. .encrypt = aead_encrypt,
  1912. .decrypt = aead_decrypt,
  1913. .ivsize = AES_BLOCK_SIZE,
  1914. .maxauthsize = MD5_DIGEST_SIZE,
  1915. },
  1916. .caam = {
  1917. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1918. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  1919. OP_ALG_AAI_HMAC_PRECOMP,
  1920. .geniv = true,
  1921. },
  1922. },
  1923. {
  1924. .aead = {
  1925. .base = {
  1926. .cra_name = "authenc(hmac(sha1),cbc(aes))",
  1927. .cra_driver_name = "authenc-hmac-sha1-"
  1928. "cbc-aes-caam",
  1929. .cra_blocksize = AES_BLOCK_SIZE,
  1930. },
  1931. .setkey = aead_setkey,
  1932. .setauthsize = aead_setauthsize,
  1933. .encrypt = aead_encrypt,
  1934. .decrypt = aead_decrypt,
  1935. .ivsize = AES_BLOCK_SIZE,
  1936. .maxauthsize = SHA1_DIGEST_SIZE,
  1937. },
  1938. .caam = {
  1939. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1940. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1941. OP_ALG_AAI_HMAC_PRECOMP,
  1942. },
  1943. },
  1944. {
  1945. .aead = {
  1946. .base = {
  1947. .cra_name = "echainiv(authenc(hmac(sha1),"
  1948. "cbc(aes)))",
  1949. .cra_driver_name = "echainiv-authenc-"
  1950. "hmac-sha1-cbc-aes-caam",
  1951. .cra_blocksize = AES_BLOCK_SIZE,
  1952. },
  1953. .setkey = aead_setkey,
  1954. .setauthsize = aead_setauthsize,
  1955. .encrypt = aead_encrypt,
  1956. .decrypt = aead_decrypt,
  1957. .ivsize = AES_BLOCK_SIZE,
  1958. .maxauthsize = SHA1_DIGEST_SIZE,
  1959. },
  1960. .caam = {
  1961. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1962. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  1963. OP_ALG_AAI_HMAC_PRECOMP,
  1964. .geniv = true,
  1965. },
  1966. },
  1967. {
  1968. .aead = {
  1969. .base = {
  1970. .cra_name = "authenc(hmac(sha224),cbc(aes))",
  1971. .cra_driver_name = "authenc-hmac-sha224-"
  1972. "cbc-aes-caam",
  1973. .cra_blocksize = AES_BLOCK_SIZE,
  1974. },
  1975. .setkey = aead_setkey,
  1976. .setauthsize = aead_setauthsize,
  1977. .encrypt = aead_encrypt,
  1978. .decrypt = aead_decrypt,
  1979. .ivsize = AES_BLOCK_SIZE,
  1980. .maxauthsize = SHA224_DIGEST_SIZE,
  1981. },
  1982. .caam = {
  1983. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  1984. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  1985. OP_ALG_AAI_HMAC_PRECOMP,
  1986. },
  1987. },
  1988. {
  1989. .aead = {
  1990. .base = {
  1991. .cra_name = "echainiv(authenc(hmac(sha224),"
  1992. "cbc(aes)))",
  1993. .cra_driver_name = "echainiv-authenc-"
  1994. "hmac-sha224-cbc-aes-caam",
  1995. .cra_blocksize = AES_BLOCK_SIZE,
  1996. },
  1997. .setkey = aead_setkey,
  1998. .setauthsize = aead_setauthsize,
  1999. .encrypt = aead_encrypt,
  2000. .decrypt = aead_decrypt,
  2001. .ivsize = AES_BLOCK_SIZE,
  2002. .maxauthsize = SHA224_DIGEST_SIZE,
  2003. },
  2004. .caam = {
  2005. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2006. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2007. OP_ALG_AAI_HMAC_PRECOMP,
  2008. .geniv = true,
  2009. },
  2010. },
  2011. {
  2012. .aead = {
  2013. .base = {
  2014. .cra_name = "authenc(hmac(sha256),cbc(aes))",
  2015. .cra_driver_name = "authenc-hmac-sha256-"
  2016. "cbc-aes-caam",
  2017. .cra_blocksize = AES_BLOCK_SIZE,
  2018. },
  2019. .setkey = aead_setkey,
  2020. .setauthsize = aead_setauthsize,
  2021. .encrypt = aead_encrypt,
  2022. .decrypt = aead_decrypt,
  2023. .ivsize = AES_BLOCK_SIZE,
  2024. .maxauthsize = SHA256_DIGEST_SIZE,
  2025. },
  2026. .caam = {
  2027. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2028. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2029. OP_ALG_AAI_HMAC_PRECOMP,
  2030. },
  2031. },
  2032. {
  2033. .aead = {
  2034. .base = {
  2035. .cra_name = "echainiv(authenc(hmac(sha256),"
  2036. "cbc(aes)))",
  2037. .cra_driver_name = "echainiv-authenc-"
  2038. "hmac-sha256-cbc-aes-caam",
  2039. .cra_blocksize = AES_BLOCK_SIZE,
  2040. },
  2041. .setkey = aead_setkey,
  2042. .setauthsize = aead_setauthsize,
  2043. .encrypt = aead_encrypt,
  2044. .decrypt = aead_decrypt,
  2045. .ivsize = AES_BLOCK_SIZE,
  2046. .maxauthsize = SHA256_DIGEST_SIZE,
  2047. },
  2048. .caam = {
  2049. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2050. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2051. OP_ALG_AAI_HMAC_PRECOMP,
  2052. .geniv = true,
  2053. },
  2054. },
  2055. {
  2056. .aead = {
  2057. .base = {
  2058. .cra_name = "authenc(hmac(sha384),cbc(aes))",
  2059. .cra_driver_name = "authenc-hmac-sha384-"
  2060. "cbc-aes-caam",
  2061. .cra_blocksize = AES_BLOCK_SIZE,
  2062. },
  2063. .setkey = aead_setkey,
  2064. .setauthsize = aead_setauthsize,
  2065. .encrypt = aead_encrypt,
  2066. .decrypt = aead_decrypt,
  2067. .ivsize = AES_BLOCK_SIZE,
  2068. .maxauthsize = SHA384_DIGEST_SIZE,
  2069. },
  2070. .caam = {
  2071. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2072. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2073. OP_ALG_AAI_HMAC_PRECOMP,
  2074. },
  2075. },
  2076. {
  2077. .aead = {
  2078. .base = {
  2079. .cra_name = "echainiv(authenc(hmac(sha384),"
  2080. "cbc(aes)))",
  2081. .cra_driver_name = "echainiv-authenc-"
  2082. "hmac-sha384-cbc-aes-caam",
  2083. .cra_blocksize = AES_BLOCK_SIZE,
  2084. },
  2085. .setkey = aead_setkey,
  2086. .setauthsize = aead_setauthsize,
  2087. .encrypt = aead_encrypt,
  2088. .decrypt = aead_decrypt,
  2089. .ivsize = AES_BLOCK_SIZE,
  2090. .maxauthsize = SHA384_DIGEST_SIZE,
  2091. },
  2092. .caam = {
  2093. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2094. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2095. OP_ALG_AAI_HMAC_PRECOMP,
  2096. .geniv = true,
  2097. },
  2098. },
  2099. {
  2100. .aead = {
  2101. .base = {
  2102. .cra_name = "authenc(hmac(sha512),cbc(aes))",
  2103. .cra_driver_name = "authenc-hmac-sha512-"
  2104. "cbc-aes-caam",
  2105. .cra_blocksize = AES_BLOCK_SIZE,
  2106. },
  2107. .setkey = aead_setkey,
  2108. .setauthsize = aead_setauthsize,
  2109. .encrypt = aead_encrypt,
  2110. .decrypt = aead_decrypt,
  2111. .ivsize = AES_BLOCK_SIZE,
  2112. .maxauthsize = SHA512_DIGEST_SIZE,
  2113. },
  2114. .caam = {
  2115. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2116. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2117. OP_ALG_AAI_HMAC_PRECOMP,
  2118. },
  2119. },
  2120. {
  2121. .aead = {
  2122. .base = {
  2123. .cra_name = "echainiv(authenc(hmac(sha512),"
  2124. "cbc(aes)))",
  2125. .cra_driver_name = "echainiv-authenc-"
  2126. "hmac-sha512-cbc-aes-caam",
  2127. .cra_blocksize = AES_BLOCK_SIZE,
  2128. },
  2129. .setkey = aead_setkey,
  2130. .setauthsize = aead_setauthsize,
  2131. .encrypt = aead_encrypt,
  2132. .decrypt = aead_decrypt,
  2133. .ivsize = AES_BLOCK_SIZE,
  2134. .maxauthsize = SHA512_DIGEST_SIZE,
  2135. },
  2136. .caam = {
  2137. .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
  2138. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2139. OP_ALG_AAI_HMAC_PRECOMP,
  2140. .geniv = true,
  2141. },
  2142. },
  2143. {
  2144. .aead = {
  2145. .base = {
  2146. .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
  2147. .cra_driver_name = "authenc-hmac-md5-"
  2148. "cbc-des3_ede-caam",
  2149. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2150. },
  2151. .setkey = aead_setkey,
  2152. .setauthsize = aead_setauthsize,
  2153. .encrypt = aead_encrypt,
  2154. .decrypt = aead_decrypt,
  2155. .ivsize = DES3_EDE_BLOCK_SIZE,
  2156. .maxauthsize = MD5_DIGEST_SIZE,
  2157. },
  2158. .caam = {
  2159. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2160. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2161. OP_ALG_AAI_HMAC_PRECOMP,
  2162. }
  2163. },
  2164. {
  2165. .aead = {
  2166. .base = {
  2167. .cra_name = "echainiv(authenc(hmac(md5),"
  2168. "cbc(des3_ede)))",
  2169. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  2170. "cbc-des3_ede-caam",
  2171. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2172. },
  2173. .setkey = aead_setkey,
  2174. .setauthsize = aead_setauthsize,
  2175. .encrypt = aead_encrypt,
  2176. .decrypt = aead_decrypt,
  2177. .ivsize = DES3_EDE_BLOCK_SIZE,
  2178. .maxauthsize = MD5_DIGEST_SIZE,
  2179. },
  2180. .caam = {
  2181. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2182. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2183. OP_ALG_AAI_HMAC_PRECOMP,
  2184. .geniv = true,
  2185. }
  2186. },
  2187. {
  2188. .aead = {
  2189. .base = {
  2190. .cra_name = "authenc(hmac(sha1),"
  2191. "cbc(des3_ede))",
  2192. .cra_driver_name = "authenc-hmac-sha1-"
  2193. "cbc-des3_ede-caam",
  2194. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2195. },
  2196. .setkey = aead_setkey,
  2197. .setauthsize = aead_setauthsize,
  2198. .encrypt = aead_encrypt,
  2199. .decrypt = aead_decrypt,
  2200. .ivsize = DES3_EDE_BLOCK_SIZE,
  2201. .maxauthsize = SHA1_DIGEST_SIZE,
  2202. },
  2203. .caam = {
  2204. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2205. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2206. OP_ALG_AAI_HMAC_PRECOMP,
  2207. },
  2208. },
  2209. {
  2210. .aead = {
  2211. .base = {
  2212. .cra_name = "echainiv(authenc(hmac(sha1),"
  2213. "cbc(des3_ede)))",
  2214. .cra_driver_name = "echainiv-authenc-"
  2215. "hmac-sha1-"
  2216. "cbc-des3_ede-caam",
  2217. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2218. },
  2219. .setkey = aead_setkey,
  2220. .setauthsize = aead_setauthsize,
  2221. .encrypt = aead_encrypt,
  2222. .decrypt = aead_decrypt,
  2223. .ivsize = DES3_EDE_BLOCK_SIZE,
  2224. .maxauthsize = SHA1_DIGEST_SIZE,
  2225. },
  2226. .caam = {
  2227. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2228. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2229. OP_ALG_AAI_HMAC_PRECOMP,
  2230. .geniv = true,
  2231. },
  2232. },
  2233. {
  2234. .aead = {
  2235. .base = {
  2236. .cra_name = "authenc(hmac(sha224),"
  2237. "cbc(des3_ede))",
  2238. .cra_driver_name = "authenc-hmac-sha224-"
  2239. "cbc-des3_ede-caam",
  2240. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2241. },
  2242. .setkey = aead_setkey,
  2243. .setauthsize = aead_setauthsize,
  2244. .encrypt = aead_encrypt,
  2245. .decrypt = aead_decrypt,
  2246. .ivsize = DES3_EDE_BLOCK_SIZE,
  2247. .maxauthsize = SHA224_DIGEST_SIZE,
  2248. },
  2249. .caam = {
  2250. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2251. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2252. OP_ALG_AAI_HMAC_PRECOMP,
  2253. },
  2254. },
  2255. {
  2256. .aead = {
  2257. .base = {
  2258. .cra_name = "echainiv(authenc(hmac(sha224),"
  2259. "cbc(des3_ede)))",
  2260. .cra_driver_name = "echainiv-authenc-"
  2261. "hmac-sha224-"
  2262. "cbc-des3_ede-caam",
  2263. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2264. },
  2265. .setkey = aead_setkey,
  2266. .setauthsize = aead_setauthsize,
  2267. .encrypt = aead_encrypt,
  2268. .decrypt = aead_decrypt,
  2269. .ivsize = DES3_EDE_BLOCK_SIZE,
  2270. .maxauthsize = SHA224_DIGEST_SIZE,
  2271. },
  2272. .caam = {
  2273. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2274. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2275. OP_ALG_AAI_HMAC_PRECOMP,
  2276. .geniv = true,
  2277. },
  2278. },
  2279. {
  2280. .aead = {
  2281. .base = {
  2282. .cra_name = "authenc(hmac(sha256),"
  2283. "cbc(des3_ede))",
  2284. .cra_driver_name = "authenc-hmac-sha256-"
  2285. "cbc-des3_ede-caam",
  2286. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2287. },
  2288. .setkey = aead_setkey,
  2289. .setauthsize = aead_setauthsize,
  2290. .encrypt = aead_encrypt,
  2291. .decrypt = aead_decrypt,
  2292. .ivsize = DES3_EDE_BLOCK_SIZE,
  2293. .maxauthsize = SHA256_DIGEST_SIZE,
  2294. },
  2295. .caam = {
  2296. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2297. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2298. OP_ALG_AAI_HMAC_PRECOMP,
  2299. },
  2300. },
  2301. {
  2302. .aead = {
  2303. .base = {
  2304. .cra_name = "echainiv(authenc(hmac(sha256),"
  2305. "cbc(des3_ede)))",
  2306. .cra_driver_name = "echainiv-authenc-"
  2307. "hmac-sha256-"
  2308. "cbc-des3_ede-caam",
  2309. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2310. },
  2311. .setkey = aead_setkey,
  2312. .setauthsize = aead_setauthsize,
  2313. .encrypt = aead_encrypt,
  2314. .decrypt = aead_decrypt,
  2315. .ivsize = DES3_EDE_BLOCK_SIZE,
  2316. .maxauthsize = SHA256_DIGEST_SIZE,
  2317. },
  2318. .caam = {
  2319. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2320. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2321. OP_ALG_AAI_HMAC_PRECOMP,
  2322. .geniv = true,
  2323. },
  2324. },
  2325. {
  2326. .aead = {
  2327. .base = {
  2328. .cra_name = "authenc(hmac(sha384),"
  2329. "cbc(des3_ede))",
  2330. .cra_driver_name = "authenc-hmac-sha384-"
  2331. "cbc-des3_ede-caam",
  2332. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2333. },
  2334. .setkey = aead_setkey,
  2335. .setauthsize = aead_setauthsize,
  2336. .encrypt = aead_encrypt,
  2337. .decrypt = aead_decrypt,
  2338. .ivsize = DES3_EDE_BLOCK_SIZE,
  2339. .maxauthsize = SHA384_DIGEST_SIZE,
  2340. },
  2341. .caam = {
  2342. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2343. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2344. OP_ALG_AAI_HMAC_PRECOMP,
  2345. },
  2346. },
  2347. {
  2348. .aead = {
  2349. .base = {
  2350. .cra_name = "echainiv(authenc(hmac(sha384),"
  2351. "cbc(des3_ede)))",
  2352. .cra_driver_name = "echainiv-authenc-"
  2353. "hmac-sha384-"
  2354. "cbc-des3_ede-caam",
  2355. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2356. },
  2357. .setkey = aead_setkey,
  2358. .setauthsize = aead_setauthsize,
  2359. .encrypt = aead_encrypt,
  2360. .decrypt = aead_decrypt,
  2361. .ivsize = DES3_EDE_BLOCK_SIZE,
  2362. .maxauthsize = SHA384_DIGEST_SIZE,
  2363. },
  2364. .caam = {
  2365. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2366. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2367. OP_ALG_AAI_HMAC_PRECOMP,
  2368. .geniv = true,
  2369. },
  2370. },
  2371. {
  2372. .aead = {
  2373. .base = {
  2374. .cra_name = "authenc(hmac(sha512),"
  2375. "cbc(des3_ede))",
  2376. .cra_driver_name = "authenc-hmac-sha512-"
  2377. "cbc-des3_ede-caam",
  2378. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2379. },
  2380. .setkey = aead_setkey,
  2381. .setauthsize = aead_setauthsize,
  2382. .encrypt = aead_encrypt,
  2383. .decrypt = aead_decrypt,
  2384. .ivsize = DES3_EDE_BLOCK_SIZE,
  2385. .maxauthsize = SHA512_DIGEST_SIZE,
  2386. },
  2387. .caam = {
  2388. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2389. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2390. OP_ALG_AAI_HMAC_PRECOMP,
  2391. },
  2392. },
  2393. {
  2394. .aead = {
  2395. .base = {
  2396. .cra_name = "echainiv(authenc(hmac(sha512),"
  2397. "cbc(des3_ede)))",
  2398. .cra_driver_name = "echainiv-authenc-"
  2399. "hmac-sha512-"
  2400. "cbc-des3_ede-caam",
  2401. .cra_blocksize = DES3_EDE_BLOCK_SIZE,
  2402. },
  2403. .setkey = aead_setkey,
  2404. .setauthsize = aead_setauthsize,
  2405. .encrypt = aead_encrypt,
  2406. .decrypt = aead_decrypt,
  2407. .ivsize = DES3_EDE_BLOCK_SIZE,
  2408. .maxauthsize = SHA512_DIGEST_SIZE,
  2409. },
  2410. .caam = {
  2411. .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
  2412. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2413. OP_ALG_AAI_HMAC_PRECOMP,
  2414. .geniv = true,
  2415. },
  2416. },
  2417. {
  2418. .aead = {
  2419. .base = {
  2420. .cra_name = "authenc(hmac(md5),cbc(des))",
  2421. .cra_driver_name = "authenc-hmac-md5-"
  2422. "cbc-des-caam",
  2423. .cra_blocksize = DES_BLOCK_SIZE,
  2424. },
  2425. .setkey = aead_setkey,
  2426. .setauthsize = aead_setauthsize,
  2427. .encrypt = aead_encrypt,
  2428. .decrypt = aead_decrypt,
  2429. .ivsize = DES_BLOCK_SIZE,
  2430. .maxauthsize = MD5_DIGEST_SIZE,
  2431. },
  2432. .caam = {
  2433. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2434. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2435. OP_ALG_AAI_HMAC_PRECOMP,
  2436. },
  2437. },
  2438. {
  2439. .aead = {
  2440. .base = {
  2441. .cra_name = "echainiv(authenc(hmac(md5),"
  2442. "cbc(des)))",
  2443. .cra_driver_name = "echainiv-authenc-hmac-md5-"
  2444. "cbc-des-caam",
  2445. .cra_blocksize = DES_BLOCK_SIZE,
  2446. },
  2447. .setkey = aead_setkey,
  2448. .setauthsize = aead_setauthsize,
  2449. .encrypt = aead_encrypt,
  2450. .decrypt = aead_decrypt,
  2451. .ivsize = DES_BLOCK_SIZE,
  2452. .maxauthsize = MD5_DIGEST_SIZE,
  2453. },
  2454. .caam = {
  2455. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2456. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2457. OP_ALG_AAI_HMAC_PRECOMP,
  2458. .geniv = true,
  2459. },
  2460. },
  2461. {
  2462. .aead = {
  2463. .base = {
  2464. .cra_name = "authenc(hmac(sha1),cbc(des))",
  2465. .cra_driver_name = "authenc-hmac-sha1-"
  2466. "cbc-des-caam",
  2467. .cra_blocksize = DES_BLOCK_SIZE,
  2468. },
  2469. .setkey = aead_setkey,
  2470. .setauthsize = aead_setauthsize,
  2471. .encrypt = aead_encrypt,
  2472. .decrypt = aead_decrypt,
  2473. .ivsize = DES_BLOCK_SIZE,
  2474. .maxauthsize = SHA1_DIGEST_SIZE,
  2475. },
  2476. .caam = {
  2477. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2478. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2479. OP_ALG_AAI_HMAC_PRECOMP,
  2480. },
  2481. },
  2482. {
  2483. .aead = {
  2484. .base = {
  2485. .cra_name = "echainiv(authenc(hmac(sha1),"
  2486. "cbc(des)))",
  2487. .cra_driver_name = "echainiv-authenc-"
  2488. "hmac-sha1-cbc-des-caam",
  2489. .cra_blocksize = DES_BLOCK_SIZE,
  2490. },
  2491. .setkey = aead_setkey,
  2492. .setauthsize = aead_setauthsize,
  2493. .encrypt = aead_encrypt,
  2494. .decrypt = aead_decrypt,
  2495. .ivsize = DES_BLOCK_SIZE,
  2496. .maxauthsize = SHA1_DIGEST_SIZE,
  2497. },
  2498. .caam = {
  2499. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2500. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2501. OP_ALG_AAI_HMAC_PRECOMP,
  2502. .geniv = true,
  2503. },
  2504. },
  2505. {
  2506. .aead = {
  2507. .base = {
  2508. .cra_name = "authenc(hmac(sha224),cbc(des))",
  2509. .cra_driver_name = "authenc-hmac-sha224-"
  2510. "cbc-des-caam",
  2511. .cra_blocksize = DES_BLOCK_SIZE,
  2512. },
  2513. .setkey = aead_setkey,
  2514. .setauthsize = aead_setauthsize,
  2515. .encrypt = aead_encrypt,
  2516. .decrypt = aead_decrypt,
  2517. .ivsize = DES_BLOCK_SIZE,
  2518. .maxauthsize = SHA224_DIGEST_SIZE,
  2519. },
  2520. .caam = {
  2521. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2522. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2523. OP_ALG_AAI_HMAC_PRECOMP,
  2524. },
  2525. },
  2526. {
  2527. .aead = {
  2528. .base = {
  2529. .cra_name = "echainiv(authenc(hmac(sha224),"
  2530. "cbc(des)))",
  2531. .cra_driver_name = "echainiv-authenc-"
  2532. "hmac-sha224-cbc-des-caam",
  2533. .cra_blocksize = DES_BLOCK_SIZE,
  2534. },
  2535. .setkey = aead_setkey,
  2536. .setauthsize = aead_setauthsize,
  2537. .encrypt = aead_encrypt,
  2538. .decrypt = aead_decrypt,
  2539. .ivsize = DES_BLOCK_SIZE,
  2540. .maxauthsize = SHA224_DIGEST_SIZE,
  2541. },
  2542. .caam = {
  2543. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2544. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2545. OP_ALG_AAI_HMAC_PRECOMP,
  2546. .geniv = true,
  2547. },
  2548. },
  2549. {
  2550. .aead = {
  2551. .base = {
  2552. .cra_name = "authenc(hmac(sha256),cbc(des))",
  2553. .cra_driver_name = "authenc-hmac-sha256-"
  2554. "cbc-des-caam",
  2555. .cra_blocksize = DES_BLOCK_SIZE,
  2556. },
  2557. .setkey = aead_setkey,
  2558. .setauthsize = aead_setauthsize,
  2559. .encrypt = aead_encrypt,
  2560. .decrypt = aead_decrypt,
  2561. .ivsize = DES_BLOCK_SIZE,
  2562. .maxauthsize = SHA256_DIGEST_SIZE,
  2563. },
  2564. .caam = {
  2565. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2566. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2567. OP_ALG_AAI_HMAC_PRECOMP,
  2568. },
  2569. },
  2570. {
  2571. .aead = {
  2572. .base = {
  2573. .cra_name = "echainiv(authenc(hmac(sha256),"
  2574. "cbc(des)))",
  2575. .cra_driver_name = "echainiv-authenc-"
  2576. "hmac-sha256-cbc-des-caam",
  2577. .cra_blocksize = DES_BLOCK_SIZE,
  2578. },
  2579. .setkey = aead_setkey,
  2580. .setauthsize = aead_setauthsize,
  2581. .encrypt = aead_encrypt,
  2582. .decrypt = aead_decrypt,
  2583. .ivsize = DES_BLOCK_SIZE,
  2584. .maxauthsize = SHA256_DIGEST_SIZE,
  2585. },
  2586. .caam = {
  2587. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2588. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2589. OP_ALG_AAI_HMAC_PRECOMP,
  2590. .geniv = true,
  2591. },
  2592. },
  2593. {
  2594. .aead = {
  2595. .base = {
  2596. .cra_name = "authenc(hmac(sha384),cbc(des))",
  2597. .cra_driver_name = "authenc-hmac-sha384-"
  2598. "cbc-des-caam",
  2599. .cra_blocksize = DES_BLOCK_SIZE,
  2600. },
  2601. .setkey = aead_setkey,
  2602. .setauthsize = aead_setauthsize,
  2603. .encrypt = aead_encrypt,
  2604. .decrypt = aead_decrypt,
  2605. .ivsize = DES_BLOCK_SIZE,
  2606. .maxauthsize = SHA384_DIGEST_SIZE,
  2607. },
  2608. .caam = {
  2609. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2610. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2611. OP_ALG_AAI_HMAC_PRECOMP,
  2612. },
  2613. },
  2614. {
  2615. .aead = {
  2616. .base = {
  2617. .cra_name = "echainiv(authenc(hmac(sha384),"
  2618. "cbc(des)))",
  2619. .cra_driver_name = "echainiv-authenc-"
  2620. "hmac-sha384-cbc-des-caam",
  2621. .cra_blocksize = DES_BLOCK_SIZE,
  2622. },
  2623. .setkey = aead_setkey,
  2624. .setauthsize = aead_setauthsize,
  2625. .encrypt = aead_encrypt,
  2626. .decrypt = aead_decrypt,
  2627. .ivsize = DES_BLOCK_SIZE,
  2628. .maxauthsize = SHA384_DIGEST_SIZE,
  2629. },
  2630. .caam = {
  2631. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2632. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2633. OP_ALG_AAI_HMAC_PRECOMP,
  2634. .geniv = true,
  2635. },
  2636. },
  2637. {
  2638. .aead = {
  2639. .base = {
  2640. .cra_name = "authenc(hmac(sha512),cbc(des))",
  2641. .cra_driver_name = "authenc-hmac-sha512-"
  2642. "cbc-des-caam",
  2643. .cra_blocksize = DES_BLOCK_SIZE,
  2644. },
  2645. .setkey = aead_setkey,
  2646. .setauthsize = aead_setauthsize,
  2647. .encrypt = aead_encrypt,
  2648. .decrypt = aead_decrypt,
  2649. .ivsize = DES_BLOCK_SIZE,
  2650. .maxauthsize = SHA512_DIGEST_SIZE,
  2651. },
  2652. .caam = {
  2653. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2654. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2655. OP_ALG_AAI_HMAC_PRECOMP,
  2656. },
  2657. },
  2658. {
  2659. .aead = {
  2660. .base = {
  2661. .cra_name = "echainiv(authenc(hmac(sha512),"
  2662. "cbc(des)))",
  2663. .cra_driver_name = "echainiv-authenc-"
  2664. "hmac-sha512-cbc-des-caam",
  2665. .cra_blocksize = DES_BLOCK_SIZE,
  2666. },
  2667. .setkey = aead_setkey,
  2668. .setauthsize = aead_setauthsize,
  2669. .encrypt = aead_encrypt,
  2670. .decrypt = aead_decrypt,
  2671. .ivsize = DES_BLOCK_SIZE,
  2672. .maxauthsize = SHA512_DIGEST_SIZE,
  2673. },
  2674. .caam = {
  2675. .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
  2676. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2677. OP_ALG_AAI_HMAC_PRECOMP,
  2678. .geniv = true,
  2679. },
  2680. },
  2681. {
  2682. .aead = {
  2683. .base = {
  2684. .cra_name = "authenc(hmac(md5),"
  2685. "rfc3686(ctr(aes)))",
  2686. .cra_driver_name = "authenc-hmac-md5-"
  2687. "rfc3686-ctr-aes-caam",
  2688. .cra_blocksize = 1,
  2689. },
  2690. .setkey = aead_setkey,
  2691. .setauthsize = aead_setauthsize,
  2692. .encrypt = aead_encrypt,
  2693. .decrypt = aead_decrypt,
  2694. .ivsize = CTR_RFC3686_IV_SIZE,
  2695. .maxauthsize = MD5_DIGEST_SIZE,
  2696. },
  2697. .caam = {
  2698. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2699. OP_ALG_AAI_CTR_MOD128,
  2700. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2701. OP_ALG_AAI_HMAC_PRECOMP,
  2702. .rfc3686 = true,
  2703. },
  2704. },
  2705. {
  2706. .aead = {
  2707. .base = {
  2708. .cra_name = "seqiv(authenc("
  2709. "hmac(md5),rfc3686(ctr(aes))))",
  2710. .cra_driver_name = "seqiv-authenc-hmac-md5-"
  2711. "rfc3686-ctr-aes-caam",
  2712. .cra_blocksize = 1,
  2713. },
  2714. .setkey = aead_setkey,
  2715. .setauthsize = aead_setauthsize,
  2716. .encrypt = aead_encrypt,
  2717. .decrypt = aead_decrypt,
  2718. .ivsize = CTR_RFC3686_IV_SIZE,
  2719. .maxauthsize = MD5_DIGEST_SIZE,
  2720. },
  2721. .caam = {
  2722. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2723. OP_ALG_AAI_CTR_MOD128,
  2724. .class2_alg_type = OP_ALG_ALGSEL_MD5 |
  2725. OP_ALG_AAI_HMAC_PRECOMP,
  2726. .rfc3686 = true,
  2727. .geniv = true,
  2728. },
  2729. },
  2730. {
  2731. .aead = {
  2732. .base = {
  2733. .cra_name = "authenc(hmac(sha1),"
  2734. "rfc3686(ctr(aes)))",
  2735. .cra_driver_name = "authenc-hmac-sha1-"
  2736. "rfc3686-ctr-aes-caam",
  2737. .cra_blocksize = 1,
  2738. },
  2739. .setkey = aead_setkey,
  2740. .setauthsize = aead_setauthsize,
  2741. .encrypt = aead_encrypt,
  2742. .decrypt = aead_decrypt,
  2743. .ivsize = CTR_RFC3686_IV_SIZE,
  2744. .maxauthsize = SHA1_DIGEST_SIZE,
  2745. },
  2746. .caam = {
  2747. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2748. OP_ALG_AAI_CTR_MOD128,
  2749. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2750. OP_ALG_AAI_HMAC_PRECOMP,
  2751. .rfc3686 = true,
  2752. },
  2753. },
  2754. {
  2755. .aead = {
  2756. .base = {
  2757. .cra_name = "seqiv(authenc("
  2758. "hmac(sha1),rfc3686(ctr(aes))))",
  2759. .cra_driver_name = "seqiv-authenc-hmac-sha1-"
  2760. "rfc3686-ctr-aes-caam",
  2761. .cra_blocksize = 1,
  2762. },
  2763. .setkey = aead_setkey,
  2764. .setauthsize = aead_setauthsize,
  2765. .encrypt = aead_encrypt,
  2766. .decrypt = aead_decrypt,
  2767. .ivsize = CTR_RFC3686_IV_SIZE,
  2768. .maxauthsize = SHA1_DIGEST_SIZE,
  2769. },
  2770. .caam = {
  2771. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2772. OP_ALG_AAI_CTR_MOD128,
  2773. .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
  2774. OP_ALG_AAI_HMAC_PRECOMP,
  2775. .rfc3686 = true,
  2776. .geniv = true,
  2777. },
  2778. },
  2779. {
  2780. .aead = {
  2781. .base = {
  2782. .cra_name = "authenc(hmac(sha224),"
  2783. "rfc3686(ctr(aes)))",
  2784. .cra_driver_name = "authenc-hmac-sha224-"
  2785. "rfc3686-ctr-aes-caam",
  2786. .cra_blocksize = 1,
  2787. },
  2788. .setkey = aead_setkey,
  2789. .setauthsize = aead_setauthsize,
  2790. .encrypt = aead_encrypt,
  2791. .decrypt = aead_decrypt,
  2792. .ivsize = CTR_RFC3686_IV_SIZE,
  2793. .maxauthsize = SHA224_DIGEST_SIZE,
  2794. },
  2795. .caam = {
  2796. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2797. OP_ALG_AAI_CTR_MOD128,
  2798. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2799. OP_ALG_AAI_HMAC_PRECOMP,
  2800. .rfc3686 = true,
  2801. },
  2802. },
  2803. {
  2804. .aead = {
  2805. .base = {
  2806. .cra_name = "seqiv(authenc("
  2807. "hmac(sha224),rfc3686(ctr(aes))))",
  2808. .cra_driver_name = "seqiv-authenc-hmac-sha224-"
  2809. "rfc3686-ctr-aes-caam",
  2810. .cra_blocksize = 1,
  2811. },
  2812. .setkey = aead_setkey,
  2813. .setauthsize = aead_setauthsize,
  2814. .encrypt = aead_encrypt,
  2815. .decrypt = aead_decrypt,
  2816. .ivsize = CTR_RFC3686_IV_SIZE,
  2817. .maxauthsize = SHA224_DIGEST_SIZE,
  2818. },
  2819. .caam = {
  2820. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2821. OP_ALG_AAI_CTR_MOD128,
  2822. .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
  2823. OP_ALG_AAI_HMAC_PRECOMP,
  2824. .rfc3686 = true,
  2825. .geniv = true,
  2826. },
  2827. },
  2828. {
  2829. .aead = {
  2830. .base = {
  2831. .cra_name = "authenc(hmac(sha256),"
  2832. "rfc3686(ctr(aes)))",
  2833. .cra_driver_name = "authenc-hmac-sha256-"
  2834. "rfc3686-ctr-aes-caam",
  2835. .cra_blocksize = 1,
  2836. },
  2837. .setkey = aead_setkey,
  2838. .setauthsize = aead_setauthsize,
  2839. .encrypt = aead_encrypt,
  2840. .decrypt = aead_decrypt,
  2841. .ivsize = CTR_RFC3686_IV_SIZE,
  2842. .maxauthsize = SHA256_DIGEST_SIZE,
  2843. },
  2844. .caam = {
  2845. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2846. OP_ALG_AAI_CTR_MOD128,
  2847. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2848. OP_ALG_AAI_HMAC_PRECOMP,
  2849. .rfc3686 = true,
  2850. },
  2851. },
  2852. {
  2853. .aead = {
  2854. .base = {
  2855. .cra_name = "seqiv(authenc(hmac(sha256),"
  2856. "rfc3686(ctr(aes))))",
  2857. .cra_driver_name = "seqiv-authenc-hmac-sha256-"
  2858. "rfc3686-ctr-aes-caam",
  2859. .cra_blocksize = 1,
  2860. },
  2861. .setkey = aead_setkey,
  2862. .setauthsize = aead_setauthsize,
  2863. .encrypt = aead_encrypt,
  2864. .decrypt = aead_decrypt,
  2865. .ivsize = CTR_RFC3686_IV_SIZE,
  2866. .maxauthsize = SHA256_DIGEST_SIZE,
  2867. },
  2868. .caam = {
  2869. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2870. OP_ALG_AAI_CTR_MOD128,
  2871. .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
  2872. OP_ALG_AAI_HMAC_PRECOMP,
  2873. .rfc3686 = true,
  2874. .geniv = true,
  2875. },
  2876. },
  2877. {
  2878. .aead = {
  2879. .base = {
  2880. .cra_name = "authenc(hmac(sha384),"
  2881. "rfc3686(ctr(aes)))",
  2882. .cra_driver_name = "authenc-hmac-sha384-"
  2883. "rfc3686-ctr-aes-caam",
  2884. .cra_blocksize = 1,
  2885. },
  2886. .setkey = aead_setkey,
  2887. .setauthsize = aead_setauthsize,
  2888. .encrypt = aead_encrypt,
  2889. .decrypt = aead_decrypt,
  2890. .ivsize = CTR_RFC3686_IV_SIZE,
  2891. .maxauthsize = SHA384_DIGEST_SIZE,
  2892. },
  2893. .caam = {
  2894. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2895. OP_ALG_AAI_CTR_MOD128,
  2896. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2897. OP_ALG_AAI_HMAC_PRECOMP,
  2898. .rfc3686 = true,
  2899. },
  2900. },
  2901. {
  2902. .aead = {
  2903. .base = {
  2904. .cra_name = "seqiv(authenc(hmac(sha384),"
  2905. "rfc3686(ctr(aes))))",
  2906. .cra_driver_name = "seqiv-authenc-hmac-sha384-"
  2907. "rfc3686-ctr-aes-caam",
  2908. .cra_blocksize = 1,
  2909. },
  2910. .setkey = aead_setkey,
  2911. .setauthsize = aead_setauthsize,
  2912. .encrypt = aead_encrypt,
  2913. .decrypt = aead_decrypt,
  2914. .ivsize = CTR_RFC3686_IV_SIZE,
  2915. .maxauthsize = SHA384_DIGEST_SIZE,
  2916. },
  2917. .caam = {
  2918. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2919. OP_ALG_AAI_CTR_MOD128,
  2920. .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
  2921. OP_ALG_AAI_HMAC_PRECOMP,
  2922. .rfc3686 = true,
  2923. .geniv = true,
  2924. },
  2925. },
  2926. {
  2927. .aead = {
  2928. .base = {
  2929. .cra_name = "authenc(hmac(sha512),"
  2930. "rfc3686(ctr(aes)))",
  2931. .cra_driver_name = "authenc-hmac-sha512-"
  2932. "rfc3686-ctr-aes-caam",
  2933. .cra_blocksize = 1,
  2934. },
  2935. .setkey = aead_setkey,
  2936. .setauthsize = aead_setauthsize,
  2937. .encrypt = aead_encrypt,
  2938. .decrypt = aead_decrypt,
  2939. .ivsize = CTR_RFC3686_IV_SIZE,
  2940. .maxauthsize = SHA512_DIGEST_SIZE,
  2941. },
  2942. .caam = {
  2943. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2944. OP_ALG_AAI_CTR_MOD128,
  2945. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2946. OP_ALG_AAI_HMAC_PRECOMP,
  2947. .rfc3686 = true,
  2948. },
  2949. },
  2950. {
  2951. .aead = {
  2952. .base = {
  2953. .cra_name = "seqiv(authenc(hmac(sha512),"
  2954. "rfc3686(ctr(aes))))",
  2955. .cra_driver_name = "seqiv-authenc-hmac-sha512-"
  2956. "rfc3686-ctr-aes-caam",
  2957. .cra_blocksize = 1,
  2958. },
  2959. .setkey = aead_setkey,
  2960. .setauthsize = aead_setauthsize,
  2961. .encrypt = aead_encrypt,
  2962. .decrypt = aead_decrypt,
  2963. .ivsize = CTR_RFC3686_IV_SIZE,
  2964. .maxauthsize = SHA512_DIGEST_SIZE,
  2965. },
  2966. .caam = {
  2967. .class1_alg_type = OP_ALG_ALGSEL_AES |
  2968. OP_ALG_AAI_CTR_MOD128,
  2969. .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
  2970. OP_ALG_AAI_HMAC_PRECOMP,
  2971. .rfc3686 = true,
  2972. .geniv = true,
  2973. },
  2974. },
  2975. };
/*
 * caam_crypto_alg - pairs a generic crypto_alg with the CAAM-specific
 * parameters needed to build its shared descriptors.
 * @crypto_alg: the algorithm as registered with the crypto API
 * @entry: node linking this instance into the driver's alg_list
 * @caam: per-algorithm CAAM data (class 1/2 OP_ALG type words and flags)
 */
struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};
/*
 * caam_init_common - transform-context initialization shared by all tfm types
 * @ctx: context embedded in the tfm, to be initialized
 * @caam: template providing the class 1/2 algorithm type words
 * @uses_dkp: true when the descriptors use the DKP instruction, which
 *	      writes the derived (split) key back into the context buffer
 *
 * Allocates a job ring for this tfm and DMA-maps the shared descriptors
 * plus the key buffer in a single mapping.
 *
 * Return: 0 on success, negative errno on failure.
 */
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	/*
	 * On Era 6+ devices DKP writes back into the mapped region, so the
	 * mapping must be bidirectional; otherwise device-read-only is enough.
	 */
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	/*
	 * One mapping covers everything from sh_desc_enc up to (but not
	 * including) the sh_desc_enc_dma field, i.e. the enc/dec/givenc
	 * descriptors and the key buffer laid out contiguously in caam_ctx.
	 * (Assumes sh_desc_enc is the first field — confirm against the
	 * struct caam_ctx definition.)
	 */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* Derive the per-member bus addresses from the single mapping */
	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
						      sh_desc_givenc);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
  3016. static int caam_cra_init(struct crypto_tfm *tfm)
  3017. {
  3018. struct crypto_alg *alg = tfm->__crt_alg;
  3019. struct caam_crypto_alg *caam_alg =
  3020. container_of(alg, struct caam_crypto_alg, crypto_alg);
  3021. struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
  3022. return caam_init_common(ctx, &caam_alg->caam, false);
  3023. }
  3024. static int caam_aead_init(struct crypto_aead *tfm)
  3025. {
  3026. struct aead_alg *alg = crypto_aead_alg(tfm);
  3027. struct caam_aead_alg *caam_alg =
  3028. container_of(alg, struct caam_aead_alg, aead);
  3029. struct caam_ctx *ctx = crypto_aead_ctx(tfm);
  3030. return caam_init_common(ctx, &caam_alg->caam,
  3031. alg->setkey == aead_setkey);
  3032. }
/*
 * caam_exit_common - undo caam_init_common()
 * @ctx: context being torn down
 *
 * Unmaps the shared-descriptor/key region (same base and length as the
 * mapping created in caam_init_common()) and releases the job ring.
 */
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}
  3040. static void caam_cra_exit(struct crypto_tfm *tfm)
  3041. {
  3042. caam_exit_common(crypto_tfm_ctx(tfm));
  3043. }
  3044. static void caam_aead_exit(struct crypto_aead *tfm)
  3045. {
  3046. caam_exit_common(crypto_aead_ctx(tfm));
  3047. }
  3048. static void __exit caam_algapi_exit(void)
  3049. {
  3050. struct caam_crypto_alg *t_alg, *n;
  3051. int i;
  3052. for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
  3053. struct caam_aead_alg *t_alg = driver_aeads + i;
  3054. if (t_alg->registered)
  3055. crypto_unregister_aead(&t_alg->aead);
  3056. }
  3057. if (!alg_list.next)
  3058. return;
  3059. list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
  3060. crypto_unregister_alg(&t_alg->crypto_alg);
  3061. list_del(&t_alg->entry);
  3062. kfree(t_alg);
  3063. }
  3064. }
  3065. static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
  3066. *template)
  3067. {
  3068. struct caam_crypto_alg *t_alg;
  3069. struct crypto_alg *alg;
  3070. t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
  3071. if (!t_alg) {
  3072. pr_err("failed to allocate t_alg\n");
  3073. return ERR_PTR(-ENOMEM);
  3074. }
  3075. alg = &t_alg->crypto_alg;
  3076. snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
  3077. snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
  3078. template->driver_name);
  3079. alg->cra_module = THIS_MODULE;
  3080. alg->cra_init = caam_cra_init;
  3081. alg->cra_exit = caam_cra_exit;
  3082. alg->cra_priority = CAAM_CRA_PRIORITY;
  3083. alg->cra_blocksize = template->blocksize;
  3084. alg->cra_alignmask = 0;
  3085. alg->cra_ctxsize = sizeof(struct caam_ctx);
  3086. alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
  3087. template->type;
  3088. switch (template->type) {
  3089. case CRYPTO_ALG_TYPE_GIVCIPHER:
  3090. alg->cra_type = &crypto_givcipher_type;
  3091. alg->cra_ablkcipher = template->template_ablkcipher;
  3092. break;
  3093. case CRYPTO_ALG_TYPE_ABLKCIPHER:
  3094. alg->cra_type = &crypto_ablkcipher_type;
  3095. alg->cra_ablkcipher = template->template_ablkcipher;
  3096. break;
  3097. }
  3098. t_alg->caam.class1_alg_type = template->class1_alg_type;
  3099. t_alg->caam.class2_alg_type = template->class2_alg_type;
  3100. return t_alg;
  3101. }
  3102. static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
  3103. {
  3104. struct aead_alg *alg = &t_alg->aead;
  3105. alg->base.cra_module = THIS_MODULE;
  3106. alg->base.cra_priority = CAAM_CRA_PRIORITY;
  3107. alg->base.cra_ctxsize = sizeof(struct caam_ctx);
  3108. alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
  3109. alg->init = caam_aead_init;
  3110. alg->exit = caam_aead_exit;
  3111. }
  3112. static int __init caam_algapi_init(void)
  3113. {
  3114. struct device_node *dev_node;
  3115. struct platform_device *pdev;
  3116. struct device *ctrldev;
  3117. struct caam_drv_private *priv;
  3118. int i = 0, err = 0;
  3119. u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
  3120. unsigned int md_limit = SHA512_DIGEST_SIZE;
  3121. bool registered = false;
  3122. dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
  3123. if (!dev_node) {
  3124. dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
  3125. if (!dev_node)
  3126. return -ENODEV;
  3127. }
  3128. pdev = of_find_device_by_node(dev_node);
  3129. if (!pdev) {
  3130. of_node_put(dev_node);
  3131. return -ENODEV;
  3132. }
  3133. ctrldev = &pdev->dev;
  3134. priv = dev_get_drvdata(ctrldev);
  3135. of_node_put(dev_node);
  3136. /*
  3137. * If priv is NULL, it's probably because the caam driver wasn't
  3138. * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
  3139. */
  3140. if (!priv)
  3141. return -ENODEV;
  3142. INIT_LIST_HEAD(&alg_list);
  3143. /*
  3144. * Register crypto algorithms the device supports.
  3145. * First, detect presence and attributes of DES, AES, and MD blocks.
  3146. */
  3147. cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
  3148. cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
  3149. des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
  3150. aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
  3151. md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
  3152. /* If MD is present, limit digest size based on LP256 */
  3153. if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
  3154. md_limit = SHA256_DIGEST_SIZE;
  3155. for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
  3156. struct caam_crypto_alg *t_alg;
  3157. struct caam_alg_template *alg = driver_algs + i;
  3158. u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
  3159. /* Skip DES algorithms if not supported by device */
  3160. if (!des_inst &&
  3161. ((alg_sel == OP_ALG_ALGSEL_3DES) ||
  3162. (alg_sel == OP_ALG_ALGSEL_DES)))
  3163. continue;
  3164. /* Skip AES algorithms if not supported by device */
  3165. if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
  3166. continue;
  3167. /*
  3168. * Check support for AES modes not available
  3169. * on LP devices.
  3170. */
  3171. if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
  3172. if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
  3173. OP_ALG_AAI_XTS)
  3174. continue;
  3175. t_alg = caam_alg_alloc(alg);
  3176. if (IS_ERR(t_alg)) {
  3177. err = PTR_ERR(t_alg);
  3178. pr_warn("%s alg allocation failed\n", alg->driver_name);
  3179. continue;
  3180. }
  3181. err = crypto_register_alg(&t_alg->crypto_alg);
  3182. if (err) {
  3183. pr_warn("%s alg registration failed\n",
  3184. t_alg->crypto_alg.cra_driver_name);
  3185. kfree(t_alg);
  3186. continue;
  3187. }
  3188. list_add_tail(&t_alg->entry, &alg_list);
  3189. registered = true;
  3190. }
  3191. for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
  3192. struct caam_aead_alg *t_alg = driver_aeads + i;
  3193. u32 c1_alg_sel = t_alg->caam.class1_alg_type &
  3194. OP_ALG_ALGSEL_MASK;
  3195. u32 c2_alg_sel = t_alg->caam.class2_alg_type &
  3196. OP_ALG_ALGSEL_MASK;
  3197. u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
  3198. /* Skip DES algorithms if not supported by device */
  3199. if (!des_inst &&
  3200. ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
  3201. (c1_alg_sel == OP_ALG_ALGSEL_DES)))
  3202. continue;
  3203. /* Skip AES algorithms if not supported by device */
  3204. if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
  3205. continue;
  3206. /*
  3207. * Check support for AES algorithms not available
  3208. * on LP devices.
  3209. */
  3210. if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
  3211. if (alg_aai == OP_ALG_AAI_GCM)
  3212. continue;
  3213. /*
  3214. * Skip algorithms requiring message digests
  3215. * if MD or MD size is not supported by device.
  3216. */
  3217. if (c2_alg_sel &&
  3218. (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
  3219. continue;
  3220. caam_aead_alg_init(t_alg);
  3221. err = crypto_register_aead(&t_alg->aead);
  3222. if (err) {
  3223. pr_warn("%s alg registration failed\n",
  3224. t_alg->aead.base.cra_driver_name);
  3225. continue;
  3226. }
  3227. t_alg->registered = true;
  3228. registered = true;
  3229. }
  3230. if (registered)
  3231. pr_info("caam algorithms registered in /proc/crypto\n");
  3232. return err;
  3233. }
/* Module entry/exit hooks and metadata */
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");