testmgr.c 84 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561356235633564356535663567356835693570357135723573357435753576357735783579358035813582358335843585358635873588358935903591359235933594359535963597359835993600360136023603360436053606360736083609361036113612361336143615361636173618361936203621362236233624362536263627362836293630363136323633363436353636363736383639364036413642364336443645364636473648364936503651365236533654365536563657365836593660366136623663366436653666366736683669367036713672367336743675367636773678367936803681368236833684368536863687368836893690369136923693369436953696369736983699370037013702370337043705370637073708370937103711
  1. /*
  2. * Algorithm testing framework and tests.
  3. *
  4. * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  5. * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
  6. * Copyright (c) 2007 Nokia Siemens Networks
  7. * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  8. *
  9. * Updated RFC4106 AES-GCM testing.
  10. * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
  11. * Adrian Hoban <adrian.hoban@intel.com>
  12. * Gabriele Paoloni <gabriele.paoloni@intel.com>
  13. * Tadeusz Struk (tadeusz.struk@intel.com)
  14. * Copyright (c) 2010, Intel Corporation.
  15. *
  16. * This program is free software; you can redistribute it and/or modify it
  17. * under the terms of the GNU General Public License as published by the Free
  18. * Software Foundation; either version 2 of the License, or (at your option)
  19. * any later version.
  20. *
  21. */
  22. #include <crypto/aead.h>
  23. #include <crypto/hash.h>
  24. #include <crypto/skcipher.h>
  25. #include <linux/err.h>
  26. #include <linux/fips.h>
  27. #include <linux/module.h>
  28. #include <linux/scatterlist.h>
  29. #include <linux/slab.h>
  30. #include <linux/string.h>
  31. #include <crypto/rng.h>
  32. #include <crypto/drbg.h>
  33. #include <crypto/akcipher.h>
  34. #include <crypto/kpp.h>
  35. #include <crypto/acompress.h>
  36. #include "internal.h"
/*
 * "notests" module / boot parameter: when set, alg_test() skips the
 * self-tests entirely (see its use further down in this file).
 */
static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");
  40. #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
  41. /* a perfect nop */
/*
 * Self-tests are compiled out (CONFIG_CRYPTO_MANAGER_DISABLE_TESTS):
 * report success for every algorithm without doing any work.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}
  46. #else
  47. #include "testmgr.h"
/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 * IDX >> PAGE_SHIFT selects the page within xbuf and
 * offset_in_page(IDX) the position inside it, so values larger than
 * one page deliberately land data in later pages / near page ends.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1511
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher() (as the "enc" direction argument).
 */
#define ENCRYPT 1
#define DECRYPT 0
/*
 * One of the structs below is embedded in alg_test_desc.suite for each
 * algorithm type; each pairs an array of test vectors (declared in
 * testmgr.h) with its element count.
 */
struct aead_test_suite {
	struct {
		const struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;	/* separate vector sets per direction */
};

struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;	/* separate vector sets per direction */
};

struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

/* deterministic (pseudo) RNG vectors */
struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

/* key-agreement protocol primitives (e.g. DH/ECDH) */
struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};
/*
 * Describes how to self-test one algorithm: the algorithm name, the
 * test routine to invoke, whether the algorithm may be used in FIPS
 * mode, and the test-vector suite matching the algorithm type.
 */
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	/* only the member matching the algorithm type is used */
	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};
/* The IDX1..IDX8 cross-page offsets, gathered for indexed access. */
static const unsigned int IDX[8] = {
	IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
/* Dump @len bytes of @buf to the log, 16 bytes per line, no ASCII column. */
static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}
  128. static int testmgr_alloc_buf(char *buf[XBUFSIZE])
  129. {
  130. int i;
  131. for (i = 0; i < XBUFSIZE; i++) {
  132. buf[i] = (void *)__get_free_page(GFP_KERNEL);
  133. if (!buf[i])
  134. goto err_free_buf;
  135. }
  136. return 0;
  137. err_free_buf:
  138. while (i-- > 0)
  139. free_page((unsigned long)buf[i]);
  140. return -ENOMEM;
  141. }
  142. static void testmgr_free_buf(char *buf[XBUFSIZE])
  143. {
  144. int i;
  145. for (i = 0; i < XBUFSIZE; i++)
  146. free_page((unsigned long)buf[i]);
  147. }
  148. static int ahash_guard_result(char *result, char c, int size)
  149. {
  150. int i;
  151. for (i = 0; i < size; i++) {
  152. if (result[i] != c)
  153. return -EINVAL;
  154. }
  155. return 0;
  156. }
/*
 * Export the hash state of *@preq, destroy that request, allocate a
 * fresh one, import the state into it and feed it the next plaintext
 * chunk (@template->tap[@k] bytes starting at offset @temp).
 *
 * Ownership: on success *preq points at the NEW request (the old one
 * has been freed).  On failure the current request has been freed and
 * *preq is left stale — the caller must not use it again (__test_hash
 * bails out via its "out_noreq" path in that case).
 *
 * Two export()/import() contracts are also verified here:
 *  - export() must not write past the advertised statesize (guard
 *    bytes appended to the state buffer, checked via WARN_ON),
 *  - neither export() nor import() may touch req->result (@result is
 *    pre-filled with 1s and checked with ahash_guard_result()).
 */
static int ahash_partial_update(struct ahash_request **preq,
	struct crypto_ahash *tfm, const struct hash_testvec *template,
	void *hash_buff, int k, int temp, struct scatterlist *sg,
	const char *algo, char *result, struct crypto_wait *wait)
{
	char *state;
	struct ahash_request *req;
	int statesize, ret = -EINVAL;
	static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 };
	int digestsize = crypto_ahash_digestsize(tfm);

	req = *preq;
	statesize = crypto_ahash_statesize(
			crypto_ahash_reqtfm(req));
	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
	if (!state) {
		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
		goto out_nostate;
	}
	/* guard bytes after the state; export() must leave them intact */
	memcpy(state + statesize, guard, sizeof(guard));
	/* pre-fill result so a write to req->result is detectable */
	memset(result, 1, digestsize);
	ret = crypto_ahash_export(req, state);
	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
	if (ret) {
		pr_err("alg: hash: Failed to export() for %s\n", algo);
		goto out;
	}
	ret = ahash_guard_result(result, 1, digestsize);
	if (ret) {
		pr_err("alg: hash: Failed, export used req->result for %s\n",
		       algo);
		goto out;
	}
	/* import into a brand-new request to prove the state is portable */
	ahash_request_free(req);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req,
		CRYPTO_TFM_REQ_MAY_BACKLOG,
		crypto_req_done, wait);

	memcpy(hash_buff, template->plaintext + temp,
	       template->tap[k]);
	sg_init_one(&sg[0], hash_buff, template->tap[k]);
	ahash_request_set_crypt(req, sg, result, template->tap[k]);
	ret = crypto_ahash_import(req, state);
	if (ret) {
		pr_err("alg: hash: Failed to import() for %s\n", algo);
		goto out;
	}
	ret = ahash_guard_result(result, 1, digestsize);
	if (ret) {
		pr_err("alg: hash: Failed, import used req->result for %s\n",
		       algo);
		goto out;
	}
	ret = crypto_wait_req(crypto_ahash_update(req), wait);
	if (ret)
		goto out;
	*preq = req;
	ret = 0;
	goto out_noreq;
out:
	ahash_request_free(req);
out_noreq:
	kfree(state);
out_nostate:
	return ret;
}
/*
 * Run the hash test vectors in @template[0..@tcount) against @tfm.
 *
 * Three passes are made:
 *  1. linear vectors (np == 0), hashed from one contiguous buffer —
 *     via digest() when @use_digest, otherwise via the separate
 *     init/update/final calls;
 *  2. "chunking" vectors (np != 0): the plaintext is scattered in
 *     tap[]-sized pieces at the cross-page IDX[] offsets and hashed
 *     through a multi-entry scatterlist;
 *  3. "partial update" vectors (np >= 2): the first chunk is hashed
 *     normally, then the state is export()ed/import()ed around every
 *     further chunk via ahash_partial_update().
 *
 * @align_offset shifts the data inside the page to exercise unaligned
 * buffers; passes 2 and 3 only run when it is zero.
 *
 * Returns 0 on success or a negative errno on the first failure.
 */
static int __test_hash(struct crypto_ahash *tfm,
		       const struct hash_testvec *template, unsigned int tcount,
		       bool use_digest, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	size_t digest_size = crypto_ahash_digestsize(tfm);
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char *result;
	char *key;
	struct ahash_request *req;
	struct crypto_wait wait;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	result = kmalloc(digest_size, GFP_KERNEL);
	if (!result)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_nobuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	crypto_init_wait(&wait);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	/* pass 1: linear (non-scattered) vectors */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;	/* scattered vectors handled in pass 2 */

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
			goto out;

		j++;
		memset(result, 0, digest_size);

		hash_buff = xbuf[0];
		hash_buff += align_offset;

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		} else {
			/*
			 * init/update/final path.  result is pre-filled with
			 * 1s so ahash_guard_result() can detect init() or
			 * update() illegally writing req->result early.
			 */
			memset(result, 1, digest_size);
			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_update(req), &wait);
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = ahash_guard_result(result, 1, digest_size);
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: used req->result\n", j, algo);
				goto out;
			}
			ret = crypto_wait_req(crypto_ahash_final(req), &wait);
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* pass 2: scattered ("chunking") vectors */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;
		memset(result, 0, digest_size);

		temp = 0;
		sg_init_table(sg, template[i].np);
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;
			/* copy chunk k to its cross-page location and map it */
			sg_set_buf(&sg[k],
				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].plaintext + temp,
					  template[i].tap[k]),
				   template[i].tap[k]);
			temp += template[i].tap[k];
		}

		if (template[i].ksize) {
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			crypto_ahash_clear_flags(tfm, ~0);
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey "
				       "failed on chunking test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
		if (ret) {
			pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Chunking test %d "
			       "failed for %s\n", j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* partial update exercise */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (template[i].np < 2)
			continue;

		j++;
		memset(result, 0, digest_size);

		ret = -EINVAL;
		hash_buff = xbuf[0];
		memcpy(hash_buff, template[i].plaintext,
		       template[i].tap[0]);
		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
		}

		/* hash the first chunk the normal way... */
		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
		ret = crypto_wait_req(crypto_ahash_init(req), &wait);
		if (ret) {
			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		ret = crypto_wait_req(crypto_ahash_update(req), &wait);
		if (ret) {
			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}

		/*
		 * ...then export/import the state around every further chunk.
		 * On failure ahash_partial_update() has already freed req and
		 * replaced *(&req) only on success, hence goto out_noreq.
		 */
		temp = template[i].tap[0];
		for (k = 1; k < template[i].np; k++) {
			ret = ahash_partial_update(&req, tfm, &template[i],
				hash_buff, k, temp, &sg[0], algo, result,
				&wait);
			if (ret) {
				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out_noreq;
			}
			temp += template[i].tap[k];
		}
		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
		if (ret) {
			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			pr_err("alg: hash: Partial Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}
  476. static int test_hash(struct crypto_ahash *tfm,
  477. const struct hash_testvec *template,
  478. unsigned int tcount, bool use_digest)
  479. {
  480. unsigned int alignmask;
  481. int ret;
  482. ret = __test_hash(tfm, template, tcount, use_digest, 0);
  483. if (ret)
  484. return ret;
  485. /* test unaligned buffers, check with one byte offset */
  486. ret = __test_hash(tfm, template, tcount, use_digest, 1);
  487. if (ret)
  488. return ret;
  489. alignmask = crypto_tfm_alg_alignmask(&tfm->base);
  490. if (alignmask) {
  491. /* Check if alignment mask for tfm is correctly set. */
  492. ret = __test_hash(tfm, template, tcount, use_digest,
  493. alignmask + 1);
  494. if (ret)
  495. return ret;
  496. }
  497. return 0;
  498. }
  499. static int __test_aead(struct crypto_aead *tfm, int enc,
  500. const struct aead_testvec *template, unsigned int tcount,
  501. const bool diff_dst, const int align_offset)
  502. {
  503. const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
  504. unsigned int i, j, k, n, temp;
  505. int ret = -ENOMEM;
  506. char *q;
  507. char *key;
  508. struct aead_request *req;
  509. struct scatterlist *sg;
  510. struct scatterlist *sgout;
  511. const char *e, *d;
  512. struct crypto_wait wait;
  513. unsigned int authsize, iv_len;
  514. void *input;
  515. void *output;
  516. void *assoc;
  517. char *iv;
  518. char *xbuf[XBUFSIZE];
  519. char *xoutbuf[XBUFSIZE];
  520. char *axbuf[XBUFSIZE];
  521. iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
  522. if (!iv)
  523. return ret;
  524. key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
  525. if (!key)
  526. goto out_noxbuf;
  527. if (testmgr_alloc_buf(xbuf))
  528. goto out_noxbuf;
  529. if (testmgr_alloc_buf(axbuf))
  530. goto out_noaxbuf;
  531. if (diff_dst && testmgr_alloc_buf(xoutbuf))
  532. goto out_nooutbuf;
  533. /* avoid "the frame size is larger than 1024 bytes" compiler warning */
  534. sg = kmalloc(array3_size(sizeof(*sg), 8, (diff_dst ? 4 : 2)),
  535. GFP_KERNEL);
  536. if (!sg)
  537. goto out_nosg;
  538. sgout = &sg[16];
  539. if (diff_dst)
  540. d = "-ddst";
  541. else
  542. d = "";
  543. if (enc == ENCRYPT)
  544. e = "encryption";
  545. else
  546. e = "decryption";
  547. crypto_init_wait(&wait);
  548. req = aead_request_alloc(tfm, GFP_KERNEL);
  549. if (!req) {
  550. pr_err("alg: aead%s: Failed to allocate request for %s\n",
  551. d, algo);
  552. goto out;
  553. }
  554. aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
  555. crypto_req_done, &wait);
  556. iv_len = crypto_aead_ivsize(tfm);
  557. for (i = 0, j = 0; i < tcount; i++) {
  558. if (template[i].np)
  559. continue;
  560. j++;
  561. /* some templates have no input data but they will
  562. * touch input
  563. */
  564. input = xbuf[0];
  565. input += align_offset;
  566. assoc = axbuf[0];
  567. ret = -EINVAL;
  568. if (WARN_ON(align_offset + template[i].ilen >
  569. PAGE_SIZE || template[i].alen > PAGE_SIZE))
  570. goto out;
  571. memcpy(input, template[i].input, template[i].ilen);
  572. memcpy(assoc, template[i].assoc, template[i].alen);
  573. if (template[i].iv)
  574. memcpy(iv, template[i].iv, iv_len);
  575. else
  576. memset(iv, 0, iv_len);
  577. crypto_aead_clear_flags(tfm, ~0);
  578. if (template[i].wk)
  579. crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
  580. if (template[i].klen > MAX_KEYLEN) {
  581. pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
  582. d, j, algo, template[i].klen,
  583. MAX_KEYLEN);
  584. ret = -EINVAL;
  585. goto out;
  586. }
  587. memcpy(key, template[i].key, template[i].klen);
  588. ret = crypto_aead_setkey(tfm, key, template[i].klen);
  589. if (template[i].fail == !ret) {
  590. pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
  591. d, j, algo, crypto_aead_get_flags(tfm));
  592. goto out;
  593. } else if (ret)
  594. continue;
  595. authsize = abs(template[i].rlen - template[i].ilen);
  596. ret = crypto_aead_setauthsize(tfm, authsize);
  597. if (ret) {
  598. pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
  599. d, authsize, j, algo);
  600. goto out;
  601. }
  602. k = !!template[i].alen;
  603. sg_init_table(sg, k + 1);
  604. sg_set_buf(&sg[0], assoc, template[i].alen);
  605. sg_set_buf(&sg[k], input,
  606. template[i].ilen + (enc ? authsize : 0));
  607. output = input;
  608. if (diff_dst) {
  609. sg_init_table(sgout, k + 1);
  610. sg_set_buf(&sgout[0], assoc, template[i].alen);
  611. output = xoutbuf[0];
  612. output += align_offset;
  613. sg_set_buf(&sgout[k], output,
  614. template[i].rlen + (enc ? 0 : authsize));
  615. }
  616. aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
  617. template[i].ilen, iv);
  618. aead_request_set_ad(req, template[i].alen);
  619. ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
  620. : crypto_aead_decrypt(req), &wait);
  621. switch (ret) {
  622. case 0:
  623. if (template[i].novrfy) {
  624. /* verification was supposed to fail */
  625. pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
  626. d, e, j, algo);
  627. /* so really, we got a bad message */
  628. ret = -EBADMSG;
  629. goto out;
  630. }
  631. break;
  632. case -EBADMSG:
  633. if (template[i].novrfy)
  634. /* verification failure was expected */
  635. continue;
  636. /* fall through */
  637. default:
  638. pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
  639. d, e, j, algo, -ret);
  640. goto out;
  641. }
  642. q = output;
  643. if (memcmp(q, template[i].result, template[i].rlen)) {
  644. pr_err("alg: aead%s: Test %d failed on %s for %s\n",
  645. d, j, e, algo);
  646. hexdump(q, template[i].rlen);
  647. ret = -EINVAL;
  648. goto out;
  649. }
  650. }
  651. for (i = 0, j = 0; i < tcount; i++) {
  652. /* alignment tests are only done with continuous buffers */
  653. if (align_offset != 0)
  654. break;
  655. if (!template[i].np)
  656. continue;
  657. j++;
  658. if (template[i].iv)
  659. memcpy(iv, template[i].iv, iv_len);
  660. else
  661. memset(iv, 0, MAX_IVLEN);
  662. crypto_aead_clear_flags(tfm, ~0);
  663. if (template[i].wk)
  664. crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
  665. if (template[i].klen > MAX_KEYLEN) {
  666. pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
  667. d, j, algo, template[i].klen, MAX_KEYLEN);
  668. ret = -EINVAL;
  669. goto out;
  670. }
  671. memcpy(key, template[i].key, template[i].klen);
  672. ret = crypto_aead_setkey(tfm, key, template[i].klen);
  673. if (template[i].fail == !ret) {
  674. pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
  675. d, j, algo, crypto_aead_get_flags(tfm));
  676. goto out;
  677. } else if (ret)
  678. continue;
  679. authsize = abs(template[i].rlen - template[i].ilen);
  680. ret = -EINVAL;
  681. sg_init_table(sg, template[i].anp + template[i].np);
  682. if (diff_dst)
  683. sg_init_table(sgout, template[i].anp + template[i].np);
  684. ret = -EINVAL;
  685. for (k = 0, temp = 0; k < template[i].anp; k++) {
  686. if (WARN_ON(offset_in_page(IDX[k]) +
  687. template[i].atap[k] > PAGE_SIZE))
  688. goto out;
  689. sg_set_buf(&sg[k],
  690. memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
  691. offset_in_page(IDX[k]),
  692. template[i].assoc + temp,
  693. template[i].atap[k]),
  694. template[i].atap[k]);
  695. if (diff_dst)
  696. sg_set_buf(&sgout[k],
  697. axbuf[IDX[k] >> PAGE_SHIFT] +
  698. offset_in_page(IDX[k]),
  699. template[i].atap[k]);
  700. temp += template[i].atap[k];
  701. }
  702. for (k = 0, temp = 0; k < template[i].np; k++) {
  703. if (WARN_ON(offset_in_page(IDX[k]) +
  704. template[i].tap[k] > PAGE_SIZE))
  705. goto out;
  706. q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
  707. memcpy(q, template[i].input + temp, template[i].tap[k]);
  708. sg_set_buf(&sg[template[i].anp + k],
  709. q, template[i].tap[k]);
  710. if (diff_dst) {
  711. q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
  712. offset_in_page(IDX[k]);
  713. memset(q, 0, template[i].tap[k]);
  714. sg_set_buf(&sgout[template[i].anp + k],
  715. q, template[i].tap[k]);
  716. }
  717. n = template[i].tap[k];
  718. if (k == template[i].np - 1 && enc)
  719. n += authsize;
  720. if (offset_in_page(q) + n < PAGE_SIZE)
  721. q[n] = 0;
  722. temp += template[i].tap[k];
  723. }
  724. ret = crypto_aead_setauthsize(tfm, authsize);
  725. if (ret) {
  726. pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
  727. d, authsize, j, algo);
  728. goto out;
  729. }
  730. if (enc) {
  731. if (WARN_ON(sg[template[i].anp + k - 1].offset +
  732. sg[template[i].anp + k - 1].length +
  733. authsize > PAGE_SIZE)) {
  734. ret = -EINVAL;
  735. goto out;
  736. }
  737. if (diff_dst)
  738. sgout[template[i].anp + k - 1].length +=
  739. authsize;
  740. sg[template[i].anp + k - 1].length += authsize;
  741. }
  742. aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
  743. template[i].ilen,
  744. iv);
  745. aead_request_set_ad(req, template[i].alen);
  746. ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
  747. : crypto_aead_decrypt(req), &wait);
  748. switch (ret) {
  749. case 0:
  750. if (template[i].novrfy) {
  751. /* verification was supposed to fail */
  752. pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
  753. d, e, j, algo);
  754. /* so really, we got a bad message */
  755. ret = -EBADMSG;
  756. goto out;
  757. }
  758. break;
  759. case -EBADMSG:
  760. if (template[i].novrfy)
  761. /* verification failure was expected */
  762. continue;
  763. /* fall through */
  764. default:
  765. pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
  766. d, e, j, algo, -ret);
  767. goto out;
  768. }
  769. ret = -EINVAL;
  770. for (k = 0, temp = 0; k < template[i].np; k++) {
  771. if (diff_dst)
  772. q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
  773. offset_in_page(IDX[k]);
  774. else
  775. q = xbuf[IDX[k] >> PAGE_SHIFT] +
  776. offset_in_page(IDX[k]);
  777. n = template[i].tap[k];
  778. if (k == template[i].np - 1)
  779. n += enc ? authsize : -authsize;
  780. if (memcmp(q, template[i].result + temp, n)) {
  781. pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
  782. d, j, e, k, algo);
  783. hexdump(q, n);
  784. goto out;
  785. }
  786. q += n;
  787. if (k == template[i].np - 1 && !enc) {
  788. if (!diff_dst &&
  789. memcmp(q, template[i].input +
  790. temp + n, authsize))
  791. n = authsize;
  792. else
  793. n = 0;
  794. } else {
  795. for (n = 0; offset_in_page(q + n) && q[n]; n++)
  796. ;
  797. }
  798. if (n) {
  799. pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
  800. d, j, e, k, algo, n);
  801. hexdump(q, n);
  802. goto out;
  803. }
  804. temp += template[i].tap[k];
  805. }
  806. }
  807. ret = 0;
  808. out:
  809. aead_request_free(req);
  810. kfree(sg);
  811. out_nosg:
  812. if (diff_dst)
  813. testmgr_free_buf(xoutbuf);
  814. out_nooutbuf:
  815. testmgr_free_buf(axbuf);
  816. out_noaxbuf:
  817. testmgr_free_buf(xbuf);
  818. out_noxbuf:
  819. kfree(key);
  820. kfree(iv);
  821. return ret;
  822. }
  823. static int test_aead(struct crypto_aead *tfm, int enc,
  824. const struct aead_testvec *template, unsigned int tcount)
  825. {
  826. unsigned int alignmask;
  827. int ret;
  828. /* test 'dst == src' case */
  829. ret = __test_aead(tfm, enc, template, tcount, false, 0);
  830. if (ret)
  831. return ret;
  832. /* test 'dst != src' case */
  833. ret = __test_aead(tfm, enc, template, tcount, true, 0);
  834. if (ret)
  835. return ret;
  836. /* test unaligned buffers, check with one byte offset */
  837. ret = __test_aead(tfm, enc, template, tcount, true, 1);
  838. if (ret)
  839. return ret;
  840. alignmask = crypto_tfm_alg_alignmask(&tfm->base);
  841. if (alignmask) {
  842. /* Check if alignment mask for tfm is correctly set. */
  843. ret = __test_aead(tfm, enc, template, tcount, true,
  844. alignmask + 1);
  845. if (ret)
  846. return ret;
  847. }
  848. return 0;
  849. }
  850. static int test_cipher(struct crypto_cipher *tfm, int enc,
  851. const struct cipher_testvec *template,
  852. unsigned int tcount)
  853. {
  854. const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
  855. unsigned int i, j, k;
  856. char *q;
  857. const char *e;
  858. const char *input, *result;
  859. void *data;
  860. char *xbuf[XBUFSIZE];
  861. int ret = -ENOMEM;
  862. if (testmgr_alloc_buf(xbuf))
  863. goto out_nobuf;
  864. if (enc == ENCRYPT)
  865. e = "encryption";
  866. else
  867. e = "decryption";
  868. j = 0;
  869. for (i = 0; i < tcount; i++) {
  870. if (template[i].np)
  871. continue;
  872. if (fips_enabled && template[i].fips_skip)
  873. continue;
  874. input = enc ? template[i].ptext : template[i].ctext;
  875. result = enc ? template[i].ctext : template[i].ptext;
  876. j++;
  877. ret = -EINVAL;
  878. if (WARN_ON(template[i].len > PAGE_SIZE))
  879. goto out;
  880. data = xbuf[0];
  881. memcpy(data, input, template[i].len);
  882. crypto_cipher_clear_flags(tfm, ~0);
  883. if (template[i].wk)
  884. crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
  885. ret = crypto_cipher_setkey(tfm, template[i].key,
  886. template[i].klen);
  887. if (template[i].fail == !ret) {
  888. printk(KERN_ERR "alg: cipher: setkey failed "
  889. "on test %d for %s: flags=%x\n", j,
  890. algo, crypto_cipher_get_flags(tfm));
  891. goto out;
  892. } else if (ret)
  893. continue;
  894. for (k = 0; k < template[i].len;
  895. k += crypto_cipher_blocksize(tfm)) {
  896. if (enc)
  897. crypto_cipher_encrypt_one(tfm, data + k,
  898. data + k);
  899. else
  900. crypto_cipher_decrypt_one(tfm, data + k,
  901. data + k);
  902. }
  903. q = data;
  904. if (memcmp(q, result, template[i].len)) {
  905. printk(KERN_ERR "alg: cipher: Test %d failed "
  906. "on %s for %s\n", j, e, algo);
  907. hexdump(q, template[i].len);
  908. ret = -EINVAL;
  909. goto out;
  910. }
  911. }
  912. ret = 0;
  913. out:
  914. testmgr_free_buf(xbuf);
  915. out_nobuf:
  916. return ret;
  917. }
/*
 * Run the cipher test vectors in @template against skcipher @tfm.
 *
 * Two passes are made.  Pass 1 uses one linear buffer per vector,
 * placed @align_offset bytes into a page, writing the result to a
 * separate output buffer when @diff_dst is set.  Pass 2 re-runs the
 * chunked vectors (template[i].np != 0) with the data scattered across
 * several scatterlist entries at the page offsets taken from IDX[];
 * pass 2 only runs when @align_offset == 0.
 *
 * Returns 0 on success or a negative errno on the first failure.
 */
static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
			   const struct cipher_testvec *template,
			   unsigned int tcount,
			   const bool diff_dst, const int align_offset)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct skcipher_request *req;
	struct scatterlist sg[8];
	struct scatterlist sgout[8];
	const char *e, *d;
	struct crypto_wait wait;
	const char *input, *result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	int ret = -ENOMEM;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* "-ddst" tags log lines coming from the dst != src runs */
	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	/* Pass 1: linear (single scatterlist entry) buffers. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* chunked-only vectors are left to pass 2 */
		if (template[i].np && !template[i].also_non_np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		/*
		 * IV generators produce the IV themselves on encryption,
		 * so start those from a zeroed IV rather than the one in
		 * the vector.
		 */
		if (template[i].iv && !(template[i].generates_iv && enc))
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		input = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;
		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		data += align_offset;
		memcpy(data, input, template[i].len);

		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		/* vec->fail means setkey is *expected* to fail */
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		sg_init_one(&sg[0], data, template[i].len);
		if (diff_dst) {
			data = xoutbuf[0];
			data += align_offset;
			sg_init_one(&sgout[0], data, template[i].len);
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].len, iv);
		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
				      crypto_skcipher_decrypt(req), &wait);

		if (ret) {
			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* data now points at the destination buffer */
		q = data;
		if (memcmp(q, result, template[i].len)) {
			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}

		/* for IV generators, also check the IV written back */
		if (template[i].generates_iv && enc &&
		    memcmp(iv, template[i].iv, crypto_skcipher_ivsize(tfm))) {
			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
			       d, j, e, algo);
			hexdump(iv, crypto_skcipher_ivsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* Pass 2: chunked vectors spread over multiple scatterlist entries. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv && !(template[i].generates_iv && enc))
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		input = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;
		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		temp = 0;
		ret = -EINVAL;
		sg_init_table(sg, template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].np);
		/*
		 * Scatter the input: chunk k (tap[k] bytes) goes at page
		 * offset IDX[k].  A guard NUL is placed just past each
		 * chunk (when it fits in the page) so corruption beyond
		 * the chunk can be detected after the operation.
		 */
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);

			memcpy(q, input + temp, template[i].tap[k]);

			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
				q[template[i].tap[k]] = 0;

			sg_set_buf(&sg[k], q, template[i].tap[k]);
			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				sg_set_buf(&sgout[k], q, template[i].tap[k]);

				memset(q, 0, template[i].tap[k]);
				if (offset_in_page(q) +
				    template[i].tap[k] < PAGE_SIZE)
					q[template[i].tap[k]] = 0;
			}

			temp += template[i].tap[k];
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].len, iv);

		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
				      crypto_skcipher_decrypt(req), &wait);

		if (ret) {
			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* verify each destination chunk and its guard bytes */
		temp = 0;
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			if (memcmp(q, result + temp, template[i].tap[k])) {
				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, template[i].tap[k]);
				goto out;
			}

			/* any non-zero byte after the chunk is corruption */
			q += template[i].tap[k];
			for (n = 0; offset_in_page(q + n) && q[n]; n++)
				;
			if (n) {
				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}
			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	skcipher_request_free(req);
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
  1122. static int test_skcipher(struct crypto_skcipher *tfm, int enc,
  1123. const struct cipher_testvec *template,
  1124. unsigned int tcount)
  1125. {
  1126. unsigned int alignmask;
  1127. int ret;
  1128. /* test 'dst == src' case */
  1129. ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
  1130. if (ret)
  1131. return ret;
  1132. /* test 'dst != src' case */
  1133. ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
  1134. if (ret)
  1135. return ret;
  1136. /* test unaligned buffers, check with one byte offset */
  1137. ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
  1138. if (ret)
  1139. return ret;
  1140. alignmask = crypto_tfm_alg_alignmask(&tfm->base);
  1141. if (alignmask) {
  1142. /* Check if alignment mask for tfm is correctly set. */
  1143. ret = __test_skcipher(tfm, enc, template, tcount, true,
  1144. alignmask + 1);
  1145. if (ret)
  1146. return ret;
  1147. }
  1148. return 0;
  1149. }
  1150. static int test_comp(struct crypto_comp *tfm,
  1151. const struct comp_testvec *ctemplate,
  1152. const struct comp_testvec *dtemplate,
  1153. int ctcount, int dtcount)
  1154. {
  1155. const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
  1156. char *output, *decomp_output;
  1157. unsigned int i;
  1158. int ret;
  1159. output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
  1160. if (!output)
  1161. return -ENOMEM;
  1162. decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
  1163. if (!decomp_output) {
  1164. kfree(output);
  1165. return -ENOMEM;
  1166. }
  1167. for (i = 0; i < ctcount; i++) {
  1168. int ilen;
  1169. unsigned int dlen = COMP_BUF_SIZE;
  1170. memset(output, 0, sizeof(COMP_BUF_SIZE));
  1171. memset(decomp_output, 0, sizeof(COMP_BUF_SIZE));
  1172. ilen = ctemplate[i].inlen;
  1173. ret = crypto_comp_compress(tfm, ctemplate[i].input,
  1174. ilen, output, &dlen);
  1175. if (ret) {
  1176. printk(KERN_ERR "alg: comp: compression failed "
  1177. "on test %d for %s: ret=%d\n", i + 1, algo,
  1178. -ret);
  1179. goto out;
  1180. }
  1181. ilen = dlen;
  1182. dlen = COMP_BUF_SIZE;
  1183. ret = crypto_comp_decompress(tfm, output,
  1184. ilen, decomp_output, &dlen);
  1185. if (ret) {
  1186. pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
  1187. i + 1, algo, -ret);
  1188. goto out;
  1189. }
  1190. if (dlen != ctemplate[i].inlen) {
  1191. printk(KERN_ERR "alg: comp: Compression test %d "
  1192. "failed for %s: output len = %d\n", i + 1, algo,
  1193. dlen);
  1194. ret = -EINVAL;
  1195. goto out;
  1196. }
  1197. if (memcmp(decomp_output, ctemplate[i].input,
  1198. ctemplate[i].inlen)) {
  1199. pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
  1200. i + 1, algo);
  1201. hexdump(decomp_output, dlen);
  1202. ret = -EINVAL;
  1203. goto out;
  1204. }
  1205. }
  1206. for (i = 0; i < dtcount; i++) {
  1207. int ilen;
  1208. unsigned int dlen = COMP_BUF_SIZE;
  1209. memset(decomp_output, 0, sizeof(COMP_BUF_SIZE));
  1210. ilen = dtemplate[i].inlen;
  1211. ret = crypto_comp_decompress(tfm, dtemplate[i].input,
  1212. ilen, decomp_output, &dlen);
  1213. if (ret) {
  1214. printk(KERN_ERR "alg: comp: decompression failed "
  1215. "on test %d for %s: ret=%d\n", i + 1, algo,
  1216. -ret);
  1217. goto out;
  1218. }
  1219. if (dlen != dtemplate[i].outlen) {
  1220. printk(KERN_ERR "alg: comp: Decompression test %d "
  1221. "failed for %s: output len = %d\n", i + 1, algo,
  1222. dlen);
  1223. ret = -EINVAL;
  1224. goto out;
  1225. }
  1226. if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
  1227. printk(KERN_ERR "alg: comp: Decompression test %d "
  1228. "failed for %s\n", i + 1, algo);
  1229. hexdump(decomp_output, dlen);
  1230. ret = -EINVAL;
  1231. goto out;
  1232. }
  1233. }
  1234. ret = 0;
  1235. out:
  1236. kfree(decomp_output);
  1237. kfree(output);
  1238. return ret;
  1239. }
  1240. static int test_acomp(struct crypto_acomp *tfm,
  1241. const struct comp_testvec *ctemplate,
  1242. const struct comp_testvec *dtemplate,
  1243. int ctcount, int dtcount)
  1244. {
  1245. const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
  1246. unsigned int i;
  1247. char *output, *decomp_out;
  1248. int ret;
  1249. struct scatterlist src, dst;
  1250. struct acomp_req *req;
  1251. struct crypto_wait wait;
  1252. output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
  1253. if (!output)
  1254. return -ENOMEM;
  1255. decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
  1256. if (!decomp_out) {
  1257. kfree(output);
  1258. return -ENOMEM;
  1259. }
  1260. for (i = 0; i < ctcount; i++) {
  1261. unsigned int dlen = COMP_BUF_SIZE;
  1262. int ilen = ctemplate[i].inlen;
  1263. void *input_vec;
  1264. input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
  1265. if (!input_vec) {
  1266. ret = -ENOMEM;
  1267. goto out;
  1268. }
  1269. memset(output, 0, dlen);
  1270. crypto_init_wait(&wait);
  1271. sg_init_one(&src, input_vec, ilen);
  1272. sg_init_one(&dst, output, dlen);
  1273. req = acomp_request_alloc(tfm);
  1274. if (!req) {
  1275. pr_err("alg: acomp: request alloc failed for %s\n",
  1276. algo);
  1277. kfree(input_vec);
  1278. ret = -ENOMEM;
  1279. goto out;
  1280. }
  1281. acomp_request_set_params(req, &src, &dst, ilen, dlen);
  1282. acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
  1283. crypto_req_done, &wait);
  1284. ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
  1285. if (ret) {
  1286. pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
  1287. i + 1, algo, -ret);
  1288. kfree(input_vec);
  1289. acomp_request_free(req);
  1290. goto out;
  1291. }
  1292. ilen = req->dlen;
  1293. dlen = COMP_BUF_SIZE;
  1294. sg_init_one(&src, output, ilen);
  1295. sg_init_one(&dst, decomp_out, dlen);
  1296. crypto_init_wait(&wait);
  1297. acomp_request_set_params(req, &src, &dst, ilen, dlen);
  1298. ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
  1299. if (ret) {
  1300. pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
  1301. i + 1, algo, -ret);
  1302. kfree(input_vec);
  1303. acomp_request_free(req);
  1304. goto out;
  1305. }
  1306. if (req->dlen != ctemplate[i].inlen) {
  1307. pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
  1308. i + 1, algo, req->dlen);
  1309. ret = -EINVAL;
  1310. kfree(input_vec);
  1311. acomp_request_free(req);
  1312. goto out;
  1313. }
  1314. if (memcmp(input_vec, decomp_out, req->dlen)) {
  1315. pr_err("alg: acomp: Compression test %d failed for %s\n",
  1316. i + 1, algo);
  1317. hexdump(output, req->dlen);
  1318. ret = -EINVAL;
  1319. kfree(input_vec);
  1320. acomp_request_free(req);
  1321. goto out;
  1322. }
  1323. kfree(input_vec);
  1324. acomp_request_free(req);
  1325. }
  1326. for (i = 0; i < dtcount; i++) {
  1327. unsigned int dlen = COMP_BUF_SIZE;
  1328. int ilen = dtemplate[i].inlen;
  1329. void *input_vec;
  1330. input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
  1331. if (!input_vec) {
  1332. ret = -ENOMEM;
  1333. goto out;
  1334. }
  1335. memset(output, 0, dlen);
  1336. crypto_init_wait(&wait);
  1337. sg_init_one(&src, input_vec, ilen);
  1338. sg_init_one(&dst, output, dlen);
  1339. req = acomp_request_alloc(tfm);
  1340. if (!req) {
  1341. pr_err("alg: acomp: request alloc failed for %s\n",
  1342. algo);
  1343. kfree(input_vec);
  1344. ret = -ENOMEM;
  1345. goto out;
  1346. }
  1347. acomp_request_set_params(req, &src, &dst, ilen, dlen);
  1348. acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
  1349. crypto_req_done, &wait);
  1350. ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
  1351. if (ret) {
  1352. pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
  1353. i + 1, algo, -ret);
  1354. kfree(input_vec);
  1355. acomp_request_free(req);
  1356. goto out;
  1357. }
  1358. if (req->dlen != dtemplate[i].outlen) {
  1359. pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
  1360. i + 1, algo, req->dlen);
  1361. ret = -EINVAL;
  1362. kfree(input_vec);
  1363. acomp_request_free(req);
  1364. goto out;
  1365. }
  1366. if (memcmp(output, dtemplate[i].output, req->dlen)) {
  1367. pr_err("alg: acomp: Decompression test %d failed for %s\n",
  1368. i + 1, algo);
  1369. hexdump(output, req->dlen);
  1370. ret = -EINVAL;
  1371. kfree(input_vec);
  1372. acomp_request_free(req);
  1373. goto out;
  1374. }
  1375. kfree(input_vec);
  1376. acomp_request_free(req);
  1377. }
  1378. ret = 0;
  1379. out:
  1380. kfree(decomp_out);
  1381. kfree(output);
  1382. return ret;
  1383. }
  1384. static int test_cprng(struct crypto_rng *tfm,
  1385. const struct cprng_testvec *template,
  1386. unsigned int tcount)
  1387. {
  1388. const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
  1389. int err = 0, i, j, seedsize;
  1390. u8 *seed;
  1391. char result[32];
  1392. seedsize = crypto_rng_seedsize(tfm);
  1393. seed = kmalloc(seedsize, GFP_KERNEL);
  1394. if (!seed) {
  1395. printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
  1396. "for %s\n", algo);
  1397. return -ENOMEM;
  1398. }
  1399. for (i = 0; i < tcount; i++) {
  1400. memset(result, 0, 32);
  1401. memcpy(seed, template[i].v, template[i].vlen);
  1402. memcpy(seed + template[i].vlen, template[i].key,
  1403. template[i].klen);
  1404. memcpy(seed + template[i].vlen + template[i].klen,
  1405. template[i].dt, template[i].dtlen);
  1406. err = crypto_rng_reset(tfm, seed, seedsize);
  1407. if (err) {
  1408. printk(KERN_ERR "alg: cprng: Failed to reset rng "
  1409. "for %s\n", algo);
  1410. goto out;
  1411. }
  1412. for (j = 0; j < template[i].loops; j++) {
  1413. err = crypto_rng_get_bytes(tfm, result,
  1414. template[i].rlen);
  1415. if (err < 0) {
  1416. printk(KERN_ERR "alg: cprng: Failed to obtain "
  1417. "the correct amount of random data for "
  1418. "%s (requested %d)\n", algo,
  1419. template[i].rlen);
  1420. goto out;
  1421. }
  1422. }
  1423. err = memcmp(result, template[i].result,
  1424. template[i].rlen);
  1425. if (err) {
  1426. printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
  1427. i, algo);
  1428. hexdump(result, template[i].rlen);
  1429. err = -EINVAL;
  1430. goto out;
  1431. }
  1432. }
  1433. out:
  1434. kfree(seed);
  1435. return err;
  1436. }
  1437. static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
  1438. u32 type, u32 mask)
  1439. {
  1440. struct crypto_aead *tfm;
  1441. int err = 0;
  1442. tfm = crypto_alloc_aead(driver, type, mask);
  1443. if (IS_ERR(tfm)) {
  1444. printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
  1445. "%ld\n", driver, PTR_ERR(tfm));
  1446. return PTR_ERR(tfm);
  1447. }
  1448. if (desc->suite.aead.enc.vecs) {
  1449. err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
  1450. desc->suite.aead.enc.count);
  1451. if (err)
  1452. goto out;
  1453. }
  1454. if (!err && desc->suite.aead.dec.vecs)
  1455. err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
  1456. desc->suite.aead.dec.count);
  1457. out:
  1458. crypto_free_aead(tfm);
  1459. return err;
  1460. }
  1461. static int alg_test_cipher(const struct alg_test_desc *desc,
  1462. const char *driver, u32 type, u32 mask)
  1463. {
  1464. const struct cipher_test_suite *suite = &desc->suite.cipher;
  1465. struct crypto_cipher *tfm;
  1466. int err;
  1467. tfm = crypto_alloc_cipher(driver, type, mask);
  1468. if (IS_ERR(tfm)) {
  1469. printk(KERN_ERR "alg: cipher: Failed to load transform for "
  1470. "%s: %ld\n", driver, PTR_ERR(tfm));
  1471. return PTR_ERR(tfm);
  1472. }
  1473. err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
  1474. if (!err)
  1475. err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
  1476. crypto_free_cipher(tfm);
  1477. return err;
  1478. }
  1479. static int alg_test_skcipher(const struct alg_test_desc *desc,
  1480. const char *driver, u32 type, u32 mask)
  1481. {
  1482. const struct cipher_test_suite *suite = &desc->suite.cipher;
  1483. struct crypto_skcipher *tfm;
  1484. int err;
  1485. tfm = crypto_alloc_skcipher(driver, type, mask);
  1486. if (IS_ERR(tfm)) {
  1487. printk(KERN_ERR "alg: skcipher: Failed to load transform for "
  1488. "%s: %ld\n", driver, PTR_ERR(tfm));
  1489. return PTR_ERR(tfm);
  1490. }
  1491. err = test_skcipher(tfm, ENCRYPT, suite->vecs, suite->count);
  1492. if (!err)
  1493. err = test_skcipher(tfm, DECRYPT, suite->vecs, suite->count);
  1494. crypto_free_skcipher(tfm);
  1495. return err;
  1496. }
  1497. static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
  1498. u32 type, u32 mask)
  1499. {
  1500. struct crypto_comp *comp;
  1501. struct crypto_acomp *acomp;
  1502. int err;
  1503. u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
  1504. if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
  1505. acomp = crypto_alloc_acomp(driver, type, mask);
  1506. if (IS_ERR(acomp)) {
  1507. pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
  1508. driver, PTR_ERR(acomp));
  1509. return PTR_ERR(acomp);
  1510. }
  1511. err = test_acomp(acomp, desc->suite.comp.comp.vecs,
  1512. desc->suite.comp.decomp.vecs,
  1513. desc->suite.comp.comp.count,
  1514. desc->suite.comp.decomp.count);
  1515. crypto_free_acomp(acomp);
  1516. } else {
  1517. comp = crypto_alloc_comp(driver, type, mask);
  1518. if (IS_ERR(comp)) {
  1519. pr_err("alg: comp: Failed to load transform for %s: %ld\n",
  1520. driver, PTR_ERR(comp));
  1521. return PTR_ERR(comp);
  1522. }
  1523. err = test_comp(comp, desc->suite.comp.comp.vecs,
  1524. desc->suite.comp.decomp.vecs,
  1525. desc->suite.comp.comp.count,
  1526. desc->suite.comp.decomp.count);
  1527. crypto_free_comp(comp);
  1528. }
  1529. return err;
  1530. }
  1531. static int __alg_test_hash(const struct hash_testvec *template,
  1532. unsigned int tcount, const char *driver,
  1533. u32 type, u32 mask)
  1534. {
  1535. struct crypto_ahash *tfm;
  1536. int err;
  1537. tfm = crypto_alloc_ahash(driver, type, mask);
  1538. if (IS_ERR(tfm)) {
  1539. printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
  1540. "%ld\n", driver, PTR_ERR(tfm));
  1541. return PTR_ERR(tfm);
  1542. }
  1543. err = test_hash(tfm, template, tcount, true);
  1544. if (!err)
  1545. err = test_hash(tfm, template, tcount, false);
  1546. crypto_free_ahash(tfm);
  1547. return err;
  1548. }
  1549. static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
  1550. u32 type, u32 mask)
  1551. {
  1552. const struct hash_testvec *template = desc->suite.hash.vecs;
  1553. unsigned int tcount = desc->suite.hash.count;
  1554. unsigned int nr_unkeyed, nr_keyed;
  1555. int err;
  1556. /*
  1557. * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
  1558. * first, before setting a key on the tfm. To make this easier, we
  1559. * require that the unkeyed test vectors (if any) are listed first.
  1560. */
  1561. for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
  1562. if (template[nr_unkeyed].ksize)
  1563. break;
  1564. }
  1565. for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
  1566. if (!template[nr_unkeyed + nr_keyed].ksize) {
  1567. pr_err("alg: hash: test vectors for %s out of order, "
  1568. "unkeyed ones must come first\n", desc->alg);
  1569. return -EINVAL;
  1570. }
  1571. }
  1572. err = 0;
  1573. if (nr_unkeyed) {
  1574. err = __alg_test_hash(template, nr_unkeyed, driver, type, mask);
  1575. template += nr_unkeyed;
  1576. }
  1577. if (!err && nr_keyed)
  1578. err = __alg_test_hash(template, nr_keyed, driver, type, mask);
  1579. return err;
  1580. }
/*
 * alg_test_crc32c() - hash-suite test plus an extra shash sanity check.
 *
 * Runs the regular hash test vectors first, then allocates a shash and
 * verifies that seeding the descriptor context directly and finalizing
 * with no data yields the complemented seed value.
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		/* The crc32c desc context is the 32-bit partial CRC state. */
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;
		shash->flags = 0;

		/* Seed the partial CRC state directly.
		 * NOTE(review): le32_to_cpu() applied to a host-endian
		 * constant looks suspicious on big-endian — confirm the
		 * intended endianness convention of the desc context.
		 */
		*ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		/* An empty final is expected to return the complemented seed. */
		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}
  1619. static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
  1620. u32 type, u32 mask)
  1621. {
  1622. struct crypto_rng *rng;
  1623. int err;
  1624. rng = crypto_alloc_rng(driver, type, mask);
  1625. if (IS_ERR(rng)) {
  1626. printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
  1627. "%ld\n", driver, PTR_ERR(rng));
  1628. return PTR_ERR(rng);
  1629. }
  1630. err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
  1631. crypto_free_rng(rng);
  1632. return err;
  1633. }
  1634. static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
  1635. const char *driver, u32 type, u32 mask)
  1636. {
  1637. int ret = -EAGAIN;
  1638. struct crypto_rng *drng;
  1639. struct drbg_test_data test_data;
  1640. struct drbg_string addtl, pers, testentropy;
  1641. unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
  1642. if (!buf)
  1643. return -ENOMEM;
  1644. drng = crypto_alloc_rng(driver, type, mask);
  1645. if (IS_ERR(drng)) {
  1646. printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
  1647. "%s\n", driver);
  1648. kzfree(buf);
  1649. return -ENOMEM;
  1650. }
  1651. test_data.testentropy = &testentropy;
  1652. drbg_string_fill(&testentropy, test->entropy, test->entropylen);
  1653. drbg_string_fill(&pers, test->pers, test->perslen);
  1654. ret = crypto_drbg_reset_test(drng, &pers, &test_data);
  1655. if (ret) {
  1656. printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
  1657. goto outbuf;
  1658. }
  1659. drbg_string_fill(&addtl, test->addtla, test->addtllen);
  1660. if (pr) {
  1661. drbg_string_fill(&testentropy, test->entpra, test->entprlen);
  1662. ret = crypto_drbg_get_bytes_addtl_test(drng,
  1663. buf, test->expectedlen, &addtl, &test_data);
  1664. } else {
  1665. ret = crypto_drbg_get_bytes_addtl(drng,
  1666. buf, test->expectedlen, &addtl);
  1667. }
  1668. if (ret < 0) {
  1669. printk(KERN_ERR "alg: drbg: could not obtain random data for "
  1670. "driver %s\n", driver);
  1671. goto outbuf;
  1672. }
  1673. drbg_string_fill(&addtl, test->addtlb, test->addtllen);
  1674. if (pr) {
  1675. drbg_string_fill(&testentropy, test->entprb, test->entprlen);
  1676. ret = crypto_drbg_get_bytes_addtl_test(drng,
  1677. buf, test->expectedlen, &addtl, &test_data);
  1678. } else {
  1679. ret = crypto_drbg_get_bytes_addtl(drng,
  1680. buf, test->expectedlen, &addtl);
  1681. }
  1682. if (ret < 0) {
  1683. printk(KERN_ERR "alg: drbg: could not obtain random data for "
  1684. "driver %s\n", driver);
  1685. goto outbuf;
  1686. }
  1687. ret = memcmp(test->expected, buf, test->expectedlen);
  1688. outbuf:
  1689. crypto_free_rng(drng);
  1690. kzfree(buf);
  1691. return ret;
  1692. }
  1693. static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
  1694. u32 type, u32 mask)
  1695. {
  1696. int err = 0;
  1697. int pr = 0;
  1698. int i = 0;
  1699. const struct drbg_testvec *template = desc->suite.drbg.vecs;
  1700. unsigned int tcount = desc->suite.drbg.count;
  1701. if (0 == memcmp(driver, "drbg_pr_", 8))
  1702. pr = 1;
  1703. for (i = 0; i < tcount; i++) {
  1704. err = drbg_cavs_test(&template[i], pr, driver, type, mask);
  1705. if (err) {
  1706. printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
  1707. i, driver);
  1708. err = -EINVAL;
  1709. break;
  1710. }
  1711. }
  1712. return err;
  1713. }
/*
 * do_test_kpp() - run one key-agreement (KPP) test vector.
 *
 * Plays party A of the exchange: sets A's secret, generates A's public
 * key, then computes the shared secret from B's public key.  For
 * vectors with ->genkey set it additionally plays party B, using B's
 * secret together with A's freshly generated public key, and checks
 * that both sides derive the same shared secret; otherwise the result
 * is compared against the vector's precomputed expected_ss.
 *
 * Returns 0 on success, -EINVAL on a value mismatch, or a negative
 * error code from the crypto API / allocator.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;		/* saved A public key (genkey mode) */
	void *a_ss = NULL;		/* saved A shared secret (genkey mode) */
	void *shared_secret = NULL;	/* reference value for final memcmp */
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kzalloc(out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
		memcpy(a_public, sg_virt(req->dst), out_len_max);
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	memcpy(input_buf, vec->b_public, vec->b_public_size);
	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}
		memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size);

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		/* B's result (in req->dst) must equal A's saved result. */
		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}
  1843. static int test_kpp(struct crypto_kpp *tfm, const char *alg,
  1844. const struct kpp_testvec *vecs, unsigned int tcount)
  1845. {
  1846. int ret, i;
  1847. for (i = 0; i < tcount; i++) {
  1848. ret = do_test_kpp(tfm, vecs++, alg);
  1849. if (ret) {
  1850. pr_err("alg: %s: test failed on vector %d, err=%d\n",
  1851. alg, i + 1, ret);
  1852. return ret;
  1853. }
  1854. }
  1855. return 0;
  1856. }
  1857. static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
  1858. u32 type, u32 mask)
  1859. {
  1860. struct crypto_kpp *tfm;
  1861. int err = 0;
  1862. tfm = crypto_alloc_kpp(driver, type, mask);
  1863. if (IS_ERR(tfm)) {
  1864. pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
  1865. driver, PTR_ERR(tfm));
  1866. return PTR_ERR(tfm);
  1867. }
  1868. if (desc->suite.kpp.vecs)
  1869. err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
  1870. desc->suite.kpp.count);
  1871. crypto_free_kpp(tfm);
  1872. return err;
  1873. }
/*
 * test_akcipher_one() - run one akcipher test vector.
 *
 * Performs the forward operation (sign for siggen_sigver_test vectors,
 * encrypt otherwise) and compares the result with the expected
 * ciphertext/signature; for private-key vectors it then performs the
 * inverse operation (verify/decrypt) and checks the recovered message.
 *
 * Returns 0 on success or a negative error code.
 */
static int test_akcipher_one(struct crypto_akcipher *tfm,
			     const struct akcipher_testvec *vecs)
{
	char *xbuf[XBUFSIZE];
	struct akcipher_request *req;
	void *outbuf_enc = NULL;
	void *outbuf_dec = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max, out_len = 0;
	int err = -ENOMEM;
	struct scatterlist src, dst, src_tab[2];

	if (testmgr_alloc_buf(xbuf))
		return err;

	req = akcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		goto free_xbuf;

	crypto_init_wait(&wait);

	if (vecs->public_key_vec)
		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
						  vecs->key_len);
	else
		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
						   vecs->key_len);
	if (err)
		goto free_req;

	err = -ENOMEM;
	out_len_max = crypto_akcipher_maxsize(tfm);
	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_enc)
		goto free_req;

	/* The message must fit in the single page backing xbuf[0]. */
	if (WARN_ON(vecs->m_size > PAGE_SIZE))
		goto free_all;

	memcpy(xbuf[0], vecs->m, vecs->m_size);

	/* Split the message at byte 8 across two SG entries —
	 * presumably to exercise multi-entry scatterlist handling.
	 * NOTE(review): this assumes every vector has m_size > 8;
	 * confirm against the test vector tables.
	 */
	sg_init_table(src_tab, 2);
	sg_set_buf(&src_tab[0], xbuf[0], 8);
	sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
	sg_init_one(&dst, outbuf_enc, out_len_max);
	akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
				   out_len_max);
	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature generation */
			      crypto_akcipher_sign(req) :
			      /* Run asymmetric encrypt */
			      crypto_akcipher_encrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
		goto free_all;
	}
	if (req->dst_len != vecs->c_size) {
		pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
		err = -EINVAL;
		goto free_all;
	}
	/* verify that encrypted message is equal to expected */
	if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
		pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
		hexdump(outbuf_enc, vecs->c_size);
		err = -EINVAL;
		goto free_all;
	}
	/* Don't invoke decrypt for vectors with public key */
	if (vecs->public_key_vec) {
		err = 0;
		goto free_all;
	}

	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_dec) {
		err = -ENOMEM;
		goto free_all;
	}

	if (WARN_ON(vecs->c_size > PAGE_SIZE))
		goto free_all;

	memcpy(xbuf[0], vecs->c, vecs->c_size);

	sg_init_one(&src, xbuf[0], vecs->c_size);
	sg_init_one(&dst, outbuf_dec, out_len_max);
	crypto_init_wait(&wait);
	akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature verification */
			      crypto_akcipher_verify(req) :
			      /* Run asymmetric decrypt */
			      crypto_akcipher_decrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
		goto free_all;
	}
	out_len = req->dst_len;
	if (out_len < vecs->m_size) {
		pr_err("alg: akcipher: decrypt test failed. "
		       "Invalid output len %u\n", out_len);
		err = -EINVAL;
		goto free_all;
	}
	/* verify that decrypted message is equal to the original msg */
	/* The plaintext may be left-padded with zeroes; require the
	 * padding to be all-zero and the tail to match the message.
	 */
	if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
	    memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
		   vecs->m_size)) {
		pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
		hexdump(outbuf_dec, out_len);
		err = -EINVAL;
	}
free_all:
	kfree(outbuf_dec);
	kfree(outbuf_enc);
free_req:
	akcipher_request_free(req);
free_xbuf:
	testmgr_free_buf(xbuf);
	return err;
}
  1986. static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
  1987. const struct akcipher_testvec *vecs,
  1988. unsigned int tcount)
  1989. {
  1990. const char *algo =
  1991. crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
  1992. int ret, i;
  1993. for (i = 0; i < tcount; i++) {
  1994. ret = test_akcipher_one(tfm, vecs++);
  1995. if (!ret)
  1996. continue;
  1997. pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
  1998. i + 1, algo, ret);
  1999. return ret;
  2000. }
  2001. return 0;
  2002. }
  2003. static int alg_test_akcipher(const struct alg_test_desc *desc,
  2004. const char *driver, u32 type, u32 mask)
  2005. {
  2006. struct crypto_akcipher *tfm;
  2007. int err = 0;
  2008. tfm = crypto_alloc_akcipher(driver, type, mask);
  2009. if (IS_ERR(tfm)) {
  2010. pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
  2011. driver, PTR_ERR(tfm));
  2012. return PTR_ERR(tfm);
  2013. }
  2014. if (desc->suite.akcipher.vecs)
  2015. err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
  2016. desc->suite.akcipher.count);
  2017. crypto_free_akcipher(tfm);
  2018. return err;
  2019. }
  2020. static int alg_test_null(const struct alg_test_desc *desc,
  2021. const char *driver, u32 type, u32 mask)
  2022. {
  2023. return 0;
  2024. }
/* Build a test-vector suite initializer from a static array, deriving
 * the element count automatically via ARRAY_SIZE().
 */
#define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) }
  2026. /* Please keep this list sorted by algorithm name. */
  2027. static const struct alg_test_desc alg_test_descs[] = {
  2028. {
  2029. .alg = "aegis128",
  2030. .test = alg_test_aead,
  2031. .suite = {
  2032. .aead = {
  2033. .enc = __VECS(aegis128_enc_tv_template),
  2034. .dec = __VECS(aegis128_dec_tv_template),
  2035. }
  2036. }
  2037. }, {
  2038. .alg = "aegis128l",
  2039. .test = alg_test_aead,
  2040. .suite = {
  2041. .aead = {
  2042. .enc = __VECS(aegis128l_enc_tv_template),
  2043. .dec = __VECS(aegis128l_dec_tv_template),
  2044. }
  2045. }
  2046. }, {
  2047. .alg = "aegis256",
  2048. .test = alg_test_aead,
  2049. .suite = {
  2050. .aead = {
  2051. .enc = __VECS(aegis256_enc_tv_template),
  2052. .dec = __VECS(aegis256_dec_tv_template),
  2053. }
  2054. }
  2055. }, {
  2056. .alg = "ansi_cprng",
  2057. .test = alg_test_cprng,
  2058. .suite = {
  2059. .cprng = __VECS(ansi_cprng_aes_tv_template)
  2060. }
  2061. }, {
  2062. .alg = "authenc(hmac(md5),ecb(cipher_null))",
  2063. .test = alg_test_aead,
  2064. .suite = {
  2065. .aead = {
  2066. .enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template),
  2067. .dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template)
  2068. }
  2069. }
  2070. }, {
  2071. .alg = "authenc(hmac(sha1),cbc(aes))",
  2072. .test = alg_test_aead,
  2073. .fips_allowed = 1,
  2074. .suite = {
  2075. .aead = {
  2076. .enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp)
  2077. }
  2078. }
  2079. }, {
  2080. .alg = "authenc(hmac(sha1),cbc(des))",
  2081. .test = alg_test_aead,
  2082. .suite = {
  2083. .aead = {
  2084. .enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp)
  2085. }
  2086. }
  2087. }, {
  2088. .alg = "authenc(hmac(sha1),cbc(des3_ede))",
  2089. .test = alg_test_aead,
  2090. .fips_allowed = 1,
  2091. .suite = {
  2092. .aead = {
  2093. .enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp)
  2094. }
  2095. }
  2096. }, {
  2097. .alg = "authenc(hmac(sha1),ctr(aes))",
  2098. .test = alg_test_null,
  2099. .fips_allowed = 1,
  2100. }, {
  2101. .alg = "authenc(hmac(sha1),ecb(cipher_null))",
  2102. .test = alg_test_aead,
  2103. .suite = {
  2104. .aead = {
  2105. .enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp),
  2106. .dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp)
  2107. }
  2108. }
  2109. }, {
  2110. .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
  2111. .test = alg_test_null,
  2112. .fips_allowed = 1,
  2113. }, {
  2114. .alg = "authenc(hmac(sha224),cbc(des))",
  2115. .test = alg_test_aead,
  2116. .suite = {
  2117. .aead = {
  2118. .enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp)
  2119. }
  2120. }
  2121. }, {
  2122. .alg = "authenc(hmac(sha224),cbc(des3_ede))",
  2123. .test = alg_test_aead,
  2124. .fips_allowed = 1,
  2125. .suite = {
  2126. .aead = {
  2127. .enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp)
  2128. }
  2129. }
  2130. }, {
  2131. .alg = "authenc(hmac(sha256),cbc(aes))",
  2132. .test = alg_test_aead,
  2133. .fips_allowed = 1,
  2134. .suite = {
  2135. .aead = {
  2136. .enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp)
  2137. }
  2138. }
  2139. }, {
  2140. .alg = "authenc(hmac(sha256),cbc(des))",
  2141. .test = alg_test_aead,
  2142. .suite = {
  2143. .aead = {
  2144. .enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp)
  2145. }
  2146. }
  2147. }, {
  2148. .alg = "authenc(hmac(sha256),cbc(des3_ede))",
  2149. .test = alg_test_aead,
  2150. .fips_allowed = 1,
  2151. .suite = {
  2152. .aead = {
  2153. .enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp)
  2154. }
  2155. }
  2156. }, {
  2157. .alg = "authenc(hmac(sha256),ctr(aes))",
  2158. .test = alg_test_null,
  2159. .fips_allowed = 1,
  2160. }, {
  2161. .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
  2162. .test = alg_test_null,
  2163. .fips_allowed = 1,
  2164. }, {
  2165. .alg = "authenc(hmac(sha384),cbc(des))",
  2166. .test = alg_test_aead,
  2167. .suite = {
  2168. .aead = {
  2169. .enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp)
  2170. }
  2171. }
  2172. }, {
  2173. .alg = "authenc(hmac(sha384),cbc(des3_ede))",
  2174. .test = alg_test_aead,
  2175. .fips_allowed = 1,
  2176. .suite = {
  2177. .aead = {
  2178. .enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp)
  2179. }
  2180. }
  2181. }, {
  2182. .alg = "authenc(hmac(sha384),ctr(aes))",
  2183. .test = alg_test_null,
  2184. .fips_allowed = 1,
  2185. }, {
  2186. .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
  2187. .test = alg_test_null,
  2188. .fips_allowed = 1,
  2189. }, {
  2190. .alg = "authenc(hmac(sha512),cbc(aes))",
  2191. .fips_allowed = 1,
  2192. .test = alg_test_aead,
  2193. .suite = {
  2194. .aead = {
  2195. .enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp)
  2196. }
  2197. }
  2198. }, {
  2199. .alg = "authenc(hmac(sha512),cbc(des))",
  2200. .test = alg_test_aead,
  2201. .suite = {
  2202. .aead = {
  2203. .enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp)
  2204. }
  2205. }
  2206. }, {
  2207. .alg = "authenc(hmac(sha512),cbc(des3_ede))",
  2208. .test = alg_test_aead,
  2209. .fips_allowed = 1,
  2210. .suite = {
  2211. .aead = {
  2212. .enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp)
  2213. }
  2214. }
  2215. }, {
  2216. .alg = "authenc(hmac(sha512),ctr(aes))",
  2217. .test = alg_test_null,
  2218. .fips_allowed = 1,
  2219. }, {
  2220. .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
  2221. .test = alg_test_null,
  2222. .fips_allowed = 1,
  2223. }, {
  2224. .alg = "cbc(aes)",
  2225. .test = alg_test_skcipher,
  2226. .fips_allowed = 1,
  2227. .suite = {
  2228. .cipher = __VECS(aes_cbc_tv_template)
  2229. },
  2230. }, {
  2231. .alg = "cbc(anubis)",
  2232. .test = alg_test_skcipher,
  2233. .suite = {
  2234. .cipher = __VECS(anubis_cbc_tv_template)
  2235. },
  2236. }, {
  2237. .alg = "cbc(blowfish)",
  2238. .test = alg_test_skcipher,
  2239. .suite = {
  2240. .cipher = __VECS(bf_cbc_tv_template)
  2241. },
  2242. }, {
  2243. .alg = "cbc(camellia)",
  2244. .test = alg_test_skcipher,
  2245. .suite = {
  2246. .cipher = __VECS(camellia_cbc_tv_template)
  2247. },
  2248. }, {
  2249. .alg = "cbc(cast5)",
  2250. .test = alg_test_skcipher,
  2251. .suite = {
  2252. .cipher = __VECS(cast5_cbc_tv_template)
  2253. },
  2254. }, {
  2255. .alg = "cbc(cast6)",
  2256. .test = alg_test_skcipher,
  2257. .suite = {
  2258. .cipher = __VECS(cast6_cbc_tv_template)
  2259. },
  2260. }, {
  2261. .alg = "cbc(des)",
  2262. .test = alg_test_skcipher,
  2263. .suite = {
  2264. .cipher = __VECS(des_cbc_tv_template)
  2265. },
  2266. }, {
  2267. .alg = "cbc(des3_ede)",
  2268. .test = alg_test_skcipher,
  2269. .fips_allowed = 1,
  2270. .suite = {
  2271. .cipher = __VECS(des3_ede_cbc_tv_template)
  2272. },
  2273. }, {
  2274. /* Same as cbc(aes) except the key is stored in
  2275. * hardware secure memory which we reference by index
  2276. */
  2277. .alg = "cbc(paes)",
  2278. .test = alg_test_null,
  2279. .fips_allowed = 1,
  2280. }, {
  2281. .alg = "cbc(serpent)",
  2282. .test = alg_test_skcipher,
  2283. .suite = {
  2284. .cipher = __VECS(serpent_cbc_tv_template)
  2285. },
  2286. }, {
  2287. .alg = "cbc(twofish)",
  2288. .test = alg_test_skcipher,
  2289. .suite = {
  2290. .cipher = __VECS(tf_cbc_tv_template)
  2291. },
  2292. }, {
  2293. .alg = "cbcmac(aes)",
  2294. .fips_allowed = 1,
  2295. .test = alg_test_hash,
  2296. .suite = {
  2297. .hash = __VECS(aes_cbcmac_tv_template)
  2298. }
  2299. }, {
  2300. .alg = "ccm(aes)",
  2301. .test = alg_test_aead,
  2302. .fips_allowed = 1,
  2303. .suite = {
  2304. .aead = {
  2305. .enc = __VECS(aes_ccm_enc_tv_template),
  2306. .dec = __VECS(aes_ccm_dec_tv_template)
  2307. }
  2308. }
  2309. }, {
  2310. .alg = "chacha20",
  2311. .test = alg_test_skcipher,
  2312. .suite = {
  2313. .cipher = __VECS(chacha20_tv_template)
  2314. },
  2315. }, {
  2316. .alg = "cmac(aes)",
  2317. .fips_allowed = 1,
  2318. .test = alg_test_hash,
  2319. .suite = {
  2320. .hash = __VECS(aes_cmac128_tv_template)
  2321. }
  2322. }, {
  2323. .alg = "cmac(des3_ede)",
  2324. .fips_allowed = 1,
  2325. .test = alg_test_hash,
  2326. .suite = {
  2327. .hash = __VECS(des3_ede_cmac64_tv_template)
  2328. }
  2329. }, {
  2330. .alg = "compress_null",
  2331. .test = alg_test_null,
  2332. }, {
  2333. .alg = "crc32",
  2334. .test = alg_test_hash,
  2335. .suite = {
  2336. .hash = __VECS(crc32_tv_template)
  2337. }
  2338. }, {
  2339. .alg = "crc32c",
  2340. .test = alg_test_crc32c,
  2341. .fips_allowed = 1,
  2342. .suite = {
  2343. .hash = __VECS(crc32c_tv_template)
  2344. }
  2345. }, {
  2346. .alg = "crct10dif",
  2347. .test = alg_test_hash,
  2348. .fips_allowed = 1,
  2349. .suite = {
  2350. .hash = __VECS(crct10dif_tv_template)
  2351. }
  2352. }, {
  2353. .alg = "ctr(aes)",
  2354. .test = alg_test_skcipher,
  2355. .fips_allowed = 1,
  2356. .suite = {
  2357. .cipher = __VECS(aes_ctr_tv_template)
  2358. }
  2359. }, {
  2360. .alg = "ctr(blowfish)",
  2361. .test = alg_test_skcipher,
  2362. .suite = {
  2363. .cipher = __VECS(bf_ctr_tv_template)
  2364. }
  2365. }, {
  2366. .alg = "ctr(camellia)",
  2367. .test = alg_test_skcipher,
  2368. .suite = {
  2369. .cipher = __VECS(camellia_ctr_tv_template)
  2370. }
  2371. }, {
  2372. .alg = "ctr(cast5)",
  2373. .test = alg_test_skcipher,
  2374. .suite = {
  2375. .cipher = __VECS(cast5_ctr_tv_template)
  2376. }
  2377. }, {
  2378. .alg = "ctr(cast6)",
  2379. .test = alg_test_skcipher,
  2380. .suite = {
  2381. .cipher = __VECS(cast6_ctr_tv_template)
  2382. }
  2383. }, {
  2384. .alg = "ctr(des)",
  2385. .test = alg_test_skcipher,
  2386. .suite = {
  2387. .cipher = __VECS(des_ctr_tv_template)
  2388. }
  2389. }, {
  2390. .alg = "ctr(des3_ede)",
  2391. .test = alg_test_skcipher,
  2392. .fips_allowed = 1,
  2393. .suite = {
  2394. .cipher = __VECS(des3_ede_ctr_tv_template)
  2395. }
  2396. }, {
  2397. /* Same as ctr(aes) except the key is stored in
  2398. * hardware secure memory which we reference by index
  2399. */
  2400. .alg = "ctr(paes)",
  2401. .test = alg_test_null,
  2402. .fips_allowed = 1,
  2403. }, {
  2404. .alg = "ctr(serpent)",
  2405. .test = alg_test_skcipher,
  2406. .suite = {
  2407. .cipher = __VECS(serpent_ctr_tv_template)
  2408. }
  2409. }, {
  2410. .alg = "ctr(twofish)",
  2411. .test = alg_test_skcipher,
  2412. .suite = {
  2413. .cipher = __VECS(tf_ctr_tv_template)
  2414. }
  2415. }, {
  2416. .alg = "cts(cbc(aes))",
  2417. .test = alg_test_skcipher,
  2418. .suite = {
  2419. .cipher = __VECS(cts_mode_tv_template)
  2420. }
  2421. }, {
  2422. .alg = "deflate",
  2423. .test = alg_test_comp,
  2424. .fips_allowed = 1,
  2425. .suite = {
  2426. .comp = {
  2427. .comp = __VECS(deflate_comp_tv_template),
  2428. .decomp = __VECS(deflate_decomp_tv_template)
  2429. }
  2430. }
  2431. }, {
  2432. .alg = "dh",
  2433. .test = alg_test_kpp,
  2434. .fips_allowed = 1,
  2435. .suite = {
  2436. .kpp = __VECS(dh_tv_template)
  2437. }
  2438. }, {
  2439. .alg = "digest_null",
  2440. .test = alg_test_null,
  2441. }, {
  2442. .alg = "drbg_nopr_ctr_aes128",
  2443. .test = alg_test_drbg,
  2444. .fips_allowed = 1,
  2445. .suite = {
  2446. .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
  2447. }
  2448. }, {
  2449. .alg = "drbg_nopr_ctr_aes192",
  2450. .test = alg_test_drbg,
  2451. .fips_allowed = 1,
  2452. .suite = {
  2453. .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
  2454. }
  2455. }, {
  2456. .alg = "drbg_nopr_ctr_aes256",
  2457. .test = alg_test_drbg,
  2458. .fips_allowed = 1,
  2459. .suite = {
  2460. .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
  2461. }
  2462. }, {
  2463. /*
  2464. * There is no need to specifically test the DRBG with every
  2465. * backend cipher -- covered by drbg_nopr_hmac_sha256 test
  2466. */
  2467. .alg = "drbg_nopr_hmac_sha1",
  2468. .fips_allowed = 1,
  2469. .test = alg_test_null,
  2470. }, {
  2471. .alg = "drbg_nopr_hmac_sha256",
  2472. .test = alg_test_drbg,
  2473. .fips_allowed = 1,
  2474. .suite = {
  2475. .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
  2476. }
  2477. }, {
  2478. /* covered by drbg_nopr_hmac_sha256 test */
  2479. .alg = "drbg_nopr_hmac_sha384",
  2480. .fips_allowed = 1,
  2481. .test = alg_test_null,
  2482. }, {
  2483. .alg = "drbg_nopr_hmac_sha512",
  2484. .test = alg_test_null,
  2485. .fips_allowed = 1,
  2486. }, {
  2487. .alg = "drbg_nopr_sha1",
  2488. .fips_allowed = 1,
  2489. .test = alg_test_null,
  2490. }, {
  2491. .alg = "drbg_nopr_sha256",
  2492. .test = alg_test_drbg,
  2493. .fips_allowed = 1,
  2494. .suite = {
  2495. .drbg = __VECS(drbg_nopr_sha256_tv_template)
  2496. }
  2497. }, {
  2498. /* covered by drbg_nopr_sha256 test */
  2499. .alg = "drbg_nopr_sha384",
  2500. .fips_allowed = 1,
  2501. .test = alg_test_null,
  2502. }, {
  2503. .alg = "drbg_nopr_sha512",
  2504. .fips_allowed = 1,
  2505. .test = alg_test_null,
  2506. }, {
  2507. .alg = "drbg_pr_ctr_aes128",
  2508. .test = alg_test_drbg,
  2509. .fips_allowed = 1,
  2510. .suite = {
  2511. .drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
  2512. }
  2513. }, {
  2514. /* covered by drbg_pr_ctr_aes128 test */
  2515. .alg = "drbg_pr_ctr_aes192",
  2516. .fips_allowed = 1,
  2517. .test = alg_test_null,
  2518. }, {
  2519. .alg = "drbg_pr_ctr_aes256",
  2520. .fips_allowed = 1,
  2521. .test = alg_test_null,
  2522. }, {
  2523. .alg = "drbg_pr_hmac_sha1",
  2524. .fips_allowed = 1,
  2525. .test = alg_test_null,
  2526. }, {
  2527. .alg = "drbg_pr_hmac_sha256",
  2528. .test = alg_test_drbg,
  2529. .fips_allowed = 1,
  2530. .suite = {
  2531. .drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
  2532. }
  2533. }, {
  2534. /* covered by drbg_pr_hmac_sha256 test */
  2535. .alg = "drbg_pr_hmac_sha384",
  2536. .fips_allowed = 1,
  2537. .test = alg_test_null,
  2538. }, {
  2539. .alg = "drbg_pr_hmac_sha512",
  2540. .test = alg_test_null,
  2541. .fips_allowed = 1,
  2542. }, {
  2543. .alg = "drbg_pr_sha1",
  2544. .fips_allowed = 1,
  2545. .test = alg_test_null,
  2546. }, {
  2547. .alg = "drbg_pr_sha256",
  2548. .test = alg_test_drbg,
  2549. .fips_allowed = 1,
  2550. .suite = {
  2551. .drbg = __VECS(drbg_pr_sha256_tv_template)
  2552. }
  2553. }, {
  2554. /* covered by drbg_pr_sha256 test */
  2555. .alg = "drbg_pr_sha384",
  2556. .fips_allowed = 1,
  2557. .test = alg_test_null,
  2558. }, {
  2559. .alg = "drbg_pr_sha512",
  2560. .fips_allowed = 1,
  2561. .test = alg_test_null,
  2562. }, {
  2563. .alg = "ecb(aes)",
  2564. .test = alg_test_skcipher,
  2565. .fips_allowed = 1,
  2566. .suite = {
  2567. .cipher = __VECS(aes_tv_template)
  2568. }
  2569. }, {
  2570. .alg = "ecb(anubis)",
  2571. .test = alg_test_skcipher,
  2572. .suite = {
  2573. .cipher = __VECS(anubis_tv_template)
  2574. }
  2575. }, {
  2576. .alg = "ecb(arc4)",
  2577. .test = alg_test_skcipher,
  2578. .suite = {
  2579. .cipher = __VECS(arc4_tv_template)
  2580. }
  2581. }, {
  2582. .alg = "ecb(blowfish)",
  2583. .test = alg_test_skcipher,
  2584. .suite = {
  2585. .cipher = __VECS(bf_tv_template)
  2586. }
  2587. }, {
  2588. .alg = "ecb(camellia)",
  2589. .test = alg_test_skcipher,
  2590. .suite = {
  2591. .cipher = __VECS(camellia_tv_template)
  2592. }
  2593. }, {
  2594. .alg = "ecb(cast5)",
  2595. .test = alg_test_skcipher,
  2596. .suite = {
  2597. .cipher = __VECS(cast5_tv_template)
  2598. }
  2599. }, {
  2600. .alg = "ecb(cast6)",
  2601. .test = alg_test_skcipher,
  2602. .suite = {
  2603. .cipher = __VECS(cast6_tv_template)
  2604. }
  2605. }, {
  2606. .alg = "ecb(cipher_null)",
  2607. .test = alg_test_null,
  2608. .fips_allowed = 1,
  2609. }, {
  2610. .alg = "ecb(des)",
  2611. .test = alg_test_skcipher,
  2612. .suite = {
  2613. .cipher = __VECS(des_tv_template)
  2614. }
  2615. }, {
  2616. .alg = "ecb(des3_ede)",
  2617. .test = alg_test_skcipher,
  2618. .fips_allowed = 1,
  2619. .suite = {
  2620. .cipher = __VECS(des3_ede_tv_template)
  2621. }
  2622. }, {
  2623. .alg = "ecb(fcrypt)",
  2624. .test = alg_test_skcipher,
  2625. .suite = {
  2626. .cipher = {
  2627. .vecs = fcrypt_pcbc_tv_template,
  2628. .count = 1
  2629. }
  2630. }
  2631. }, {
  2632. .alg = "ecb(khazad)",
  2633. .test = alg_test_skcipher,
  2634. .suite = {
  2635. .cipher = __VECS(khazad_tv_template)
  2636. }
  2637. }, {
  2638. /* Same as ecb(aes) except the key is stored in
  2639. * hardware secure memory which we reference by index
  2640. */
  2641. .alg = "ecb(paes)",
  2642. .test = alg_test_null,
  2643. .fips_allowed = 1,
  2644. }, {
  2645. .alg = "ecb(seed)",
  2646. .test = alg_test_skcipher,
  2647. .suite = {
  2648. .cipher = __VECS(seed_tv_template)
  2649. }
  2650. }, {
  2651. .alg = "ecb(serpent)",
  2652. .test = alg_test_skcipher,
  2653. .suite = {
  2654. .cipher = __VECS(serpent_tv_template)
  2655. }
  2656. }, {
  2657. .alg = "ecb(sm4)",
  2658. .test = alg_test_skcipher,
  2659. .suite = {
  2660. .cipher = __VECS(sm4_tv_template)
  2661. }
  2662. }, {
  2663. .alg = "ecb(speck128)",
  2664. .test = alg_test_skcipher,
  2665. .suite = {
  2666. .cipher = __VECS(speck128_tv_template)
  2667. }
  2668. }, {
  2669. .alg = "ecb(speck64)",
  2670. .test = alg_test_skcipher,
  2671. .suite = {
  2672. .cipher = __VECS(speck64_tv_template)
  2673. }
  2674. }, {
  2675. .alg = "ecb(tea)",
  2676. .test = alg_test_skcipher,
  2677. .suite = {
  2678. .cipher = __VECS(tea_tv_template)
  2679. }
  2680. }, {
  2681. .alg = "ecb(tnepres)",
  2682. .test = alg_test_skcipher,
  2683. .suite = {
  2684. .cipher = __VECS(tnepres_tv_template)
  2685. }
  2686. }, {
  2687. .alg = "ecb(twofish)",
  2688. .test = alg_test_skcipher,
  2689. .suite = {
  2690. .cipher = __VECS(tf_tv_template)
  2691. }
  2692. }, {
  2693. .alg = "ecb(xeta)",
  2694. .test = alg_test_skcipher,
  2695. .suite = {
  2696. .cipher = __VECS(xeta_tv_template)
  2697. }
  2698. }, {
  2699. .alg = "ecb(xtea)",
  2700. .test = alg_test_skcipher,
  2701. .suite = {
  2702. .cipher = __VECS(xtea_tv_template)
  2703. }
  2704. }, {
  2705. .alg = "ecdh",
  2706. .test = alg_test_kpp,
  2707. .fips_allowed = 1,
  2708. .suite = {
  2709. .kpp = __VECS(ecdh_tv_template)
  2710. }
  2711. }, {
  2712. .alg = "gcm(aes)",
  2713. .test = alg_test_aead,
  2714. .fips_allowed = 1,
  2715. .suite = {
  2716. .aead = {
  2717. .enc = __VECS(aes_gcm_enc_tv_template),
  2718. .dec = __VECS(aes_gcm_dec_tv_template)
  2719. }
  2720. }
  2721. }, {
  2722. .alg = "ghash",
  2723. .test = alg_test_hash,
  2724. .fips_allowed = 1,
  2725. .suite = {
  2726. .hash = __VECS(ghash_tv_template)
  2727. }
  2728. }, {
  2729. .alg = "hmac(md5)",
  2730. .test = alg_test_hash,
  2731. .suite = {
  2732. .hash = __VECS(hmac_md5_tv_template)
  2733. }
  2734. }, {
  2735. .alg = "hmac(rmd128)",
  2736. .test = alg_test_hash,
  2737. .suite = {
  2738. .hash = __VECS(hmac_rmd128_tv_template)
  2739. }
  2740. }, {
  2741. .alg = "hmac(rmd160)",
  2742. .test = alg_test_hash,
  2743. .suite = {
  2744. .hash = __VECS(hmac_rmd160_tv_template)
  2745. }
  2746. }, {
  2747. .alg = "hmac(sha1)",
  2748. .test = alg_test_hash,
  2749. .fips_allowed = 1,
  2750. .suite = {
  2751. .hash = __VECS(hmac_sha1_tv_template)
  2752. }
  2753. }, {
  2754. .alg = "hmac(sha224)",
  2755. .test = alg_test_hash,
  2756. .fips_allowed = 1,
  2757. .suite = {
  2758. .hash = __VECS(hmac_sha224_tv_template)
  2759. }
  2760. }, {
  2761. .alg = "hmac(sha256)",
  2762. .test = alg_test_hash,
  2763. .fips_allowed = 1,
  2764. .suite = {
  2765. .hash = __VECS(hmac_sha256_tv_template)
  2766. }
  2767. }, {
  2768. .alg = "hmac(sha3-224)",
  2769. .test = alg_test_hash,
  2770. .fips_allowed = 1,
  2771. .suite = {
  2772. .hash = __VECS(hmac_sha3_224_tv_template)
  2773. }
  2774. }, {
  2775. .alg = "hmac(sha3-256)",
  2776. .test = alg_test_hash,
  2777. .fips_allowed = 1,
  2778. .suite = {
  2779. .hash = __VECS(hmac_sha3_256_tv_template)
  2780. }
  2781. }, {
  2782. .alg = "hmac(sha3-384)",
  2783. .test = alg_test_hash,
  2784. .fips_allowed = 1,
  2785. .suite = {
  2786. .hash = __VECS(hmac_sha3_384_tv_template)
  2787. }
  2788. }, {
  2789. .alg = "hmac(sha3-512)",
  2790. .test = alg_test_hash,
  2791. .fips_allowed = 1,
  2792. .suite = {
  2793. .hash = __VECS(hmac_sha3_512_tv_template)
  2794. }
  2795. }, {
  2796. .alg = "hmac(sha384)",
  2797. .test = alg_test_hash,
  2798. .fips_allowed = 1,
  2799. .suite = {
  2800. .hash = __VECS(hmac_sha384_tv_template)
  2801. }
  2802. }, {
  2803. .alg = "hmac(sha512)",
  2804. .test = alg_test_hash,
  2805. .fips_allowed = 1,
  2806. .suite = {
  2807. .hash = __VECS(hmac_sha512_tv_template)
  2808. }
  2809. }, {
  2810. .alg = "jitterentropy_rng",
  2811. .fips_allowed = 1,
  2812. .test = alg_test_null,
  2813. }, {
  2814. .alg = "kw(aes)",
  2815. .test = alg_test_skcipher,
  2816. .fips_allowed = 1,
  2817. .suite = {
  2818. .cipher = __VECS(aes_kw_tv_template)
  2819. }
  2820. }, {
  2821. .alg = "lrw(aes)",
  2822. .test = alg_test_skcipher,
  2823. .suite = {
  2824. .cipher = __VECS(aes_lrw_tv_template)
  2825. }
  2826. }, {
  2827. .alg = "lrw(camellia)",
  2828. .test = alg_test_skcipher,
  2829. .suite = {
  2830. .cipher = __VECS(camellia_lrw_tv_template)
  2831. }
  2832. }, {
  2833. .alg = "lrw(cast6)",
  2834. .test = alg_test_skcipher,
  2835. .suite = {
  2836. .cipher = __VECS(cast6_lrw_tv_template)
  2837. }
  2838. }, {
  2839. .alg = "lrw(serpent)",
  2840. .test = alg_test_skcipher,
  2841. .suite = {
  2842. .cipher = __VECS(serpent_lrw_tv_template)
  2843. }
  2844. }, {
  2845. .alg = "lrw(twofish)",
  2846. .test = alg_test_skcipher,
  2847. .suite = {
  2848. .cipher = __VECS(tf_lrw_tv_template)
  2849. }
  2850. }, {
  2851. .alg = "lz4",
  2852. .test = alg_test_comp,
  2853. .fips_allowed = 1,
  2854. .suite = {
  2855. .comp = {
  2856. .comp = __VECS(lz4_comp_tv_template),
  2857. .decomp = __VECS(lz4_decomp_tv_template)
  2858. }
  2859. }
  2860. }, {
  2861. .alg = "lz4hc",
  2862. .test = alg_test_comp,
  2863. .fips_allowed = 1,
  2864. .suite = {
  2865. .comp = {
  2866. .comp = __VECS(lz4hc_comp_tv_template),
  2867. .decomp = __VECS(lz4hc_decomp_tv_template)
  2868. }
  2869. }
  2870. }, {
  2871. .alg = "lzo",
  2872. .test = alg_test_comp,
  2873. .fips_allowed = 1,
  2874. .suite = {
  2875. .comp = {
  2876. .comp = __VECS(lzo_comp_tv_template),
  2877. .decomp = __VECS(lzo_decomp_tv_template)
  2878. }
  2879. }
  2880. }, {
  2881. .alg = "md4",
  2882. .test = alg_test_hash,
  2883. .suite = {
  2884. .hash = __VECS(md4_tv_template)
  2885. }
  2886. }, {
  2887. .alg = "md5",
  2888. .test = alg_test_hash,
  2889. .suite = {
  2890. .hash = __VECS(md5_tv_template)
  2891. }
  2892. }, {
  2893. .alg = "michael_mic",
  2894. .test = alg_test_hash,
  2895. .suite = {
  2896. .hash = __VECS(michael_mic_tv_template)
  2897. }
  2898. }, {
  2899. .alg = "morus1280",
  2900. .test = alg_test_aead,
  2901. .suite = {
  2902. .aead = {
  2903. .enc = __VECS(morus1280_enc_tv_template),
  2904. .dec = __VECS(morus1280_dec_tv_template),
  2905. }
  2906. }
  2907. }, {
  2908. .alg = "morus640",
  2909. .test = alg_test_aead,
  2910. .suite = {
  2911. .aead = {
  2912. .enc = __VECS(morus640_enc_tv_template),
  2913. .dec = __VECS(morus640_dec_tv_template),
  2914. }
  2915. }
  2916. }, {
  2917. .alg = "ofb(aes)",
  2918. .test = alg_test_skcipher,
  2919. .fips_allowed = 1,
  2920. .suite = {
  2921. .cipher = __VECS(aes_ofb_tv_template)
  2922. }
  2923. }, {
  2924. /* Same as ofb(aes) except the key is stored in
  2925. * hardware secure memory which we reference by index
  2926. */
  2927. .alg = "ofb(paes)",
  2928. .test = alg_test_null,
  2929. .fips_allowed = 1,
  2930. }, {
  2931. .alg = "pcbc(fcrypt)",
  2932. .test = alg_test_skcipher,
  2933. .suite = {
  2934. .cipher = __VECS(fcrypt_pcbc_tv_template)
  2935. }
  2936. }, {
  2937. .alg = "pkcs1pad(rsa,sha224)",
  2938. .test = alg_test_null,
  2939. .fips_allowed = 1,
  2940. }, {
  2941. .alg = "pkcs1pad(rsa,sha256)",
  2942. .test = alg_test_akcipher,
  2943. .fips_allowed = 1,
  2944. .suite = {
  2945. .akcipher = __VECS(pkcs1pad_rsa_tv_template)
  2946. }
  2947. }, {
  2948. .alg = "pkcs1pad(rsa,sha384)",
  2949. .test = alg_test_null,
  2950. .fips_allowed = 1,
  2951. }, {
  2952. .alg = "pkcs1pad(rsa,sha512)",
  2953. .test = alg_test_null,
  2954. .fips_allowed = 1,
  2955. }, {
  2956. .alg = "poly1305",
  2957. .test = alg_test_hash,
  2958. .suite = {
  2959. .hash = __VECS(poly1305_tv_template)
  2960. }
  2961. }, {
  2962. .alg = "rfc3686(ctr(aes))",
  2963. .test = alg_test_skcipher,
  2964. .fips_allowed = 1,
  2965. .suite = {
  2966. .cipher = __VECS(aes_ctr_rfc3686_tv_template)
  2967. }
  2968. }, {
  2969. .alg = "rfc4106(gcm(aes))",
  2970. .test = alg_test_aead,
  2971. .fips_allowed = 1,
  2972. .suite = {
  2973. .aead = {
  2974. .enc = __VECS(aes_gcm_rfc4106_enc_tv_template),
  2975. .dec = __VECS(aes_gcm_rfc4106_dec_tv_template)
  2976. }
  2977. }
  2978. }, {
  2979. .alg = "rfc4309(ccm(aes))",
  2980. .test = alg_test_aead,
  2981. .fips_allowed = 1,
  2982. .suite = {
  2983. .aead = {
  2984. .enc = __VECS(aes_ccm_rfc4309_enc_tv_template),
  2985. .dec = __VECS(aes_ccm_rfc4309_dec_tv_template)
  2986. }
  2987. }
  2988. }, {
  2989. .alg = "rfc4543(gcm(aes))",
  2990. .test = alg_test_aead,
  2991. .suite = {
  2992. .aead = {
  2993. .enc = __VECS(aes_gcm_rfc4543_enc_tv_template),
  2994. .dec = __VECS(aes_gcm_rfc4543_dec_tv_template),
  2995. }
  2996. }
  2997. }, {
  2998. .alg = "rfc7539(chacha20,poly1305)",
  2999. .test = alg_test_aead,
  3000. .suite = {
  3001. .aead = {
  3002. .enc = __VECS(rfc7539_enc_tv_template),
  3003. .dec = __VECS(rfc7539_dec_tv_template),
  3004. }
  3005. }
  3006. }, {
  3007. .alg = "rfc7539esp(chacha20,poly1305)",
  3008. .test = alg_test_aead,
  3009. .suite = {
  3010. .aead = {
  3011. .enc = __VECS(rfc7539esp_enc_tv_template),
  3012. .dec = __VECS(rfc7539esp_dec_tv_template),
  3013. }
  3014. }
  3015. }, {
  3016. .alg = "rmd128",
  3017. .test = alg_test_hash,
  3018. .suite = {
  3019. .hash = __VECS(rmd128_tv_template)
  3020. }
  3021. }, {
  3022. .alg = "rmd160",
  3023. .test = alg_test_hash,
  3024. .suite = {
  3025. .hash = __VECS(rmd160_tv_template)
  3026. }
  3027. }, {
  3028. .alg = "rmd256",
  3029. .test = alg_test_hash,
  3030. .suite = {
  3031. .hash = __VECS(rmd256_tv_template)
  3032. }
  3033. }, {
  3034. .alg = "rmd320",
  3035. .test = alg_test_hash,
  3036. .suite = {
  3037. .hash = __VECS(rmd320_tv_template)
  3038. }
  3039. }, {
  3040. .alg = "rsa",
  3041. .test = alg_test_akcipher,
  3042. .fips_allowed = 1,
  3043. .suite = {
  3044. .akcipher = __VECS(rsa_tv_template)
  3045. }
  3046. }, {
  3047. .alg = "salsa20",
  3048. .test = alg_test_skcipher,
  3049. .suite = {
  3050. .cipher = __VECS(salsa20_stream_tv_template)
  3051. }
  3052. }, {
  3053. .alg = "sha1",
  3054. .test = alg_test_hash,
  3055. .fips_allowed = 1,
  3056. .suite = {
  3057. .hash = __VECS(sha1_tv_template)
  3058. }
  3059. }, {
  3060. .alg = "sha224",
  3061. .test = alg_test_hash,
  3062. .fips_allowed = 1,
  3063. .suite = {
  3064. .hash = __VECS(sha224_tv_template)
  3065. }
  3066. }, {
  3067. .alg = "sha256",
  3068. .test = alg_test_hash,
  3069. .fips_allowed = 1,
  3070. .suite = {
  3071. .hash = __VECS(sha256_tv_template)
  3072. }
  3073. }, {
  3074. .alg = "sha3-224",
  3075. .test = alg_test_hash,
  3076. .fips_allowed = 1,
  3077. .suite = {
  3078. .hash = __VECS(sha3_224_tv_template)
  3079. }
  3080. }, {
  3081. .alg = "sha3-256",
  3082. .test = alg_test_hash,
  3083. .fips_allowed = 1,
  3084. .suite = {
  3085. .hash = __VECS(sha3_256_tv_template)
  3086. }
  3087. }, {
  3088. .alg = "sha3-384",
  3089. .test = alg_test_hash,
  3090. .fips_allowed = 1,
  3091. .suite = {
  3092. .hash = __VECS(sha3_384_tv_template)
  3093. }
  3094. }, {
  3095. .alg = "sha3-512",
  3096. .test = alg_test_hash,
  3097. .fips_allowed = 1,
  3098. .suite = {
  3099. .hash = __VECS(sha3_512_tv_template)
  3100. }
  3101. }, {
  3102. .alg = "sha384",
  3103. .test = alg_test_hash,
  3104. .fips_allowed = 1,
  3105. .suite = {
  3106. .hash = __VECS(sha384_tv_template)
  3107. }
  3108. }, {
  3109. .alg = "sha512",
  3110. .test = alg_test_hash,
  3111. .fips_allowed = 1,
  3112. .suite = {
  3113. .hash = __VECS(sha512_tv_template)
  3114. }
  3115. }, {
  3116. .alg = "sm3",
  3117. .test = alg_test_hash,
  3118. .suite = {
  3119. .hash = __VECS(sm3_tv_template)
  3120. }
  3121. }, {
  3122. .alg = "tgr128",
  3123. .test = alg_test_hash,
  3124. .suite = {
  3125. .hash = __VECS(tgr128_tv_template)
  3126. }
  3127. }, {
  3128. .alg = "tgr160",
  3129. .test = alg_test_hash,
  3130. .suite = {
  3131. .hash = __VECS(tgr160_tv_template)
  3132. }
  3133. }, {
  3134. .alg = "tgr192",
  3135. .test = alg_test_hash,
  3136. .suite = {
  3137. .hash = __VECS(tgr192_tv_template)
  3138. }
  3139. }, {
  3140. .alg = "vmac(aes)",
  3141. .test = alg_test_hash,
  3142. .suite = {
  3143. .hash = __VECS(aes_vmac128_tv_template)
  3144. }
  3145. }, {
  3146. .alg = "wp256",
  3147. .test = alg_test_hash,
  3148. .suite = {
  3149. .hash = __VECS(wp256_tv_template)
  3150. }
  3151. }, {
  3152. .alg = "wp384",
  3153. .test = alg_test_hash,
  3154. .suite = {
  3155. .hash = __VECS(wp384_tv_template)
  3156. }
  3157. }, {
  3158. .alg = "wp512",
  3159. .test = alg_test_hash,
  3160. .suite = {
  3161. .hash = __VECS(wp512_tv_template)
  3162. }
  3163. }, {
  3164. .alg = "xcbc(aes)",
  3165. .test = alg_test_hash,
  3166. .suite = {
  3167. .hash = __VECS(aes_xcbc128_tv_template)
  3168. }
  3169. }, {
  3170. .alg = "xts(aes)",
  3171. .test = alg_test_skcipher,
  3172. .fips_allowed = 1,
  3173. .suite = {
  3174. .cipher = __VECS(aes_xts_tv_template)
  3175. }
  3176. }, {
  3177. .alg = "xts(camellia)",
  3178. .test = alg_test_skcipher,
  3179. .suite = {
  3180. .cipher = __VECS(camellia_xts_tv_template)
  3181. }
  3182. }, {
  3183. .alg = "xts(cast6)",
  3184. .test = alg_test_skcipher,
  3185. .suite = {
  3186. .cipher = __VECS(cast6_xts_tv_template)
  3187. }
  3188. }, {
  3189. /* Same as xts(aes) except the key is stored in
  3190. * hardware secure memory which we reference by index
  3191. */
  3192. .alg = "xts(paes)",
  3193. .test = alg_test_null,
  3194. .fips_allowed = 1,
  3195. }, {
  3196. .alg = "xts(serpent)",
  3197. .test = alg_test_skcipher,
  3198. .suite = {
  3199. .cipher = __VECS(serpent_xts_tv_template)
  3200. }
  3201. }, {
  3202. .alg = "xts(speck128)",
  3203. .test = alg_test_skcipher,
  3204. .suite = {
  3205. .cipher = __VECS(speck128_xts_tv_template)
  3206. }
  3207. }, {
  3208. .alg = "xts(speck64)",
  3209. .test = alg_test_skcipher,
  3210. .suite = {
  3211. .cipher = __VECS(speck64_xts_tv_template)
  3212. }
  3213. }, {
  3214. .alg = "xts(twofish)",
  3215. .test = alg_test_skcipher,
  3216. .suite = {
  3217. .cipher = __VECS(tf_xts_tv_template)
  3218. }
  3219. }, {
  3220. .alg = "xts4096(paes)",
  3221. .test = alg_test_null,
  3222. .fips_allowed = 1,
  3223. }, {
  3224. .alg = "xts512(paes)",
  3225. .test = alg_test_null,
  3226. .fips_allowed = 1,
  3227. }, {
  3228. .alg = "zlib-deflate",
  3229. .test = alg_test_comp,
  3230. .fips_allowed = 1,
  3231. .suite = {
  3232. .comp = {
  3233. .comp = __VECS(zlib_deflate_comp_tv_template),
  3234. .decomp = __VECS(zlib_deflate_decomp_tv_template)
  3235. }
  3236. }
  3237. }, {
  3238. .alg = "zstd",
  3239. .test = alg_test_comp,
  3240. .fips_allowed = 1,
  3241. .suite = {
  3242. .comp = {
  3243. .comp = __VECS(zstd_comp_tv_template),
  3244. .decomp = __VECS(zstd_decomp_tv_template)
  3245. }
  3246. }
  3247. }
  3248. };
  3249. static bool alg_test_descs_checked;
  3250. static void alg_test_descs_check_order(void)
  3251. {
  3252. int i;
  3253. /* only check once */
  3254. if (alg_test_descs_checked)
  3255. return;
  3256. alg_test_descs_checked = true;
  3257. for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
  3258. int diff = strcmp(alg_test_descs[i - 1].alg,
  3259. alg_test_descs[i].alg);
  3260. if (WARN_ON(diff > 0)) {
  3261. pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
  3262. alg_test_descs[i - 1].alg,
  3263. alg_test_descs[i].alg);
  3264. }
  3265. if (WARN_ON(diff == 0)) {
  3266. pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
  3267. alg_test_descs[i].alg);
  3268. }
  3269. }
  3270. }
  3271. static int alg_find_test(const char *alg)
  3272. {
  3273. int start = 0;
  3274. int end = ARRAY_SIZE(alg_test_descs);
  3275. while (start < end) {
  3276. int i = (start + end) / 2;
  3277. int diff = strcmp(alg_test_descs[i].alg, alg);
  3278. if (diff > 0) {
  3279. end = i;
  3280. continue;
  3281. }
  3282. if (diff < 0) {
  3283. start = i + 1;
  3284. continue;
  3285. }
  3286. return i;
  3287. }
  3288. return -1;
  3289. }
  3290. int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
  3291. {
  3292. int i;
  3293. int j;
  3294. int rc;
  3295. if (!fips_enabled && notests) {
  3296. printk_once(KERN_INFO "alg: self-tests disabled\n");
  3297. return 0;
  3298. }
  3299. alg_test_descs_check_order();
  3300. if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
  3301. char nalg[CRYPTO_MAX_ALG_NAME];
  3302. if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
  3303. sizeof(nalg))
  3304. return -ENAMETOOLONG;
  3305. i = alg_find_test(nalg);
  3306. if (i < 0)
  3307. goto notest;
  3308. if (fips_enabled && !alg_test_descs[i].fips_allowed)
  3309. goto non_fips_alg;
  3310. rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
  3311. goto test_done;
  3312. }
  3313. i = alg_find_test(alg);
  3314. j = alg_find_test(driver);
  3315. if (i < 0 && j < 0)
  3316. goto notest;
  3317. if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
  3318. (j >= 0 && !alg_test_descs[j].fips_allowed)))
  3319. goto non_fips_alg;
  3320. rc = 0;
  3321. if (i >= 0)
  3322. rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
  3323. type, mask);
  3324. if (j >= 0 && j != i)
  3325. rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
  3326. type, mask);
  3327. test_done:
  3328. if (fips_enabled && rc)
  3329. panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
  3330. if (fips_enabled && !rc)
  3331. pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
  3332. return rc;
  3333. notest:
  3334. printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
  3335. return 0;
  3336. non_fips_alg:
  3337. return -EINVAL;
  3338. }
  3339. #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
  3340. EXPORT_SYMBOL_GPL(alg_test);