/*
 * This file contains instructions for testing by the test titled:
 *
 *         "Test x86 instruction decoder - new instructions"
 *
 * Note that the 'Expecting' comment lines are consumed by the
 * gen-insn-x86-dat.awk script and have the format:
 *
 *         Expecting: <op> <branch> <rel>
 *
 * If this file is changed, remember to run the gen-insn-x86-dat.sh
 * script and commit the result.
 *
 * Refer to insn-x86.c for more details.
 */
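/*
 * For example, the "bnd jmp label1" test below carries the trailing
 * comment "Expecting: jmp unconditional 0", which the awk script reads
 * as <op> = jmp, <branch> = unconditional and <rel> = 0.
 */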
int main(void)
{
	/* Following line is a marker for the awk script - do not change */
	asm volatile("rdtsc"); /* Start here */
	/* Test fix for vcvtph2ps in x86-opcode-map.txt */
	asm volatile("vcvtph2ps %xmm3,%ymm5");
#ifdef __x86_64__
	/* AVX-512: Instructions with the same op codes as Mask Instructions */
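	/*
	 * The AVX-512 mask (k-register) instructions reuse these base
	 * opcodes with a VEX prefix, e.g. cmovno is 0f 41 while kandw is
	 * VEX 0f 41, and seto is 0f 90 while kmovw is VEX 0f 90, so the
	 * decoder must rely on the prefix to tell them apart.
	 */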
	asm volatile("cmovno %rax,%rbx");
	asm volatile("cmovno 0x12345678(%rax),%rcx");
	asm volatile("cmovno 0x12345678(%rax),%cx");
	asm volatile("cmove %rax,%rbx");
	asm volatile("cmove 0x12345678(%rax),%rcx");
	asm volatile("cmove 0x12345678(%rax),%cx");
	asm volatile("seto 0x12345678(%rax)");
	asm volatile("setno 0x12345678(%rax)");
	asm volatile("setb 0x12345678(%rax)");
	asm volatile("setc 0x12345678(%rax)");
	asm volatile("setnae 0x12345678(%rax)");
	asm volatile("setae 0x12345678(%rax)");
	asm volatile("setnb 0x12345678(%rax)");
	asm volatile("setnc 0x12345678(%rax)");
	asm volatile("sets 0x12345678(%rax)");
	asm volatile("setns 0x12345678(%rax)");
	/* AVX-512: Mask Instructions */
	asm volatile("kandw %k7,%k6,%k5");
	asm volatile("kandq %k7,%k6,%k5");
	asm volatile("kandb %k7,%k6,%k5");
	asm volatile("kandd %k7,%k6,%k5");
	asm volatile("kandnw %k7,%k6,%k5");
	asm volatile("kandnq %k7,%k6,%k5");
	asm volatile("kandnb %k7,%k6,%k5");
	asm volatile("kandnd %k7,%k6,%k5");
	asm volatile("knotw %k7,%k6");
	asm volatile("knotq %k7,%k6");
	asm volatile("knotb %k7,%k6");
	asm volatile("knotd %k7,%k6");
	asm volatile("korw %k7,%k6,%k5");
	asm volatile("korq %k7,%k6,%k5");
	asm volatile("korb %k7,%k6,%k5");
	asm volatile("kord %k7,%k6,%k5");
	asm volatile("kxnorw %k7,%k6,%k5");
	asm volatile("kxnorq %k7,%k6,%k5");
	asm volatile("kxnorb %k7,%k6,%k5");
	asm volatile("kxnord %k7,%k6,%k5");
	asm volatile("kxorw %k7,%k6,%k5");
	asm volatile("kxorq %k7,%k6,%k5");
	asm volatile("kxorb %k7,%k6,%k5");
	asm volatile("kxord %k7,%k6,%k5");
	asm volatile("kaddw %k7,%k6,%k5");
	asm volatile("kaddq %k7,%k6,%k5");
	asm volatile("kaddb %k7,%k6,%k5");
	asm volatile("kaddd %k7,%k6,%k5");
	asm volatile("kunpckbw %k7,%k6,%k5");
	asm volatile("kunpckwd %k7,%k6,%k5");
	asm volatile("kunpckdq %k7,%k6,%k5");
	asm volatile("kmovw %k6,%k5");
	asm volatile("kmovw (%rcx),%k5");
	asm volatile("kmovw 0x123(%rax,%r14,8),%k5");
	asm volatile("kmovw %k5,(%rcx)");
	asm volatile("kmovw %k5,0x123(%rax,%r14,8)");
	asm volatile("kmovw %eax,%k5");
	asm volatile("kmovw %ebp,%k5");
	asm volatile("kmovw %r13d,%k5");
	asm volatile("kmovw %k5,%eax");
	asm volatile("kmovw %k5,%ebp");
	asm volatile("kmovw %k5,%r13d");
	asm volatile("kmovq %k6,%k5");
	asm volatile("kmovq (%rcx),%k5");
	asm volatile("kmovq 0x123(%rax,%r14,8),%k5");
	asm volatile("kmovq %k5,(%rcx)");
	asm volatile("kmovq %k5,0x123(%rax,%r14,8)");
	asm volatile("kmovq %rax,%k5");
	asm volatile("kmovq %rbp,%k5");
	asm volatile("kmovq %r13,%k5");
	asm volatile("kmovq %k5,%rax");
	asm volatile("kmovq %k5,%rbp");
	asm volatile("kmovq %k5,%r13");
	asm volatile("kmovb %k6,%k5");
	asm volatile("kmovb (%rcx),%k5");
	asm volatile("kmovb 0x123(%rax,%r14,8),%k5");
	asm volatile("kmovb %k5,(%rcx)");
	asm volatile("kmovb %k5,0x123(%rax,%r14,8)");
	asm volatile("kmovb %eax,%k5");
	asm volatile("kmovb %ebp,%k5");
	asm volatile("kmovb %r13d,%k5");
	asm volatile("kmovb %k5,%eax");
	asm volatile("kmovb %k5,%ebp");
	asm volatile("kmovb %k5,%r13d");
	asm volatile("kmovd %k6,%k5");
	asm volatile("kmovd (%rcx),%k5");
	asm volatile("kmovd 0x123(%rax,%r14,8),%k5");
	asm volatile("kmovd %k5,(%rcx)");
	asm volatile("kmovd %k5,0x123(%rax,%r14,8)");
	asm volatile("kmovd %eax,%k5");
	asm volatile("kmovd %ebp,%k5");
	asm volatile("kmovd %r13d,%k5");
	asm volatile("kmovd %k5,%eax");
	asm volatile("kmovd %k5,%ebp");
	asm volatile("kmovd %k5,%r13d");
	asm volatile("kortestw %k6,%k5");
	asm volatile("kortestq %k6,%k5");
	asm volatile("kortestb %k6,%k5");
	asm volatile("kortestd %k6,%k5");
	asm volatile("ktestw %k6,%k5");
	asm volatile("ktestq %k6,%k5");
	asm volatile("ktestb %k6,%k5");
	asm volatile("ktestd %k6,%k5");
	asm volatile("kshiftrw $0x12,%k6,%k5");
	asm volatile("kshiftrq $0x5b,%k6,%k5");
	asm volatile("kshiftlw $0x12,%k6,%k5");
	asm volatile("kshiftlq $0x5b,%k6,%k5");
	/* AVX-512: Op code 0f 5b */
	asm volatile("vcvtdq2ps %xmm5,%xmm6");
	asm volatile("vcvtqq2ps %zmm29,%ymm6{%k7}");
	asm volatile("vcvtps2dq %xmm5,%xmm6");
	asm volatile("vcvttps2dq %xmm5,%xmm6");
	/* AVX-512: Op code 0f 6f */
	asm volatile("movq %mm0,%mm4");
	asm volatile("vmovdqa %ymm4,%ymm6");
	asm volatile("vmovdqa32 %zmm25,%zmm26");
	asm volatile("vmovdqa64 %zmm25,%zmm26");
	asm volatile("vmovdqu %ymm4,%ymm6");
	asm volatile("vmovdqu32 %zmm29,%zmm30");
	asm volatile("vmovdqu64 %zmm25,%zmm26");
	asm volatile("vmovdqu8 %zmm29,%zmm30");
	asm volatile("vmovdqu16 %zmm25,%zmm26");
	/* AVX-512: Op code 0f 78 */
	asm volatile("vmread %rax,%rbx");
	asm volatile("vcvttps2udq %zmm25,%zmm26");
	asm volatile("vcvttpd2udq %zmm29,%ymm6{%k7}");
	asm volatile("vcvttsd2usi %xmm6,%rax");
	asm volatile("vcvttss2usi %xmm6,%rax");
	asm volatile("vcvttps2uqq %ymm5,%zmm26{%k7}");
	asm volatile("vcvttpd2uqq %zmm29,%zmm30");
	/* AVX-512: Op code 0f 79 */
	asm volatile("vmwrite %rax,%rbx");
	asm volatile("vcvtps2udq %zmm25,%zmm26");
	asm volatile("vcvtpd2udq %zmm29,%ymm6{%k7}");
	asm volatile("vcvtsd2usi %xmm6,%rax");
	asm volatile("vcvtss2usi %xmm6,%rax");
	asm volatile("vcvtps2uqq %ymm5,%zmm26{%k7}");
	asm volatile("vcvtpd2uqq %zmm29,%zmm30");
	/* AVX-512: Op code 0f 7a */
	asm volatile("vcvtudq2pd %ymm5,%zmm29{%k7}");
	asm volatile("vcvtuqq2pd %zmm25,%zmm26");
	asm volatile("vcvtudq2ps %zmm29,%zmm30");
	asm volatile("vcvtuqq2ps %zmm25,%ymm26{%k7}");
	asm volatile("vcvttps2qq %ymm25,%zmm26{%k7}");
	asm volatile("vcvttpd2qq %zmm29,%zmm30");
	/* AVX-512: Op code 0f 7b */
	asm volatile("vcvtusi2sd %eax,%xmm5,%xmm6");
	asm volatile("vcvtusi2ss %eax,%xmm5,%xmm6");
	asm volatile("vcvtps2qq %ymm5,%zmm26{%k7}");
	asm volatile("vcvtpd2qq %zmm29,%zmm30");
	/* AVX-512: Op code 0f 7f */
	asm volatile("movq.s %mm0,%mm4");
	asm volatile("vmovdqa %ymm8,%ymm6");
	asm volatile("vmovdqa32.s %zmm25,%zmm26");
	asm volatile("vmovdqa64.s %zmm25,%zmm26");
	asm volatile("vmovdqu %ymm8,%ymm6");
	asm volatile("vmovdqu32.s %zmm25,%zmm26");
	asm volatile("vmovdqu64.s %zmm25,%zmm26");
	asm volatile("vmovdqu8.s %zmm30,(%rcx)");
	asm volatile("vmovdqu16.s %zmm25,%zmm26");
	/* AVX-512: Op code 0f db */
	asm volatile("pand %mm1,%mm2");
	asm volatile("pand %xmm1,%xmm2");
	asm volatile("vpand %ymm4,%ymm6,%ymm2");
	asm volatile("vpandd %zmm24,%zmm25,%zmm26");
	asm volatile("vpandq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f df */
	asm volatile("pandn %mm1,%mm2");
	asm volatile("pandn %xmm1,%xmm2");
	asm volatile("vpandn %ymm4,%ymm6,%ymm2");
	asm volatile("vpandnd %zmm24,%zmm25,%zmm26");
	asm volatile("vpandnq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f e6 */
	asm volatile("vcvttpd2dq %xmm1,%xmm2");
	asm volatile("vcvtdq2pd %xmm5,%xmm6");
	asm volatile("vcvtdq2pd %ymm5,%zmm26{%k7}");
	asm volatile("vcvtqq2pd %zmm25,%zmm26");
	asm volatile("vcvtpd2dq %xmm1,%xmm2");
	/* AVX-512: Op code 0f eb */
	asm volatile("por %mm4,%mm6");
	asm volatile("vpor %ymm4,%ymm6,%ymm2");
	asm volatile("vpord %zmm24,%zmm25,%zmm26");
	asm volatile("vporq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f ef */
	asm volatile("pxor %mm4,%mm6");
	asm volatile("vpxor %ymm4,%ymm6,%ymm2");
	asm volatile("vpxord %zmm24,%zmm25,%zmm26");
	asm volatile("vpxorq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 38 10 */
	asm volatile("pblendvb %xmm1,%xmm0");
	asm volatile("vpsrlvw %zmm27,%zmm28,%zmm29");
	asm volatile("vpmovuswb %zmm28,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 11 */
	asm volatile("vpmovusdb %zmm28,%xmm6{%k7}");
	asm volatile("vpsravw %zmm27,%zmm28,%zmm29");
	/* AVX-512: Op code 0f 38 12 */
	asm volatile("vpmovusqb %zmm27,%xmm6{%k7}");
	asm volatile("vpsllvw %zmm27,%zmm28,%zmm29");
	/* AVX-512: Op code 0f 38 13 */
	asm volatile("vcvtph2ps %xmm3,%ymm5");
	asm volatile("vcvtph2ps %ymm5,%zmm27{%k7}");
	asm volatile("vpmovusdw %zmm27,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 14 */
	asm volatile("blendvps %xmm1,%xmm0");
	asm volatile("vpmovusqw %zmm27,%xmm6{%k7}");
	asm volatile("vprorvd %zmm27,%zmm28,%zmm29");
	asm volatile("vprorvq %zmm27,%zmm28,%zmm29");
	/* AVX-512: Op code 0f 38 15 */
	asm volatile("blendvpd %xmm1,%xmm0");
	asm volatile("vpmovusqd %zmm27,%ymm6{%k7}");
	asm volatile("vprolvd %zmm27,%zmm28,%zmm29");
	asm volatile("vprolvq %zmm27,%zmm28,%zmm29");
	/* AVX-512: Op code 0f 38 16 */
	asm volatile("vpermps %ymm4,%ymm6,%ymm2");
	asm volatile("vpermps %ymm24,%ymm26,%ymm22{%k7}");
	asm volatile("vpermpd %ymm24,%ymm26,%ymm22{%k7}");
	/* AVX-512: Op code 0f 38 19 */
	asm volatile("vbroadcastsd %xmm4,%ymm6");
	asm volatile("vbroadcastf32x2 %xmm27,%zmm26");
	/* AVX-512: Op code 0f 38 1a */
	asm volatile("vbroadcastf128 (%rcx),%ymm4");
	asm volatile("vbroadcastf32x4 (%rcx),%zmm26");
	asm volatile("vbroadcastf64x2 (%rcx),%zmm26");
	/* AVX-512: Op code 0f 38 1b */
	asm volatile("vbroadcastf32x8 (%rcx),%zmm27");
	asm volatile("vbroadcastf64x4 (%rcx),%zmm26");
	/* AVX-512: Op code 0f 38 1f */
	asm volatile("vpabsq %zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 20 */
	asm volatile("vpmovsxbw %xmm4,%xmm5");
	asm volatile("vpmovswb %zmm27,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 21 */
	asm volatile("vpmovsxbd %xmm4,%ymm6");
	asm volatile("vpmovsdb %zmm27,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 22 */
	asm volatile("vpmovsxbq %xmm4,%ymm4");
	asm volatile("vpmovsqb %zmm27,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 23 */
	asm volatile("vpmovsxwd %xmm4,%ymm4");
	asm volatile("vpmovsdw %zmm27,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 24 */
	asm volatile("vpmovsxwq %xmm4,%ymm6");
	asm volatile("vpmovsqw %zmm27,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 25 */
	asm volatile("vpmovsxdq %xmm4,%ymm4");
	asm volatile("vpmovsqd %zmm27,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 26 */
	asm volatile("vptestmb %zmm27,%zmm28,%k5");
	asm volatile("vptestmw %zmm27,%zmm28,%k5");
	asm volatile("vptestnmb %zmm26,%zmm27,%k5");
	asm volatile("vptestnmw %zmm26,%zmm27,%k5");
	/* AVX-512: Op code 0f 38 27 */
	asm volatile("vptestmd %zmm27,%zmm28,%k5");
	asm volatile("vptestmq %zmm27,%zmm28,%k5");
	asm volatile("vptestnmd %zmm26,%zmm27,%k5");
	asm volatile("vptestnmq %zmm26,%zmm27,%k5");
	/* AVX-512: Op code 0f 38 28 */
	asm volatile("vpmuldq %ymm4,%ymm6,%ymm2");
	asm volatile("vpmovm2b %k5,%zmm28");
	asm volatile("vpmovm2w %k5,%zmm28");
	/* AVX-512: Op code 0f 38 29 */
	asm volatile("vpcmpeqq %ymm4,%ymm6,%ymm2");
	asm volatile("vpmovb2m %zmm28,%k5");
	asm volatile("vpmovw2m %zmm28,%k5");
	/* AVX-512: Op code 0f 38 2a */
	asm volatile("vmovntdqa (%rcx),%ymm4");
	asm volatile("vpbroadcastmb2q %k6,%zmm30");
	/* AVX-512: Op code 0f 38 2c */
	asm volatile("vmaskmovps (%rcx),%ymm4,%ymm6");
	asm volatile("vscalefps %zmm24,%zmm25,%zmm26");
	asm volatile("vscalefpd %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 38 2d */
	asm volatile("vmaskmovpd (%rcx),%ymm4,%ymm6");
	asm volatile("vscalefss %xmm24,%xmm25,%xmm26{%k7}");
	asm volatile("vscalefsd %xmm24,%xmm25,%xmm26{%k7}");
	/* AVX-512: Op code 0f 38 30 */
	asm volatile("vpmovzxbw %xmm4,%ymm4");
	asm volatile("vpmovwb %zmm27,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 31 */
	asm volatile("vpmovzxbd %xmm4,%ymm6");
	asm volatile("vpmovdb %zmm27,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 32 */
	asm volatile("vpmovzxbq %xmm4,%ymm4");
	asm volatile("vpmovqb %zmm27,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 33 */
	asm volatile("vpmovzxwd %xmm4,%ymm4");
	asm volatile("vpmovdw %zmm27,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 34 */
	asm volatile("vpmovzxwq %xmm4,%ymm6");
	asm volatile("vpmovqw %zmm27,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 35 */
	asm volatile("vpmovzxdq %xmm4,%ymm4");
	asm volatile("vpmovqd %zmm27,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 36 */
	asm volatile("vpermd %ymm4,%ymm6,%ymm2");
	asm volatile("vpermd %ymm24,%ymm26,%ymm22{%k7}");
	asm volatile("vpermq %ymm24,%ymm26,%ymm22{%k7}");
	/* AVX-512: Op code 0f 38 38 */
	asm volatile("vpminsb %ymm4,%ymm6,%ymm2");
	asm volatile("vpmovm2d %k5,%zmm28");
	asm volatile("vpmovm2q %k5,%zmm28");
	/* AVX-512: Op code 0f 38 39 */
	asm volatile("vpminsd %xmm1,%xmm2,%xmm3");
	asm volatile("vpminsd %zmm24,%zmm25,%zmm26");
	asm volatile("vpminsq %zmm24,%zmm25,%zmm26");
	asm volatile("vpmovd2m %zmm28,%k5");
	asm volatile("vpmovq2m %zmm28,%k5");
	/* AVX-512: Op code 0f 38 3a */
	asm volatile("vpminuw %ymm4,%ymm6,%ymm2");
	asm volatile("vpbroadcastmw2d %k6,%zmm28");
	/* AVX-512: Op code 0f 38 3b */
	asm volatile("vpminud %ymm4,%ymm6,%ymm2");
	asm volatile("vpminud %zmm24,%zmm25,%zmm26");
	asm volatile("vpminuq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 38 3d */
	asm volatile("vpmaxsd %ymm4,%ymm6,%ymm2");
	asm volatile("vpmaxsd %zmm24,%zmm25,%zmm26");
	asm volatile("vpmaxsq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 38 3f */
	asm volatile("vpmaxud %ymm4,%ymm6,%ymm2");
	asm volatile("vpmaxud %zmm24,%zmm25,%zmm26");
	asm volatile("vpmaxuq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 38 40 */
	asm volatile("vpmulld %ymm4,%ymm6,%ymm2");
	asm volatile("vpmulld %zmm24,%zmm25,%zmm26");
	asm volatile("vpmullq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 38 42 */
	asm volatile("vgetexpps %zmm25,%zmm26");
	asm volatile("vgetexppd %zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 43 */
	asm volatile("vgetexpss %xmm24,%xmm25,%xmm26{%k7}");
	asm volatile("vgetexpsd %xmm28,%xmm29,%xmm30{%k7}");
	/* AVX-512: Op code 0f 38 44 */
	asm volatile("vplzcntd %zmm27,%zmm28");
	asm volatile("vplzcntq %zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 46 */
	asm volatile("vpsravd %ymm4,%ymm6,%ymm2");
	asm volatile("vpsravd %zmm24,%zmm25,%zmm26");
	asm volatile("vpsravq %zmm24,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 38 4c */
	asm volatile("vrcp14ps %zmm25,%zmm26");
	asm volatile("vrcp14pd %zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 4d */
	asm volatile("vrcp14ss %xmm24,%xmm25,%xmm26{%k7}");
	asm volatile("vrcp14sd %xmm24,%xmm25,%xmm26{%k7}");
	/* AVX-512: Op code 0f 38 4e */
	asm volatile("vrsqrt14ps %zmm25,%zmm26");
	asm volatile("vrsqrt14pd %zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 4f */
	asm volatile("vrsqrt14ss %xmm24,%xmm25,%xmm26{%k7}");
	asm volatile("vrsqrt14sd %xmm24,%xmm25,%xmm26{%k7}");
	/* AVX-512: Op code 0f 38 59 */
	asm volatile("vpbroadcastq %xmm4,%xmm6");
	asm volatile("vbroadcasti32x2 %xmm27,%zmm26");
	/* AVX-512: Op code 0f 38 5a */
	asm volatile("vbroadcasti128 (%rcx),%ymm4");
	asm volatile("vbroadcasti32x4 (%rcx),%zmm26");
	asm volatile("vbroadcasti64x2 (%rcx),%zmm26");
	/* AVX-512: Op code 0f 38 5b */
	asm volatile("vbroadcasti32x8 (%rcx),%zmm28");
	asm volatile("vbroadcasti64x4 (%rcx),%zmm26");
	/* AVX-512: Op code 0f 38 64 */
	asm volatile("vpblendmd %zmm26,%zmm27,%zmm28");
	asm volatile("vpblendmq %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 65 */
	asm volatile("vblendmps %zmm24,%zmm25,%zmm26");
	asm volatile("vblendmpd %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 66 */
	asm volatile("vpblendmb %zmm26,%zmm27,%zmm28");
	asm volatile("vpblendmw %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 75 */
	asm volatile("vpermi2b %zmm24,%zmm25,%zmm26");
	asm volatile("vpermi2w %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 76 */
	asm volatile("vpermi2d %zmm26,%zmm27,%zmm28");
	asm volatile("vpermi2q %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 77 */
	asm volatile("vpermi2ps %zmm26,%zmm27,%zmm28");
	asm volatile("vpermi2pd %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 7a */
	asm volatile("vpbroadcastb %eax,%xmm30");
	/* AVX-512: Op code 0f 38 7b */
	asm volatile("vpbroadcastw %eax,%xmm30");
	/* AVX-512: Op code 0f 38 7c */
	asm volatile("vpbroadcastd %eax,%xmm30");
	asm volatile("vpbroadcastq %rax,%zmm30");
	/* AVX-512: Op code 0f 38 7d */
	asm volatile("vpermt2b %zmm26,%zmm27,%zmm28");
	asm volatile("vpermt2w %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 7e */
	asm volatile("vpermt2d %zmm26,%zmm27,%zmm28");
	asm volatile("vpermt2q %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 7f */
	asm volatile("vpermt2ps %zmm26,%zmm27,%zmm28");
	asm volatile("vpermt2pd %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 83 */
	asm volatile("vpmultishiftqb %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 88 */
	asm volatile("vexpandps (%rcx),%zmm26");
	asm volatile("vexpandpd (%rcx),%zmm28");
	/* AVX-512: Op code 0f 38 89 */
	asm volatile("vpexpandd (%rcx),%zmm28");
	asm volatile("vpexpandq (%rcx),%zmm26");
	/* AVX-512: Op code 0f 38 8a */
	asm volatile("vcompressps %zmm28,(%rcx)");
	asm volatile("vcompresspd %zmm28,(%rcx)");
	/* AVX-512: Op code 0f 38 8b */
	asm volatile("vpcompressd %zmm28,(%rcx)");
	asm volatile("vpcompressq %zmm26,(%rcx)");
	/* AVX-512: Op code 0f 38 8d */
	asm volatile("vpermb %zmm26,%zmm27,%zmm28");
	asm volatile("vpermw %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 90 */
	asm volatile("vpgatherdd %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
	asm volatile("vpgatherdq %xmm2,0x04(%rbp,%xmm7,2),%xmm1");
	asm volatile("vpgatherdd 0x7b(%rbp,%zmm27,8),%zmm26{%k1}");
	asm volatile("vpgatherdq 0x7b(%rbp,%ymm27,8),%zmm26{%k1}");
	/* AVX-512: Op code 0f 38 91 */
	asm volatile("vpgatherqd %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
	asm volatile("vpgatherqq %xmm2,0x02(%rbp,%xmm7,2),%xmm1");
	asm volatile("vpgatherqd 0x7b(%rbp,%zmm27,8),%ymm26{%k1}");
	asm volatile("vpgatherqq 0x7b(%rbp,%zmm27,8),%zmm26{%k1}");
	/* AVX-512: Op code 0f 38 a0 */
	asm volatile("vpscatterdd %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
	asm volatile("vpscatterdq %zmm26,0x7b(%rbp,%ymm27,8){%k1}");
	/* AVX-512: Op code 0f 38 a1 */
	asm volatile("vpscatterqd %ymm6,0x7b(%rbp,%zmm29,8){%k1}");
	asm volatile("vpscatterqq %ymm6,0x7b(%rbp,%ymm27,8){%k1}");
	/* AVX-512: Op code 0f 38 a2 */
	asm volatile("vscatterdps %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
	asm volatile("vscatterdpd %zmm28,0x7b(%rbp,%ymm27,8){%k1}");
	/* AVX-512: Op code 0f 38 a3 */
	asm volatile("vscatterqps %ymm6,0x7b(%rbp,%zmm29,8){%k1}");
	asm volatile("vscatterqpd %zmm28,0x7b(%rbp,%zmm29,8){%k1}");
	/* AVX-512: Op code 0f 38 b4 */
	asm volatile("vpmadd52luq %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 b5 */
	asm volatile("vpmadd52huq %zmm26,%zmm27,%zmm28");
	/* AVX-512: Op code 0f 38 c4 */
	asm volatile("vpconflictd %zmm26,%zmm27");
	asm volatile("vpconflictq %zmm26,%zmm27");
	/* AVX-512: Op code 0f 38 c8 */
	asm volatile("vexp2ps %zmm29,%zmm30");
	asm volatile("vexp2pd %zmm26,%zmm27");
	/* AVX-512: Op code 0f 38 ca */
	asm volatile("vrcp28ps %zmm29,%zmm30");
	asm volatile("vrcp28pd %zmm26,%zmm27");
	/* AVX-512: Op code 0f 38 cb */
	asm volatile("vrcp28ss %xmm28,%xmm29,%xmm30{%k7}");
	asm volatile("vrcp28sd %xmm25,%xmm26,%xmm27{%k7}");
	/* AVX-512: Op code 0f 38 cc */
	asm volatile("vrsqrt28ps %zmm29,%zmm30");
	asm volatile("vrsqrt28pd %zmm26,%zmm27");
	/* AVX-512: Op code 0f 38 cd */
	asm volatile("vrsqrt28ss %xmm28,%xmm29,%xmm30{%k7}");
	asm volatile("vrsqrt28sd %xmm25,%xmm26,%xmm27{%k7}");
	/* AVX-512: Op code 0f 3a 03 */
	asm volatile("valignd $0x12,%zmm28,%zmm29,%zmm30");
	asm volatile("valignq $0x12,%zmm25,%zmm26,%zmm27");
	/* AVX-512: Op code 0f 3a 08 */
	asm volatile("vroundps $0x5,%ymm6,%ymm2");
	asm volatile("vrndscaleps $0x12,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 3a 09 */
	asm volatile("vroundpd $0x5,%ymm6,%ymm2");
	asm volatile("vrndscalepd $0x12,%zmm25,%zmm26");
	/* AVX-512: Op code 0f 3a 0a */
	asm volatile("vroundss $0x5,%xmm4,%xmm6,%xmm2");
	asm volatile("vrndscaless $0x12,%xmm24,%xmm25,%xmm26{%k7}");
	/* AVX-512: Op code 0f 3a 0b */
	asm volatile("vroundsd $0x5,%xmm4,%xmm6,%xmm2");
	asm volatile("vrndscalesd $0x12,%xmm24,%xmm25,%xmm26{%k7}");
	/* AVX-512: Op code 0f 3a 18 */
	asm volatile("vinsertf128 $0x5,%xmm4,%ymm4,%ymm6");
	asm volatile("vinsertf32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
	asm volatile("vinsertf64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
	/* AVX-512: Op code 0f 3a 19 */
	asm volatile("vextractf128 $0x5,%ymm4,%xmm4");
	asm volatile("vextractf32x4 $0x12,%zmm25,%xmm26{%k7}");
	asm volatile("vextractf64x2 $0x12,%zmm25,%xmm26{%k7}");
	/* AVX-512: Op code 0f 3a 1a */
	asm volatile("vinsertf32x8 $0x12,%ymm25,%zmm26,%zmm27{%k7}");
	asm volatile("vinsertf64x4 $0x12,%ymm28,%zmm29,%zmm30{%k7}");
	/* AVX-512: Op code 0f 3a 1b */
	asm volatile("vextractf32x8 $0x12,%zmm29,%ymm30{%k7}");
	asm volatile("vextractf64x4 $0x12,%zmm26,%ymm27{%k7}");
	/* AVX-512: Op code 0f 3a 1e */
	asm volatile("vpcmpud $0x12,%zmm29,%zmm30,%k5");
	asm volatile("vpcmpuq $0x12,%zmm26,%zmm27,%k5");
	/* AVX-512: Op code 0f 3a 1f */
	asm volatile("vpcmpd $0x12,%zmm29,%zmm30,%k5");
	asm volatile("vpcmpq $0x12,%zmm26,%zmm27,%k5");
	/* AVX-512: Op code 0f 3a 23 */
	asm volatile("vshuff32x4 $0x12,%zmm28,%zmm29,%zmm30");
	asm volatile("vshuff64x2 $0x12,%zmm25,%zmm26,%zmm27");
	/* AVX-512: Op code 0f 3a 25 */
	asm volatile("vpternlogd $0x12,%zmm28,%zmm29,%zmm30");
	asm volatile("vpternlogq $0x12,%zmm28,%zmm29,%zmm30");
	/* AVX-512: Op code 0f 3a 26 */
	asm volatile("vgetmantps $0x12,%zmm26,%zmm27");
	asm volatile("vgetmantpd $0x12,%zmm29,%zmm30");
	/* AVX-512: Op code 0f 3a 27 */
	asm volatile("vgetmantss $0x12,%xmm25,%xmm26,%xmm27{%k7}");
	asm volatile("vgetmantsd $0x12,%xmm28,%xmm29,%xmm30{%k7}");
	/* AVX-512: Op code 0f 3a 38 */
	asm volatile("vinserti128 $0x5,%xmm4,%ymm4,%ymm6");
	asm volatile("vinserti32x4 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
	asm volatile("vinserti64x2 $0x12,%xmm24,%zmm25,%zmm26{%k7}");
	/* AVX-512: Op code 0f 3a 39 */
	asm volatile("vextracti128 $0x5,%ymm4,%xmm6");
	asm volatile("vextracti32x4 $0x12,%zmm25,%xmm26{%k7}");
	asm volatile("vextracti64x2 $0x12,%zmm25,%xmm26{%k7}");
	/* AVX-512: Op code 0f 3a 3a */
	asm volatile("vinserti32x8 $0x12,%ymm28,%zmm29,%zmm30{%k7}");
	asm volatile("vinserti64x4 $0x12,%ymm25,%zmm26,%zmm27{%k7}");
	/* AVX-512: Op code 0f 3a 3b */
	asm volatile("vextracti32x8 $0x12,%zmm29,%ymm30{%k7}");
	asm volatile("vextracti64x4 $0x12,%zmm26,%ymm27{%k7}");
	/* AVX-512: Op code 0f 3a 3e */
	asm volatile("vpcmpub $0x12,%zmm29,%zmm30,%k5");
	asm volatile("vpcmpuw $0x12,%zmm26,%zmm27,%k5");
	/* AVX-512: Op code 0f 3a 3f */
	asm volatile("vpcmpb $0x12,%zmm29,%zmm30,%k5");
	asm volatile("vpcmpw $0x12,%zmm26,%zmm27,%k5");
	/* AVX-512: Op code 0f 3a 42 */
	asm volatile("vmpsadbw $0x5,%ymm4,%ymm6,%ymm2");
	asm volatile("vdbpsadbw $0x12,%zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 3a 43 */
	asm volatile("vshufi32x4 $0x12,%zmm25,%zmm26,%zmm27");
	asm volatile("vshufi64x2 $0x12,%zmm28,%zmm29,%zmm30");
	/* AVX-512: Op code 0f 3a 50 */
	asm volatile("vrangeps $0x12,%zmm25,%zmm26,%zmm27");
	asm volatile("vrangepd $0x12,%zmm28,%zmm29,%zmm30");
	/* AVX-512: Op code 0f 3a 51 */
	asm volatile("vrangess $0x12,%xmm25,%xmm26,%xmm27");
	asm volatile("vrangesd $0x12,%xmm28,%xmm29,%xmm30");
	/* AVX-512: Op code 0f 3a 54 */
	asm volatile("vfixupimmps $0x12,%zmm28,%zmm29,%zmm30");
	asm volatile("vfixupimmpd $0x12,%zmm25,%zmm26,%zmm27");
	/* AVX-512: Op code 0f 3a 55 */
	asm volatile("vfixupimmss $0x12,%xmm28,%xmm29,%xmm30{%k7}");
	asm volatile("vfixupimmsd $0x12,%xmm25,%xmm26,%xmm27{%k7}");
	/* AVX-512: Op code 0f 3a 56 */
	asm volatile("vreduceps $0x12,%zmm26,%zmm27");
	asm volatile("vreducepd $0x12,%zmm29,%zmm30");
	/* AVX-512: Op code 0f 3a 57 */
	asm volatile("vreducess $0x12,%xmm25,%xmm26,%xmm27");
	asm volatile("vreducesd $0x12,%xmm28,%xmm29,%xmm30");
	/* AVX-512: Op code 0f 3a 66 */
	asm volatile("vfpclassps $0x12,%zmm27,%k5");
	asm volatile("vfpclasspd $0x12,%zmm30,%k5");
	/* AVX-512: Op code 0f 3a 67 */
	asm volatile("vfpclassss $0x12,%xmm27,%k5");
	asm volatile("vfpclasssd $0x12,%xmm30,%k5");
	/* AVX-512: Op code 0f 72 (Grp13) */
	asm volatile("vprord $0x12,%zmm25,%zmm26");
	asm volatile("vprorq $0x12,%zmm25,%zmm26");
	asm volatile("vprold $0x12,%zmm29,%zmm30");
	asm volatile("vprolq $0x12,%zmm29,%zmm30");
	asm volatile("psrad $0x2,%mm6");
	asm volatile("vpsrad $0x5,%ymm6,%ymm2");
	asm volatile("vpsrad $0x5,%zmm26,%zmm22");
	asm volatile("vpsraq $0x5,%zmm26,%zmm22");
	/* AVX-512: Op code 0f 38 c6 (Grp18) */
	asm volatile("vgatherpf0dps 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vgatherpf0dpd 0x7b(%r14,%ymm31,8){%k1}");
	asm volatile("vgatherpf1dps 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vgatherpf1dpd 0x7b(%r14,%ymm31,8){%k1}");
	asm volatile("vscatterpf0dps 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vscatterpf0dpd 0x7b(%r14,%ymm31,8){%k1}");
	asm volatile("vscatterpf1dps 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vscatterpf1dpd 0x7b(%r14,%ymm31,8){%k1}");
	/* AVX-512: Op code 0f 38 c7 (Grp19) */
	asm volatile("vgatherpf0qps 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vgatherpf0qpd 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vgatherpf1qps 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vgatherpf1qpd 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vscatterpf0qps 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vscatterpf0qpd 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vscatterpf1qps 0x7b(%r14,%zmm31,8){%k1}");
	asm volatile("vscatterpf1qpd 0x7b(%r14,%zmm31,8){%k1}");
	/* AVX-512: Examples */
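	/*
	 * These examples exercise the main EVEX operand forms: a write
	 * mask ({%k7}), zeroing-masking ({z}), embedded rounding control
	 * ({rn-sae} and friends), broadcast of a memory source
	 * ({1to8}/{1to16}), and displacements such as 0x1fc0 and 0x3f8
	 * that fit EVEX's compressed (scaled) disp8 form.
	 */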
	asm volatile("vaddpd %zmm28,%zmm29,%zmm30");
	asm volatile("vaddpd %zmm28,%zmm29,%zmm30{%k7}");
	asm volatile("vaddpd %zmm28,%zmm29,%zmm30{%k7}{z}");
	asm volatile("vaddpd {rn-sae},%zmm28,%zmm29,%zmm30");
	asm volatile("vaddpd {ru-sae},%zmm28,%zmm29,%zmm30");
	asm volatile("vaddpd {rd-sae},%zmm28,%zmm29,%zmm30");
	asm volatile("vaddpd {rz-sae},%zmm28,%zmm29,%zmm30");
	asm volatile("vaddpd (%rcx),%zmm29,%zmm30");
	asm volatile("vaddpd 0x123(%rax,%r14,8),%zmm29,%zmm30");
	asm volatile("vaddpd (%rcx){1to8},%zmm29,%zmm30");
	asm volatile("vaddpd 0x1fc0(%rdx),%zmm29,%zmm30");
	asm volatile("vaddpd 0x3f8(%rdx){1to8},%zmm29,%zmm30");
	asm volatile("vcmpeq_uqps 0x1fc(%rdx){1to16},%zmm30,%k5");
	asm volatile("vcmpltsd 0x123(%rax,%r14,8),%xmm29,%k5{%k7}");
	asm volatile("vcmplesd {sae},%xmm28,%xmm29,%k5{%k7}");
	asm volatile("vgetmantss $0x5b,0x123(%rax,%r14,8),%xmm29,%xmm30{%k7}");
  618. /* bndmk m64, bnd */
  619. asm volatile("bndmk (%rax), %bnd0");
  620. asm volatile("bndmk (%r8), %bnd0");
  621. asm volatile("bndmk (0x12345678), %bnd0");
  622. asm volatile("bndmk (%rax), %bnd3");
  623. asm volatile("bndmk (%rcx,%rax,1), %bnd0");
  624. asm volatile("bndmk 0x12345678(,%rax,1), %bnd0");
  625. asm volatile("bndmk (%rax,%rcx,1), %bnd0");
  626. asm volatile("bndmk (%rax,%rcx,8), %bnd0");
  627. asm volatile("bndmk 0x12(%rax), %bnd0");
  628. asm volatile("bndmk 0x12(%rbp), %bnd0");
  629. asm volatile("bndmk 0x12(%rcx,%rax,1), %bnd0");
  630. asm volatile("bndmk 0x12(%rbp,%rax,1), %bnd0");
  631. asm volatile("bndmk 0x12(%rax,%rcx,1), %bnd0");
  632. asm volatile("bndmk 0x12(%rax,%rcx,8), %bnd0");
  633. asm volatile("bndmk 0x12345678(%rax), %bnd0");
  634. asm volatile("bndmk 0x12345678(%rbp), %bnd0");
  635. asm volatile("bndmk 0x12345678(%rcx,%rax,1), %bnd0");
  636. asm volatile("bndmk 0x12345678(%rbp,%rax,1), %bnd0");
  637. asm volatile("bndmk 0x12345678(%rax,%rcx,1), %bnd0");
  638. asm volatile("bndmk 0x12345678(%rax,%rcx,8), %bnd0");
  639. /* bndcl r/m64, bnd */
  640. asm volatile("bndcl (%rax), %bnd0");
  641. asm volatile("bndcl (%r8), %bnd0");
  642. asm volatile("bndcl (0x12345678), %bnd0");
  643. asm volatile("bndcl (%rax), %bnd3");
  644. asm volatile("bndcl (%rcx,%rax,1), %bnd0");
  645. asm volatile("bndcl 0x12345678(,%rax,1), %bnd0");
  646. asm volatile("bndcl (%rax,%rcx,1), %bnd0");
  647. asm volatile("bndcl (%rax,%rcx,8), %bnd0");
  648. asm volatile("bndcl 0x12(%rax), %bnd0");
  649. asm volatile("bndcl 0x12(%rbp), %bnd0");
  650. asm volatile("bndcl 0x12(%rcx,%rax,1), %bnd0");
  651. asm volatile("bndcl 0x12(%rbp,%rax,1), %bnd0");
  652. asm volatile("bndcl 0x12(%rax,%rcx,1), %bnd0");
  653. asm volatile("bndcl 0x12(%rax,%rcx,8), %bnd0");
  654. asm volatile("bndcl 0x12345678(%rax), %bnd0");
  655. asm volatile("bndcl 0x12345678(%rbp), %bnd0");
  656. asm volatile("bndcl 0x12345678(%rcx,%rax,1), %bnd0");
  657. asm volatile("bndcl 0x12345678(%rbp,%rax,1), %bnd0");
  658. asm volatile("bndcl 0x12345678(%rax,%rcx,1), %bnd0");
  659. asm volatile("bndcl 0x12345678(%rax,%rcx,8), %bnd0");
  660. asm volatile("bndcl %rax, %bnd0");
  661. /* bndcu r/m64, bnd */
  662. asm volatile("bndcu (%rax), %bnd0");
  663. asm volatile("bndcu (%r8), %bnd0");
  664. asm volatile("bndcu (0x12345678), %bnd0");
  665. asm volatile("bndcu (%rax), %bnd3");
  666. asm volatile("bndcu (%rcx,%rax,1), %bnd0");
  667. asm volatile("bndcu 0x12345678(,%rax,1), %bnd0");
  668. asm volatile("bndcu (%rax,%rcx,1), %bnd0");
  669. asm volatile("bndcu (%rax,%rcx,8), %bnd0");
  670. asm volatile("bndcu 0x12(%rax), %bnd0");
  671. asm volatile("bndcu 0x12(%rbp), %bnd0");
  672. asm volatile("bndcu 0x12(%rcx,%rax,1), %bnd0");
  673. asm volatile("bndcu 0x12(%rbp,%rax,1), %bnd0");
  674. asm volatile("bndcu 0x12(%rax,%rcx,1), %bnd0");
  675. asm volatile("bndcu 0x12(%rax,%rcx,8), %bnd0");
  676. asm volatile("bndcu 0x12345678(%rax), %bnd0");
  677. asm volatile("bndcu 0x12345678(%rbp), %bnd0");
  678. asm volatile("bndcu 0x12345678(%rcx,%rax,1), %bnd0");
  679. asm volatile("bndcu 0x12345678(%rbp,%rax,1), %bnd0");
  680. asm volatile("bndcu 0x12345678(%rax,%rcx,1), %bnd0");
  681. asm volatile("bndcu 0x12345678(%rax,%rcx,8), %bnd0");
  682. asm volatile("bndcu %rax, %bnd0");
  683. /* bndcn r/m64, bnd */
  684. asm volatile("bndcn (%rax), %bnd0");
  685. asm volatile("bndcn (%r8), %bnd0");
  686. asm volatile("bndcn (0x12345678), %bnd0");
  687. asm volatile("bndcn (%rax), %bnd3");
  688. asm volatile("bndcn (%rcx,%rax,1), %bnd0");
  689. asm volatile("bndcn 0x12345678(,%rax,1), %bnd0");
  690. asm volatile("bndcn (%rax,%rcx,1), %bnd0");
  691. asm volatile("bndcn (%rax,%rcx,8), %bnd0");
  692. asm volatile("bndcn 0x12(%rax), %bnd0");
  693. asm volatile("bndcn 0x12(%rbp), %bnd0");
  694. asm volatile("bndcn 0x12(%rcx,%rax,1), %bnd0");
  695. asm volatile("bndcn 0x12(%rbp,%rax,1), %bnd0");
  696. asm volatile("bndcn 0x12(%rax,%rcx,1), %bnd0");
  697. asm volatile("bndcn 0x12(%rax,%rcx,8), %bnd0");
  698. asm volatile("bndcn 0x12345678(%rax), %bnd0");
  699. asm volatile("bndcn 0x12345678(%rbp), %bnd0");
  700. asm volatile("bndcn 0x12345678(%rcx,%rax,1), %bnd0");
  701. asm volatile("bndcn 0x12345678(%rbp,%rax,1), %bnd0");
  702. asm volatile("bndcn 0x12345678(%rax,%rcx,1), %bnd0");
  703. asm volatile("bndcn 0x12345678(%rax,%rcx,8), %bnd0");
  704. asm volatile("bndcn %rax, %bnd0");
  705. /* bndmov m128, bnd */
  706. asm volatile("bndmov (%rax), %bnd0");
  707. asm volatile("bndmov (%r8), %bnd0");
  708. asm volatile("bndmov (0x12345678), %bnd0");
  709. asm volatile("bndmov (%rax), %bnd3");
  710. asm volatile("bndmov (%rcx,%rax,1), %bnd0");
  711. asm volatile("bndmov 0x12345678(,%rax,1), %bnd0");
  712. asm volatile("bndmov (%rax,%rcx,1), %bnd0");
  713. asm volatile("bndmov (%rax,%rcx,8), %bnd0");
  714. asm volatile("bndmov 0x12(%rax), %bnd0");
  715. asm volatile("bndmov 0x12(%rbp), %bnd0");
  716. asm volatile("bndmov 0x12(%rcx,%rax,1), %bnd0");
  717. asm volatile("bndmov 0x12(%rbp,%rax,1), %bnd0");
  718. asm volatile("bndmov 0x12(%rax,%rcx,1), %bnd0");
  719. asm volatile("bndmov 0x12(%rax,%rcx,8), %bnd0");
  720. asm volatile("bndmov 0x12345678(%rax), %bnd0");
  721. asm volatile("bndmov 0x12345678(%rbp), %bnd0");
  722. asm volatile("bndmov 0x12345678(%rcx,%rax,1), %bnd0");
  723. asm volatile("bndmov 0x12345678(%rbp,%rax,1), %bnd0");
  724. asm volatile("bndmov 0x12345678(%rax,%rcx,1), %bnd0");
  725. asm volatile("bndmov 0x12345678(%rax,%rcx,8), %bnd0");
  726. /* bndmov bnd, m128 */
  727. asm volatile("bndmov %bnd0, (%rax)");
  728. asm volatile("bndmov %bnd0, (%r8)");
  729. asm volatile("bndmov %bnd0, (0x12345678)");
  730. asm volatile("bndmov %bnd3, (%rax)");
  731. asm volatile("bndmov %bnd0, (%rcx,%rax,1)");
  732. asm volatile("bndmov %bnd0, 0x12345678(,%rax,1)");
  733. asm volatile("bndmov %bnd0, (%rax,%rcx,1)");
  734. asm volatile("bndmov %bnd0, (%rax,%rcx,8)");
  735. asm volatile("bndmov %bnd0, 0x12(%rax)");
  736. asm volatile("bndmov %bnd0, 0x12(%rbp)");
  737. asm volatile("bndmov %bnd0, 0x12(%rcx,%rax,1)");
  738. asm volatile("bndmov %bnd0, 0x12(%rbp,%rax,1)");
  739. asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,1)");
  740. asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,8)");
  741. asm volatile("bndmov %bnd0, 0x12345678(%rax)");
  742. asm volatile("bndmov %bnd0, 0x12345678(%rbp)");
  743. asm volatile("bndmov %bnd0, 0x12345678(%rcx,%rax,1)");
  744. asm volatile("bndmov %bnd0, 0x12345678(%rbp,%rax,1)");
  745. asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,1)");
  746. asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,8)");
  747. /* bndmov bnd2, bnd1 */
  748. asm volatile("bndmov %bnd0, %bnd1");
  749. asm volatile("bndmov %bnd1, %bnd0");
  750. /* bndldx mib, bnd */
  751. asm volatile("bndldx (%rax), %bnd0");
  752. asm volatile("bndldx (%r8), %bnd0");
  753. asm volatile("bndldx (0x12345678), %bnd0");
  754. asm volatile("bndldx (%rax), %bnd3");
  755. asm volatile("bndldx (%rcx,%rax,1), %bnd0");
  756. asm volatile("bndldx 0x12345678(,%rax,1), %bnd0");
  757. asm volatile("bndldx (%rax,%rcx,1), %bnd0");
  758. asm volatile("bndldx 0x12(%rax), %bnd0");
  759. asm volatile("bndldx 0x12(%rbp), %bnd0");
  760. asm volatile("bndldx 0x12(%rcx,%rax,1), %bnd0");
  761. asm volatile("bndldx 0x12(%rbp,%rax,1), %bnd0");
  762. asm volatile("bndldx 0x12(%rax,%rcx,1), %bnd0");
  763. asm volatile("bndldx 0x12345678(%rax), %bnd0");
  764. asm volatile("bndldx 0x12345678(%rbp), %bnd0");
  765. asm volatile("bndldx 0x12345678(%rcx,%rax,1), %bnd0");
  766. asm volatile("bndldx 0x12345678(%rbp,%rax,1), %bnd0");
  767. asm volatile("bndldx 0x12345678(%rax,%rcx,1), %bnd0");
  768. /* bndstx bnd, mib */
  769. asm volatile("bndstx %bnd0, (%rax)");
  770. asm volatile("bndstx %bnd0, (%r8)");
  771. asm volatile("bndstx %bnd0, (0x12345678)");
  772. asm volatile("bndstx %bnd3, (%rax)");
  773. asm volatile("bndstx %bnd0, (%rcx,%rax,1)");
  774. asm volatile("bndstx %bnd0, 0x12345678(,%rax,1)");
  775. asm volatile("bndstx %bnd0, (%rax,%rcx,1)");
  776. asm volatile("bndstx %bnd0, 0x12(%rax)");
  777. asm volatile("bndstx %bnd0, 0x12(%rbp)");
  778. asm volatile("bndstx %bnd0, 0x12(%rcx,%rax,1)");
  779. asm volatile("bndstx %bnd0, 0x12(%rbp,%rax,1)");
  780. asm volatile("bndstx %bnd0, 0x12(%rax,%rcx,1)");
  781. asm volatile("bndstx %bnd0, 0x12345678(%rax)");
  782. asm volatile("bndstx %bnd0, 0x12345678(%rbp)");
  783. asm volatile("bndstx %bnd0, 0x12345678(%rcx,%rax,1)");
  784. asm volatile("bndstx %bnd0, 0x12345678(%rbp,%rax,1)");
  785. asm volatile("bndstx %bnd0, 0x12345678(%rax,%rcx,1)");
  786. /* bnd prefix on call, ret, jmp and all jcc */
  787. asm volatile("bnd call label1"); /* Expecting: call unconditional 0 */
  788. asm volatile("bnd call *(%eax)"); /* Expecting: call indirect 0 */
  789. asm volatile("bnd ret"); /* Expecting: ret indirect 0 */
  790. asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0 */
  791. asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0 */
  792. asm volatile("bnd jmp *(%ecx)"); /* Expecting: jmp indirect 0 */
  793. asm volatile("bnd jne label1"); /* Expecting: jcc conditional 0 */
  794. /* sha1rnds4 imm8, xmm2/m128, xmm1 */
  795. asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
  796. asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
  797. asm volatile("sha1rnds4 $0x91, %xmm8, %xmm0");
  798. asm volatile("sha1rnds4 $0x91, %xmm7, %xmm8");
  799. asm volatile("sha1rnds4 $0x91, %xmm15, %xmm8");
  800. asm volatile("sha1rnds4 $0x91, (%rax), %xmm0");
  801. asm volatile("sha1rnds4 $0x91, (%r8), %xmm0");
  802. asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
  803. asm volatile("sha1rnds4 $0x91, (%rax), %xmm3");
  804. asm volatile("sha1rnds4 $0x91, (%rcx,%rax,1), %xmm0");
  805. asm volatile("sha1rnds4 $0x91, 0x12345678(,%rax,1), %xmm0");
  806. asm volatile("sha1rnds4 $0x91, (%rax,%rcx,1), %xmm0");
  807. asm volatile("sha1rnds4 $0x91, (%rax,%rcx,8), %xmm0");
  808. asm volatile("sha1rnds4 $0x91, 0x12(%rax), %xmm0");
  809. asm volatile("sha1rnds4 $0x91, 0x12(%rbp), %xmm0");
  810. asm volatile("sha1rnds4 $0x91, 0x12(%rcx,%rax,1), %xmm0");
  811. asm volatile("sha1rnds4 $0x91, 0x12(%rbp,%rax,1), %xmm0");
  812. asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,1), %xmm0");
  813. asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,8), %xmm0");
  814. asm volatile("sha1rnds4 $0x91, 0x12345678(%rax), %xmm0");
  815. asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp), %xmm0");
  816. asm volatile("sha1rnds4 $0x91, 0x12345678(%rcx,%rax,1), %xmm0");
  817. asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp,%rax,1), %xmm0");
  818. asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,1), %xmm0");
  819. asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm0");
  820. asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm15");
  821. /* sha1nexte xmm2/m128, xmm1 */
  822. asm volatile("sha1nexte %xmm1, %xmm0");
  823. asm volatile("sha1nexte %xmm7, %xmm2");
  824. asm volatile("sha1nexte %xmm8, %xmm0");
  825. asm volatile("sha1nexte %xmm7, %xmm8");
  826. asm volatile("sha1nexte %xmm15, %xmm8");
  827. asm volatile("sha1nexte (%rax), %xmm0");
  828. asm volatile("sha1nexte (%r8), %xmm0");
  829. asm volatile("sha1nexte (0x12345678), %xmm0");
  830. asm volatile("sha1nexte (%rax), %xmm3");
  831. asm volatile("sha1nexte (%rcx,%rax,1), %xmm0");
  832. asm volatile("sha1nexte 0x12345678(,%rax,1), %xmm0");
  833. asm volatile("sha1nexte (%rax,%rcx,1), %xmm0");
  834. asm volatile("sha1nexte (%rax,%rcx,8), %xmm0");
  835. asm volatile("sha1nexte 0x12(%rax), %xmm0");
  836. asm volatile("sha1nexte 0x12(%rbp), %xmm0");
  837. asm volatile("sha1nexte 0x12(%rcx,%rax,1), %xmm0");
  838. asm volatile("sha1nexte 0x12(%rbp,%rax,1), %xmm0");
  839. asm volatile("sha1nexte 0x12(%rax,%rcx,1), %xmm0");
  840. asm volatile("sha1nexte 0x12(%rax,%rcx,8), %xmm0");
  841. asm volatile("sha1nexte 0x12345678(%rax), %xmm0");
  842. asm volatile("sha1nexte 0x12345678(%rbp), %xmm0");
  843. asm volatile("sha1nexte 0x12345678(%rcx,%rax,1), %xmm0");
  844. asm volatile("sha1nexte 0x12345678(%rbp,%rax,1), %xmm0");
  845. asm volatile("sha1nexte 0x12345678(%rax,%rcx,1), %xmm0");
  846. asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm0");
  847. asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm15");
  848. /* sha1msg1 xmm2/m128, xmm1 */
  849. asm volatile("sha1msg1 %xmm1, %xmm0");
  850. asm volatile("sha1msg1 %xmm7, %xmm2");
  851. asm volatile("sha1msg1 %xmm8, %xmm0");
  852. asm volatile("sha1msg1 %xmm7, %xmm8");
  853. asm volatile("sha1msg1 %xmm15, %xmm8");
  854. asm volatile("sha1msg1 (%rax), %xmm0");
  855. asm volatile("sha1msg1 (%r8), %xmm0");
  856. asm volatile("sha1msg1 (0x12345678), %xmm0");
  857. asm volatile("sha1msg1 (%rax), %xmm3");
  858. asm volatile("sha1msg1 (%rcx,%rax,1), %xmm0");
  859. asm volatile("sha1msg1 0x12345678(,%rax,1), %xmm0");
  860. asm volatile("sha1msg1 (%rax,%rcx,1), %xmm0");
  861. asm volatile("sha1msg1 (%rax,%rcx,8), %xmm0");
  862. asm volatile("sha1msg1 0x12(%rax), %xmm0");
  863. asm volatile("sha1msg1 0x12(%rbp), %xmm0");
  864. asm volatile("sha1msg1 0x12(%rcx,%rax,1), %xmm0");
  865. asm volatile("sha1msg1 0x12(%rbp,%rax,1), %xmm0");
  866. asm volatile("sha1msg1 0x12(%rax,%rcx,1), %xmm0");
  867. asm volatile("sha1msg1 0x12(%rax,%rcx,8), %xmm0");
  868. asm volatile("sha1msg1 0x12345678(%rax), %xmm0");
  869. asm volatile("sha1msg1 0x12345678(%rbp), %xmm0");
  870. asm volatile("sha1msg1 0x12345678(%rcx,%rax,1), %xmm0");
  871. asm volatile("sha1msg1 0x12345678(%rbp,%rax,1), %xmm0");
  872. asm volatile("sha1msg1 0x12345678(%rax,%rcx,1), %xmm0");
  873. asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm0");
  874. asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm15");
  875. /* sha1msg2 xmm2/m128, xmm1 */
  876. asm volatile("sha1msg2 %xmm1, %xmm0");
  877. asm volatile("sha1msg2 %xmm7, %xmm2");
  878. asm volatile("sha1msg2 %xmm8, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm8");
	asm volatile("sha1msg2 %xmm15, %xmm8");
	asm volatile("sha1msg2 (%rax), %xmm0");
	asm volatile("sha1msg2 (%r8), %xmm0");
	asm volatile("sha1msg2 (0x12345678), %xmm0");
	asm volatile("sha1msg2 (%rax), %xmm3");
	asm volatile("sha1msg2 (%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1msg2 (%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 (%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12(%rax), %xmm0");
	asm volatile("sha1msg2 0x12(%rbp), %xmm0");
	asm volatile("sha1msg2 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rbp), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm15");
	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
	/* Note sha256rnds2 has an implicit operand 'xmm0' */
	asm volatile("sha256rnds2 %xmm4, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm2");
	asm volatile("sha256rnds2 %xmm8, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm8");
	asm volatile("sha256rnds2 %xmm15, %xmm8");
	asm volatile("sha256rnds2 (%rax), %xmm1");
	asm volatile("sha256rnds2 (%r8), %xmm1");
	asm volatile("sha256rnds2 (0x12345678), %xmm1");
	asm volatile("sha256rnds2 (%rax), %xmm3");
	asm volatile("sha256rnds2 (%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(,%rax,1), %xmm1");
	asm volatile("sha256rnds2 (%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 (%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax), %xmm1");
	asm volatile("sha256rnds2 0x12(%rbp), %xmm1");
	asm volatile("sha256rnds2 0x12(%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rbp,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rbp), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rbp,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm15");
	/* sha256msg1 xmm2/m128, xmm1 */
	asm volatile("sha256msg1 %xmm1, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm2");
	asm volatile("sha256msg1 %xmm8, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm8");
	asm volatile("sha256msg1 %xmm15, %xmm8");
	asm volatile("sha256msg1 (%rax), %xmm0");
	asm volatile("sha256msg1 (%r8), %xmm0");
	asm volatile("sha256msg1 (0x12345678), %xmm0");
	asm volatile("sha256msg1 (%rax), %xmm3");
	asm volatile("sha256msg1 (%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha256msg1 (%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 (%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12(%rax), %xmm0");
	asm volatile("sha256msg1 0x12(%rbp), %xmm0");
	asm volatile("sha256msg1 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rbp), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm15");
	/* sha256msg2 xmm2/m128, xmm1 */
	asm volatile("sha256msg2 %xmm1, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm2");
	asm volatile("sha256msg2 %xmm8, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm8");
	asm volatile("sha256msg2 %xmm15, %xmm8");
	asm volatile("sha256msg2 (%rax), %xmm0");
	asm volatile("sha256msg2 (%r8), %xmm0");
	asm volatile("sha256msg2 (0x12345678), %xmm0");
	asm volatile("sha256msg2 (%rax), %xmm3");
	asm volatile("sha256msg2 (%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha256msg2 (%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 (%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12(%rax), %xmm0");
	asm volatile("sha256msg2 0x12(%rbp), %xmm0");
	asm volatile("sha256msg2 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rbp), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm15");
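	/*
	 * The next two groups exercise the 0f ae /6 and /7 opcode slots:
	 * only a 66 prefix separates clflushopt/clwb from the older
	 * clflush/xsaveopt encodings, and the register (mod=11) forms of
	 * the same slots decode as sfence/mfence instead.
	 */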
	/* clflushopt m8 */
	asm volatile("clflushopt (%rax)");
	asm volatile("clflushopt (%r8)");
	asm volatile("clflushopt (0x12345678)");
	asm volatile("clflushopt 0x12345678(%rax,%rcx,8)");
	asm volatile("clflushopt 0x12345678(%r8,%rcx,8)");
	/* Also check instructions in the same group encoding as clflushopt */
	asm volatile("clflush (%rax)");
	asm volatile("clflush (%r8)");
	asm volatile("sfence");
	/* clwb m8 */
	asm volatile("clwb (%rax)");
	asm volatile("clwb (%r8)");
	asm volatile("clwb (0x12345678)");
	asm volatile("clwb 0x12345678(%rax,%rcx,8)");
	asm volatile("clwb 0x12345678(%r8,%rcx,8)");
	/* Also check instructions in the same group encoding as clwb */
	asm volatile("xsaveopt (%rax)");
	asm volatile("xsaveopt (%r8)");
	asm volatile("mfence");
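	/*
	 * xsavec, xsaves and xrstors sit in the 0f c7 opcode group (/4, /5
	 * and /3 respectively) and only exist in memory form, hence the
	 * memory-only operand patterns below.
	 */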
	/* xsavec mem */
	asm volatile("xsavec (%rax)");
	asm volatile("xsavec (%r8)");
	asm volatile("xsavec (0x12345678)");
	asm volatile("xsavec 0x12345678(%rax,%rcx,8)");
	asm volatile("xsavec 0x12345678(%r8,%rcx,8)");
	/* xsaves mem */
	asm volatile("xsaves (%rax)");
	asm volatile("xsaves (%r8)");
	asm volatile("xsaves (0x12345678)");
	asm volatile("xsaves 0x12345678(%rax,%rcx,8)");
	asm volatile("xsaves 0x12345678(%r8,%rcx,8)");
	/* xrstors mem */
	asm volatile("xrstors (%rax)");
	asm volatile("xrstors (%r8)");
	asm volatile("xrstors (0x12345678)");
	asm volatile("xrstors 0x12345678(%rax,%rcx,8)");
	asm volatile("xrstors 0x12345678(%r8,%rcx,8)");
#else /* #ifdef __x86_64__ */
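	/*
	 * 32-bit only tests from here on. Opcode 0x62 doubles as the EVEX
	 * prefix, but bound only accepts a memory operand, which is how the
	 * decoder can still tell a legacy bound apart from an EVEX-encoded
	 * instruction.
	 */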
	/* bound r32, mem (same op code as EVEX prefix) */
	asm volatile("bound %eax, 0x12345678(%ecx)");
	asm volatile("bound %ecx, 0x12345678(%eax)");
	asm volatile("bound %edx, 0x12345678(%eax)");
	asm volatile("bound %ebx, 0x12345678(%eax)");
	asm volatile("bound %esp, 0x12345678(%eax)");
	asm volatile("bound %ebp, 0x12345678(%eax)");
	asm volatile("bound %esi, 0x12345678(%eax)");
	asm volatile("bound %edi, 0x12345678(%eax)");
	asm volatile("bound %ecx, (%eax)");
	asm volatile("bound %eax, (0x12345678)");
	asm volatile("bound %edx, (%ecx,%eax,1)");
	asm volatile("bound %edx, 0x12345678(,%eax,1)");
	asm volatile("bound %edx, (%eax,%ecx,1)");
	asm volatile("bound %edx, (%eax,%ecx,8)");
	asm volatile("bound %edx, 0x12(%eax)");
	asm volatile("bound %edx, 0x12(%ebp)");
	asm volatile("bound %edx, 0x12(%ecx,%eax,1)");
	asm volatile("bound %edx, 0x12(%ebp,%eax,1)");
	asm volatile("bound %edx, 0x12(%eax,%ecx,1)");
	asm volatile("bound %edx, 0x12(%eax,%ecx,8)");
	asm volatile("bound %edx, 0x12345678(%eax)");
	asm volatile("bound %edx, 0x12345678(%ebp)");
	asm volatile("bound %edx, 0x12345678(%ecx,%eax,1)");
	asm volatile("bound %edx, 0x12345678(%ebp,%eax,1)");
	asm volatile("bound %edx, 0x12345678(%eax,%ecx,1)");
	asm volatile("bound %edx, 0x12345678(%eax,%ecx,8)");
	/* bound r16, mem (same op code as EVEX prefix) */
	asm volatile("bound %ax, 0x12345678(%ecx)");
	asm volatile("bound %cx, 0x12345678(%eax)");
	asm volatile("bound %dx, 0x12345678(%eax)");
	asm volatile("bound %bx, 0x12345678(%eax)");
	asm volatile("bound %sp, 0x12345678(%eax)");
	asm volatile("bound %bp, 0x12345678(%eax)");
	asm volatile("bound %si, 0x12345678(%eax)");
	asm volatile("bound %di, 0x12345678(%eax)");
	asm volatile("bound %cx, (%eax)");
	asm volatile("bound %ax, (0x12345678)");
	asm volatile("bound %dx, (%ecx,%eax,1)");
	asm volatile("bound %dx, 0x12345678(,%eax,1)");
	asm volatile("bound %dx, (%eax,%ecx,1)");
	asm volatile("bound %dx, (%eax,%ecx,8)");
	asm volatile("bound %dx, 0x12(%eax)");
	asm volatile("bound %dx, 0x12(%ebp)");
	asm volatile("bound %dx, 0x12(%ecx,%eax,1)");
	asm volatile("bound %dx, 0x12(%ebp,%eax,1)");
	asm volatile("bound %dx, 0x12(%eax,%ecx,1)");
	asm volatile("bound %dx, 0x12(%eax,%ecx,8)");
	asm volatile("bound %dx, 0x12345678(%eax)");
	asm volatile("bound %dx, 0x12345678(%ebp)");
	asm volatile("bound %dx, 0x12345678(%ecx,%eax,1)");
	asm volatile("bound %dx, 0x12345678(%ebp,%eax,1)");
	asm volatile("bound %dx, 0x12345678(%eax,%ecx,1)");
	asm volatile("bound %dx, 0x12345678(%eax,%ecx,8)");
	/* AVX-512: Instructions with the same op codes as Mask Instructions */
	asm volatile("cmovno %eax,%ebx");
	asm volatile("cmovno 0x12345678(%eax),%ecx");
	asm volatile("cmovno 0x12345678(%eax),%cx");
	asm volatile("cmove %eax,%ebx");
	asm volatile("cmove 0x12345678(%eax),%ecx");
	asm volatile("cmove 0x12345678(%eax),%cx");
	asm volatile("seto 0x12345678(%eax)");
	asm volatile("setno 0x12345678(%eax)");
	asm volatile("setb 0x12345678(%eax)");
	asm volatile("setc 0x12345678(%eax)");
	asm volatile("setnae 0x12345678(%eax)");
	asm volatile("setae 0x12345678(%eax)");
	asm volatile("setnb 0x12345678(%eax)");
	asm volatile("setnc 0x12345678(%eax)");
	asm volatile("sets 0x12345678(%eax)");
	asm volatile("setns 0x12345678(%eax)");
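	/*
	 * The mask instructions below are VEX encodings that mostly sit in
	 * the 0f 4x and 0f 9x opcode rows, overlapping the legacy
	 * cmovcc/setcc bytes tested above.
	 */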
	/* AVX-512: Mask Instructions */
	asm volatile("kandw %k7,%k6,%k5");
	asm volatile("kandq %k7,%k6,%k5");
	asm volatile("kandb %k7,%k6,%k5");
	asm volatile("kandd %k7,%k6,%k5");
	asm volatile("kandnw %k7,%k6,%k5");
	asm volatile("kandnq %k7,%k6,%k5");
	asm volatile("kandnb %k7,%k6,%k5");
	asm volatile("kandnd %k7,%k6,%k5");
	asm volatile("knotw %k7,%k6");
	asm volatile("knotq %k7,%k6");
	asm volatile("knotb %k7,%k6");
	asm volatile("knotd %k7,%k6");
	asm volatile("korw %k7,%k6,%k5");
	asm volatile("korq %k7,%k6,%k5");
	asm volatile("korb %k7,%k6,%k5");
	asm volatile("kord %k7,%k6,%k5");
	asm volatile("kxnorw %k7,%k6,%k5");
	asm volatile("kxnorq %k7,%k6,%k5");
	asm volatile("kxnorb %k7,%k6,%k5");
	asm volatile("kxnord %k7,%k6,%k5");
	asm volatile("kxorw %k7,%k6,%k5");
	asm volatile("kxorq %k7,%k6,%k5");
	asm volatile("kxorb %k7,%k6,%k5");
	asm volatile("kxord %k7,%k6,%k5");
	asm volatile("kaddw %k7,%k6,%k5");
	asm volatile("kaddq %k7,%k6,%k5");
	asm volatile("kaddb %k7,%k6,%k5");
	asm volatile("kaddd %k7,%k6,%k5");
	asm volatile("kunpckbw %k7,%k6,%k5");
	asm volatile("kunpckwd %k7,%k6,%k5");
	asm volatile("kunpckdq %k7,%k6,%k5");
	asm volatile("kmovw %k6,%k5");
	asm volatile("kmovw (%ecx),%k5");
	asm volatile("kmovw 0x123(%eax,%ecx,8),%k5");
	asm volatile("kmovw %k5,(%ecx)");
	asm volatile("kmovw %k5,0x123(%eax,%ecx,8)");
	asm volatile("kmovw %eax,%k5");
	asm volatile("kmovw %ebp,%k5");
	asm volatile("kmovw %k5,%eax");
	asm volatile("kmovw %k5,%ebp");
	asm volatile("kmovq %k6,%k5");
	asm volatile("kmovq (%ecx),%k5");
	asm volatile("kmovq 0x123(%eax,%ecx,8),%k5");
	asm volatile("kmovq %k5,(%ecx)");
	asm volatile("kmovq %k5,0x123(%eax,%ecx,8)");
	asm volatile("kmovb %k6,%k5");
	asm volatile("kmovb (%ecx),%k5");
	asm volatile("kmovb 0x123(%eax,%ecx,8),%k5");
	asm volatile("kmovb %k5,(%ecx)");
	asm volatile("kmovb %k5,0x123(%eax,%ecx,8)");
	asm volatile("kmovb %eax,%k5");
	asm volatile("kmovb %ebp,%k5");
	asm volatile("kmovb %k5,%eax");
	asm volatile("kmovb %k5,%ebp");
	asm volatile("kmovd %k6,%k5");
	asm volatile("kmovd (%ecx),%k5");
	asm volatile("kmovd 0x123(%eax,%ecx,8),%k5");
	asm volatile("kmovd %k5,(%ecx)");
	asm volatile("kmovd %k5,0x123(%eax,%ecx,8)");
	asm volatile("kmovd %eax,%k5");
	asm volatile("kmovd %ebp,%k5");
	asm volatile("kmovd %k5,%eax");
	asm volatile("kmovd %k5,%ebp");
	asm volatile("kortestw %k6,%k5");
	asm volatile("kortestq %k6,%k5");
	asm volatile("kortestb %k6,%k5");
	asm volatile("kortestd %k6,%k5");
	asm volatile("ktestw %k6,%k5");
	asm volatile("ktestq %k6,%k5");
	asm volatile("ktestb %k6,%k5");
	asm volatile("ktestd %k6,%k5");
	asm volatile("kshiftrw $0x12,%k6,%k5");
	asm volatile("kshiftrq $0x5b,%k6,%k5");
	asm volatile("kshiftlw $0x12,%k6,%k5");
	asm volatile("kshiftlq $0x5b,%k6,%k5");
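	/*
	 * Each "Op code" group below mixes legacy/VEX forms with the
	 * EVEX-only variants that reuse the same opcode byte, so the decoder
	 * has to pick the right table from the prefix and encoding.
	 */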
	/* AVX-512: Op code 0f 5b */
	asm volatile("vcvtdq2ps %xmm5,%xmm6");
	asm volatile("vcvtqq2ps %zmm5,%ymm6{%k7}");
	asm volatile("vcvtps2dq %xmm5,%xmm6");
	asm volatile("vcvttps2dq %xmm5,%xmm6");
	/* AVX-512: Op code 0f 6f */
	asm volatile("movq %mm0,%mm4");
	asm volatile("vmovdqa %ymm4,%ymm6");
	asm volatile("vmovdqa32 %zmm5,%zmm6");
	asm volatile("vmovdqa64 %zmm5,%zmm6");
	asm volatile("vmovdqu %ymm4,%ymm6");
	asm volatile("vmovdqu32 %zmm5,%zmm6");
	asm volatile("vmovdqu64 %zmm5,%zmm6");
	asm volatile("vmovdqu8 %zmm5,%zmm6");
	asm volatile("vmovdqu16 %zmm5,%zmm6");
	/* AVX-512: Op code 0f 78 */
	asm volatile("vmread %eax,%ebx");
	asm volatile("vcvttps2udq %zmm5,%zmm6");
	asm volatile("vcvttpd2udq %zmm5,%ymm6{%k7}");
	asm volatile("vcvttsd2usi %xmm6,%eax");
	asm volatile("vcvttss2usi %xmm6,%eax");
	asm volatile("vcvttps2uqq %ymm5,%zmm6{%k7}");
	asm volatile("vcvttpd2uqq %zmm5,%zmm6");
	/* AVX-512: Op code 0f 79 */
	asm volatile("vmwrite %eax,%ebx");
	asm volatile("vcvtps2udq %zmm5,%zmm6");
	asm volatile("vcvtpd2udq %zmm5,%ymm6{%k7}");
	asm volatile("vcvtsd2usi %xmm6,%eax");
	asm volatile("vcvtss2usi %xmm6,%eax");
	asm volatile("vcvtps2uqq %ymm5,%zmm6{%k7}");
	asm volatile("vcvtpd2uqq %zmm5,%zmm6");
	/* AVX-512: Op code 0f 7a */
	asm volatile("vcvtudq2pd %ymm5,%zmm6{%k7}");
	asm volatile("vcvtuqq2pd %zmm5,%zmm6");
	asm volatile("vcvtudq2ps %zmm5,%zmm6");
	asm volatile("vcvtuqq2ps %zmm5,%ymm6{%k7}");
	asm volatile("vcvttps2qq %ymm5,%zmm6{%k7}");
	asm volatile("vcvttpd2qq %zmm5,%zmm6");
	/* AVX-512: Op code 0f 7b */
	asm volatile("vcvtusi2sd %eax,%xmm5,%xmm6");
	asm volatile("vcvtusi2ss %eax,%xmm5,%xmm6");
	asm volatile("vcvtps2qq %ymm5,%zmm6{%k7}");
	asm volatile("vcvtpd2qq %zmm5,%zmm6");
	/* AVX-512: Op code 0f 7f */
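	/*
	 * The '.s' suffix asks gas for the alternate (store-form) encoding,
	 * i.e. opcode 0f 7f rather than 0f 6f.
	 */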
	asm volatile("movq.s %mm0,%mm4");
	asm volatile("vmovdqa.s %ymm5,%ymm6");
	asm volatile("vmovdqa32.s %zmm5,%zmm6");
	asm volatile("vmovdqa64.s %zmm5,%zmm6");
	asm volatile("vmovdqu.s %ymm5,%ymm6");
	asm volatile("vmovdqu32.s %zmm5,%zmm6");
	asm volatile("vmovdqu64.s %zmm5,%zmm6");
	asm volatile("vmovdqu8.s %zmm5,%zmm6");
	asm volatile("vmovdqu16.s %zmm5,%zmm6");
	/* AVX-512: Op code 0f db */
	asm volatile("pand %mm1,%mm2");
	asm volatile("pand %xmm1,%xmm2");
	asm volatile("vpand %ymm4,%ymm6,%ymm2");
	asm volatile("vpandd %zmm4,%zmm5,%zmm6");
	asm volatile("vpandq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f df */
	asm volatile("pandn %mm1,%mm2");
	asm volatile("pandn %xmm1,%xmm2");
	asm volatile("vpandn %ymm4,%ymm6,%ymm2");
	asm volatile("vpandnd %zmm4,%zmm5,%zmm6");
	asm volatile("vpandnq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f e6 */
	asm volatile("vcvttpd2dq %xmm1,%xmm2");
	asm volatile("vcvtdq2pd %xmm5,%xmm6");
	asm volatile("vcvtdq2pd %ymm5,%zmm6{%k7}");
	asm volatile("vcvtqq2pd %zmm5,%zmm6");
	asm volatile("vcvtpd2dq %xmm1,%xmm2");
	/* AVX-512: Op code 0f eb */
	asm volatile("por %mm4,%mm6");
	asm volatile("vpor %ymm4,%ymm6,%ymm2");
	asm volatile("vpord %zmm4,%zmm5,%zmm6");
	asm volatile("vporq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f ef */
	asm volatile("pxor %mm4,%mm6");
	asm volatile("vpxor %ymm4,%ymm6,%ymm2");
	asm volatile("vpxord %zmm4,%zmm5,%zmm6");
	asm volatile("vpxorq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 10 */
	asm volatile("pblendvb %xmm1,%xmm0");
	asm volatile("vpsrlvw %zmm4,%zmm5,%zmm6");
	asm volatile("vpmovuswb %zmm5,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 11 */
	asm volatile("vpmovusdb %zmm5,%xmm6{%k7}");
	asm volatile("vpsravw %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 12 */
	asm volatile("vpmovusqb %zmm5,%xmm6{%k7}");
	asm volatile("vpsllvw %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 13 */
	asm volatile("vcvtph2ps %xmm3,%ymm5");
	asm volatile("vcvtph2ps %ymm5,%zmm6{%k7}");
	asm volatile("vpmovusdw %zmm5,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 14 */
	asm volatile("blendvps %xmm1,%xmm0");
	asm volatile("vpmovusqw %zmm5,%xmm6{%k7}");
	asm volatile("vprorvd %zmm4,%zmm5,%zmm6");
	asm volatile("vprorvq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 15 */
	asm volatile("blendvpd %xmm1,%xmm0");
	asm volatile("vpmovusqd %zmm5,%ymm6{%k7}");
	asm volatile("vprolvd %zmm4,%zmm5,%zmm6");
	asm volatile("vprolvq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 16 */
	asm volatile("vpermps %ymm4,%ymm6,%ymm2");
	asm volatile("vpermps %ymm4,%ymm6,%ymm2{%k7}");
	asm volatile("vpermpd %ymm4,%ymm6,%ymm2{%k7}");
	/* AVX-512: Op code 0f 38 19 */
	asm volatile("vbroadcastsd %xmm4,%ymm6");
	asm volatile("vbroadcastf32x2 %xmm7,%zmm6");
	/* AVX-512: Op code 0f 38 1a */
	asm volatile("vbroadcastf128 (%ecx),%ymm4");
	asm volatile("vbroadcastf32x4 (%ecx),%zmm6");
	asm volatile("vbroadcastf64x2 (%ecx),%zmm6");
	/* AVX-512: Op code 0f 38 1b */
	asm volatile("vbroadcastf32x8 (%ecx),%zmm6");
	asm volatile("vbroadcastf64x4 (%ecx),%zmm6");
	/* AVX-512: Op code 0f 38 1f */
	asm volatile("vpabsq %zmm4,%zmm6");
	/* AVX-512: Op code 0f 38 20 */
	asm volatile("vpmovsxbw %xmm4,%xmm5");
	asm volatile("vpmovswb %zmm5,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 21 */
	asm volatile("vpmovsxbd %xmm4,%ymm6");
	asm volatile("vpmovsdb %zmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 22 */
	asm volatile("vpmovsxbq %xmm4,%ymm4");
	asm volatile("vpmovsqb %zmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 23 */
	asm volatile("vpmovsxwd %xmm4,%ymm4");
	asm volatile("vpmovsdw %zmm5,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 24 */
	asm volatile("vpmovsxwq %xmm4,%ymm6");
	asm volatile("vpmovsqw %zmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 25 */
	asm volatile("vpmovsxdq %xmm4,%ymm4");
	asm volatile("vpmovsqd %zmm5,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 26 */
	asm volatile("vptestmb %zmm5,%zmm6,%k5");
	asm volatile("vptestmw %zmm5,%zmm6,%k5");
	asm volatile("vptestnmb %zmm4,%zmm5,%k5");
	asm volatile("vptestnmw %zmm4,%zmm5,%k5");
	/* AVX-512: Op code 0f 38 27 */
	asm volatile("vptestmd %zmm5,%zmm6,%k5");
	asm volatile("vptestmq %zmm5,%zmm6,%k5");
	asm volatile("vptestnmd %zmm4,%zmm5,%k5");
	asm volatile("vptestnmq %zmm4,%zmm5,%k5");
	/* AVX-512: Op code 0f 38 28 */
	asm volatile("vpmuldq %ymm4,%ymm6,%ymm2");
	asm volatile("vpmovm2b %k5,%zmm6");
	asm volatile("vpmovm2w %k5,%zmm6");
	/* AVX-512: Op code 0f 38 29 */
	asm volatile("vpcmpeqq %ymm4,%ymm6,%ymm2");
	asm volatile("vpmovb2m %zmm6,%k5");
	asm volatile("vpmovw2m %zmm6,%k5");
	/* AVX-512: Op code 0f 38 2a */
	asm volatile("vmovntdqa (%ecx),%ymm4");
	asm volatile("vpbroadcastmb2q %k6,%zmm1");
	/* AVX-512: Op code 0f 38 2c */
	asm volatile("vmaskmovps (%ecx),%ymm4,%ymm6");
	asm volatile("vscalefps %zmm4,%zmm5,%zmm6");
	asm volatile("vscalefpd %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 2d */
	asm volatile("vmaskmovpd (%ecx),%ymm4,%ymm6");
	asm volatile("vscalefss %xmm4,%xmm5,%xmm6{%k7}");
	asm volatile("vscalefsd %xmm4,%xmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 30 */
	asm volatile("vpmovzxbw %xmm4,%ymm4");
	asm volatile("vpmovwb %zmm5,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 31 */
	asm volatile("vpmovzxbd %xmm4,%ymm6");
	asm volatile("vpmovdb %zmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 32 */
	asm volatile("vpmovzxbq %xmm4,%ymm4");
	asm volatile("vpmovqb %zmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 33 */
	asm volatile("vpmovzxwd %xmm4,%ymm4");
	asm volatile("vpmovdw %zmm5,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 34 */
	asm volatile("vpmovzxwq %xmm4,%ymm6");
	asm volatile("vpmovqw %zmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 35 */
	asm volatile("vpmovzxdq %xmm4,%ymm4");
	asm volatile("vpmovqd %zmm5,%ymm6{%k7}");
	/* AVX-512: Op code 0f 38 36 */
	asm volatile("vpermd %ymm4,%ymm6,%ymm2");
	asm volatile("vpermd %ymm4,%ymm6,%ymm2{%k7}");
	asm volatile("vpermq %ymm4,%ymm6,%ymm2{%k7}");
	/* AVX-512: Op code 0f 38 38 */
	asm volatile("vpminsb %ymm4,%ymm6,%ymm2");
	asm volatile("vpmovm2d %k5,%zmm6");
	asm volatile("vpmovm2q %k5,%zmm6");
	/* AVX-512: Op code 0f 38 39 */
	asm volatile("vpminsd %xmm1,%xmm2,%xmm3");
	asm volatile("vpminsd %zmm4,%zmm5,%zmm6");
	asm volatile("vpminsq %zmm4,%zmm5,%zmm6");
	asm volatile("vpmovd2m %zmm6,%k5");
	asm volatile("vpmovq2m %zmm6,%k5");
	/* AVX-512: Op code 0f 38 3a */
	asm volatile("vpminuw %ymm4,%ymm6,%ymm2");
	asm volatile("vpbroadcastmw2d %k6,%zmm6");
	/* AVX-512: Op code 0f 38 3b */
	asm volatile("vpminud %ymm4,%ymm6,%ymm2");
	asm volatile("vpminud %zmm4,%zmm5,%zmm6");
	asm volatile("vpminuq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 3d */
	asm volatile("vpmaxsd %ymm4,%ymm6,%ymm2");
	asm volatile("vpmaxsd %zmm4,%zmm5,%zmm6");
	asm volatile("vpmaxsq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 3f */
	asm volatile("vpmaxud %ymm4,%ymm6,%ymm2");
	asm volatile("vpmaxud %zmm4,%zmm5,%zmm6");
	asm volatile("vpmaxuq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 40 */
	asm volatile("vpmulld %ymm4,%ymm6,%ymm2");
	asm volatile("vpmulld %zmm4,%zmm5,%zmm6");
	asm volatile("vpmullq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 42 */
	asm volatile("vgetexpps %zmm5,%zmm6");
	asm volatile("vgetexppd %zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 43 */
	asm volatile("vgetexpss %xmm4,%xmm5,%xmm6{%k7}");
	asm volatile("vgetexpsd %xmm2,%xmm3,%xmm4{%k7}");
	/* AVX-512: Op code 0f 38 44 */
	asm volatile("vplzcntd %zmm5,%zmm6");
	asm volatile("vplzcntq %zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 46 */
	asm volatile("vpsravd %ymm4,%ymm6,%ymm2");
	asm volatile("vpsravd %zmm4,%zmm5,%zmm6");
	asm volatile("vpsravq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 4c */
	asm volatile("vrcp14ps %zmm5,%zmm6");
	asm volatile("vrcp14pd %zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 4d */
	asm volatile("vrcp14ss %xmm4,%xmm5,%xmm6{%k7}");
	asm volatile("vrcp14sd %xmm4,%xmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 4e */
	asm volatile("vrsqrt14ps %zmm5,%zmm6");
	asm volatile("vrsqrt14pd %zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 4f */
	asm volatile("vrsqrt14ss %xmm4,%xmm5,%xmm6{%k7}");
	asm volatile("vrsqrt14sd %xmm4,%xmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 38 59 */
	asm volatile("vpbroadcastq %xmm4,%xmm6");
	asm volatile("vbroadcasti32x2 %xmm7,%zmm6");
	/* AVX-512: Op code 0f 38 5a */
	asm volatile("vbroadcasti128 (%ecx),%ymm4");
	asm volatile("vbroadcasti32x4 (%ecx),%zmm6");
	asm volatile("vbroadcasti64x2 (%ecx),%zmm6");
	/* AVX-512: Op code 0f 38 5b */
	asm volatile("vbroadcasti32x8 (%ecx),%zmm6");
	asm volatile("vbroadcasti64x4 (%ecx),%zmm6");
	/* AVX-512: Op code 0f 38 64 */
	asm volatile("vpblendmd %zmm4,%zmm5,%zmm6");
	asm volatile("vpblendmq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 65 */
	asm volatile("vblendmps %zmm4,%zmm5,%zmm6");
	asm volatile("vblendmpd %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 66 */
	asm volatile("vpblendmb %zmm4,%zmm5,%zmm6");
	asm volatile("vpblendmw %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 75 */
	asm volatile("vpermi2b %zmm4,%zmm5,%zmm6");
	asm volatile("vpermi2w %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 76 */
	asm volatile("vpermi2d %zmm4,%zmm5,%zmm6");
	asm volatile("vpermi2q %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 77 */
	asm volatile("vpermi2ps %zmm4,%zmm5,%zmm6");
	asm volatile("vpermi2pd %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 7a */
	asm volatile("vpbroadcastb %eax,%xmm3");
	/* AVX-512: Op code 0f 38 7b */
	asm volatile("vpbroadcastw %eax,%xmm3");
	/* AVX-512: Op code 0f 38 7c */
	asm volatile("vpbroadcastd %eax,%xmm3");
	/* AVX-512: Op code 0f 38 7d */
	asm volatile("vpermt2b %zmm4,%zmm5,%zmm6");
	asm volatile("vpermt2w %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 7e */
	asm volatile("vpermt2d %zmm4,%zmm5,%zmm6");
	asm volatile("vpermt2q %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 7f */
	asm volatile("vpermt2ps %zmm4,%zmm5,%zmm6");
	asm volatile("vpermt2pd %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 83 */
	asm volatile("vpmultishiftqb %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 88 */
	asm volatile("vexpandps (%ecx),%zmm6");
	asm volatile("vexpandpd (%ecx),%zmm6");
	/* AVX-512: Op code 0f 38 89 */
	asm volatile("vpexpandd (%ecx),%zmm6");
	asm volatile("vpexpandq (%ecx),%zmm6");
	/* AVX-512: Op code 0f 38 8a */
	asm volatile("vcompressps %zmm6,(%ecx)");
	asm volatile("vcompresspd %zmm6,(%ecx)");
	/* AVX-512: Op code 0f 38 8b */
	asm volatile("vpcompressd %zmm6,(%ecx)");
	asm volatile("vpcompressq %zmm6,(%ecx)");
	/* AVX-512: Op code 0f 38 8d */
	asm volatile("vpermb %zmm4,%zmm5,%zmm6");
	asm volatile("vpermw %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 90 */
	asm volatile("vpgatherdd %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
	asm volatile("vpgatherdq %xmm2,0x04(%ebp,%xmm7,2),%xmm1");
	asm volatile("vpgatherdd 0x7b(%ebp,%zmm7,8),%zmm6{%k1}");
	asm volatile("vpgatherdq 0x7b(%ebp,%ymm7,8),%zmm6{%k1}");
	/* AVX-512: Op code 0f 38 91 */
	asm volatile("vpgatherqd %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
	asm volatile("vpgatherqq %xmm2,0x02(%ebp,%xmm7,2),%xmm1");
	asm volatile("vpgatherqd 0x7b(%ebp,%zmm7,8),%ymm6{%k1}");
	asm volatile("vpgatherqq 0x7b(%ebp,%zmm7,8),%zmm6{%k1}");
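	/*
	 * Gathers and scatters use VSIB addressing: the index register in
	 * the memory operand is a vector, and the {%k} mask is both an
	 * input and a completion mask.
	 */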
	/* AVX-512: Op code 0f 38 a0 */
	asm volatile("vpscatterdd %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vpscatterdq %zmm6,0x7b(%ebp,%ymm7,8){%k1}");
	/* AVX-512: Op code 0f 38 a1 */
	asm volatile("vpscatterqd %ymm6,0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vpscatterqq %ymm6,0x7b(%ebp,%ymm7,8){%k1}");
	/* AVX-512: Op code 0f 38 a2 */
	asm volatile("vscatterdps %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vscatterdpd %zmm6,0x7b(%ebp,%ymm7,8){%k1}");
	/* AVX-512: Op code 0f 38 a3 */
	asm volatile("vscatterqps %ymm6,0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vscatterqpd %zmm6,0x7b(%ebp,%zmm7,8){%k1}");
	/* AVX-512: Op code 0f 38 b4 */
	asm volatile("vpmadd52luq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 b5 */
	asm volatile("vpmadd52huq %zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 c4 */
	asm volatile("vpconflictd %zmm5,%zmm6");
	asm volatile("vpconflictq %zmm5,%zmm6");
	/* AVX-512: Op code 0f 38 c8 */
	asm volatile("vexp2ps %zmm6,%zmm7");
	asm volatile("vexp2pd %zmm6,%zmm7");
	/* AVX-512: Op code 0f 38 ca */
	asm volatile("vrcp28ps %zmm6,%zmm7");
	asm volatile("vrcp28pd %zmm6,%zmm7");
	/* AVX-512: Op code 0f 38 cb */
	asm volatile("vrcp28ss %xmm5,%xmm6,%xmm7{%k7}");
	asm volatile("vrcp28sd %xmm5,%xmm6,%xmm7{%k7}");
	/* AVX-512: Op code 0f 38 cc */
	asm volatile("vrsqrt28ps %zmm6,%zmm7");
	asm volatile("vrsqrt28pd %zmm6,%zmm7");
	/* AVX-512: Op code 0f 38 cd */
	asm volatile("vrsqrt28ss %xmm5,%xmm6,%xmm7{%k7}");
	asm volatile("vrsqrt28sd %xmm5,%xmm6,%xmm7{%k7}");
	/* AVX-512: Op code 0f 3a 03 */
	asm volatile("valignd $0x12,%zmm5,%zmm6,%zmm7");
	asm volatile("valignq $0x12,%zmm5,%zmm6,%zmm7");
	/* AVX-512: Op code 0f 3a 08 */
	asm volatile("vroundps $0x5,%ymm6,%ymm2");
	asm volatile("vrndscaleps $0x12,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 3a 09 */
	asm volatile("vroundpd $0x5,%ymm6,%ymm2");
	asm volatile("vrndscalepd $0x12,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 3a 0a */
	asm volatile("vroundss $0x5,%xmm4,%xmm6,%xmm2");
	asm volatile("vrndscaless $0x12,%xmm4,%xmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 3a 0b */
	asm volatile("vroundsd $0x5,%xmm4,%xmm6,%xmm2");
	asm volatile("vrndscalesd $0x12,%xmm4,%xmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 3a 18 */
	asm volatile("vinsertf128 $0x5,%xmm4,%ymm4,%ymm6");
	asm volatile("vinsertf32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
	asm volatile("vinsertf64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
	/* AVX-512: Op code 0f 3a 19 */
	asm volatile("vextractf128 $0x5,%ymm4,%xmm4");
	asm volatile("vextractf32x4 $0x12,%zmm5,%xmm6{%k7}");
	asm volatile("vextractf64x2 $0x12,%zmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 3a 1a */
	asm volatile("vinsertf32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
	asm volatile("vinsertf64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
	/* AVX-512: Op code 0f 3a 1b */
	asm volatile("vextractf32x8 $0x12,%zmm6,%ymm7{%k7}");
	asm volatile("vextractf64x4 $0x12,%zmm6,%ymm7{%k7}");
	/* AVX-512: Op code 0f 3a 1e */
	asm volatile("vpcmpud $0x12,%zmm6,%zmm7,%k5");
	asm volatile("vpcmpuq $0x12,%zmm6,%zmm7,%k5");
	/* AVX-512: Op code 0f 3a 1f */
	asm volatile("vpcmpd $0x12,%zmm6,%zmm7,%k5");
	asm volatile("vpcmpq $0x12,%zmm6,%zmm7,%k5");
	/* AVX-512: Op code 0f 3a 23 */
	asm volatile("vshuff32x4 $0x12,%zmm5,%zmm6,%zmm7");
	asm volatile("vshuff64x2 $0x12,%zmm5,%zmm6,%zmm7");
	/* AVX-512: Op code 0f 3a 25 */
	asm volatile("vpternlogd $0x12,%zmm5,%zmm6,%zmm7");
	asm volatile("vpternlogq $0x12,%zmm5,%zmm6,%zmm7");
	/* AVX-512: Op code 0f 3a 26 */
	asm volatile("vgetmantps $0x12,%zmm6,%zmm7");
	asm volatile("vgetmantpd $0x12,%zmm6,%zmm7");
	/* AVX-512: Op code 0f 3a 27 */
	asm volatile("vgetmantss $0x12,%xmm5,%xmm6,%xmm7{%k7}");
	asm volatile("vgetmantsd $0x12,%xmm5,%xmm6,%xmm7{%k7}");
	/* AVX-512: Op code 0f 3a 38 */
	asm volatile("vinserti128 $0x5,%xmm4,%ymm4,%ymm6");
	asm volatile("vinserti32x4 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
	asm volatile("vinserti64x2 $0x12,%xmm4,%zmm5,%zmm6{%k7}");
	/* AVX-512: Op code 0f 3a 39 */
	asm volatile("vextracti128 $0x5,%ymm4,%xmm6");
	asm volatile("vextracti32x4 $0x12,%zmm5,%xmm6{%k7}");
	asm volatile("vextracti64x2 $0x12,%zmm5,%xmm6{%k7}");
	/* AVX-512: Op code 0f 3a 3a */
	asm volatile("vinserti32x8 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
	asm volatile("vinserti64x4 $0x12,%ymm5,%zmm6,%zmm7{%k7}");
	/* AVX-512: Op code 0f 3a 3b */
	asm volatile("vextracti32x8 $0x12,%zmm6,%ymm7{%k7}");
	asm volatile("vextracti64x4 $0x12,%zmm6,%ymm7{%k7}");
	/* AVX-512: Op code 0f 3a 3e */
	asm volatile("vpcmpub $0x12,%zmm6,%zmm7,%k5");
	asm volatile("vpcmpuw $0x12,%zmm6,%zmm7,%k5");
	/* AVX-512: Op code 0f 3a 3f */
	asm volatile("vpcmpb $0x12,%zmm6,%zmm7,%k5");
	asm volatile("vpcmpw $0x12,%zmm6,%zmm7,%k5");
	/* AVX-512: Op code 0f 3a 42 */
	asm volatile("vmpsadbw $0x5,%ymm4,%ymm6,%ymm2");
	asm volatile("vdbpsadbw $0x12,%zmm4,%zmm5,%zmm6");
	/* AVX-512: Op code 0f 3a 43 */
	asm volatile("vshufi32x4 $0x12,%zmm5,%zmm6,%zmm7");
	asm volatile("vshufi64x2 $0x12,%zmm5,%zmm6,%zmm7");
	/* AVX-512: Op code 0f 3a 50 */
	asm volatile("vrangeps $0x12,%zmm5,%zmm6,%zmm7");
	asm volatile("vrangepd $0x12,%zmm5,%zmm6,%zmm7");
	/* AVX-512: Op code 0f 3a 51 */
	asm volatile("vrangess $0x12,%xmm5,%xmm6,%xmm7");
	asm volatile("vrangesd $0x12,%xmm5,%xmm6,%xmm7");
	/* AVX-512: Op code 0f 3a 54 */
	asm volatile("vfixupimmps $0x12,%zmm5,%zmm6,%zmm7");
	asm volatile("vfixupimmpd $0x12,%zmm5,%zmm6,%zmm7");
	/* AVX-512: Op code 0f 3a 55 */
	asm volatile("vfixupimmss $0x12,%xmm5,%xmm6,%xmm7{%k7}");
	asm volatile("vfixupimmsd $0x12,%xmm5,%xmm6,%xmm7{%k7}");
	/* AVX-512: Op code 0f 3a 56 */
	asm volatile("vreduceps $0x12,%zmm6,%zmm7");
	asm volatile("vreducepd $0x12,%zmm6,%zmm7");
	/* AVX-512: Op code 0f 3a 57 */
	asm volatile("vreducess $0x12,%xmm5,%xmm6,%xmm7");
	asm volatile("vreducesd $0x12,%xmm5,%xmm6,%xmm7");
	/* AVX-512: Op code 0f 3a 66 */
	asm volatile("vfpclassps $0x12,%zmm7,%k5");
	asm volatile("vfpclasspd $0x12,%zmm7,%k5");
	/* AVX-512: Op code 0f 3a 67 */
	asm volatile("vfpclassss $0x12,%xmm7,%k5");
	asm volatile("vfpclasssd $0x12,%xmm7,%k5");
	/* AVX-512: Op code 0f 72 (Grp13) */
	asm volatile("vprord $0x12,%zmm5,%zmm6");
	asm volatile("vprorq $0x12,%zmm5,%zmm6");
	asm volatile("vprold $0x12,%zmm5,%zmm6");
	asm volatile("vprolq $0x12,%zmm5,%zmm6");
	asm volatile("psrad $0x2,%mm6");
	asm volatile("vpsrad $0x5,%ymm6,%ymm2");
	asm volatile("vpsrad $0x5,%zmm6,%zmm2");
	asm volatile("vpsraq $0x5,%zmm6,%zmm2");
	/* AVX-512: Op code 0f 38 c6 (Grp18) */
	asm volatile("vgatherpf0dps 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vgatherpf0dpd 0x7b(%ebp,%ymm7,8){%k1}");
	asm volatile("vgatherpf1dps 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vgatherpf1dpd 0x7b(%ebp,%ymm7,8){%k1}");
	asm volatile("vscatterpf0dps 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vscatterpf0dpd 0x7b(%ebp,%ymm7,8){%k1}");
	asm volatile("vscatterpf1dps 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vscatterpf1dpd 0x7b(%ebp,%ymm7,8){%k1}");
	/* AVX-512: Op code 0f 38 c7 (Grp19) */
	asm volatile("vgatherpf0qps 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vgatherpf0qpd 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vgatherpf1qps 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vgatherpf1qpd 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vscatterpf0qps 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vscatterpf0qpd 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vscatterpf1qps 0x7b(%ebp,%zmm7,8){%k1}");
	asm volatile("vscatterpf1qpd 0x7b(%ebp,%zmm7,8){%k1}");
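	/*
	 * The vgatherpf and vscatterpf forms above are AVX-512PF prefetches:
	 * they take only a VSIB memory operand plus a mandatory mask.
	 */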
	/* AVX-512: Examples */
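	/*
	 * These exercise the EVEX decorations: {%k7} write-masking, {z}
	 * zeroing, {rn,ru,rd,rz-sae} static rounding, {sae} exception
	 * suppression, and {1toN} embedded broadcast.
	 */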
	asm volatile("vaddpd %zmm4,%zmm5,%zmm6");
	asm volatile("vaddpd %zmm4,%zmm5,%zmm6{%k7}");
	asm volatile("vaddpd %zmm4,%zmm5,%zmm6{%k7}{z}");
	asm volatile("vaddpd {rn-sae},%zmm4,%zmm5,%zmm6");
	asm volatile("vaddpd {ru-sae},%zmm4,%zmm5,%zmm6");
	asm volatile("vaddpd {rd-sae},%zmm4,%zmm5,%zmm6");
	asm volatile("vaddpd {rz-sae},%zmm4,%zmm5,%zmm6");
	asm volatile("vaddpd (%ecx),%zmm5,%zmm6");
	asm volatile("vaddpd 0x123(%eax,%ecx,8),%zmm5,%zmm6");
	asm volatile("vaddpd (%ecx){1to8},%zmm5,%zmm6");
	asm volatile("vaddpd 0x1fc0(%edx),%zmm5,%zmm6");
	asm volatile("vaddpd 0x3f8(%edx){1to8},%zmm5,%zmm6");
	asm volatile("vcmpeq_uqps 0x1fc(%edx){1to16},%zmm6,%k5");
	asm volatile("vcmpltsd 0x123(%eax,%ecx,8),%xmm3,%k5{%k7}");
	asm volatile("vcmplesd {sae},%xmm4,%xmm5,%k5{%k7}");
	asm volatile("vgetmantss $0x5b,0x123(%eax,%ecx,8),%xmm4,%xmm5{%k7}");
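	/*
	 * The 0x1fc0, 0x3f8 and 0x1fc displacements above are exact
	 * multiples of the EVEX disp8*N compression factor, so they can be
	 * encoded with a single compressed displacement byte.
	 */
	/*
	 * MPX: the bndmk/bndcl/bndcu/bndcn/bndmov/bndldx/bndstx tests below
	 * all share the 0f 1a and 0f 1b opcodes, distinguished only by
	 * their mandatory prefixes.
	 */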
	/* bndmk m32, bnd */
	asm volatile("bndmk (%eax), %bnd0");
	asm volatile("bndmk (0x12345678), %bnd0");
	asm volatile("bndmk (%eax), %bnd3");
	asm volatile("bndmk (%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndmk (%eax,%ecx,1), %bnd0");
	asm volatile("bndmk (%eax,%ecx,8), %bnd0");
	asm volatile("bndmk 0x12(%eax), %bnd0");
	asm volatile("bndmk 0x12(%ebp), %bnd0");
	asm volatile("bndmk 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndmk 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndmk 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndmk 0x12345678(%eax), %bnd0");
	asm volatile("bndmk 0x12345678(%ebp), %bnd0");
	asm volatile("bndmk 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndmk 0x12345678(%eax,%ecx,8), %bnd0");
	/* bndcl r/m32, bnd */
	asm volatile("bndcl (%eax), %bnd0");
	asm volatile("bndcl (0x12345678), %bnd0");
	asm volatile("bndcl (%eax), %bnd3");
	asm volatile("bndcl (%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcl (%eax,%ecx,1), %bnd0");
	asm volatile("bndcl (%eax,%ecx,8), %bnd0");
	asm volatile("bndcl 0x12(%eax), %bnd0");
	asm volatile("bndcl 0x12(%ebp), %bnd0");
	asm volatile("bndcl 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcl 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcl 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcl 0x12345678(%eax), %bnd0");
	asm volatile("bndcl 0x12345678(%ebp), %bnd0");
	asm volatile("bndcl 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcl 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcl %eax, %bnd0");
	/* bndcu r/m32, bnd */
	asm volatile("bndcu (%eax), %bnd0");
	asm volatile("bndcu (0x12345678), %bnd0");
	asm volatile("bndcu (%eax), %bnd3");
	asm volatile("bndcu (%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcu (%eax,%ecx,1), %bnd0");
	asm volatile("bndcu (%eax,%ecx,8), %bnd0");
	asm volatile("bndcu 0x12(%eax), %bnd0");
	asm volatile("bndcu 0x12(%ebp), %bnd0");
	asm volatile("bndcu 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcu 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcu 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcu 0x12345678(%eax), %bnd0");
	asm volatile("bndcu 0x12345678(%ebp), %bnd0");
	asm volatile("bndcu 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcu 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcu %eax, %bnd0");
	/* bndcn r/m32, bnd */
	asm volatile("bndcn (%eax), %bnd0");
	asm volatile("bndcn (0x12345678), %bnd0");
	asm volatile("bndcn (%eax), %bnd3");
	asm volatile("bndcn (%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcn (%eax,%ecx,1), %bnd0");
	asm volatile("bndcn (%eax,%ecx,8), %bnd0");
	asm volatile("bndcn 0x12(%eax), %bnd0");
	asm volatile("bndcn 0x12(%ebp), %bnd0");
	asm volatile("bndcn 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcn 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcn 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcn 0x12345678(%eax), %bnd0");
	asm volatile("bndcn 0x12345678(%ebp), %bnd0");
	asm volatile("bndcn 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcn 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcn %eax, %bnd0");
	/* bndmov m64, bnd */
	asm volatile("bndmov (%eax), %bnd0");
	asm volatile("bndmov (0x12345678), %bnd0");
	asm volatile("bndmov (%eax), %bnd3");
	asm volatile("bndmov (%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndmov (%eax,%ecx,1), %bnd0");
	asm volatile("bndmov (%eax,%ecx,8), %bnd0");
	asm volatile("bndmov 0x12(%eax), %bnd0");
	asm volatile("bndmov 0x12(%ebp), %bnd0");
	asm volatile("bndmov 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndmov 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndmov 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndmov 0x12345678(%eax), %bnd0");
	asm volatile("bndmov 0x12345678(%ebp), %bnd0");
	asm volatile("bndmov 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndmov 0x12345678(%eax,%ecx,8), %bnd0");
	/* bndmov bnd, m64 */
	asm volatile("bndmov %bnd0, (%eax)");
	asm volatile("bndmov %bnd0, (0x12345678)");
	asm volatile("bndmov %bnd3, (%eax)");
	asm volatile("bndmov %bnd0, (%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(,%eax,1)");
	asm volatile("bndmov %bnd0, (%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, (%eax,%ecx,8)");
	asm volatile("bndmov %bnd0, 0x12(%eax)");
	asm volatile("bndmov %bnd0, 0x12(%ebp)");
	asm volatile("bndmov %bnd0, 0x12(%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12(%ebp,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,8)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax)");
	asm volatile("bndmov %bnd0, 0x12345678(%ebp)");
	asm volatile("bndmov %bnd0, 0x12345678(%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%ebp,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,8)");
	/* bndmov bnd2, bnd1 */
	asm volatile("bndmov %bnd0, %bnd1");
	asm volatile("bndmov %bnd1, %bnd0");
	/* bndldx mib, bnd */
	asm volatile("bndldx (%eax), %bnd0");
	asm volatile("bndldx (0x12345678), %bnd0");
	asm volatile("bndldx (%eax), %bnd3");
	asm volatile("bndldx (%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndldx (%eax,%ecx,1), %bnd0");
	asm volatile("bndldx 0x12(%eax), %bnd0");
	asm volatile("bndldx 0x12(%ebp), %bnd0");
	asm volatile("bndldx 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndldx 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndldx 0x12345678(%eax), %bnd0");
	asm volatile("bndldx 0x12345678(%ebp), %bnd0");
	asm volatile("bndldx 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%eax,%ecx,1), %bnd0");
	/* bndstx bnd, mib */
	asm volatile("bndstx %bnd0, (%eax)");
	asm volatile("bndstx %bnd0, (0x12345678)");
	asm volatile("bndstx %bnd3, (%eax)");
	asm volatile("bndstx %bnd0, (%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(,%eax,1)");
	asm volatile("bndstx %bnd0, (%eax,%ecx,1)");
	asm volatile("bndstx %bnd0, 0x12(%eax)");
	asm volatile("bndstx %bnd0, 0x12(%ebp)");
	asm volatile("bndstx %bnd0, 0x12(%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12(%ebp,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12(%eax,%ecx,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%eax)");
	asm volatile("bndstx %bnd0, 0x12345678(%ebp)");
	asm volatile("bndstx %bnd0, 0x12345678(%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%ebp,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%eax,%ecx,1)");
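	/*
	 * The bnd prefix is the f2 (repne) byte applied to branches; the
	 * decoder should report it as bnd rather than as a repne prefix.
	 */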
	/* bnd prefix on call, ret, jmp and all jcc */
	asm volatile("bnd call label1"); /* Expecting: call unconditional 0xfffffffc */
	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect 0 */
	asm volatile("bnd ret"); /* Expecting: ret indirect 0 */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0xfffffffc */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0xfffffffc */
	asm volatile("bnd jmp *(%ecx)"); /* Expecting: jmp indirect 0 */
	asm volatile("bnd jne label1"); /* Expecting: jcc conditional 0xfffffffc */
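	/* SHA-NI: the sha* tests below cover the 0f 38 c8-cd and 0f 3a cc opcodes */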
	/* sha1rnds4 imm8, xmm2/m128, xmm1 */
	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
	asm volatile("sha1rnds4 $0x91, (%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax), %xmm3");
	asm volatile("sha1rnds4 $0x91, (%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ebp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,8), %xmm0");
	/* sha1nexte xmm2/m128, xmm1 */
	asm volatile("sha1nexte %xmm1, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm2");
	asm volatile("sha1nexte (%eax), %xmm0");
	asm volatile("sha1nexte (0x12345678), %xmm0");
	asm volatile("sha1nexte (%eax), %xmm3");
	asm volatile("sha1nexte (%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1nexte (%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte (%eax,%ecx,8), %xmm0");
	asm volatile("sha1nexte 0x12(%eax), %xmm0");
	asm volatile("sha1nexte 0x12(%ebp), %xmm0");
	asm volatile("sha1nexte 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ebp), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax,%ecx,8), %xmm0");
	/* sha1msg1 xmm2/m128, xmm1 */
	asm volatile("sha1msg1 %xmm1, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm2");
	asm volatile("sha1msg1 (%eax), %xmm0");
	asm volatile("sha1msg1 (0x12345678), %xmm0");
	asm volatile("sha1msg1 (%eax), %xmm3");
	asm volatile("sha1msg1 (%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1msg1 (%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 (%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg1 0x12(%eax), %xmm0");
	asm volatile("sha1msg1 0x12(%ebp), %xmm0");
	asm volatile("sha1msg1 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ebp), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax,%ecx,8), %xmm0");
	/* sha1msg2 xmm2/m128, xmm1 */
	asm volatile("sha1msg2 %xmm1, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm2");
	asm volatile("sha1msg2 (%eax), %xmm0");
	asm volatile("sha1msg2 (0x12345678), %xmm0");
	asm volatile("sha1msg2 (%eax), %xmm3");
	asm volatile("sha1msg2 (%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1msg2 (%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 (%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg2 0x12(%eax), %xmm0");
	asm volatile("sha1msg2 0x12(%ebp), %xmm0");
	asm volatile("sha1msg2 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ebp), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax,%ecx,8), %xmm0");
	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
	/* Note sha256rnds2 has an implicit operand 'xmm0' */
	asm volatile("sha256rnds2 %xmm4, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm2");
	asm volatile("sha256rnds2 (%eax), %xmm1");
	asm volatile("sha256rnds2 (0x12345678), %xmm1");
	asm volatile("sha256rnds2 (%eax), %xmm3");
	asm volatile("sha256rnds2 (%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(,%eax,1), %xmm1");
	asm volatile("sha256rnds2 (%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 (%eax,%ecx,8), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax), %xmm1");
	asm volatile("sha256rnds2 0x12(%ebp), %xmm1");
	asm volatile("sha256rnds2 0x12(%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%ebp,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax,%ecx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ebp), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ebp,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,8), %xmm1");
	/* sha256msg1 xmm2/m128, xmm1 */
	asm volatile("sha256msg1 %xmm1, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm2");
	asm volatile("sha256msg1 (%eax), %xmm0");
	asm volatile("sha256msg1 (0x12345678), %xmm0");
	asm volatile("sha256msg1 (%eax), %xmm3");
	asm volatile("sha256msg1 (%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha256msg1 (%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 (%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg1 0x12(%eax), %xmm0");
	asm volatile("sha256msg1 0x12(%ebp), %xmm0");
	asm volatile("sha256msg1 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ebp), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax,%ecx,8), %xmm0");
	/* sha256msg2 xmm2/m128, xmm1 */
	asm volatile("sha256msg2 %xmm1, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm2");
	asm volatile("sha256msg2 (%eax), %xmm0");
	asm volatile("sha256msg2 (0x12345678), %xmm0");
	asm volatile("sha256msg2 (%eax), %xmm3");
	asm volatile("sha256msg2 (%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha256msg2 (%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 (%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg2 0x12(%eax), %xmm0");
	asm volatile("sha256msg2 0x12(%ebp), %xmm0");
	asm volatile("sha256msg2 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ebp), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax,%ecx,8), %xmm0");
	/* clflushopt m8 */
	asm volatile("clflushopt (%eax)");
	asm volatile("clflushopt (0x12345678)");
	asm volatile("clflushopt 0x12345678(%eax,%ecx,8)");
	/* Also check instructions in the same group encoding as clflushopt */
	asm volatile("clflush (%eax)");
	asm volatile("sfence");
	/* clwb m8 */
	asm volatile("clwb (%eax)");
	asm volatile("clwb (0x12345678)");
	asm volatile("clwb 0x12345678(%eax,%ecx,8)");
	/* Also check instructions in the same group encoding as clwb */
	asm volatile("xsaveopt (%eax)");
	asm volatile("mfence");
	/* xsavec mem */
	asm volatile("xsavec (%eax)");
	asm volatile("xsavec (0x12345678)");
	asm volatile("xsavec 0x12345678(%eax,%ecx,8)");
	/* xsaves mem */
	asm volatile("xsaves (%eax)");
	asm volatile("xsaves (0x12345678)");
	asm volatile("xsaves 0x12345678(%eax,%ecx,8)");
	/* xrstors mem */
	asm volatile("xrstors (%eax)");
	asm volatile("xrstors (0x12345678)");
	asm volatile("xrstors 0x12345678(%eax,%ecx,8)");
#endif /* #ifndef __x86_64__ */
	/* Following line is a marker for the awk script - do not change */
	asm volatile("rdtsc"); /* Stop here */

	return 0;
}