/*
 * This file contains instructions for testing by the test titled:
 *
 *       "Test x86 instruction decoder - new instructions"
 *
 * Note that the 'Expecting' comment lines are consumed by the
 * gen-insn-x86-dat.awk script and have the format:
 *
 *       Expecting: <op> <branch> <rel>
 *
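 * For example, the indirect jumps below are annotated "jmp indirect 0".
 *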
 * If this file is changed, remember to run the gen-insn-x86-dat.sh
 * script and commit the result.
 *
 * Refer to insn-x86.c for more details.
 */

int main(void)
{
	/* Following line is a marker for the awk script - do not change */
	asm volatile("rdtsc"); /* Start here */

#ifdef __x86_64__
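
	/* MPX has four bounds registers, so %bnd0 and %bnd3 below exercise both ends of that range */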

	/* bndmk m64, bnd */

	asm volatile("bndmk (%rax), %bnd0");
	asm volatile("bndmk (%r8), %bnd0");
	asm volatile("bndmk (0x12345678), %bnd0");
	asm volatile("bndmk (%rax), %bnd3");
	asm volatile("bndmk (%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndmk (%rax,%rcx,1), %bnd0");
	asm volatile("bndmk (%rax,%rcx,8), %bnd0");
	asm volatile("bndmk 0x12(%rax), %bnd0");
	asm volatile("bndmk 0x12(%rbp), %bnd0");
	asm volatile("bndmk 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndmk 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndmk 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndmk 0x12345678(%rax), %bnd0");
	asm volatile("bndmk 0x12345678(%rbp), %bnd0");
	asm volatile("bndmk 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rax,%rcx,8), %bnd0");

	/* bndcl r/m64, bnd */

	asm volatile("bndcl (%rax), %bnd0");
	asm volatile("bndcl (%r8), %bnd0");
	asm volatile("bndcl (0x12345678), %bnd0");
	asm volatile("bndcl (%rax), %bnd3");
	asm volatile("bndcl (%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcl (%rax,%rcx,1), %bnd0");
	asm volatile("bndcl (%rax,%rcx,8), %bnd0");
	asm volatile("bndcl 0x12(%rax), %bnd0");
	asm volatile("bndcl 0x12(%rbp), %bnd0");
	asm volatile("bndcl 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcl 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcl 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcl 0x12345678(%rax), %bnd0");
	asm volatile("bndcl 0x12345678(%rbp), %bnd0");
	asm volatile("bndcl 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcl %rax, %bnd0");

	/* bndcu r/m64, bnd */

	asm volatile("bndcu (%rax), %bnd0");
	asm volatile("bndcu (%r8), %bnd0");
	asm volatile("bndcu (0x12345678), %bnd0");
	asm volatile("bndcu (%rax), %bnd3");
	asm volatile("bndcu (%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcu (%rax,%rcx,1), %bnd0");
	asm volatile("bndcu (%rax,%rcx,8), %bnd0");
	asm volatile("bndcu 0x12(%rax), %bnd0");
	asm volatile("bndcu 0x12(%rbp), %bnd0");
	asm volatile("bndcu 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcu 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcu 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcu 0x12345678(%rax), %bnd0");
	asm volatile("bndcu 0x12345678(%rbp), %bnd0");
	asm volatile("bndcu 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcu %rax, %bnd0");

	/* bndcn r/m64, bnd */

	asm volatile("bndcn (%rax), %bnd0");
	asm volatile("bndcn (%r8), %bnd0");
	asm volatile("bndcn (0x12345678), %bnd0");
	asm volatile("bndcn (%rax), %bnd3");
	asm volatile("bndcn (%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcn (%rax,%rcx,1), %bnd0");
	asm volatile("bndcn (%rax,%rcx,8), %bnd0");
	asm volatile("bndcn 0x12(%rax), %bnd0");
	asm volatile("bndcn 0x12(%rbp), %bnd0");
	asm volatile("bndcn 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcn 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcn 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcn 0x12345678(%rax), %bnd0");
	asm volatile("bndcn 0x12345678(%rbp), %bnd0");
	asm volatile("bndcn 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcn %rax, %bnd0");

	/* bndmov m128, bnd */

	asm volatile("bndmov (%rax), %bnd0");
	asm volatile("bndmov (%r8), %bnd0");
	asm volatile("bndmov (0x12345678), %bnd0");
	asm volatile("bndmov (%rax), %bnd3");
	asm volatile("bndmov (%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndmov (%rax,%rcx,1), %bnd0");
	asm volatile("bndmov (%rax,%rcx,8), %bnd0");
	asm volatile("bndmov 0x12(%rax), %bnd0");
	asm volatile("bndmov 0x12(%rbp), %bnd0");
	asm volatile("bndmov 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndmov 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndmov 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndmov 0x12345678(%rax), %bnd0");
	asm volatile("bndmov 0x12345678(%rbp), %bnd0");
	asm volatile("bndmov 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rax,%rcx,8), %bnd0");

	/* bndmov bnd, m128 */

	asm volatile("bndmov %bnd0, (%rax)");
	asm volatile("bndmov %bnd0, (%r8)");
	asm volatile("bndmov %bnd0, (0x12345678)");
	asm volatile("bndmov %bnd3, (%rax)");
	asm volatile("bndmov %bnd0, (%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(,%rax,1)");
	asm volatile("bndmov %bnd0, (%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, (%rax,%rcx,8)");
	asm volatile("bndmov %bnd0, 0x12(%rax)");
	asm volatile("bndmov %bnd0, 0x12(%rbp)");
	asm volatile("bndmov %bnd0, 0x12(%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12(%rbp,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,8)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax)");
	asm volatile("bndmov %bnd0, 0x12345678(%rbp)");
	asm volatile("bndmov %bnd0, 0x12345678(%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rbp,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,8)");

	/* bndmov bnd2, bnd1 */

	asm volatile("bndmov %bnd0, %bnd1");
	asm volatile("bndmov %bnd1, %bnd0");

	/* bndldx mib, bnd */
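	/* mib: the SIB index register carries the pointer value rather than a scaled index, so only ,1 scales appear in the bndldx/bndstx tests */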

	asm volatile("bndldx (%rax), %bnd0");
	asm volatile("bndldx (%r8), %bnd0");
	asm volatile("bndldx (0x12345678), %bnd0");
	asm volatile("bndldx (%rax), %bnd3");
	asm volatile("bndldx (%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndldx (%rax,%rcx,1), %bnd0");
	asm volatile("bndldx 0x12(%rax), %bnd0");
	asm volatile("bndldx 0x12(%rbp), %bnd0");
	asm volatile("bndldx 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndldx 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rax), %bnd0");
	asm volatile("bndldx 0x12345678(%rbp), %bnd0");
	asm volatile("bndldx 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rax,%rcx,1), %bnd0");

	/* bndstx bnd, mib */

	asm volatile("bndstx %bnd0, (%rax)");
	asm volatile("bndstx %bnd0, (%r8)");
	asm volatile("bndstx %bnd0, (0x12345678)");
	asm volatile("bndstx %bnd3, (%rax)");
	asm volatile("bndstx %bnd0, (%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(,%rax,1)");
	asm volatile("bndstx %bnd0, (%rax,%rcx,1)");
	asm volatile("bndstx %bnd0, 0x12(%rax)");
	asm volatile("bndstx %bnd0, 0x12(%rbp)");
	asm volatile("bndstx %bnd0, 0x12(%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12(%rbp,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12(%rax,%rcx,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rax)");
	asm volatile("bndstx %bnd0, 0x12345678(%rbp)");
	asm volatile("bndstx %bnd0, 0x12345678(%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rbp,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rax,%rcx,1)");

	/* bnd prefix on call, ret, jmp and all jcc */

	asm volatile("bnd call label1");  /* Expecting: call unconditional 0 */
	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect 0 */
	asm volatile("bnd ret");          /* Expecting: ret indirect 0 */
	asm volatile("bnd jmp label1");   /* Expecting: jmp unconditional 0 */
	asm volatile("bnd jmp label1");   /* Expecting: jmp unconditional 0 */
	asm volatile("bnd jmp *(%ecx)");  /* Expecting: jmp indirect 0 */
	asm volatile("bnd jne label1");   /* Expecting: jcc conditional 0 */

	/* sha1rnds4 imm8, xmm2/m128, xmm1 */

	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
	asm volatile("sha1rnds4 $0x91, %xmm8, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm8");
	asm volatile("sha1rnds4 $0x91, %xmm15, %xmm8");
	asm volatile("sha1rnds4 $0x91, (%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%r8), %xmm0");
	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax), %xmm3");
	asm volatile("sha1rnds4 $0x91, (%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rbp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1nexte xmm2/m128, xmm1 */

	asm volatile("sha1nexte %xmm1, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm2");
	asm volatile("sha1nexte %xmm8, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm8");
	asm volatile("sha1nexte %xmm15, %xmm8");
	asm volatile("sha1nexte (%rax), %xmm0");
	asm volatile("sha1nexte (%r8), %xmm0");
	asm volatile("sha1nexte (0x12345678), %xmm0");
	asm volatile("sha1nexte (%rax), %xmm3");
	asm volatile("sha1nexte (%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1nexte (%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte (%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12(%rax), %xmm0");
	asm volatile("sha1nexte 0x12(%rbp), %xmm0");
	asm volatile("sha1nexte 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rbp), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1msg1 xmm2/m128, xmm1 */

	asm volatile("sha1msg1 %xmm1, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm2");
	asm volatile("sha1msg1 %xmm8, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm8");
	asm volatile("sha1msg1 %xmm15, %xmm8");
	asm volatile("sha1msg1 (%rax), %xmm0");
	asm volatile("sha1msg1 (%r8), %xmm0");
	asm volatile("sha1msg1 (0x12345678), %xmm0");
	asm volatile("sha1msg1 (%rax), %xmm3");
	asm volatile("sha1msg1 (%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1msg1 (%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 (%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12(%rax), %xmm0");
	asm volatile("sha1msg1 0x12(%rbp), %xmm0");
	asm volatile("sha1msg1 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rbp), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1msg2 xmm2/m128, xmm1 */

	asm volatile("sha1msg2 %xmm1, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm2");
	asm volatile("sha1msg2 %xmm8, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm8");
	asm volatile("sha1msg2 %xmm15, %xmm8");
	asm volatile("sha1msg2 (%rax), %xmm0");
	asm volatile("sha1msg2 (%r8), %xmm0");
	asm volatile("sha1msg2 (0x12345678), %xmm0");
	asm volatile("sha1msg2 (%rax), %xmm3");
	asm volatile("sha1msg2 (%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1msg2 (%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 (%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12(%rax), %xmm0");
	asm volatile("sha1msg2 0x12(%rbp), %xmm0");
	asm volatile("sha1msg2 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rbp), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
	/* Note sha256rnds2 has an implicit operand 'xmm0' */
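	/* i.e. %xmm0 always supplies the third source and is simply not written in the two-operand AT&T form used below */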

	asm volatile("sha256rnds2 %xmm4, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm2");
	asm volatile("sha256rnds2 %xmm8, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm8");
	asm volatile("sha256rnds2 %xmm15, %xmm8");
	asm volatile("sha256rnds2 (%rax), %xmm1");
	asm volatile("sha256rnds2 (%r8), %xmm1");
	asm volatile("sha256rnds2 (0x12345678), %xmm1");
	asm volatile("sha256rnds2 (%rax), %xmm3");
	asm volatile("sha256rnds2 (%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(,%rax,1), %xmm1");
	asm volatile("sha256rnds2 (%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 (%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax), %xmm1");
	asm volatile("sha256rnds2 0x12(%rbp), %xmm1");
	asm volatile("sha256rnds2 0x12(%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rbp,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rbp), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rbp,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256msg1 xmm2/m128, xmm1 */

	asm volatile("sha256msg1 %xmm1, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm2");
	asm volatile("sha256msg1 %xmm8, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm8");
	asm volatile("sha256msg1 %xmm15, %xmm8");
	asm volatile("sha256msg1 (%rax), %xmm0");
	asm volatile("sha256msg1 (%r8), %xmm0");
	asm volatile("sha256msg1 (0x12345678), %xmm0");
	asm volatile("sha256msg1 (%rax), %xmm3");
	asm volatile("sha256msg1 (%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha256msg1 (%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 (%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12(%rax), %xmm0");
	asm volatile("sha256msg1 0x12(%rbp), %xmm0");
	asm volatile("sha256msg1 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rbp), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256msg2 xmm2/m128, xmm1 */

	asm volatile("sha256msg2 %xmm1, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm2");
	asm volatile("sha256msg2 %xmm8, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm8");
	asm volatile("sha256msg2 %xmm15, %xmm8");
	asm volatile("sha256msg2 (%rax), %xmm0");
	asm volatile("sha256msg2 (%r8), %xmm0");
	asm volatile("sha256msg2 (0x12345678), %xmm0");
	asm volatile("sha256msg2 (%rax), %xmm3");
	asm volatile("sha256msg2 (%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha256msg2 (%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 (%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12(%rax), %xmm0");
	asm volatile("sha256msg2 0x12(%rbp), %xmm0");
	asm volatile("sha256msg2 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rbp), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm15");

	/* clflushopt m8 */

	asm volatile("clflushopt (%rax)");
	asm volatile("clflushopt (%r8)");
	asm volatile("clflushopt (0x12345678)");
	asm volatile("clflushopt 0x12345678(%rax,%rcx,8)");
	asm volatile("clflushopt 0x12345678(%r8,%rcx,8)");

	/* Also check instructions in the same group encoding as clflushopt */
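	/* clflush is 0f ae /7, clflushopt adds a 66 prefix to it, and sfence (0f ae f8) is the register form of the same /7 group */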

	asm volatile("clflush (%rax)");
	asm volatile("clflush (%r8)");
	asm volatile("sfence");

	/* clwb m8 */

	asm volatile("clwb (%rax)");
	asm volatile("clwb (%r8)");
	asm volatile("clwb (0x12345678)");
	asm volatile("clwb 0x12345678(%rax,%rcx,8)");
	asm volatile("clwb 0x12345678(%r8,%rcx,8)");

	/* Also check instructions in the same group encoding as clwb */
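	/* xsaveopt is 0f ae /6, clwb adds a 66 prefix to it, and mfence (0f ae f0) is the register form of the same /6 group */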

	asm volatile("xsaveopt (%rax)");
	asm volatile("xsaveopt (%r8)");
	asm volatile("mfence");

	/* xsavec mem */

	asm volatile("xsavec (%rax)");
	asm volatile("xsavec (%r8)");
	asm volatile("xsavec (0x12345678)");
	asm volatile("xsavec 0x12345678(%rax,%rcx,8)");
	asm volatile("xsavec 0x12345678(%r8,%rcx,8)");

	/* xsaves mem */

	asm volatile("xsaves (%rax)");
	asm volatile("xsaves (%r8)");
	asm volatile("xsaves (0x12345678)");
	asm volatile("xsaves 0x12345678(%rax,%rcx,8)");
	asm volatile("xsaves 0x12345678(%r8,%rcx,8)");

	/* xrstors mem */

	asm volatile("xrstors (%rax)");
	asm volatile("xrstors (%r8)");
	asm volatile("xrstors (0x12345678)");
	asm volatile("xrstors 0x12345678(%rax,%rcx,8)");
	asm volatile("xrstors 0x12345678(%r8,%rcx,8)");

#else /* #ifdef __x86_64__ */

	/* bndmk m32, bnd */

	asm volatile("bndmk (%eax), %bnd0");
	asm volatile("bndmk (0x12345678), %bnd0");
	asm volatile("bndmk (%eax), %bnd3");
	asm volatile("bndmk (%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndmk (%eax,%ecx,1), %bnd0");
	asm volatile("bndmk (%eax,%ecx,8), %bnd0");
	asm volatile("bndmk 0x12(%eax), %bnd0");
	asm volatile("bndmk 0x12(%ebp), %bnd0");
	asm volatile("bndmk 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndmk 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndmk 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndmk 0x12345678(%eax), %bnd0");
	asm volatile("bndmk 0x12345678(%ebp), %bnd0");
	asm volatile("bndmk 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndmk 0x12345678(%eax,%ecx,8), %bnd0");

	/* bndcl r/m32, bnd */

	asm volatile("bndcl (%eax), %bnd0");
	asm volatile("bndcl (0x12345678), %bnd0");
	asm volatile("bndcl (%eax), %bnd3");
	asm volatile("bndcl (%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcl (%eax,%ecx,1), %bnd0");
	asm volatile("bndcl (%eax,%ecx,8), %bnd0");
	asm volatile("bndcl 0x12(%eax), %bnd0");
	asm volatile("bndcl 0x12(%ebp), %bnd0");
	asm volatile("bndcl 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcl 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcl 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcl 0x12345678(%eax), %bnd0");
	asm volatile("bndcl 0x12345678(%ebp), %bnd0");
	asm volatile("bndcl 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcl 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcl %eax, %bnd0");

	/* bndcu r/m32, bnd */

	asm volatile("bndcu (%eax), %bnd0");
	asm volatile("bndcu (0x12345678), %bnd0");
	asm volatile("bndcu (%eax), %bnd3");
	asm volatile("bndcu (%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcu (%eax,%ecx,1), %bnd0");
	asm volatile("bndcu (%eax,%ecx,8), %bnd0");
	asm volatile("bndcu 0x12(%eax), %bnd0");
	asm volatile("bndcu 0x12(%ebp), %bnd0");
	asm volatile("bndcu 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcu 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcu 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcu 0x12345678(%eax), %bnd0");
	asm volatile("bndcu 0x12345678(%ebp), %bnd0");
	asm volatile("bndcu 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcu 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcu %eax, %bnd0");

	/* bndcn r/m32, bnd */

	asm volatile("bndcn (%eax), %bnd0");
	asm volatile("bndcn (0x12345678), %bnd0");
	asm volatile("bndcn (%eax), %bnd3");
	asm volatile("bndcn (%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcn (%eax,%ecx,1), %bnd0");
	asm volatile("bndcn (%eax,%ecx,8), %bnd0");
	asm volatile("bndcn 0x12(%eax), %bnd0");
	asm volatile("bndcn 0x12(%ebp), %bnd0");
	asm volatile("bndcn 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcn 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcn 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcn 0x12345678(%eax), %bnd0");
	asm volatile("bndcn 0x12345678(%ebp), %bnd0");
	asm volatile("bndcn 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcn 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcn %eax, %bnd0");

	/* bndmov m64, bnd */

	asm volatile("bndmov (%eax), %bnd0");
	asm volatile("bndmov (0x12345678), %bnd0");
	asm volatile("bndmov (%eax), %bnd3");
	asm volatile("bndmov (%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndmov (%eax,%ecx,1), %bnd0");
	asm volatile("bndmov (%eax,%ecx,8), %bnd0");
	asm volatile("bndmov 0x12(%eax), %bnd0");
	asm volatile("bndmov 0x12(%ebp), %bnd0");
	asm volatile("bndmov 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndmov 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndmov 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndmov 0x12345678(%eax), %bnd0");
	asm volatile("bndmov 0x12345678(%ebp), %bnd0");
	asm volatile("bndmov 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndmov 0x12345678(%eax,%ecx,8), %bnd0");

	/* bndmov bnd, m64 */

	asm volatile("bndmov %bnd0, (%eax)");
	asm volatile("bndmov %bnd0, (0x12345678)");
	asm volatile("bndmov %bnd3, (%eax)");
	asm volatile("bndmov %bnd0, (%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(,%eax,1)");
	asm volatile("bndmov %bnd0, (%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, (%eax,%ecx,8)");
	asm volatile("bndmov %bnd0, 0x12(%eax)");
	asm volatile("bndmov %bnd0, 0x12(%ebp)");
	asm volatile("bndmov %bnd0, 0x12(%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12(%ebp,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,8)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax)");
	asm volatile("bndmov %bnd0, 0x12345678(%ebp)");
	asm volatile("bndmov %bnd0, 0x12345678(%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%ebp,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,8)");

	/* bndmov bnd2, bnd1 */

	asm volatile("bndmov %bnd0, %bnd1");
	asm volatile("bndmov %bnd1, %bnd0");

	/* bndldx mib, bnd */

	asm volatile("bndldx (%eax), %bnd0");
	asm volatile("bndldx (0x12345678), %bnd0");
	asm volatile("bndldx (%eax), %bnd3");
	asm volatile("bndldx (%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndldx (%eax,%ecx,1), %bnd0");
	asm volatile("bndldx 0x12(%eax), %bnd0");
	asm volatile("bndldx 0x12(%ebp), %bnd0");
	asm volatile("bndldx 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndldx 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndldx 0x12345678(%eax), %bnd0");
	asm volatile("bndldx 0x12345678(%ebp), %bnd0");
	asm volatile("bndldx 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%eax,%ecx,1), %bnd0");

	/* bndstx bnd, mib */

	asm volatile("bndstx %bnd0, (%eax)");
	asm volatile("bndstx %bnd0, (0x12345678)");
	asm volatile("bndstx %bnd3, (%eax)");
	asm volatile("bndstx %bnd0, (%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(,%eax,1)");
	asm volatile("bndstx %bnd0, (%eax,%ecx,1)");
	asm volatile("bndstx %bnd0, 0x12(%eax)");
	asm volatile("bndstx %bnd0, 0x12(%ebp)");
	asm volatile("bndstx %bnd0, 0x12(%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12(%ebp,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12(%eax,%ecx,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%eax)");
	asm volatile("bndstx %bnd0, 0x12345678(%ebp)");
	asm volatile("bndstx %bnd0, 0x12345678(%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%ebp,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%eax,%ecx,1)");

	/* bnd prefix on call, ret, jmp and all jcc */
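	/* The <rel> values below differ from the 64-bit section: i386 REL relocations keep the implicit addend -4 (0xfffffffc) in the instruction, whereas x86-64 RELA relocations leave 0 */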

	asm volatile("bnd call label1");  /* Expecting: call unconditional 0xfffffffc */
	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect 0 */
	asm volatile("bnd ret");          /* Expecting: ret indirect 0 */
	asm volatile("bnd jmp label1");   /* Expecting: jmp unconditional 0xfffffffc */
	asm volatile("bnd jmp label1");   /* Expecting: jmp unconditional 0xfffffffc */
	asm volatile("bnd jmp *(%ecx)");  /* Expecting: jmp indirect 0 */
	asm volatile("bnd jne label1");   /* Expecting: jcc conditional 0xfffffffc */

	/* sha1rnds4 imm8, xmm2/m128, xmm1 */

	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
	asm volatile("sha1rnds4 $0x91, (%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax), %xmm3");
	asm volatile("sha1rnds4 $0x91, (%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ebp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1nexte xmm2/m128, xmm1 */

	asm volatile("sha1nexte %xmm1, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm2");
	asm volatile("sha1nexte (%eax), %xmm0");
	asm volatile("sha1nexte (0x12345678), %xmm0");
	asm volatile("sha1nexte (%eax), %xmm3");
	asm volatile("sha1nexte (%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1nexte (%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte (%eax,%ecx,8), %xmm0");
	asm volatile("sha1nexte 0x12(%eax), %xmm0");
	asm volatile("sha1nexte 0x12(%ebp), %xmm0");
	asm volatile("sha1nexte 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ebp), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1msg1 xmm2/m128, xmm1 */

	asm volatile("sha1msg1 %xmm1, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm2");
	asm volatile("sha1msg1 (%eax), %xmm0");
	asm volatile("sha1msg1 (0x12345678), %xmm0");
	asm volatile("sha1msg1 (%eax), %xmm3");
	asm volatile("sha1msg1 (%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1msg1 (%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 (%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg1 0x12(%eax), %xmm0");
	asm volatile("sha1msg1 0x12(%ebp), %xmm0");
	asm volatile("sha1msg1 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ebp), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1msg2 xmm2/m128, xmm1 */

	asm volatile("sha1msg2 %xmm1, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm2");
	asm volatile("sha1msg2 (%eax), %xmm0");
	asm volatile("sha1msg2 (0x12345678), %xmm0");
	asm volatile("sha1msg2 (%eax), %xmm3");
	asm volatile("sha1msg2 (%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1msg2 (%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 (%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg2 0x12(%eax), %xmm0");
	asm volatile("sha1msg2 0x12(%ebp), %xmm0");
	asm volatile("sha1msg2 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ebp), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
	/* Note sha256rnds2 has an implicit operand 'xmm0' */

	asm volatile("sha256rnds2 %xmm4, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm2");
	asm volatile("sha256rnds2 (%eax), %xmm1");
	asm volatile("sha256rnds2 (0x12345678), %xmm1");
	asm volatile("sha256rnds2 (%eax), %xmm3");
	asm volatile("sha256rnds2 (%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(,%eax,1), %xmm1");
	asm volatile("sha256rnds2 (%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 (%eax,%ecx,8), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax), %xmm1");
	asm volatile("sha256rnds2 0x12(%ebp), %xmm1");
	asm volatile("sha256rnds2 0x12(%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%ebp,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax,%ecx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ebp), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ebp,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,8), %xmm1");

	/* sha256msg1 xmm2/m128, xmm1 */

	asm volatile("sha256msg1 %xmm1, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm2");
	asm volatile("sha256msg1 (%eax), %xmm0");
	asm volatile("sha256msg1 (0x12345678), %xmm0");
	asm volatile("sha256msg1 (%eax), %xmm3");
	asm volatile("sha256msg1 (%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha256msg1 (%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 (%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg1 0x12(%eax), %xmm0");
	asm volatile("sha256msg1 0x12(%ebp), %xmm0");
	asm volatile("sha256msg1 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ebp), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha256msg2 xmm2/m128, xmm1 */

	asm volatile("sha256msg2 %xmm1, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm2");
	asm volatile("sha256msg2 (%eax), %xmm0");
	asm volatile("sha256msg2 (0x12345678), %xmm0");
	asm volatile("sha256msg2 (%eax), %xmm3");
	asm volatile("sha256msg2 (%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha256msg2 (%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 (%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg2 0x12(%eax), %xmm0");
	asm volatile("sha256msg2 0x12(%ebp), %xmm0");
	asm volatile("sha256msg2 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ebp), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax,%ecx,8), %xmm0");

	/* clflushopt m8 */

	asm volatile("clflushopt (%eax)");
	asm volatile("clflushopt (0x12345678)");
	asm volatile("clflushopt 0x12345678(%eax,%ecx,8)");

	/* Also check instructions in the same group encoding as clflushopt */

	asm volatile("clflush (%eax)");
	asm volatile("sfence");

	/* clwb m8 */

	asm volatile("clwb (%eax)");
	asm volatile("clwb (0x12345678)");
	asm volatile("clwb 0x12345678(%eax,%ecx,8)");

	/* Also check instructions in the same group encoding as clwb */

	asm volatile("xsaveopt (%eax)");
	asm volatile("mfence");

	/* xsavec mem */

	asm volatile("xsavec (%eax)");
	asm volatile("xsavec (0x12345678)");
	asm volatile("xsavec 0x12345678(%eax,%ecx,8)");

	/* xsaves mem */

	asm volatile("xsaves (%eax)");
	asm volatile("xsaves (0x12345678)");
	asm volatile("xsaves 0x12345678(%eax,%ecx,8)");

	/* xrstors mem */

	asm volatile("xrstors (%eax)");
	asm volatile("xrstors (0x12345678)");
	asm volatile("xrstors 0x12345678(%eax,%ecx,8)");

#endif /* #ifndef __x86_64__ */

	/* pcommit */

	asm volatile("pcommit");

	/* Following line is a marker for the awk script - do not change */
	asm volatile("rdtsc"); /* Stop here */

	return 0;
}