/*
 * tools/testing/selftests/kvm/include/x86_64/processor.h
 *
 * Copyright (C) 2018, Google LLC.
 *
 * This work is licensed under the terms of the GNU GPL, version 2.
 */
  9. #ifndef SELFTEST_KVM_PROCESSOR_H
  10. #define SELFTEST_KVM_PROCESSOR_H
  11. #include <assert.h>
  12. #include <stdint.h>
  13. #define X86_EFLAGS_FIXED (1u << 1)
  14. #define X86_CR4_VME (1ul << 0)
  15. #define X86_CR4_PVI (1ul << 1)
  16. #define X86_CR4_TSD (1ul << 2)
  17. #define X86_CR4_DE (1ul << 3)
  18. #define X86_CR4_PSE (1ul << 4)
  19. #define X86_CR4_PAE (1ul << 5)
  20. #define X86_CR4_MCE (1ul << 6)
  21. #define X86_CR4_PGE (1ul << 7)
  22. #define X86_CR4_PCE (1ul << 8)
  23. #define X86_CR4_OSFXSR (1ul << 9)
  24. #define X86_CR4_OSXMMEXCPT (1ul << 10)
  25. #define X86_CR4_UMIP (1ul << 11)
  26. #define X86_CR4_VMXE (1ul << 13)
  27. #define X86_CR4_SMXE (1ul << 14)
  28. #define X86_CR4_FSGSBASE (1ul << 16)
  29. #define X86_CR4_PCIDE (1ul << 17)
  30. #define X86_CR4_OSXSAVE (1ul << 18)
  31. #define X86_CR4_SMEP (1ul << 20)
  32. #define X86_CR4_SMAP (1ul << 21)
  33. #define X86_CR4_PKE (1ul << 22)
  34. /* The enum values match the intruction encoding of each register */
  35. enum x86_register {
  36. RAX = 0,
  37. RCX,
  38. RDX,
  39. RBX,
  40. RSP,
  41. RBP,
  42. RSI,
  43. RDI,
  44. R8,
  45. R9,
  46. R10,
  47. R11,
  48. R12,
  49. R13,
  50. R14,
  51. R15,
  52. };
  53. struct desc64 {
  54. uint16_t limit0;
  55. uint16_t base0;
  56. unsigned base1:8, s:1, type:4, dpl:2, p:1;
  57. unsigned limit1:4, avl:1, l:1, db:1, g:1, base2:8;
  58. uint32_t base3;
  59. uint32_t zero1;
  60. } __attribute__((packed));
  61. struct desc_ptr {
  62. uint16_t size;
  63. uint64_t address;
  64. } __attribute__((packed));
  65. static inline uint64_t get_desc64_base(const struct desc64 *desc)
  66. {
  67. return ((uint64_t)desc->base3 << 32) |
  68. (desc->base0 | ((desc->base1) << 16) | ((desc->base2) << 24));
  69. }
  70. static inline uint64_t rdtsc(void)
  71. {
  72. uint32_t eax, edx;
  73. /*
  74. * The lfence is to wait (on Intel CPUs) until all previous
  75. * instructions have been executed.
  76. */
  77. __asm__ __volatile__("lfence; rdtsc" : "=a"(eax), "=d"(edx));
  78. return ((uint64_t)edx) << 32 | eax;
  79. }
  80. static inline uint64_t rdtscp(uint32_t *aux)
  81. {
  82. uint32_t eax, edx;
  83. __asm__ __volatile__("rdtscp" : "=a"(eax), "=d"(edx), "=c"(*aux));
  84. return ((uint64_t)edx) << 32 | eax;
  85. }
  86. static inline uint64_t rdmsr(uint32_t msr)
  87. {
  88. uint32_t a, d;
  89. __asm__ __volatile__("rdmsr" : "=a"(a), "=d"(d) : "c"(msr) : "memory");
  90. return a | ((uint64_t) d << 32);
  91. }
  92. static inline void wrmsr(uint32_t msr, uint64_t value)
  93. {
  94. uint32_t a = value;
  95. uint32_t d = value >> 32;
  96. __asm__ __volatile__("wrmsr" :: "a"(a), "d"(d), "c"(msr) : "memory");
  97. }
  98. static inline uint16_t inw(uint16_t port)
  99. {
  100. uint16_t tmp;
  101. __asm__ __volatile__("in %%dx, %%ax"
  102. : /* output */ "=a" (tmp)
  103. : /* input */ "d" (port));
  104. return tmp;
  105. }
  106. static inline uint16_t get_es(void)
  107. {
  108. uint16_t es;
  109. __asm__ __volatile__("mov %%es, %[es]"
  110. : /* output */ [es]"=rm"(es));
  111. return es;
  112. }
  113. static inline uint16_t get_cs(void)
  114. {
  115. uint16_t cs;
  116. __asm__ __volatile__("mov %%cs, %[cs]"
  117. : /* output */ [cs]"=rm"(cs));
  118. return cs;
  119. }
  120. static inline uint16_t get_ss(void)
  121. {
  122. uint16_t ss;
  123. __asm__ __volatile__("mov %%ss, %[ss]"
  124. : /* output */ [ss]"=rm"(ss));
  125. return ss;
  126. }
  127. static inline uint16_t get_ds(void)
  128. {
  129. uint16_t ds;
  130. __asm__ __volatile__("mov %%ds, %[ds]"
  131. : /* output */ [ds]"=rm"(ds));
  132. return ds;
  133. }
  134. static inline uint16_t get_fs(void)
  135. {
  136. uint16_t fs;
  137. __asm__ __volatile__("mov %%fs, %[fs]"
  138. : /* output */ [fs]"=rm"(fs));
  139. return fs;
  140. }
  141. static inline uint16_t get_gs(void)
  142. {
  143. uint16_t gs;
  144. __asm__ __volatile__("mov %%gs, %[gs]"
  145. : /* output */ [gs]"=rm"(gs));
  146. return gs;
  147. }
  148. static inline uint16_t get_tr(void)
  149. {
  150. uint16_t tr;
  151. __asm__ __volatile__("str %[tr]"
  152. : /* output */ [tr]"=rm"(tr));
  153. return tr;
  154. }
  155. static inline uint64_t get_cr0(void)
  156. {
  157. uint64_t cr0;
  158. __asm__ __volatile__("mov %%cr0, %[cr0]"
  159. : /* output */ [cr0]"=r"(cr0));
  160. return cr0;
  161. }
  162. static inline uint64_t get_cr3(void)
  163. {
  164. uint64_t cr3;
  165. __asm__ __volatile__("mov %%cr3, %[cr3]"
  166. : /* output */ [cr3]"=r"(cr3));
  167. return cr3;
  168. }
  169. static inline uint64_t get_cr4(void)
  170. {
  171. uint64_t cr4;
  172. __asm__ __volatile__("mov %%cr4, %[cr4]"
  173. : /* output */ [cr4]"=r"(cr4));
  174. return cr4;
  175. }
  176. static inline void set_cr4(uint64_t val)
  177. {
  178. __asm__ __volatile__("mov %0, %%cr4" : : "r" (val) : "memory");
  179. }
  180. static inline uint64_t get_gdt_base(void)
  181. {
  182. struct desc_ptr gdt;
  183. __asm__ __volatile__("sgdt %[gdt]"
  184. : /* output */ [gdt]"=m"(gdt));
  185. return gdt.address;
  186. }
  187. static inline uint64_t get_idt_base(void)
  188. {
  189. struct desc_ptr idt;
  190. __asm__ __volatile__("sidt %[idt]"
  191. : /* output */ [idt]"=m"(idt));
  192. return idt.address;
  193. }
  194. #define SET_XMM(__var, __xmm) \
  195. asm volatile("movq %0, %%"#__xmm : : "r"(__var) : #__xmm)
  196. static inline void set_xmm(int n, unsigned long val)
  197. {
  198. switch (n) {
  199. case 0:
  200. SET_XMM(val, xmm0);
  201. break;
  202. case 1:
  203. SET_XMM(val, xmm1);
  204. break;
  205. case 2:
  206. SET_XMM(val, xmm2);
  207. break;
  208. case 3:
  209. SET_XMM(val, xmm3);
  210. break;
  211. case 4:
  212. SET_XMM(val, xmm4);
  213. break;
  214. case 5:
  215. SET_XMM(val, xmm5);
  216. break;
  217. case 6:
  218. SET_XMM(val, xmm6);
  219. break;
  220. case 7:
  221. SET_XMM(val, xmm7);
  222. break;
  223. }
  224. }
  225. typedef unsigned long v1di __attribute__ ((vector_size (8)));
  226. static inline unsigned long get_xmm(int n)
  227. {
  228. assert(n >= 0 && n <= 7);
  229. register v1di xmm0 __asm__("%xmm0");
  230. register v1di xmm1 __asm__("%xmm1");
  231. register v1di xmm2 __asm__("%xmm2");
  232. register v1di xmm3 __asm__("%xmm3");
  233. register v1di xmm4 __asm__("%xmm4");
  234. register v1di xmm5 __asm__("%xmm5");
  235. register v1di xmm6 __asm__("%xmm6");
  236. register v1di xmm7 __asm__("%xmm7");
  237. switch (n) {
  238. case 0:
  239. return (unsigned long)xmm0;
  240. case 1:
  241. return (unsigned long)xmm1;
  242. case 2:
  243. return (unsigned long)xmm2;
  244. case 3:
  245. return (unsigned long)xmm3;
  246. case 4:
  247. return (unsigned long)xmm4;
  248. case 5:
  249. return (unsigned long)xmm5;
  250. case 6:
  251. return (unsigned long)xmm6;
  252. case 7:
  253. return (unsigned long)xmm7;
  254. }
  255. return 0;
  256. }
  257. struct kvm_x86_state;
  258. struct kvm_x86_state *vcpu_save_state(struct kvm_vm *vm, uint32_t vcpuid);
  259. void vcpu_load_state(struct kvm_vm *vm, uint32_t vcpuid,
  260. struct kvm_x86_state *state);
  261. struct kvm_cpuid2 *kvm_get_supported_cpuid(void);
  262. void vcpu_set_cpuid(struct kvm_vm *vm, uint32_t vcpuid,
  263. struct kvm_cpuid2 *cpuid);
  264. struct kvm_cpuid_entry2 *
  265. kvm_get_supported_cpuid_index(uint32_t function, uint32_t index);
  266. static inline struct kvm_cpuid_entry2 *
  267. kvm_get_supported_cpuid_entry(uint32_t function)
  268. {
  269. return kvm_get_supported_cpuid_index(function, 0);
  270. }
  271. uint64_t vcpu_get_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index);
  272. void vcpu_set_msr(struct kvm_vm *vm, uint32_t vcpuid, uint64_t msr_index,
  273. uint64_t msr_value);
  274. /*
  275. * Basic CPU control in CR0
  276. */
  277. #define X86_CR0_PE (1UL<<0) /* Protection Enable */
  278. #define X86_CR0_MP (1UL<<1) /* Monitor Coprocessor */
  279. #define X86_CR0_EM (1UL<<2) /* Emulation */
  280. #define X86_CR0_TS (1UL<<3) /* Task Switched */
  281. #define X86_CR0_ET (1UL<<4) /* Extension Type */
  282. #define X86_CR0_NE (1UL<<5) /* Numeric Error */
  283. #define X86_CR0_WP (1UL<<16) /* Write Protect */
  284. #define X86_CR0_AM (1UL<<18) /* Alignment Mask */
  285. #define X86_CR0_NW (1UL<<29) /* Not Write-through */
  286. #define X86_CR0_CD (1UL<<30) /* Cache Disable */
  287. #define X86_CR0_PG (1UL<<31) /* Paging */
  288. /*
  289. * CPU model specific register (MSR) numbers.
  290. */
  291. /* x86-64 specific MSRs */
  292. #define MSR_EFER 0xc0000080 /* extended feature register */
  293. #define MSR_STAR 0xc0000081 /* legacy mode SYSCALL target */
  294. #define MSR_LSTAR 0xc0000082 /* long mode SYSCALL target */
  295. #define MSR_CSTAR 0xc0000083 /* compat mode SYSCALL target */
  296. #define MSR_SYSCALL_MASK 0xc0000084 /* EFLAGS mask for syscall */
  297. #define MSR_FS_BASE 0xc0000100 /* 64bit FS base */
  298. #define MSR_GS_BASE 0xc0000101 /* 64bit GS base */
  299. #define MSR_KERNEL_GS_BASE 0xc0000102 /* SwapGS GS shadow */
  300. #define MSR_TSC_AUX 0xc0000103 /* Auxiliary TSC */
  301. /* EFER bits: */
  302. #define EFER_SCE (1<<0) /* SYSCALL/SYSRET */
  303. #define EFER_LME (1<<8) /* Long mode enable */
  304. #define EFER_LMA (1<<10) /* Long mode active (read-only) */
  305. #define EFER_NX (1<<11) /* No execute enable */
  306. #define EFER_SVME (1<<12) /* Enable virtualization */
  307. #define EFER_LMSLE (1<<13) /* Long Mode Segment Limit Enable */
  308. #define EFER_FFXSR (1<<14) /* Enable Fast FXSAVE/FXRSTOR */
  309. /* Intel MSRs. Some also available on other CPUs */
  310. #define MSR_PPIN_CTL 0x0000004e
  311. #define MSR_PPIN 0x0000004f
  312. #define MSR_IA32_PERFCTR0 0x000000c1
  313. #define MSR_IA32_PERFCTR1 0x000000c2
  314. #define MSR_FSB_FREQ 0x000000cd
  315. #define MSR_PLATFORM_INFO 0x000000ce
  316. #define MSR_PLATFORM_INFO_CPUID_FAULT_BIT 31
  317. #define MSR_PLATFORM_INFO_CPUID_FAULT BIT_ULL(MSR_PLATFORM_INFO_CPUID_FAULT_BIT)
  318. #define MSR_PKG_CST_CONFIG_CONTROL 0x000000e2
  319. #define NHM_C3_AUTO_DEMOTE (1UL << 25)
  320. #define NHM_C1_AUTO_DEMOTE (1UL << 26)
  321. #define ATM_LNC_C6_AUTO_DEMOTE (1UL << 25)
  322. #define SNB_C1_AUTO_UNDEMOTE (1UL << 27)
  323. #define SNB_C3_AUTO_UNDEMOTE (1UL << 28)
  324. #define MSR_MTRRcap 0x000000fe
  325. #define MSR_IA32_BBL_CR_CTL 0x00000119
  326. #define MSR_IA32_BBL_CR_CTL3 0x0000011e
  327. #define MSR_IA32_SYSENTER_CS 0x00000174
  328. #define MSR_IA32_SYSENTER_ESP 0x00000175
  329. #define MSR_IA32_SYSENTER_EIP 0x00000176
  330. #define MSR_IA32_MCG_CAP 0x00000179
  331. #define MSR_IA32_MCG_STATUS 0x0000017a
  332. #define MSR_IA32_MCG_CTL 0x0000017b
  333. #define MSR_IA32_MCG_EXT_CTL 0x000004d0
  334. #define MSR_OFFCORE_RSP_0 0x000001a6
  335. #define MSR_OFFCORE_RSP_1 0x000001a7
  336. #define MSR_TURBO_RATIO_LIMIT 0x000001ad
  337. #define MSR_TURBO_RATIO_LIMIT1 0x000001ae
  338. #define MSR_TURBO_RATIO_LIMIT2 0x000001af
  339. #define MSR_LBR_SELECT 0x000001c8
  340. #define MSR_LBR_TOS 0x000001c9
  341. #define MSR_LBR_NHM_FROM 0x00000680
  342. #define MSR_LBR_NHM_TO 0x000006c0
  343. #define MSR_LBR_CORE_FROM 0x00000040
  344. #define MSR_LBR_CORE_TO 0x00000060
  345. #define MSR_LBR_INFO_0 0x00000dc0 /* ... 0xddf for _31 */
  346. #define LBR_INFO_MISPRED BIT_ULL(63)
  347. #define LBR_INFO_IN_TX BIT_ULL(62)
  348. #define LBR_INFO_ABORT BIT_ULL(61)
  349. #define LBR_INFO_CYCLES 0xffff
  350. #define MSR_IA32_PEBS_ENABLE 0x000003f1
  351. #define MSR_IA32_DS_AREA 0x00000600
  352. #define MSR_IA32_PERF_CAPABILITIES 0x00000345
  353. #define MSR_PEBS_LD_LAT_THRESHOLD 0x000003f6
  354. #define MSR_IA32_RTIT_CTL 0x00000570
  355. #define MSR_IA32_RTIT_STATUS 0x00000571
  356. #define MSR_IA32_RTIT_ADDR0_A 0x00000580
  357. #define MSR_IA32_RTIT_ADDR0_B 0x00000581
  358. #define MSR_IA32_RTIT_ADDR1_A 0x00000582
  359. #define MSR_IA32_RTIT_ADDR1_B 0x00000583
  360. #define MSR_IA32_RTIT_ADDR2_A 0x00000584
  361. #define MSR_IA32_RTIT_ADDR2_B 0x00000585
  362. #define MSR_IA32_RTIT_ADDR3_A 0x00000586
  363. #define MSR_IA32_RTIT_ADDR3_B 0x00000587
  364. #define MSR_IA32_RTIT_CR3_MATCH 0x00000572
  365. #define MSR_IA32_RTIT_OUTPUT_BASE 0x00000560
  366. #define MSR_IA32_RTIT_OUTPUT_MASK 0x00000561
  367. #define MSR_MTRRfix64K_00000 0x00000250
  368. #define MSR_MTRRfix16K_80000 0x00000258
  369. #define MSR_MTRRfix16K_A0000 0x00000259
  370. #define MSR_MTRRfix4K_C0000 0x00000268
  371. #define MSR_MTRRfix4K_C8000 0x00000269
  372. #define MSR_MTRRfix4K_D0000 0x0000026a
  373. #define MSR_MTRRfix4K_D8000 0x0000026b
  374. #define MSR_MTRRfix4K_E0000 0x0000026c
  375. #define MSR_MTRRfix4K_E8000 0x0000026d
  376. #define MSR_MTRRfix4K_F0000 0x0000026e
  377. #define MSR_MTRRfix4K_F8000 0x0000026f
  378. #define MSR_MTRRdefType 0x000002ff
  379. #define MSR_IA32_CR_PAT 0x00000277
  380. #define MSR_IA32_DEBUGCTLMSR 0x000001d9
  381. #define MSR_IA32_LASTBRANCHFROMIP 0x000001db
  382. #define MSR_IA32_LASTBRANCHTOIP 0x000001dc
  383. #define MSR_IA32_LASTINTFROMIP 0x000001dd
  384. #define MSR_IA32_LASTINTTOIP 0x000001de
  385. /* DEBUGCTLMSR bits (others vary by model): */
  386. #define DEBUGCTLMSR_LBR (1UL << 0) /* last branch recording */
  387. #define DEBUGCTLMSR_BTF_SHIFT 1
  388. #define DEBUGCTLMSR_BTF (1UL << 1) /* single-step on branches */
  389. #define DEBUGCTLMSR_TR (1UL << 6)
  390. #define DEBUGCTLMSR_BTS (1UL << 7)
  391. #define DEBUGCTLMSR_BTINT (1UL << 8)
  392. #define DEBUGCTLMSR_BTS_OFF_OS (1UL << 9)
  393. #define DEBUGCTLMSR_BTS_OFF_USR (1UL << 10)
  394. #define DEBUGCTLMSR_FREEZE_LBRS_ON_PMI (1UL << 11)
  395. #define DEBUGCTLMSR_FREEZE_IN_SMM_BIT 14
  396. #define DEBUGCTLMSR_FREEZE_IN_SMM (1UL << DEBUGCTLMSR_FREEZE_IN_SMM_BIT)
  397. #define MSR_PEBS_FRONTEND 0x000003f7
  398. #define MSR_IA32_POWER_CTL 0x000001fc
  399. #define MSR_IA32_MC0_CTL 0x00000400
  400. #define MSR_IA32_MC0_STATUS 0x00000401
  401. #define MSR_IA32_MC0_ADDR 0x00000402
  402. #define MSR_IA32_MC0_MISC 0x00000403
  403. /* C-state Residency Counters */
  404. #define MSR_PKG_C3_RESIDENCY 0x000003f8
  405. #define MSR_PKG_C6_RESIDENCY 0x000003f9
  406. #define MSR_ATOM_PKG_C6_RESIDENCY 0x000003fa
  407. #define MSR_PKG_C7_RESIDENCY 0x000003fa
  408. #define MSR_CORE_C3_RESIDENCY 0x000003fc
  409. #define MSR_CORE_C6_RESIDENCY 0x000003fd
  410. #define MSR_CORE_C7_RESIDENCY 0x000003fe
  411. #define MSR_KNL_CORE_C6_RESIDENCY 0x000003ff
  412. #define MSR_PKG_C2_RESIDENCY 0x0000060d
  413. #define MSR_PKG_C8_RESIDENCY 0x00000630
  414. #define MSR_PKG_C9_RESIDENCY 0x00000631
  415. #define MSR_PKG_C10_RESIDENCY 0x00000632
  416. /* Interrupt Response Limit */
  417. #define MSR_PKGC3_IRTL 0x0000060a
  418. #define MSR_PKGC6_IRTL 0x0000060b
  419. #define MSR_PKGC7_IRTL 0x0000060c
  420. #define MSR_PKGC8_IRTL 0x00000633
  421. #define MSR_PKGC9_IRTL 0x00000634
  422. #define MSR_PKGC10_IRTL 0x00000635
  423. /* Run Time Average Power Limiting (RAPL) Interface */
  424. #define MSR_RAPL_POWER_UNIT 0x00000606
  425. #define MSR_PKG_POWER_LIMIT 0x00000610
  426. #define MSR_PKG_ENERGY_STATUS 0x00000611
  427. #define MSR_PKG_PERF_STATUS 0x00000613
  428. #define MSR_PKG_POWER_INFO 0x00000614
  429. #define MSR_DRAM_POWER_LIMIT 0x00000618
  430. #define MSR_DRAM_ENERGY_STATUS 0x00000619
  431. #define MSR_DRAM_PERF_STATUS 0x0000061b
  432. #define MSR_DRAM_POWER_INFO 0x0000061c
  433. #define MSR_PP0_POWER_LIMIT 0x00000638
  434. #define MSR_PP0_ENERGY_STATUS 0x00000639
  435. #define MSR_PP0_POLICY 0x0000063a
  436. #define MSR_PP0_PERF_STATUS 0x0000063b
  437. #define MSR_PP1_POWER_LIMIT 0x00000640
  438. #define MSR_PP1_ENERGY_STATUS 0x00000641
  439. #define MSR_PP1_POLICY 0x00000642
  440. /* Config TDP MSRs */
  441. #define MSR_CONFIG_TDP_NOMINAL 0x00000648
  442. #define MSR_CONFIG_TDP_LEVEL_1 0x00000649
  443. #define MSR_CONFIG_TDP_LEVEL_2 0x0000064A
  444. #define MSR_CONFIG_TDP_CONTROL 0x0000064B
  445. #define MSR_TURBO_ACTIVATION_RATIO 0x0000064C
  446. #define MSR_PLATFORM_ENERGY_STATUS 0x0000064D
  447. #define MSR_PKG_WEIGHTED_CORE_C0_RES 0x00000658
  448. #define MSR_PKG_ANY_CORE_C0_RES 0x00000659
  449. #define MSR_PKG_ANY_GFXE_C0_RES 0x0000065A
  450. #define MSR_PKG_BOTH_CORE_GFXE_C0_RES 0x0000065B
  451. #define MSR_CORE_C1_RES 0x00000660
  452. #define MSR_MODULE_C6_RES_MS 0x00000664
  453. #define MSR_CC6_DEMOTION_POLICY_CONFIG 0x00000668
  454. #define MSR_MC6_DEMOTION_POLICY_CONFIG 0x00000669
  455. #define MSR_ATOM_CORE_RATIOS 0x0000066a
  456. #define MSR_ATOM_CORE_VIDS 0x0000066b
  457. #define MSR_ATOM_CORE_TURBO_RATIOS 0x0000066c
  458. #define MSR_ATOM_CORE_TURBO_VIDS 0x0000066d
  459. #define MSR_CORE_PERF_LIMIT_REASONS 0x00000690
  460. #define MSR_GFX_PERF_LIMIT_REASONS 0x000006B0
  461. #define MSR_RING_PERF_LIMIT_REASONS 0x000006B1
  462. /* Hardware P state interface */
  463. #define MSR_PPERF 0x0000064e
  464. #define MSR_PERF_LIMIT_REASONS 0x0000064f
  465. #define MSR_PM_ENABLE 0x00000770
  466. #define MSR_HWP_CAPABILITIES 0x00000771
  467. #define MSR_HWP_REQUEST_PKG 0x00000772
  468. #define MSR_HWP_INTERRUPT 0x00000773
  469. #define MSR_HWP_REQUEST 0x00000774
  470. #define MSR_HWP_STATUS 0x00000777
  471. /* CPUID.6.EAX */
  472. #define HWP_BASE_BIT (1<<7)
  473. #define HWP_NOTIFICATIONS_BIT (1<<8)
  474. #define HWP_ACTIVITY_WINDOW_BIT (1<<9)
  475. #define HWP_ENERGY_PERF_PREFERENCE_BIT (1<<10)
  476. #define HWP_PACKAGE_LEVEL_REQUEST_BIT (1<<11)
  477. /* IA32_HWP_CAPABILITIES */
  478. #define HWP_HIGHEST_PERF(x) (((x) >> 0) & 0xff)
  479. #define HWP_GUARANTEED_PERF(x) (((x) >> 8) & 0xff)
  480. #define HWP_MOSTEFFICIENT_PERF(x) (((x) >> 16) & 0xff)
  481. #define HWP_LOWEST_PERF(x) (((x) >> 24) & 0xff)
  482. /* IA32_HWP_REQUEST */
  483. #define HWP_MIN_PERF(x) (x & 0xff)
  484. #define HWP_MAX_PERF(x) ((x & 0xff) << 8)
  485. #define HWP_DESIRED_PERF(x) ((x & 0xff) << 16)
  486. #define HWP_ENERGY_PERF_PREFERENCE(x) (((unsigned long long) x & 0xff) << 24)
  487. #define HWP_EPP_PERFORMANCE 0x00
  488. #define HWP_EPP_BALANCE_PERFORMANCE 0x80
  489. #define HWP_EPP_BALANCE_POWERSAVE 0xC0
  490. #define HWP_EPP_POWERSAVE 0xFF
  491. #define HWP_ACTIVITY_WINDOW(x) ((unsigned long long)(x & 0xff3) << 32)
  492. #define HWP_PACKAGE_CONTROL(x) ((unsigned long long)(x & 0x1) << 42)
  493. /* IA32_HWP_STATUS */
  494. #define HWP_GUARANTEED_CHANGE(x) (x & 0x1)
  495. #define HWP_EXCURSION_TO_MINIMUM(x) (x & 0x4)
  496. /* IA32_HWP_INTERRUPT */
  497. #define HWP_CHANGE_TO_GUARANTEED_INT(x) (x & 0x1)
  498. #define HWP_EXCURSION_TO_MINIMUM_INT(x) (x & 0x2)
  499. #define MSR_AMD64_MC0_MASK 0xc0010044
  500. #define MSR_IA32_MCx_CTL(x) (MSR_IA32_MC0_CTL + 4*(x))
  501. #define MSR_IA32_MCx_STATUS(x) (MSR_IA32_MC0_STATUS + 4*(x))
  502. #define MSR_IA32_MCx_ADDR(x) (MSR_IA32_MC0_ADDR + 4*(x))
  503. #define MSR_IA32_MCx_MISC(x) (MSR_IA32_MC0_MISC + 4*(x))
  504. #define MSR_AMD64_MCx_MASK(x) (MSR_AMD64_MC0_MASK + (x))
  505. /* These are consecutive and not in the normal 4er MCE bank block */
  506. #define MSR_IA32_MC0_CTL2 0x00000280
  507. #define MSR_IA32_MCx_CTL2(x) (MSR_IA32_MC0_CTL2 + (x))
  508. #define MSR_P6_PERFCTR0 0x000000c1
  509. #define MSR_P6_PERFCTR1 0x000000c2
  510. #define MSR_P6_EVNTSEL0 0x00000186
  511. #define MSR_P6_EVNTSEL1 0x00000187
  512. #define MSR_KNC_PERFCTR0 0x00000020
  513. #define MSR_KNC_PERFCTR1 0x00000021
  514. #define MSR_KNC_EVNTSEL0 0x00000028
  515. #define MSR_KNC_EVNTSEL1 0x00000029
  516. /* Alternative perfctr range with full access. */
  517. #define MSR_IA32_PMC0 0x000004c1
  518. /* AMD64 MSRs. Not complete. See the architecture manual for a more
  519. complete list. */
  520. #define MSR_AMD64_PATCH_LEVEL 0x0000008b
  521. #define MSR_AMD64_TSC_RATIO 0xc0000104
  522. #define MSR_AMD64_NB_CFG 0xc001001f
  523. #define MSR_AMD64_PATCH_LOADER 0xc0010020
  524. #define MSR_AMD64_OSVW_ID_LENGTH 0xc0010140
  525. #define MSR_AMD64_OSVW_STATUS 0xc0010141
  526. #define MSR_AMD64_LS_CFG 0xc0011020
  527. #define MSR_AMD64_DC_CFG 0xc0011022
  528. #define MSR_AMD64_BU_CFG2 0xc001102a
  529. #define MSR_AMD64_IBSFETCHCTL 0xc0011030
  530. #define MSR_AMD64_IBSFETCHLINAD 0xc0011031
  531. #define MSR_AMD64_IBSFETCHPHYSAD 0xc0011032
  532. #define MSR_AMD64_IBSFETCH_REG_COUNT 3
  533. #define MSR_AMD64_IBSFETCH_REG_MASK ((1UL<<MSR_AMD64_IBSFETCH_REG_COUNT)-1)
  534. #define MSR_AMD64_IBSOPCTL 0xc0011033
  535. #define MSR_AMD64_IBSOPRIP 0xc0011034
  536. #define MSR_AMD64_IBSOPDATA 0xc0011035
  537. #define MSR_AMD64_IBSOPDATA2 0xc0011036
  538. #define MSR_AMD64_IBSOPDATA3 0xc0011037
  539. #define MSR_AMD64_IBSDCLINAD 0xc0011038
  540. #define MSR_AMD64_IBSDCPHYSAD 0xc0011039
  541. #define MSR_AMD64_IBSOP_REG_COUNT 7
  542. #define MSR_AMD64_IBSOP_REG_MASK ((1UL<<MSR_AMD64_IBSOP_REG_COUNT)-1)
  543. #define MSR_AMD64_IBSCTL 0xc001103a
  544. #define MSR_AMD64_IBSBRTARGET 0xc001103b
  545. #define MSR_AMD64_IBSOPDATA4 0xc001103d
  546. #define MSR_AMD64_IBS_REG_COUNT_MAX 8 /* includes MSR_AMD64_IBSBRTARGET */
  547. #define MSR_AMD64_SEV 0xc0010131
  548. #define MSR_AMD64_SEV_ENABLED_BIT 0
  549. #define MSR_AMD64_SEV_ENABLED BIT_ULL(MSR_AMD64_SEV_ENABLED_BIT)
  550. /* Fam 17h MSRs */
  551. #define MSR_F17H_IRPERF 0xc00000e9
  552. /* Fam 16h MSRs */
  553. #define MSR_F16H_L2I_PERF_CTL 0xc0010230
  554. #define MSR_F16H_L2I_PERF_CTR 0xc0010231
  555. #define MSR_F16H_DR1_ADDR_MASK 0xc0011019
  556. #define MSR_F16H_DR2_ADDR_MASK 0xc001101a
  557. #define MSR_F16H_DR3_ADDR_MASK 0xc001101b
  558. #define MSR_F16H_DR0_ADDR_MASK 0xc0011027
  559. /* Fam 15h MSRs */
  560. #define MSR_F15H_PERF_CTL 0xc0010200
  561. #define MSR_F15H_PERF_CTR 0xc0010201
  562. #define MSR_F15H_NB_PERF_CTL 0xc0010240
  563. #define MSR_F15H_NB_PERF_CTR 0xc0010241
  564. #define MSR_F15H_PTSC 0xc0010280
  565. #define MSR_F15H_IC_CFG 0xc0011021
  566. /* Fam 10h MSRs */
  567. #define MSR_FAM10H_MMIO_CONF_BASE 0xc0010058
  568. #define FAM10H_MMIO_CONF_ENABLE (1<<0)
  569. #define FAM10H_MMIO_CONF_BUSRANGE_MASK 0xf
  570. #define FAM10H_MMIO_CONF_BUSRANGE_SHIFT 2
  571. #define FAM10H_MMIO_CONF_BASE_MASK 0xfffffffULL
  572. #define FAM10H_MMIO_CONF_BASE_SHIFT 20
  573. #define MSR_FAM10H_NODE_ID 0xc001100c
  574. #define MSR_F10H_DECFG 0xc0011029
  575. #define MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT 1
  576. #define MSR_F10H_DECFG_LFENCE_SERIALIZE BIT_ULL(MSR_F10H_DECFG_LFENCE_SERIALIZE_BIT)
  577. /* K8 MSRs */
  578. #define MSR_K8_TOP_MEM1 0xc001001a
  579. #define MSR_K8_TOP_MEM2 0xc001001d
  580. #define MSR_K8_SYSCFG 0xc0010010
  581. #define MSR_K8_SYSCFG_MEM_ENCRYPT_BIT 23
  582. #define MSR_K8_SYSCFG_MEM_ENCRYPT BIT_ULL(MSR_K8_SYSCFG_MEM_ENCRYPT_BIT)
  583. #define MSR_K8_INT_PENDING_MSG 0xc0010055
  584. /* C1E active bits in int pending message */
  585. #define K8_INTP_C1E_ACTIVE_MASK 0x18000000
  586. #define MSR_K8_TSEG_ADDR 0xc0010112
  587. #define MSR_K8_TSEG_MASK 0xc0010113
  588. #define K8_MTRRFIXRANGE_DRAM_ENABLE 0x00040000 /* MtrrFixDramEn bit */
  589. #define K8_MTRRFIXRANGE_DRAM_MODIFY 0x00080000 /* MtrrFixDramModEn bit */
  590. #define K8_MTRR_RDMEM_WRMEM_MASK 0x18181818 /* Mask: RdMem|WrMem */
  591. /* K7 MSRs */
  592. #define MSR_K7_EVNTSEL0 0xc0010000
  593. #define MSR_K7_PERFCTR0 0xc0010004
  594. #define MSR_K7_EVNTSEL1 0xc0010001
  595. #define MSR_K7_PERFCTR1 0xc0010005
  596. #define MSR_K7_EVNTSEL2 0xc0010002
  597. #define MSR_K7_PERFCTR2 0xc0010006
  598. #define MSR_K7_EVNTSEL3 0xc0010003
  599. #define MSR_K7_PERFCTR3 0xc0010007
  600. #define MSR_K7_CLK_CTL 0xc001001b
  601. #define MSR_K7_HWCR 0xc0010015
  602. #define MSR_K7_HWCR_SMMLOCK_BIT 0
  603. #define MSR_K7_HWCR_SMMLOCK BIT_ULL(MSR_K7_HWCR_SMMLOCK_BIT)
  604. #define MSR_K7_FID_VID_CTL 0xc0010041
  605. #define MSR_K7_FID_VID_STATUS 0xc0010042
  606. /* K6 MSRs */
  607. #define MSR_K6_WHCR 0xc0000082
  608. #define MSR_K6_UWCCR 0xc0000085
  609. #define MSR_K6_EPMR 0xc0000086
  610. #define MSR_K6_PSOR 0xc0000087
  611. #define MSR_K6_PFIR 0xc0000088
  612. /* Centaur-Hauls/IDT defined MSRs. */
  613. #define MSR_IDT_FCR1 0x00000107
  614. #define MSR_IDT_FCR2 0x00000108
  615. #define MSR_IDT_FCR3 0x00000109
  616. #define MSR_IDT_FCR4 0x0000010a
  617. #define MSR_IDT_MCR0 0x00000110
  618. #define MSR_IDT_MCR1 0x00000111
  619. #define MSR_IDT_MCR2 0x00000112
  620. #define MSR_IDT_MCR3 0x00000113
  621. #define MSR_IDT_MCR4 0x00000114
  622. #define MSR_IDT_MCR5 0x00000115
  623. #define MSR_IDT_MCR6 0x00000116
  624. #define MSR_IDT_MCR7 0x00000117
  625. #define MSR_IDT_MCR_CTRL 0x00000120
  626. /* VIA Cyrix defined MSRs*/
  627. #define MSR_VIA_FCR 0x00001107
  628. #define MSR_VIA_LONGHAUL 0x0000110a
  629. #define MSR_VIA_RNG 0x0000110b
  630. #define MSR_VIA_BCR2 0x00001147
  631. /* Transmeta defined MSRs */
  632. #define MSR_TMTA_LONGRUN_CTRL 0x80868010
  633. #define MSR_TMTA_LONGRUN_FLAGS 0x80868011
  634. #define MSR_TMTA_LRTI_READOUT 0x80868018
  635. #define MSR_TMTA_LRTI_VOLT_MHZ 0x8086801a
  636. /* Intel defined MSRs. */
  637. #define MSR_IA32_P5_MC_ADDR 0x00000000
  638. #define MSR_IA32_P5_MC_TYPE 0x00000001
  639. #define MSR_IA32_TSC 0x00000010
  640. #define MSR_IA32_PLATFORM_ID 0x00000017
  641. #define MSR_IA32_EBL_CR_POWERON 0x0000002a
  642. #define MSR_EBC_FREQUENCY_ID 0x0000002c
  643. #define MSR_SMI_COUNT 0x00000034
  644. #define MSR_IA32_FEATURE_CONTROL 0x0000003a
  645. #define MSR_IA32_TSC_ADJUST 0x0000003b
  646. #define MSR_IA32_BNDCFGS 0x00000d90
  647. #define MSR_IA32_BNDCFGS_RSVD 0x00000ffc
  648. #define MSR_IA32_XSS 0x00000da0
  649. #define FEATURE_CONTROL_LOCKED (1<<0)
  650. #define FEATURE_CONTROL_VMXON_ENABLED_INSIDE_SMX (1<<1)
  651. #define FEATURE_CONTROL_VMXON_ENABLED_OUTSIDE_SMX (1<<2)
  652. #define FEATURE_CONTROL_LMCE (1<<20)
  653. #define MSR_IA32_APICBASE 0x0000001b
  654. #define MSR_IA32_APICBASE_BSP (1<<8)
  655. #define MSR_IA32_APICBASE_ENABLE (1<<11)
  656. #define MSR_IA32_APICBASE_BASE (0xfffff<<12)
  657. #define MSR_IA32_TSCDEADLINE 0x000006e0
  658. #define MSR_IA32_UCODE_WRITE 0x00000079
  659. #define MSR_IA32_UCODE_REV 0x0000008b
  660. #define MSR_IA32_SMM_MONITOR_CTL 0x0000009b
  661. #define MSR_IA32_SMBASE 0x0000009e
  662. #define MSR_IA32_PERF_STATUS 0x00000198
  663. #define MSR_IA32_PERF_CTL 0x00000199
  664. #define INTEL_PERF_CTL_MASK 0xffff
  665. #define MSR_AMD_PSTATE_DEF_BASE 0xc0010064
  666. #define MSR_AMD_PERF_STATUS 0xc0010063
  667. #define MSR_AMD_PERF_CTL 0xc0010062
  668. #define MSR_IA32_MPERF 0x000000e7
  669. #define MSR_IA32_APERF 0x000000e8
  670. #define MSR_IA32_THERM_CONTROL 0x0000019a
  671. #define MSR_IA32_THERM_INTERRUPT 0x0000019b
  672. #define THERM_INT_HIGH_ENABLE (1 << 0)
  673. #define THERM_INT_LOW_ENABLE (1 << 1)
  674. #define THERM_INT_PLN_ENABLE (1 << 24)
  675. #define MSR_IA32_THERM_STATUS 0x0000019c
  676. #define THERM_STATUS_PROCHOT (1 << 0)
  677. #define THERM_STATUS_POWER_LIMIT (1 << 10)
  678. #define MSR_THERM2_CTL 0x0000019d
  679. #define MSR_THERM2_CTL_TM_SELECT (1ULL << 16)
  680. #define MSR_IA32_MISC_ENABLE 0x000001a0
  681. #define MSR_IA32_TEMPERATURE_TARGET 0x000001a2
  682. #define MSR_MISC_FEATURE_CONTROL 0x000001a4
  683. #define MSR_MISC_PWR_MGMT 0x000001aa
  684. #define MSR_IA32_ENERGY_PERF_BIAS 0x000001b0
  685. #define ENERGY_PERF_BIAS_PERFORMANCE 0
  686. #define ENERGY_PERF_BIAS_BALANCE_PERFORMANCE 4
  687. #define ENERGY_PERF_BIAS_NORMAL 6
  688. #define ENERGY_PERF_BIAS_BALANCE_POWERSAVE 8
  689. #define ENERGY_PERF_BIAS_POWERSAVE 15
  690. #define MSR_IA32_PACKAGE_THERM_STATUS 0x000001b1
  691. #define PACKAGE_THERM_STATUS_PROCHOT (1 << 0)
  692. #define PACKAGE_THERM_STATUS_POWER_LIMIT (1 << 10)
  693. #define MSR_IA32_PACKAGE_THERM_INTERRUPT 0x000001b2
  694. #define PACKAGE_THERM_INT_HIGH_ENABLE (1 << 0)
  695. #define PACKAGE_THERM_INT_LOW_ENABLE (1 << 1)
  696. #define PACKAGE_THERM_INT_PLN_ENABLE (1 << 24)
  697. /* Thermal Thresholds Support */
  698. #define THERM_INT_THRESHOLD0_ENABLE (1 << 15)
  699. #define THERM_SHIFT_THRESHOLD0 8
  700. #define THERM_MASK_THRESHOLD0 (0x7f << THERM_SHIFT_THRESHOLD0)
  701. #define THERM_INT_THRESHOLD1_ENABLE (1 << 23)
  702. #define THERM_SHIFT_THRESHOLD1 16
  703. #define THERM_MASK_THRESHOLD1 (0x7f << THERM_SHIFT_THRESHOLD1)
  704. #define THERM_STATUS_THRESHOLD0 (1 << 6)
  705. #define THERM_LOG_THRESHOLD0 (1 << 7)
  706. #define THERM_STATUS_THRESHOLD1 (1 << 8)
  707. #define THERM_LOG_THRESHOLD1 (1 << 9)
/* MISC_ENABLE bits: architectural */
/*
 * Each feature is defined as a _BIT position plus the corresponding
 * 64-bit mask (1ULL << bit) for use with MSR_IA32_MISC_ENABLE.
 */
#define MSR_IA32_MISC_ENABLE_FAST_STRING_BIT 0
#define MSR_IA32_MISC_ENABLE_FAST_STRING (1ULL << MSR_IA32_MISC_ENABLE_FAST_STRING_BIT)
#define MSR_IA32_MISC_ENABLE_TCC_BIT 1
#define MSR_IA32_MISC_ENABLE_TCC (1ULL << MSR_IA32_MISC_ENABLE_TCC_BIT)
#define MSR_IA32_MISC_ENABLE_EMON_BIT 7
#define MSR_IA32_MISC_ENABLE_EMON (1ULL << MSR_IA32_MISC_ENABLE_EMON_BIT)
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT 11
#define MSR_IA32_MISC_ENABLE_BTS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_BTS_UNAVAIL_BIT)
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT 12
#define MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL (1ULL << MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL_BIT)
#define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT 16
#define MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP (1ULL << MSR_IA32_MISC_ENABLE_ENHANCED_SPEEDSTEP_BIT)
#define MSR_IA32_MISC_ENABLE_MWAIT_BIT 18
#define MSR_IA32_MISC_ENABLE_MWAIT (1ULL << MSR_IA32_MISC_ENABLE_MWAIT_BIT)
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT 22
#define MSR_IA32_MISC_ENABLE_LIMIT_CPUID (1ULL << MSR_IA32_MISC_ENABLE_LIMIT_CPUID_BIT)
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT 23
#define MSR_IA32_MISC_ENABLE_XTPR_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XTPR_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT 34
#define MSR_IA32_MISC_ENABLE_XD_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_XD_DISABLE_BIT)
/* MISC_ENABLE bits: model-specific, meaning may vary from core to core */
#define MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT 2
#define MSR_IA32_MISC_ENABLE_X87_COMPAT (1ULL << MSR_IA32_MISC_ENABLE_X87_COMPAT_BIT)
#define MSR_IA32_MISC_ENABLE_TM1_BIT 3
#define MSR_IA32_MISC_ENABLE_TM1 (1ULL << MSR_IA32_MISC_ENABLE_TM1_BIT)
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT 4
#define MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_SPLIT_LOCK_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT 6
#define MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_L3CACHE_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT 8
#define MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SUPPRESS_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT 9
#define MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_PREFETCH_DISABLE_BIT)
/* FERR and FERR_MULTIPLEX are deliberately the same bit (10): two names for it. */
#define MSR_IA32_MISC_ENABLE_FERR_BIT 10
#define MSR_IA32_MISC_ENABLE_FERR (1ULL << MSR_IA32_MISC_ENABLE_FERR_BIT)
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT 10
#define MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX (1ULL << MSR_IA32_MISC_ENABLE_FERR_MULTIPLEX_BIT)
#define MSR_IA32_MISC_ENABLE_TM2_BIT 13
#define MSR_IA32_MISC_ENABLE_TM2 (1ULL << MSR_IA32_MISC_ENABLE_TM2_BIT)
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT 19
#define MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_ADJ_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT 20
#define MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK (1ULL << MSR_IA32_MISC_ENABLE_SPEEDSTEP_LOCK_BIT)
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT 24
#define MSR_IA32_MISC_ENABLE_L1D_CONTEXT (1ULL << MSR_IA32_MISC_ENABLE_L1D_CONTEXT_BIT)
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT 37
#define MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_DCU_PREF_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT 38
#define MSR_IA32_MISC_ENABLE_TURBO_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_TURBO_DISABLE_BIT)
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT 39
#define MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE (1ULL << MSR_IA32_MISC_ENABLE_IP_PREF_DISABLE_BIT)
  760. /* MISC_FEATURES_ENABLES non-architectural features */
  761. #define MSR_MISC_FEATURES_ENABLES 0x00000140
  762. #define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT 0
  763. #define MSR_MISC_FEATURES_ENABLES_CPUID_FAULT BIT_ULL(MSR_MISC_FEATURES_ENABLES_CPUID_FAULT_BIT)
  764. #define MSR_MISC_FEATURES_ENABLES_RING3MWAIT_BIT 1
  765. #define MSR_IA32_TSC_DEADLINE 0x000006E0
/* P4/Xeon+ specific */
/*
 * MCG (machine-check global) extended-state MSRs 0x180-0x18a.  Names mirror
 * the 32-bit general-purpose registers plus EFLAGS/EIP whose saved values
 * they hold.
 */
#define MSR_IA32_MCG_EAX 0x00000180
#define MSR_IA32_MCG_EBX 0x00000181
#define MSR_IA32_MCG_ECX 0x00000182
#define MSR_IA32_MCG_EDX 0x00000183
#define MSR_IA32_MCG_ESI 0x00000184
#define MSR_IA32_MCG_EDI 0x00000185
#define MSR_IA32_MCG_EBP 0x00000186
#define MSR_IA32_MCG_ESP 0x00000187
#define MSR_IA32_MCG_EFLAGS 0x00000188
#define MSR_IA32_MCG_EIP 0x00000189
#define MSR_IA32_MCG_RESERVED 0x0000018a
/* Pentium IV performance counter MSRs */
/* 18 performance counters, grouped by counter block: BPU, MS, FLAME, IQ. */
#define MSR_P4_BPU_PERFCTR0 0x00000300
#define MSR_P4_BPU_PERFCTR1 0x00000301
#define MSR_P4_BPU_PERFCTR2 0x00000302
#define MSR_P4_BPU_PERFCTR3 0x00000303
#define MSR_P4_MS_PERFCTR0 0x00000304
#define MSR_P4_MS_PERFCTR1 0x00000305
#define MSR_P4_MS_PERFCTR2 0x00000306
#define MSR_P4_MS_PERFCTR3 0x00000307
#define MSR_P4_FLAME_PERFCTR0 0x00000308
#define MSR_P4_FLAME_PERFCTR1 0x00000309
#define MSR_P4_FLAME_PERFCTR2 0x0000030a
#define MSR_P4_FLAME_PERFCTR3 0x0000030b
#define MSR_P4_IQ_PERFCTR0 0x0000030c
#define MSR_P4_IQ_PERFCTR1 0x0000030d
#define MSR_P4_IQ_PERFCTR2 0x0000030e
#define MSR_P4_IQ_PERFCTR3 0x0000030f
#define MSR_P4_IQ_PERFCTR4 0x00000310
#define MSR_P4_IQ_PERFCTR5 0x00000311
/* CCCRs (counter configuration control registers), one per counter above. */
#define MSR_P4_BPU_CCCR0 0x00000360
#define MSR_P4_BPU_CCCR1 0x00000361
#define MSR_P4_BPU_CCCR2 0x00000362
#define MSR_P4_BPU_CCCR3 0x00000363
#define MSR_P4_MS_CCCR0 0x00000364
#define MSR_P4_MS_CCCR1 0x00000365
#define MSR_P4_MS_CCCR2 0x00000366
#define MSR_P4_MS_CCCR3 0x00000367
#define MSR_P4_FLAME_CCCR0 0x00000368
#define MSR_P4_FLAME_CCCR1 0x00000369
#define MSR_P4_FLAME_CCCR2 0x0000036a
#define MSR_P4_FLAME_CCCR3 0x0000036b
#define MSR_P4_IQ_CCCR0 0x0000036c
#define MSR_P4_IQ_CCCR1 0x0000036d
#define MSR_P4_IQ_CCCR2 0x0000036e
#define MSR_P4_IQ_CCCR3 0x0000036f
#define MSR_P4_IQ_CCCR4 0x00000370
#define MSR_P4_IQ_CCCR5 0x00000371
/* ESCRs (event selection control registers); listed by unit, not by address. */
#define MSR_P4_ALF_ESCR0 0x000003ca
#define MSR_P4_ALF_ESCR1 0x000003cb
#define MSR_P4_BPU_ESCR0 0x000003b2
#define MSR_P4_BPU_ESCR1 0x000003b3
#define MSR_P4_BSU_ESCR0 0x000003a0
#define MSR_P4_BSU_ESCR1 0x000003a1
#define MSR_P4_CRU_ESCR0 0x000003b8
#define MSR_P4_CRU_ESCR1 0x000003b9
#define MSR_P4_CRU_ESCR2 0x000003cc
#define MSR_P4_CRU_ESCR3 0x000003cd
#define MSR_P4_CRU_ESCR4 0x000003e0
#define MSR_P4_CRU_ESCR5 0x000003e1
#define MSR_P4_DAC_ESCR0 0x000003a8
#define MSR_P4_DAC_ESCR1 0x000003a9
#define MSR_P4_FIRM_ESCR0 0x000003a4
#define MSR_P4_FIRM_ESCR1 0x000003a5
#define MSR_P4_FLAME_ESCR0 0x000003a6
#define MSR_P4_FLAME_ESCR1 0x000003a7
#define MSR_P4_FSB_ESCR0 0x000003a2
#define MSR_P4_FSB_ESCR1 0x000003a3
#define MSR_P4_IQ_ESCR0 0x000003ba
#define MSR_P4_IQ_ESCR1 0x000003bb
#define MSR_P4_IS_ESCR0 0x000003b4
#define MSR_P4_IS_ESCR1 0x000003b5
#define MSR_P4_ITLB_ESCR0 0x000003b6
#define MSR_P4_ITLB_ESCR1 0x000003b7
#define MSR_P4_IX_ESCR0 0x000003c8
#define MSR_P4_IX_ESCR1 0x000003c9
#define MSR_P4_MOB_ESCR0 0x000003aa
#define MSR_P4_MOB_ESCR1 0x000003ab
#define MSR_P4_MS_ESCR0 0x000003c0
#define MSR_P4_MS_ESCR1 0x000003c1
#define MSR_P4_PMH_ESCR0 0x000003ac
#define MSR_P4_PMH_ESCR1 0x000003ad
#define MSR_P4_RAT_ESCR0 0x000003bc
#define MSR_P4_RAT_ESCR1 0x000003bd
#define MSR_P4_SAAT_ESCR0 0x000003ae
#define MSR_P4_SAAT_ESCR1 0x000003af
#define MSR_P4_SSU_ESCR0 0x000003be
#define MSR_P4_SSU_ESCR1 0x000003bf /* guess: not in manual */
#define MSR_P4_TBPU_ESCR0 0x000003c2
#define MSR_P4_TBPU_ESCR1 0x000003c3
#define MSR_P4_TC_ESCR0 0x000003c4
#define MSR_P4_TC_ESCR1 0x000003c5
#define MSR_P4_U2L_ESCR0 0x000003b0
#define MSR_P4_U2L_ESCR1 0x000003b1
#define MSR_P4_PEBS_MATRIX_VERT 0x000003f2
/* Intel Core-based CPU performance counters */
/*
 * NOTE: 0x309-0x30b numerically coincide with the P4 FLAME_PERFCTR range
 * defined above; the two families reuse these addresses, so only one set
 * is meaningful on a given CPU.
 */
#define MSR_CORE_PERF_FIXED_CTR0 0x00000309
#define MSR_CORE_PERF_FIXED_CTR1 0x0000030a
#define MSR_CORE_PERF_FIXED_CTR2 0x0000030b
#define MSR_CORE_PERF_FIXED_CTR_CTRL 0x0000038d
#define MSR_CORE_PERF_GLOBAL_STATUS 0x0000038e
#define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f
#define MSR_CORE_PERF_GLOBAL_OVF_CTRL 0x00000390
/* Geode defined MSRs */
#define MSR_GEODE_BUSCONT_CONF0 0x00001900
/* Intel VT MSRs */
/* VMX capability-reporting MSRs, 0x480-0x491. */
#define MSR_IA32_VMX_BASIC 0x00000480
#define MSR_IA32_VMX_PINBASED_CTLS 0x00000481
#define MSR_IA32_VMX_PROCBASED_CTLS 0x00000482
#define MSR_IA32_VMX_EXIT_CTLS 0x00000483
#define MSR_IA32_VMX_ENTRY_CTLS 0x00000484
#define MSR_IA32_VMX_MISC 0x00000485
#define MSR_IA32_VMX_CR0_FIXED0 0x00000486
#define MSR_IA32_VMX_CR0_FIXED1 0x00000487
#define MSR_IA32_VMX_CR4_FIXED0 0x00000488
#define MSR_IA32_VMX_CR4_FIXED1 0x00000489
#define MSR_IA32_VMX_VMCS_ENUM 0x0000048a
#define MSR_IA32_VMX_PROCBASED_CTLS2 0x0000048b
#define MSR_IA32_VMX_EPT_VPID_CAP 0x0000048c
/* "TRUE" variants report controls with default1 bits allowed to be cleared. */
#define MSR_IA32_VMX_TRUE_PINBASED_CTLS 0x0000048d
#define MSR_IA32_VMX_TRUE_PROCBASED_CTLS 0x0000048e
#define MSR_IA32_VMX_TRUE_EXIT_CTLS 0x0000048f
#define MSR_IA32_VMX_TRUE_ENTRY_CTLS 0x00000490
#define MSR_IA32_VMX_VMFUNC 0x00000491
/* VMX_BASIC bits and bitmasks */
#define VMX_BASIC_VMCS_SIZE_SHIFT 32
#define VMX_BASIC_TRUE_CTLS (1ULL << 55)
#define VMX_BASIC_64 0x0001000000000000LLU
/* Memory type is a 4-bit field at bits 53:50; WB (writeback) = 6. */
#define VMX_BASIC_MEM_TYPE_SHIFT 50
#define VMX_BASIC_MEM_TYPE_MASK 0x003c000000000000LLU
#define VMX_BASIC_MEM_TYPE_WB 6LLU
#define VMX_BASIC_INOUT 0x0040000000000000LLU
/* MSR_IA32_VMX_MISC bits */
#define MSR_IA32_VMX_MISC_VMWRITE_SHADOW_RO_FIELDS (1ULL << 29)
/* Preemption-timer rate field: low 5 bits of VMX_MISC. */
#define MSR_IA32_VMX_MISC_PREEMPTION_TIMER_SCALE 0x1F
/* AMD-V MSRs */
/* SVM control/host-save-area MSRs in the AMD 0xc001_xxxx range. */
#define MSR_VM_CR 0xc0010114
#define MSR_VM_IGNNE 0xc0010115
#define MSR_VM_HSAVE_PA 0xc0010117
#endif /* SELFTEST_KVM_PROCESSOR_H */