vgic-v3-sr.c 7.9 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337
  1. /*
  2. * Copyright (C) 2012-2015 - ARM Ltd
  3. * Author: Marc Zyngier <marc.zyngier@arm.com>
  4. *
  5. * This program is free software; you can redistribute it and/or modify
  6. * it under the terms of the GNU General Public License version 2 as
  7. * published by the Free Software Foundation.
  8. *
  9. * This program is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. * GNU General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU General Public License
  15. * along with this program. If not, see <http://www.gnu.org/licenses/>.
  16. */
  17. #include <linux/compiler.h>
  18. #include <linux/irqchip/arm-gic-v3.h>
  19. #include <linux/kvm_host.h>
  20. #include <asm/kvm_hyp.h>
/*
 * ICH_VTR_EL2 layout used here: bits [3:0] hold the index of the last
 * implemented List Register, bits [31:29] hold the number of priority
 * bits minus one (hence the "+ 1").
 */
#define vtr_to_max_lr_idx(v)	((v) & 0xf)
#define vtr_to_nr_pri_bits(v)	(((u32)(v) >> 29) + 1)

/*
 * Raw accessors for GIC system registers. The mrs_s/msr_s forms take
 * the register by its symbolic encoding (via __stringify), so they work
 * even for registers the assembler does not know by name.
 */
#define read_gicreg(r)							\
	({								\
		u64 reg;						\
		asm volatile("mrs_s %0, " __stringify(r) : "=r" (reg));	\
		reg;							\
	})

#define write_gicreg(v,r)						\
	do {								\
		u64 __val = (v);					\
		asm volatile("msr_s " __stringify(r) ", %0" : : "r" (__val));\
	} while (0)
/*
 * Read one List Register.
 *
 * The ICH_LR<n>_EL2 registers are individual system registers and
 * cannot be indexed at runtime, hence the explicit switch. @lr is
 * masked with 0xf, so every input maps onto LR0..LR15.
 */
static u64 __hyp_text __gic_v3_get_lr(unsigned int lr)
{
	switch (lr & 0xf) {
	case 0:
		return read_gicreg(ICH_LR0_EL2);
	case 1:
		return read_gicreg(ICH_LR1_EL2);
	case 2:
		return read_gicreg(ICH_LR2_EL2);
	case 3:
		return read_gicreg(ICH_LR3_EL2);
	case 4:
		return read_gicreg(ICH_LR4_EL2);
	case 5:
		return read_gicreg(ICH_LR5_EL2);
	case 6:
		return read_gicreg(ICH_LR6_EL2);
	case 7:
		return read_gicreg(ICH_LR7_EL2);
	case 8:
		return read_gicreg(ICH_LR8_EL2);
	case 9:
		return read_gicreg(ICH_LR9_EL2);
	case 10:
		return read_gicreg(ICH_LR10_EL2);
	case 11:
		return read_gicreg(ICH_LR11_EL2);
	case 12:
		return read_gicreg(ICH_LR12_EL2);
	case 13:
		return read_gicreg(ICH_LR13_EL2);
	case 14:
		return read_gicreg(ICH_LR14_EL2);
	case 15:
		return read_gicreg(ICH_LR15_EL2);
	}

	/* The masked switch covers all 16 values, so this cannot be hit. */
	unreachable();
}
/*
 * Write one List Register.
 *
 * Counterpart of __gic_v3_get_lr(): the ICH_LR<n>_EL2 registers are
 * not runtime-indexable, so the target register is selected by a
 * switch on the (masked) index.
 */
static void __hyp_text __gic_v3_set_lr(u64 val, int lr)
{
	switch (lr & 0xf) {
	case 0:
		write_gicreg(val, ICH_LR0_EL2);
		break;
	case 1:
		write_gicreg(val, ICH_LR1_EL2);
		break;
	case 2:
		write_gicreg(val, ICH_LR2_EL2);
		break;
	case 3:
		write_gicreg(val, ICH_LR3_EL2);
		break;
	case 4:
		write_gicreg(val, ICH_LR4_EL2);
		break;
	case 5:
		write_gicreg(val, ICH_LR5_EL2);
		break;
	case 6:
		write_gicreg(val, ICH_LR6_EL2);
		break;
	case 7:
		write_gicreg(val, ICH_LR7_EL2);
		break;
	case 8:
		write_gicreg(val, ICH_LR8_EL2);
		break;
	case 9:
		write_gicreg(val, ICH_LR9_EL2);
		break;
	case 10:
		write_gicreg(val, ICH_LR10_EL2);
		break;
	case 11:
		write_gicreg(val, ICH_LR11_EL2);
		break;
	case 12:
		write_gicreg(val, ICH_LR12_EL2);
		break;
	case 13:
		write_gicreg(val, ICH_LR13_EL2);
		break;
	case 14:
		write_gicreg(val, ICH_LR14_EL2);
		break;
	case 15:
		write_gicreg(val, ICH_LR15_EL2);
		break;
	}
}
  125. static void __hyp_text save_maint_int_state(struct kvm_vcpu *vcpu, int nr_lr)
  126. {
  127. struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
  128. int i;
  129. bool expect_mi;
  130. expect_mi = !!(cpu_if->vgic_hcr & ICH_HCR_UIE);
  131. for (i = 0; i < nr_lr; i++) {
  132. if (!(vcpu->arch.vgic_cpu.live_lrs & (1UL << i)))
  133. continue;
  134. expect_mi |= (!(cpu_if->vgic_lr[i] & ICH_LR_HW) &&
  135. (cpu_if->vgic_lr[i] & ICH_LR_EOI));
  136. }
  137. if (expect_mi) {
  138. cpu_if->vgic_misr = read_gicreg(ICH_MISR_EL2);
  139. if (cpu_if->vgic_misr & ICH_MISR_EOI)
  140. cpu_if->vgic_eisr = read_gicreg(ICH_EISR_EL2);
  141. else
  142. cpu_if->vgic_eisr = 0;
  143. } else {
  144. cpu_if->vgic_misr = 0;
  145. cpu_if->vgic_eisr = 0;
  146. }
  147. }
/*
 * Save the GICv3 CPU interface state for @vcpu on guest exit.
 *
 * Always saves VMCR. The LRs, maintenance state and active-priority
 * registers are only read back when some LRs were live on entry;
 * otherwise the shadow copies are simply reset. Finally, system
 * register access is re-enabled at EL2 and EL1 for the host.
 *
 * NOTE(review): the register access order below is load-bearing
 * (ELRSR must be sampled before ICH_HCR_EL2 is cleared and the LRs
 * are wiped) - do not reorder.
 */
void __hyp_text __vgic_v3_save_state(struct kvm_vcpu *vcpu)
{
	struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
	u64 val;

	/*
	 * Make sure stores to the GIC via the memory mapped interface
	 * are now visible to the system register interface.
	 */
	dsb(st);

	cpu_if->vgic_vmcr = read_gicreg(ICH_VMCR_EL2);

	if (vcpu->arch.vgic_cpu.live_lrs) {
		int i;
		u32 max_lr_idx, nr_pri_bits;

		/* Which LRs have already been emptied by the guest? */
		cpu_if->vgic_elrsr = read_gicreg(ICH_ELSR_EL2);

		/* Disable the vCPU interface before touching the LRs. */
		write_gicreg(0, ICH_HCR_EL2);
		val = read_gicreg(ICH_VTR_EL2);
		max_lr_idx = vtr_to_max_lr_idx(val);
		nr_pri_bits = vtr_to_nr_pri_bits(val);

		save_maint_int_state(vcpu, max_lr_idx + 1);

		for (i = 0; i <= max_lr_idx; i++) {
			if (!(vcpu->arch.vgic_cpu.live_lrs & (1UL << i)))
				continue;

			/*
			 * An LR reported empty needs no hardware read;
			 * just drop the stale state bits from the copy.
			 */
			if (cpu_if->vgic_elrsr & (1 << i)) {
				cpu_if->vgic_lr[i] &= ~ICH_LR_STATE;
				continue;
			}

			cpu_if->vgic_lr[i] = __gic_v3_get_lr(i);
			__gic_v3_set_lr(0, i);
		}

		/*
		 * The number of implemented AP0R/AP1R registers depends
		 * on the priority-bit count; the higher cases fall
		 * through to the lower ones on purpose.
		 */
		switch (nr_pri_bits) {
		case 7:
			cpu_if->vgic_ap0r[3] = read_gicreg(ICH_AP0R3_EL2);
			cpu_if->vgic_ap0r[2] = read_gicreg(ICH_AP0R2_EL2);
			/* fall through */
		case 6:
			cpu_if->vgic_ap0r[1] = read_gicreg(ICH_AP0R1_EL2);
			/* fall through */
		default:
			cpu_if->vgic_ap0r[0] = read_gicreg(ICH_AP0R0_EL2);
		}

		switch (nr_pri_bits) {
		case 7:
			cpu_if->vgic_ap1r[3] = read_gicreg(ICH_AP1R3_EL2);
			cpu_if->vgic_ap1r[2] = read_gicreg(ICH_AP1R2_EL2);
			/* fall through */
		case 6:
			cpu_if->vgic_ap1r[1] = read_gicreg(ICH_AP1R1_EL2);
			/* fall through */
		default:
			cpu_if->vgic_ap1r[0] = read_gicreg(ICH_AP1R0_EL2);
		}

		vcpu->arch.vgic_cpu.live_lrs = 0;
	} else {
		/* Nothing was live: reset the shadow state instead. */
		cpu_if->vgic_misr = 0;
		cpu_if->vgic_eisr = 0;
		cpu_if->vgic_elrsr = 0xffff;
		cpu_if->vgic_ap0r[0] = 0;
		cpu_if->vgic_ap0r[1] = 0;
		cpu_if->vgic_ap0r[2] = 0;
		cpu_if->vgic_ap0r[3] = 0;
		cpu_if->vgic_ap1r[0] = 0;
		cpu_if->vgic_ap1r[1] = 0;
		cpu_if->vgic_ap1r[2] = 0;
		cpu_if->vgic_ap1r[3] = 0;
	}

	val = read_gicreg(ICC_SRE_EL2);
	write_gicreg(val | ICC_SRE_EL2_ENABLE, ICC_SRE_EL2);
	isb(); /* Make sure ENABLE is set at EL2 before setting SRE at EL1 */
	write_gicreg(1, ICC_SRE_EL1);
}
/*
 * Restore the GICv3 CPU interface state for @vcpu on guest entry.
 *
 * Mirrors __vgic_v3_save_state(): ICC_SRE_EL1 is programmed first (see
 * the VFIQEn comment below), then VMCR; HCR, the active-priority
 * registers and the List Registers are only written if at least one
 * saved LR still carries pending/active state.
 */
void __hyp_text __vgic_v3_restore_state(struct kvm_vcpu *vcpu)
{
	struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
	u64 val;
	u32 max_lr_idx, nr_pri_bits;
	u16 live_lrs = 0;
	int i;

	/*
	 * VFIQEn is RES1 if ICC_SRE_EL1.SRE is 1. This causes a
	 * Group0 interrupt (as generated in GICv2 mode) to be
	 * delivered as a FIQ to the guest, with potentially fatal
	 * consequences. So we must make sure that ICC_SRE_EL1 has
	 * been actually programmed with the value we want before
	 * starting to mess with the rest of the GIC.
	 */
	write_gicreg(cpu_if->vgic_sre, ICC_SRE_EL1);
	isb();

	val = read_gicreg(ICH_VTR_EL2);
	max_lr_idx = vtr_to_max_lr_idx(val);
	nr_pri_bits = vtr_to_nr_pri_bits(val);

	/* Only LRs with pending/active state need restoring. */
	for (i = 0; i <= max_lr_idx; i++) {
		if (cpu_if->vgic_lr[i] & ICH_LR_STATE)
			live_lrs |= (1 << i);
	}

	write_gicreg(cpu_if->vgic_vmcr, ICH_VMCR_EL2);

	if (live_lrs) {
		write_gicreg(cpu_if->vgic_hcr, ICH_HCR_EL2);

		/*
		 * The number of implemented AP0R/AP1R registers depends
		 * on the priority-bit count; the higher cases fall
		 * through to the lower ones on purpose.
		 */
		switch (nr_pri_bits) {
		case 7:
			write_gicreg(cpu_if->vgic_ap0r[3], ICH_AP0R3_EL2);
			write_gicreg(cpu_if->vgic_ap0r[2], ICH_AP0R2_EL2);
			/* fall through */
		case 6:
			write_gicreg(cpu_if->vgic_ap0r[1], ICH_AP0R1_EL2);
			/* fall through */
		default:
			write_gicreg(cpu_if->vgic_ap0r[0], ICH_AP0R0_EL2);
		}

		switch (nr_pri_bits) {
		case 7:
			write_gicreg(cpu_if->vgic_ap1r[3], ICH_AP1R3_EL2);
			write_gicreg(cpu_if->vgic_ap1r[2], ICH_AP1R2_EL2);
			/* fall through */
		case 6:
			write_gicreg(cpu_if->vgic_ap1r[1], ICH_AP1R1_EL2);
			/* fall through */
		default:
			write_gicreg(cpu_if->vgic_ap1r[0], ICH_AP1R0_EL2);
		}

		for (i = 0; i <= max_lr_idx; i++) {
			if (!(live_lrs & (1 << i)))
				continue;

			__gic_v3_set_lr(cpu_if->vgic_lr[i], i);
		}
	}

	/*
	 * Ensures that the above will have reached the
	 * (re)distributors. This ensures the guest will read the
	 * correct values from the memory-mapped interface.
	 */
	isb();
	dsb(sy);
	vcpu->arch.vgic_cpu.live_lrs = live_lrs;

	/*
	 * Prevent the guest from touching the GIC system registers if
	 * SRE isn't enabled for GICv3 emulation.
	 */
	if (!cpu_if->vgic_sre) {
		write_gicreg(read_gicreg(ICC_SRE_EL2) & ~ICC_SRE_EL2_ENABLE,
			     ICC_SRE_EL2);
	}
}
  282. void __hyp_text __vgic_v3_init_lrs(void)
  283. {
  284. int max_lr_idx = vtr_to_max_lr_idx(read_gicreg(ICH_VTR_EL2));
  285. int i;
  286. for (i = 0; i <= max_lr_idx; i++)
  287. __gic_v3_set_lr(0, i);
  288. }
  289. static u64 __hyp_text __vgic_v3_read_ich_vtr_el2(void)
  290. {
  291. return read_gicreg(ICH_VTR_EL2);
  292. }
  293. __alias(__vgic_v3_read_ich_vtr_el2) u64 __vgic_v3_get_ich_vtr_el2(void);