interrupts_head.S

#include <linux/irqchip/arm-gic.h>
#include <asm/assembler.h>

#define VCPU_USR_REG(_reg_nr) (VCPU_USR_REGS + (_reg_nr * 4))
#define VCPU_USR_SP (VCPU_USR_REG(13))
#define VCPU_USR_LR (VCPU_USR_REG(14))
#define CP15_OFFSET(_cp15_reg_idx) (VCPU_CP15 + (_cp15_reg_idx * 4))
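/*
 * For example, VCPU_USR_REG(13) is VCPU_USR_REGS + 52: each usr-mode GPR
 * slot is 4 bytes wide, so slot 13 (the guest SP_usr) lives 52 bytes into
 * the usr register block of the vcpu struct.
 */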
/*
 * Many of these macros need to access the VCPU structure, which is always
 * held in r0. These macros should never clobber r1, as it is used to hold the
 * exception code on the return path (except of course the macro that switches
 * all the registers before the final jump to the VM).
 */
vcpu	.req	r0		@ vcpu pointer always in r0

/* Clobbers {r2-r6} */
.macro store_vfp_state vfp_base
	@ The VFPFMRX and VFPFMXR macros are the VMRS and VMSR instructions
	VFPFMRX r2, FPEXC
	@ Make sure VFP is enabled so we can touch the registers.
	orr r6, r2, #FPEXC_EN
	VFPFMXR FPEXC, r6

	VFPFMRX r3, FPSCR
	tst r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq 1f
	@ If FPEXC_EX is 0, then FPINST/FPINST2 reads are unpredictable, so
	@ we only need to save them if FPEXC_EX is set.
	VFPFMRX r4, FPINST
	tst r2, #FPEXC_FP2V
	VFPFMRX r5, FPINST2, ne	@ vmrsne
	bic r6, r2, #FPEXC_EX		@ FPEXC_EX disable
	VFPFMXR FPEXC, r6
1:
	VFPFSTMIA \vfp_base, r6	@ Save VFP registers
	stm \vfp_base, {r2-r5}	@ Save FPEXC, FPSCR, FPINST, FPINST2
.endm
/* Assume FPEXC_EN is on and FPEXC_EX is off, clobbers {r2-r6} */
.macro restore_vfp_state vfp_base
	VFPFLDMIA \vfp_base, r6	@ Load VFP registers
	ldm \vfp_base, {r2-r5}	@ Load FPEXC, FPSCR, FPINST, FPINST2

	VFPFMXR FPSCR, r3
	tst r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq 1f
	VFPFMXR FPINST, r4
	tst r2, #FPEXC_FP2V
	VFPFMXR FPINST2, r5, ne
1:
	VFPFMXR FPEXC, r2		@ FPEXC (last, in case !EN)
.endm
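/*
 * Usage sketch: the world-switch code typically points \vfp_base at the
 * host or guest VFP save area before invoking these macros, e.g.
 *
 *	add r7, vcpu, #VCPU_VFP_GUEST	@ offset name assumed, not defined here
 *	store_vfp_state r7
 *
 * VFPFSTMIA/VFPFLDMIA (from asm/vfpmacros.h) transfer the d0-d15 bank and,
 * on CPUs with 32 double registers, d16-d31 as well, using r6 as scratch.
 */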
/* These are simply for the macros to work - the values don't have meaning */
.equ usr, 0
.equ svc, 1
.equ abt, 2
.equ und, 3
.equ irq, 4
.equ fiq, 5

.macro push_host_regs_mode mode
	mrs r2, SP_\mode
	mrs r3, LR_\mode
	mrs r4, SPSR_\mode
	push {r2, r3, r4}
.endm

/*
 * Store all host persistent registers on the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro save_host_regs
	/* Hyp regs. Only ELR_hyp (SPSR_hyp already saved) */
	mrs r2, ELR_hyp
	push {r2}

	/* usr regs */
	push {r4-r12}			@ r0-r3 are always clobbered
	mrs r2, SP_usr
	mov r3, lr
	push {r2, r3}

	push_host_regs_mode svc
	push_host_regs_mode abt
	push_host_regs_mode und
	push_host_regs_mode irq

	/* fiq regs */
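	@ FIQ mode banks r8-r12 in addition to SP, LR and SPSR, so each of
	@ these registers has to be read individually with mrs.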
	mrs r2, r8_fiq
	mrs r3, r9_fiq
	mrs r4, r10_fiq
	mrs r5, r11_fiq
	mrs r6, r12_fiq
	mrs r7, SP_fiq
	mrs r8, LR_fiq
	mrs r9, SPSR_fiq
	push {r2-r9}
.endm
.macro pop_host_regs_mode mode
	pop {r2, r3, r4}
	msr SP_\mode, r2
	msr LR_\mode, r3
	msr SPSR_\mode, r4
.endm

/*
 * Restore all host registers from the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro restore_host_regs
	pop {r2-r9}
	msr r8_fiq, r2
	msr r9_fiq, r3
	msr r10_fiq, r4
	msr r11_fiq, r5
	msr r12_fiq, r6
	msr SP_fiq, r7
	msr LR_fiq, r8
	msr SPSR_fiq, r9

	pop_host_regs_mode irq
	pop_host_regs_mode und
	pop_host_regs_mode abt
	pop_host_regs_mode svc

	pop {r2, r3}
	msr SP_usr, r2
	mov lr, r3
	pop {r4-r12}

	pop {r2}
	msr ELR_hyp, r2
.endm

/*
 * Restore SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r1, r2, r3, r4.
 */
.macro restore_guest_regs_mode mode, offset
	add r1, vcpu, \offset
	ldm r1, {r2, r3, r4}
	msr SP_\mode, r2
	msr LR_\mode, r3
	msr SPSR_\mode, r4
.endm

/*
 * Restore all guest registers from the vcpu struct.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers *all* registers.
 */
.macro restore_guest_regs
	restore_guest_regs_mode svc, #VCPU_SVC_REGS
	restore_guest_regs_mode abt, #VCPU_ABT_REGS
	restore_guest_regs_mode und, #VCPU_UND_REGS
	restore_guest_regs_mode irq, #VCPU_IRQ_REGS

	add r1, vcpu, #VCPU_FIQ_REGS
	ldm r1, {r2-r9}
	msr r8_fiq, r2
	msr r9_fiq, r3
	msr r10_fiq, r4
	msr r11_fiq, r5
	msr r12_fiq, r6
	msr SP_fiq, r7
	msr LR_fiq, r8
	msr SPSR_fiq, r9

	@ Load return state
	ldr r2, [vcpu, #VCPU_PC]
	ldr r3, [vcpu, #VCPU_CPSR]
	msr ELR_hyp, r2
	msr SPSR_cxsf, r3

	@ Load user registers
	ldr r2, [vcpu, #VCPU_USR_SP]
	ldr r3, [vcpu, #VCPU_USR_LR]
	msr SP_usr, r2
	mov lr, r3
	add vcpu, vcpu, #(VCPU_USR_REGS)
	ldm vcpu, {r0-r12}
.endm
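/*
 * Note: the final ldm above overwrites r0, so the vcpu pointer is gone once
 * restore_guest_regs completes; the caller is expected to jump to the VM
 * straight away, since ELR_hyp and SPSR have already been loaded with the
 * guest PC and CPSR.
 */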
/*
 * Save SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs_mode mode, offset
	add r2, vcpu, \offset
	mrs r3, SP_\mode
	mrs r4, LR_\mode
	mrs r5, SPSR_\mode
	stm r2, {r3, r4, r5}
.endm

/*
 * Save all guest registers to the vcpu struct
 * Expects guest's r0, r1, r2 on the stack.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs
	@ Store usr registers
	add r2, vcpu, #VCPU_USR_REG(3)
	stm r2, {r3-r12}
	add r2, vcpu, #VCPU_USR_REG(0)
	pop {r3, r4, r5}		@ r0, r1, r2
	stm r2, {r3, r4, r5}
	mrs r2, SP_usr
	mov r3, lr
	str r2, [vcpu, #VCPU_USR_SP]
	str r3, [vcpu, #VCPU_USR_LR]

	@ Store return state
	mrs r2, ELR_hyp
	mrs r3, spsr
	str r2, [vcpu, #VCPU_PC]
	str r3, [vcpu, #VCPU_CPSR]

	@ Store other guest registers
	save_guest_regs_mode svc, #VCPU_SVC_REGS
	save_guest_regs_mode abt, #VCPU_ABT_REGS
	save_guest_regs_mode und, #VCPU_UND_REGS
	save_guest_regs_mode irq, #VCPU_IRQ_REGS
.endm
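/*
 * The guest's r0-r2 come from the stack because the Hyp exception vectors
 * need scratch registers before the vcpu pointer can be reloaded; the entry
 * path is assumed to have pushed them (push {r0-r2}) before this macro runs,
 * matching the "Expects guest's r0, r1, r2 on the stack" note above.
 */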
/*
 * Reads cp15 registers from hardware and stores them in memory
 * @store_to_vcpu: If 0, registers are written in-order to the stack,
 *		   otherwise to the VCPU struct pointed to by vcpu
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2 - r12
 */
.macro read_cp15_state store_to_vcpu
	mrc p15, 0, r2, c1, c0, 0	@ SCTLR
	mrc p15, 0, r3, c1, c0, 2	@ CPACR
	mrc p15, 0, r4, c2, c0, 2	@ TTBCR
	mrc p15, 0, r5, c3, c0, 0	@ DACR
	mrrc p15, 0, r6, r7, c2		@ TTBR 0
	mrrc p15, 1, r8, r9, c2		@ TTBR 1
	mrc p15, 0, r10, c10, c2, 0	@ PRRR
	mrc p15, 0, r11, c10, c2, 1	@ NMRR
	mrc p15, 2, r12, c0, c0, 0	@ CSSELR

	.if \store_to_vcpu == 0
	push {r2-r12}			@ Push CP15 registers
	.else
	str r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	str r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	str r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	str r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add r2, vcpu, #CP15_OFFSET(c2_TTBR0)
	strd r6, r7, [r2]
	add r2, vcpu, #CP15_OFFSET(c2_TTBR1)
	strd r8, r9, [r2]
	str r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	str r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	str r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mrc p15, 0, r2, c13, c0, 1	@ CID
	mrc p15, 0, r3, c13, c0, 2	@ TID_URW
	mrc p15, 0, r4, c13, c0, 3	@ TID_URO
	mrc p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mrc p15, 0, r6, c5, c0, 0	@ DFSR
	mrc p15, 0, r7, c5, c0, 1	@ IFSR
	mrc p15, 0, r8, c5, c1, 0	@ ADFSR
	mrc p15, 0, r9, c5, c1, 1	@ AIFSR
	mrc p15, 0, r10, c6, c0, 0	@ DFAR
	mrc p15, 0, r11, c6, c0, 2	@ IFAR
	mrc p15, 0, r12, c12, c0, 0	@ VBAR

	.if \store_to_vcpu == 0
	push {r2-r12}			@ Push CP15 registers
	.else
	str r2, [vcpu, #CP15_OFFSET(c13_CID)]
	str r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	str r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	str r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	str r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	str r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	str r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	str r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	str r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	str r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	str r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mrc p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mrrc p15, 0, r4, r5, c7		@ PAR
	mrc p15, 0, r6, c10, c3, 0	@ AMAIR0
	mrc p15, 0, r7, c10, c3, 1	@ AMAIR1

	.if \store_to_vcpu == 0
	push {r2,r4-r7}
	.else
	str r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add r12, vcpu, #CP15_OFFSET(c7_PAR)
	strd r4, r5, [r12]
	str r6, [vcpu, #CP15_OFFSET(c10_AMAIR0)]
	str r7, [vcpu, #CP15_OFFSET(c10_AMAIR1)]
	.endif
.endm
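/*
 * read_cp15_state and write_cp15_state are mirror images: the world switch
 * saves one world's cp15 state and loads the other's. When the stack is used
 * (\store_to_vcpu == 0 / \read_from_vcpu == 0), the three register groups
 * pushed here are popped by write_cp15_state in the reverse group order, as
 * the LIFO stack discipline requires.
 */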
/*
 * Reads cp15 registers from memory and writes them to hardware
 * @read_from_vcpu: If 0, registers are read in-order from the stack,
 *		    otherwise from the VCPU struct pointed to by vcpu
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro write_cp15_state read_from_vcpu
	.if \read_from_vcpu == 0
	pop {r2,r4-r7}
	.else
	ldr r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add r12, vcpu, #CP15_OFFSET(c7_PAR)
	ldrd r4, r5, [r12]
	ldr r6, [vcpu, #CP15_OFFSET(c10_AMAIR0)]
	ldr r7, [vcpu, #CP15_OFFSET(c10_AMAIR1)]
	.endif

	mcr p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mcrr p15, 0, r4, r5, c7		@ PAR
	mcr p15, 0, r6, c10, c3, 0	@ AMAIR0
	mcr p15, 0, r7, c10, c3, 1	@ AMAIR1

	.if \read_from_vcpu == 0
	pop {r2-r12}
	.else
	ldr r2, [vcpu, #CP15_OFFSET(c13_CID)]
	ldr r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	ldr r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	ldr r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	ldr r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	ldr r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	ldr r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	ldr r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	ldr r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	ldr r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	ldr r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mcr p15, 0, r2, c13, c0, 1	@ CID
	mcr p15, 0, r3, c13, c0, 2	@ TID_URW
	mcr p15, 0, r4, c13, c0, 3	@ TID_URO
	mcr p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mcr p15, 0, r6, c5, c0, 0	@ DFSR
	mcr p15, 0, r7, c5, c0, 1	@ IFSR
	mcr p15, 0, r8, c5, c1, 0	@ ADFSR
	mcr p15, 0, r9, c5, c1, 1	@ AIFSR
	mcr p15, 0, r10, c6, c0, 0	@ DFAR
	mcr p15, 0, r11, c6, c0, 2	@ IFAR
	mcr p15, 0, r12, c12, c0, 0	@ VBAR

	.if \read_from_vcpu == 0
	pop {r2-r12}
	.else
	ldr r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	ldr r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	ldr r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	ldr r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add r12, vcpu, #CP15_OFFSET(c2_TTBR0)
	ldrd r6, r7, [r12]
	add r12, vcpu, #CP15_OFFSET(c2_TTBR1)
	ldrd r8, r9, [r12]
	ldr r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	ldr r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	ldr r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mcr p15, 0, r2, c1, c0, 0	@ SCTLR
	mcr p15, 0, r3, c1, c0, 2	@ CPACR
	mcr p15, 0, r4, c2, c0, 2	@ TTBCR
	mcr p15, 0, r5, c3, c0, 0	@ DACR
	mcrr p15, 0, r6, r7, c2		@ TTBR 0
	mcrr p15, 1, r8, r9, c2		@ TTBR 1
	mcr p15, 0, r10, c10, c2, 0	@ PRRR
	mcr p15, 0, r11, c10, c2, 1	@ NMRR
	mcr p15, 2, r12, c0, c0, 0	@ CSSELR
.endm
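/*
 * Sketch of how the pair is typically combined by the world-switch code
 * (the exact call sites live in interrupts.S, not in this header):
 *
 *	read_cp15_state store_to_vcpu=0	   @ host cp15 state -> Hyp stack
 *	write_cp15_state read_from_vcpu=1  @ guest cp15 state <- vcpu struct
 *	...run the guest...
 *	read_cp15_state store_to_vcpu=1	   @ guest cp15 state -> vcpu struct
 *	write_cp15_state read_from_vcpu=0  @ host cp15 state <- Hyp stack
 */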
/*
 * Save the VGIC CPU state into memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro save_vgic_state
	/* Get VGIC VCTRL base into r2 */
	ldr r2, [vcpu, #VCPU_KVM]
	ldr r2, [r2, #KVM_VGIC_VCTRL]
	cmp r2, #0
	beq 2f

	/* Compute the address of struct vgic_cpu */
	add r11, vcpu, #VCPU_VGIC_CPU

	/* Save all interesting registers */
	ldr r4, [r2, #GICH_VMCR]
	ldr r5, [r2, #GICH_MISR]
	ldr r6, [r2, #GICH_EISR0]
	ldr r7, [r2, #GICH_EISR1]
	ldr r8, [r2, #GICH_ELRSR0]
	ldr r9, [r2, #GICH_ELRSR1]
	ldr r10, [r2, #GICH_APR]
ARM_BE8(rev r4, r4 )
ARM_BE8(rev r5, r5 )
ARM_BE8(rev r6, r6 )
ARM_BE8(rev r7, r7 )
ARM_BE8(rev r8, r8 )
ARM_BE8(rev r9, r9 )
ARM_BE8(rev r10, r10 )

	str r4, [r11, #VGIC_V2_CPU_VMCR]
	str r5, [r11, #VGIC_V2_CPU_MISR]
#ifdef CONFIG_CPU_ENDIAN_BE8
	str r6, [r11, #(VGIC_V2_CPU_EISR + 4)]
	str r7, [r11, #VGIC_V2_CPU_EISR]
	str r8, [r11, #(VGIC_V2_CPU_ELRSR + 4)]
	str r9, [r11, #VGIC_V2_CPU_ELRSR]
#else
	str r6, [r11, #VGIC_V2_CPU_EISR]
	str r7, [r11, #(VGIC_V2_CPU_EISR + 4)]
	str r8, [r11, #VGIC_V2_CPU_ELRSR]
	str r9, [r11, #(VGIC_V2_CPU_ELRSR + 4)]
#endif
	str r10, [r11, #VGIC_V2_CPU_APR]

	/* Clear GICH_HCR */
	mov r5, #0
	str r5, [r2, #GICH_HCR]

	/* Save list registers */
	add r2, r2, #GICH_LR0
	add r3, r11, #VGIC_V2_CPU_LR
	ldr r4, [r11, #VGIC_CPU_NR_LR]
1:	ldr r6, [r2], #4
ARM_BE8(rev r6, r6 )
	str r6, [r3], #4
	subs r4, r4, #1
	bne 1b
2:
.endm
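/*
 * Clearing GICH_HCR here disables the virtual CPU interface (and any
 * maintenance interrupts it could raise) while the host runs; the
 * VGIC_V2_CPU_HCR value kept in the vcpu struct is written back by
 * restore_vgic_state on the next guest entry.
 */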
/*
 * Restore the VGIC CPU state from memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro restore_vgic_state
	/* Get VGIC VCTRL base into r2 */
	ldr r2, [vcpu, #VCPU_KVM]
	ldr r2, [r2, #KVM_VGIC_VCTRL]
	cmp r2, #0
	beq 2f

	/* Compute the address of struct vgic_cpu */
	add r11, vcpu, #VCPU_VGIC_CPU

	/* We only restore a minimal set of registers */
	ldr r3, [r11, #VGIC_V2_CPU_HCR]
	ldr r4, [r11, #VGIC_V2_CPU_VMCR]
	ldr r8, [r11, #VGIC_V2_CPU_APR]
ARM_BE8(rev r3, r3 )
ARM_BE8(rev r4, r4 )
ARM_BE8(rev r8, r8 )
	str r3, [r2, #GICH_HCR]
	str r4, [r2, #GICH_VMCR]
	str r8, [r2, #GICH_APR]

	/* Restore list registers */
	add r2, r2, #GICH_LR0
	add r3, r11, #VGIC_V2_CPU_LR
	ldr r4, [r11, #VGIC_CPU_NR_LR]
1:	ldr r6, [r3], #4
ARM_BE8(rev r6, r6 )
	str r6, [r2], #4
	subs r4, r4, #1
	bne 1b
2:
.endm
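/*
 * Only GICH_HCR, GICH_VMCR, GICH_APR and the list registers are written
 * back: the status registers saved on exit (GICH_MISR, GICH_EISR*,
 * GICH_ELRSR*) are read-only in the hardware, so there is nothing to
 * restore for them.
 */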
#define CNTHCTL_PL1PCTEN (1 << 0)
#define CNTHCTL_PL1PCEN (1 << 1)

/*
 * Save the timer state onto the VCPU and allow physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro save_timer_state
	ldr r4, [vcpu, #VCPU_KVM]
	ldr r2, [r4, #KVM_TIMER_ENABLED]
	cmp r2, #0
	beq 1f

	mrc p15, 0, r2, c14, c3, 1	@ CNTV_CTL
	str r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	isb

	mrrc p15, 3, rr_lo_hi(r2, r3), c14	@ CNTV_CVAL
	ldr r4, =VCPU_TIMER_CNTV_CVAL
	add r5, vcpu, r4
	strd r2, r3, [r5]

	@ Ensure host CNTVCT == CNTPCT
	mov r2, #0
	mcrr p15, 4, r2, r2, c14	@ CNTVOFF

1:
	mov r2, #0			@ Clear ENABLE
	mcr p15, 0, r2, c14, c3, 1	@ CNTV_CTL

	@ Allow physical timer/counter access for the host
	mrc p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr r2, r2, #(CNTHCTL_PL1PCEN | CNTHCTL_PL1PCTEN)
	mcr p15, 4, r2, c14, c1, 0	@ CNTHCTL
.endm
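/*
 * rr_lo_hi() comes from asm/assembler.h: it emits its two arguments in the
 * given order on little-endian builds and swapped on CONFIG_CPU_ENDIAN_BE8,
 * so that the 64-bit value transferred by mcrr/mrrc matches the layout used
 * by the ldrd/strd accesses to the same fields.
 */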
/*
 * Load the timer state from the VCPU and deny physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro restore_timer_state
	@ Disallow physical timer access for the guest
	@ Physical counter access is allowed
	mrc p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr r2, r2, #CNTHCTL_PL1PCTEN
	bic r2, r2, #CNTHCTL_PL1PCEN
	mcr p15, 4, r2, c14, c1, 0	@ CNTHCTL

	ldr r4, [vcpu, #VCPU_KVM]
	ldr r2, [r4, #KVM_TIMER_ENABLED]
	cmp r2, #0
	beq 1f

	ldr r2, [r4, #KVM_TIMER_CNTVOFF]
	ldr r3, [r4, #(KVM_TIMER_CNTVOFF + 4)]
	mcrr p15, 4, rr_lo_hi(r2, r3), c14	@ CNTVOFF

	ldr r4, =VCPU_TIMER_CNTV_CVAL
	add r5, vcpu, r4
	ldrd r2, r3, [r5]
	mcrr p15, 3, rr_lo_hi(r2, r3), c14	@ CNTV_CVAL
	isb

	ldr r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	and r2, r2, #3
	mcr p15, 0, r2, c14, c3, 1	@ CNTV_CTL
1:
.endm
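/*
 * The "and r2, r2, #3" above keeps only the ENABLE and IMASK bits of the
 * saved CNTV_CTL value; bit 2 (ISTATUS) is a read-only status bit and is
 * deliberately not written back.
 */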
.equ vmentry, 0
.equ vmexit, 1

/*
 * Configures the HSTR (Hyp System Trap Register) on entry/return
 * (hardware reset value is 0)
 */
.macro set_hstr operation
	mrc p15, 4, r2, c1, c1, 3
	ldr r3, =HSTR_T(15)
	.if \operation == vmentry
	orr r2, r2, r3		@ Trap CR{15}
	.else
	bic r2, r2, r3		@ Don't trap any CRx accesses
	.endif
	mcr p15, 4, r2, c1, c1, 3
.endm

/*
 * Configures the HCPTR (Hyp Coprocessor Trap Register) on entry/return
 * (hardware reset value is 0). Keep previous value in r2.
 * An ISB is emitted on vmexit/vmtrap, but executed on vmexit only if
 * VFP wasn't already enabled (always executed on vmtrap).
 * If a label is specified with vmexit, it is branched to if VFP wasn't
 * enabled.
 */
.macro set_hcptr operation, mask, label = none
	mrc p15, 4, r2, c1, c1, 2
	ldr r3, =\mask
	.if \operation == vmentry
	orr r3, r2, r3		@ Trap coproc-accesses defined in mask
	.else
	bic r3, r2, r3		@ Don't trap defined coproc-accesses
	.endif
	mcr p15, 4, r3, c1, c1, 2
	.if \operation != vmentry
	.if \operation == vmexit
	tst r2, #(HCPTR_TCP(10) | HCPTR_TCP(11))
	beq 1f
	.endif
	isb
	.if \label != none
	b \label
	.endif
1:
	.endif
.endm
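/*
 * Usage sketch (call sites are in interrupts.S; the exact masks and label
 * shown here are assumptions, not part of this header):
 *
 *	set_hcptr vmentry, (HCPTR_TTA | HCPTR_TCP(10) | HCPTR_TCP(11))
 *	...
 *	set_hcptr vmexit, (HCPTR_TCP(10) | HCPTR_TCP(11)), after_vfp_restore
 *
 * i.e. cp10/cp11 (VFP/Advanced SIMD) accesses are trapped while the guest
 * runs, and on exit the optional label is branched to when VFP had not been
 * enabled for the guest, as described in the comment above.
 */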
/*
 * Configures the HDCR (Hyp Debug Configuration Register) on entry/return
 * (hardware reset value is 0)
 */
.macro set_hdcr operation
	mrc p15, 4, r2, c1, c1, 1
	ldr r3, =(HDCR_TPM|HDCR_TPMCR)
	.if \operation == vmentry
	orr r2, r2, r3		@ Trap some perfmon accesses
	.else
	bic r2, r2, r3		@ Don't trap any perfmon accesses
	.endif
	mcr p15, 4, r2, c1, c1, 1
.endm

/* Enable/Disable: stage-2 trans., trap interrupts, trap wfi, trap smc */
.macro configure_hyp_role operation
	.if \operation == vmentry
	ldr r2, [vcpu, #VCPU_HCR]
	ldr r3, [vcpu, #VCPU_IRQ_LINES]
	orr r2, r2, r3
	.else
	mov r2, #0
	.endif
	mcr p15, 4, r2, c1, c1, 0	@ HCR
.endm

.macro load_vcpu
	mrc p15, 4, vcpu, c13, c0, 2	@ HTPIDR
.endm
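/*
 * load_vcpu recovers the vcpu pointer from HTPIDR, the Hyp software thread
 * ID register; the entry path is assumed to have stashed the pointer there
 * (mcr p15, 4, r0, c13, c0, 2) before switching to the guest, so the Hyp
 * exception vectors can retrieve it without any free general-purpose
 * registers.
 */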