@@ -69,9 +69,6 @@
  * mappings, and none of this applies in that case.
  */
 
-#define HYP_PAGE_OFFSET_HIGH_MASK	((UL(1) << VA_BITS) - 1)
-#define HYP_PAGE_OFFSET_LOW_MASK	((UL(1) << (VA_BITS - 1)) - 1)
-
 #ifdef __ASSEMBLY__
 
 #include <asm/alternative.h>
@@ -81,28 +78,15 @@
  * Convert a kernel VA into a HYP VA.
  * reg: VA to be converted.
  *
- * This generates the following sequences:
- * - High mask:
- *		and x0, x0, #HYP_PAGE_OFFSET_HIGH_MASK
- *		nop
- * - Low mask:
- *		and x0, x0, #HYP_PAGE_OFFSET_HIGH_MASK
- *		and x0, x0, #HYP_PAGE_OFFSET_LOW_MASK
- * - VHE:
- *		nop
- *		nop
- *
- * The "low mask" version works because the mask is a strict subset of
- * the "high mask", hence performing the first mask for nothing.
- * Should be completely invisible on any viable CPU.
+ * The actual code generation takes place in kvm_update_va_mask, and
+ * the instructions below are only there to reserve the space and
+ * perform the register allocation (kvm_update_va_mask uses the
+ * specific registers encoded in the instructions).
  */
 .macro kern_hyp_va	reg
-alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
-	and     \reg, \reg, #HYP_PAGE_OFFSET_HIGH_MASK
-alternative_else_nop_endif
-alternative_if ARM64_HYP_OFFSET_LOW
-	and     \reg, \reg, #HYP_PAGE_OFFSET_LOW_MASK
-alternative_else_nop_endif
+alternative_cb kvm_update_va_mask
+	and     \reg, \reg, #1
+alternative_cb_end
 .endm
 
 #else
@@ -113,18 +97,14 @@ alternative_else_nop_endif
 #include <asm/mmu_context.h>
 #include <asm/pgtable.h>
 
+void kvm_update_va_mask(struct alt_instr *alt,
+			__le32 *origptr, __le32 *updptr, int nr_inst);
+
 static inline unsigned long __kern_hyp_va(unsigned long v)
 {
-	asm volatile(ALTERNATIVE("and %0, %0, %1",
-				 "nop",
-				 ARM64_HAS_VIRT_HOST_EXTN)
-		     : "+r" (v)
-		     : "i" (HYP_PAGE_OFFSET_HIGH_MASK));
-	asm volatile(ALTERNATIVE("nop",
-				 "and %0, %0, %1",
-				 ARM64_HYP_OFFSET_LOW)
-		     : "+r" (v)
-		     : "i" (HYP_PAGE_OFFSET_LOW_MASK));
+	asm volatile(ALTERNATIVE_CB("and %0, %0, #1\n",
+				    kvm_update_va_mask)
+		     : "+r" (v));
 	return v;
 }
 
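
Note on what the masking achieves: kern_hyp_va converts a kernel VA into a HYP VA with a single AND that clears the top bits of the address. Before this patch the mask was fixed at compile time (the HIGH/LOW defines removed above); after it, kvm_update_va_mask computes the mask once at boot and patches it into the reserved instruction slot. The user-space sketch below is illustrative only; VA_BITS = 48 and the sample address are assumptions, not values taken from the patch.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define VA_BITS 48	/* example configuration, an assumption */

int main(void)
{
	/* The two compile-time masks this patch removes. */
	uint64_t high_mask = (UINT64_C(1) << VA_BITS) - 1;
	uint64_t low_mask  = (UINT64_C(1) << (VA_BITS - 1)) - 1;
	uint64_t kern_va   = UINT64_C(0xffff000012345678); /* sample VA */

	/* kern_hyp_va is one "and" with whichever mask applies. */
	printf("high mask: 0x%016" PRIx64 "\n", kern_va & high_mask);
	printf("low mask:  0x%016" PRIx64 "\n", kern_va & low_mask);
	return 0;
}

The "strict subset" remark in the removed comment falls out of this arithmetic: low_mask has one fewer bit set than high_mask, so applying the high mask first changes nothing in the low-mask case.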
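On the mechanism itself: alternative_cb/ALTERNATIVE_CB defer instruction generation to a boot-time callback instead of selecting between fixed sequences. The "and \reg, \reg, #1" is only a placeholder that reserves one instruction slot and pins the register numbers; kvm_update_va_mask rewrites it with an AND against the mask chosen at boot (and, as in the removed comment's VHE case, no translation is needed under VHE, so the slot can become a NOP). The standalone model below is a sketch, not the kernel implementation: update_va_mask and aarch64_and_low_bits are illustrative names, and the encoder only handles masks of the form (1 << n) - 1.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Encode "and xRd, xRn, #((1 << n) - 1)", the 64-bit AND (immediate):
 * sf=1 opc=00 100100 N=1 immr=0 imms=n-1 Rn Rd.  Only contiguous
 * low-bit masks are handled here; n must be in [1, 63].
 */
static uint32_t aarch64_and_low_bits(unsigned int rd, unsigned int rn,
				     unsigned int n)
{
	return 0x92400000u | ((n - 1) << 10) | (rn << 5) | rd;
}

/* Mirrors the shape of the kvm_update_va_mask prototype above. */
static void update_va_mask(const uint32_t *origptr, uint32_t *updptr,
			   int nr_inst, unsigned int va_bits)
{
	for (int i = 0; i < nr_inst; i++) {
		/* Reuse the register numbers encoded in the placeholder. */
		unsigned int rd = origptr[i] & 0x1f;
		unsigned int rn = (origptr[i] >> 5) & 0x1f;

		updptr[i] = aarch64_and_low_bits(rd, rn, va_bits);
	}
}

int main(void)
{
	uint32_t insn = 0x92400000u;	/* "and x0, x0, #1" placeholder */

	printf("before: 0x%08" PRIx32 "\n", insn);
	update_va_mask(&insn, &insn, 1, 47);	/* example boot-time width */
	printf("after:  0x%08" PRIx32 "\n", insn);
	return 0;
}

Feeding the placeholder word 0x92400000 ("and x0, x0, #1") through this with n = 47 yields 0x9240b800, i.e. "and x0, x0, #0x7fffffffffff": same registers, new mask, still exactly one instruction, which is why the slot can be patched in place.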