@@ -346,23 +346,11 @@ extern struct pv_lock_ops pv_lock_ops;
 #define paravirt_clobber(clobber)		\
 	[paravirt_clobber] "i" (clobber)
 
-/*
- * Generate some code, and mark it as patchable by the
- * apply_paravirt() alternate instruction patcher.
- */
-#define _paravirt_alt(insn_string, type, clobber)	\
-	"771:\n\t" insn_string "\n" "772:\n"		\
-	".pushsection .parainstructions,\"a\"\n"	\
-	_ASM_ALIGN "\n"					\
-	_ASM_PTR " 771b\n"				\
-	"  .byte " type "\n"				\
-	"  .byte 772b-771b\n"				\
-	"  .short " clobber "\n"			\
-	".popsection\n"
-
 /* Generate patchable code, with the default asm parameters. */
-#define paravirt_alt(insn_string)					\
-	_paravirt_alt(insn_string, "%c[paravirt_typenum]", "%c[paravirt_clobber]")
+#define paravirt_call							\
+	"PARAVIRT_CALL type=\"%c[paravirt_typenum]\""			\
+	" clobber=\"%c[paravirt_clobber]\""			\
+	" pv_opptr=\"%c[paravirt_opptr]\";"
 
 /* Simple instruction patching code. */
 #define NATIVE_LABEL(a,x,b) "\n\t.globl " a #x "_" #b "\n" a #x "_" #b ":\n\t"
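
Note on the mechanics: paravirt_call no longer open-codes the .parainstructions record as an inline-asm string. It expands to a plain string naming the PARAVIRT_CALL assembler macro added by the last hunk of this patch; the compiler resolves the %c output substitutions into constants, so by the time the assembler runs it sees an ordinary macro invocation. A minimal sketch of what could land in the generated .s file (the type number, clobber mask, and operand below are made-up illustrative values, not taken from this patch):

	PARAVIRT_CALL type="32" clobber="511" pv_opptr="pv_irq_ops+8";
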
@@ -390,16 +378,6 @@ unsigned native_patch(u8 type, u16 clobbers, void *ibuf,
 
 int paravirt_disable_iospace(void);
 
-/*
- * This generates an indirect call based on the operation type number.
- * The type number, computed in PARAVIRT_PATCH, is derived from the
- * offset into the paravirt_patch_template structure, and can therefore be
- * freely converted back into a structure offset.
- */
-#define PARAVIRT_CALL					\
-	ANNOTATE_RETPOLINE_SAFE			\
-	"call *%c[paravirt_opptr];"
-
 /*
  * These macros are intended to wrap calls through one of the paravirt
  * ops structs, so that they can be later identified and patched at
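
For reference while reading the move: the type number that the removed comment mentions is produced by PARAVIRT_PATCH, which this header defines (unchanged by this patch) along these lines:

	#define PARAVIRT_PATCH(x)					\
		(offsetof(struct paravirt_patch_template, x) / sizeof(void *))

so the byte recorded per call site can be converted back into an offset into paravirt_patch_template simply by multiplying with sizeof(void *).
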
@@ -537,7 +515,7 @@ int paravirt_disable_iospace(void);
 		/* since this condition will never hold */		\
 		if (sizeof(rettype) > sizeof(unsigned long)) {		\
 			asm volatile(pre				\
-				     paravirt_alt(PARAVIRT_CALL)	\
+				     paravirt_call			\
 				     post				\
 				     : call_clbr, ASM_CALL_CONSTRAINT	\
 				     : paravirt_type(op),		\
@@ -547,7 +525,7 @@ int paravirt_disable_iospace(void);
 		__ret = (rettype)((((u64)__edx) << 32) | __eax); \
 	} else {						\
 		asm volatile(pre				\
-			     paravirt_alt(PARAVIRT_CALL)	\
+			     paravirt_call			\
 			     post				\
 			     : call_clbr, ASM_CALL_CONSTRAINT	\
 			     : paravirt_type(op),		\
@@ -574,7 +552,7 @@ int paravirt_disable_iospace(void);
 		PVOP_VCALL_ARGS;				\
 		PVOP_TEST_NULL(op);				\
 		asm volatile(pre				\
-			     paravirt_alt(PARAVIRT_CALL)	\
+			     paravirt_call			\
 			     post				\
 			     : call_clbr, ASM_CALL_CONSTRAINT	\
 			     : paravirt_type(op),		\
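
These three hunks are the same mechanical substitution inside the PVOP_CALL/PVOP_VCALL bodies; the surrounding asm operands are untouched. The named operands that the paravirt_call string references are still supplied by paravirt_type() and paravirt_clobber(), which this header defines approximately as:

	#define paravirt_type(op)				\
		[paravirt_typenum] "i" (PARAVIRT_PATCH(op)),	\
		[paravirt_opptr] "i" (&(op))

so %c[paravirt_typenum] and %c[paravirt_opptr] resolve to the operation's patch-template index and the address of its function pointer, respectively.
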
@@ -694,6 +672,26 @@ struct paravirt_patch_site {
 extern struct paravirt_patch_site __parainstructions[],
 	__parainstructions_end[];
 
+#else	/* __ASSEMBLY__ */
+
+/*
+ * This generates an indirect call based on the operation type number.
+ * The type number, computed in PARAVIRT_PATCH, is derived from the
+ * offset into the paravirt_patch_template structure, and can therefore be
+ * freely converted back into a structure offset.
+ */
+.macro PARAVIRT_CALL type:req clobber:req pv_opptr:req
+771:	ANNOTATE_RETPOLINE_SAFE
+	call *\pv_opptr
+772:	.pushsection .parainstructions,"a"
+	_ASM_ALIGN
+	_ASM_PTR 771b
+	.byte \type
+	.byte 772b-771b
+	.short \clobber
+	.popsection
+.endm
+
 #endif	/* __ASSEMBLY__ */
 
 #endif	/* _ASM_X86_PARAVIRT_TYPES_H */
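
The record the new PARAVIRT_CALL macro pushes into .parainstructions keeps the pre-existing layout: call-site address, type byte, length byte, clobber mask. As a sketch of the consumer's view, this corresponds field-for-field to struct paravirt_patch_site declared earlier in this header (not visible in these hunks) and walked by apply_paravirt():

	struct paravirt_patch_site {
		u8 *instr;	/* _ASM_PTR 771b: start of the patchable code */
		u8 instrtype;	/* .byte \type: paravirt_patch_template index */
		u8 len;		/* .byte 772b-771b: length of the call sequence */
		u16 clobbers;	/* .short \clobber: registers the site may clobber */
	};
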