@@ -262,7 +262,8 @@ struct kvm_mmu {
struct x86_exception *fault);
gpa_t (*gva_to_gpa)(struct kvm_vcpu *vcpu, gva_t gva, u32 access,
struct x86_exception *exception);
- gpa_t (*translate_gpa)(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access);
+ gpa_t (*translate_gpa)(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access,
+ struct x86_exception *exception);
int (*sync_page)(struct kvm_vcpu *vcpu,
struct kvm_mmu_page *sp);
void (*invlpg)(struct kvm_vcpu *vcpu, gva_t gva);
@@ -923,7 +924,8 @@ void __kvm_mmu_free_some_pages(struct kvm_vcpu *vcpu);
int kvm_mmu_load(struct kvm_vcpu *vcpu);
void kvm_mmu_unload(struct kvm_vcpu *vcpu);
void kvm_mmu_sync_roots(struct kvm_vcpu *vcpu);
-gpa_t translate_nested_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access);
+gpa_t translate_nested_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access,
+ struct x86_exception *exception);
gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva,
struct x86_exception *exception);
gpa_t kvm_mmu_gva_to_gpa_fetch(struct kvm_vcpu *vcpu, gva_t gva,
@@ -943,7 +945,8 @@ void kvm_mmu_new_cr3(struct kvm_vcpu *vcpu);
void kvm_enable_tdp(void);
void kvm_disable_tdp(void);
-static inline gpa_t translate_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access)
+static inline gpa_t translate_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access,
+ struct x86_exception *exception)
{
return gpa;
}