@@ -595,7 +595,8 @@ static bool mmu_spte_update(u64 *sptep, u64 new_spte)
 	 * we always atomicly update it, see the comments in
 	 * spte_has_volatile_bits().
 	 */
-	if (is_writable_pte(old_spte) && !is_writable_pte(new_spte))
+	if (spte_is_locklessly_modifiable(old_spte) &&
+	    !is_writable_pte(new_spte))
 		ret = true;
 
 	if (!shadow_accessed_mask)
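
A minimal, self-contained sketch (not part of the patch) of the condition this hunk switches to: the flush decision now keys off whether the old spte could still be made writable by the lockless fast page fault path, not only off whether it was already writable. The bit positions and the main() driver below are illustrative assumptions; only the predicate logic mirrors the helpers named in the diff (is_writable_pte, spte_is_locklessly_modifiable).

	/* Illustrative sketch; bit positions are made up for the example. */
	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	typedef uint64_t u64;

	#define PT_WRITABLE_MASK	(1ull << 1)	/* illustrative */
	#define SPTE_HOST_WRITEABLE	(1ull << 57)	/* illustrative position */
	#define SPTE_MMU_WRITEABLE	(1ull << 58)	/* illustrative position */

	static bool is_writable_pte(u64 spte)
	{
		return spte & PT_WRITABLE_MASK;
	}

	/* Both software bits set => fast page fault may grant write access
	 * to this spte without holding mmu_lock. */
	static bool spte_is_locklessly_modifiable(u64 spte)
	{
		return (spte & (SPTE_HOST_WRITEABLE | SPTE_MMU_WRITEABLE)) ==
		       (SPTE_HOST_WRITEABLE | SPTE_MMU_WRITEABLE);
	}

	int main(void)
	{
		/* A read-only but lockless-modifiable spte being updated to
		 * a non-writable value. */
		u64 old_spte = SPTE_HOST_WRITEABLE | SPTE_MMU_WRITEABLE;
		u64 new_spte = 0;

		/* Old rule: no flush, since old_spte was not writable. */
		printf("old rule flush: %d\n",
		       is_writable_pte(old_spte) && !is_writable_pte(new_spte));

		/* New rule: flush, since the fast page fault path could have
		 * made old_spte writable without mmu_lock. */
		printf("new rule flush: %d\n",
		       spte_is_locklessly_modifiable(old_spte) &&
		       !is_writable_pte(new_spte));
		return 0;
	}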