@@ -1247,6 +1247,12 @@ i915_gem_validate_context(struct drm_device *dev, struct drm_file *file,
 	return ctx;
 }
 
+static bool gpu_write_needs_clflush(struct drm_i915_gem_object *obj)
+{
+	return !(obj->cache_level == I915_CACHE_NONE ||
+		 obj->cache_level == I915_CACHE_WT);
+}
+
 void i915_vma_move_to_active(struct i915_vma *vma,
 			     struct drm_i915_gem_request *req,
 			     unsigned int flags)
@@ -1276,6 +1282,8 @@ void i915_vma_move_to_active(struct i915_vma *vma,
 
 		/* update for the implicit flush after a batch */
 		obj->base.write_domain &= ~I915_GEM_GPU_DOMAINS;
+		if (!obj->cache_dirty && gpu_write_needs_clflush(obj))
+			obj->cache_dirty = true;
 	}
 
 	if (flags & EXEC_OBJECT_NEEDS_FENCE)
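For readers without the i915 tree at hand, the sketch below restates the predicate the first hunk introduces as a standalone, compilable C program. The enum mirrors enum i915_cache_level as found in the driver headers, but the struct drm_i915_gem_object here is a hypothetical stub carrying only the one field the helper inspects; it is an illustration of the check, not the real kernel type.

/*
 * Minimal sketch: which cache levels force a clflush after a GPU write.
 * Stand-in definitions only; not the real i915 headers.
 */
#include <stdbool.h>
#include <stdio.h>

enum i915_cache_level {
	I915_CACHE_NONE = 0,	/* uncached: GPU writes bypass the CPU cache */
	I915_CACHE_LLC,		/* write-back, coherent via the shared LLC */
	I915_CACHE_L3_LLC,	/* write-back, L3 + LLC */
	I915_CACHE_WT,		/* write-through: CPU cache never holds stale data */
};

/* Hypothetical stub of the GEM object, reduced to the field used here. */
struct drm_i915_gem_object {
	enum i915_cache_level cache_level;
};

/*
 * A GPU write can leave the CPU's cached view of the pages stale unless
 * the mapping is uncached or write-through, so only those two levels
 * avoid the eventual clflush.
 */
static bool gpu_write_needs_clflush(struct drm_i915_gem_object *obj)
{
	return !(obj->cache_level == I915_CACHE_NONE ||
		 obj->cache_level == I915_CACHE_WT);
}

int main(void)
{
	static const char * const name[] = { "NONE", "LLC", "L3_LLC", "WT" };
	struct drm_i915_gem_object obj;

	/* Print the predicate's truth table across all cache levels. */
	for (obj.cache_level = I915_CACHE_NONE;
	     obj.cache_level <= I915_CACHE_WT; obj.cache_level++)
		printf("I915_CACHE_%-7s -> needs clflush after GPU write: %s\n",
		       name[obj.cache_level],
		       gpu_write_needs_clflush(&obj) ? "yes" : "no");
	return 0;
}

Note that the second hunk only raises obj->cache_dirty when a batch may have written through a cacheable mapping; nothing is flushed at this point. The flag presumably lets a later domain transition back to the CPU decide when the clflush actually happens.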