@@ -25,6 +25,7 @@ struct udl_gem_object *udl_gem_alloc_object(struct drm_device *dev,
 		return NULL;
 	}
 
+	obj->flags = UDL_BO_CACHEABLE;
 	return obj;
 }
 
@@ -56,6 +57,23 @@ udl_gem_create(struct drm_file *file,
 	return 0;
 }
 
+static void update_vm_cache_attr(struct udl_gem_object *obj,
+				 struct vm_area_struct *vma)
+{
+	DRM_DEBUG_KMS("flags = 0x%x\n", obj->flags);
+
+	/* non-cacheable as default. */
+	if (obj->flags & UDL_BO_CACHEABLE) {
+		vma->vm_page_prot = vm_get_page_prot(vma->vm_flags);
+	} else if (obj->flags & UDL_BO_WC) {
+		vma->vm_page_prot =
+			pgprot_writecombine(vm_get_page_prot(vma->vm_flags));
+	} else {
+		vma->vm_page_prot =
+			pgprot_noncached(vm_get_page_prot(vma->vm_flags));
+	}
+}
+
 int udl_dumb_create(struct drm_file *file,
 		    struct drm_device *dev,
 		    struct drm_mode_create_dumb *args)
@@ -77,6 +95,8 @@ int udl_drm_gem_mmap(struct file *filp, struct vm_area_struct *vma)
 	vma->vm_flags &= ~VM_PFNMAP;
 	vma->vm_flags |= VM_MIXEDMAP;
 
+	update_vm_cache_attr(to_udl_bo(vma->vm_private_data), vma);
+
 	return ret;
 }
 
@@ -279,6 +299,7 @@ struct drm_gem_object *udl_gem_prime_import(struct drm_device *dev,
 	}
 
 	uobj->base.import_attach = attach;
+	uobj->flags = UDL_BO_WC;
 
 	return &uobj->base;
 }
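
For reference, update_vm_cache_attr() and the two flag assignments above assume a flags field on struct udl_gem_object plus UDL_BO_CACHEABLE / UDL_BO_WC bit definitions; those definitions are not part of the hunks shown here and would live in udl_drv.h. A minimal sketch of what this code relies on (bit values and field placement are illustrative, not taken from this section):

/* Sketch only: cache-attribute bits consumed by update_vm_cache_attr().
 * The real definitions belong in udl_drv.h, not in these hunks. */
#define UDL_BO_CACHEABLE	(1 << 0)	/* map CPU-cached */
#define UDL_BO_WC		(1 << 1)	/* map write-combined */

struct udl_gem_object {
	struct drm_gem_object base;
	/* ... existing members ... */
	unsigned int flags;	/* UDL_BO_* cache attribute bits */
};

With this, locally allocated objects default to cacheable mappings, while dma-buf imports are marked write-combined so CPU writes do not rely on cache coherency with the exporting device.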