@@ -42,7 +42,7 @@ void radeon_gem_object_free(struct drm_gem_object *gobj)
 int radeon_gem_object_create(struct radeon_device *rdev, int size,
 				int alignment, int initial_domain,
-				bool discardable, bool kernel,
+				u32 flags, bool discardable, bool kernel,
 				struct drm_gem_object **obj)
 {
 	struct radeon_bo *robj;
@@ -64,7 +64,8 @@ int radeon_gem_object_create(struct radeon_device *rdev, int size,
 	}
 
 retry:
-	r = radeon_bo_create(rdev, size, alignment, kernel, initial_domain, NULL, &robj);
+	r = radeon_bo_create(rdev, size, alignment, kernel, initial_domain,
+			     flags, NULL, &robj);
 	if (r) {
 		if (r != -ERESTARTSYS) {
 			if (initial_domain == RADEON_GEM_DOMAIN_VRAM) {
@@ -252,8 +253,8 @@ int radeon_gem_create_ioctl(struct drm_device *dev, void *data,
 	/* create a gem object to contain this object in */
 	args->size = roundup(args->size, PAGE_SIZE);
 	r = radeon_gem_object_create(rdev, args->size, args->alignment,
-					args->initial_domain, false,
-					false, &gobj);
+					args->initial_domain, args->flags,
+					false, false, &gobj);
 	if (r) {
 		up_read(&rdev->exclusive_lock);
 		r = radeon_gem_handle_lockup(rdev, r);
@@ -461,11 +462,6 @@ int radeon_gem_va_ioctl(struct drm_device *dev, void *data,
 		args->operation = RADEON_VA_RESULT_ERROR;
 		return -EINVAL;
 	}
-	if (!(args->flags & RADEON_VM_PAGE_SNOOPED)) {
-		dev_err(&dev->pdev->dev, "only supported snooped mapping for now\n");
-		args->operation = RADEON_VA_RESULT_ERROR;
-		return -EINVAL;
-	}
 
 	switch (args->operation) {
 	case RADEON_VA_MAP:
@@ -572,7 +568,7 @@ int radeon_mode_dumb_create(struct drm_file *file_priv,
 	args->size = ALIGN(args->size, PAGE_SIZE);
 
 	r = radeon_gem_object_create(rdev, args->size, 0,
-				     RADEON_GEM_DOMAIN_VRAM,
+				     RADEON_GEM_DOMAIN_VRAM, 0,
 				     false, ttm_bo_type_device,
 				     &gobj);
 	if (r)