@@ -3354,11 +3354,9 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
 	struct drm_device *dev = obj->base.dev;
 	struct drm_i915_private *dev_priv = dev->dev_private;
 	u32 fence_alignment, unfenced_alignment;
+	u32 search_flag, alloc_flag;
+	u64 start, end;
 	u64 size, fence_size;
-	u64 start =
-		flags & PIN_OFFSET_BIAS ? flags & PIN_OFFSET_MASK : 0;
-	u64 end =
-		flags & PIN_MAPPABLE ? dev_priv->gtt.mappable_end : vm->total;
 	struct i915_vma *vma;
 	int ret;
 
@@ -3398,6 +3396,13 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
 		size = flags & PIN_MAPPABLE ? fence_size : obj->base.size;
 	}
 
+	start = flags & PIN_OFFSET_BIAS ? flags & PIN_OFFSET_MASK : 0;
+	end = vm->total;
+	if (flags & PIN_MAPPABLE)
+		end = min_t(u64, end, dev_priv->gtt.mappable_end);
+	if (flags & PIN_ZONE_4G)
+		end = min_t(u64, end, (1ULL << 32));
+
 	if (alignment == 0)
 		alignment = flags & PIN_MAPPABLE ? fence_alignment :
 						unfenced_alignment;
@@ -3433,13 +3438,21 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
 	if (IS_ERR(vma))
 		goto err_unpin;
 
+	if (flags & PIN_HIGH) {
+		search_flag = DRM_MM_SEARCH_BELOW;
+		alloc_flag = DRM_MM_CREATE_TOP;
+	} else {
+		search_flag = DRM_MM_SEARCH_DEFAULT;
+		alloc_flag = DRM_MM_CREATE_DEFAULT;
+	}
+
 search_free:
 	ret = drm_mm_insert_node_in_range_generic(&vm->mm, &vma->node,
 						  size, alignment,
 						  obj->cache_level,
 						  start, end,
-						  DRM_MM_SEARCH_DEFAULT,
-						  DRM_MM_CREATE_DEFAULT);
+						  search_flag,
+						  alloc_flag);
 	if (ret) {
 		ret = i915_gem_evict_something(dev, vm, size, alignment,
 					       obj->cache_level,