@@ -212,10 +212,19 @@ int amdgpu_ring_init(struct amdgpu_device *adev, struct amdgpu_ring *ring,
 		}
 	}
 
-	r = amdgpu_wb_get(adev, &ring->fence_offs);
-	if (r) {
-		dev_err(adev->dev, "(%d) ring fence_offs wb alloc failed\n", r);
-		return r;
+	if (amdgpu_sriov_vf(adev) && ring->funcs->type == AMDGPU_RING_TYPE_GFX) {
+		r = amdgpu_wb_get_256Bit(adev, &ring->fence_offs);
+		if (r) {
+			dev_err(adev->dev, "(%d) ring fence_offs wb alloc failed\n", r);
+			return r;
+		}
+
+	} else {
+		r = amdgpu_wb_get(adev, &ring->fence_offs);
+		if (r) {
+			dev_err(adev->dev, "(%d) ring fence_offs wb alloc failed\n", r);
+			return r;
+		}
 	}
 
 	r = amdgpu_wb_get(adev, &ring->cond_exe_offs);
@@ -278,17 +287,18 @@ void amdgpu_ring_fini(struct amdgpu_ring *ring)
 	ring->ready = false;
 
 	if (ring->funcs->support_64bit_ptrs) {
-		amdgpu_wb_free_64bit(ring->adev, ring->cond_exe_offs);
-		amdgpu_wb_free_64bit(ring->adev, ring->fence_offs);
 		amdgpu_wb_free_64bit(ring->adev, ring->rptr_offs);
 		amdgpu_wb_free_64bit(ring->adev, ring->wptr_offs);
 	} else {
-		amdgpu_wb_free(ring->adev, ring->cond_exe_offs);
-		amdgpu_wb_free(ring->adev, ring->fence_offs);
 		amdgpu_wb_free(ring->adev, ring->rptr_offs);
 		amdgpu_wb_free(ring->adev, ring->wptr_offs);
 	}
 
+	amdgpu_wb_free(ring->adev, ring->cond_exe_offs);
+	if (amdgpu_sriov_vf(ring->adev) && ring->funcs->type == AMDGPU_RING_TYPE_GFX)
+		amdgpu_wb_free_256bit(ring->adev, ring->fence_offs);
+	else
+		amdgpu_wb_free(ring->adev, ring->fence_offs);
 
 	amdgpu_bo_free_kernel(&ring->ring_obj,
 			      &ring->gpu_addr,