|
@@ -906,6 +906,22 @@ static void sdma_v2_4_ring_emit_vm_flush(struct amdgpu_ring *ring,
|
|
SDMA_PKT_POLL_REGMEM_DW5_INTERVAL(10)); /* retry count, poll interval */
|
|
SDMA_PKT_POLL_REGMEM_DW5_INTERVAL(10)); /* retry count, poll interval */
|
|
}
|
|
}
|
|
|
|
|
|
|
|
/* Worst-case ring-buffer space, in dwords, consumed by one emitted IB.
 * Value mirrors what sdma_v2_4_ring_emit_ib() actually writes.
 * @ring: unused; present to match the amdgpu_ring_funcs callback signature.
 */
static unsigned sdma_v2_4_ring_get_emit_ib_size(struct amdgpu_ring *ring)
{
	/* sdma_v2_4_ring_emit_ib */
	return 7 + 6;
}
|
|
|
|
+
|
|
|
|
/* Worst-case per-submission frame overhead, in dwords: everything the
 * driver may emit around the IBs (flushes, sync, VM flush, fences).
 * Each term matches the dword count of the named emit helper.
 * @ring: unused; present to match the amdgpu_ring_funcs callback signature.
 */
static unsigned sdma_v2_4_ring_get_dma_frame_size(struct amdgpu_ring *ring)
{
	unsigned size = 0;

	size += 6;		/* sdma_v2_4_ring_emit_hdp_flush */
	size += 3;		/* sdma_v2_4_ring_emit_hdp_invalidate */
	size += 6;		/* sdma_v2_4_ring_emit_pipeline_sync */
	size += 12;		/* sdma_v2_4_ring_emit_vm_flush */
	size += 10 + 10 + 10;	/* sdma_v2_4_ring_emit_fence x3 for user fence, vm fence */

	return size;
}
|
|
|
|
+
|
|
static int sdma_v2_4_early_init(void *handle)
|
|
static int sdma_v2_4_early_init(void *handle)
|
|
{
|
|
{
|
|
struct amdgpu_device *adev = (struct amdgpu_device *)handle;
|
|
struct amdgpu_device *adev = (struct amdgpu_device *)handle;
|
|
@@ -1224,6 +1240,8 @@ static const struct amdgpu_ring_funcs sdma_v2_4_ring_funcs = {
|
|
.test_ib = sdma_v2_4_ring_test_ib,
|
|
.test_ib = sdma_v2_4_ring_test_ib,
|
|
.insert_nop = sdma_v2_4_ring_insert_nop,
|
|
.insert_nop = sdma_v2_4_ring_insert_nop,
|
|
.pad_ib = sdma_v2_4_ring_pad_ib,
|
|
.pad_ib = sdma_v2_4_ring_pad_ib,
|
|
|
|
+ .get_emit_ib_size = sdma_v2_4_ring_get_emit_ib_size,
|
|
|
|
+ .get_dma_frame_size = sdma_v2_4_ring_get_dma_frame_size,
|
|
};
|
|
};
|
|
|
|
|
|
static void sdma_v2_4_set_ring_funcs(struct amdgpu_device *adev)
|
|
static void sdma_v2_4_set_ring_funcs(struct amdgpu_device *adev)
|