|
@@ -1108,6 +1108,22 @@ static void sdma_v3_0_ring_emit_vm_flush(struct amdgpu_ring *ring,
|
|
SDMA_PKT_POLL_REGMEM_DW5_INTERVAL(10)); /* retry count, poll interval */
|
|
SDMA_PKT_POLL_REGMEM_DW5_INTERVAL(10)); /* retry count, poll interval */
|
|
}
|
|
}
|
|
|
|
|
|
|
|
/*
 * Worst-case number of ring dwords written by a single
 * sdma_v3_0_ring_emit_ib() call; presumably used by the common ring
 * layer to reserve space before emitting an IB — verify against
 * amdgpu_ring.c. The ring argument is unused: the size is fixed.
 */
static unsigned sdma_v3_0_ring_get_emit_ib_size(struct amdgpu_ring *ring)
{
	unsigned size;

	size = 7 + 6; /* sdma_v3_0_ring_emit_ib */

	return size;
}
|
|
|
|
+
|
|
|
|
/*
 * Worst-case number of ring dwords one full submission frame can
 * consume on SDMA v3.0, accumulated per emit helper. The ring
 * argument is unused: the total is a fixed per-ASIC constant.
 */
static unsigned sdma_v3_0_ring_get_dma_frame_size(struct amdgpu_ring *ring)
{
	unsigned size = 0;

	size += 6;		/* sdma_v3_0_ring_emit_hdp_flush */
	size += 3;		/* sdma_v3_0_ring_emit_hdp_invalidate */
	size += 6;		/* sdma_v3_0_ring_emit_pipeline_sync */
	size += 12;		/* sdma_v3_0_ring_emit_vm_flush */
	size += 10 + 10 + 10;	/* sdma_v3_0_ring_emit_fence x3 for user fence, vm fence */

	return size;
}
|
|
|
|
+
|
|
static int sdma_v3_0_early_init(void *handle)
|
|
static int sdma_v3_0_early_init(void *handle)
|
|
{
|
|
{
|
|
struct amdgpu_device *adev = (struct amdgpu_device *)handle;
|
|
struct amdgpu_device *adev = (struct amdgpu_device *)handle;
|
|
@@ -1569,6 +1585,8 @@ static const struct amdgpu_ring_funcs sdma_v3_0_ring_funcs = {
|
|
.test_ib = sdma_v3_0_ring_test_ib,
|
|
.test_ib = sdma_v3_0_ring_test_ib,
|
|
.insert_nop = sdma_v3_0_ring_insert_nop,
|
|
.insert_nop = sdma_v3_0_ring_insert_nop,
|
|
.pad_ib = sdma_v3_0_ring_pad_ib,
|
|
.pad_ib = sdma_v3_0_ring_pad_ib,
|
|
|
|
+ .get_emit_ib_size = sdma_v3_0_ring_get_emit_ib_size,
|
|
|
|
+ .get_dma_frame_size = sdma_v3_0_ring_get_dma_frame_size,
|
|
};
|
|
};
|
|
|
|
|
|
static void sdma_v3_0_set_ring_funcs(struct amdgpu_device *adev)
|
|
static void sdma_v3_0_set_ring_funcs(struct amdgpu_device *adev)
|