@@ -445,12 +445,12 @@ static void dmabounce_sync_for_device(struct device *dev,
 	arm_dma_ops.sync_single_for_device(dev, handle, size, dir);
 }
 
-static int dmabounce_set_mask(struct device *dev, u64 dma_mask)
+static int dmabounce_dma_supported(struct device *dev, u64 dma_mask)
 {
 	if (dev->archdata.dmabounce)
 		return 0;
 
-	return arm_dma_ops.set_dma_mask(dev, dma_mask);
+	return arm_dma_ops.dma_supported(dev, dma_mask);
 }
 
 static int dmabounce_mapping_error(struct device *dev, dma_addr_t dma_addr)
@@ -471,9 +471,8 @@ static const struct dma_map_ops dmabounce_ops = {
 	.unmap_sg		= arm_dma_unmap_sg,
 	.sync_sg_for_cpu	= arm_dma_sync_sg_for_cpu,
 	.sync_sg_for_device	= arm_dma_sync_sg_for_device,
-	.set_dma_mask		= dmabounce_set_mask,
+	.dma_supported		= dmabounce_dma_supported,
 	.mapping_error		= dmabounce_mapping_error,
-	.dma_supported		= arm_dma_supported,
 };
 
 static int dmabounce_init_pool(struct dmabounce_pool *pool, struct device *dev,