@@ -96,6 +96,7 @@ static int ccp_platform_probe(struct platform_device *pdev)
 	struct ccp_platform *ccp_platform;
 	struct device *dev = &pdev->dev;
 	struct acpi_device *adev = ACPI_COMPANION(dev);
+	enum dev_dma_attr attr;
 	struct resource *ior;
 	int ret;
 
@@ -122,18 +123,24 @@ static int ccp_platform_probe(struct platform_device *pdev)
 	}
 	ccp->io_regs = ccp->io_map;
 
-	ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(48));
-	if (ret) {
-		dev_err(dev, "dma_set_mask_and_coherent failed (%d)\n", ret);
+	attr = device_get_dma_attr(dev);
+	if (attr == DEV_DMA_NOT_SUPPORTED) {
+		dev_err(dev, "DMA is not supported");
 		goto e_err;
 	}
 
-	ccp_platform->coherent = device_dma_is_coherent(ccp->dev);
+	ccp_platform->coherent = (attr == DEV_DMA_COHERENT);
 	if (ccp_platform->coherent)
 		ccp->axcache = CACHE_WB_NO_ALLOC;
 	else
 		ccp->axcache = CACHE_NONE;
 
+	ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(48));
+	if (ret) {
+		dev_err(dev, "dma_set_mask_and_coherent failed (%d)\n", ret);
+		goto e_err;
+	}
+
 	dev_set_drvdata(dev, ccp);
 
 	ret = ccp_init(ccp);