@@ -480,29 +480,21 @@ static void *
 gart_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_addr,
 		    gfp_t flag, unsigned long attrs)
 {
-	dma_addr_t paddr;
-	unsigned long align_mask;
-	struct page *page;
-
-	if (force_iommu && dev->coherent_dma_mask > DMA_BIT_MASK(24)) {
-		flag &= ~(__GFP_DMA | __GFP_HIGHMEM | __GFP_DMA32);
-		page = alloc_pages(flag | __GFP_ZERO, get_order(size));
-		if (!page)
-			return NULL;
-
-		align_mask = (1UL << get_order(size)) - 1;
-		paddr = dma_map_area(dev, page_to_phys(page), size,
-				     DMA_BIDIRECTIONAL, align_mask);
-
-		flush_gart();
-		if (paddr != bad_dma_addr) {
-			*dma_addr = paddr;
-			return page_address(page);
-		}
-		__free_pages(page, get_order(size));
-	} else
-		return dma_direct_alloc(dev, size, dma_addr, flag, attrs);
+	void *vaddr;
+
+	vaddr = dma_direct_alloc(dev, size, dma_addr, flag, attrs);
+	if (!vaddr ||
+	    !force_iommu || dev->coherent_dma_mask <= DMA_BIT_MASK(24))
+		return vaddr;
 
+	*dma_addr = dma_map_area(dev, virt_to_phys(vaddr), size,
+			DMA_BIDIRECTIONAL, (1UL << get_order(size)) - 1);
+	flush_gart();
+	if (unlikely(*dma_addr == bad_dma_addr))
+		goto out_free;
+	return vaddr;
+out_free:
+	dma_direct_free(dev, size, vaddr, *dma_addr, attrs);
 	return NULL;
 }
 