dma-mapping.h

#ifndef ___ASM_SPARC_DMA_MAPPING_H
#define ___ASM_SPARC_DMA_MAPPING_H

#include <linux/scatterlist.h>
#include <linux/mm.h>
#include <linux/dma-debug.h>

#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)

#define HAVE_ARCH_DMA_SUPPORTED 1
int dma_supported(struct device *dev, u64 mask);

static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
				  enum dma_data_direction dir)
{
	/* Since dma_{alloc,free}_noncoherent() allocated coherent memory, this
	 * routine can be a nop.
	 */
}

extern struct dma_map_ops *dma_ops;
extern struct dma_map_ops *leon_dma_ops;
extern struct dma_map_ops pci32_dma_ops;
extern struct bus_type pci_bus_type;
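
/*
 * Select the dma_map_ops for a device: LEON systems use leon_dma_ops,
 * sparc32 PCI devices use pci32_dma_ops, and everything else falls
 * back to the platform-wide dma_ops.
 */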
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
#ifdef CONFIG_SPARC_LEON
	if (sparc_cpu_model == sparc_leon)
		return leon_dma_ops;
#endif
#if defined(CONFIG_SPARC32) && defined(CONFIG_PCI)
	if (dev->bus == &pci_bus_type)
		return &pci32_dma_ops;
#endif
	return dma_ops;
}
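
/*
 * sparc provides its own dma_set_mask() (HAVE_ARCH_DMA_SET_MASK):
 * only PCI devices may change their DMA mask, and only to a value
 * that dma_supported() accepts.
 */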
#define HAVE_ARCH_DMA_SET_MASK 1

static inline int dma_set_mask(struct device *dev, u64 mask)
{
#ifdef CONFIG_PCI
	if (dev->bus == &pci_bus_type) {
		if (!dev->dma_mask || !dma_supported(dev, mask))
			return -EINVAL;
		*dev->dma_mask = mask;
		return 0;
	}
#endif
	return -EINVAL;
}

#include <asm-generic/dma-mapping-common.h>

#endif