/*
 * linux/dma-mapping.h — generic DMA mapping API.
 */
  1. #ifndef _LINUX_DMA_MAPPING_H
  2. #define _LINUX_DMA_MAPPING_H
  3. #include <linux/device.h>
  4. #include <linux/err.h>
  5. #include <linux/dma-attrs.h>
  6. #include <linux/dma-direction.h>
  7. #include <linux/scatterlist.h>
/*
 * Per-architecture DMA operation table.  An architecture (or IOMMU
 * implementation) fills one of these in and the generic dma_*() wrappers
 * dispatch through it.
 */
struct dma_map_ops {
	/* Allocate/free a coherent (consistent) DMA buffer. */
	void* (*alloc_coherent)(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t gfp);
	void (*free_coherent)(struct device *dev, size_t size,
			      void *vaddr, dma_addr_t dma_handle);
	/* Map/unmap a single page for streaming DMA. */
	dma_addr_t (*map_page)(struct device *dev, struct page *page,
			       unsigned long offset, size_t size,
			       enum dma_data_direction dir,
			       struct dma_attrs *attrs);
	void (*unmap_page)(struct device *dev, dma_addr_t dma_handle,
			   size_t size, enum dma_data_direction dir,
			   struct dma_attrs *attrs);
	/* Map/unmap a scatter/gather list for streaming DMA. */
	int (*map_sg)(struct device *dev, struct scatterlist *sg,
		      int nents, enum dma_data_direction dir,
		      struct dma_attrs *attrs);
	void (*unmap_sg)(struct device *dev,
			 struct scatterlist *sg, int nents,
			 enum dma_data_direction dir,
			 struct dma_attrs *attrs);
	/* Cache-coherency maintenance around CPU or device access. */
	void (*sync_single_for_cpu)(struct device *dev,
				    dma_addr_t dma_handle, size_t size,
				    enum dma_data_direction dir);
	void (*sync_single_for_device)(struct device *dev,
				       dma_addr_t dma_handle, size_t size,
				       enum dma_data_direction dir);
	void (*sync_sg_for_cpu)(struct device *dev,
				struct scatterlist *sg, int nents,
				enum dma_data_direction dir);
	void (*sync_sg_for_device)(struct device *dev,
				   struct scatterlist *sg, int nents,
				   enum dma_data_direction dir);
	/* Non-zero when dma_addr is an error cookie from map_page/map_sg. */
	int (*mapping_error)(struct device *dev, dma_addr_t dma_addr);
	int (*dma_supported)(struct device *dev, u64 mask);
	int (*set_dma_mask)(struct device *dev, u64 mask);
	/* presumably: non-zero when DMA addresses are plain physical
	 * addresses (no IOMMU translation) — confirm against arch users */
	int is_phys;
};
/*
 * Build a DMA address mask with the low n bits set.  n == 64 is
 * special-cased because 1ULL << 64 would be undefined behaviour.
 */
#define DMA_BIT_MASK(n)	(((n) == 64) ? ~0ULL : ((1ULL<<(n))-1))

/* Dummy type used only to mark the legacy DMA_*BIT_MASK macros deprecated:
 * each macro below casts through it, triggering a __deprecated warning. */
typedef u64 DMA_nnBIT_MASK __deprecated;

/*
 * NOTE: do not use the below macros in new code and do not add new definitions
 * here.
 *
 * Instead, just open-code DMA_BIT_MASK(n) within your driver
 */
#define DMA_64BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(64)
#define DMA_48BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(48)
#define DMA_47BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(47)
#define DMA_40BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(40)
#define DMA_39BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(39)
#define DMA_35BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(35)
#define DMA_32BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(32)
#define DMA_31BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(31)
#define DMA_30BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(30)
#define DMA_29BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(29)
#define DMA_28BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(28)
#define DMA_24BIT_MASK	(DMA_nnBIT_MASK)DMA_BIT_MASK(24)

/* "No DMA capability" sentinel for a device's dma_mask. */
#define DMA_MASK_NONE	0x0ULL
  65. static inline int valid_dma_direction(int dma_direction)
  66. {
  67. return ((dma_direction == DMA_BIDIRECTIONAL) ||
  68. (dma_direction == DMA_TO_DEVICE) ||
  69. (dma_direction == DMA_FROM_DEVICE));
  70. }
  71. static inline int is_device_dma_capable(struct device *dev)
  72. {
  73. return dev->dma_mask != NULL && *dev->dma_mask != DMA_MASK_NONE;
  74. }
  75. #ifdef CONFIG_HAS_DMA
  76. #include <asm/dma-mapping.h>
  77. #else
  78. #include <asm-generic/dma-mapping-broken.h>
  79. #endif
  80. static inline u64 dma_get_mask(struct device *dev)
  81. {
  82. if (dev && dev->dma_mask && *dev->dma_mask)
  83. return *dev->dma_mask;
  84. return DMA_BIT_MASK(32);
  85. }
#ifdef ARCH_HAS_DMA_SET_COHERENT_MASK
int dma_set_coherent_mask(struct device *dev, u64 mask);
#else
/*
 * Set the mask governing coherent (consistent) allocations.  Returns
 * -EIO when the device cannot address memory under the requested mask.
 */
static inline int dma_set_coherent_mask(struct device *dev, u64 mask)
{
	if (!dma_supported(dev, mask))
		return -EIO;
	dev->coherent_dma_mask = mask;
	return 0;
}
#endif
/* Mask the platform reports as required to reach all of this device's
 * addressable memory. */
extern u64 dma_get_required_mask(struct device *dev);
  98. static inline unsigned int dma_get_max_seg_size(struct device *dev)
  99. {
  100. return dev->dma_parms ? dev->dma_parms->max_segment_size : 65536;
  101. }
  102. static inline unsigned int dma_set_max_seg_size(struct device *dev,
  103. unsigned int size)
  104. {
  105. if (dev->dma_parms) {
  106. dev->dma_parms->max_segment_size = size;
  107. return 0;
  108. } else
  109. return -EIO;
  110. }
  111. static inline unsigned long dma_get_seg_boundary(struct device *dev)
  112. {
  113. return dev->dma_parms ?
  114. dev->dma_parms->segment_boundary_mask : 0xffffffff;
  115. }
  116. static inline int dma_set_seg_boundary(struct device *dev, unsigned long mask)
  117. {
  118. if (dev->dma_parms) {
  119. dev->dma_parms->segment_boundary_mask = mask;
  120. return 0;
  121. } else
  122. return -EIO;
  123. }
#ifdef CONFIG_HAS_DMA
/*
 * Minimum alignment (in bytes) DMA buffers should have; architectures
 * define ARCH_DMA_MINALIGN when they need more than byte alignment,
 * otherwise 1 means "no special requirement".
 */
static inline int dma_get_cache_alignment(void)
{
#ifdef ARCH_DMA_MINALIGN
	return ARCH_DMA_MINALIGN;
#endif
	return 1;
}
#endif
/* flags for the coherent memory api (dma_declare_coherent_memory();
 * semantics per Documentation/DMA-API) */
#define DMA_MEMORY_MAP			0x01	/* region is directly mappable */
#define DMA_MEMORY_IO			0x02	/* region accessed via I/O accessors */
#define DMA_MEMORY_INCLUDES_CHILDREN	0x04	/* child devices may use it too */
#define DMA_MEMORY_EXCLUSIVE		0x08	/* allocate only from this region */
#ifndef ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
/*
 * Stubs for architectures without device-private coherent memory support.
 * dma_declare_coherent_memory() returns 0 ("nothing declared"), so callers
 * fall back to the normal coherent allocator.
 */
static inline int
dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
			    dma_addr_t device_addr, size_t size, int flags)
{
	return 0;
}

static inline void
dma_release_declared_memory(struct device *dev)
{
}

/* No declared region exists, so any attempt to occupy one is "busy". */
static inline void *
dma_mark_declared_memory_occupied(struct device *dev,
				  dma_addr_t device_addr, size_t size)
{
	return ERR_PTR(-EBUSY);
}
#endif
  156. /*
  157. * Managed DMA API
  158. */
  159. extern void *dmam_alloc_coherent(struct device *dev, size_t size,
  160. dma_addr_t *dma_handle, gfp_t gfp);
  161. extern void dmam_free_coherent(struct device *dev, size_t size, void *vaddr,
  162. dma_addr_t dma_handle);
  163. extern void *dmam_alloc_noncoherent(struct device *dev, size_t size,
  164. dma_addr_t *dma_handle, gfp_t gfp);
  165. extern void dmam_free_noncoherent(struct device *dev, size_t size, void *vaddr,
  166. dma_addr_t dma_handle);
  167. #ifdef ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
  168. extern int dmam_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
  169. dma_addr_t device_addr, size_t size,
  170. int flags);
  171. extern void dmam_release_declared_memory(struct device *dev);
  172. #else /* ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY */
  173. static inline int dmam_declare_coherent_memory(struct device *dev,
  174. dma_addr_t bus_addr, dma_addr_t device_addr,
  175. size_t size, gfp_t gfp)
  176. {
  177. return 0;
  178. }
  179. static inline void dmam_release_declared_memory(struct device *dev)
  180. {
  181. }
  182. #endif /* ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY */
#ifndef CONFIG_HAVE_DMA_ATTRS
struct dma_attrs;	/* opaque: never dereferenced when attrs are unsupported */

/*
 * Architectures without DMA-attribute support: the *_attrs variants
 * simply drop the attrs argument and call the plain mapping functions.
 */
#define dma_map_single_attrs(dev, cpu_addr, size, dir, attrs) \
	dma_map_single(dev, cpu_addr, size, dir)

#define dma_unmap_single_attrs(dev, dma_addr, size, dir, attrs) \
	dma_unmap_single(dev, dma_addr, size, dir)

#define dma_map_sg_attrs(dev, sgl, nents, dir, attrs) \
	dma_map_sg(dev, sgl, nents, dir)

#define dma_unmap_sg_attrs(dev, sgl, nents, dir, attrs) \
	dma_unmap_sg(dev, sgl, nents, dir)
#endif /* CONFIG_HAVE_DMA_ATTRS */
#ifdef CONFIG_NEED_DMA_MAP_STATE
/*
 * Helpers for drivers that must remember a mapping's address/length so
 * the buffer can be unmapped later.  When the architecture needs that
 * state these declare real struct members; otherwise (below) they
 * compile away so the saved state costs no storage.
 */
#define DEFINE_DMA_UNMAP_ADDR(ADDR_NAME)	dma_addr_t ADDR_NAME
#define DEFINE_DMA_UNMAP_LEN(LEN_NAME)		__u32 LEN_NAME
#define dma_unmap_addr(PTR, ADDR_NAME)		((PTR)->ADDR_NAME)
#define dma_unmap_addr_set(PTR, ADDR_NAME, VAL)	(((PTR)->ADDR_NAME) = (VAL))
#define dma_unmap_len(PTR, LEN_NAME)		((PTR)->LEN_NAME)
#define dma_unmap_len_set(PTR, LEN_NAME, VAL)	(((PTR)->LEN_NAME) = (VAL))
#else
/* No state needed: members vanish, accessors are no-ops reading 0. */
#define DEFINE_DMA_UNMAP_ADDR(ADDR_NAME)
#define DEFINE_DMA_UNMAP_LEN(LEN_NAME)
#define dma_unmap_addr(PTR, ADDR_NAME)		(0)
#define dma_unmap_addr_set(PTR, ADDR_NAME, VAL)	do { } while (0)
#define dma_unmap_len(PTR, LEN_NAME)		(0)
#define dma_unmap_len_set(PTR, LEN_NAME, VAL)	do { } while (0)
#endif

#endif /* _LINUX_DMA_MAPPING_H */