dma-swiotlb.c

#include <linux/mm.h>
#include <linux/init.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/swiotlb.h>
#include <linux/bootmem.h>

#include <asm/bootinfo.h>
#include <boot_param.h>
#include <dma-coherence.h>
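
/*
 * Coherent allocation: try the per-device coherent memory pool first and
 * fall back to swiotlb. GFP zone flags are derived from the device's
 * coherent DMA mask so the buffer ends up in an address range the device
 * can actually reach.
 */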
static void *loongson_dma_alloc_coherent(struct device *dev, size_t size,
		dma_addr_t *dma_handle, gfp_t gfp, struct dma_attrs *attrs)
{
	void *ret;

	if (dma_alloc_from_coherent(dev, size, dma_handle, &ret))
		return ret;

	/* ignore region specifiers */
	gfp &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM);

#ifdef CONFIG_ISA
	if (dev == NULL)
		gfp |= __GFP_DMA;
	else
#endif
#ifdef CONFIG_ZONE_DMA
	if (dev->coherent_dma_mask < DMA_BIT_MASK(32))
		gfp |= __GFP_DMA;
	else
#endif
#ifdef CONFIG_ZONE_DMA32
	if (dev->coherent_dma_mask < DMA_BIT_MASK(40))
		gfp |= __GFP_DMA32;
	else
#endif
	;

	gfp |= __GFP_NORETRY;

	ret = swiotlb_alloc_coherent(dev, size, dma_handle, gfp);
	mb();
	return ret;
}
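
/* The free path mirrors the allocation: per-device pool first, then swiotlb. */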
static void loongson_dma_free_coherent(struct device *dev, size_t size,
		void *vaddr, dma_addr_t dma_handle, struct dma_attrs *attrs)
{
	int order = get_order(size);

	if (dma_release_from_coherent(dev, order, vaddr))
		return;

	swiotlb_free_coherent(dev, size, vaddr, dma_handle);
}
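
/*
 * The mapping helpers wrap the generic swiotlb routines; the mb() after
 * each call is meant to ensure that any bounce-buffer copy done by swiotlb
 * has reached memory before a later MMIO write starts the device's DMA.
 */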
static dma_addr_t loongson_dma_map_page(struct device *dev, struct page *page,
		unsigned long offset, size_t size,
		enum dma_data_direction dir,
		struct dma_attrs *attrs)
{
	dma_addr_t daddr = swiotlb_map_page(dev, page, offset, size,
					dir, attrs);
	mb();
	return daddr;
}
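
/* Note: the DMA attributes are not forwarded to swiotlb_map_sg_attrs() here. */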
static int loongson_dma_map_sg(struct device *dev, struct scatterlist *sg,
		int nents, enum dma_data_direction dir,
		struct dma_attrs *attrs)
{
	int r = swiotlb_map_sg_attrs(dev, sg, nents, dir, NULL);
	mb();

	return r;
}
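
/* Device-direction syncs need the same ordering barrier as the map paths. */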
static void loongson_dma_sync_single_for_device(struct device *dev,
		dma_addr_t dma_handle, size_t size,
		enum dma_data_direction dir)
{
	swiotlb_sync_single_for_device(dev, dma_handle, size, dir);
	mb();
}

static void loongson_dma_sync_sg_for_device(struct device *dev,
		struct scatterlist *sg, int nents,
		enum dma_data_direction dir)
{
	swiotlb_sync_sg_for_device(dev, sg, nents, dir);
	mb();
}
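
/*
 * Clamp the requested DMA mask to what the platform can address; the
 * supported width comes from loongson_sysconf, which is filled in from
 * the boot parameters.
 */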
static int loongson_dma_set_mask(struct device *dev, u64 mask)
{
	if (mask > DMA_BIT_MASK(loongson_sysconf.dma_mask_bits)) {
		*dev->dma_mask = DMA_BIT_MASK(loongson_sysconf.dma_mask_bits);
		return -EIO;
	}

	*dev->dma_mask = mask;

	return 0;
}
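
/*
 * Physical <-> DMA address translation. On multi-node Loongson-3 the node
 * id sits in physical address bits 44..47 (only 44..45 are used), while
 * the HyperTransport DMA window is 40 bits wide, so the node id is folded
 * down into bits 37..38 of the DMA address and unfolded on the way back.
 */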
dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	long nid;
#ifdef CONFIG_PHYS48_TO_HT40
	/* Extract the 2-bit node id (bits 44~47, only bits 44~45 used now) from
	 * Loongson-3's 48-bit address space and embed it into the 40-bit address */
	nid = (paddr >> 44) & 0x3;
	paddr = ((nid << 44) ^ paddr) | (nid << 37);
#endif
	return paddr;
}

phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
	long nid;
#ifdef CONFIG_PHYS48_TO_HT40
	/* Take the 2-bit node id back out of bits 37~38 of the 40-bit DMA
	 * address and restore it to bits 44~45 of the 48-bit physical address */
	nid = (daddr >> 37) & 0x3;
	daddr = ((nid << 37) ^ daddr) | (nid << 44);
#endif
	return daddr;
}
static struct dma_map_ops loongson_dma_map_ops = {
	.alloc = loongson_dma_alloc_coherent,
	.free = loongson_dma_free_coherent,
	.map_page = loongson_dma_map_page,
	.unmap_page = swiotlb_unmap_page,
	.map_sg = loongson_dma_map_sg,
	.unmap_sg = swiotlb_unmap_sg_attrs,
	.sync_single_for_cpu = swiotlb_sync_single_for_cpu,
	.sync_single_for_device = loongson_dma_sync_single_for_device,
	.sync_sg_for_cpu = swiotlb_sync_sg_for_cpu,
	.sync_sg_for_device = loongson_dma_sync_sg_for_device,
	.mapping_error = swiotlb_dma_mapping_error,
	.dma_supported = swiotlb_dma_supported,
	.set_dma_mask = loongson_dma_set_mask
};
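
/* Initialize swiotlb and install the Loongson DMA ops at boot time. */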
void __init plat_swiotlb_setup(void)
{
	swiotlb_init(1);

	mips_dma_map_ops = &loongson_dma_map_ops;
}