dma_v2.h

/*
 * Copyright(c) 2004 - 2009 Intel Corporation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * The full GNU General Public License is included in this distribution in the
 * file called COPYING.
 */
#ifndef IOATDMA_V2_H
#define IOATDMA_V2_H

#include <linux/dmaengine.h>
#include <linux/circ_buf.h>
#include "dma.h"
#include "hw.h"

extern int ioat_pending_level;
extern int ioat_ring_alloc_order;
/*
 * workaround for IOAT ver.3.0 null descriptor issue
 * (channel returns error when size is 0)
 */
#define NULL_DESC_BUFFER_SIZE 1

#define IOAT_MAX_ORDER 16
#define ioat_get_alloc_order() \
	(min(ioat_ring_alloc_order, IOAT_MAX_ORDER))
#define ioat_get_max_alloc_order() \
	(min(ioat_ring_max_alloc_order, IOAT_MAX_ORDER))
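
/*
 * Note (illustrative): a ring of order N holds 1 << N descriptors, so
 * IOAT_MAX_ORDER of 16 caps the ring at 65536 entries.  The ring starts
 * out at ioat_get_alloc_order() and may later be grown by reshape_ring(),
 * bounded by ioat_get_max_alloc_order().
 */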
/**
 * struct ioat2_dma_chan - ioat v2 / v3 channel attributes
 * @base: common ioat channel parameters
 * @xfercap_log: log2 of channel max transfer length (for fast division)
 * @head: allocated index
 * @issued: hardware notification point
 * @tail: cleanup index
 * @dmacount: identical to 'head' except for occasionally resetting to zero
 * @alloc_order: log2 of the number of allocated descriptors
 * @produce: number of descriptors to produce at submit time
 * @ring: software ring buffer implementation of hardware ring
 * @prep_lock: serializes descriptor preparation (producers)
 */
struct ioat2_dma_chan {
	struct ioat_chan_common base;
	size_t xfercap_log;
	u16 head;
	u16 issued;
	u16 tail;
	u16 dmacount;
	u16 alloc_order;
	u16 produce;
	struct ioat_ring_ent **ring;
	spinlock_t prep_lock;
};
static inline struct ioat2_dma_chan *to_ioat2_chan(struct dma_chan *c)
{
	struct ioat_chan_common *chan = to_chan_common(c);

	return container_of(chan, struct ioat2_dma_chan, base);
}
static inline u32 ioat2_ring_size(struct ioat2_dma_chan *ioat)
{
	return 1 << ioat->alloc_order;
}

/* count of descriptors in flight with the engine */
static inline u16 ioat2_ring_active(struct ioat2_dma_chan *ioat)
{
	return CIRC_CNT(ioat->head, ioat->tail, ioat2_ring_size(ioat));
}

/* count of descriptors pending submission to hardware */
static inline u16 ioat2_ring_pending(struct ioat2_dma_chan *ioat)
{
	return CIRC_CNT(ioat->head, ioat->issued, ioat2_ring_size(ioat));
}

static inline u32 ioat2_ring_space(struct ioat2_dma_chan *ioat)
{
	return ioat2_ring_size(ioat) - ioat2_ring_active(ioat);
}
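
/*
 * Illustrative example of the ring accounting above: the indices always
 * satisfy tail <= issued <= head in modulo-ring-size arithmetic.  With a
 * 256-entry ring, tail = 10, issued = 14 and head = 20:
 *
 *	ioat2_ring_active()  -> 10  (head - tail: allocated, not yet cleaned up)
 *	ioat2_ring_pending() ->  6  (head - issued: prepped, not yet issued)
 *	ioat2_ring_space()   -> 246 (free slots for new descriptors)
 */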
static inline u16 ioat2_xferlen_to_descs(struct ioat2_dma_chan *ioat, size_t len)
{
	u16 num_descs = len >> ioat->xfercap_log;

	num_descs += !!(len & ((1 << ioat->xfercap_log) - 1));
	return num_descs;
}
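
/*
 * Illustrative example: ioat2_xferlen_to_descs() is a ceiling division by
 * the per-descriptor transfer cap.  Assuming xfercap_log = 20 (a 1 MiB cap
 * per descriptor), a 3 MiB + 1 byte request needs 3 full descriptors plus
 * one more for the 1-byte remainder, i.e. 4 descriptors.
 */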
/**
 * struct ioat_ring_ent - wrapper around hardware descriptor
 * @hw: hardware DMA descriptor (for memcpy)
 * @xor: hardware xor descriptor
 * @xor_ex: hardware xor extension descriptor
 * @pq: hardware pq descriptor
 * @pq_ex: hardware pq extension descriptor
 * @pqu: hardware pq update descriptor
 * @raw: hardware raw (un-typed) descriptor
 * @txd: the generic software descriptor for all engines
 * @len: total transaction length for unmap
 * @result: asynchronous result of validate operations
 * @id: identifier for debug
 * @sed: software handle for the super extended descriptor
 */
struct ioat_ring_ent {
	union {
		struct ioat_dma_descriptor *hw;
		struct ioat_xor_descriptor *xor;
		struct ioat_xor_ext_descriptor *xor_ex;
		struct ioat_pq_descriptor *pq;
		struct ioat_pq_ext_descriptor *pq_ex;
		struct ioat_pq_update_descriptor *pqu;
		struct ioat_raw_descriptor *raw;
	};
	size_t len;
	struct dma_async_tx_descriptor txd;
	enum sum_check_flags *result;
	#ifdef DEBUG
	int id;
	#endif
	struct ioat_sed_ent *sed;
};
static inline struct ioat_ring_ent *
ioat2_get_ring_ent(struct ioat2_dma_chan *ioat, u16 idx)
{
	return ioat->ring[idx & (ioat2_ring_size(ioat) - 1)];
}
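
/*
 * Illustrative sketch only (simplified from the memcpy prep path in
 * dma_v2.c; field names come from the hardware descriptor in hw.h).
 * Ring indices wrap via "idx & (size - 1)", which is valid because the
 * ring size is always a power of two.  A producer typically does:
 *
 *	struct ioat_ring_ent *desc = ioat2_get_ring_ent(ioat, ioat->head);
 *	struct ioat_dma_descriptor *hw = desc->hw;
 *
 *	hw->size = copy_len;
 *	hw->src_addr = dma_src;
 *	hw->dst_addr = dma_dest;
 *	desc->txd.flags = flags;
 *	desc->len = copy_len;
 */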
static inline void ioat2_set_chainaddr(struct ioat2_dma_chan *ioat, u64 addr)
{
	struct ioat_chan_common *chan = &ioat->base;

	writel(addr & 0x00000000FFFFFFFF,
	       chan->reg_base + IOAT2_CHAINADDR_OFFSET_LOW);
	writel(addr >> 32,
	       chan->reg_base + IOAT2_CHAINADDR_OFFSET_HIGH);
}
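
/*
 * Note: the 64-bit descriptor chain address is programmed as two 32-bit
 * MMIO writes (low word, then high word).  The restart path typically
 * points this at the bus address (txd.phys) of the first uncompleted
 * ring entry before re-enabling the channel.
 */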
int ioat2_dma_probe(struct ioatdma_device *dev, int dca);
int ioat3_dma_probe(struct ioatdma_device *dev, int dca);
struct dca_provider *ioat2_dca_init(struct pci_dev *pdev, void __iomem *iobase);
struct dca_provider *ioat3_dca_init(struct pci_dev *pdev, void __iomem *iobase);
int ioat2_check_space_lock(struct ioat2_dma_chan *ioat, int num_descs);
int ioat2_enumerate_channels(struct ioatdma_device *device);
struct dma_async_tx_descriptor *
ioat2_dma_prep_memcpy_lock(struct dma_chan *c, dma_addr_t dma_dest,
			   dma_addr_t dma_src, size_t len, unsigned long flags);
void ioat2_issue_pending(struct dma_chan *chan);
int ioat2_alloc_chan_resources(struct dma_chan *c);
void ioat2_free_chan_resources(struct dma_chan *c);
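
/*
 * Illustrative sketch only: clients do not call these entry points
 * directly; they go through the generic dmaengine API, roughly:
 *
 *	tx = chan->device->device_prep_dma_memcpy(chan, dst, src, len, flags);
 *	cookie = dmaengine_submit(tx);
 *	dma_async_issue_pending(chan);
 *
 * which lands in ioat2_dma_prep_memcpy_lock() and ioat2_issue_pending()
 * above for an ioat v2/v3 channel.
 */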
void __ioat2_restart_chan(struct ioat2_dma_chan *ioat);
bool reshape_ring(struct ioat2_dma_chan *ioat, int order);
void __ioat2_issue_pending(struct ioat2_dma_chan *ioat);
void ioat2_cleanup_event(unsigned long data);
void ioat2_timer_event(unsigned long data);
int ioat2_quiesce(struct ioat_chan_common *chan, unsigned long tmo);
int ioat2_reset_sync(struct ioat_chan_common *chan, unsigned long tmo);
extern struct kobj_type ioat2_ktype;
extern struct kmem_cache *ioat2_cache;
#endif /* IOATDMA_V2_H */