/* dma_v2.h - ioat v2/v3 channel definitions */
/*
 * Copyright(c) 2004 - 2009 Intel Corporation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc., 59
 * Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 * The full GNU General Public License is included in this distribution in the
 * file called COPYING.
 */
  21. #ifndef IOATDMA_V2_H
  22. #define IOATDMA_V2_H
  23. #include <linux/dmaengine.h>
  24. #include <linux/circ_buf.h>
  25. #include "dma.h"
  26. #include "hw.h"
  27. extern int ioat_pending_level;
  28. extern int ioat_ring_alloc_order;
  29. /*
  30. * workaround for IOAT ver.3.0 null descriptor issue
  31. * (channel returns error when size is 0)
  32. */
  33. #define NULL_DESC_BUFFER_SIZE 1
  34. #define IOAT_MAX_ORDER 16
  35. #define ioat_get_alloc_order() \
  36. (min(ioat_ring_alloc_order, IOAT_MAX_ORDER))
  37. #define ioat_get_max_alloc_order() \
  38. (min(ioat_ring_max_alloc_order, IOAT_MAX_ORDER))
  39. /* struct ioat2_dma_chan - ioat v2 / v3 channel attributes
  40. * @base: common ioat channel parameters
  41. * @xfercap_log; log2 of channel max transfer length (for fast division)
  42. * @head: allocated index
  43. * @issued: hardware notification point
  44. * @tail: cleanup index
  45. * @dmacount: identical to 'head' except for occasionally resetting to zero
  46. * @alloc_order: log2 of the number of allocated descriptors
  47. * @produce: number of descriptors to produce at submit time
  48. * @ring: software ring buffer implementation of hardware ring
  49. * @prep_lock: serializes descriptor preparation (producers)
  50. */
  51. struct ioat2_dma_chan {
  52. struct ioat_chan_common base;
  53. size_t xfercap_log;
  54. u16 head;
  55. u16 issued;
  56. u16 tail;
  57. u16 dmacount;
  58. u16 alloc_order;
  59. u16 produce;
  60. struct ioat_ring_ent **ring;
  61. spinlock_t prep_lock;
  62. };
  63. static inline struct ioat2_dma_chan *to_ioat2_chan(struct dma_chan *c)
  64. {
  65. struct ioat_chan_common *chan = to_chan_common(c);
  66. return container_of(chan, struct ioat2_dma_chan, base);
  67. }
  68. static inline u32 ioat2_ring_size(struct ioat2_dma_chan *ioat)
  69. {
  70. return 1 << ioat->alloc_order;
  71. }
  72. /* count of descriptors in flight with the engine */
  73. static inline u16 ioat2_ring_active(struct ioat2_dma_chan *ioat)
  74. {
  75. return CIRC_CNT(ioat->head, ioat->tail, ioat2_ring_size(ioat));
  76. }
  77. /* count of descriptors pending submission to hardware */
  78. static inline u16 ioat2_ring_pending(struct ioat2_dma_chan *ioat)
  79. {
  80. return CIRC_CNT(ioat->head, ioat->issued, ioat2_ring_size(ioat));
  81. }
  82. static inline u32 ioat2_ring_space(struct ioat2_dma_chan *ioat)
  83. {
  84. return ioat2_ring_size(ioat) - ioat2_ring_active(ioat);
  85. }
  86. static inline u16 ioat2_xferlen_to_descs(struct ioat2_dma_chan *ioat, size_t len)
  87. {
  88. u16 num_descs = len >> ioat->xfercap_log;
  89. num_descs += !!(len & ((1 << ioat->xfercap_log) - 1));
  90. return num_descs;
  91. }
  92. /**
  93. * struct ioat_ring_ent - wrapper around hardware descriptor
  94. * @hw: hardware DMA descriptor (for memcpy)
  95. * @fill: hardware fill descriptor
  96. * @xor: hardware xor descriptor
  97. * @xor_ex: hardware xor extension descriptor
  98. * @pq: hardware pq descriptor
  99. * @pq_ex: hardware pq extension descriptor
  100. * @pqu: hardware pq update descriptor
  101. * @raw: hardware raw (un-typed) descriptor
  102. * @txd: the generic software descriptor for all engines
  103. * @len: total transaction length for unmap
  104. * @result: asynchronous result of validate operations
  105. * @id: identifier for debug
  106. */
  107. struct ioat_ring_ent {
  108. union {
  109. struct ioat_dma_descriptor *hw;
  110. struct ioat_fill_descriptor *fill;
  111. struct ioat_xor_descriptor *xor;
  112. struct ioat_xor_ext_descriptor *xor_ex;
  113. struct ioat_pq_descriptor *pq;
  114. struct ioat_pq_ext_descriptor *pq_ex;
  115. struct ioat_pq_update_descriptor *pqu;
  116. struct ioat_raw_descriptor *raw;
  117. };
  118. size_t len;
  119. struct dma_async_tx_descriptor txd;
  120. enum sum_check_flags *result;
  121. #ifdef DEBUG
  122. int id;
  123. #endif
  124. };
  125. static inline struct ioat_ring_ent *
  126. ioat2_get_ring_ent(struct ioat2_dma_chan *ioat, u16 idx)
  127. {
  128. return ioat->ring[idx & (ioat2_ring_size(ioat) - 1)];
  129. }
  130. static inline void ioat2_set_chainaddr(struct ioat2_dma_chan *ioat, u64 addr)
  131. {
  132. struct ioat_chan_common *chan = &ioat->base;
  133. writel(addr & 0x00000000FFFFFFFF,
  134. chan->reg_base + IOAT2_CHAINADDR_OFFSET_LOW);
  135. writel(addr >> 32,
  136. chan->reg_base + IOAT2_CHAINADDR_OFFSET_HIGH);
  137. }
  138. int __devinit ioat2_dma_probe(struct ioatdma_device *dev, int dca);
  139. int __devinit ioat3_dma_probe(struct ioatdma_device *dev, int dca);
  140. struct dca_provider * __devinit ioat2_dca_init(struct pci_dev *pdev, void __iomem *iobase);
  141. struct dca_provider * __devinit ioat3_dca_init(struct pci_dev *pdev, void __iomem *iobase);
  142. int ioat2_check_space_lock(struct ioat2_dma_chan *ioat, int num_descs);
  143. int ioat2_enumerate_channels(struct ioatdma_device *device);
  144. struct dma_async_tx_descriptor *
  145. ioat2_dma_prep_memcpy_lock(struct dma_chan *c, dma_addr_t dma_dest,
  146. dma_addr_t dma_src, size_t len, unsigned long flags);
  147. void ioat2_issue_pending(struct dma_chan *chan);
  148. int ioat2_alloc_chan_resources(struct dma_chan *c);
  149. void ioat2_free_chan_resources(struct dma_chan *c);
  150. void __ioat2_restart_chan(struct ioat2_dma_chan *ioat);
  151. bool reshape_ring(struct ioat2_dma_chan *ioat, int order);
  152. void __ioat2_issue_pending(struct ioat2_dma_chan *ioat);
  153. void ioat2_cleanup_event(unsigned long data);
  154. void ioat2_timer_event(unsigned long data);
  155. int ioat2_quiesce(struct ioat_chan_common *chan, unsigned long tmo);
  156. int ioat2_reset_sync(struct ioat_chan_common *chan, unsigned long tmo);
  157. extern struct kobj_type ioat2_ktype;
  158. extern struct kmem_cache *ioat2_cache;
  159. #endif /* IOATDMA_V2_H */