/*
 * linux/arch/unicore32/include/asm/dma-mapping.h
 *
 * Code specific to PKUnity SoC and UniCore ISA
 *
 * Copyright (C) 2001-2010 GUAN Xue-tao
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __UNICORE_DMA_MAPPING_H__
#define __UNICORE_DMA_MAPPING_H__

#ifdef __KERNEL__

#include <linux/mm_types.h>
#include <linux/scatterlist.h>
#include <linux/swiotlb.h>

#include <asm-generic/dma-coherent.h>

#include <asm/memory.h>
#include <asm/cacheflush.h>

extern struct dma_map_ops swiotlb_dma_map_ops;

/* Every device on this platform shares the single swiotlb dma_map_ops. */
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
	return &swiotlb_dma_map_ops;
}
static inline int dma_supported(struct device *dev, u64 mask)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	if (unlikely(dma_ops == NULL))
		return 0;

	return dma_ops->dma_supported(dev, mask);
}

static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	if (dma_ops->mapping_error)
		return dma_ops->mapping_error(dev, dma_addr);

	return 0;
}

#include <asm-generic/dma-mapping-common.h>
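/*
 * Illustrative sketch, not part of the original header: the streaming DMA
 * helpers pulled in from <asm-generic/dma-mapping-common.h> are typically
 * paired with dma_mapping_error() above, e.g. in a hypothetical driver
 * transmit path:
 *
 *	dma_addr_t handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(dev, handle))
 *		return -ENOMEM;
 *	... hand 'handle' to the device, wait for completion ...
 *	dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
 */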
static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
	if (dev && dev->dma_mask)
		return addr + size - 1 <= *dev->dma_mask;

	return 1;
}

/* DMA addresses are identical to physical addresses on this platform. */
static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	return paddr;
}

static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
	return daddr;
}

static inline void dma_mark_clean(void *addr, size_t size) {}

static inline int dma_set_mask(struct device *dev, u64 dma_mask)
{
	if (!dev->dma_mask || !dma_supported(dev, dma_mask))
		return -EIO;

	*dev->dma_mask = dma_mask;

	return 0;
}
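/*
 * Illustrative sketch, not part of the original header: a hypothetical
 * driver whose device can only address 32 bits would narrow its mask at
 * probe time, before creating any mappings:
 *
 *	if (dma_set_mask(dev, DMA_BIT_MASK(32)))
 *		return -EIO;
 */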
#define dma_alloc_coherent(d,s,h,f)	dma_alloc_attrs(d,s,h,f,NULL)

static inline void *dma_alloc_attrs(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t flag,
				    struct dma_attrs *attrs)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	return dma_ops->alloc(dev, size, dma_handle, flag, attrs);
}

#define dma_free_coherent(d,s,c,h)	dma_free_attrs(d,s,c,h,NULL)

static inline void dma_free_attrs(struct device *dev, size_t size,
				  void *cpu_addr, dma_addr_t dma_handle,
				  struct dma_attrs *attrs)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	dma_ops->free(dev, size, cpu_addr, dma_handle, attrs);
}
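/*
 * Illustrative sketch, not part of the original header: typical use of the
 * coherent allocator from a hypothetical driver's probe/remove pair, where
 * RING_SIZE stands in for the driver's own buffer size:
 *
 *	dma_addr_t ring_dma;
 *	void *ring = dma_alloc_coherent(dev, RING_SIZE, &ring_dma, GFP_KERNEL);
 *	if (!ring)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(dev, RING_SIZE, ring, ring_dma);
 */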
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

/* Maintain the CPU data cache over a buffer shared with a device. */
static inline void dma_cache_sync(struct device *dev, void *vaddr,
		size_t size, enum dma_data_direction direction)
{
	unsigned long start = (unsigned long)vaddr;
	unsigned long end = start + size;

	switch (direction) {
	case DMA_NONE:
		BUG();
	case DMA_FROM_DEVICE:
	case DMA_BIDIRECTIONAL:		/* writeback and invalidate */
		__cpuc_dma_flush_range(start, end);
		break;
	case DMA_TO_DEVICE:		/* writeback only */
		__cpuc_dma_clean_range(start, end);
		break;
	}
}
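/*
 * Illustrative sketch, not part of the original header: memory obtained via
 * dma_alloc_noncoherent() must be synchronised by hand around device
 * accesses, e.g. before the device reads a freshly written buffer:
 *
 *	memcpy(buf, data, len);
 *	dma_cache_sync(dev, buf, len, DMA_TO_DEVICE);
 */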
#endif /* __KERNEL__ */
#endif