2 #include <linux/dma-mapping.h>
4 #include <linux/highmem.h>
6 #include <xen/features.h>
12 /* functions called by SWIOTLB */
/*
 * dma_cache_maint() - per-page cache maintenance for a streaming DMA buffer.
 * @handle: bus/DMA address of the buffer (page-aligned base expected by callers)
 * @offset: byte offset of the transfer within that page
 * @size:   length of the transfer in bytes
 * @dir:    DMA transfer direction
 * @op:     which maintenance pass this is (DMA_MAP or DMA_UNMAP — see callers)
 *
 * NOTE(review): only a fragment of this function is visible in this view;
 * the locals, loop structure, and braces are not shown. Comments below
 * cover just the visible lines.
 */
14 static void dma_cache_maint(dma_addr_t handle, unsigned long offset,
15 size_t size, enum dma_data_direction dir, enum dma_cache_op op)
/*
 * Page frame number of the page the transfer starts in: page part of the
 * DMA handle plus however many whole pages the offset spans.
 */
20 pfn = (handle >> PAGE_SHIFT) + offset / PAGE_SIZE;
/* The actual cache flush for this case is not implemented yet. */
26 /* TODO: cache flush */
/*
 * __xen_dma_page_dev_to_cpu() - cache maintenance after the device has
 * finished with the buffer (unmap / sync-for-cpu path).
 *
 * Thin wrapper: splits @handle into its page-aligned base (handle & PAGE_MASK)
 * and intra-page offset (handle & ~PAGE_MASK), then delegates to
 * dma_cache_maint() with op DMA_UNMAP. @hwdev is accepted for symmetry with
 * the callers but not used in the visible line.
 */
34 static void __xen_dma_page_dev_to_cpu(struct device *hwdev, dma_addr_t handle,
35 size_t size, enum dma_data_direction dir)
37 dma_cache_maint(handle & PAGE_MASK, handle & ~PAGE_MASK, size, dir, DMA_UNMAP);
/*
 * __xen_dma_page_cpu_to_dev() - cache maintenance before handing the buffer
 * to the device (map / sync-for-device path).
 *
 * Mirror image of __xen_dma_page_dev_to_cpu(): same page-base/offset split
 * of @handle, but delegates to dma_cache_maint() with op DMA_MAP.
 */
40 static void __xen_dma_page_cpu_to_dev(struct device *hwdev, dma_addr_t handle,
41 size_t size, enum dma_data_direction dir)
43 dma_cache_maint(handle & PAGE_MASK, handle & ~PAGE_MASK, size, dir, DMA_MAP);
/*
 * __xen_dma_unmap_page() - CPU-side cache maintenance when a streaming DMA
 * page mapping is torn down.
 *
 * Nothing to do when the device is cache-coherent, or when the caller set
 * DMA_ATTR_SKIP_CPU_SYNC to explicitly opt out of CPU synchronisation;
 * otherwise performs the dev-to-cpu maintenance pass.
 *
 * NOTE(review): the early-return statements for the two guard checks are not
 * visible in this fragment — presumably each condition returns before the
 * __xen_dma_page_dev_to_cpu() call. Confirm against the full source.
 */
46 void __xen_dma_unmap_page(struct device *hwdev, dma_addr_t handle,
47 size_t size, enum dma_data_direction dir,
48 struct dma_attrs *attrs)
51 if (is_device_dma_coherent(hwdev))
53 if (dma_get_attr(DMA_ATTR_SKIP_CPU_SYNC, attrs))
56 __xen_dma_page_dev_to_cpu(hwdev, handle, size, dir);
/*
 * __xen_dma_sync_single_for_cpu() - make a DMA buffer coherent for CPU access
 * after the device has written to it.
 *
 * No-op for cache-coherent devices; otherwise delegates to the dev-to-cpu
 * maintenance helper.
 *
 * NOTE(review): the early-return for the coherent case is not visible in
 * this fragment — confirm against the full source.
 */
59 void __xen_dma_sync_single_for_cpu(struct device *hwdev,
60 dma_addr_t handle, size_t size, enum dma_data_direction dir)
62 if (is_device_dma_coherent(hwdev))
64 __xen_dma_page_dev_to_cpu(hwdev, handle, size, dir);
/*
 * __xen_dma_sync_single_for_device() - make a DMA buffer coherent for device
 * access before the device reads/writes it.
 *
 * No-op for cache-coherent devices; otherwise delegates to the cpu-to-dev
 * maintenance helper (mirror of __xen_dma_sync_single_for_cpu()).
 *
 * NOTE(review): the early-return for the coherent case is not visible in
 * this fragment — confirm against the full source.
 */
67 void __xen_dma_sync_single_for_device(struct device *hwdev,
68 dma_addr_t handle, size_t size, enum dma_data_direction dir)
70 if (is_device_dma_coherent(hwdev))
72 __xen_dma_page_cpu_to_dev(hwdev, handle, size, dir);
/*
 * xen_mm32_init() - one-time boot-time setup for this Xen mm glue,
 * registered below as an arch_initcall.
 *
 * Only relevant when running as the Xen initial (privileged) domain; the
 * visible guard bails out otherwise.
 *
 * NOTE(review): the function's return statements and any setup work after
 * the guard are not visible in this fragment — confirm against the full
 * source before relying on the return semantics.
 */
75 int __init xen_mm32_init(void)
77 if (!xen_initial_domain())
/* Run during the arch_initcall phase of kernel boot. */
82 arch_initcall(xen_mm32_init);