/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2008 Advanced Micro Devices, Inc.
 *
 * Author: Joerg Roedel <joerg.roedel@amd.com>
 */

#ifndef _KERNEL_DMA_DEBUG_H
#define _KERNEL_DMA_DEBUG_H

#ifdef CONFIG_DMA_API_DEBUG
extern void debug_dma_map_page(struct device *dev, struct page *page,
			       size_t offset, size_t size,
			       int direction, dma_addr_t dma_addr,
			       unsigned long attrs);

extern void debug_dma_unmap_page(struct device *dev, dma_addr_t addr,
				 size_t size, int direction);

extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
			     int nents, int mapped_ents, int direction,
			     unsigned long attrs);

extern void debug_dma_unmap_sg(struct device *dev,
			       struct scatterlist *sglist,
			       int nelems, int dir);

extern void debug_dma_alloc_coherent(struct device *dev, size_t size,
				     dma_addr_t dma_addr, void *virt,
				     unsigned long attrs);

extern void debug_dma_free_coherent(struct device *dev, size_t size,
				    void *virt, dma_addr_t addr);

extern void debug_dma_map_resource(struct device *dev, phys_addr_t addr,
				   size_t size, int direction,
				   dma_addr_t dma_addr,
				   unsigned long attrs);

extern void debug_dma_unmap_resource(struct device *dev, dma_addr_t dma_addr,
				     size_t size, int direction);

extern void debug_dma_sync_single_for_cpu(struct device *dev,
					  dma_addr_t dma_handle, size_t size,
					  int direction);

extern void debug_dma_sync_single_for_device(struct device *dev,
					     dma_addr_t dma_handle,
					     size_t size, int direction);

extern void debug_dma_sync_sg_for_cpu(struct device *dev,
				      struct scatterlist *sg,
				      int nelems, int direction);

extern void debug_dma_sync_sg_for_device(struct device *dev,
					 struct scatterlist *sg,
					 int nelems, int direction);
#else /* CONFIG_DMA_API_DEBUG */
static inline void debug_dma_map_page(struct device *dev, struct page *page,
				      size_t offset, size_t size,
				      int direction, dma_addr_t dma_addr,
				      unsigned long attrs)
{
}

static inline void debug_dma_unmap_page(struct device *dev, dma_addr_t addr,
					size_t size, int direction)
{
}

static inline void debug_dma_map_sg(struct device *dev,
				    struct scatterlist *sg,
				    int nents, int mapped_ents, int direction,
				    unsigned long attrs)
{
}

static inline void debug_dma_unmap_sg(struct device *dev,
				      struct scatterlist *sglist,
				      int nelems, int dir)
{
}

static inline void debug_dma_alloc_coherent(struct device *dev, size_t size,
					    dma_addr_t dma_addr, void *virt,
					    unsigned long attrs)
{
}

static inline void debug_dma_free_coherent(struct device *dev, size_t size,
					   void *virt, dma_addr_t addr)
{
}

static inline void debug_dma_map_resource(struct device *dev,
					  phys_addr_t addr, size_t size,
					  int direction, dma_addr_t dma_addr,
					  unsigned long attrs)
{
}

static inline void debug_dma_unmap_resource(struct device *dev,
					    dma_addr_t dma_addr, size_t size,
					    int direction)
{
}

static inline void debug_dma_sync_single_for_cpu(struct device *dev,
						 dma_addr_t dma_handle,
						 size_t size, int direction)
{
}

static inline void debug_dma_sync_single_for_device(struct device *dev,
						    dma_addr_t dma_handle,
						    size_t size,
						    int direction)
{
}

static inline void debug_dma_sync_sg_for_cpu(struct device *dev,
					     struct scatterlist *sg,
					     int nelems, int direction)
{
}

static inline void debug_dma_sync_sg_for_device(struct device *dev,
						struct scatterlist *sg,
						int nelems, int direction)
{
}
#endif /* CONFIG_DMA_API_DEBUG */
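
/*
 * Illustration only, not part of the upstream header: a hypothetical helper
 * sketching how the DMA mapping core is expected to pair the real map
 * operation with the debug hook declared above.  The helper name and the
 * backend_map callback are assumptions made for this sketch; the real call
 * sites live in the DMA mapping core.  With CONFIG_DMA_API_DEBUG disabled,
 * the hook compiles away to the empty stub.
 */
static inline dma_addr_t dma_debug_example_map_page(struct device *dev,
		struct page *page, size_t offset, size_t size, int direction,
		unsigned long attrs,
		dma_addr_t (*backend_map)(struct device *, struct page *,
					  size_t, size_t, int, unsigned long))
{
	/* perform the real mapping through the caller-supplied backend */
	dma_addr_t dma_addr = backend_map(dev, page, offset, size, direction,
					  attrs);

	/* record the mapping so dma-debug can report leaks and bad unmaps */
	debug_dma_map_page(dev, page, offset, size, direction, dma_addr,
			   attrs);

	return dma_addr;
}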
#endif /* _KERNEL_DMA_DEBUG_H */