/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_DMA_NONCOHERENT_H
#define _LINUX_DMA_NONCOHERENT_H 1

#include <linux/dma-mapping.h>
#include <asm/pgtable.h>

#ifdef CONFIG_ARCH_HAS_DMA_COHERENCE_H
#include <asm/dma-coherence.h>
#elif defined(CONFIG_ARCH_HAS_SYNC_DMA_FOR_DEVICE) || \
	defined(CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU) || \
	defined(CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU_ALL)
static inline bool dev_is_dma_coherent(struct device *dev)
{
	return dev->dma_coherent;
}
#else
static inline bool dev_is_dma_coherent(struct device *dev)
{
	return true;
}
#endif /* CONFIG_ARCH_HAS_DMA_COHERENCE_H */

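/*
 * Illustrative sketch (not part of this header's contract): an
 * architecture that supports both coherent and non-coherent devices
 * typically latches the per-device flag when the device's DMA ops are
 * configured, e.g. from its arch_setup_dma_ops() implementation:
 *
 *	void arch_setup_dma_ops(struct device *dev, u64 dma_base, u64 size,
 *				const struct iommu_ops *iommu, bool coherent)
 *	{
 *		dev->dma_coherent = coherent;
 *	}
 */
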
/*
 * Check if an allocation needs to be marked uncached to be coherent.
 */
static __always_inline bool dma_alloc_need_uncached(struct device *dev,
		unsigned long attrs)
{
	if (dev_is_dma_coherent(dev))
		return false;
	if (attrs & DMA_ATTR_NO_KERNEL_MAPPING)
		return false;
	if (IS_ENABLED(CONFIG_DMA_NONCOHERENT_CACHE_SYNC) &&
	    (attrs & DMA_ATTR_NON_CONSISTENT))
		return false;
	return true;
}

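/*
 * Usage sketch, loosely modeled on the generic allocator in
 * kernel/dma/direct.c (simplified, not the verbatim code): the helper
 * decides whether freshly allocated pages must lose their cached kernel
 * mapping before being handed out:
 *
 *	if (dma_alloc_need_uncached(dev, attrs)) {
 *		arch_dma_prep_coherent(page, size);
 *		ret = arch_dma_set_uncached(page_address(page), size);
 *	}
 */
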
void *arch_dma_alloc(struct device *dev, size_t size, dma_addr_t *dma_handle,
		gfp_t gfp, unsigned long attrs);
void arch_dma_free(struct device *dev, size_t size, void *cpu_addr,
		dma_addr_t dma_addr, unsigned long attrs);

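/*
 * A minimal sketch of what a non-coherent architecture might implement
 * (illustrative only; cache_wback_inv() and uncached_alias() are
 * hypothetical stand-ins for arch-specific primitives):
 *
 *	void *arch_dma_alloc(struct device *dev, size_t size,
 *			dma_addr_t *dma_handle, gfp_t gfp, unsigned long attrs)
 *	{
 *		void *ret = dma_direct_alloc_pages(dev, size, dma_handle,
 *						   gfp, attrs);
 *
 *		if (ret && dma_alloc_need_uncached(dev, attrs)) {
 *			cache_wback_inv((unsigned long)ret, size);
 *			ret = uncached_alias(ret);
 *		}
 *		return ret;
 *	}
 */
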
#ifdef CONFIG_MMU
/*
 * Page protection so that devices that can't snoop CPU caches can use the
 * memory coherently.  We default to pgprot_noncached which is usually used
 * for ioremap as a safe bet, but architectures can override this with less
 * strict semantics if possible.
 */
#ifndef pgprot_dmacoherent
#define pgprot_dmacoherent(prot)	pgprot_noncached(prot)
#endif

pgprot_t dma_pgprot(struct device *dev, pgprot_t prot, unsigned long attrs);
#else
static inline pgprot_t dma_pgprot(struct device *dev, pgprot_t prot,
		unsigned long attrs)
{
	return prot;	/* no protection bits supported without page tables */
}
#endif /* CONFIG_MMU */

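/*
 * Example (illustrative): an architecture whose uncached-but-bufferable
 * mappings are already coherent enough for DMA could relax the default:
 *
 *	#define pgprot_dmacoherent(prot)	pgprot_writecombine(prot)
 *
 * dma_pgprot() is the hook the generic mmap/remap paths use to derive
 * the protection bits for userspace or vmalloc mappings of DMA buffers.
 */
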
#ifdef CONFIG_DMA_NONCOHERENT_CACHE_SYNC
void arch_dma_cache_sync(struct device *dev, void *vaddr, size_t size,
		enum dma_data_direction direction);
#else
static inline void arch_dma_cache_sync(struct device *dev, void *vaddr,
		size_t size, enum dma_data_direction direction)
{
}
#endif /* CONFIG_DMA_NONCOHERENT_CACHE_SYNC */

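/*
 * arch_dma_cache_sync() backs the dma_cache_sync() API that drivers use
 * with DMA_ATTR_NON_CONSISTENT allocations: instead of paying for an
 * uncached mapping, the driver flushes explicitly around each access,
 * e.g.:
 *
 *	dma_cache_sync(dev, vaddr, size, DMA_TO_DEVICE);
 */
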
#ifdef CONFIG_ARCH_HAS_SYNC_DMA_FOR_DEVICE
void arch_sync_dma_for_device(phys_addr_t paddr, size_t size,
		enum dma_data_direction dir);
#else
static inline void arch_sync_dma_for_device(phys_addr_t paddr, size_t size,
		enum dma_data_direction dir)
{
}
#endif /* ARCH_HAS_SYNC_DMA_FOR_DEVICE */

#ifdef CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU
void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size,
		enum dma_data_direction dir);
#else
static inline void arch_sync_dma_for_cpu(phys_addr_t paddr, size_t size,
		enum dma_data_direction dir)
{
}
#endif /* ARCH_HAS_SYNC_DMA_FOR_CPU */

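/*
 * Illustrative ownership-transfer flow (simplified from the streaming
 * DMA code in kernel/dma/direct.c) for a buffer the device writes to:
 *
 *	arch_sync_dma_for_device(paddr, size, DMA_FROM_DEVICE);
 *	... device performs the transfer ...
 *	arch_sync_dma_for_cpu(paddr, size, DMA_FROM_DEVICE);
 *
 * A typical non-coherent implementation writes dirty cache lines back
 * for DMA_TO_DEVICE and invalidates them for DMA_FROM_DEVICE.
 */
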
#ifdef CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU_ALL
void arch_sync_dma_for_cpu_all(void);
#else
static inline void arch_sync_dma_for_cpu_all(void)
{
}
#endif /* CONFIG_ARCH_HAS_SYNC_DMA_FOR_CPU_ALL */

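/*
 * arch_sync_dma_for_cpu_all() is the big hammer for the few platforms
 * whose coherence hardware must be synchronized globally rather than
 * per buffer range; most architectures leave it as the empty stub.
 */
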
#ifdef CONFIG_ARCH_HAS_DMA_PREP_COHERENT
void arch_dma_prep_coherent(struct page *page, size_t size);
#else
static inline void arch_dma_prep_coherent(struct page *page, size_t size)
{
}
#endif /* CONFIG_ARCH_HAS_DMA_PREP_COHERENT */

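/*
 * arch_dma_prep_coherent() lets the architecture write back and
 * invalidate the cachable alias of a freshly allocated buffer before it
 * is used (or remapped) as uncached memory, so that no dirty cache line
 * can later be evicted on top of device-owned data.
 */
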
void *arch_dma_set_uncached(void *addr, size_t size);
void arch_dma_clear_uncached(void *addr, size_t size);

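/*
 * arch_dma_set_uncached() above returns an uncached alias or remapping
 * of @addr covering @size bytes (it may return an ERR_PTR() on failure);
 * arch_dma_clear_uncached() undoes it when the buffer is freed.
 */
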
#endif /* _LINUX_DMA_NONCOHERENT_H */