/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_DMA_DIRECT_H
#define _LINUX_DMA_DIRECT_H 1

#include <linux/dma-mapping.h>
#include <linux/memblock.h> /* for min_low_pfn */
#include <linux/mem_encrypt.h>

extern unsigned int zone_dma_bits;

#ifdef CONFIG_ARCH_HAS_PHYS_TO_DMA
#include <asm/dma-direct.h>
#else
static inline dma_addr_t __phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	dma_addr_t dev_addr = (dma_addr_t)paddr;

	return dev_addr - ((dma_addr_t)dev->dma_pfn_offset << PAGE_SHIFT);
}

static inline phys_addr_t __dma_to_phys(struct device *dev, dma_addr_t dev_addr)
{
	phys_addr_t paddr = (phys_addr_t)dev_addr;

	return paddr + ((phys_addr_t)dev->dma_pfn_offset << PAGE_SHIFT);
}
#endif /* !CONFIG_ARCH_HAS_PHYS_TO_DMA */
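
/*
 * Example (illustrative, not in the upstream header): on a platform whose
 * RAM starts at CPU physical address 0x80000000 but is seen by devices at
 * bus address 0x00000000, platform code would set
 * dev->dma_pfn_offset = 0x80000000 >> PAGE_SHIFT (0x80000 with 4 KiB pages),
 * so that __phys_to_dma(dev, 0x80001000) == 0x00001000 and
 * __dma_to_phys(dev, 0x00001000) == 0x80001000.  The addresses are made up;
 * real offsets come from devicetree "dma-ranges" or platform code.
 */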

#ifdef CONFIG_ARCH_HAS_FORCE_DMA_UNENCRYPTED
bool force_dma_unencrypted(struct device *dev);
#else
static inline bool force_dma_unencrypted(struct device *dev)
{
	return false;
}
#endif /* CONFIG_ARCH_HAS_FORCE_DMA_UNENCRYPTED */

/*
 * If memory encryption is supported, phys_to_dma will set the memory encryption
 * bit in the DMA address, and dma_to_phys will clear it.  The raw __phys_to_dma
 * and __dma_to_phys versions should only be used on non-encrypted memory for
 * special occasions like DMA coherent buffers.
 */
static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	return __sme_set(__phys_to_dma(dev, paddr));
}

static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
	return __sme_clr(__dma_to_phys(dev, daddr));
}
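
/*
 * Illustrative sketch (not part of the upstream header): translating a RAM
 * page to a device-visible DMA address and back.  With memory encryption
 * active, phys_to_dma() also sets the encryption bit via __sme_set(), so
 * the result may differ from the raw __phys_to_dma() value.  The function
 * name example_page_to_dma is hypothetical.
 */
static inline dma_addr_t example_page_to_dma(struct device *dev,
		struct page *page)
{
	dma_addr_t daddr = phys_to_dma(dev, page_to_phys(page));

	/* the translation round-trips back to the CPU physical address */
	WARN_ON_ONCE(dma_to_phys(dev, daddr) != page_to_phys(page));
	return daddr;
}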

static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size,
		bool is_ram)
{
	dma_addr_t end = addr + size - 1;

	if (!dev->dma_mask)
		return false;

	if (is_ram && !IS_ENABLED(CONFIG_ARCH_DMA_ADDR_T_64BIT) &&
	    min(addr, end) < phys_to_dma(dev, PFN_PHYS(min_low_pfn)))
		return false;

	return end <= min_not_zero(*dev->dma_mask, dev->bus_dma_limit);
}
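
/*
 * Illustrative sketch (not part of the upstream header): a mapping path
 * uses dma_capable() to check that the whole buffer is reachable under the
 * device's DMA mask and bus limit before handing it to the hardware.
 * example_map_phys is a hypothetical helper; the real kernel would fall
 * back to a swiotlb bounce buffer instead of failing outright.
 */
static inline dma_addr_t example_map_phys(struct device *dev,
		phys_addr_t phys, size_t size)
{
	dma_addr_t daddr = phys_to_dma(dev, phys);

	if (!dma_capable(dev, daddr, size, true))
		return DMA_MAPPING_ERROR;
	return daddr;
}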

u64 dma_direct_get_required_mask(struct device *dev);
gfp_t dma_direct_optimal_gfp_mask(struct device *dev, u64 dma_mask,
		u64 *phys_mask);
bool dma_coherent_ok(struct device *dev, phys_addr_t phys, size_t size);
void *dma_direct_alloc(struct device *dev, size_t size, dma_addr_t *dma_handle,
		gfp_t gfp, unsigned long attrs);
void dma_direct_free(struct device *dev, size_t size, void *cpu_addr,
		dma_addr_t dma_addr, unsigned long attrs);
void *dma_direct_alloc_pages(struct device *dev, size_t size,
		dma_addr_t *dma_handle, gfp_t gfp, unsigned long attrs);
void dma_direct_free_pages(struct device *dev, size_t size, void *cpu_addr,
		dma_addr_t dma_addr, unsigned long attrs);
int dma_direct_get_sgtable(struct device *dev, struct sg_table *sgt,
		void *cpu_addr, dma_addr_t dma_addr, size_t size,
		unsigned long attrs);
bool dma_direct_can_mmap(struct device *dev);
int dma_direct_mmap(struct device *dev, struct vm_area_struct *vma,
		void *cpu_addr, dma_addr_t dma_addr, size_t size,
		unsigned long attrs);
int dma_direct_supported(struct device *dev, u64 mask);
bool dma_direct_need_sync(struct device *dev, dma_addr_t dma_addr);
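
/*
 * Note (illustrative, not in the upstream header): drivers do not call the
 * dma_direct_* functions above directly.  The dma-mapping core dispatches
 * to them when a device has no dma_map_ops attached, roughly:
 *
 *	const struct dma_map_ops *ops = get_dma_ops(dev);
 *
 *	if (!ops)
 *		cpu_addr = dma_direct_alloc(dev, size, dma_handle, gfp, attrs);
 *	else
 *		cpu_addr = ops->alloc(dev, size, dma_handle, gfp, attrs);
 *
 * The exact dispatch helpers vary between kernel releases.
 */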
#endif /* _LINUX_DMA_DIRECT_H */