// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2014 The Linux Foundation
 */
#include <linux/dma-map-ops.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

Christoph Hellwig | 5cf4537 | 2019-06-03 09:14:31 +0200 | [diff] [blame] | 9 | struct page **dma_common_find_pages(void *cpu_addr) |
| 10 | { |
| 11 | struct vm_struct *area = find_vm_area(cpu_addr); |
| 12 | |
| 13 | if (!area || area->flags != VM_DMA_COHERENT) |
| 14 | return NULL; |
| 15 | return area->pages; |
| 16 | } |
| 17 | |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 18 | /* |
| 19 | * Remaps an array of PAGE_SIZE pages into another vm_area. |
| 20 | * Cannot be used in non-sleeping contexts |
| 21 | */ |
| 22 | void *dma_common_pages_remap(struct page **pages, size_t size, |
Christoph Hellwig | 5123174 | 2019-08-30 08:51:01 +0200 | [diff] [blame] | 23 | pgprot_t prot, const void *caller) |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 24 | { |
Christoph Hellwig | 515e5b6 | 2020-06-01 21:50:32 -0700 | [diff] [blame] | 25 | void *vaddr; |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 26 | |
Eric Auger | 8e36baf | 2020-06-23 14:07:55 +0200 | [diff] [blame] | 27 | vaddr = vmap(pages, PAGE_ALIGN(size) >> PAGE_SHIFT, |
| 28 | VM_DMA_COHERENT, prot); |
Christoph Hellwig | 515e5b6 | 2020-06-01 21:50:32 -0700 | [diff] [blame] | 29 | if (vaddr) |
| 30 | find_vm_area(vaddr)->pages = pages; |
| 31 | return vaddr; |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 32 | } |
| 33 | |
| 34 | /* |
| 35 | * Remaps an allocated contiguous region into another vm_area. |
| 36 | * Cannot be used in non-sleeping contexts |
| 37 | */ |
| 38 | void *dma_common_contiguous_remap(struct page *page, size_t size, |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 39 | pgprot_t prot, const void *caller) |
| 40 | { |
Eric Auger | 8e36baf | 2020-06-23 14:07:55 +0200 | [diff] [blame] | 41 | int count = PAGE_ALIGN(size) >> PAGE_SHIFT; |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 42 | struct page **pages; |
Christoph Hellwig | 515e5b6 | 2020-06-01 21:50:32 -0700 | [diff] [blame] | 43 | void *vaddr; |
| 44 | int i; |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 45 | |
Christoph Hellwig | 515e5b6 | 2020-06-01 21:50:32 -0700 | [diff] [blame] | 46 | pages = kmalloc_array(count, sizeof(struct page *), GFP_KERNEL); |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 47 | if (!pages) |
| 48 | return NULL; |
Christoph Hellwig | 515e5b6 | 2020-06-01 21:50:32 -0700 | [diff] [blame] | 49 | for (i = 0; i < count; i++) |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 50 | pages[i] = nth_page(page, i); |
Christoph Hellwig | 515e5b6 | 2020-06-01 21:50:32 -0700 | [diff] [blame] | 51 | vaddr = vmap(pages, count, VM_DMA_COHERENT, prot); |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 52 | kfree(pages); |
| 53 | |
Christoph Hellwig | 515e5b6 | 2020-06-01 21:50:32 -0700 | [diff] [blame] | 54 | return vaddr; |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 55 | } |
| 56 | |
| 57 | /* |
| 58 | * Unmaps a range previously mapped by dma_common_*_remap |
| 59 | */ |
Christoph Hellwig | 5123174 | 2019-08-30 08:51:01 +0200 | [diff] [blame] | 60 | void dma_common_free_remap(void *cpu_addr, size_t size) |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 61 | { |
Andrey Smirnov | 2cf2aa6 | 2019-10-05 10:23:30 +0200 | [diff] [blame] | 62 | struct vm_struct *area = find_vm_area(cpu_addr); |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 63 | |
Andrey Smirnov | 2cf2aa6 | 2019-10-05 10:23:30 +0200 | [diff] [blame] | 64 | if (!area || area->flags != VM_DMA_COHERENT) { |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 65 | WARN(1, "trying to free invalid coherent area: %p\n", cpu_addr); |
| 66 | return; |
| 67 | } |
| 68 | |
Christoph Hellwig | f0edfea | 2018-08-24 10:31:08 +0200 | [diff] [blame] | 69 | vunmap(cpu_addr); |
| 70 | } |