// SPDX-License-Identifier: GPL-2.0-or-later
/* Support for hardware buffer manager.
 *
 * Copyright (C) 2016 Marvell
 *
 * Gregory CLEMENT <gregory.clement@free-electrons.com>
 */
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/skbuff.h>
#include <net/hwbm.h>

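/* Free a single buffer previously handed out by hwbm_pool_refill(): page
 * fragments go back through skb_free_frag(), larger buffers through kfree().
 */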
void hwbm_buf_free(struct hwbm_pool *bm_pool, void *buf)
{
	if (likely(bm_pool->frag_size <= PAGE_SIZE))
		skb_free_frag(buf);
	else
		kfree(buf);
}
EXPORT_SYMBOL_GPL(hwbm_buf_free);

/* Refill processing for HW buffer management: allocate one buffer for the
 * pool (a page fragment when frag_size fits within a page, kmalloc()
 * otherwise) and pass it to the pool's optional construct() callback. If
 * the callback fails, the buffer is freed again and -ENOMEM is returned.
 * Note that @gfp is only used on the kmalloc() path, since
 * netdev_alloc_frag() takes no allocation flags.
 */
int hwbm_pool_refill(struct hwbm_pool *bm_pool, gfp_t gfp)
{
	int frag_size = bm_pool->frag_size;
	void *buf;

	if (likely(frag_size <= PAGE_SIZE))
		buf = netdev_alloc_frag(frag_size);
	else
		buf = kmalloc(frag_size, gfp);

	if (!buf)
		return -ENOMEM;

	if (bm_pool->construct && bm_pool->construct(bm_pool, buf)) {
		hwbm_buf_free(bm_pool, buf);
		return -ENOMEM;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(hwbm_pool_refill);

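/* Add up to @buf_num buffers to @bm_pool, refilling one buffer at a time
 * under the pool's buf_lock mutex. Returns the number of buffers actually
 * added, or the current fill level if the pool is already full.
 */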
int hwbm_pool_add(struct hwbm_pool *bm_pool, unsigned int buf_num)
{
	int err, i;

	mutex_lock(&bm_pool->buf_lock);
	if (bm_pool->buf_num == bm_pool->size) {
		pr_warn("pool already filled\n");
		mutex_unlock(&bm_pool->buf_lock);
		return bm_pool->buf_num;
	}

	if (buf_num + bm_pool->buf_num > bm_pool->size) {
		pr_warn("cannot allocate %d buffers for pool\n",
			buf_num);
		mutex_unlock(&bm_pool->buf_lock);
		return 0;
	}

	if ((buf_num + bm_pool->buf_num) < bm_pool->buf_num) {
		pr_warn("Adding %d buffers to the %d current buffers will overflow\n",
			buf_num, bm_pool->buf_num);
		mutex_unlock(&bm_pool->buf_lock);
		return 0;
	}

	for (i = 0; i < buf_num; i++) {
		err = hwbm_pool_refill(bm_pool, GFP_KERNEL);
		if (err < 0)
			break;
	}

	/* Update BM driver with number of buffers added to pool */
	bm_pool->buf_num += i;

	pr_debug("hwbm pool: %d of %d buffers added\n", i, buf_num);
	mutex_unlock(&bm_pool->buf_lock);

	return i;
}
EXPORT_SYMBOL_GPL(hwbm_pool_add);
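
/* Illustrative sketch (not part of this file): a hypothetical driver using
 * this API would embed a struct hwbm_pool, point ->construct at a callback
 * that hands each freshly allocated buffer to its hardware, and then
 * pre-fill the pool. The names below (my_priv, my_construct, MY_POOL_SIZE,
 * MY_BUF_SIZE) are made up for the example.
 *
 *	static int my_construct(struct hwbm_pool *pool, void *buf)
 *	{
 *		// Map @buf and give it to the hardware; return 0 on success.
 *		return 0;
 *	}
 *
 *	static int my_pool_init(struct my_priv *priv)
 *	{
 *		struct hwbm_pool *pool = &priv->pool;
 *		int added;
 *
 *		pool->size = MY_POOL_SIZE;
 *		pool->frag_size = SKB_DATA_ALIGN(MY_BUF_SIZE) +
 *				  SKB_DATA_ALIGN(sizeof(struct skb_shared_info));
 *		pool->construct = my_construct;
 *		mutex_init(&pool->buf_lock);
 *
 *		added = hwbm_pool_add(pool, pool->size);
 *		return added == pool->size ? 0 : -ENOMEM;
 *	}
 */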