Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 2 | #ifndef __NET_GEN_STATS_H |
| 3 | #define __NET_GEN_STATS_H |
| 4 | |
| 5 | #include <linux/gen_stats.h> |
| 6 | #include <linux/socket.h> |
| 7 | #include <linux/rtnetlink.h> |
| 8 | #include <linux/pkt_sched.h> |
| 9 | |
/* Note: this used to be in include/uapi/linux/gen_stats.h */
/* Basic traffic counters. The "packed" name is historical, kept from the
 * old UAPI definition noted above.
 */
struct gnet_stats_basic_packed {
	__u64	bytes;		/* total bytes seen */
	__u64	packets;	/* total packets seen */
};
Eric Dumazet | 4d390c2 | 2019-11-04 19:13:13 -0800 | [diff] [blame] | 15 | |
/* Per-CPU basic counters. @syncp is a u64_stats sequence point so that
 * the 64-bit counters can be read consistently where a 64-bit load is
 * not atomic. The alignment lets both counters share a 16-byte-aligned
 * slot.
 */
struct gnet_stats_basic_cpu {
	struct gnet_stats_basic_packed bstats;	/* the counters themselves */
	struct u64_stats_sync syncp;		/* reader/writer sync for bstats */
} __aligned(2 * sizeof(u64));
John Fastabend | 22e0f8b | 2014-09-28 11:52:56 -0700 | [diff] [blame] | 20 | |
Eric Dumazet | 1c0d32f | 2016-12-04 09:48:16 -0800 | [diff] [blame] | 21 | struct net_rate_estimator; |
| 22 | |
/* Context carried across one statistics dump into a netlink message.
 * Set up by gnet_stats_start_copy*(), filled by the gnet_stats_copy_*()
 * helpers, finalized by gnet_stats_finish_copy().
 */
struct gnet_dump {
	spinlock_t *		lock;	/* lock protecting the stats source, if any */
	struct sk_buff *	skb;	/* netlink message being built */
	struct nlattr *		tail;	/* nested attribute currently being filled */

	/* Backward compatibility */
	int			compat_tc_stats;	/* attr type for legacy tc_stats copy */
	int			compat_xstats;		/* attr type for legacy xstats copy */
	int			padattr;		/* attribute type used for padding */
	void *			xstats;			/* app-specific stats to emit */
	int			xstats_len;		/* length of @xstats in bytes */
	struct tc_stats		tc_stats;		/* legacy aggregate, built incrementally */
};
| 36 | |
/* Begin dumping statistics of attribute @type into @skb; initializes @d.
 * @lock, if non-NULL, guards the statistics being read; @padattr is the
 * attribute type used for alignment padding.
 */
int gnet_stats_start_copy(struct sk_buff *skb, int type, spinlock_t *lock,
			  struct gnet_dump *d, int padattr);

/* As gnet_stats_start_copy(), but additionally emits legacy-format
 * copies under @tc_stats_type / @xstats_type for old userspace.
 */
int gnet_stats_start_copy_compat(struct sk_buff *skb, int type,
				 int tc_stats_type, int xstats_type,
				 spinlock_t *lock, struct gnet_dump *d,
				 int padattr);
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 44 | |
/* Append basic byte/packet counters to the dump @d. Counters come from
 * the per-CPU set @cpu when it is non-NULL, otherwise from @b; @running
 * is the seqcount guarding writer updates, may be NULL when no writer
 * synchronization is needed.
 */
int gnet_stats_copy_basic(const seqcount_t *running,
			  struct gnet_dump *d,
			  struct gnet_stats_basic_cpu __percpu *cpu,
			  struct gnet_stats_basic_packed *b);

/* Aggregate basic counters into @bstats without touching a dump context;
 * source selection (@cpu vs @b) as in gnet_stats_copy_basic().
 */
void __gnet_stats_copy_basic(const seqcount_t *running,
			     struct gnet_stats_basic_packed *bstats,
			     struct gnet_stats_basic_cpu __percpu *cpu,
			     struct gnet_stats_basic_packed *b);

/* Like gnet_stats_copy_basic() but for hardware-offloaded counters. */
int gnet_stats_copy_basic_hw(const seqcount_t *running,
			     struct gnet_dump *d,
			     struct gnet_stats_basic_cpu __percpu *cpu,
			     struct gnet_stats_basic_packed *b);

/* Append rate-estimator output (if an estimator is active on @ptr). */
int gnet_stats_copy_rate_est(struct gnet_dump *d,
			     struct net_rate_estimator __rcu **ptr);

/* Append queue statistics; per-CPU @cpu_q takes precedence over @q when
 * non-NULL, @qlen is the current queue length to report.
 */
int gnet_stats_copy_queue(struct gnet_dump *d,
			  struct gnet_stats_queue __percpu *cpu_q,
			  struct gnet_stats_queue *q, __u32 qlen);

/* Aggregate queue statistics into @qstats without a dump context. */
void __gnet_stats_copy_queue(struct gnet_stats_queue *qstats,
			     const struct gnet_stats_queue __percpu *cpu_q,
			     const struct gnet_stats_queue *q, __u32 qlen);

/* Append @len bytes of application-specific statistics @st. */
int gnet_stats_copy_app(struct gnet_dump *d, void *st, int len);

/* Finalize the dump started by gnet_stats_start_copy*(). */
int gnet_stats_finish_copy(struct gnet_dump *d);
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 68 | |
/* Create a rate estimator fed by @bstats (or per-CPU @cpu_bstats when
 * non-NULL) and publish it through the RCU pointer @rate_est. @lock and
 * @running guard reads of the counters; @opt carries the netlink
 * configuration (interval/ewma parameters — see struct tc_estimator).
 */
int gen_new_estimator(struct gnet_stats_basic_packed *bstats,
		      struct gnet_stats_basic_cpu __percpu *cpu_bstats,
		      struct net_rate_estimator __rcu **rate_est,
		      spinlock_t *lock,
		      seqcount_t *running, struct nlattr *opt);

/* Tear down the estimator published at @ptr, if any. */
void gen_kill_estimator(struct net_rate_estimator __rcu **ptr);

/* Replace the estimator at @ptr with a freshly configured one;
 * parameters as for gen_new_estimator().
 */
int gen_replace_estimator(struct gnet_stats_basic_packed *bstats,
			  struct gnet_stats_basic_cpu __percpu *cpu_bstats,
			  struct net_rate_estimator __rcu **ptr,
			  spinlock_t *lock,
			  seqcount_t *running, struct nlattr *opt);

/* True if an estimator is currently published at @ptr. */
bool gen_estimator_active(struct net_rate_estimator __rcu **ptr);

/* Read the current rate estimate into @sample; returns false when no
 * estimator is active.
 */
bool gen_estimator_read(struct net_rate_estimator __rcu **ptr,
			struct gnet_stats_rate_est64 *sample);
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 83 | #endif |