Thomas Gleixner | 2874c5f | 2019-05-27 08:55:01 +0200 | [diff] [blame] | 1 | // SPDX-License-Identifier: GPL-2.0-or-later |
Pavel Emelyanov | 52b7c59 | 2011-12-09 06:23:51 +0000 | [diff] [blame] | 2 | /* |
| 3 | * udp_diag.c Module for monitoring UDP transport protocols sockets. |
| 4 | * |
| 5 | * Authors: Pavel Emelyanov, <xemul@parallels.com> |
Pavel Emelyanov | 52b7c59 | 2011-12-09 06:23:51 +0000 | [diff] [blame] | 6 | */ |
| 7 | |
| 8 | |
| 9 | #include <linux/module.h> |
| 10 | #include <linux/inet_diag.h> |
| 11 | #include <linux/udp.h> |
| 12 | #include <net/udp.h> |
| 13 | #include <net/udplite.h> |
Pavel Emelyanov | 52b7c59 | 2011-12-09 06:23:51 +0000 | [diff] [blame] | 14 | #include <linux/sock_diag.h> |
| 15 | |
Pavel Emelyanov | b6d640c | 2011-12-09 06:24:21 +0000 | [diff] [blame] | 16 | static int sk_diag_dump(struct sock *sk, struct sk_buff *skb, |
Eric Dumazet | 34160ea | 2015-03-10 07:15:54 -0700 | [diff] [blame] | 17 | struct netlink_callback *cb, |
| 18 | const struct inet_diag_req_v2 *req, |
Lorenzo Colitti | d545cac | 2016-09-08 00:42:25 +0900 | [diff] [blame] | 19 | struct nlattr *bc, bool net_admin) |
Pavel Emelyanov | b6d640c | 2011-12-09 06:24:21 +0000 | [diff] [blame] | 20 | { |
| 21 | if (!inet_diag_bc_sk(bc, sk)) |
| 22 | return 0; |
| 23 | |
Martin KaFai Lau | 5682d39 | 2020-02-25 15:04:09 -0800 | [diff] [blame] | 24 | return inet_sk_diag_fill(sk, NULL, skb, cb, req, NLM_F_MULTI, |
| 25 | net_admin); |
Pavel Emelyanov | b6d640c | 2011-12-09 06:24:21 +0000 | [diff] [blame] | 26 | } |
| 27 | |
/*
 * Handle a "get exactly one socket" request: look the socket up in @tbl,
 * build a single diag message and unicast it back to the requester.
 * Returns 0 on success, -ENOENT if no matching live socket is found,
 * -ENOMEM if the reply skb cannot be allocated, or another negative
 * errno from the fill/send path.
 */
static int udp_dump_one(struct udp_table *tbl,
			struct netlink_callback *cb,
			const struct inet_diag_req_v2 *req)
{
	struct sk_buff *in_skb = cb->skb;
	int err;
	struct sock *sk = NULL;
	struct sk_buff *rep;
	struct net *net = sock_net(in_skb->sk);

	/* The UDP hash chains are traversed under RCU. */
	rcu_read_lock();
	if (req->sdiag_family == AF_INET)
		/* src and dst are swapped for historical reasons */
		sk = __udp4_lib_lookup(net,
				req->id.idiag_src[0], req->id.idiag_sport,
				req->id.idiag_dst[0], req->id.idiag_dport,
				req->id.idiag_if, 0, tbl, NULL);
#if IS_ENABLED(CONFIG_IPV6)
	else if (req->sdiag_family == AF_INET6)
		sk = __udp6_lib_lookup(net,
				(struct in6_addr *)req->id.idiag_src,
				req->id.idiag_sport,
				(struct in6_addr *)req->id.idiag_dst,
				req->id.idiag_dport,
				req->id.idiag_if, 0, tbl, NULL);
#endif
	/* Only take a reference if the socket is not already being freed. */
	if (sk && !refcount_inc_not_zero(&sk->sk_refcnt))
		sk = NULL;
	rcu_read_unlock();
	err = -ENOENT;
	if (!sk)
		goto out_nosk;

	/* Reject requests whose cookie names a recycled socket. */
	err = sock_diag_check_cookie(sk, req->id.idiag_cookie);
	if (err)
		goto out;

	err = -ENOMEM;
	rep = nlmsg_new(nla_total_size(sizeof(struct inet_diag_msg)) +
			inet_diag_msg_attrs_size() +
			nla_total_size(sizeof(struct inet_diag_meminfo)) + 64,
			GFP_KERNEL);
	if (!rep)
		goto out;

	err = inet_sk_diag_fill(sk, NULL, rep, cb, req, 0,
				netlink_net_capable(in_skb, CAP_NET_ADMIN));
	if (err < 0) {
		/* The skb was sized above to fit; -EMSGSIZE would be a bug. */
		WARN_ON(err == -EMSGSIZE);
		kfree_skb(rep);
		goto out;
	}
	err = nlmsg_unicast(net->diag_nlsk, rep, NETLINK_CB(in_skb).portid);

out:
	if (sk)
		sock_put(sk);
out_nosk:
	return err;
}
| 88 | |
/*
 * Walk every slot of @table and emit one diag record for each socket
 * that matches request @r.  Dump position is saved in cb->args[] so a
 * multi-part dump can resume where the previous netlink skb filled up.
 */
static void udp_dump(struct udp_table *table, struct sk_buff *skb,
		struct netlink_callback *cb,
		const struct inet_diag_req_v2 *r)
{
	bool net_admin = netlink_net_capable(cb->skb, CAP_NET_ADMIN);
	struct net *net = sock_net(skb->sk);
	struct inet_diag_dump_data *cb_data;
	int num, s_num, slot, s_slot;
	struct nlattr *bc;

	cb_data = cb->data;
	bc = cb_data->inet_diag_nla_bc;
	/* Resume state: args[0] = hash slot, args[1] = index within slot. */
	s_slot = cb->args[0];
	num = s_num = cb->args[1];

	for (slot = s_slot; slot <= table->mask; s_num = 0, slot++) {
		struct udp_hslot *hslot = &table->hash[slot];
		struct sock *sk;

		num = 0;

		if (hlist_empty(&hslot->head))
			continue;

		spin_lock_bh(&hslot->lock);
		sk_for_each(sk, &hslot->head) {
			struct inet_sock *inet = inet_sk(sk);

			/* Sockets in other netns are invisible and do not
			 * advance the resume index (plain continue).
			 */
			if (!net_eq(sock_net(sk), net))
				continue;
			/* Skip entries already emitted on a previous pass. */
			if (num < s_num)
				goto next;
			if (!(r->idiag_states & (1 << sk->sk_state)))
				goto next;
			if (r->sdiag_family != AF_UNSPEC &&
			    sk->sk_family != r->sdiag_family)
				goto next;
			if (r->id.idiag_sport != inet->inet_sport &&
			    r->id.idiag_sport)
				goto next;
			if (r->id.idiag_dport != inet->inet_dport &&
			    r->id.idiag_dport)
				goto next;

			/* Negative return means the skb is full: record the
			 * position and let the core re-invoke us later.
			 */
			if (sk_diag_dump(sk, skb, cb, r, bc, net_admin) < 0) {
				spin_unlock_bh(&hslot->lock);
				goto done;
			}
next:
			num++;
		}
		spin_unlock_bh(&hslot->lock);
	}
done:
	cb->args[0] = slot;
	cb->args[1] = num;
}
| 146 | |
/* inet_diag ->dump callback for plain UDP: walk the global UDP table. */
static void udp_diag_dump(struct sk_buff *skb, struct netlink_callback *cb,
		const struct inet_diag_req_v2 *r)
{
	udp_dump(&udp_table, skb, cb, r);
}
| 152 | |
/* inet_diag ->dump_one callback for plain UDP. */
static int udp_diag_dump_one(struct netlink_callback *cb,
			const struct inet_diag_req_v2 *req)
{
	return udp_dump_one(&udp_table, cb, req);
}
| 158 | |
/* Fill the queue counters of a diag reply: rqueue from the UDP receive
 * queue accounting helper, wqueue from the socket's write allocation.
 */
static void udp_diag_get_info(struct sock *sk, struct inet_diag_msg *r,
		void *info)
{
	r->idiag_rqueue = udp_rqueue_get(sk);
	r->idiag_wqueue = sk_wmem_alloc_get(sk);
}
| 165 | |
David Ahern | 5d77dca | 2016-08-23 21:06:33 -0700 | [diff] [blame] | 166 | #ifdef CONFIG_INET_DIAG_DESTROY |
/*
 * Look up the socket described by @req in @tbl and forcibly close it via
 * sock_diag_destroy(sk, ECONNABORTED).  Returns 0 on success, -EINVAL
 * for an unsupported address family, -ENOENT when no live socket matches
 * (or the cookie check fails), or the error from sock_diag_destroy().
 */
static int __udp_diag_destroy(struct sk_buff *in_skb,
			const struct inet_diag_req_v2 *req,
			struct udp_table *tbl)
{
	struct net *net = sock_net(in_skb->sk);
	struct sock *sk;
	int err;

	rcu_read_lock();

	/* Note: unlike udp_dump_one(), dst/dport are passed first here. */
	if (req->sdiag_family == AF_INET)
		sk = __udp4_lib_lookup(net,
				req->id.idiag_dst[0], req->id.idiag_dport,
				req->id.idiag_src[0], req->id.idiag_sport,
				req->id.idiag_if, 0, tbl, NULL);
#if IS_ENABLED(CONFIG_IPV6)
	else if (req->sdiag_family == AF_INET6) {
		/* v4-mapped endpoints live in the IPv4 lookup path; use the
		 * low 32 bits (word [3]) of each mapped address.
		 */
		if (ipv6_addr_v4mapped((struct in6_addr *)req->id.idiag_dst) &&
		    ipv6_addr_v4mapped((struct in6_addr *)req->id.idiag_src))
			sk = __udp4_lib_lookup(net,
					req->id.idiag_dst[3], req->id.idiag_dport,
					req->id.idiag_src[3], req->id.idiag_sport,
					req->id.idiag_if, 0, tbl, NULL);

		else
			sk = __udp6_lib_lookup(net,
					(struct in6_addr *)req->id.idiag_dst,
					req->id.idiag_dport,
					(struct in6_addr *)req->id.idiag_src,
					req->id.idiag_sport,
					req->id.idiag_if, 0, tbl, NULL);
	}
#endif
	else {
		rcu_read_unlock();
		return -EINVAL;
	}

	/* Only take a reference if the socket is not already being freed. */
	if (sk && !refcount_inc_not_zero(&sk->sk_refcnt))
		sk = NULL;

	rcu_read_unlock();

	if (!sk)
		return -ENOENT;

	/* Don't destroy a recycled socket that merely reuses the ports. */
	if (sock_diag_check_cookie(sk, req->id.idiag_cookie)) {
		sock_put(sk);
		return -ENOENT;
	}

	err = sock_diag_destroy(sk, ECONNABORTED);

	sock_put(sk);

	return err;
}
| 224 | |
/* SOCK_DESTROY handler for plain UDP sockets. */
static int udp_diag_destroy(struct sk_buff *in_skb,
			const struct inet_diag_req_v2 *req)
{
	return __udp_diag_destroy(in_skb, req, &udp_table);
}
| 230 | |
/* SOCK_DESTROY handler for UDP-Lite sockets. */
static int udplite_diag_destroy(struct sk_buff *in_skb,
			const struct inet_diag_req_v2 *req)
{
	return __udp_diag_destroy(in_skb, req, &udplite_table);
}
| 236 | |
| 237 | #endif |
| 238 | |
/* inet_diag handler registered for IPPROTO_UDP requests. */
static const struct inet_diag_handler udp_diag_handler = {
	.dump		 = udp_diag_dump,
	.dump_one	 = udp_diag_dump_one,
	.idiag_get_info  = udp_diag_get_info,
	.idiag_type	 = IPPROTO_UDP,
	.idiag_info_size = 0,	/* UDP exports no protocol-specific info blob */
#ifdef CONFIG_INET_DIAG_DESTROY
	.destroy	 = udp_diag_destroy,
#endif
};
| 249 | |
/* inet_diag ->dump callback for UDP-Lite: walk the UDP-Lite table. */
static void udplite_diag_dump(struct sk_buff *skb, struct netlink_callback *cb,
		const struct inet_diag_req_v2 *r)
{
	udp_dump(&udplite_table, skb, cb, r);
}
| 255 | |
/* inet_diag ->dump_one callback for UDP-Lite. */
static int udplite_diag_dump_one(struct netlink_callback *cb,
			const struct inet_diag_req_v2 *req)
{
	return udp_dump_one(&udplite_table, cb, req);
}
| 261 | |
/* inet_diag handler registered for IPPROTO_UDPLITE requests. */
static const struct inet_diag_handler udplite_diag_handler = {
	.dump		 = udplite_diag_dump,
	.dump_one	 = udplite_diag_dump_one,
	.idiag_get_info  = udp_diag_get_info,	/* same counters as UDP */
	.idiag_type	 = IPPROTO_UDPLITE,
	.idiag_info_size = 0,
#ifdef CONFIG_INET_DIAG_DESTROY
	.destroy	 = udplite_diag_destroy,
#endif
};
| 272 | |
| 273 | static int __init udp_diag_init(void) |
| 274 | { |
| 275 | int err; |
| 276 | |
| 277 | err = inet_diag_register(&udp_diag_handler); |
| 278 | if (err) |
| 279 | goto out; |
| 280 | err = inet_diag_register(&udplite_diag_handler); |
| 281 | if (err) |
| 282 | goto out_lite; |
| 283 | out: |
| 284 | return err; |
| 285 | out_lite: |
| 286 | inet_diag_unregister(&udp_diag_handler); |
| 287 | goto out; |
| 288 | } |
| 289 | |
/* Module exit: unregister handlers in reverse registration order. */
static void __exit udp_diag_exit(void)
{
	inet_diag_unregister(&udplite_diag_handler);
	inet_diag_unregister(&udp_diag_handler);
}
| 295 | |
module_init(udp_diag_init);
module_exit(udp_diag_exit);
MODULE_LICENSE("GPL");
/* Autoload this module for NETLINK_SOCK_DIAG requests naming
 * family 2 (AF_INET) with protocol 17 (UDP) or 136 (UDP-Lite).
 */
MODULE_ALIAS_NET_PF_PROTO_TYPE(PF_NETLINK, NETLINK_SOCK_DIAG, 2-17 /* AF_INET - IPPROTO_UDP */);
MODULE_ALIAS_NET_PF_PROTO_TYPE(PF_NETLINK, NETLINK_SOCK_DIAG, 2-136 /* AF_INET - IPPROTO_UDPLITE */);