/*
 * net/sched/cls_matchall.c		Match-all classifier
 *
 * Copyright (c) 2016 Jiri Pirko <jiri@mellanox.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/percpu.h>

#include <net/sch_generic.h>
#include <net/pkt_cls.h>

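/* matchall keeps at most one filter instance per classifier: tp->root points
 * at this head, or is NULL when no filter has been installed yet.
 */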
struct cls_mall_head {
        struct tcf_exts exts;
        struct tcf_result res;
        u32 handle;
        u32 flags;
        unsigned int in_hw_count;
        struct tc_matchall_pcnt __percpu *pf;
        struct rcu_work rwork;
};

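/* Fast path, called under RCU BH protection.  Every packet matches: unless
 * the filter is software-skipped, copy the stored classification result,
 * bump the per-CPU hit counter and run the attached actions.
 */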
static int mall_classify(struct sk_buff *skb, const struct tcf_proto *tp,
                         struct tcf_result *res)
{
        struct cls_mall_head *head = rcu_dereference_bh(tp->root);

        if (tc_skip_sw(head->flags))
                return -1;

        *res = head->res;
        __this_cpu_inc(head->pf->rhit);
        return tcf_exts_exec(skb, &head->exts, res);
}

static int mall_init(struct tcf_proto *tp)
{
        return 0;
}

static void __mall_destroy(struct cls_mall_head *head)
{
        tcf_exts_destroy(&head->exts);
        tcf_exts_put_net(&head->exts);
        free_percpu(head->pf);
        kfree(head);
}

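/* Deferred destruction, queued via tcf_queue_work() so the head is only
 * freed after an RCU grace period; RTNL is taken around the actual teardown.
 */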
static void mall_destroy_work(struct work_struct *work)
{
        struct cls_mall_head *head = container_of(to_rcu_work(work),
                                                  struct cls_mall_head,
                                                  rwork);
        rtnl_lock();
        __mall_destroy(head);
        rtnl_unlock();
}

static void mall_destroy_hw_filter(struct tcf_proto *tp,
                                   struct cls_mall_head *head,
                                   unsigned long cookie,
                                   struct netlink_ext_ack *extack)
{
        struct tc_cls_matchall_offload cls_mall = {};
        struct tcf_block *block = tp->chain->block;

        tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
        cls_mall.command = TC_CLSMATCHALL_DESTROY;
        cls_mall.cookie = cookie;

        tc_setup_cb_call(block, TC_SETUP_CLSMATCHALL, &cls_mall, false);
        tcf_block_offload_dec(block, &head->flags);
}

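/* Offload the filter through the block callbacks.  A positive return from
 * tc_setup_cb_call() is the number of devices that accepted the filter; if
 * skip_sw was requested and nothing ended up in hardware, fail with -EINVAL.
 */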
static int mall_replace_hw_filter(struct tcf_proto *tp,
                                  struct cls_mall_head *head,
                                  unsigned long cookie,
                                  struct netlink_ext_ack *extack)
{
        struct tc_cls_matchall_offload cls_mall = {};
        struct tcf_block *block = tp->chain->block;
        bool skip_sw = tc_skip_sw(head->flags);
        int err;

        tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
        cls_mall.command = TC_CLSMATCHALL_REPLACE;
        cls_mall.exts = &head->exts;
        cls_mall.cookie = cookie;

        err = tc_setup_cb_call(block, TC_SETUP_CLSMATCHALL, &cls_mall, skip_sw);
        if (err < 0) {
                mall_destroy_hw_filter(tp, head, cookie, NULL);
                return err;
        } else if (err > 0) {
                head->in_hw_count = err;
                tcf_block_offload_inc(block, &head->flags);
        }

        if (skip_sw && !(head->flags & TCA_CLS_FLAGS_IN_HW))
                return -EINVAL;

        return 0;
}

static void mall_destroy(struct tcf_proto *tp, struct netlink_ext_ack *extack)
{
        struct cls_mall_head *head = rtnl_dereference(tp->root);

        if (!head)
                return;

        tcf_unbind_filter(tp, &head->res);

        if (!tc_skip_hw(head->flags))
                mall_destroy_hw_filter(tp, head, (unsigned long) head, extack);

        if (tcf_exts_get_net(&head->exts))
                tcf_queue_work(&head->rwork, mall_destroy_work);
        else
                __mall_destroy(head);
}

static void *mall_get(struct tcf_proto *tp, u32 handle)
{
        return NULL;
}

static const struct nla_policy mall_policy[TCA_MATCHALL_MAX + 1] = {
        [TCA_MATCHALL_UNSPEC]   = { .type = NLA_UNSPEC },
        [TCA_MATCHALL_CLASSID]  = { .type = NLA_U32 },
};

static int mall_set_parms(struct net *net, struct tcf_proto *tp,
                          struct cls_mall_head *head,
                          unsigned long base, struct nlattr **tb,
                          struct nlattr *est, bool ovr,
                          struct netlink_ext_ack *extack)
{
        int err;

        err = tcf_exts_validate(net, tp, tb, est, &head->exts, ovr, true,
                                extack);
        if (err < 0)
                return err;

        if (tb[TCA_MATCHALL_CLASSID]) {
                head->res.classid = nla_get_u32(tb[TCA_MATCHALL_CLASSID]);
                tcf_bind_filter(tp, &head->res, base);
        }
        return 0;
}

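/* Create the matchall filter: parse the netlink attributes, allocate the head
 * and its per-CPU counters, validate the actions and, unless skip_hw is set,
 * offload the filter.  Replacing an existing instance is refused (-EEXIST).
 */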
static int mall_change(struct net *net, struct sk_buff *in_skb,
                       struct tcf_proto *tp, unsigned long base,
                       u32 handle, struct nlattr **tca,
                       void **arg, bool ovr, struct netlink_ext_ack *extack)
{
        struct cls_mall_head *head = rtnl_dereference(tp->root);
        struct nlattr *tb[TCA_MATCHALL_MAX + 1];
        struct cls_mall_head *new;
        u32 flags = 0;
        int err;

        if (!tca[TCA_OPTIONS])
                return -EINVAL;

        if (head)
                return -EEXIST;

        err = nla_parse_nested(tb, TCA_MATCHALL_MAX, tca[TCA_OPTIONS],
                               mall_policy, NULL);
        if (err < 0)
                return err;

        if (tb[TCA_MATCHALL_FLAGS]) {
                flags = nla_get_u32(tb[TCA_MATCHALL_FLAGS]);
                if (!tc_flags_valid(flags))
                        return -EINVAL;
        }

        new = kzalloc(sizeof(*new), GFP_KERNEL);
        if (!new)
                return -ENOBUFS;

        err = tcf_exts_init(&new->exts, TCA_MATCHALL_ACT, 0);
        if (err)
                goto err_exts_init;

        if (!handle)
                handle = 1;
        new->handle = handle;
        new->flags = flags;
        new->pf = alloc_percpu(struct tc_matchall_pcnt);
        if (!new->pf) {
                err = -ENOMEM;
                goto err_alloc_percpu;
        }

        err = mall_set_parms(net, tp, new, base, tb, tca[TCA_RATE], ovr,
                             extack);
        if (err)
                goto err_set_parms;

        if (!tc_skip_hw(new->flags)) {
                err = mall_replace_hw_filter(tp, new, (unsigned long)new,
                                             extack);
                if (err)
                        goto err_replace_hw_filter;
        }

        if (!tc_in_hw(new->flags))
                new->flags |= TCA_CLS_FLAGS_NOT_IN_HW;

        *arg = head;
        rcu_assign_pointer(tp->root, new);
        return 0;

err_replace_hw_filter:
err_set_parms:
        free_percpu(new->pf);
err_alloc_percpu:
        tcf_exts_destroy(&new->exts);
err_exts_init:
        kfree(new);
        return err;
}

static int mall_delete(struct tcf_proto *tp, void *arg, bool *last,
                       struct netlink_ext_ack *extack)
{
        return -EOPNOTSUPP;
}

static void mall_walk(struct tcf_proto *tp, struct tcf_walker *arg)
{
        struct cls_mall_head *head = rtnl_dereference(tp->root);

        if (arg->count < arg->skip)
                goto skip;
        if (arg->fn(tp, head, arg) < 0)
                arg->stop = 1;
skip:
        arg->count++;
}

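/* Replay (add) or remove the hardware offload through a single block
 * callback, keeping the in_hw counters consistent; used when a callback is
 * registered on or unregistered from the block after the filter exists.
 */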
static int mall_reoffload(struct tcf_proto *tp, bool add, tc_setup_cb_t *cb,
                          void *cb_priv, struct netlink_ext_ack *extack)
{
        struct cls_mall_head *head = rtnl_dereference(tp->root);
        struct tc_cls_matchall_offload cls_mall = {};
        struct tcf_block *block = tp->chain->block;
        int err;

        if (tc_skip_hw(head->flags))
                return 0;

        tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
        cls_mall.command = add ?
                TC_CLSMATCHALL_REPLACE : TC_CLSMATCHALL_DESTROY;
        cls_mall.exts = &head->exts;
        cls_mall.cookie = (unsigned long)head;

        err = cb(TC_SETUP_CLSMATCHALL, &cls_mall, cb_priv);
        if (err) {
                if (add && tc_skip_sw(head->flags))
                        return err;
                return 0;
        }

        tc_cls_offload_cnt_update(block, &head->in_hw_count, &head->flags, add);

        return 0;
}

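/* Dump the filter to user space: classid, flags, the hit counter summed over
 * all possible CPUs, and the attached actions.
 */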
static int mall_dump(struct net *net, struct tcf_proto *tp, void *fh,
                     struct sk_buff *skb, struct tcmsg *t)
{
        struct tc_matchall_pcnt gpf = {};
        struct cls_mall_head *head = fh;
        struct nlattr *nest;
        int cpu;

        if (!head)
                return skb->len;

        t->tcm_handle = head->handle;

        nest = nla_nest_start(skb, TCA_OPTIONS);
        if (!nest)
                goto nla_put_failure;

        if (head->res.classid &&
            nla_put_u32(skb, TCA_MATCHALL_CLASSID, head->res.classid))
                goto nla_put_failure;

        if (head->flags && nla_put_u32(skb, TCA_MATCHALL_FLAGS, head->flags))
                goto nla_put_failure;

        for_each_possible_cpu(cpu) {
                struct tc_matchall_pcnt *pf = per_cpu_ptr(head->pf, cpu);

                gpf.rhit += pf->rhit;
        }

        if (nla_put_64bit(skb, TCA_MATCHALL_PCNT,
                          sizeof(struct tc_matchall_pcnt),
                          &gpf, TCA_MATCHALL_PAD))
                goto nla_put_failure;

        if (tcf_exts_dump(skb, &head->exts))
                goto nla_put_failure;

        nla_nest_end(skb, nest);

        if (tcf_exts_dump_stats(skb, &head->exts) < 0)
                goto nla_put_failure;

        return skb->len;

nla_put_failure:
        nla_nest_cancel(skb, nest);
        return -1;
}

static void mall_bind_class(void *fh, u32 classid, unsigned long cl)
{
        struct cls_mall_head *head = fh;

        if (head && head->res.classid == classid)
                head->res.class = cl;
}

static struct tcf_proto_ops cls_mall_ops __read_mostly = {
        .kind           = "matchall",
        .classify       = mall_classify,
        .init           = mall_init,
        .destroy        = mall_destroy,
        .get            = mall_get,
        .change         = mall_change,
        .delete         = mall_delete,
        .walk           = mall_walk,
        .reoffload      = mall_reoffload,
        .dump           = mall_dump,
        .bind_class     = mall_bind_class,
        .owner          = THIS_MODULE,
};

static int __init cls_mall_init(void)
{
        return register_tcf_proto_ops(&cls_mall_ops);
}

static void __exit cls_mall_exit(void)
{
        unregister_tcf_proto_ops(&cls_mall_ops);
}

module_init(cls_mall_init);
module_exit(cls_mall_exit);

MODULE_AUTHOR("Jiri Pirko <jiri@mellanox.com>");
MODULE_DESCRIPTION("Match-all classifier");
MODULE_LICENSE("GPL v2");