/*
 * net/sched/cls_matchall.c	Match-all classifier
 *
 * Copyright (c) 2016 Jiri Pirko <jiri@mellanox.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

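/*
 * Illustrative user-space usage (iproute2 "tc" syntax; shown only as a
 * sketch, not derived from this file):
 *
 *	tc qdisc add dev eth0 clsact
 *	tc filter add dev eth0 ingress matchall skip_sw \
 *		action mirred egress mirror dev eth1
 *
 * matchall matches every packet on the qdisc/block it is attached to, so a
 * single filter instance per tcf_proto is all that is ever created.
 */
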
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/percpu.h>

#include <net/sch_generic.h>
#include <net/pkt_cls.h>

struct cls_mall_head {
	struct tcf_exts exts;
	struct tcf_result res;
	u32 handle;
	u32 flags;
	unsigned int in_hw_count;
	struct tc_matchall_pcnt __percpu *pf;
	struct rcu_work rwork;
};

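/* Every packet that reaches a software matchall instance is a hit: record
 * it in the per-cpu counter and run the attached actions.
 */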
static int mall_classify(struct sk_buff *skb, const struct tcf_proto *tp,
			 struct tcf_result *res)
{
	struct cls_mall_head *head = rcu_dereference_bh(tp->root);

	if (unlikely(!head))
		return -1;

	if (tc_skip_sw(head->flags))
		return -1;

	*res = head->res;
	__this_cpu_inc(head->pf->rhit);
	return tcf_exts_exec(skb, &head->exts, res);
}

static int mall_init(struct tcf_proto *tp)
{
	return 0;
}

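/* Teardown helpers: the head is either freed directly or, when the netns
 * reference can still be taken, via RCU-deferred work under RTNL so that
 * concurrent readers in mall_classify() remain safe.
 */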
static void __mall_destroy(struct cls_mall_head *head)
{
	tcf_exts_destroy(&head->exts);
	tcf_exts_put_net(&head->exts);
	free_percpu(head->pf);
	kfree(head);
}

static void mall_destroy_work(struct work_struct *work)
{
	struct cls_mall_head *head = container_of(to_rcu_work(work),
						  struct cls_mall_head,
						  rwork);
	rtnl_lock();
	__mall_destroy(head);
	rtnl_unlock();
}

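/* Ask the drivers bound to this block to remove the offloaded rule and
 * drop the block's offload counter.
 */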
static void mall_destroy_hw_filter(struct tcf_proto *tp,
				   struct cls_mall_head *head,
				   unsigned long cookie,
				   struct netlink_ext_ack *extack)
{
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;

	tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
	cls_mall.command = TC_CLSMATCHALL_DESTROY;
	cls_mall.cookie = cookie;

	tc_setup_cb_call(block, TC_SETUP_CLSMATCHALL, &cls_mall, false);
	tcf_block_offload_dec(block, &head->flags);
}

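/* Offload the rule through the block callbacks. A positive return from
 * tc_setup_cb_call() is the number of drivers that installed it; a skip_sw
 * rule that no driver accepted is rejected with -EINVAL.
 */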
static int mall_replace_hw_filter(struct tcf_proto *tp,
				  struct cls_mall_head *head,
				  unsigned long cookie,
				  struct netlink_ext_ack *extack)
{
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;
	bool skip_sw = tc_skip_sw(head->flags);
	int err;

	tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
	cls_mall.command = TC_CLSMATCHALL_REPLACE;
	cls_mall.exts = &head->exts;
	cls_mall.cookie = cookie;

	err = tc_setup_cb_call(block, TC_SETUP_CLSMATCHALL, &cls_mall, skip_sw);
	if (err < 0) {
		mall_destroy_hw_filter(tp, head, cookie, NULL);
		return err;
	} else if (err > 0) {
		head->in_hw_count = err;
		tcf_block_offload_inc(block, &head->flags);
	}

	if (skip_sw && !(head->flags & TCA_CLS_FLAGS_IN_HW))
		return -EINVAL;

	return 0;
}

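/* Destroy the single filter instance together with its tcf_proto. */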
static void mall_destroy(struct tcf_proto *tp, bool rtnl_held,
			 struct netlink_ext_ack *extack)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);

	if (!head)
		return;

	tcf_unbind_filter(tp, &head->res);

	if (!tc_skip_hw(head->flags))
		mall_destroy_hw_filter(tp, head, (unsigned long) head, extack);

	if (tcf_exts_get_net(&head->exts))
		tcf_queue_work(&head->rwork, mall_destroy_work);
	else
		__mall_destroy(head);
}

static void *mall_get(struct tcf_proto *tp, u32 handle)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);

	if (head && head->handle == handle)
		return head;

	return NULL;
}

static const struct nla_policy mall_policy[TCA_MATCHALL_MAX + 1] = {
	[TCA_MATCHALL_UNSPEC]		= { .type = NLA_UNSPEC },
	[TCA_MATCHALL_CLASSID]		= { .type = NLA_U32 },
};

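/* Parse the actions and the optional classid from the netlink attributes. */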
static int mall_set_parms(struct net *net, struct tcf_proto *tp,
			  struct cls_mall_head *head,
			  unsigned long base, struct nlattr **tb,
			  struct nlattr *est, bool ovr,
			  struct netlink_ext_ack *extack)
{
	int err;

	err = tcf_exts_validate(net, tp, tb, est, &head->exts, ovr, true,
				extack);
	if (err < 0)
		return err;

	if (tb[TCA_MATCHALL_CLASSID]) {
		head->res.classid = nla_get_u32(tb[TCA_MATCHALL_CLASSID]);
		tcf_bind_filter(tp, &head->res, base);
	}
	return 0;
}

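/* Create the filter. Since matchall matches everything, only one head may
 * exist per tcf_proto; a second change request fails with -EEXIST.
 */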
static int mall_change(struct net *net, struct sk_buff *in_skb,
		       struct tcf_proto *tp, unsigned long base,
		       u32 handle, struct nlattr **tca,
		       void **arg, bool ovr, bool rtnl_held,
		       struct netlink_ext_ack *extack)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);
	struct nlattr *tb[TCA_MATCHALL_MAX + 1];
	struct cls_mall_head *new;
	u32 flags = 0;
	int err;

	if (!tca[TCA_OPTIONS])
		return -EINVAL;

	if (head)
		return -EEXIST;

	err = nla_parse_nested(tb, TCA_MATCHALL_MAX, tca[TCA_OPTIONS],
			       mall_policy, NULL);
	if (err < 0)
		return err;

	if (tb[TCA_MATCHALL_FLAGS]) {
		flags = nla_get_u32(tb[TCA_MATCHALL_FLAGS]);
		if (!tc_flags_valid(flags))
			return -EINVAL;
	}

	new = kzalloc(sizeof(*new), GFP_KERNEL);
	if (!new)
		return -ENOBUFS;

	err = tcf_exts_init(&new->exts, net, TCA_MATCHALL_ACT, 0);
	if (err)
		goto err_exts_init;

	if (!handle)
		handle = 1;
	new->handle = handle;
	new->flags = flags;
	new->pf = alloc_percpu(struct tc_matchall_pcnt);
	if (!new->pf) {
		err = -ENOMEM;
		goto err_alloc_percpu;
	}

	err = mall_set_parms(net, tp, new, base, tb, tca[TCA_RATE], ovr,
			     extack);
	if (err)
		goto err_set_parms;

	if (!tc_skip_hw(new->flags)) {
		err = mall_replace_hw_filter(tp, new, (unsigned long)new,
					     extack);
		if (err)
			goto err_replace_hw_filter;
	}

	if (!tc_in_hw(new->flags))
		new->flags |= TCA_CLS_FLAGS_NOT_IN_HW;

	*arg = head;
	rcu_assign_pointer(tp->root, new);
	return 0;

err_replace_hw_filter:
err_set_parms:
	free_percpu(new->pf);
err_alloc_percpu:
	tcf_exts_destroy(&new->exts);
err_exts_init:
	kfree(new);
	return err;
}

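/* Deleting the single filter on its own is not supported; it goes away
 * only when the whole tcf_proto is destroyed.
 */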
static int mall_delete(struct tcf_proto *tp, void *arg, bool *last,
		       bool rtnl_held, struct netlink_ext_ack *extack)
{
	return -EOPNOTSUPP;
}

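/* The walk visits at most the one configured head. */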
static void mall_walk(struct tcf_proto *tp, struct tcf_walker *arg,
		      bool rtnl_held)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);

	if (arg->count < arg->skip)
		goto skip;

	if (!head)
		return;
	if (arg->fn(tp, head, arg) < 0)
		arg->stop = 1;
skip:
	arg->count++;
}

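/* Replay (add) or remove the hardware rule through a single driver
 * callback, e.g. when a device binds to or unbinds from the block.
 */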
static int mall_reoffload(struct tcf_proto *tp, bool add, tc_setup_cb_t *cb,
			  void *cb_priv, struct netlink_ext_ack *extack)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;
	int err;

	if (tc_skip_hw(head->flags))
		return 0;

	tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
	cls_mall.command = add ?
		TC_CLSMATCHALL_REPLACE : TC_CLSMATCHALL_DESTROY;
	cls_mall.exts = &head->exts;
	cls_mall.cookie = (unsigned long)head;

	err = cb(TC_SETUP_CLSMATCHALL, &cls_mall, cb_priv);
	if (err) {
		if (add && tc_skip_sw(head->flags))
			return err;
		return 0;
	}

	tc_cls_offload_cnt_update(block, &head->in_hw_count, &head->flags, add);

	return 0;
}

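/* Dump the filter, folding the per-cpu hit counters into a single sum. */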
static int mall_dump(struct net *net, struct tcf_proto *tp, void *fh,
		     struct sk_buff *skb, struct tcmsg *t, bool rtnl_held)
{
	struct tc_matchall_pcnt gpf = {};
	struct cls_mall_head *head = fh;
	struct nlattr *nest;
	int cpu;

	if (!head)
		return skb->len;

	t->tcm_handle = head->handle;

	nest = nla_nest_start(skb, TCA_OPTIONS);
	if (!nest)
		goto nla_put_failure;

	if (head->res.classid &&
	    nla_put_u32(skb, TCA_MATCHALL_CLASSID, head->res.classid))
		goto nla_put_failure;

	if (head->flags && nla_put_u32(skb, TCA_MATCHALL_FLAGS, head->flags))
		goto nla_put_failure;

	for_each_possible_cpu(cpu) {
		struct tc_matchall_pcnt *pf = per_cpu_ptr(head->pf, cpu);

		gpf.rhit += pf->rhit;
	}

	if (nla_put_64bit(skb, TCA_MATCHALL_PCNT,
			  sizeof(struct tc_matchall_pcnt),
			  &gpf, TCA_MATCHALL_PAD))
		goto nla_put_failure;

	if (tcf_exts_dump(skb, &head->exts))
		goto nla_put_failure;

	nla_nest_end(skb, nest);

	if (tcf_exts_dump_stats(skb, &head->exts) < 0)
		goto nla_put_failure;

	return skb->len;

nla_put_failure:
	nla_nest_cancel(skb, nest);
	return -1;
}

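/* Refresh the cached class pointer when our classid is (un)bound. */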
static void mall_bind_class(void *fh, u32 classid, unsigned long cl)
{
	struct cls_mall_head *head = fh;

	if (head && head->res.classid == classid)
		head->res.class = cl;
}

static struct tcf_proto_ops cls_mall_ops __read_mostly = {
	.kind		= "matchall",
	.classify	= mall_classify,
	.init		= mall_init,
	.destroy	= mall_destroy,
	.get		= mall_get,
	.change		= mall_change,
	.delete		= mall_delete,
	.walk		= mall_walk,
	.reoffload	= mall_reoffload,
	.dump		= mall_dump,
	.bind_class	= mall_bind_class,
	.owner		= THIS_MODULE,
};

static int __init cls_mall_init(void)
{
	return register_tcf_proto_ops(&cls_mall_ops);
}

static void __exit cls_mall_exit(void)
{
	unregister_tcf_proto_ops(&cls_mall_ops);
}

module_init(cls_mall_init);
module_exit(cls_mall_exit);

MODULE_AUTHOR("Jiri Pirko <jiri@mellanox.com>");
MODULE_DESCRIPTION("Match-all classifier");
MODULE_LICENSE("GPL v2");