// SPDX-License-Identifier: GPL-2.0-only
/*
 * Testsuite for BPF interpreter and BPF JIT compiler
 *
 * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/init.h>
#include <linux/module.h>
#include <linux/filter.h>
#include <linux/bpf.h>
#include <linux/skbuff.h>
#include <linux/netdevice.h>
#include <linux/if_vlan.h>
#include <linux/random.h>
#include <linux/highmem.h>
#include <linux/sched.h>

/* General test specific settings */
#define MAX_SUBTESTS	3
#define MAX_TESTRUNS	1000
#define MAX_DATA	128
#define MAX_INSNS	512
#define MAX_K		0xffffFFFF

/* Few constants used to init test 'skb' */
#define SKB_TYPE	3
#define SKB_MARK	0x1234aaaa
#define SKB_HASH	0x1234aaab
#define SKB_QUEUE_MAP	123
#define SKB_VLAN_TCI	0xffff
#define SKB_VLAN_PRESENT	1
#define SKB_DEV_IFINDEX	577
#define SKB_DEV_TYPE	588

/* Redefine REGs to make tests less verbose */
#define R0		BPF_REG_0
#define R1		BPF_REG_1
#define R2		BPF_REG_2
#define R3		BPF_REG_3
#define R4		BPF_REG_4
#define R5		BPF_REG_5
#define R6		BPF_REG_6
#define R7		BPF_REG_7
#define R8		BPF_REG_8
#define R9		BPF_REG_9
#define R10		BPF_REG_10

/* Flags that can be passed to test cases */
#define FLAG_NO_DATA		BIT(0)
#define FLAG_EXPECTED_FAIL	BIT(1)
#define FLAG_SKB_FRAG		BIT(2)
#define FLAG_VERIFIER_ZEXT	BIT(3)

enum {
	CLASSIC  = BIT(6),	/* Old BPF instructions only. */
	INTERNAL = BIT(7),	/* Extended instruction set. */
};

#define TEST_TYPE_MASK		(CLASSIC | INTERNAL)

struct bpf_test {
	const char *descr;
	union {
		struct sock_filter insns[MAX_INSNS];
		struct bpf_insn insns_int[MAX_INSNS];
		struct {
			void *insns;
			unsigned int len;
		} ptr;
	} u;
	__u8 aux;
	__u8 data[MAX_DATA];
	struct {
		int data_size;
		__u32 result;
	} test[MAX_SUBTESTS];
	int (*fill_helper)(struct bpf_test *self);
	int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
	__u8 frag_data[MAX_DATA];
	int stack_depth; /* for eBPF only, since tests don't call verifier */
	int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
};

/* Large test cases need separate allocation and fill handler. */

static int bpf_fill_maxinsns1(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	__u32 k = ~0;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len; i++, k--)
		insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns2(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len; i++)
		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns3(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	struct rnd_state rnd;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	prandom_seed_state(&rnd, 3141592653589793238ULL);

	for (i = 0; i < len - 1; i++) {
		__u32 k = prandom_u32_state(&rnd);

		insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
	}

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns4(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS + 1;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len; i++)
		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns5(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);

	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns6(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				     SKF_AD_VLAN_TAG_PRESENT);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns7(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 4; i++)
		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				     SKF_AD_CPU);

	insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
				   SKF_AD_CPU);
	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns8(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i, jmp_off = len - 3;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);

	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns9(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
	insn[2] = BPF_EXIT_INSN();

	for (i = 3; i < len - 2; i++)
		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);

	insn[len - 2] = BPF_EXIT_INSN();
	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns10(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS, hlen = len - 2;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < hlen / 2; i++)
		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
	for (i = hlen - 1; i > hlen / 2; i--)
		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);

	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
	insn[hlen] = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
	insn[hlen + 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
			 unsigned int plen)
{
	struct sock_filter *insn;
	unsigned int rlen;
	int i, j;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	rlen = (len % plen) - 1;

	for (i = 0; i + plen < len; i += plen)
		for (j = 0; j < plen; j++)
			insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
						 plen - 1 - j, 0, 0);
	for (j = 0; j < rlen; j++)
		insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
					 0, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns11(struct bpf_test *self)
{
	/* Hits 70 passes on x86_64 and triggers NOPs padding. */
	return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
}

static int bpf_fill_maxinsns12(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);

	for (i = 1; i < len - 1; i++)
		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_maxinsns13(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 3; i++)
		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);

	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int bpf_fill_ja(struct bpf_test *self)
{
	/* Hits exactly 11 passes on x86_64 JIT. */
	return __bpf_fill_ja(self, 12, 9);
}

static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct sock_filter *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	for (i = 0; i < len - 1; i += 2) {
		insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
		insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
					 SKF_AD_OFF + SKF_AD_CPU);
	}

	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

static int __bpf_fill_stxdw(struct bpf_test *self, int size)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
	insn[1] = BPF_ST_MEM(size, R10, -40, 42);

	for (i = 2; i < len - 2; i++)
		insn[i] = BPF_STX_XADD(size, R10, R0, -40);

	insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
	insn[len - 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	self->stack_depth = 40;

	return 0;
}

static int bpf_fill_stxw(struct bpf_test *self)
{
	return __bpf_fill_stxdw(self, BPF_W);
}

static int bpf_fill_stxdw(struct bpf_test *self)
{
	return __bpf_fill_stxdw(self, BPF_DW);
}

static int bpf_fill_long_jmp(struct bpf_test *self)
{
	unsigned int len = BPF_MAXINSNS;
	struct bpf_insn *insn;
	int i;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[0] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insn[1] = BPF_JMP_IMM(BPF_JEQ, R0, 1, len - 2 - 1);

	/*
	 * Fill with a complex 64-bit operation that expands to a lot of
	 * instructions on 32-bit JITs. The large jump offset can then
	 * overflow the conditional branch field size, triggering a branch
	 * conversion mechanism in some JITs.
	 *
	 * Note: BPF_MAXINSNS of ALU64 MUL is enough to trigger such branch
	 * conversion on the 32-bit MIPS JIT. For other JITs, the instruction
	 * count and/or operation may need to be modified to trigger the
	 * branch conversion.
	 */
	for (i = 2; i < len - 1; i++)
		insn[i] = BPF_ALU64_IMM(BPF_MUL, R0, (i << 16) + i);

	insn[len - 1] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;

	return 0;
}

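/* Emit a 64-bit immediate load into @reg; always occupies two insn slots. */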
static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
{
	struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};

	memcpy(insns, tmp, sizeof(tmp));
	return 2;
}

/* Test an ALU shift operation for all valid shift values */
static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
				u8 mode, bool alu32)
{
	static const s64 regs[] = {
		0x0123456789abcdefLL, /* dword > 0, word < 0 */
		0xfedcba9876543210LL, /* dword < 0, word > 0 */
		0xfedcba0198765432LL, /* dword < 0, word < 0 */
		0x0123458967abcdefLL, /* dword > 0, word > 0 */
	};
	int bits = alu32 ? 32 : 64;
	int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
	struct bpf_insn *insn;
	int imm, k;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	for (k = 0; k < ARRAY_SIZE(regs); k++) {
		s64 reg = regs[k];

		i += __bpf_ld_imm64(&insn[i], R3, reg);

		for (imm = 0; imm < bits; imm++) {
			u64 val;

			/* Perform operation */
			insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
			insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
			if (alu32) {
				if (mode == BPF_K)
					insn[i++] = BPF_ALU32_IMM(op, R1, imm);
				else
					insn[i++] = BPF_ALU32_REG(op, R1, R2);
				switch (op) {
				case BPF_LSH:
					val = (u32)reg << imm;
					break;
				case BPF_RSH:
					val = (u32)reg >> imm;
					break;
				case BPF_ARSH:
					val = (u32)reg >> imm;
					if (imm > 0 && (reg & 0x80000000))
						val |= ~(u32)0 << (32 - imm);
					break;
				}
			} else {
				if (mode == BPF_K)
					insn[i++] = BPF_ALU64_IMM(op, R1, imm);
				else
					insn[i++] = BPF_ALU64_REG(op, R1, R2);
				switch (op) {
				case BPF_LSH:
					val = (u64)reg << imm;
					break;
				case BPF_RSH:
					val = (u64)reg >> imm;
					break;
				case BPF_ARSH:
					val = (u64)reg >> imm;
					if (imm > 0 && reg < 0)
						val |= ~(u64)0 << (64 - imm);
					break;
				}
			}

			/*
			 * When debugging a JIT that fails this test, one
			 * can write the immediate value to R0 here to find
			 * out which operand values fail.
			 */

			/* Load reference and check the result */
			i += __bpf_ld_imm64(&insn[i], R4, val);
			insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
			insn[i++] = BPF_EXIT_INSN();
		}
	}

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i > len);

	return 0;
}

static int bpf_fill_alu_lsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
}

static int bpf_fill_alu_rsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
}

static int bpf_fill_alu_arsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
}

static int bpf_fill_alu_lsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
}

static int bpf_fill_alu_rsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
}

static int bpf_fill_alu_arsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
}

static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
}

static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
}

static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
}

static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
}

static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
}

static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
{
	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
}

/*
 * Common operand pattern generator for exhaustive power-of-two magnitudes
 * tests. The block size parameters can be adjusted to increase/reduce the
 * number of combinations tested and thereby execution speed and memory
 * footprint.
 */

static inline s64 value(int msb, int delta, int sign)
{
	return sign * (1LL << msb) + delta;
}

static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
			      int dbits, int sbits, int block1, int block2,
			      int (*emit)(struct bpf_test*, void*,
					  struct bpf_insn*, s64, s64))
{
	static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
	struct bpf_insn *insns;
	int di, si, bt, db, sb;
	int count, len, k;
	int extra = 1 + 2;
	int i = 0;

	/* Total number of iterations for the two patterns */
	count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
	count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);

	/* Compute the maximum number of insns and allocate the buffer */
	len = extra + count * (*emit)(self, arg, NULL, 0, 0);
	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
	if (!insns)
		return -ENOMEM;

	/* Add head instruction(s) */
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	/*
	 * Pattern 1: all combinations of power-of-two magnitudes and sign,
	 * and with a block of contiguous values around each magnitude.
	 */
	for (di = 0; di < dbits - 1; di++)		/* Dst magnitudes */
		for (si = 0; si < sbits - 1; si++)	/* Src magnitudes */
			for (k = 0; k < ARRAY_SIZE(sgn); k++)	/* Sign combos */
				for (db = -(block1 / 2);
				     db < (block1 + 1) / 2; db++)
					for (sb = -(block1 / 2);
					     sb < (block1 + 1) / 2; sb++) {
						s64 dst, src;

						dst = value(di, db, sgn[k][0]);
						src = value(si, sb, sgn[k][1]);
						i += (*emit)(self, arg,
							     &insns[i],
							     dst, src);
					}
	/*
	 * Pattern 2: all combinations for a larger block of values
	 * for each power-of-two magnitude and sign, where the magnitude is
	 * the same for both operands.
	 */
	for (bt = 0; bt < max(dbits, sbits) - 1; bt++)	/* Magnitude   */
		for (k = 0; k < ARRAY_SIZE(sgn); k++)	/* Sign combos */
			for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
				for (sb = -(block2 / 2);
				     sb < (block2 + 1) / 2; sb++) {
					s64 dst, src;

					dst = value(bt % dbits, db, sgn[k][0]);
					src = value(bt % sbits, sb, sgn[k][1]);
					i += (*emit)(self, arg, &insns[i],
						     dst, src);
				}

	/* Append tail instructions */
	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insns[i++] = BPF_EXIT_INSN();
	BUG_ON(i > len);

	self->u.ptr.insns = insns;
	self->u.ptr.len = i;

	return 0;
}

/*
 * Block size parameters used in the pattern tests below. Tune as needed to
 * increase/reduce the number of combinations tested; see the following
 * examples.
 *	block	values per operand MSB
 *	----------------------------------------
 *	0	none
 *	1	(1 << MSB)
 *	2	(1 << MSB) + [-1, 0]
 *	3	(1 << MSB) + [-1, 0, 1]
 */
#define PATTERN_BLOCK1	1
#define PATTERN_BLOCK2	5
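
/*
 * With the current values, pattern 1 tests only the value (1 << MSB) for
 * each operand magnitude, while pattern 2 covers (1 << MSB) + [-2, 2].
 */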

/* Number of test runs for a pattern test */
#define NR_PATTERN_RUNS	1

/*
 * Exhaustive tests of ALU operations for all combinations of power-of-two
 * magnitudes of the operands, both for positive and negative values. The
 * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
 * emit different code depending on the magnitude of the immediate value.
 */

static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
{
	*res = 0;
	switch (op) {
	case BPF_MOV:
		*res = v2;
		break;
	case BPF_AND:
		*res = v1 & v2;
		break;
	case BPF_OR:
		*res = v1 | v2;
		break;
	case BPF_XOR:
		*res = v1 ^ v2;
		break;
	case BPF_ADD:
		*res = v1 + v2;
		break;
	case BPF_SUB:
		*res = v1 - v2;
		break;
	case BPF_MUL:
		*res = v1 * v2;
		break;
	case BPF_DIV:
		if (v2 == 0)
			return false;
		*res = div64_u64(v1, v2);
		break;
	case BPF_MOD:
		if (v2 == 0)
			return false;
		div64_u64_rem(v1, v2, res);
		break;
	}
	return true;
}

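/*
 * When __bpf_alu_result() cannot compute a reference result (division or
 * modulo by zero), the emitters below skip that operand combination.
 */
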
static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 7;

	if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R3, res);
		insns[i++] = BPF_ALU64_IMM(op, R1, imm);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}

static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 7;

	if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
		insns[i++] = BPF_ALU32_IMM(op, R1, imm);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}

static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 9;

	if (__bpf_alu_result(&res, dst, src, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		i += __bpf_ld_imm64(&insns[i], R3, res);
		insns[i++] = BPF_ALU64_REG(op, R1, R2);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}

static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;
	int i = 0;
	u64 res;

	if (!insns)
		return 9;

	if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
		insns[i++] = BPF_ALU32_REG(op, R1, R2);
		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
		insns[i++] = BPF_EXIT_INSN();
	}

	return i;
}

static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu64_imm);
}

static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu32_imm);
}

static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu64_reg);
}

static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_alu32_reg);
}

/* ALU64 immediate operations */
static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_MOV);
}

static int bpf_fill_alu64_and_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_AND);
}

static int bpf_fill_alu64_or_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_OR);
}

static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_XOR);
}

static int bpf_fill_alu64_add_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_ADD);
}

static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_SUB);
}

static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_MUL);
}

static int bpf_fill_alu64_div_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_DIV);
}

static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
{
	return __bpf_fill_alu64_imm(self, BPF_MOD);
}

/* ALU32 immediate operations */
static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MOV);
}

static int bpf_fill_alu32_and_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_AND);
}

static int bpf_fill_alu32_or_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_OR);
}

static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_XOR);
}

static int bpf_fill_alu32_add_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_ADD);
}

static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_SUB);
}

static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MUL);
}

static int bpf_fill_alu32_div_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_DIV);
}

static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
{
	return __bpf_fill_alu32_imm(self, BPF_MOD);
}

/* ALU64 register operations */
static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MOV);
}

static int bpf_fill_alu64_and_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_AND);
}

static int bpf_fill_alu64_or_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_OR);
}

static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_XOR);
}

static int bpf_fill_alu64_add_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_ADD);
}

static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_SUB);
}

static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MUL);
}

static int bpf_fill_alu64_div_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_DIV);
}

static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
{
	return __bpf_fill_alu64_reg(self, BPF_MOD);
}

/* ALU32 register operations */
static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MOV);
}

static int bpf_fill_alu32_and_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_AND);
}

static int bpf_fill_alu32_or_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_OR);
}

static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_XOR);
}

static int bpf_fill_alu32_add_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_ADD);
}

static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_SUB);
}

static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MUL);
}

static int bpf_fill_alu32_div_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_DIV);
}

static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
{
	return __bpf_fill_alu32_reg(self, BPF_MOD);
}

/*
 * Test the two-instruction 64-bit immediate load operation for all
 * power-of-two magnitudes of the immediate operand. For each MSB, a block
 * of immediate values centered around the power-of-two MSB is tested,
 * both for positive and negative values. The test is designed to verify
 * the operation for JITs that emit different code depending on the magnitude
 * of the immediate value. This is often the case if the native instruction
 * immediate field width is narrower than 32 bits.
 */
static int bpf_fill_ld_imm64(struct bpf_test *self)
{
	int block = 64; /* Increase for more tests per MSB position */
	int len = 3 + 8 * 63 * block * 2;
	struct bpf_insn *insn;
	int bit, adj, sign;
	int i = 0;

	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
	if (!insn)
		return -ENOMEM;

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);

	for (bit = 0; bit <= 62; bit++) {
		for (adj = -block / 2; adj < block / 2; adj++) {
			for (sign = -1; sign <= 1; sign += 2) {
				s64 imm = sign * ((1LL << bit) + adj);

				/* Perform operation */
				i += __bpf_ld_imm64(&insn[i], R1, imm);

				/* Load reference */
				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
							  (u32)(imm >> 32));
				insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
				insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);

				/* Check result */
				insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
				insn[i++] = BPF_EXIT_INSN();
			}
		}
	}

	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
	insn[i++] = BPF_EXIT_INSN();

	self->u.ptr.insns = insn;
	self->u.ptr.len = len;
	BUG_ON(i != len);

	return 0;
}

/*
 * Exhaustive tests of JMP operations for all combinations of power-of-two
 * magnitudes of the operands, both for positive and negative values. The
 * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
 * emit different code depending on the magnitude of the immediate value.
 */

static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
{
	switch (op) {
	case BPF_JSET:
		return !!(v1 & v2);
	case BPF_JEQ:
		return v1 == v2;
	case BPF_JNE:
		return v1 != v2;
	case BPF_JGT:
		return (u64)v1 > (u64)v2;
	case BPF_JGE:
		return (u64)v1 >= (u64)v2;
	case BPF_JLT:
		return (u64)v1 < (u64)v2;
	case BPF_JLE:
		return (u64)v1 <= (u64)v2;
	case BPF_JSGT:
		return v1 > v2;
	case BPF_JSGE:
		return v1 >= v2;
	case BPF_JSLT:
		return v1 < v2;
	case BPF_JSLE:
		return v1 <= v2;
	}
	return false;
}

static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
			      struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;

	if (insns) {
		bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
		int i = 0;

		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);

		i += __bpf_ld_imm64(&insns[i], R1, dst);
		insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
		if (!match)
			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
		insns[i++] = BPF_EXIT_INSN();

		return i;
	}

	return 5 + 1;
}

static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 imm)
{
	int op = *(int *)arg;

	if (insns) {
		bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
		int i = 0;

		i += __bpf_ld_imm64(&insns[i], R1, dst);
		insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
		if (!match)
			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
		insns[i++] = BPF_EXIT_INSN();

		return i;
	}

	return 5;
}

static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
			      struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;

	if (insns) {
		bool match = __bpf_match_jmp_cond(dst, src, op);
		int i = 0;

		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
		if (!match)
			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
		insns[i++] = BPF_EXIT_INSN();

		return i;
	}

	return 7;
}

static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
				struct bpf_insn *insns, s64 dst, s64 src)
{
	int op = *(int *)arg;

	if (insns) {
		bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
		int i = 0;

		i += __bpf_ld_imm64(&insns[i], R1, dst);
		i += __bpf_ld_imm64(&insns[i], R2, src);
		insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
		if (!match)
			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
		insns[i++] = BPF_EXIT_INSN();

		return i;
	}

	return 7;
}

static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp_imm);
}

static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 32,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp32_imm);
}

static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp_reg);
}

static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
{
	return __bpf_fill_pattern(self, &op, 64, 64,
				  PATTERN_BLOCK1, PATTERN_BLOCK2,
				  &__bpf_emit_jmp32_reg);
}

/* JMP immediate tests */
static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSET);
}

static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JEQ);
}

static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JNE);
}

static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JGT);
}

static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JGE);
}

static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JLT);
}

static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JLE);
}

static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSGT);
}

static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSGE);
}

static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSLT);
}

static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp_imm(self, BPF_JSLE);
}

/* JMP32 immediate tests */
static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSET);
}

static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JEQ);
}

static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JNE);
}

static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JGT);
}

static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JGE);
}

static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JLT);
}

static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JLE);
}

static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSGT);
}

static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSGE);
}

static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSLT);
}

static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
{
	return __bpf_fill_jmp32_imm(self, BPF_JSLE);
}

/* JMP register tests */
static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSET);
}

static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JEQ);
}

static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JNE);
}

static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JGT);
}

static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JGE);
}

static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JLT);
}

static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JLE);
}

static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSGT);
}

static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSGE);
}

static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSLT);
}

static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp_reg(self, BPF_JSLE);
}

/* JMP32 register tests */
static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSET);
}

static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JEQ);
}

static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JNE);
}

static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JGT);
}

static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JGE);
}

static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JLT);
}

static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JLE);
}

static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSGT);
}

static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSGE);
}

static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSLT);
}

static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
{
	return __bpf_fill_jmp32_reg(self, BPF_JSLE);
}

/*
 * Set up a sequence of staggered jumps, forwards and backwards with
 * increasing offset. This tests the conversion of relative jumps to
 * JITed native jumps. On some architectures, for example MIPS, a large
 * PC-relative jump offset may overflow the immediate field of the native
 * conditional branch instruction, triggering a conversion to use an
 * absolute jump instead. Since this changes the jump offsets, another
 * offset computation pass is necessary, and that may in turn trigger
 * another branch conversion. This jump sequence is particularly nasty
 * in that regard.
 *
 * The sequence generation is parameterized by size and jump type.
 * The size must be even, and the expected result is always size + 1.
 * Below is an example with size=8 and result=9.
 *
 * ________________________Start
 * R0 = 0
 * R1 = r1
 * R2 = r2
 * ,------- JMP +4 * 3______________Preamble: 4 insns
 * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
 * | | R0 = 8 |
 * | | JMP +7 * 3 ------------------------.
 * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------. | |
 * | | | R0 = 6 | | |
 * | | | JMP +5 * 3 ------------------. | |
 * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------. | | | |
 * | | | | R0 = 4 | | | | |
 * | | | | JMP +3 * 3 ------------. | | | |
 * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--. | | | | | |
 * | | | | | R0 = 2 | | | | | | |
 * | | | | | JMP +1 * 3 ------. | | | | | |
 * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1 1 2 3 4 5 6 7 8 loc
 * | | | | | R0 = 1 -1 +2 -3 +4 -5 +6 -7 +8 off
 * | | | | | JMP -2 * 3 ---' | | | | | | |
 * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----' | | | | | |
 * | | | | | | R0 = 3 | | | | | |
 * | | | | | | JMP -4 * 3 ---------' | | | | |
 * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------' | | | |
 * | | | | | | | R0 = 5 | | | |
 * | | | | | | | JMP -6 * 3 ---------------' | | |
 * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------' | |
 * | | | | | | | | R0 = 7 | |
 * | | Error | | | JMP -8 * 3 ---------------------' |
 * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
 * | | | | | | | | | R0 = 9__________________Sequence: 3 * size - 1 insns
 * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
 *
 */

/* The maximum size parameter */
#define MAX_STAGGERED_JMP_SIZE	((0x7fff / 3) & ~1)
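/*
 * The jump offset field of a BPF instruction is a signed 16-bit value, so
 * 0x7fff is the largest representable offset; each sequence step is three
 * instructions and the size must be even, hence the expression above.
 */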
1591
1592/* We use a reduced number of iterations to get a reasonable execution time */
1593#define NR_STAGGERED_JMP_RUNS 10
1594
1595static int __bpf_fill_staggered_jumps(struct bpf_test *self,
1596 const struct bpf_insn *jmp,
1597 u64 r1, u64 r2)
1598{
1599 int size = self->test[0].result - 1;
1600 int len = 4 + 3 * (size + 1);
1601 struct bpf_insn *insns;
1602 int off, ind;
1603
1604 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1605 if (!insns)
1606 return -ENOMEM;
1607
1608 /* Preamble */
1609 insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
1610 insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
1611 insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
1612 insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);
1613
1614 /* Sequence */
1615 for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
1616 struct bpf_insn *ins = &insns[4 + 3 * ind];
1617 int loc;
1618
1619 if (off == 0)
1620 off--;
1621
1622 loc = abs(off);
1623 ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
1624 3 * (size - ind) + 1);
1625 ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
1626 ins[2] = *jmp;
1627 ins[2].off = 3 * (off - 1);
1628 }
1629
1630 /* Return */
1631 insns[len - 1] = BPF_EXIT_INSN();
1632
1633 self->u.ptr.insns = insns;
1634 self->u.ptr.len = len;
1635
1636 return 0;
1637}
1638
1639/* 64-bit unconditional jump */
1640static int bpf_fill_staggered_ja(struct bpf_test *self)
1641{
1642 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);
1643
1644 return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
1645}
1646
1647/* 64-bit immediate jumps */
1648static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
1649{
1650 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
1651
1652 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
1653}
1654
1655static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
1656{
1657 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
1658
1659 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
1660}
1661
1662static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
1663{
1664 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
1665
1666 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
1667}
1668
1669static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
1670{
1671 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
1672
1673 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
1674}
1675
1676static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
1677{
1678 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
1679
1680 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
1681}
1682
1683static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
1684{
1685 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
1686
1687 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
1688}
1689
1690static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
1691{
1692 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
1693
1694 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
1695}
1696
1697static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
1698{
1699 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
1700
1701 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
1702}
1703
1704static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
1705{
1706 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
1707
1708 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
1709}
1710
1711static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
1712{
1713 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
1714
1715 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
1716}
1717
1718static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
1719{
1720 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
1721
1722 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
1723}
1724
1725/* 64-bit register jumps */
1726static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
1727{
1728 struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
1729
1730 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
1731}
1732
1733static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
1734{
1735 struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
1736
1737 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
1738}
1739
1740static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
1741{
1742 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
1743
1744 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
1745}
1746
1747static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
1748{
1749 struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
1750
1751 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
1752}
1753
1754static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
1755{
1756 struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
1757
1758 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
1759}
1760
1761static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
1762{
1763 struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
1764
1765 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
1766}
1767
1768static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
1769{
1770 struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
1771
1772 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
1773}
1774
1775static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
1776{
1777 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
1778
1779 return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
1780}
1781
1782static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
1783{
1784 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
1785
1786 return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
1787}
1788
1789static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
1790{
1791 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
1792
1793 return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
1794}
1795
1796static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
1797{
1798 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
1799
1800 return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
1801}
1802
1803/* 32-bit immediate jumps */
1804static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
1805{
1806 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
1807
1808 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
1809}
1810
1811static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
1812{
1813 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
1814
1815 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
1816}
1817
1818static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
1819{
1820 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
1821
1822 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
1823}
1824
1825static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
1826{
1827 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
1828
1829 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
1830}
1831
1832static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
1833{
1834 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
1835
1836 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
1837}
1838
1839static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
1840{
1841 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
1842
1843 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
1844}
1845
1846static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
1847{
1848 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
1849
1850 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
1851}
1852
1853static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
1854{
1855 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
1856
1857 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
1858}
1859
1860static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
1861{
1862 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
1863
1864 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
1865}
1866
1867static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
1868{
1869 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
1870
1871 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
1872}
1873
1874static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
1875{
1876 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
1877
1878 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
1879}
1880
1881/* 32-bit register jumps */
1882static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
1883{
1884 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
1885
1886 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
1887}
1888
1889static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
1890{
1891 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
1892
1893 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
1894}
1895
1896static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
1897{
1898 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
1899
1900 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
1901}
1902
1903static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
1904{
1905 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
1906
1907 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
1908}
1909
1910static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
1911{
1912 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
1913
1914 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
1915}
1916
1917static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
1918{
1919 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
1920
1921 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
1922}
1923
1924static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
1925{
1926 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
1927
1928 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
1929}
1930
1931static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
1932{
1933 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
1934
1935 return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
1936}
1937
1938static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
1939{
1940 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
1941
1942 return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
1943}
1944
1945static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
1946{
1947 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
1948
1949 return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
1950}
1951
1952static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
1953{
1954 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
1955
1956 return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
1957}
1958
Johan Almbladha5a36542021-09-14 11:18:33 +02001959
Alexei Starovoitov64a89462014-05-08 14:10:52 -07001960static struct bpf_test tests[] = {
1961 {
1962 "TAX",
Andrew Mortonece80492014-05-22 10:16:46 -07001963 .u.insns = {
Alexei Starovoitov64a89462014-05-08 14:10:52 -07001964 BPF_STMT(BPF_LD | BPF_IMM, 1),
1965 BPF_STMT(BPF_MISC | BPF_TAX, 0),
1966 BPF_STMT(BPF_LD | BPF_IMM, 2),
1967 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
1968 BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
1969 BPF_STMT(BPF_MISC | BPF_TAX, 0),
1970 BPF_STMT(BPF_LD | BPF_LEN, 0),
1971 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
1972 BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
1973 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
1974 BPF_STMT(BPF_RET | BPF_A, 0)
1975 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02001976 CLASSIC,
Alexei Starovoitov64a89462014-05-08 14:10:52 -07001977 { 10, 20, 30, 40, 50 },
1978 { { 2, 10 }, { 3, 20 }, { 4, 30 } },
1979 },
1980 {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07001981 "TXA",
Andrew Mortonece80492014-05-22 10:16:46 -07001982 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07001983 BPF_STMT(BPF_LDX | BPF_LEN, 0),
1984 BPF_STMT(BPF_MISC | BPF_TXA, 0),
1985 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
1986 BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
1987 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02001988 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07001989 { 10, 20, 30, 40, 50 },
1990 { { 1, 2 }, { 3, 6 }, { 4, 8 } },
1991 },
1992 {
1993 "ADD_SUB_MUL_K",
Andrew Mortonece80492014-05-22 10:16:46 -07001994 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07001995 BPF_STMT(BPF_LD | BPF_IMM, 1),
1996 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
1997 BPF_STMT(BPF_LDX | BPF_IMM, 3),
1998 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
1999 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
2000 BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
2001 BPF_STMT(BPF_RET | BPF_A, 0)
2002 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002003 CLASSIC | FLAG_NO_DATA,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002004 { },
2005 { { 0, 0xfffffffd } }
2006 },
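	/*
	 * DIV_MOD_KX walkthrough (classic BPF DIV/MOD are unsigned 32-bit):
	 * A = 8 / 2 = 4 (-> X); A = 0xffffffff / 4 = 0x3fffffff (-> X);
	 * A = 0xffffffff / 0x70000000 = 2 (-> X); A = 0xffffffff % 2 = 1 (-> X);
	 * A = 0xffffffff % 0x70000000 = 0x1fffffff; A += X gives 0x20000000.
	 */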
2007 {
Denis Kirjanov6867b172014-12-01 13:12:25 +03002008 "DIV_MOD_KX",
Andrew Mortonece80492014-05-22 10:16:46 -07002009 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002010 BPF_STMT(BPF_LD | BPF_IMM, 8),
2011 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
2012 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2013 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
2014 BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
2015 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2016 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
2017 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
Denis Kirjanov6867b172014-12-01 13:12:25 +03002018 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2019 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
2020 BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
2021 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2022 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
2023 BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002024 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
2025 BPF_STMT(BPF_RET | BPF_A, 0)
2026 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002027 CLASSIC | FLAG_NO_DATA,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002028 { },
Denis Kirjanov6867b172014-12-01 13:12:25 +03002029 { { 0, 0x20000000 } }
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002030 },
2031 {
2032 "AND_OR_LSH_K",
Andrew Mortonece80492014-05-22 10:16:46 -07002033 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002034 BPF_STMT(BPF_LD | BPF_IMM, 0xff),
2035 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
2036 BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
2037 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2038 BPF_STMT(BPF_LD | BPF_IMM, 0xf),
2039 BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
2040 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
2041 BPF_STMT(BPF_RET | BPF_A, 0)
2042 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002043 CLASSIC | FLAG_NO_DATA,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002044 { },
2045 { { 0, 0x800000ff }, { 1, 0x800000ff } },
2046 },
2047 {
Chema Gonzaleze9d94502014-05-30 10:15:12 -07002048 "LD_IMM_0",
2049 .u.insns = {
2050 BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
2051 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
2052 BPF_STMT(BPF_RET | BPF_K, 0),
2053 BPF_STMT(BPF_RET | BPF_K, 1),
2054 },
2055 CLASSIC,
2056 { },
2057 { { 1, 1 } },
2058 },
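	/*
	 * Note for the LD_IND/LD_ABS tests below: a classic BPF load from an
	 * offset outside the packet terminates the program with a return
	 * value of 0, which is why most of the expected results here are 0.
	 */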
2059 {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002060 "LD_IND",
Andrew Mortonece80492014-05-22 10:16:46 -07002061 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002062 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2063 BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
2064 BPF_STMT(BPF_RET | BPF_K, 1)
2065 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002066 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002067 { },
2068 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
2069 },
2070 {
2071 "LD_ABS",
Andrew Mortonece80492014-05-22 10:16:46 -07002072 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002073 BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
2074 BPF_STMT(BPF_RET | BPF_K, 1)
2075 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002076 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002077 { },
2078 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
2079 },
2080 {
2081 "LD_ABS_LL",
Andrew Mortonece80492014-05-22 10:16:46 -07002082 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002083 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
2084 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2085 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
2086 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
2087 BPF_STMT(BPF_RET | BPF_A, 0)
2088 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002089 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002090 { 1, 2, 3 },
2091 { { 1, 0 }, { 2, 3 } },
2092 },
2093 {
2094 "LD_IND_LL",
Andrew Mortonece80492014-05-22 10:16:46 -07002095 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002096 BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
2097 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2098 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
2099 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2100 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
2101 BPF_STMT(BPF_RET | BPF_A, 0)
2102 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002103 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002104 { 1, 2, 3, 0xff },
2105 { { 1, 1 }, { 3, 3 }, { 4, 0xff } },
2106 },
2107 {
2108 "LD_ABS_NET",
Andrew Mortonece80492014-05-22 10:16:46 -07002109 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002110 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
2111 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2112 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
2113 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
2114 BPF_STMT(BPF_RET | BPF_A, 0)
2115 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002116 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002117 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
2118 { { 15, 0 }, { 16, 3 } },
2119 },
2120 {
2121 "LD_IND_NET",
Andrew Mortonece80492014-05-22 10:16:46 -07002122 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002123 BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
2124 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2125 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
2126 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2127 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
2128 BPF_STMT(BPF_RET | BPF_A, 0)
2129 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002130 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002131 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
2132 { { 14, 0 }, { 15, 1 }, { 17, 3 } },
2133 },
2134 {
2135 "LD_PKTTYPE",
Andrew Mortonece80492014-05-22 10:16:46 -07002136 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002137 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2138 SKF_AD_OFF + SKF_AD_PKTTYPE),
2139 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
2140 BPF_STMT(BPF_RET | BPF_K, 1),
2141 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2142 SKF_AD_OFF + SKF_AD_PKTTYPE),
2143 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
2144 BPF_STMT(BPF_RET | BPF_K, 1),
2145 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2146 SKF_AD_OFF + SKF_AD_PKTTYPE),
2147 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
2148 BPF_STMT(BPF_RET | BPF_K, 1),
2149 BPF_STMT(BPF_RET | BPF_A, 0)
2150 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002151 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002152 { },
2153 { { 1, 3 }, { 10, 3 } },
2154 },
2155 {
2156 "LD_MARK",
Andrew Mortonece80492014-05-22 10:16:46 -07002157 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002158 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2159 SKF_AD_OFF + SKF_AD_MARK),
2160 BPF_STMT(BPF_RET | BPF_A, 0)
2161 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002162 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002163 { },
2164 { { 1, SKB_MARK}, { 10, SKB_MARK} },
2165 },
2166 {
2167 "LD_RXHASH",
Andrew Mortonece80492014-05-22 10:16:46 -07002168 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002169 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2170 SKF_AD_OFF + SKF_AD_RXHASH),
2171 BPF_STMT(BPF_RET | BPF_A, 0)
2172 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002173 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002174 { },
2175 { { 1, SKB_HASH}, { 10, SKB_HASH} },
2176 },
2177 {
2178 "LD_QUEUE",
Andrew Mortonece80492014-05-22 10:16:46 -07002179 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002180 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2181 SKF_AD_OFF + SKF_AD_QUEUE),
2182 BPF_STMT(BPF_RET | BPF_A, 0)
2183 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002184 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002185 { },
2186 { { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
2187 },
2188 {
2189 "LD_PROTOCOL",
Andrew Mortonece80492014-05-22 10:16:46 -07002190 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002191 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
2192 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
2193 BPF_STMT(BPF_RET | BPF_K, 0),
2194 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2195 SKF_AD_OFF + SKF_AD_PROTOCOL),
2196 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2197 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
2198 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
2199 BPF_STMT(BPF_RET | BPF_K, 0),
2200 BPF_STMT(BPF_MISC | BPF_TXA, 0),
2201 BPF_STMT(BPF_RET | BPF_A, 0)
2202 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002203 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002204 { 10, 20, 30 },
2205 { { 10, ETH_P_IP }, { 100, ETH_P_IP } },
2206 },
2207 {
2208 "LD_VLAN_TAG",
Andrew Mortonece80492014-05-22 10:16:46 -07002209 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002210 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2211 SKF_AD_OFF + SKF_AD_VLAN_TAG),
2212 BPF_STMT(BPF_RET | BPF_A, 0)
2213 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002214 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002215 { },
2216 {
Michał Mirosław0c4b2d32018-11-10 19:58:36 +01002217 { 1, SKB_VLAN_TCI },
2218 { 10, SKB_VLAN_TCI }
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002219 },
2220 },
2221 {
2222 "LD_VLAN_TAG_PRESENT",
Andrew Mortonece80492014-05-22 10:16:46 -07002223 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002224 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2225 SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
2226 BPF_STMT(BPF_RET | BPF_A, 0)
2227 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002228 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002229 { },
2230 {
Michał Mirosław0c4b2d32018-11-10 19:58:36 +01002231 { 1, SKB_VLAN_PRESENT },
2232 { 10, SKB_VLAN_PRESENT }
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002233 },
2234 },
2235 {
2236 "LD_IFINDEX",
Andrew Mortonece80492014-05-22 10:16:46 -07002237 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002238 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2239 SKF_AD_OFF + SKF_AD_IFINDEX),
2240 BPF_STMT(BPF_RET | BPF_A, 0)
2241 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002242 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002243 { },
2244 { { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
2245 },
2246 {
2247 "LD_HATYPE",
Andrew Mortonece80492014-05-22 10:16:46 -07002248 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002249 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2250 SKF_AD_OFF + SKF_AD_HATYPE),
2251 BPF_STMT(BPF_RET | BPF_A, 0)
2252 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002253 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002254 { },
2255 { { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
2256 },
2257 {
2258 "LD_CPU",
Andrew Mortonece80492014-05-22 10:16:46 -07002259 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002260 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2261 SKF_AD_OFF + SKF_AD_CPU),
2262 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2263 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2264 SKF_AD_OFF + SKF_AD_CPU),
2265 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
2266 BPF_STMT(BPF_RET | BPF_A, 0)
2267 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002268 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002269 { },
2270 { { 1, 0 }, { 10, 0 } },
2271 },
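	/*
	 * For SKF_AD_NLATTR, A holds the offset at which to start searching
	 * and X the netlink attribute type to find; the load returns the
	 * attribute's offset, or 0 if none is found or the packet is too
	 * short. The data below encodes, in host byte order, an attribute of
	 * type 2 at offset 2 and one of type 3 at offset 6, hence the
	 * expected result 6. SKF_AD_NLATTR_NEST searches the same way, but
	 * inside the attribute found at offset A.
	 */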
2272 {
2273 "LD_NLATTR",
Andrew Mortonece80492014-05-22 10:16:46 -07002274 .u.insns = {
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002275 BPF_STMT(BPF_LDX | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002276 BPF_STMT(BPF_MISC | BPF_TXA, 0),
2277 BPF_STMT(BPF_LDX | BPF_IMM, 3),
2278 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2279 SKF_AD_OFF + SKF_AD_NLATTR),
2280 BPF_STMT(BPF_RET | BPF_A, 0)
2281 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002282 CLASSIC,
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002283#ifdef __BIG_ENDIAN
2284 { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
2285#else
2286 { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
2287#endif
2288 { { 4, 0 }, { 20, 6 } },
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002289 },
2290 {
2291 "LD_NLATTR_NEST",
Andrew Mortonece80492014-05-22 10:16:46 -07002292 .u.insns = {
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002293 BPF_STMT(BPF_LD | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002294 BPF_STMT(BPF_LDX | BPF_IMM, 3),
2295 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2296 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002297 BPF_STMT(BPF_LD | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002298 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2299 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002300 BPF_STMT(BPF_LD | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002301 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2302 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002303 BPF_STMT(BPF_LD | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002304 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2305 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002306 BPF_STMT(BPF_LD | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002307 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2308 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002309 BPF_STMT(BPF_LD | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002310 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2311 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002312 BPF_STMT(BPF_LD | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002313 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2314 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002315 BPF_STMT(BPF_LD | BPF_IMM, 2),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002316 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2317 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
2318 BPF_STMT(BPF_RET | BPF_A, 0)
2319 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002320 CLASSIC,
Alexei Starovoitovdf6d0f9832014-06-06 17:48:20 -07002321#ifdef __BIG_ENDIAN
2322 { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
2323#else
2324 { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
2325#endif
2326 { { 4, 0 }, { 20, 10 } },
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002327 },
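	/*
	 * SKF_AD_PAY_OFFSET uses the flow dissector to locate the start of
	 * the payload; for the Ethernet + IPv4 + ICMP packet below that is
	 * 14 + 20 + 8 = 42 bytes in, and 0 when too little of the packet is
	 * available to dissect.
	 */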
2328 {
2329 "LD_PAYLOAD_OFF",
Andrew Mortonece80492014-05-22 10:16:46 -07002330 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002331 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2332 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
2333 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2334 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
2335 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2336 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
2337 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2338 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
2339 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2340 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
2341 BPF_STMT(BPF_RET | BPF_A, 0)
2342 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002343 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002344 /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
2345 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
2346 * id 9737, seq 1, length 64
2347 */
2348 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2349 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2350 0x08, 0x00,
2351 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
2352 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
2353 { { 30, 0 }, { 100, 42 } },
2354 },
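	/* SKF_AD_ALU_XOR_X returns A ^ X: 10 ^ 300 = 0xa ^ 0x12c = 0x126. */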
2355 {
2356 "LD_ANC_XOR",
Andrew Mortonece80492014-05-22 10:16:46 -07002357 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002358 BPF_STMT(BPF_LD | BPF_IMM, 10),
2359 BPF_STMT(BPF_LDX | BPF_IMM, 300),
2360 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
2361 SKF_AD_OFF + SKF_AD_ALU_XOR_X),
2362 BPF_STMT(BPF_RET | BPF_A, 0)
2363 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002364 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002365 { },
Nathan Chancellor09845432019-08-18 21:34:20 -07002366 { { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002367 },
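	/*
	 * SPILL_FILL exercises the classic BPF scratch memory slots M[]:
	 * BPF_ST/BPF_STX store A/X into a slot, and BPF_LD|BPF_MEM /
	 * BPF_LDX|BPF_MEM read them back, so JITs must spill and reload
	 * these slots correctly.
	 */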
2368 {
2369 "SPILL_FILL",
Andrew Mortonece80492014-05-22 10:16:46 -07002370 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002371 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2372 BPF_STMT(BPF_LD | BPF_IMM, 2),
2373 BPF_STMT(BPF_ALU | BPF_RSH, 1),
2374 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
2375 BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
2376 BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
2377 BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
2378 BPF_STMT(BPF_STX, 15), /* M3 = len */
2379 BPF_STMT(BPF_LDX | BPF_MEM, 1),
2380 BPF_STMT(BPF_LD | BPF_MEM, 2),
2381 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
2382 BPF_STMT(BPF_LDX | BPF_MEM, 15),
2383 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
2384 BPF_STMT(BPF_RET | BPF_A, 0)
2385 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002386 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002387 { },
2388 { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
2389 },
2390 {
2391 "JEQ",
Andrew Mortonece80492014-05-22 10:16:46 -07002392 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002393 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2394 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
2395 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
2396 BPF_STMT(BPF_RET | BPF_K, 1),
2397 BPF_STMT(BPF_RET | BPF_K, MAX_K)
2398 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002399 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002400 { 3, 3, 3, 3, 3 },
2401 { { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
2402 },
2403 {
2404 "JGT",
Andrew Mortonece80492014-05-22 10:16:46 -07002405 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002406 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2407 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
2408 BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
2409 BPF_STMT(BPF_RET | BPF_K, 1),
2410 BPF_STMT(BPF_RET | BPF_K, MAX_K)
2411 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002412 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002413 { 4, 4, 4, 3, 3 },
2414 { { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
2415 },
2416 {
Daniel Borkmann92b31a92017-08-10 01:39:55 +02002417 "JGE (jt 0), test 1",
2418 .u.insns = {
2419 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2420 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
2421 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
2422 BPF_STMT(BPF_RET | BPF_K, 1),
2423 BPF_STMT(BPF_RET | BPF_K, MAX_K)
2424 },
2425 CLASSIC,
2426 { 4, 4, 4, 3, 3 },
2427 { { 2, 0 }, { 3, 1 }, { 4, 1 } },
2428 },
2429 {
2430 "JGE (jt 0), test 2",
2431 .u.insns = {
2432 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2433 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
2434 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
2435 BPF_STMT(BPF_RET | BPF_K, 1),
2436 BPF_STMT(BPF_RET | BPF_K, MAX_K)
2437 },
2438 CLASSIC,
2439 { 4, 4, 5, 3, 3 },
2440 { { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
2441 },
2442 {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002443 "JGE",
Andrew Mortonece80492014-05-22 10:16:46 -07002444 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002445 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2446 BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
2447 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
2448 BPF_STMT(BPF_RET | BPF_K, 10),
2449 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
2450 BPF_STMT(BPF_RET | BPF_K, 20),
2451 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
2452 BPF_STMT(BPF_RET | BPF_K, 30),
2453 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
2454 BPF_STMT(BPF_RET | BPF_K, 40),
2455 BPF_STMT(BPF_RET | BPF_K, MAX_K)
2456 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002457 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002458 { 1, 2, 3, 4, 5 },
2459 { { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
2460 },
2461 {
2462 "JSET",
Andrew Mortonece80492014-05-22 10:16:46 -07002463 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002464 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
2465 BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
2466 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
2467 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
2468 BPF_STMT(BPF_LDX | BPF_LEN, 0),
2469 BPF_STMT(BPF_MISC | BPF_TXA, 0),
2470 BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
2471 BPF_STMT(BPF_MISC | BPF_TAX, 0),
2472 BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
2473 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
2474 BPF_STMT(BPF_RET | BPF_K, 10),
2475 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
2476 BPF_STMT(BPF_RET | BPF_K, 20),
2477 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
2478 BPF_STMT(BPF_RET | BPF_K, 30),
2479 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
2480 BPF_STMT(BPF_RET | BPF_K, 30),
2481 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
2482 BPF_STMT(BPF_RET | BPF_K, 30),
2483 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
2484 BPF_STMT(BPF_RET | BPF_K, 30),
2485 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
2486 BPF_STMT(BPF_RET | BPF_K, 30),
2487 BPF_STMT(BPF_RET | BPF_K, MAX_K)
2488 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002489 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002490 { 0, 0xAA, 0x55, 1 },
2491 { { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
2492 },
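	/*
	 * Roughly the filter libpcap generates for "port 22": accept IPv6
	 * (0x86dd) or IPv4 (0x0800), require SCTP (0x84), TCP (0x6) or UDP
	 * (0x11), skip IPv4 fragments, locate the transport header via
	 * BPF_MSH and match source or destination port 22; a match returns
	 * 0xffff.
	 */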
2493 {
Alexei Starovoitov64a89462014-05-08 14:10:52 -07002494 "tcpdump port 22",
Andrew Mortonece80492014-05-22 10:16:46 -07002495 .u.insns = {
Daniel Borkmannce25b682014-05-26 20:17:35 +02002496 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
2497 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
2498 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
2499 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
2500 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
2501 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
2502 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
2503 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
2504 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
2505 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
2506 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
2507 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
2508 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
2509 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
2510 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
2511 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
2512 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
2513 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
2514 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
2515 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
2516 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
2517 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
2518 BPF_STMT(BPF_RET | BPF_K, 0xffff),
2519 BPF_STMT(BPF_RET | BPF_K, 0),
Alexei Starovoitov64a89462014-05-08 14:10:52 -07002520 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002521 CLASSIC,
Alexei Starovoitov64a89462014-05-08 14:10:52 -07002522	 /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4 (0x0800)
2523 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
2524 * seq 1305692979:1305693027, ack 3650467037, win 65535,
2525 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
2526 */
2527 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
2528 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
2529 0x08, 0x00,
2530 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
2531 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
2532 0x0a, 0x01, 0x01, 0x95, /* ip src */
2533 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
2534 0xc2, 0x24,
2535 0x00, 0x16 /* dst port */ },
2536 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
2537 },
2538 {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002539 "tcpdump complex",
Andrew Mortonece80492014-05-22 10:16:46 -07002540 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002541 /* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
2542 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
2543 * (len > 115 or len < 30000000000)' -d
2544 */
Daniel Borkmannce25b682014-05-26 20:17:35 +02002545 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
2546 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
2547 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
2548 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
2549 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
2550 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
2551 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
2552 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
2553 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
2554 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
2555 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
2556 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
2557 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
2558 BPF_STMT(BPF_ST, 1),
2559 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
2560 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
2561 BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
2562 BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
2563 BPF_STMT(BPF_LD | BPF_MEM, 1),
2564 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
2565 BPF_STMT(BPF_ST, 5),
2566 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
2567 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
2568 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
2569 BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
2570 BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
2571 BPF_STMT(BPF_LD | BPF_MEM, 5),
2572 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
2573 BPF_STMT(BPF_LD | BPF_LEN, 0),
2574 BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
2575 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
2576 BPF_STMT(BPF_RET | BPF_K, 0xffff),
2577 BPF_STMT(BPF_RET | BPF_K, 0),
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002578 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002579 CLASSIC,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002580 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
2581 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
2582 0x08, 0x00,
2583 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
2584 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
2585 0x0a, 0x01, 0x01, 0x95, /* ip src */
2586 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
2587 0xc2, 0x24,
2588 0x00, 0x16 /* dst port */ },
2589 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
2590 },
2591 {
2592 "RET_A",
Andrew Mortonece80492014-05-22 10:16:46 -07002593 .u.insns = {
Zhen Lei53b0fe32021-07-07 18:07:28 -07002594 /* check that uninitialized X and A contain zeros */
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002595 BPF_STMT(BPF_MISC | BPF_TXA, 0),
2596 BPF_STMT(BPF_RET | BPF_A, 0)
2597 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002598 CLASSIC,
2599 { },
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002600 { {1, 0}, {2, 0} },
2601 },
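	/*
	 * First of the eBPF (INTERNAL) tests in this table: programs are
	 * written as struct bpf_insn and leave their result in R0 at exit.
	 */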
2602 {
2603 "INT: ADD trivial",
Andrew Mortonece80492014-05-22 10:16:46 -07002604 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002605 BPF_ALU64_IMM(BPF_MOV, R1, 1),
2606 BPF_ALU64_IMM(BPF_ADD, R1, 2),
2607 BPF_ALU64_IMM(BPF_MOV, R2, 3),
2608 BPF_ALU64_REG(BPF_SUB, R1, R2),
2609 BPF_ALU64_IMM(BPF_ADD, R1, -1),
2610 BPF_ALU64_IMM(BPF_MUL, R1, 3),
2611 BPF_ALU64_REG(BPF_MOV, R0, R1),
2612 BPF_EXIT_INSN(),
2613 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002614 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002615 { },
2616 { { 0, 0xfffffffd } }
2617 },
2618 {
2619 "INT: MUL_X",
Andrew Mortonece80492014-05-22 10:16:46 -07002620 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002621 BPF_ALU64_IMM(BPF_MOV, R0, -1),
2622 BPF_ALU64_IMM(BPF_MOV, R1, -1),
2623 BPF_ALU64_IMM(BPF_MOV, R2, 3),
2624 BPF_ALU64_REG(BPF_MUL, R1, R2),
2625 BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
2626 BPF_EXIT_INSN(),
2627 BPF_ALU64_IMM(BPF_MOV, R0, 1),
2628 BPF_EXIT_INSN(),
2629 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002630 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002631 { },
2632 { { 0, 1 } }
2633 },
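	/*
	 * MUL_X2: a 32-bit MOV of -1 zero-extends, so R1 = 0xffffffff and
	 * the 64-bit multiply by 3 yields 0x2fffffffd (>> 8 == 0x2ffffff).
	 * MUL32_X: the 32-bit multiply keeps only the low word, 0xfffffffd
	 * (>> 8 == 0xffffff).
	 */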
2634 {
2635 "INT: MUL_X2",
Andrew Mortonece80492014-05-22 10:16:46 -07002636 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002637 BPF_ALU32_IMM(BPF_MOV, R0, -1),
2638 BPF_ALU32_IMM(BPF_MOV, R1, -1),
2639 BPF_ALU32_IMM(BPF_MOV, R2, 3),
2640 BPF_ALU64_REG(BPF_MUL, R1, R2),
2641 BPF_ALU64_IMM(BPF_RSH, R1, 8),
2642 BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
2643 BPF_EXIT_INSN(),
2644 BPF_ALU32_IMM(BPF_MOV, R0, 1),
2645 BPF_EXIT_INSN(),
2646 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002647 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002648 { },
2649 { { 0, 1 } }
2650 },
2651 {
2652 "INT: MUL32_X",
Andrew Mortonece80492014-05-22 10:16:46 -07002653 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002654 BPF_ALU32_IMM(BPF_MOV, R0, -1),
2655 BPF_ALU64_IMM(BPF_MOV, R1, -1),
2656 BPF_ALU32_IMM(BPF_MOV, R2, 3),
2657 BPF_ALU32_REG(BPF_MUL, R1, R2),
2658 BPF_ALU64_IMM(BPF_RSH, R1, 8),
2659 BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
2660 BPF_EXIT_INSN(),
2661 BPF_ALU32_IMM(BPF_MOV, R0, 1),
2662 BPF_EXIT_INSN(),
2663 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002664 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002665 { },
2666 { { 0, 1 } }
2667 },
2668 {
2669 /* Have to test all register combinations, since
2670 * JITing of different registers will produce
2671 * different asm code.
2672 */
2673 "INT: ADD 64-bit",
Andrew Mortonece80492014-05-22 10:16:46 -07002674 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002675 BPF_ALU64_IMM(BPF_MOV, R0, 0),
2676 BPF_ALU64_IMM(BPF_MOV, R1, 1),
2677 BPF_ALU64_IMM(BPF_MOV, R2, 2),
2678 BPF_ALU64_IMM(BPF_MOV, R3, 3),
2679 BPF_ALU64_IMM(BPF_MOV, R4, 4),
2680 BPF_ALU64_IMM(BPF_MOV, R5, 5),
2681 BPF_ALU64_IMM(BPF_MOV, R6, 6),
2682 BPF_ALU64_IMM(BPF_MOV, R7, 7),
2683 BPF_ALU64_IMM(BPF_MOV, R8, 8),
2684 BPF_ALU64_IMM(BPF_MOV, R9, 9),
2685 BPF_ALU64_IMM(BPF_ADD, R0, 20),
2686 BPF_ALU64_IMM(BPF_ADD, R1, 20),
2687 BPF_ALU64_IMM(BPF_ADD, R2, 20),
2688 BPF_ALU64_IMM(BPF_ADD, R3, 20),
2689 BPF_ALU64_IMM(BPF_ADD, R4, 20),
2690 BPF_ALU64_IMM(BPF_ADD, R5, 20),
2691 BPF_ALU64_IMM(BPF_ADD, R6, 20),
2692 BPF_ALU64_IMM(BPF_ADD, R7, 20),
2693 BPF_ALU64_IMM(BPF_ADD, R8, 20),
2694 BPF_ALU64_IMM(BPF_ADD, R9, 20),
2695 BPF_ALU64_IMM(BPF_SUB, R0, 10),
2696 BPF_ALU64_IMM(BPF_SUB, R1, 10),
2697 BPF_ALU64_IMM(BPF_SUB, R2, 10),
2698 BPF_ALU64_IMM(BPF_SUB, R3, 10),
2699 BPF_ALU64_IMM(BPF_SUB, R4, 10),
2700 BPF_ALU64_IMM(BPF_SUB, R5, 10),
2701 BPF_ALU64_IMM(BPF_SUB, R6, 10),
2702 BPF_ALU64_IMM(BPF_SUB, R7, 10),
2703 BPF_ALU64_IMM(BPF_SUB, R8, 10),
2704 BPF_ALU64_IMM(BPF_SUB, R9, 10),
2705 BPF_ALU64_REG(BPF_ADD, R0, R0),
2706 BPF_ALU64_REG(BPF_ADD, R0, R1),
2707 BPF_ALU64_REG(BPF_ADD, R0, R2),
2708 BPF_ALU64_REG(BPF_ADD, R0, R3),
2709 BPF_ALU64_REG(BPF_ADD, R0, R4),
2710 BPF_ALU64_REG(BPF_ADD, R0, R5),
2711 BPF_ALU64_REG(BPF_ADD, R0, R6),
2712 BPF_ALU64_REG(BPF_ADD, R0, R7),
2713 BPF_ALU64_REG(BPF_ADD, R0, R8),
2714 BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
2715 BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
2716 BPF_EXIT_INSN(),
2717 BPF_ALU64_REG(BPF_ADD, R1, R0),
2718 BPF_ALU64_REG(BPF_ADD, R1, R1),
2719 BPF_ALU64_REG(BPF_ADD, R1, R2),
2720 BPF_ALU64_REG(BPF_ADD, R1, R3),
2721 BPF_ALU64_REG(BPF_ADD, R1, R4),
2722 BPF_ALU64_REG(BPF_ADD, R1, R5),
2723 BPF_ALU64_REG(BPF_ADD, R1, R6),
2724 BPF_ALU64_REG(BPF_ADD, R1, R7),
2725 BPF_ALU64_REG(BPF_ADD, R1, R8),
2726 BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
2727 BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
2728 BPF_EXIT_INSN(),
2729 BPF_ALU64_REG(BPF_ADD, R2, R0),
2730 BPF_ALU64_REG(BPF_ADD, R2, R1),
2731 BPF_ALU64_REG(BPF_ADD, R2, R2),
2732 BPF_ALU64_REG(BPF_ADD, R2, R3),
2733 BPF_ALU64_REG(BPF_ADD, R2, R4),
2734 BPF_ALU64_REG(BPF_ADD, R2, R5),
2735 BPF_ALU64_REG(BPF_ADD, R2, R6),
2736 BPF_ALU64_REG(BPF_ADD, R2, R7),
2737 BPF_ALU64_REG(BPF_ADD, R2, R8),
2738 BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
2739 BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
2740 BPF_EXIT_INSN(),
2741 BPF_ALU64_REG(BPF_ADD, R3, R0),
2742 BPF_ALU64_REG(BPF_ADD, R3, R1),
2743 BPF_ALU64_REG(BPF_ADD, R3, R2),
2744 BPF_ALU64_REG(BPF_ADD, R3, R3),
2745 BPF_ALU64_REG(BPF_ADD, R3, R4),
2746 BPF_ALU64_REG(BPF_ADD, R3, R5),
2747 BPF_ALU64_REG(BPF_ADD, R3, R6),
2748 BPF_ALU64_REG(BPF_ADD, R3, R7),
2749 BPF_ALU64_REG(BPF_ADD, R3, R8),
2750 BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
2751 BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
2752 BPF_EXIT_INSN(),
2753 BPF_ALU64_REG(BPF_ADD, R4, R0),
2754 BPF_ALU64_REG(BPF_ADD, R4, R1),
2755 BPF_ALU64_REG(BPF_ADD, R4, R2),
2756 BPF_ALU64_REG(BPF_ADD, R4, R3),
2757 BPF_ALU64_REG(BPF_ADD, R4, R4),
2758 BPF_ALU64_REG(BPF_ADD, R4, R5),
2759 BPF_ALU64_REG(BPF_ADD, R4, R6),
2760 BPF_ALU64_REG(BPF_ADD, R4, R7),
2761 BPF_ALU64_REG(BPF_ADD, R4, R8),
2762 BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
2763 BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
2764 BPF_EXIT_INSN(),
2765 BPF_ALU64_REG(BPF_ADD, R5, R0),
2766 BPF_ALU64_REG(BPF_ADD, R5, R1),
2767 BPF_ALU64_REG(BPF_ADD, R5, R2),
2768 BPF_ALU64_REG(BPF_ADD, R5, R3),
2769 BPF_ALU64_REG(BPF_ADD, R5, R4),
2770 BPF_ALU64_REG(BPF_ADD, R5, R5),
2771 BPF_ALU64_REG(BPF_ADD, R5, R6),
2772 BPF_ALU64_REG(BPF_ADD, R5, R7),
2773 BPF_ALU64_REG(BPF_ADD, R5, R8),
2774 BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
2775 BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
2776 BPF_EXIT_INSN(),
2777 BPF_ALU64_REG(BPF_ADD, R6, R0),
2778 BPF_ALU64_REG(BPF_ADD, R6, R1),
2779 BPF_ALU64_REG(BPF_ADD, R6, R2),
2780 BPF_ALU64_REG(BPF_ADD, R6, R3),
2781 BPF_ALU64_REG(BPF_ADD, R6, R4),
2782 BPF_ALU64_REG(BPF_ADD, R6, R5),
2783 BPF_ALU64_REG(BPF_ADD, R6, R6),
2784 BPF_ALU64_REG(BPF_ADD, R6, R7),
2785 BPF_ALU64_REG(BPF_ADD, R6, R8),
2786 BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
2787 BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
2788 BPF_EXIT_INSN(),
2789 BPF_ALU64_REG(BPF_ADD, R7, R0),
2790 BPF_ALU64_REG(BPF_ADD, R7, R1),
2791 BPF_ALU64_REG(BPF_ADD, R7, R2),
2792 BPF_ALU64_REG(BPF_ADD, R7, R3),
2793 BPF_ALU64_REG(BPF_ADD, R7, R4),
2794 BPF_ALU64_REG(BPF_ADD, R7, R5),
2795 BPF_ALU64_REG(BPF_ADD, R7, R6),
2796 BPF_ALU64_REG(BPF_ADD, R7, R7),
2797 BPF_ALU64_REG(BPF_ADD, R7, R8),
2798 BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
2799 BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
2800 BPF_EXIT_INSN(),
2801 BPF_ALU64_REG(BPF_ADD, R8, R0),
2802 BPF_ALU64_REG(BPF_ADD, R8, R1),
2803 BPF_ALU64_REG(BPF_ADD, R8, R2),
2804 BPF_ALU64_REG(BPF_ADD, R8, R3),
2805 BPF_ALU64_REG(BPF_ADD, R8, R4),
2806 BPF_ALU64_REG(BPF_ADD, R8, R5),
2807 BPF_ALU64_REG(BPF_ADD, R8, R6),
2808 BPF_ALU64_REG(BPF_ADD, R8, R7),
2809 BPF_ALU64_REG(BPF_ADD, R8, R8),
2810 BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
2811 BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
2812 BPF_EXIT_INSN(),
2813 BPF_ALU64_REG(BPF_ADD, R9, R0),
2814 BPF_ALU64_REG(BPF_ADD, R9, R1),
2815 BPF_ALU64_REG(BPF_ADD, R9, R2),
2816 BPF_ALU64_REG(BPF_ADD, R9, R3),
2817 BPF_ALU64_REG(BPF_ADD, R9, R4),
2818 BPF_ALU64_REG(BPF_ADD, R9, R5),
2819 BPF_ALU64_REG(BPF_ADD, R9, R6),
2820 BPF_ALU64_REG(BPF_ADD, R9, R7),
2821 BPF_ALU64_REG(BPF_ADD, R9, R8),
2822 BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
2823 BPF_ALU64_REG(BPF_MOV, R0, R9),
2824 BPF_EXIT_INSN(),
2825 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002826 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002827 { },
2828 { { 0, 2957380 } }
2829 },
2830 {
2831 "INT: ADD 32-bit",
Andrew Mortonece80492014-05-22 10:16:46 -07002832 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002833 BPF_ALU32_IMM(BPF_MOV, R0, 20),
2834 BPF_ALU32_IMM(BPF_MOV, R1, 1),
2835 BPF_ALU32_IMM(BPF_MOV, R2, 2),
2836 BPF_ALU32_IMM(BPF_MOV, R3, 3),
2837 BPF_ALU32_IMM(BPF_MOV, R4, 4),
2838 BPF_ALU32_IMM(BPF_MOV, R5, 5),
2839 BPF_ALU32_IMM(BPF_MOV, R6, 6),
2840 BPF_ALU32_IMM(BPF_MOV, R7, 7),
2841 BPF_ALU32_IMM(BPF_MOV, R8, 8),
2842 BPF_ALU32_IMM(BPF_MOV, R9, 9),
2843 BPF_ALU64_IMM(BPF_ADD, R1, 10),
2844 BPF_ALU64_IMM(BPF_ADD, R2, 10),
2845 BPF_ALU64_IMM(BPF_ADD, R3, 10),
2846 BPF_ALU64_IMM(BPF_ADD, R4, 10),
2847 BPF_ALU64_IMM(BPF_ADD, R5, 10),
2848 BPF_ALU64_IMM(BPF_ADD, R6, 10),
2849 BPF_ALU64_IMM(BPF_ADD, R7, 10),
2850 BPF_ALU64_IMM(BPF_ADD, R8, 10),
2851 BPF_ALU64_IMM(BPF_ADD, R9, 10),
2852 BPF_ALU32_REG(BPF_ADD, R0, R1),
2853 BPF_ALU32_REG(BPF_ADD, R0, R2),
2854 BPF_ALU32_REG(BPF_ADD, R0, R3),
2855 BPF_ALU32_REG(BPF_ADD, R0, R4),
2856 BPF_ALU32_REG(BPF_ADD, R0, R5),
2857 BPF_ALU32_REG(BPF_ADD, R0, R6),
2858 BPF_ALU32_REG(BPF_ADD, R0, R7),
2859 BPF_ALU32_REG(BPF_ADD, R0, R8),
2860 BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
2861 BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
2862 BPF_EXIT_INSN(),
2863 BPF_ALU32_REG(BPF_ADD, R1, R0),
2864 BPF_ALU32_REG(BPF_ADD, R1, R1),
2865 BPF_ALU32_REG(BPF_ADD, R1, R2),
2866 BPF_ALU32_REG(BPF_ADD, R1, R3),
2867 BPF_ALU32_REG(BPF_ADD, R1, R4),
2868 BPF_ALU32_REG(BPF_ADD, R1, R5),
2869 BPF_ALU32_REG(BPF_ADD, R1, R6),
2870 BPF_ALU32_REG(BPF_ADD, R1, R7),
2871 BPF_ALU32_REG(BPF_ADD, R1, R8),
2872 BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
2873 BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
2874 BPF_EXIT_INSN(),
2875 BPF_ALU32_REG(BPF_ADD, R2, R0),
2876 BPF_ALU32_REG(BPF_ADD, R2, R1),
2877 BPF_ALU32_REG(BPF_ADD, R2, R2),
2878 BPF_ALU32_REG(BPF_ADD, R2, R3),
2879 BPF_ALU32_REG(BPF_ADD, R2, R4),
2880 BPF_ALU32_REG(BPF_ADD, R2, R5),
2881 BPF_ALU32_REG(BPF_ADD, R2, R6),
2882 BPF_ALU32_REG(BPF_ADD, R2, R7),
2883 BPF_ALU32_REG(BPF_ADD, R2, R8),
2884 BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
2885 BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
2886 BPF_EXIT_INSN(),
2887 BPF_ALU32_REG(BPF_ADD, R3, R0),
2888 BPF_ALU32_REG(BPF_ADD, R3, R1),
2889 BPF_ALU32_REG(BPF_ADD, R3, R2),
2890 BPF_ALU32_REG(BPF_ADD, R3, R3),
2891 BPF_ALU32_REG(BPF_ADD, R3, R4),
2892 BPF_ALU32_REG(BPF_ADD, R3, R5),
2893 BPF_ALU32_REG(BPF_ADD, R3, R6),
2894 BPF_ALU32_REG(BPF_ADD, R3, R7),
2895 BPF_ALU32_REG(BPF_ADD, R3, R8),
2896 BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
2897 BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
2898 BPF_EXIT_INSN(),
2899 BPF_ALU32_REG(BPF_ADD, R4, R0),
2900 BPF_ALU32_REG(BPF_ADD, R4, R1),
2901 BPF_ALU32_REG(BPF_ADD, R4, R2),
2902 BPF_ALU32_REG(BPF_ADD, R4, R3),
2903 BPF_ALU32_REG(BPF_ADD, R4, R4),
2904 BPF_ALU32_REG(BPF_ADD, R4, R5),
2905 BPF_ALU32_REG(BPF_ADD, R4, R6),
2906 BPF_ALU32_REG(BPF_ADD, R4, R7),
2907 BPF_ALU32_REG(BPF_ADD, R4, R8),
2908 BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
2909 BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
2910 BPF_EXIT_INSN(),
2911 BPF_ALU32_REG(BPF_ADD, R5, R0),
2912 BPF_ALU32_REG(BPF_ADD, R5, R1),
2913 BPF_ALU32_REG(BPF_ADD, R5, R2),
2914 BPF_ALU32_REG(BPF_ADD, R5, R3),
2915 BPF_ALU32_REG(BPF_ADD, R5, R4),
2916 BPF_ALU32_REG(BPF_ADD, R5, R5),
2917 BPF_ALU32_REG(BPF_ADD, R5, R6),
2918 BPF_ALU32_REG(BPF_ADD, R5, R7),
2919 BPF_ALU32_REG(BPF_ADD, R5, R8),
2920 BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
2921 BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
2922 BPF_EXIT_INSN(),
2923 BPF_ALU32_REG(BPF_ADD, R6, R0),
2924 BPF_ALU32_REG(BPF_ADD, R6, R1),
2925 BPF_ALU32_REG(BPF_ADD, R6, R2),
2926 BPF_ALU32_REG(BPF_ADD, R6, R3),
2927 BPF_ALU32_REG(BPF_ADD, R6, R4),
2928 BPF_ALU32_REG(BPF_ADD, R6, R5),
2929 BPF_ALU32_REG(BPF_ADD, R6, R6),
2930 BPF_ALU32_REG(BPF_ADD, R6, R7),
2931 BPF_ALU32_REG(BPF_ADD, R6, R8),
2932 BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
2933 BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
2934 BPF_EXIT_INSN(),
2935 BPF_ALU32_REG(BPF_ADD, R7, R0),
2936 BPF_ALU32_REG(BPF_ADD, R7, R1),
2937 BPF_ALU32_REG(BPF_ADD, R7, R2),
2938 BPF_ALU32_REG(BPF_ADD, R7, R3),
2939 BPF_ALU32_REG(BPF_ADD, R7, R4),
2940 BPF_ALU32_REG(BPF_ADD, R7, R5),
2941 BPF_ALU32_REG(BPF_ADD, R7, R6),
2942 BPF_ALU32_REG(BPF_ADD, R7, R7),
2943 BPF_ALU32_REG(BPF_ADD, R7, R8),
2944 BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
2945 BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
2946 BPF_EXIT_INSN(),
2947 BPF_ALU32_REG(BPF_ADD, R8, R0),
2948 BPF_ALU32_REG(BPF_ADD, R8, R1),
2949 BPF_ALU32_REG(BPF_ADD, R8, R2),
2950 BPF_ALU32_REG(BPF_ADD, R8, R3),
2951 BPF_ALU32_REG(BPF_ADD, R8, R4),
2952 BPF_ALU32_REG(BPF_ADD, R8, R5),
2953 BPF_ALU32_REG(BPF_ADD, R8, R6),
2954 BPF_ALU32_REG(BPF_ADD, R8, R7),
2955 BPF_ALU32_REG(BPF_ADD, R8, R8),
2956 BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
2957 BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
2958 BPF_EXIT_INSN(),
2959 BPF_ALU32_REG(BPF_ADD, R9, R0),
2960 BPF_ALU32_REG(BPF_ADD, R9, R1),
2961 BPF_ALU32_REG(BPF_ADD, R9, R2),
2962 BPF_ALU32_REG(BPF_ADD, R9, R3),
2963 BPF_ALU32_REG(BPF_ADD, R9, R4),
2964 BPF_ALU32_REG(BPF_ADD, R9, R5),
2965 BPF_ALU32_REG(BPF_ADD, R9, R6),
2966 BPF_ALU32_REG(BPF_ADD, R9, R7),
2967 BPF_ALU32_REG(BPF_ADD, R9, R8),
2968 BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
2969 BPF_ALU32_REG(BPF_MOV, R0, R9),
2970 BPF_EXIT_INSN(),
2971 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02002972 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002973 { },
2974 { { 0, 2957380 } }
2975 },
2976 { /* Mainly checking JIT here. */
2977 "INT: SUB",
Andrew Mortonece80492014-05-22 10:16:46 -07002978 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07002979 BPF_ALU64_IMM(BPF_MOV, R0, 0),
2980 BPF_ALU64_IMM(BPF_MOV, R1, 1),
2981 BPF_ALU64_IMM(BPF_MOV, R2, 2),
2982 BPF_ALU64_IMM(BPF_MOV, R3, 3),
2983 BPF_ALU64_IMM(BPF_MOV, R4, 4),
2984 BPF_ALU64_IMM(BPF_MOV, R5, 5),
2985 BPF_ALU64_IMM(BPF_MOV, R6, 6),
2986 BPF_ALU64_IMM(BPF_MOV, R7, 7),
2987 BPF_ALU64_IMM(BPF_MOV, R8, 8),
2988 BPF_ALU64_IMM(BPF_MOV, R9, 9),
2989 BPF_ALU64_REG(BPF_SUB, R0, R0),
2990 BPF_ALU64_REG(BPF_SUB, R0, R1),
2991 BPF_ALU64_REG(BPF_SUB, R0, R2),
2992 BPF_ALU64_REG(BPF_SUB, R0, R3),
2993 BPF_ALU64_REG(BPF_SUB, R0, R4),
2994 BPF_ALU64_REG(BPF_SUB, R0, R5),
2995 BPF_ALU64_REG(BPF_SUB, R0, R6),
2996 BPF_ALU64_REG(BPF_SUB, R0, R7),
2997 BPF_ALU64_REG(BPF_SUB, R0, R8),
2998 BPF_ALU64_REG(BPF_SUB, R0, R9),
2999 BPF_ALU64_IMM(BPF_SUB, R0, 10),
3000 BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
3001 BPF_EXIT_INSN(),
3002 BPF_ALU64_REG(BPF_SUB, R1, R0),
3003 BPF_ALU64_REG(BPF_SUB, R1, R2),
3004 BPF_ALU64_REG(BPF_SUB, R1, R3),
3005 BPF_ALU64_REG(BPF_SUB, R1, R4),
3006 BPF_ALU64_REG(BPF_SUB, R1, R5),
3007 BPF_ALU64_REG(BPF_SUB, R1, R6),
3008 BPF_ALU64_REG(BPF_SUB, R1, R7),
3009 BPF_ALU64_REG(BPF_SUB, R1, R8),
3010 BPF_ALU64_REG(BPF_SUB, R1, R9),
3011 BPF_ALU64_IMM(BPF_SUB, R1, 10),
3012 BPF_ALU64_REG(BPF_SUB, R2, R0),
3013 BPF_ALU64_REG(BPF_SUB, R2, R1),
3014 BPF_ALU64_REG(BPF_SUB, R2, R3),
3015 BPF_ALU64_REG(BPF_SUB, R2, R4),
3016 BPF_ALU64_REG(BPF_SUB, R2, R5),
3017 BPF_ALU64_REG(BPF_SUB, R2, R6),
3018 BPF_ALU64_REG(BPF_SUB, R2, R7),
3019 BPF_ALU64_REG(BPF_SUB, R2, R8),
3020 BPF_ALU64_REG(BPF_SUB, R2, R9),
3021 BPF_ALU64_IMM(BPF_SUB, R2, 10),
3022 BPF_ALU64_REG(BPF_SUB, R3, R0),
3023 BPF_ALU64_REG(BPF_SUB, R3, R1),
3024 BPF_ALU64_REG(BPF_SUB, R3, R2),
3025 BPF_ALU64_REG(BPF_SUB, R3, R4),
3026 BPF_ALU64_REG(BPF_SUB, R3, R5),
3027 BPF_ALU64_REG(BPF_SUB, R3, R6),
3028 BPF_ALU64_REG(BPF_SUB, R3, R7),
3029 BPF_ALU64_REG(BPF_SUB, R3, R8),
3030 BPF_ALU64_REG(BPF_SUB, R3, R9),
3031 BPF_ALU64_IMM(BPF_SUB, R3, 10),
3032 BPF_ALU64_REG(BPF_SUB, R4, R0),
3033 BPF_ALU64_REG(BPF_SUB, R4, R1),
3034 BPF_ALU64_REG(BPF_SUB, R4, R2),
3035 BPF_ALU64_REG(BPF_SUB, R4, R3),
3036 BPF_ALU64_REG(BPF_SUB, R4, R5),
3037 BPF_ALU64_REG(BPF_SUB, R4, R6),
3038 BPF_ALU64_REG(BPF_SUB, R4, R7),
3039 BPF_ALU64_REG(BPF_SUB, R4, R8),
3040 BPF_ALU64_REG(BPF_SUB, R4, R9),
3041 BPF_ALU64_IMM(BPF_SUB, R4, 10),
3042 BPF_ALU64_REG(BPF_SUB, R5, R0),
3043 BPF_ALU64_REG(BPF_SUB, R5, R1),
3044 BPF_ALU64_REG(BPF_SUB, R5, R2),
3045 BPF_ALU64_REG(BPF_SUB, R5, R3),
3046 BPF_ALU64_REG(BPF_SUB, R5, R4),
3047 BPF_ALU64_REG(BPF_SUB, R5, R6),
3048 BPF_ALU64_REG(BPF_SUB, R5, R7),
3049 BPF_ALU64_REG(BPF_SUB, R5, R8),
3050 BPF_ALU64_REG(BPF_SUB, R5, R9),
3051 BPF_ALU64_IMM(BPF_SUB, R5, 10),
3052 BPF_ALU64_REG(BPF_SUB, R6, R0),
3053 BPF_ALU64_REG(BPF_SUB, R6, R1),
3054 BPF_ALU64_REG(BPF_SUB, R6, R2),
3055 BPF_ALU64_REG(BPF_SUB, R6, R3),
3056 BPF_ALU64_REG(BPF_SUB, R6, R4),
3057 BPF_ALU64_REG(BPF_SUB, R6, R5),
3058 BPF_ALU64_REG(BPF_SUB, R6, R7),
3059 BPF_ALU64_REG(BPF_SUB, R6, R8),
3060 BPF_ALU64_REG(BPF_SUB, R6, R9),
3061 BPF_ALU64_IMM(BPF_SUB, R6, 10),
3062 BPF_ALU64_REG(BPF_SUB, R7, R0),
3063 BPF_ALU64_REG(BPF_SUB, R7, R1),
3064 BPF_ALU64_REG(BPF_SUB, R7, R2),
3065 BPF_ALU64_REG(BPF_SUB, R7, R3),
3066 BPF_ALU64_REG(BPF_SUB, R7, R4),
3067 BPF_ALU64_REG(BPF_SUB, R7, R5),
3068 BPF_ALU64_REG(BPF_SUB, R7, R6),
3069 BPF_ALU64_REG(BPF_SUB, R7, R8),
3070 BPF_ALU64_REG(BPF_SUB, R7, R9),
3071 BPF_ALU64_IMM(BPF_SUB, R7, 10),
3072 BPF_ALU64_REG(BPF_SUB, R8, R0),
3073 BPF_ALU64_REG(BPF_SUB, R8, R1),
3074 BPF_ALU64_REG(BPF_SUB, R8, R2),
3075 BPF_ALU64_REG(BPF_SUB, R8, R3),
3076 BPF_ALU64_REG(BPF_SUB, R8, R4),
3077 BPF_ALU64_REG(BPF_SUB, R8, R5),
3078 BPF_ALU64_REG(BPF_SUB, R8, R6),
3079 BPF_ALU64_REG(BPF_SUB, R8, R7),
3080 BPF_ALU64_REG(BPF_SUB, R8, R9),
3081 BPF_ALU64_IMM(BPF_SUB, R8, 10),
3082 BPF_ALU64_REG(BPF_SUB, R9, R0),
3083 BPF_ALU64_REG(BPF_SUB, R9, R1),
3084 BPF_ALU64_REG(BPF_SUB, R9, R2),
3085 BPF_ALU64_REG(BPF_SUB, R9, R3),
3086 BPF_ALU64_REG(BPF_SUB, R9, R4),
3087 BPF_ALU64_REG(BPF_SUB, R9, R5),
3088 BPF_ALU64_REG(BPF_SUB, R9, R6),
3089 BPF_ALU64_REG(BPF_SUB, R9, R7),
3090 BPF_ALU64_REG(BPF_SUB, R9, R8),
3091 BPF_ALU64_IMM(BPF_SUB, R9, 10),
3092 BPF_ALU64_IMM(BPF_SUB, R0, 10),
3093 BPF_ALU64_IMM(BPF_NEG, R0, 0),
3094 BPF_ALU64_REG(BPF_SUB, R0, R1),
3095 BPF_ALU64_REG(BPF_SUB, R0, R2),
3096 BPF_ALU64_REG(BPF_SUB, R0, R3),
3097 BPF_ALU64_REG(BPF_SUB, R0, R4),
3098 BPF_ALU64_REG(BPF_SUB, R0, R5),
3099 BPF_ALU64_REG(BPF_SUB, R0, R6),
3100 BPF_ALU64_REG(BPF_SUB, R0, R7),
3101 BPF_ALU64_REG(BPF_SUB, R0, R8),
3102 BPF_ALU64_REG(BPF_SUB, R0, R9),
3103 BPF_EXIT_INSN(),
3104 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02003105 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003106 { },
3107 { { 0, 11 } }
3108 },
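	/*
	 * INT: XOR checks that both "reg ^= reg" and "reg -= reg" clear a
	 * register, comparing the two idioms across all registers.
	 */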
3109 { /* Mainly checking JIT here. */
3110 "INT: XOR",
Andrew Mortonece80492014-05-22 10:16:46 -07003111 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003112 BPF_ALU64_REG(BPF_SUB, R0, R0),
3113 BPF_ALU64_REG(BPF_XOR, R1, R1),
3114 BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
3115 BPF_EXIT_INSN(),
3116 BPF_ALU64_IMM(BPF_MOV, R0, 10),
3117 BPF_ALU64_IMM(BPF_MOV, R1, -1),
3118 BPF_ALU64_REG(BPF_SUB, R1, R1),
3119 BPF_ALU64_REG(BPF_XOR, R2, R2),
3120 BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
3121 BPF_EXIT_INSN(),
3122 BPF_ALU64_REG(BPF_SUB, R2, R2),
3123 BPF_ALU64_REG(BPF_XOR, R3, R3),
3124 BPF_ALU64_IMM(BPF_MOV, R0, 10),
3125 BPF_ALU64_IMM(BPF_MOV, R1, -1),
3126 BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
3127 BPF_EXIT_INSN(),
3128 BPF_ALU64_REG(BPF_SUB, R3, R3),
3129 BPF_ALU64_REG(BPF_XOR, R4, R4),
3130 BPF_ALU64_IMM(BPF_MOV, R2, 1),
3131 BPF_ALU64_IMM(BPF_MOV, R5, -1),
3132 BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
3133 BPF_EXIT_INSN(),
3134 BPF_ALU64_REG(BPF_SUB, R4, R4),
3135 BPF_ALU64_REG(BPF_XOR, R5, R5),
3136 BPF_ALU64_IMM(BPF_MOV, R3, 1),
3137 BPF_ALU64_IMM(BPF_MOV, R7, -1),
3138 BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
3139 BPF_EXIT_INSN(),
3140 BPF_ALU64_IMM(BPF_MOV, R5, 1),
3141 BPF_ALU64_REG(BPF_SUB, R5, R5),
3142 BPF_ALU64_REG(BPF_XOR, R6, R6),
3143 BPF_ALU64_IMM(BPF_MOV, R1, 1),
3144 BPF_ALU64_IMM(BPF_MOV, R8, -1),
3145 BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
3146 BPF_EXIT_INSN(),
3147 BPF_ALU64_REG(BPF_SUB, R6, R6),
3148 BPF_ALU64_REG(BPF_XOR, R7, R7),
3149 BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
3150 BPF_EXIT_INSN(),
3151 BPF_ALU64_REG(BPF_SUB, R7, R7),
3152 BPF_ALU64_REG(BPF_XOR, R8, R8),
3153 BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
3154 BPF_EXIT_INSN(),
3155 BPF_ALU64_REG(BPF_SUB, R8, R8),
3156 BPF_ALU64_REG(BPF_XOR, R9, R9),
3157 BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
3158 BPF_EXIT_INSN(),
3159 BPF_ALU64_REG(BPF_SUB, R9, R9),
3160 BPF_ALU64_REG(BPF_XOR, R0, R0),
3161 BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
3162 BPF_EXIT_INSN(),
3163 BPF_ALU64_REG(BPF_SUB, R1, R1),
3164 BPF_ALU64_REG(BPF_XOR, R0, R0),
3165 BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
3166 BPF_ALU64_IMM(BPF_MOV, R0, 0),
3167 BPF_EXIT_INSN(),
3168 BPF_ALU64_IMM(BPF_MOV, R0, 1),
3169 BPF_EXIT_INSN(),
3170 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02003171 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003172 { },
3173 { { 0, 1 } }
3174 },
3175 { /* Mainly checking JIT here. */
3176 "INT: MUL",
Andrew Mortonece80492014-05-22 10:16:46 -07003177 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003178 BPF_ALU64_IMM(BPF_MOV, R0, 11),
3179 BPF_ALU64_IMM(BPF_MOV, R1, 1),
3180 BPF_ALU64_IMM(BPF_MOV, R2, 2),
3181 BPF_ALU64_IMM(BPF_MOV, R3, 3),
3182 BPF_ALU64_IMM(BPF_MOV, R4, 4),
3183 BPF_ALU64_IMM(BPF_MOV, R5, 5),
3184 BPF_ALU64_IMM(BPF_MOV, R6, 6),
3185 BPF_ALU64_IMM(BPF_MOV, R7, 7),
3186 BPF_ALU64_IMM(BPF_MOV, R8, 8),
3187 BPF_ALU64_IMM(BPF_MOV, R9, 9),
3188 BPF_ALU64_REG(BPF_MUL, R0, R0),
3189 BPF_ALU64_REG(BPF_MUL, R0, R1),
3190 BPF_ALU64_REG(BPF_MUL, R0, R2),
3191 BPF_ALU64_REG(BPF_MUL, R0, R3),
3192 BPF_ALU64_REG(BPF_MUL, R0, R4),
3193 BPF_ALU64_REG(BPF_MUL, R0, R5),
3194 BPF_ALU64_REG(BPF_MUL, R0, R6),
3195 BPF_ALU64_REG(BPF_MUL, R0, R7),
3196 BPF_ALU64_REG(BPF_MUL, R0, R8),
3197 BPF_ALU64_REG(BPF_MUL, R0, R9),
3198 BPF_ALU64_IMM(BPF_MUL, R0, 10),
3199 BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
3200 BPF_EXIT_INSN(),
3201 BPF_ALU64_REG(BPF_MUL, R1, R0),
3202 BPF_ALU64_REG(BPF_MUL, R1, R2),
3203 BPF_ALU64_REG(BPF_MUL, R1, R3),
3204 BPF_ALU64_REG(BPF_MUL, R1, R4),
3205 BPF_ALU64_REG(BPF_MUL, R1, R5),
3206 BPF_ALU64_REG(BPF_MUL, R1, R6),
3207 BPF_ALU64_REG(BPF_MUL, R1, R7),
3208 BPF_ALU64_REG(BPF_MUL, R1, R8),
3209 BPF_ALU64_REG(BPF_MUL, R1, R9),
3210 BPF_ALU64_IMM(BPF_MUL, R1, 10),
3211 BPF_ALU64_REG(BPF_MOV, R2, R1),
3212 BPF_ALU64_IMM(BPF_RSH, R2, 32),
3213 BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
3214 BPF_EXIT_INSN(),
3215 BPF_ALU64_IMM(BPF_LSH, R1, 32),
3216 BPF_ALU64_IMM(BPF_ARSH, R1, 32),
3217 BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
3218 BPF_EXIT_INSN(),
3219 BPF_ALU64_REG(BPF_MUL, R2, R0),
3220 BPF_ALU64_REG(BPF_MUL, R2, R1),
3221 BPF_ALU64_REG(BPF_MUL, R2, R3),
3222 BPF_ALU64_REG(BPF_MUL, R2, R4),
3223 BPF_ALU64_REG(BPF_MUL, R2, R5),
3224 BPF_ALU64_REG(BPF_MUL, R2, R6),
3225 BPF_ALU64_REG(BPF_MUL, R2, R7),
3226 BPF_ALU64_REG(BPF_MUL, R2, R8),
3227 BPF_ALU64_REG(BPF_MUL, R2, R9),
3228 BPF_ALU64_IMM(BPF_MUL, R2, 10),
3229 BPF_ALU64_IMM(BPF_RSH, R2, 32),
3230 BPF_ALU64_REG(BPF_MOV, R0, R2),
3231 BPF_EXIT_INSN(),
3232 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02003233 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003234 { },
3235 { { 0, 0x35d97ef2 } }
3236 },
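	/*
	 * The MOV REG64 / MOV REG32 / LD IMM64 tests copy all-ones through
	 * every register, then overwrite each with zero via 64-bit moves,
	 * 32-bit moves and 64-bit immediate loads respectively. Any stale
	 * upper bits (e.g. a 32-bit move that fails to zero-extend) would
	 * change the final sum from the expected 0xfefe.
	 */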
Daniel Borkmann9dd2af82015-12-17 23:51:57 +01003237 { /* Mainly checking JIT here. */
3238 "MOV REG64",
3239 .u.insns_int = {
3240 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
3241 BPF_MOV64_REG(R1, R0),
3242 BPF_MOV64_REG(R2, R1),
3243 BPF_MOV64_REG(R3, R2),
3244 BPF_MOV64_REG(R4, R3),
3245 BPF_MOV64_REG(R5, R4),
3246 BPF_MOV64_REG(R6, R5),
3247 BPF_MOV64_REG(R7, R6),
3248 BPF_MOV64_REG(R8, R7),
3249 BPF_MOV64_REG(R9, R8),
3250 BPF_ALU64_IMM(BPF_MOV, R0, 0),
3251 BPF_ALU64_IMM(BPF_MOV, R1, 0),
3252 BPF_ALU64_IMM(BPF_MOV, R2, 0),
3253 BPF_ALU64_IMM(BPF_MOV, R3, 0),
3254 BPF_ALU64_IMM(BPF_MOV, R4, 0),
3255 BPF_ALU64_IMM(BPF_MOV, R5, 0),
3256 BPF_ALU64_IMM(BPF_MOV, R6, 0),
3257 BPF_ALU64_IMM(BPF_MOV, R7, 0),
3258 BPF_ALU64_IMM(BPF_MOV, R8, 0),
3259 BPF_ALU64_IMM(BPF_MOV, R9, 0),
3260 BPF_ALU64_REG(BPF_ADD, R0, R0),
3261 BPF_ALU64_REG(BPF_ADD, R0, R1),
3262 BPF_ALU64_REG(BPF_ADD, R0, R2),
3263 BPF_ALU64_REG(BPF_ADD, R0, R3),
3264 BPF_ALU64_REG(BPF_ADD, R0, R4),
3265 BPF_ALU64_REG(BPF_ADD, R0, R5),
3266 BPF_ALU64_REG(BPF_ADD, R0, R6),
3267 BPF_ALU64_REG(BPF_ADD, R0, R7),
3268 BPF_ALU64_REG(BPF_ADD, R0, R8),
3269 BPF_ALU64_REG(BPF_ADD, R0, R9),
3270 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
3271 BPF_EXIT_INSN(),
3272 },
3273 INTERNAL,
3274 { },
3275 { { 0, 0xfefe } }
3276 },
3277 { /* Mainly checking JIT here. */
3278 "MOV REG32",
3279 .u.insns_int = {
3280 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
3281 BPF_MOV64_REG(R1, R0),
3282 BPF_MOV64_REG(R2, R1),
3283 BPF_MOV64_REG(R3, R2),
3284 BPF_MOV64_REG(R4, R3),
3285 BPF_MOV64_REG(R5, R4),
3286 BPF_MOV64_REG(R6, R5),
3287 BPF_MOV64_REG(R7, R6),
3288 BPF_MOV64_REG(R8, R7),
3289 BPF_MOV64_REG(R9, R8),
3290 BPF_ALU32_IMM(BPF_MOV, R0, 0),
3291 BPF_ALU32_IMM(BPF_MOV, R1, 0),
3292 BPF_ALU32_IMM(BPF_MOV, R2, 0),
3293 BPF_ALU32_IMM(BPF_MOV, R3, 0),
3294 BPF_ALU32_IMM(BPF_MOV, R4, 0),
3295 BPF_ALU32_IMM(BPF_MOV, R5, 0),
3296 BPF_ALU32_IMM(BPF_MOV, R6, 0),
3297 BPF_ALU32_IMM(BPF_MOV, R7, 0),
3298 BPF_ALU32_IMM(BPF_MOV, R8, 0),
3299 BPF_ALU32_IMM(BPF_MOV, R9, 0),
3300 BPF_ALU64_REG(BPF_ADD, R0, R0),
3301 BPF_ALU64_REG(BPF_ADD, R0, R1),
3302 BPF_ALU64_REG(BPF_ADD, R0, R2),
3303 BPF_ALU64_REG(BPF_ADD, R0, R3),
3304 BPF_ALU64_REG(BPF_ADD, R0, R4),
3305 BPF_ALU64_REG(BPF_ADD, R0, R5),
3306 BPF_ALU64_REG(BPF_ADD, R0, R6),
3307 BPF_ALU64_REG(BPF_ADD, R0, R7),
3308 BPF_ALU64_REG(BPF_ADD, R0, R8),
3309 BPF_ALU64_REG(BPF_ADD, R0, R9),
3310 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
3311 BPF_EXIT_INSN(),
3312 },
3313 INTERNAL,
3314 { },
3315 { { 0, 0xfefe } }
3316 },
3317 { /* Mainly checking JIT here. */
3318 "LD IMM64",
3319 .u.insns_int = {
3320 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
3321 BPF_MOV64_REG(R1, R0),
3322 BPF_MOV64_REG(R2, R1),
3323 BPF_MOV64_REG(R3, R2),
3324 BPF_MOV64_REG(R4, R3),
3325 BPF_MOV64_REG(R5, R4),
3326 BPF_MOV64_REG(R6, R5),
3327 BPF_MOV64_REG(R7, R6),
3328 BPF_MOV64_REG(R8, R7),
3329 BPF_MOV64_REG(R9, R8),
3330 BPF_LD_IMM64(R0, 0x0LL),
3331 BPF_LD_IMM64(R1, 0x0LL),
3332 BPF_LD_IMM64(R2, 0x0LL),
3333 BPF_LD_IMM64(R3, 0x0LL),
3334 BPF_LD_IMM64(R4, 0x0LL),
3335 BPF_LD_IMM64(R5, 0x0LL),
3336 BPF_LD_IMM64(R6, 0x0LL),
3337 BPF_LD_IMM64(R7, 0x0LL),
3338 BPF_LD_IMM64(R8, 0x0LL),
3339 BPF_LD_IMM64(R9, 0x0LL),
3340 BPF_ALU64_REG(BPF_ADD, R0, R0),
3341 BPF_ALU64_REG(BPF_ADD, R0, R1),
3342 BPF_ALU64_REG(BPF_ADD, R0, R2),
3343 BPF_ALU64_REG(BPF_ADD, R0, R3),
3344 BPF_ALU64_REG(BPF_ADD, R0, R4),
3345 BPF_ALU64_REG(BPF_ADD, R0, R5),
3346 BPF_ALU64_REG(BPF_ADD, R0, R6),
3347 BPF_ALU64_REG(BPF_ADD, R0, R7),
3348 BPF_ALU64_REG(BPF_ADD, R0, R8),
3349 BPF_ALU64_REG(BPF_ADD, R0, R9),
3350 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
3351 BPF_EXIT_INSN(),
3352 },
3353 INTERNAL,
3354 { },
3355 { { 0, 0xfefe } }
3356 },
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003357 {
3358 "INT: ALU MIX",
Andrew Mortonece80492014-05-22 10:16:46 -07003359 .u.insns_int = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003360 BPF_ALU64_IMM(BPF_MOV, R0, 11),
3361 BPF_ALU64_IMM(BPF_ADD, R0, -1),
3362 BPF_ALU64_IMM(BPF_MOV, R2, 2),
3363 BPF_ALU64_IMM(BPF_XOR, R2, 3),
3364 BPF_ALU64_REG(BPF_DIV, R0, R2),
3365 BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
3366 BPF_EXIT_INSN(),
3367 BPF_ALU64_IMM(BPF_MOD, R0, 3),
3368 BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
3369 BPF_EXIT_INSN(),
3370 BPF_ALU64_IMM(BPF_MOV, R0, -1),
3371 BPF_EXIT_INSN(),
3372 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02003373 INTERNAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003374 { },
3375 { { 0, -1 } }
3376 },
3377 {
Alexei Starovoitov72b603e2014-08-25 12:27:02 -07003378 "INT: shifts by register",
3379 .u.insns_int = {
3380 BPF_MOV64_IMM(R0, -1234),
3381 BPF_MOV64_IMM(R1, 1),
3382 BPF_ALU32_REG(BPF_RSH, R0, R1),
3383 BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
3384 BPF_EXIT_INSN(),
3385 BPF_MOV64_IMM(R2, 1),
3386 BPF_ALU64_REG(BPF_LSH, R0, R2),
3387 BPF_MOV32_IMM(R4, -1234),
3388 BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
3389 BPF_EXIT_INSN(),
3390 BPF_ALU64_IMM(BPF_AND, R4, 63),
3391 BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <= 46 */
3392 BPF_MOV64_IMM(R3, 47),
3393 BPF_ALU64_REG(BPF_ARSH, R0, R3),
3394 BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
3395 BPF_EXIT_INSN(),
3396 BPF_MOV64_IMM(R2, 1),
3397 BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
3398 BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
3399 BPF_EXIT_INSN(),
3400 BPF_MOV64_IMM(R4, 4),
3401 BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
3402 BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
3403 BPF_EXIT_INSN(),
3404 BPF_MOV64_IMM(R4, 5),
3405 BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
3406 BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
3407 BPF_EXIT_INSN(),
3408 BPF_MOV64_IMM(R0, -1),
3409 BPF_EXIT_INSN(),
3410 },
3411 INTERNAL,
3412 { },
3413 { { 0, -1 } }
3414 },
3415 {
Johan Almbladh84024a42021-08-09 11:18:23 +02003416 /*
3417 * Register (non-)clobbering test, in the case where a 32-bit
3418 * JIT implements complex ALU64 operations via function calls.
3419 * If so, the function call must be invisible in the eBPF
3420 * registers. The JIT must then save and restore relevant
3421 * registers during the call. The following tests check that
3422 * the eBPF registers retain their values after such a call.
3423 */
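		/*
		 * For example, a 32-bit JIT might lower the ALU64 BPF_DIV
		 * below to a call to a helper such as the kernel's
		 * div64_u64(); the test fails if that call clobbers any of
		 * R0-R9.
		 */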
3424 "INT: Register clobbering, R1 updated",
3425 .u.insns_int = {
3426 BPF_ALU32_IMM(BPF_MOV, R0, 0),
3427 BPF_ALU32_IMM(BPF_MOV, R1, 123456789),
3428 BPF_ALU32_IMM(BPF_MOV, R2, 2),
3429 BPF_ALU32_IMM(BPF_MOV, R3, 3),
3430 BPF_ALU32_IMM(BPF_MOV, R4, 4),
3431 BPF_ALU32_IMM(BPF_MOV, R5, 5),
3432 BPF_ALU32_IMM(BPF_MOV, R6, 6),
3433 BPF_ALU32_IMM(BPF_MOV, R7, 7),
3434 BPF_ALU32_IMM(BPF_MOV, R8, 8),
3435 BPF_ALU32_IMM(BPF_MOV, R9, 9),
3436 BPF_ALU64_IMM(BPF_DIV, R1, 123456789),
3437 BPF_JMP_IMM(BPF_JNE, R0, 0, 10),
3438 BPF_JMP_IMM(BPF_JNE, R1, 1, 9),
3439 BPF_JMP_IMM(BPF_JNE, R2, 2, 8),
3440 BPF_JMP_IMM(BPF_JNE, R3, 3, 7),
3441 BPF_JMP_IMM(BPF_JNE, R4, 4, 6),
3442 BPF_JMP_IMM(BPF_JNE, R5, 5, 5),
3443 BPF_JMP_IMM(BPF_JNE, R6, 6, 4),
3444 BPF_JMP_IMM(BPF_JNE, R7, 7, 3),
3445 BPF_JMP_IMM(BPF_JNE, R8, 8, 2),
3446 BPF_JMP_IMM(BPF_JNE, R9, 9, 1),
3447 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3448 BPF_EXIT_INSN(),
3449 },
3450 INTERNAL,
3451 { },
3452 { { 0, 1 } }
3453 },
3454 {
3455 "INT: Register clobbering, R2 updated",
3456 .u.insns_int = {
3457 BPF_ALU32_IMM(BPF_MOV, R0, 0),
3458 BPF_ALU32_IMM(BPF_MOV, R1, 1),
3459 BPF_ALU32_IMM(BPF_MOV, R2, 2 * 123456789),
3460 BPF_ALU32_IMM(BPF_MOV, R3, 3),
3461 BPF_ALU32_IMM(BPF_MOV, R4, 4),
3462 BPF_ALU32_IMM(BPF_MOV, R5, 5),
3463 BPF_ALU32_IMM(BPF_MOV, R6, 6),
3464 BPF_ALU32_IMM(BPF_MOV, R7, 7),
3465 BPF_ALU32_IMM(BPF_MOV, R8, 8),
3466 BPF_ALU32_IMM(BPF_MOV, R9, 9),
3467 BPF_ALU64_IMM(BPF_DIV, R2, 123456789),
3468 BPF_JMP_IMM(BPF_JNE, R0, 0, 10),
3469 BPF_JMP_IMM(BPF_JNE, R1, 1, 9),
3470 BPF_JMP_IMM(BPF_JNE, R2, 2, 8),
3471 BPF_JMP_IMM(BPF_JNE, R3, 3, 7),
3472 BPF_JMP_IMM(BPF_JNE, R4, 4, 6),
3473 BPF_JMP_IMM(BPF_JNE, R5, 5, 5),
3474 BPF_JMP_IMM(BPF_JNE, R6, 6, 4),
3475 BPF_JMP_IMM(BPF_JNE, R7, 7, 3),
3476 BPF_JMP_IMM(BPF_JNE, R8, 8, 2),
3477 BPF_JMP_IMM(BPF_JNE, R9, 9, 1),
3478 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3479 BPF_EXIT_INSN(),
3480 },
3481 INTERNAL,
3482 { },
3483 { { 0, 1 } }
3484 },
3485 {
3486 /*
3487 * Test 32-bit JITs that implement complex ALU64 operations as
3488 * function calls R0 = f(R1, R2), and must re-arrange operands.
3489 */
3490#define NUMER 0xfedcba9876543210ULL
3491#define DENOM 0x0123456789abcdefULL
3492 "ALU64_DIV X: Operand register permutations",
3493 .u.insns_int = {
3494 /* R0 / R2 */
3495 BPF_LD_IMM64(R0, NUMER),
3496 BPF_LD_IMM64(R2, DENOM),
3497 BPF_ALU64_REG(BPF_DIV, R0, R2),
3498 BPF_JMP_IMM(BPF_JEQ, R0, NUMER / DENOM, 1),
3499 BPF_EXIT_INSN(),
3500 /* R1 / R0 */
3501 BPF_LD_IMM64(R1, NUMER),
3502 BPF_LD_IMM64(R0, DENOM),
3503 BPF_ALU64_REG(BPF_DIV, R1, R0),
3504 BPF_JMP_IMM(BPF_JEQ, R1, NUMER / DENOM, 1),
3505 BPF_EXIT_INSN(),
3506 /* R0 / R1 */
3507 BPF_LD_IMM64(R0, NUMER),
3508 BPF_LD_IMM64(R1, DENOM),
3509 BPF_ALU64_REG(BPF_DIV, R0, R1),
3510 BPF_JMP_IMM(BPF_JEQ, R0, NUMER / DENOM, 1),
3511 BPF_EXIT_INSN(),
3512 /* R2 / R0 */
3513 BPF_LD_IMM64(R2, NUMER),
3514 BPF_LD_IMM64(R0, DENOM),
3515 BPF_ALU64_REG(BPF_DIV, R2, R0),
3516 BPF_JMP_IMM(BPF_JEQ, R2, NUMER / DENOM, 1),
3517 BPF_EXIT_INSN(),
3518 /* R2 / R1 */
3519 BPF_LD_IMM64(R2, NUMER),
3520 BPF_LD_IMM64(R1, DENOM),
3521 BPF_ALU64_REG(BPF_DIV, R2, R1),
3522 BPF_JMP_IMM(BPF_JEQ, R2, NUMER / DENOM, 1),
3523 BPF_EXIT_INSN(),
3524 /* R1 / R2 */
3525 BPF_LD_IMM64(R1, NUMER),
3526 BPF_LD_IMM64(R2, DENOM),
3527 BPF_ALU64_REG(BPF_DIV, R1, R2),
3528 BPF_JMP_IMM(BPF_JEQ, R1, NUMER / DENOM, 1),
3529 BPF_EXIT_INSN(),
3530 /* R1 / R1 */
3531 BPF_LD_IMM64(R1, NUMER),
3532 BPF_ALU64_REG(BPF_DIV, R1, R1),
3533 BPF_JMP_IMM(BPF_JEQ, R1, 1, 1),
3534 BPF_EXIT_INSN(),
3535 /* R2 / R2 */
3536 BPF_LD_IMM64(R2, DENOM),
3537 BPF_ALU64_REG(BPF_DIV, R2, R2),
3538 BPF_JMP_IMM(BPF_JEQ, R2, 1, 1),
3539 BPF_EXIT_INSN(),
3540 /* R3 / R4 */
3541 BPF_LD_IMM64(R3, NUMER),
3542 BPF_LD_IMM64(R4, DENOM),
3543 BPF_ALU64_REG(BPF_DIV, R3, R4),
3544 BPF_JMP_IMM(BPF_JEQ, R3, NUMER / DENOM, 1),
3545 BPF_EXIT_INSN(),
3546 /* Successful return */
3547 BPF_LD_IMM64(R0, 1),
3548 BPF_EXIT_INSN(),
3549 },
3550 INTERNAL,
3551 { },
3552 { { 0, 1 } },
3553#undef NUMER
3554#undef DENOM
3555 },
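	/*
	 * Note: the operand permutations above matter mostly for 32-bit
	 * JITs that call out to a divide helper with a fixed calling
	 * convention (dividend in one register pair, divisor in another,
	 * quotient back in the first). Each R<n>/R<m> pairing, including
	 * the aliased R1/R1 and R2/R2 cases, forces a different shuffle
	 * of eBPF registers into and out of those fixed slots.
	 */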
Johan Almbladh53e33f92021-08-09 11:18:26 +02003556#ifdef CONFIG_32BIT
3557 {
3558 "INT: 32-bit context pointer word order and zero-extension",
3559 .u.insns_int = {
3560 BPF_ALU32_IMM(BPF_MOV, R0, 0),
3561 BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
3562 BPF_ALU64_IMM(BPF_RSH, R1, 32),
3563 BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
3564 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3565 BPF_EXIT_INSN(),
3566 },
3567 INTERNAL,
3568 { },
3569 { { 0, 1 } }
3570 },
3571#endif
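	/*
	 * Note: the CONFIG_32BIT-only test above relies on the context
	 * pointer in R1 occupying just the low 32-bit word on a 32-bit
	 * target; it checks both that the low word is non-zero and that
	 * the high word reads back as zero.
	 */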
Johan Almbladh84024a42021-08-09 11:18:23 +02003572 {
Alexei Starovoitov64a89462014-05-08 14:10:52 -07003573 "check: missing ret",
Andrew Mortonece80492014-05-22 10:16:46 -07003574 .u.insns = {
Alexei Starovoitov64a89462014-05-08 14:10:52 -07003575 BPF_STMT(BPF_LD | BPF_IMM, 1),
3576 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02003577 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
Alexei Starovoitov64a89462014-05-08 14:10:52 -07003578 { },
Yonghong Song09584b42018-02-02 22:37:15 -08003579 { },
3580 .fill_helper = NULL,
3581 .expected_errcode = -EINVAL,
Alexei Starovoitov64a89462014-05-08 14:10:52 -07003582 },
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003583 {
3584 "check: div_k_0",
Andrew Mortonece80492014-05-22 10:16:46 -07003585 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003586 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
3587 BPF_STMT(BPF_RET | BPF_K, 0)
3588 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02003589 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003590 { },
Yonghong Song09584b42018-02-02 22:37:15 -08003591 { },
3592 .fill_helper = NULL,
3593 .expected_errcode = -EINVAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003594 },
3595 {
3596 "check: unknown insn",
Andrew Mortonece80492014-05-22 10:16:46 -07003597 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003598 /* seccomp insn, rejected in socket filter */
3599 BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
3600 BPF_STMT(BPF_RET | BPF_K, 0)
3601 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02003602 CLASSIC | FLAG_EXPECTED_FAIL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003603 { },
Yonghong Song09584b42018-02-02 22:37:15 -08003604 { },
3605 .fill_helper = NULL,
3606 .expected_errcode = -EINVAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003607 },
3608 {
3609 "check: out of range spill/fill",
Andrew Mortonece80492014-05-22 10:16:46 -07003610 .u.insns = {
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003611 BPF_STMT(BPF_STX, 16),
3612 BPF_STMT(BPF_RET | BPF_K, 0)
3613 },
Daniel Borkmann10f18e02014-05-23 18:44:00 +02003614 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003615 { },
Yonghong Song09584b42018-02-02 22:37:15 -08003616 { },
3617 .fill_helper = NULL,
3618 .expected_errcode = -EINVAL,
Alexei Starovoitov9def6242014-05-08 14:10:53 -07003619 },
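	/*
	 * Note: the four "check:" cases above are negative tests. The
	 * classic BPF checker is expected to reject them at load time
	 * (FLAG_EXPECTED_FAIL with -EINVAL): a missing RET, a division by
	 * the constant 0, a seccomp-only instruction, and a store to
	 * scratch slot 16 when only M[0]..M[15] exist.
	 */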
Daniel Borkmann2e8a83c2014-05-23 18:44:01 +02003620 {
3621 "JUMPS + HOLES",
3622 .u.insns = {
3623 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3624 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
3625 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3626 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3627 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3628 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3629 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3630 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3631 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3632 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3633 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3634 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3635 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3636 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3637 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3638 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
3639 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3640 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
3641 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3642 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
3643 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
3644 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3645 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3646 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3647 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3648 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3649 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3650 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3651 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3652 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3653 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3654 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3655 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3656 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3657 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
3658 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
3659 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3660 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
3661 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
3662 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3663 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3664 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3665 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3666 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3667 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3668 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3669 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3670 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3671 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3672 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3673 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3674 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3675 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
3676 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
3677 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
3678 BPF_STMT(BPF_RET | BPF_A, 0),
3679 BPF_STMT(BPF_RET | BPF_A, 0),
3680 },
3681 CLASSIC,
Daniel Borkmannce25b682014-05-26 20:17:35 +02003682 { 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
3683 0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
3684 0x08, 0x00,
3685 0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
3686 0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
3687 0xc0, 0xa8, 0x33, 0x01,
3688 0xc0, 0xa8, 0x33, 0x02,
3689 0xbb, 0xb6,
3690 0xa9, 0xfa,
3691 0x00, 0x14, 0x00, 0x00,
3692 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
3693 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
3694 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
3695 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
3696 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
3697 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
3698 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
3699 0xcc, 0xcc, 0xcc, 0xcc },
Daniel Borkmann2e8a83c2014-05-23 18:44:01 +02003700 { { 88, 0x001b } }
3701 },
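	/*
	 * Note: "JUMPS + HOLES" mainly stresses branch-offset fixup in
	 * JITs: long runs of loads with conditional jumps whose targets
	 * skip across them. The test packet is a small Ethernet/IPv4/UDP
	 * frame, and the expected value 0x001b is simply the half-word at
	 * offset 0, i.e. the start of the destination MAC address.
	 */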
3702 {
3703 "check: RET X",
3704 .u.insns = {
3705 BPF_STMT(BPF_RET | BPF_X, 0),
3706 },
3707 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
3708 { },
3709 { },
Yonghong Song09584b42018-02-02 22:37:15 -08003710 .fill_helper = NULL,
3711 .expected_errcode = -EINVAL,
Daniel Borkmann2e8a83c2014-05-23 18:44:01 +02003712 },
3713 {
3714 "check: LDX + RET X",
3715 .u.insns = {
3716 BPF_STMT(BPF_LDX | BPF_IMM, 42),
3717 BPF_STMT(BPF_RET | BPF_X, 0),
3718 },
3719 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
3720 { },
3721 { },
Yonghong Song09584b42018-02-02 22:37:15 -08003722 .fill_helper = NULL,
3723 .expected_errcode = -EINVAL,
Daniel Borkmann2e8a83c2014-05-23 18:44:01 +02003724 },
Daniel Borkmann108cc222014-05-26 20:17:34 +02003725 { /* Mainly checking JIT here. */
Daniel Borkmann9fe13ba2014-05-29 10:22:48 +02003726 "M[]: alt STX + LDX",
Daniel Borkmann108cc222014-05-26 20:17:34 +02003727 .u.insns = {
3728 BPF_STMT(BPF_LDX | BPF_IMM, 100),
3729 BPF_STMT(BPF_STX, 0),
3730 BPF_STMT(BPF_LDX | BPF_MEM, 0),
3731 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3732 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3733 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3734 BPF_STMT(BPF_STX, 1),
3735 BPF_STMT(BPF_LDX | BPF_MEM, 1),
3736 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3737 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3738 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3739 BPF_STMT(BPF_STX, 2),
3740 BPF_STMT(BPF_LDX | BPF_MEM, 2),
3741 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3742 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3743 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3744 BPF_STMT(BPF_STX, 3),
3745 BPF_STMT(BPF_LDX | BPF_MEM, 3),
3746 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3747 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3748 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3749 BPF_STMT(BPF_STX, 4),
3750 BPF_STMT(BPF_LDX | BPF_MEM, 4),
3751 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3752 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3753 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3754 BPF_STMT(BPF_STX, 5),
3755 BPF_STMT(BPF_LDX | BPF_MEM, 5),
3756 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3757 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3758 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3759 BPF_STMT(BPF_STX, 6),
3760 BPF_STMT(BPF_LDX | BPF_MEM, 6),
3761 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3762 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3763 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3764 BPF_STMT(BPF_STX, 7),
3765 BPF_STMT(BPF_LDX | BPF_MEM, 7),
3766 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3767 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3768 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3769 BPF_STMT(BPF_STX, 8),
3770 BPF_STMT(BPF_LDX | BPF_MEM, 8),
3771 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3772 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3773 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3774 BPF_STMT(BPF_STX, 9),
3775 BPF_STMT(BPF_LDX | BPF_MEM, 9),
3776 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3777 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3778 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3779 BPF_STMT(BPF_STX, 10),
3780 BPF_STMT(BPF_LDX | BPF_MEM, 10),
3781 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3782 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3783 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3784 BPF_STMT(BPF_STX, 11),
3785 BPF_STMT(BPF_LDX | BPF_MEM, 11),
3786 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3787 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3788 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3789 BPF_STMT(BPF_STX, 12),
3790 BPF_STMT(BPF_LDX | BPF_MEM, 12),
3791 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3792 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3793 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3794 BPF_STMT(BPF_STX, 13),
3795 BPF_STMT(BPF_LDX | BPF_MEM, 13),
3796 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3797 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3798 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3799 BPF_STMT(BPF_STX, 14),
3800 BPF_STMT(BPF_LDX | BPF_MEM, 14),
3801 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3802 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3803 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3804 BPF_STMT(BPF_STX, 15),
3805 BPF_STMT(BPF_LDX | BPF_MEM, 15),
3806 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3807 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
3808 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3809 BPF_STMT(BPF_RET | BPF_A, 0),
3810 },
3811 CLASSIC | FLAG_NO_DATA,
3812 { },
3813 { { 0, 116 } },
3814 },
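	/*
	 * Note: classic BPF has BPF_MEMWORDS (16) scratch slots. The test
	 * above walks all of them, bumping the value by one per slot, so
	 * the expected return value is 100 + 16 = 116.
	 */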
Daniel Borkmann9fe13ba2014-05-29 10:22:48 +02003815 { /* Mainly checking JIT here. */
3816 "M[]: full STX + full LDX",
3817 .u.insns = {
3818 BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
3819 BPF_STMT(BPF_STX, 0),
3820 BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
3821 BPF_STMT(BPF_STX, 1),
3822 BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
3823 BPF_STMT(BPF_STX, 2),
3824 BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
3825 BPF_STMT(BPF_STX, 3),
3826 BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
3827 BPF_STMT(BPF_STX, 4),
3828 BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
3829 BPF_STMT(BPF_STX, 5),
3830 BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
3831 BPF_STMT(BPF_STX, 6),
3832 BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
3833 BPF_STMT(BPF_STX, 7),
3834 BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
3835 BPF_STMT(BPF_STX, 8),
3836 BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
3837 BPF_STMT(BPF_STX, 9),
3838 BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
3839 BPF_STMT(BPF_STX, 10),
3840 BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
3841 BPF_STMT(BPF_STX, 11),
3842 BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
3843 BPF_STMT(BPF_STX, 12),
3844 BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
3845 BPF_STMT(BPF_STX, 13),
3846 BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
3847 BPF_STMT(BPF_STX, 14),
3848 BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
3849 BPF_STMT(BPF_STX, 15),
3850 BPF_STMT(BPF_LDX | BPF_MEM, 0),
3851 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3852 BPF_STMT(BPF_LDX | BPF_MEM, 1),
3853 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3854 BPF_STMT(BPF_LDX | BPF_MEM, 2),
3855 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3856 BPF_STMT(BPF_LDX | BPF_MEM, 3),
3857 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3858 BPF_STMT(BPF_LDX | BPF_MEM, 4),
3859 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3860 BPF_STMT(BPF_LDX | BPF_MEM, 5),
3861 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3862 BPF_STMT(BPF_LDX | BPF_MEM, 6),
3863 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3864 BPF_STMT(BPF_LDX | BPF_MEM, 7),
3865 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3866 BPF_STMT(BPF_LDX | BPF_MEM, 8),
3867 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3868 BPF_STMT(BPF_LDX | BPF_MEM, 9),
3869 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3870 BPF_STMT(BPF_LDX | BPF_MEM, 10),
3871 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3872 BPF_STMT(BPF_LDX | BPF_MEM, 11),
3873 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3874 BPF_STMT(BPF_LDX | BPF_MEM, 12),
3875 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3876 BPF_STMT(BPF_LDX | BPF_MEM, 13),
3877 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3878 BPF_STMT(BPF_LDX | BPF_MEM, 14),
3879 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3880 BPF_STMT(BPF_LDX | BPF_MEM, 15),
3881 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3882 BPF_STMT(BPF_RET | BPF_A, 0),
3883 },
3884 CLASSIC | FLAG_NO_DATA,
3885 { },
3886 { { 0, 0x2a5a5e5 } },
3887 },
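	/*
	 * Note: here every scratch slot is written with a distinct
	 * constant and read back again; the expected 0x2a5a5e5 is the
	 * 32-bit wrap-around sum of the 16 stored words.
	 */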
Daniel Borkmannd50bc152014-05-29 10:22:49 +02003888 {
3889 "check: SKF_AD_MAX",
3890 .u.insns = {
3891 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3892 SKF_AD_OFF + SKF_AD_MAX),
3893 BPF_STMT(BPF_RET | BPF_A, 0),
3894 },
3895 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
3896 { },
3897 { },
Yonghong Song09584b42018-02-02 22:37:15 -08003898 .fill_helper = NULL,
3899 .expected_errcode = -EINVAL,
Daniel Borkmannd50bc152014-05-29 10:22:49 +02003900 },
3901 { /* Passes checker but fails during runtime. */
3902 "LD [SKF_AD_OFF-1]",
3903 .u.insns = {
3904 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3905 SKF_AD_OFF - 1),
3906 BPF_STMT(BPF_RET | BPF_K, 1),
3907 },
3908 CLASSIC,
3909 { },
3910 { { 1, 0 } },
3911 },
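	/*
	 * Note: SKF_AD_OFF - 1 is not a recognised ancillary offset, so
	 * the load above falls back to an absolute packet load far
	 * outside the one-byte test packet. The checker accepts the
	 * program, but the load fails at run time and the filter returns
	 * 0, which is what the { 1, 0 } expectation encodes.
	 */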
Alexei Starovoitov02ab6952014-09-04 22:17:17 -07003912 {
3913 "load 64-bit immediate",
3914 .u.insns_int = {
Alexei Starovoitov25ee7322014-09-19 13:53:51 -07003915 BPF_LD_IMM64(R1, 0x567800001234LL),
Alexei Starovoitov02ab6952014-09-04 22:17:17 -07003916 BPF_MOV64_REG(R2, R1),
3917 BPF_MOV64_REG(R3, R2),
3918 BPF_ALU64_IMM(BPF_RSH, R2, 32),
3919 BPF_ALU64_IMM(BPF_LSH, R3, 32),
3920 BPF_ALU64_IMM(BPF_RSH, R3, 32),
3921 BPF_ALU64_IMM(BPF_MOV, R0, 0),
3922 BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
3923 BPF_EXIT_INSN(),
3924 BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
3925 BPF_EXIT_INSN(),
Xi Wang986ccfd2015-05-09 04:14:30 -04003926 BPF_LD_IMM64(R0, 0x1ffffffffLL),
3927 BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
Alexei Starovoitov02ab6952014-09-04 22:17:17 -07003928 BPF_EXIT_INSN(),
3929 },
3930 INTERNAL,
3931 { },
3932 { { 0, 1 } }
3933 },
Michael Holzheucffc6422015-05-11 22:22:44 -07003934 /* BPF_ALU | BPF_MOV | BPF_X */
3935 {
3936 "ALU_MOV_X: dst = 2",
3937 .u.insns_int = {
3938 BPF_ALU32_IMM(BPF_MOV, R1, 2),
3939 BPF_ALU32_REG(BPF_MOV, R0, R1),
3940 BPF_EXIT_INSN(),
3941 },
3942 INTERNAL,
3943 { },
3944 { { 0, 2 } },
3945 },
3946 {
3947 "ALU_MOV_X: dst = 4294967295",
3948 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07003949 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
Michael Holzheucffc6422015-05-11 22:22:44 -07003950 BPF_ALU32_REG(BPF_MOV, R0, R1),
3951 BPF_EXIT_INSN(),
3952 },
3953 INTERNAL,
3954 { },
Michael Holzheu56cbaa42015-05-13 20:40:39 -07003955 { { 0, 4294967295U } },
Michael Holzheucffc6422015-05-11 22:22:44 -07003956 },
3957 {
3958 "ALU64_MOV_X: dst = 2",
3959 .u.insns_int = {
3960 BPF_ALU32_IMM(BPF_MOV, R1, 2),
3961 BPF_ALU64_REG(BPF_MOV, R0, R1),
3962 BPF_EXIT_INSN(),
3963 },
3964 INTERNAL,
3965 { },
3966 { { 0, 2 } },
3967 },
3968 {
3969 "ALU64_MOV_X: dst = 4294967295",
3970 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07003971 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
Michael Holzheucffc6422015-05-11 22:22:44 -07003972 BPF_ALU64_REG(BPF_MOV, R0, R1),
3973 BPF_EXIT_INSN(),
3974 },
3975 INTERNAL,
3976 { },
Michael Holzheu56cbaa42015-05-13 20:40:39 -07003977 { { 0, 4294967295U } },
Michael Holzheucffc6422015-05-11 22:22:44 -07003978 },
3979 /* BPF_ALU | BPF_MOV | BPF_K */
3980 {
3981 "ALU_MOV_K: dst = 2",
3982 .u.insns_int = {
3983 BPF_ALU32_IMM(BPF_MOV, R0, 2),
3984 BPF_EXIT_INSN(),
3985 },
3986 INTERNAL,
3987 { },
3988 { { 0, 2 } },
3989 },
3990 {
3991 "ALU_MOV_K: dst = 4294967295",
3992 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07003993 BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
Michael Holzheucffc6422015-05-11 22:22:44 -07003994 BPF_EXIT_INSN(),
3995 },
3996 INTERNAL,
3997 { },
Michael Holzheu56cbaa42015-05-13 20:40:39 -07003998 { { 0, 4294967295U } },
Michael Holzheucffc6422015-05-11 22:22:44 -07003999 },
4000 {
4001 "ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
4002 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004003 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4004 BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07004005 BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
4006 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4007 BPF_MOV32_IMM(R0, 2),
4008 BPF_EXIT_INSN(),
4009 BPF_MOV32_IMM(R0, 1),
4010 BPF_EXIT_INSN(),
4011 },
4012 INTERNAL,
4013 { },
4014 { { 0, 0x1 } },
4015 },
4016 {
Johan Almbladh565731a2021-08-09 11:18:17 +02004017 "ALU_MOV_K: small negative",
4018 .u.insns_int = {
4019 BPF_ALU32_IMM(BPF_MOV, R0, -123),
4020 BPF_EXIT_INSN(),
4021 },
4022 INTERNAL,
4023 { },
4024 { { 0, -123 } }
4025 },
4026 {
4027 "ALU_MOV_K: small negative zero extension",
4028 .u.insns_int = {
4029 BPF_ALU32_IMM(BPF_MOV, R0, -123),
4030 BPF_ALU64_IMM(BPF_RSH, R0, 32),
4031 BPF_EXIT_INSN(),
4032 },
4033 INTERNAL,
4034 { },
4035 { { 0, 0 } }
4036 },
4037 {
4038 "ALU_MOV_K: large negative",
4039 .u.insns_int = {
4040 BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
4041 BPF_EXIT_INSN(),
4042 },
4043 INTERNAL,
4044 { },
4045 { { 0, -123456789 } }
4046 },
4047 {
4048 "ALU_MOV_K: large negative zero extension",
4049 .u.insns_int = {
4050 BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
4051 BPF_ALU64_IMM(BPF_RSH, R0, 32),
4052 BPF_EXIT_INSN(),
4053 },
4054 INTERNAL,
4055 { },
4056 { { 0, 0 } }
4057 },
4058 {
Michael Holzheucffc6422015-05-11 22:22:44 -07004059 "ALU64_MOV_K: dst = 2",
4060 .u.insns_int = {
4061 BPF_ALU64_IMM(BPF_MOV, R0, 2),
4062 BPF_EXIT_INSN(),
4063 },
4064 INTERNAL,
4065 { },
4066 { { 0, 2 } },
4067 },
4068 {
4069 "ALU64_MOV_K: dst = 2147483647",
4070 .u.insns_int = {
4071 BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
4072 BPF_EXIT_INSN(),
4073 },
4074 INTERNAL,
4075 { },
4076 { { 0, 2147483647 } },
4077 },
4078 {
4079 "ALU64_OR_K: dst = 0x0",
4080 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004081 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
Michael Holzheucffc6422015-05-11 22:22:44 -07004082 BPF_LD_IMM64(R3, 0x0),
4083 BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
4084 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4085 BPF_MOV32_IMM(R0, 2),
4086 BPF_EXIT_INSN(),
4087 BPF_MOV32_IMM(R0, 1),
4088 BPF_EXIT_INSN(),
4089 },
4090 INTERNAL,
4091 { },
4092 { { 0, 0x1 } },
4093 },
4094 {
4095 "ALU64_MOV_K: dst = -1",
4096 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004097 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4098 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07004099 BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
4100 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4101 BPF_MOV32_IMM(R0, 2),
4102 BPF_EXIT_INSN(),
4103 BPF_MOV32_IMM(R0, 1),
4104 BPF_EXIT_INSN(),
4105 },
4106 INTERNAL,
4107 { },
4108 { { 0, 0x1 } },
4109 },
Johan Almbladh565731a2021-08-09 11:18:17 +02004110 {
4111 "ALU64_MOV_K: small negative",
4112 .u.insns_int = {
4113 BPF_ALU64_IMM(BPF_MOV, R0, -123),
4114 BPF_EXIT_INSN(),
4115 },
4116 INTERNAL,
4117 { },
4118 { { 0, -123 } }
4119 },
4120 {
4121 "ALU64_MOV_K: small negative sign extension",
4122 .u.insns_int = {
4123 BPF_ALU64_IMM(BPF_MOV, R0, -123),
4124 BPF_ALU64_IMM(BPF_RSH, R0, 32),
4125 BPF_EXIT_INSN(),
4126 },
4127 INTERNAL,
4128 { },
4129 { { 0, 0xffffffff } }
4130 },
4131 {
4132 "ALU64_MOV_K: large negative",
4133 .u.insns_int = {
4134 BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
4135 BPF_EXIT_INSN(),
4136 },
4137 INTERNAL,
4138 { },
4139 { { 0, -123456789 } }
4140 },
4141 {
4142 "ALU64_MOV_K: large negative sign extension",
4143 .u.insns_int = {
4144 BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
4145 BPF_ALU64_IMM(BPF_RSH, R0, 32),
4146 BPF_EXIT_INSN(),
4147 },
4148 INTERNAL,
4149 { },
4150 { { 0, 0xffffffff } }
4151 },
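	/*
	 * Note on the MOV_K pairs above: a 32-bit move writes the
	 * immediate into the low word and clears the upper word, so a
	 * 64-bit right shift by 32 yields 0; a 64-bit move sign-extends
	 * the 32-bit immediate, so for a negative constant the same shift
	 * yields 0xffffffff. In C terms, roughly:
	 *
	 *	u64 a = (u32)-123;		-> 0x00000000ffffff85
	 *	u64 b = (s64)(s32)-123;		-> 0xffffffffffffff85
	 */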
Michael Holzheucffc6422015-05-11 22:22:44 -07004152 /* BPF_ALU | BPF_ADD | BPF_X */
4153 {
4154 "ALU_ADD_X: 1 + 2 = 3",
4155 .u.insns_int = {
4156 BPF_LD_IMM64(R0, 1),
4157 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4158 BPF_ALU32_REG(BPF_ADD, R0, R1),
4159 BPF_EXIT_INSN(),
4160 },
4161 INTERNAL,
4162 { },
4163 { { 0, 3 } },
4164 },
4165 {
4166 "ALU_ADD_X: 1 + 4294967294 = 4294967295",
4167 .u.insns_int = {
4168 BPF_LD_IMM64(R0, 1),
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004169 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004170 BPF_ALU32_REG(BPF_ADD, R0, R1),
4171 BPF_EXIT_INSN(),
4172 },
4173 INTERNAL,
4174 { },
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004175 { { 0, 4294967295U } },
Michael Holzheucffc6422015-05-11 22:22:44 -07004176 },
4177 {
Naveen N. Raob64b50e2016-04-05 15:32:55 +05304178 "ALU_ADD_X: 2 + 4294967294 = 0",
4179 .u.insns_int = {
4180 BPF_LD_IMM64(R0, 2),
4181 BPF_LD_IMM64(R1, 4294967294U),
4182 BPF_ALU32_REG(BPF_ADD, R0, R1),
4183 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
4184 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4185 BPF_EXIT_INSN(),
4186 BPF_ALU32_IMM(BPF_MOV, R0, 1),
4187 BPF_EXIT_INSN(),
4188 },
4189 INTERNAL,
4190 { },
4191 { { 0, 1 } },
4192 },
4193 {
Michael Holzheucffc6422015-05-11 22:22:44 -07004194 "ALU64_ADD_X: 1 + 2 = 3",
4195 .u.insns_int = {
4196 BPF_LD_IMM64(R0, 1),
4197 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4198 BPF_ALU64_REG(BPF_ADD, R0, R1),
4199 BPF_EXIT_INSN(),
4200 },
4201 INTERNAL,
4202 { },
4203 { { 0, 3 } },
4204 },
4205 {
4206 "ALU64_ADD_X: 1 + 4294967294 = 4294967295",
4207 .u.insns_int = {
4208 BPF_LD_IMM64(R0, 1),
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004209 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004210 BPF_ALU64_REG(BPF_ADD, R0, R1),
4211 BPF_EXIT_INSN(),
4212 },
4213 INTERNAL,
4214 { },
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004215 { { 0, 4294967295U } },
Michael Holzheucffc6422015-05-11 22:22:44 -07004216 },
Naveen N. Raob64b50e2016-04-05 15:32:55 +05304217 {
4218 "ALU64_ADD_X: 2 + 4294967294 = 4294967296",
4219 .u.insns_int = {
4220 BPF_LD_IMM64(R0, 2),
4221 BPF_LD_IMM64(R1, 4294967294U),
4222 BPF_LD_IMM64(R2, 4294967296ULL),
4223 BPF_ALU64_REG(BPF_ADD, R0, R1),
4224 BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
4225 BPF_MOV32_IMM(R0, 0),
4226 BPF_EXIT_INSN(),
4227 BPF_MOV32_IMM(R0, 1),
4228 BPF_EXIT_INSN(),
4229 },
4230 INTERNAL,
4231 { },
4232 { { 0, 1 } },
4233 },
Michael Holzheucffc6422015-05-11 22:22:44 -07004234 /* BPF_ALU | BPF_ADD | BPF_K */
4235 {
4236 "ALU_ADD_K: 1 + 2 = 3",
4237 .u.insns_int = {
4238 BPF_LD_IMM64(R0, 1),
4239 BPF_ALU32_IMM(BPF_ADD, R0, 2),
4240 BPF_EXIT_INSN(),
4241 },
4242 INTERNAL,
4243 { },
4244 { { 0, 3 } },
4245 },
4246 {
4247 "ALU_ADD_K: 3 + 0 = 3",
4248 .u.insns_int = {
4249 BPF_LD_IMM64(R0, 3),
4250 BPF_ALU32_IMM(BPF_ADD, R0, 0),
4251 BPF_EXIT_INSN(),
4252 },
4253 INTERNAL,
4254 { },
4255 { { 0, 3 } },
4256 },
4257 {
4258 "ALU_ADD_K: 1 + 4294967294 = 4294967295",
4259 .u.insns_int = {
4260 BPF_LD_IMM64(R0, 1),
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004261 BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004262 BPF_EXIT_INSN(),
4263 },
4264 INTERNAL,
4265 { },
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004266 { { 0, 4294967295U } },
Michael Holzheucffc6422015-05-11 22:22:44 -07004267 },
4268 {
Naveen N. Raob64b50e2016-04-05 15:32:55 +05304269 "ALU_ADD_K: 4294967294 + 2 = 0",
4270 .u.insns_int = {
4271 BPF_LD_IMM64(R0, 4294967294U),
4272 BPF_ALU32_IMM(BPF_ADD, R0, 2),
4273 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
4274 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4275 BPF_EXIT_INSN(),
4276 BPF_ALU32_IMM(BPF_MOV, R0, 1),
4277 BPF_EXIT_INSN(),
4278 },
4279 INTERNAL,
4280 { },
4281 { { 0, 1 } },
4282 },
4283 {
Michael Holzheucffc6422015-05-11 22:22:44 -07004284 "ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
4285 .u.insns_int = {
4286 BPF_LD_IMM64(R2, 0x0),
4287 BPF_LD_IMM64(R3, 0x00000000ffffffff),
4288 BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
4289 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4290 BPF_MOV32_IMM(R0, 2),
4291 BPF_EXIT_INSN(),
4292 BPF_MOV32_IMM(R0, 1),
4293 BPF_EXIT_INSN(),
4294 },
4295 INTERNAL,
4296 { },
4297 { { 0, 0x1 } },
4298 },
4299 {
Naveen N. Rao9c94f6c2016-04-05 15:32:56 +05304300 "ALU_ADD_K: 0 + 0xffff = 0xffff",
4301 .u.insns_int = {
4302 BPF_LD_IMM64(R2, 0x0),
4303 BPF_LD_IMM64(R3, 0xffff),
4304 BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
4305 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4306 BPF_MOV32_IMM(R0, 2),
4307 BPF_EXIT_INSN(),
4308 BPF_MOV32_IMM(R0, 1),
4309 BPF_EXIT_INSN(),
4310 },
4311 INTERNAL,
4312 { },
4313 { { 0, 0x1 } },
4314 },
4315 {
4316 "ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
4317 .u.insns_int = {
4318 BPF_LD_IMM64(R2, 0x0),
4319 BPF_LD_IMM64(R3, 0x7fffffff),
4320 BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
4321 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4322 BPF_MOV32_IMM(R0, 2),
4323 BPF_EXIT_INSN(),
4324 BPF_MOV32_IMM(R0, 1),
4325 BPF_EXIT_INSN(),
4326 },
4327 INTERNAL,
4328 { },
4329 { { 0, 0x1 } },
4330 },
4331 {
4332 "ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
4333 .u.insns_int = {
4334 BPF_LD_IMM64(R2, 0x0),
4335 BPF_LD_IMM64(R3, 0x80000000),
4336 BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
4337 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4338 BPF_MOV32_IMM(R0, 2),
4339 BPF_EXIT_INSN(),
4340 BPF_MOV32_IMM(R0, 1),
4341 BPF_EXIT_INSN(),
4342 },
4343 INTERNAL,
4344 { },
4345 { { 0, 0x1 } },
4346 },
4347 {
4348 "ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
4349 .u.insns_int = {
4350 BPF_LD_IMM64(R2, 0x0),
4351 BPF_LD_IMM64(R3, 0x80008000),
4352 BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
4353 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4354 BPF_MOV32_IMM(R0, 2),
4355 BPF_EXIT_INSN(),
4356 BPF_MOV32_IMM(R0, 1),
4357 BPF_EXIT_INSN(),
4358 },
4359 INTERNAL,
4360 { },
4361 { { 0, 0x1 } },
4362 },
4363 {
Michael Holzheucffc6422015-05-11 22:22:44 -07004364 "ALU64_ADD_K: 1 + 2 = 3",
4365 .u.insns_int = {
4366 BPF_LD_IMM64(R0, 1),
4367 BPF_ALU64_IMM(BPF_ADD, R0, 2),
4368 BPF_EXIT_INSN(),
4369 },
4370 INTERNAL,
4371 { },
4372 { { 0, 3 } },
4373 },
4374 {
4375 "ALU64_ADD_K: 3 + 0 = 3",
4376 .u.insns_int = {
4377 BPF_LD_IMM64(R0, 3),
4378 BPF_ALU64_IMM(BPF_ADD, R0, 0),
4379 BPF_EXIT_INSN(),
4380 },
4381 INTERNAL,
4382 { },
4383 { { 0, 3 } },
4384 },
4385 {
4386 "ALU64_ADD_K: 1 + 2147483646 = 2147483647",
4387 .u.insns_int = {
4388 BPF_LD_IMM64(R0, 1),
4389 BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
4390 BPF_EXIT_INSN(),
4391 },
4392 INTERNAL,
4393 { },
4394 { { 0, 2147483647 } },
4395 },
4396 {
Naveen N. Raob64b50e2016-04-05 15:32:55 +05304397 "ALU64_ADD_K: 4294967294 + 2 = 4294967296",
4398 .u.insns_int = {
4399 BPF_LD_IMM64(R0, 4294967294U),
4400 BPF_LD_IMM64(R1, 4294967296ULL),
4401 BPF_ALU64_IMM(BPF_ADD, R0, 2),
4402 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
4403 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4404 BPF_EXIT_INSN(),
4405 BPF_ALU32_IMM(BPF_MOV, R0, 1),
4406 BPF_EXIT_INSN(),
4407 },
4408 INTERNAL,
4409 { },
4410 { { 0, 1 } },
4411 },
4412 {
Michael Holzheucffc6422015-05-11 22:22:44 -07004413 "ALU64_ADD_K: 2147483646 + -2147483647 = -1",
4414 .u.insns_int = {
4415 BPF_LD_IMM64(R0, 2147483646),
4416 BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
4417 BPF_EXIT_INSN(),
4418 },
4419 INTERNAL,
4420 { },
4421 { { 0, -1 } },
4422 },
4423 {
4424 "ALU64_ADD_K: 1 + 0 = 1",
4425 .u.insns_int = {
4426 BPF_LD_IMM64(R2, 0x1),
4427 BPF_LD_IMM64(R3, 0x1),
4428 BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
4429 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4430 BPF_MOV32_IMM(R0, 2),
4431 BPF_EXIT_INSN(),
4432 BPF_MOV32_IMM(R0, 1),
4433 BPF_EXIT_INSN(),
4434 },
4435 INTERNAL,
4436 { },
4437 { { 0, 0x1 } },
4438 },
4439 {
4440 "ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
4441 .u.insns_int = {
4442 BPF_LD_IMM64(R2, 0x0),
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004443 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07004444 BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
4445 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4446 BPF_MOV32_IMM(R0, 2),
4447 BPF_EXIT_INSN(),
4448 BPF_MOV32_IMM(R0, 1),
4449 BPF_EXIT_INSN(),
4450 },
4451 INTERNAL,
4452 { },
4453 { { 0, 0x1 } },
4454 },
Naveen N. Rao9c94f6c2016-04-05 15:32:56 +05304455 {
4456 "ALU64_ADD_K: 0 + 0xffff = 0xffff",
4457 .u.insns_int = {
4458 BPF_LD_IMM64(R2, 0x0),
4459 BPF_LD_IMM64(R3, 0xffff),
4460 BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
4461 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4462 BPF_MOV32_IMM(R0, 2),
4463 BPF_EXIT_INSN(),
4464 BPF_MOV32_IMM(R0, 1),
4465 BPF_EXIT_INSN(),
4466 },
4467 INTERNAL,
4468 { },
4469 { { 0, 0x1 } },
4470 },
4471 {
4472 "ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
4473 .u.insns_int = {
4474 BPF_LD_IMM64(R2, 0x0),
4475 BPF_LD_IMM64(R3, 0x7fffffff),
4476 BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
4477 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4478 BPF_MOV32_IMM(R0, 2),
4479 BPF_EXIT_INSN(),
4480 BPF_MOV32_IMM(R0, 1),
4481 BPF_EXIT_INSN(),
4482 },
4483 INTERNAL,
4484 { },
4485 { { 0, 0x1 } },
4486 },
4487 {
4488 "ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
4489 .u.insns_int = {
4490 BPF_LD_IMM64(R2, 0x0),
4491 BPF_LD_IMM64(R3, 0xffffffff80000000LL),
4492 BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
4493 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4494 BPF_MOV32_IMM(R0, 2),
4495 BPF_EXIT_INSN(),
4496 BPF_MOV32_IMM(R0, 1),
4497 BPF_EXIT_INSN(),
4498 },
4499 INTERNAL,
4500 { },
4501 { { 0, 0x1 } },
4502 },
4503 {
4504 "ALU_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
4505 .u.insns_int = {
4506 BPF_LD_IMM64(R2, 0x0),
4507 BPF_LD_IMM64(R3, 0xffffffff80008000LL),
4508 BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
4509 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4510 BPF_MOV32_IMM(R0, 2),
4511 BPF_EXIT_INSN(),
4512 BPF_MOV32_IMM(R0, 1),
4513 BPF_EXIT_INSN(),
4514 },
4515 INTERNAL,
4516 { },
4517 { { 0, 0x1 } },
4518 },
Michael Holzheucffc6422015-05-11 22:22:44 -07004519 /* BPF_ALU | BPF_SUB | BPF_X */
4520 {
4521 "ALU_SUB_X: 3 - 1 = 2",
4522 .u.insns_int = {
4523 BPF_LD_IMM64(R0, 3),
4524 BPF_ALU32_IMM(BPF_MOV, R1, 1),
4525 BPF_ALU32_REG(BPF_SUB, R0, R1),
4526 BPF_EXIT_INSN(),
4527 },
4528 INTERNAL,
4529 { },
4530 { { 0, 2 } },
4531 },
4532 {
4533 "ALU_SUB_X: 4294967295 - 4294967294 = 1",
4534 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004535 BPF_LD_IMM64(R0, 4294967295U),
4536 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004537 BPF_ALU32_REG(BPF_SUB, R0, R1),
4538 BPF_EXIT_INSN(),
4539 },
4540 INTERNAL,
4541 { },
4542 { { 0, 1 } },
4543 },
4544 {
4545 "ALU64_SUB_X: 3 - 1 = 2",
4546 .u.insns_int = {
4547 BPF_LD_IMM64(R0, 3),
4548 BPF_ALU32_IMM(BPF_MOV, R1, 1),
4549 BPF_ALU64_REG(BPF_SUB, R0, R1),
4550 BPF_EXIT_INSN(),
4551 },
4552 INTERNAL,
4553 { },
4554 { { 0, 2 } },
4555 },
4556 {
4557 "ALU64_SUB_X: 4294967295 - 4294967294 = 1",
4558 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004559 BPF_LD_IMM64(R0, 4294967295U),
4560 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004561 BPF_ALU64_REG(BPF_SUB, R0, R1),
4562 BPF_EXIT_INSN(),
4563 },
4564 INTERNAL,
4565 { },
4566 { { 0, 1 } },
4567 },
4568 /* BPF_ALU | BPF_SUB | BPF_K */
4569 {
4570 "ALU_SUB_K: 3 - 1 = 2",
4571 .u.insns_int = {
4572 BPF_LD_IMM64(R0, 3),
4573 BPF_ALU32_IMM(BPF_SUB, R0, 1),
4574 BPF_EXIT_INSN(),
4575 },
4576 INTERNAL,
4577 { },
4578 { { 0, 2 } },
4579 },
4580 {
4581 "ALU_SUB_K: 3 - 0 = 3",
4582 .u.insns_int = {
4583 BPF_LD_IMM64(R0, 3),
4584 BPF_ALU32_IMM(BPF_SUB, R0, 0),
4585 BPF_EXIT_INSN(),
4586 },
4587 INTERNAL,
4588 { },
4589 { { 0, 3 } },
4590 },
4591 {
4592 "ALU_SUB_K: 4294967295 - 4294967294 = 1",
4593 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004594 BPF_LD_IMM64(R0, 4294967295U),
4595 BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004596 BPF_EXIT_INSN(),
4597 },
4598 INTERNAL,
4599 { },
4600 { { 0, 1 } },
4601 },
4602 {
4603 "ALU64_SUB_K: 3 - 1 = 2",
4604 .u.insns_int = {
4605 BPF_LD_IMM64(R0, 3),
4606 BPF_ALU64_IMM(BPF_SUB, R0, 1),
4607 BPF_EXIT_INSN(),
4608 },
4609 INTERNAL,
4610 { },
4611 { { 0, 2 } },
4612 },
4613 {
4614 "ALU64_SUB_K: 3 - 0 = 3",
4615 .u.insns_int = {
4616 BPF_LD_IMM64(R0, 3),
4617 BPF_ALU64_IMM(BPF_SUB, R0, 0),
4618 BPF_EXIT_INSN(),
4619 },
4620 INTERNAL,
4621 { },
4622 { { 0, 3 } },
4623 },
4624 {
4625 "ALU64_SUB_K: 4294967294 - 4294967295 = -1",
4626 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004627 BPF_LD_IMM64(R0, 4294967294U),
4628 BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004629 BPF_EXIT_INSN(),
4630 },
4631 INTERNAL,
4632 { },
4633 { { 0, -1 } },
4634 },
4635 {
4636 "ALU64_ADD_K: 2147483646 - 2147483647 = -1",
4637 .u.insns_int = {
4638 BPF_LD_IMM64(R0, 2147483646),
4639 BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
4640 BPF_EXIT_INSN(),
4641 },
4642 INTERNAL,
4643 { },
4644 { { 0, -1 } },
4645 },
4646 /* BPF_ALU | BPF_MUL | BPF_X */
4647 {
4648 "ALU_MUL_X: 2 * 3 = 6",
4649 .u.insns_int = {
4650 BPF_LD_IMM64(R0, 2),
4651 BPF_ALU32_IMM(BPF_MOV, R1, 3),
4652 BPF_ALU32_REG(BPF_MUL, R0, R1),
4653 BPF_EXIT_INSN(),
4654 },
4655 INTERNAL,
4656 { },
4657 { { 0, 6 } },
4658 },
4659 {
4660 "ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
4661 .u.insns_int = {
4662 BPF_LD_IMM64(R0, 2),
4663 BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
4664 BPF_ALU32_REG(BPF_MUL, R0, R1),
4665 BPF_EXIT_INSN(),
4666 },
4667 INTERNAL,
4668 { },
4669 { { 0, 0xFFFFFFF0 } },
4670 },
4671 {
4672 "ALU_MUL_X: -1 * -1 = 1",
4673 .u.insns_int = {
4674 BPF_LD_IMM64(R0, -1),
4675 BPF_ALU32_IMM(BPF_MOV, R1, -1),
4676 BPF_ALU32_REG(BPF_MUL, R0, R1),
4677 BPF_EXIT_INSN(),
4678 },
4679 INTERNAL,
4680 { },
4681 { { 0, 1 } },
4682 },
4683 {
4684 "ALU64_MUL_X: 2 * 3 = 6",
4685 .u.insns_int = {
4686 BPF_LD_IMM64(R0, 2),
4687 BPF_ALU32_IMM(BPF_MOV, R1, 3),
4688 BPF_ALU64_REG(BPF_MUL, R0, R1),
4689 BPF_EXIT_INSN(),
4690 },
4691 INTERNAL,
4692 { },
4693 { { 0, 6 } },
4694 },
4695 {
4696 "ALU64_MUL_X: 1 * 2147483647 = 2147483647",
4697 .u.insns_int = {
4698 BPF_LD_IMM64(R0, 1),
4699 BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
4700 BPF_ALU64_REG(BPF_MUL, R0, R1),
4701 BPF_EXIT_INSN(),
4702 },
4703 INTERNAL,
4704 { },
4705 { { 0, 2147483647 } },
4706 },
Johan Almbladhfaa57622021-08-09 11:18:22 +02004707 {
4708 "ALU64_MUL_X: 64x64 multiply, low word",
4709 .u.insns_int = {
4710 BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
4711 BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
4712 BPF_ALU64_REG(BPF_MUL, R0, R1),
4713 BPF_EXIT_INSN(),
4714 },
4715 INTERNAL,
4716 { },
4717 { { 0, 0xe5618cf0 } }
4718 },
4719 {
4720 "ALU64_MUL_X: 64x64 multiply, high word",
4721 .u.insns_int = {
4722 BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
4723 BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
4724 BPF_ALU64_REG(BPF_MUL, R0, R1),
4725 BPF_ALU64_IMM(BPF_RSH, R0, 32),
4726 BPF_EXIT_INSN(),
4727 },
4728 INTERNAL,
4729 { },
4730 { { 0, 0x2236d88f } }
4731 },
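	/*
	 * Note: the two MUL tests above split a single 64x64 product (its
	 * low 64 bits, as eBPF defines the result) into low and high
	 * 32-bit words; the second test just repeats the multiplication
	 * and shifts right by 32 before returning. On 32-bit JITs this is
	 * where a widening-multiply sequence or helper call is most
	 * likely to go wrong.
	 */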
Michael Holzheucffc6422015-05-11 22:22:44 -07004732 /* BPF_ALU | BPF_MUL | BPF_K */
4733 {
4734 "ALU_MUL_K: 2 * 3 = 6",
4735 .u.insns_int = {
4736 BPF_LD_IMM64(R0, 2),
4737 BPF_ALU32_IMM(BPF_MUL, R0, 3),
4738 BPF_EXIT_INSN(),
4739 },
4740 INTERNAL,
4741 { },
4742 { { 0, 6 } },
4743 },
4744 {
4745 "ALU_MUL_K: 3 * 1 = 3",
4746 .u.insns_int = {
4747 BPF_LD_IMM64(R0, 3),
4748 BPF_ALU32_IMM(BPF_MUL, R0, 1),
4749 BPF_EXIT_INSN(),
4750 },
4751 INTERNAL,
4752 { },
4753 { { 0, 3 } },
4754 },
4755 {
4756 "ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
4757 .u.insns_int = {
4758 BPF_LD_IMM64(R0, 2),
4759 BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
4760 BPF_EXIT_INSN(),
4761 },
4762 INTERNAL,
4763 { },
4764 { { 0, 0xFFFFFFF0 } },
4765 },
4766 {
4767 "ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
4768 .u.insns_int = {
4769 BPF_LD_IMM64(R2, 0x1),
4770 BPF_LD_IMM64(R3, 0x00000000ffffffff),
4771 BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
4772 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4773 BPF_MOV32_IMM(R0, 2),
4774 BPF_EXIT_INSN(),
4775 BPF_MOV32_IMM(R0, 1),
4776 BPF_EXIT_INSN(),
4777 },
4778 INTERNAL,
4779 { },
4780 { { 0, 0x1 } },
4781 },
4782 {
4783 "ALU64_MUL_K: 2 * 3 = 6",
4784 .u.insns_int = {
4785 BPF_LD_IMM64(R0, 2),
4786 BPF_ALU64_IMM(BPF_MUL, R0, 3),
4787 BPF_EXIT_INSN(),
4788 },
4789 INTERNAL,
4790 { },
4791 { { 0, 6 } },
4792 },
4793 {
4794 "ALU64_MUL_K: 3 * 1 = 3",
4795 .u.insns_int = {
4796 BPF_LD_IMM64(R0, 3),
4797 BPF_ALU64_IMM(BPF_MUL, R0, 1),
4798 BPF_EXIT_INSN(),
4799 },
4800 INTERNAL,
4801 { },
4802 { { 0, 3 } },
4803 },
4804 {
4805 "ALU64_MUL_K: 1 * 2147483647 = 2147483647",
4806 .u.insns_int = {
4807 BPF_LD_IMM64(R0, 1),
4808 BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
4809 BPF_EXIT_INSN(),
4810 },
4811 INTERNAL,
4812 { },
4813 { { 0, 2147483647 } },
4814 },
4815 {
4816 "ALU64_MUL_K: 1 * -2147483647 = -2147483647",
4817 .u.insns_int = {
4818 BPF_LD_IMM64(R0, 1),
4819 BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
4820 BPF_EXIT_INSN(),
4821 },
4822 INTERNAL,
4823 { },
4824 { { 0, -2147483647 } },
4825 },
4826 {
4827 "ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
4828 .u.insns_int = {
4829 BPF_LD_IMM64(R2, 0x1),
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004830 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07004831 BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
4832 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4833 BPF_MOV32_IMM(R0, 2),
4834 BPF_EXIT_INSN(),
4835 BPF_MOV32_IMM(R0, 1),
4836 BPF_EXIT_INSN(),
4837 },
4838 INTERNAL,
4839 { },
4840 { { 0, 0x1 } },
4841 },
Johan Almbladhfaa57622021-08-09 11:18:22 +02004842 {
4843 "ALU64_MUL_K: 64x32 multiply, low word",
4844 .u.insns_int = {
4845 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
4846 BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
4847 BPF_EXIT_INSN(),
4848 },
4849 INTERNAL,
4850 { },
4851 { { 0, 0xe242d208 } }
4852 },
4853 {
4854 "ALU64_MUL_K: 64x32 multiply, high word",
4855 .u.insns_int = {
4856 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
4857 BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
4858 BPF_ALU64_IMM(BPF_RSH, R0, 32),
4859 BPF_EXIT_INSN(),
4860 },
4861 INTERNAL,
4862 { },
4863 { { 0, 0xc28f5c28 } }
4864 },
Michael Holzheucffc6422015-05-11 22:22:44 -07004865 /* BPF_ALU | BPF_DIV | BPF_X */
4866 {
4867 "ALU_DIV_X: 6 / 2 = 3",
4868 .u.insns_int = {
4869 BPF_LD_IMM64(R0, 6),
4870 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4871 BPF_ALU32_REG(BPF_DIV, R0, R1),
4872 BPF_EXIT_INSN(),
4873 },
4874 INTERNAL,
4875 { },
4876 { { 0, 3 } },
4877 },
4878 {
4879 "ALU_DIV_X: 4294967295 / 4294967295 = 1",
4880 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004881 BPF_LD_IMM64(R0, 4294967295U),
4882 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004883 BPF_ALU32_REG(BPF_DIV, R0, R1),
4884 BPF_EXIT_INSN(),
4885 },
4886 INTERNAL,
4887 { },
4888 { { 0, 1 } },
4889 },
4890 {
4891 "ALU64_DIV_X: 6 / 2 = 3",
4892 .u.insns_int = {
4893 BPF_LD_IMM64(R0, 6),
4894 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4895 BPF_ALU64_REG(BPF_DIV, R0, R1),
4896 BPF_EXIT_INSN(),
4897 },
4898 INTERNAL,
4899 { },
4900 { { 0, 3 } },
4901 },
4902 {
4903 "ALU64_DIV_X: 2147483647 / 2147483647 = 1",
4904 .u.insns_int = {
4905 BPF_LD_IMM64(R0, 2147483647),
4906 BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
4907 BPF_ALU64_REG(BPF_DIV, R0, R1),
4908 BPF_EXIT_INSN(),
4909 },
4910 INTERNAL,
4911 { },
4912 { { 0, 1 } },
4913 },
4914 {
4915 "ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
4916 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004917 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
4918 BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
4919 BPF_LD_IMM64(R3, 0x0000000000000001LL),
Michael Holzheucffc6422015-05-11 22:22:44 -07004920 BPF_ALU64_REG(BPF_DIV, R2, R4),
4921 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4922 BPF_MOV32_IMM(R0, 2),
4923 BPF_EXIT_INSN(),
4924 BPF_MOV32_IMM(R0, 1),
4925 BPF_EXIT_INSN(),
4926 },
4927 INTERNAL,
4928 { },
4929 { { 0, 0x1 } },
4930 },
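	/*
	 * Note: BPF_DIV and BPF_MOD are unsigned in eBPF, so the "(-1)"
	 * in the test names really denotes the all-ones bit pattern;
	 * dividing 0xffffffffffffffff by itself therefore yields 1, not a
	 * signed result.
	 */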
4931 /* BPF_ALU | BPF_DIV | BPF_K */
4932 {
4933 "ALU_DIV_K: 6 / 2 = 3",
4934 .u.insns_int = {
4935 BPF_LD_IMM64(R0, 6),
4936 BPF_ALU32_IMM(BPF_DIV, R0, 2),
4937 BPF_EXIT_INSN(),
4938 },
4939 INTERNAL,
4940 { },
4941 { { 0, 3 } },
4942 },
4943 {
4944 "ALU_DIV_K: 3 / 1 = 3",
4945 .u.insns_int = {
4946 BPF_LD_IMM64(R0, 3),
4947 BPF_ALU32_IMM(BPF_DIV, R0, 1),
4948 BPF_EXIT_INSN(),
4949 },
4950 INTERNAL,
4951 { },
4952 { { 0, 3 } },
4953 },
4954 {
4955 "ALU_DIV_K: 4294967295 / 4294967295 = 1",
4956 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004957 BPF_LD_IMM64(R0, 4294967295U),
4958 BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
Michael Holzheucffc6422015-05-11 22:22:44 -07004959 BPF_EXIT_INSN(),
4960 },
4961 INTERNAL,
4962 { },
4963 { { 0, 1 } },
4964 },
4965 {
4966 "ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
4967 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07004968 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07004969 BPF_LD_IMM64(R3, 0x1UL),
4970 BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
4971 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4972 BPF_MOV32_IMM(R0, 2),
4973 BPF_EXIT_INSN(),
4974 BPF_MOV32_IMM(R0, 1),
4975 BPF_EXIT_INSN(),
4976 },
4977 INTERNAL,
4978 { },
4979 { { 0, 0x1 } },
4980 },
4981 {
4982 "ALU64_DIV_K: 6 / 2 = 3",
4983 .u.insns_int = {
4984 BPF_LD_IMM64(R0, 6),
4985 BPF_ALU64_IMM(BPF_DIV, R0, 2),
4986 BPF_EXIT_INSN(),
4987 },
4988 INTERNAL,
4989 { },
4990 { { 0, 3 } },
4991 },
4992 {
4993 "ALU64_DIV_K: 3 / 1 = 3",
4994 .u.insns_int = {
4995 BPF_LD_IMM64(R0, 3),
4996 BPF_ALU64_IMM(BPF_DIV, R0, 1),
4997 BPF_EXIT_INSN(),
4998 },
4999 INTERNAL,
5000 { },
5001 { { 0, 3 } },
5002 },
5003 {
5004 "ALU64_DIV_K: 2147483647 / 2147483647 = 1",
5005 .u.insns_int = {
5006 BPF_LD_IMM64(R0, 2147483647),
5007 BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
5008 BPF_EXIT_INSN(),
5009 },
5010 INTERNAL,
5011 { },
5012 { { 0, 1 } },
5013 },
5014 {
5015 "ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5016 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005017 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5018 BPF_LD_IMM64(R3, 0x0000000000000001LL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005019 BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
5020 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5021 BPF_MOV32_IMM(R0, 2),
5022 BPF_EXIT_INSN(),
5023 BPF_MOV32_IMM(R0, 1),
5024 BPF_EXIT_INSN(),
5025 },
5026 INTERNAL,
5027 { },
5028 { { 0, 0x1 } },
5029 },
5030 /* BPF_ALU | BPF_MOD | BPF_X */
5031 {
5032 "ALU_MOD_X: 3 % 2 = 1",
5033 .u.insns_int = {
5034 BPF_LD_IMM64(R0, 3),
5035 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5036 BPF_ALU32_REG(BPF_MOD, R0, R1),
5037 BPF_EXIT_INSN(),
5038 },
5039 INTERNAL,
5040 { },
5041 { { 0, 1 } },
5042 },
5043 {
5044 "ALU_MOD_X: 4294967295 % 4294967293 = 2",
5045 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005046 BPF_LD_IMM64(R0, 4294967295U),
5047 BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
Michael Holzheucffc6422015-05-11 22:22:44 -07005048 BPF_ALU32_REG(BPF_MOD, R0, R1),
5049 BPF_EXIT_INSN(),
5050 },
5051 INTERNAL,
5052 { },
5053 { { 0, 2 } },
5054 },
5055 {
5056 "ALU64_MOD_X: 3 % 2 = 1",
5057 .u.insns_int = {
5058 BPF_LD_IMM64(R0, 3),
5059 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5060 BPF_ALU64_REG(BPF_MOD, R0, R1),
5061 BPF_EXIT_INSN(),
5062 },
5063 INTERNAL,
5064 { },
5065 { { 0, 1 } },
5066 },
5067 {
5068 "ALU64_MOD_X: 2147483647 % 2147483645 = 2",
5069 .u.insns_int = {
5070 BPF_LD_IMM64(R0, 2147483647),
5071 BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
5072 BPF_ALU64_REG(BPF_MOD, R0, R1),
5073 BPF_EXIT_INSN(),
5074 },
5075 INTERNAL,
5076 { },
5077 { { 0, 2 } },
5078 },
5079 /* BPF_ALU | BPF_MOD | BPF_K */
5080 {
5081 "ALU_MOD_K: 3 % 2 = 1",
5082 .u.insns_int = {
5083 BPF_LD_IMM64(R0, 3),
5084 BPF_ALU32_IMM(BPF_MOD, R0, 2),
5085 BPF_EXIT_INSN(),
5086 },
5087 INTERNAL,
5088 { },
5089 { { 0, 1 } },
5090 },
5091 {
5092 "ALU_MOD_K: 3 % 1 = 0",
5093 .u.insns_int = {
5094 BPF_LD_IMM64(R0, 3),
5095 BPF_ALU32_IMM(BPF_MOD, R0, 1),
5096 BPF_EXIT_INSN(),
5097 },
5098 INTERNAL,
5099 { },
5100 { { 0, 0 } },
5101 },
5102 {
5103 "ALU_MOD_K: 4294967295 % 4294967293 = 2",
5104 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005105 BPF_LD_IMM64(R0, 4294967295U),
5106 BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
Michael Holzheucffc6422015-05-11 22:22:44 -07005107 BPF_EXIT_INSN(),
5108 },
5109 INTERNAL,
5110 { },
5111 { { 0, 2 } },
5112 },
5113 {
5114 "ALU64_MOD_K: 3 % 2 = 1",
5115 .u.insns_int = {
5116 BPF_LD_IMM64(R0, 3),
5117 BPF_ALU64_IMM(BPF_MOD, R0, 2),
5118 BPF_EXIT_INSN(),
5119 },
5120 INTERNAL,
5121 { },
5122 { { 0, 1 } },
5123 },
5124 {
5125 "ALU64_MOD_K: 3 % 1 = 0",
5126 .u.insns_int = {
5127 BPF_LD_IMM64(R0, 3),
5128 BPF_ALU64_IMM(BPF_MOD, R0, 1),
5129 BPF_EXIT_INSN(),
5130 },
5131 INTERNAL,
5132 { },
5133 { { 0, 0 } },
5134 },
5135 {
5136 "ALU64_MOD_K: 2147483647 % 2147483645 = 2",
5137 .u.insns_int = {
5138 BPF_LD_IMM64(R0, 2147483647),
5139 BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
5140 BPF_EXIT_INSN(),
5141 },
5142 INTERNAL,
5143 { },
5144 { { 0, 2 } },
5145 },
5146 /* BPF_ALU | BPF_AND | BPF_X */
5147 {
5148 "ALU_AND_X: 3 & 2 = 2",
5149 .u.insns_int = {
5150 BPF_LD_IMM64(R0, 3),
5151 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5152 BPF_ALU32_REG(BPF_AND, R0, R1),
5153 BPF_EXIT_INSN(),
5154 },
5155 INTERNAL,
5156 { },
5157 { { 0, 2 } },
5158 },
5159 {
5160 "ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
5161 .u.insns_int = {
5162 BPF_LD_IMM64(R0, 0xffffffff),
5163 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
5164 BPF_ALU32_REG(BPF_AND, R0, R1),
5165 BPF_EXIT_INSN(),
5166 },
5167 INTERNAL,
5168 { },
5169 { { 0, 0xffffffff } },
5170 },
5171 {
5172 "ALU64_AND_X: 3 & 2 = 2",
5173 .u.insns_int = {
5174 BPF_LD_IMM64(R0, 3),
5175 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5176 BPF_ALU64_REG(BPF_AND, R0, R1),
5177 BPF_EXIT_INSN(),
5178 },
5179 INTERNAL,
5180 { },
5181 { { 0, 2 } },
5182 },
5183 {
5184 "ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
5185 .u.insns_int = {
5186 BPF_LD_IMM64(R0, 0xffffffff),
5187 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
5188 BPF_ALU64_REG(BPF_AND, R0, R1),
5189 BPF_EXIT_INSN(),
5190 },
5191 INTERNAL,
5192 { },
5193 { { 0, 0xffffffff } },
5194 },
5195 /* BPF_ALU | BPF_AND | BPF_K */
5196 {
5197 "ALU_AND_K: 3 & 2 = 2",
5198 .u.insns_int = {
5199 BPF_LD_IMM64(R0, 3),
5200 BPF_ALU32_IMM(BPF_AND, R0, 2),
5201 BPF_EXIT_INSN(),
5202 },
5203 INTERNAL,
5204 { },
5205 { { 0, 2 } },
5206 },
5207 {
5208 "ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
5209 .u.insns_int = {
5210 BPF_LD_IMM64(R0, 0xffffffff),
5211 BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
5212 BPF_EXIT_INSN(),
5213 },
5214 INTERNAL,
5215 { },
5216 { { 0, 0xffffffff } },
5217 },
5218 {
Johan Almbladhba89bcf2021-08-09 11:18:19 +02005219 "ALU_AND_K: Small immediate",
5220 .u.insns_int = {
5221 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
5222 BPF_ALU32_IMM(BPF_AND, R0, 15),
5223 BPF_EXIT_INSN(),
5224 },
5225 INTERNAL,
5226 { },
5227 { { 0, 4 } }
5228 },
5229 {
5230 "ALU_AND_K: Large immediate",
5231 .u.insns_int = {
5232 BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
5233 BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
5234 BPF_EXIT_INSN(),
5235 },
5236 INTERNAL,
5237 { },
5238 { { 0, 0xa1b2c3d4 } }
5239 },
5240 {
5241 "ALU_AND_K: Zero extension",
5242 .u.insns_int = {
5243 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5244 BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
5245 BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
5246 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5247 BPF_MOV32_IMM(R0, 2),
5248 BPF_EXIT_INSN(),
5249 BPF_MOV32_IMM(R0, 1),
5250 BPF_EXIT_INSN(),
5251 },
5252 INTERNAL,
5253 { },
5254 { { 0, 1 } }
5255 },
5256 {
Michael Holzheucffc6422015-05-11 22:22:44 -07005257 "ALU64_AND_K: 3 & 2 = 2",
5258 .u.insns_int = {
5259 BPF_LD_IMM64(R0, 3),
5260 BPF_ALU64_IMM(BPF_AND, R0, 2),
5261 BPF_EXIT_INSN(),
5262 },
5263 INTERNAL,
5264 { },
5265 { { 0, 2 } },
5266 },
5267 {
5268 "ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
5269 .u.insns_int = {
5270 BPF_LD_IMM64(R0, 0xffffffff),
5271 BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
5272 BPF_EXIT_INSN(),
5273 },
5274 INTERNAL,
5275 { },
5276 { { 0, 0xffffffff } },
5277 },
5278 {
Johan Almbladhe92c8132021-08-09 11:18:18 +02005279 "ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
Michael Holzheucffc6422015-05-11 22:22:44 -07005280 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005281 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5282 BPF_LD_IMM64(R3, 0x0000000000000000LL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005283 BPF_ALU64_IMM(BPF_AND, R2, 0x0),
5284 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5285 BPF_MOV32_IMM(R0, 2),
5286 BPF_EXIT_INSN(),
5287 BPF_MOV32_IMM(R0, 1),
5288 BPF_EXIT_INSN(),
5289 },
5290 INTERNAL,
5291 { },
5292 { { 0, 0x1 } },
5293 },
5294 {
Johan Almbladhe92c8132021-08-09 11:18:18 +02005295 "ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
Michael Holzheucffc6422015-05-11 22:22:44 -07005296 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005297 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5298 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005299 BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
5300 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5301 BPF_MOV32_IMM(R0, 2),
5302 BPF_EXIT_INSN(),
5303 BPF_MOV32_IMM(R0, 1),
5304 BPF_EXIT_INSN(),
5305 },
5306 INTERNAL,
5307 { },
5308 { { 0, 0x1 } },
5309 },
5310 {
5311 "ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
5312 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005313 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5314 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005315 BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
5316 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5317 BPF_MOV32_IMM(R0, 2),
5318 BPF_EXIT_INSN(),
5319 BPF_MOV32_IMM(R0, 1),
5320 BPF_EXIT_INSN(),
5321 },
5322 INTERNAL,
5323 { },
5324 { { 0, 0x1 } },
5325 },
Johan Almbladhba89bcf2021-08-09 11:18:19 +02005326 {
5327 "ALU64_AND_K: Sign extension 1",
5328 .u.insns_int = {
5329 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5330 BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
5331 BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
5332 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5333 BPF_MOV32_IMM(R0, 2),
5334 BPF_EXIT_INSN(),
5335 BPF_MOV32_IMM(R0, 1),
5336 BPF_EXIT_INSN(),
5337 },
5338 INTERNAL,
5339 { },
5340 { { 0, 1 } }
5341 },
5342 {
5343 "ALU64_AND_K: Sign extension 2",
5344 .u.insns_int = {
5345 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5346 BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
5347 BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
5348 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5349 BPF_MOV32_IMM(R0, 2),
5350 BPF_EXIT_INSN(),
5351 BPF_MOV32_IMM(R0, 1),
5352 BPF_EXIT_INSN(),
5353 },
5354 INTERNAL,
5355 { },
5356 { { 0, 1 } }
5357 },
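	/*
	 * Note: in the ALU64 immediate forms the 32-bit immediate is
	 * sign-extended to 64 bits first. That is why 0x0f0f0f0f
	 * (positive) clears the upper word in "Sign extension 1", while
	 * 0xf0f0f0f0 (negative as an s32, i.e. 0xfffffffff0f0f0f0) leaves
	 * the upper word intact in "Sign extension 2".
	 */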
Michael Holzheucffc6422015-05-11 22:22:44 -07005358 /* BPF_ALU | BPF_OR | BPF_X */
5359 {
5360 "ALU_OR_X: 1 | 2 = 3",
5361 .u.insns_int = {
5362 BPF_LD_IMM64(R0, 1),
5363 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5364 BPF_ALU32_REG(BPF_OR, R0, R1),
5365 BPF_EXIT_INSN(),
5366 },
5367 INTERNAL,
5368 { },
5369 { { 0, 3 } },
5370 },
5371 {
5372 "ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
5373 .u.insns_int = {
5374 BPF_LD_IMM64(R0, 0),
5375 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
5376 BPF_ALU32_REG(BPF_OR, R0, R1),
5377 BPF_EXIT_INSN(),
5378 },
5379 INTERNAL,
5380 { },
5381 { { 0, 0xffffffff } },
5382 },
5383 {
5384 "ALU64_OR_X: 1 | 2 = 3",
5385 .u.insns_int = {
5386 BPF_LD_IMM64(R0, 1),
5387 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5388 BPF_ALU64_REG(BPF_OR, R0, R1),
5389 BPF_EXIT_INSN(),
5390 },
5391 INTERNAL,
5392 { },
5393 { { 0, 3 } },
5394 },
5395 {
5396 "ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
5397 .u.insns_int = {
5398 BPF_LD_IMM64(R0, 0),
5399 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
5400 BPF_ALU64_REG(BPF_OR, R0, R1),
5401 BPF_EXIT_INSN(),
5402 },
5403 INTERNAL,
5404 { },
5405 { { 0, 0xffffffff } },
5406 },
5407 /* BPF_ALU | BPF_OR | BPF_K */
5408 {
5409 "ALU_OR_K: 1 | 2 = 3",
5410 .u.insns_int = {
5411 BPF_LD_IMM64(R0, 1),
5412 BPF_ALU32_IMM(BPF_OR, R0, 2),
5413 BPF_EXIT_INSN(),
5414 },
5415 INTERNAL,
5416 { },
5417 { { 0, 3 } },
5418 },
5419 {
5420 "ALU_OR_K: 0 & 0xffffffff = 0xffffffff",
5421 .u.insns_int = {
5422 BPF_LD_IMM64(R0, 0),
5423 BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
5424 BPF_EXIT_INSN(),
5425 },
5426 INTERNAL,
5427 { },
5428 { { 0, 0xffffffff } },
5429 },
5430 {
Johan Almbladhba89bcf2021-08-09 11:18:19 +02005431 "ALU_OR_K: Small immediate",
5432 .u.insns_int = {
5433 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
5434 BPF_ALU32_IMM(BPF_OR, R0, 1),
5435 BPF_EXIT_INSN(),
5436 },
5437 INTERNAL,
5438 { },
5439 { { 0, 0x01020305 } }
5440 },
5441 {
5442 "ALU_OR_K: Large immediate",
5443 .u.insns_int = {
5444 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
5445 BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
5446 BPF_EXIT_INSN(),
5447 },
5448 INTERNAL,
5449 { },
5450 { { 0, 0xa1b2c3d4 } }
5451 },
5452 {
5453 "ALU_OR_K: Zero extension",
5454 .u.insns_int = {
5455 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5456 BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
5457 BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
5458 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5459 BPF_MOV32_IMM(R0, 2),
5460 BPF_EXIT_INSN(),
5461 BPF_MOV32_IMM(R0, 1),
5462 BPF_EXIT_INSN(),
5463 },
5464 INTERNAL,
5465 { },
5466 { { 0, 1 } }
5467 },
5468 {
Michael Holzheucffc6422015-05-11 22:22:44 -07005469 "ALU64_OR_K: 1 | 2 = 3",
5470 .u.insns_int = {
5471 BPF_LD_IMM64(R0, 1),
5472 BPF_ALU64_IMM(BPF_OR, R0, 2),
5473 BPF_EXIT_INSN(),
5474 },
5475 INTERNAL,
5476 { },
5477 { { 0, 3 } },
5478 },
5479 {
5480 "ALU64_OR_K: 0 & 0xffffffff = 0xffffffff",
5481 .u.insns_int = {
5482 BPF_LD_IMM64(R0, 0),
5483 BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
5484 BPF_EXIT_INSN(),
5485 },
5486 INTERNAL,
5487 { },
5488 { { 0, 0xffffffff } },
5489 },
5490 {
Johan Almbladhe92c8132021-08-09 11:18:18 +02005491 "ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
Michael Holzheucffc6422015-05-11 22:22:44 -07005492 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005493 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5494 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005495 BPF_ALU64_IMM(BPF_OR, R2, 0x0),
5496 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5497 BPF_MOV32_IMM(R0, 2),
5498 BPF_EXIT_INSN(),
5499 BPF_MOV32_IMM(R0, 1),
5500 BPF_EXIT_INSN(),
5501 },
5502 INTERNAL,
5503 { },
5504 { { 0, 0x1 } },
5505 },
5506 {
5507 "ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
5508 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005509 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5510 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005511 BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
5512 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5513 BPF_MOV32_IMM(R0, 2),
5514 BPF_EXIT_INSN(),
5515 BPF_MOV32_IMM(R0, 1),
5516 BPF_EXIT_INSN(),
5517 },
5518 INTERNAL,
5519 { },
5520 { { 0, 0x1 } },
5521 },
5522 {
 5523 		"ALU64_OR_K: 0x0000000000000000 | -1 = 0xffffffffffffffff",
5524 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005525 BPF_LD_IMM64(R2, 0x0000000000000000LL),
5526 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005527 BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
5528 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5529 BPF_MOV32_IMM(R0, 2),
5530 BPF_EXIT_INSN(),
5531 BPF_MOV32_IMM(R0, 1),
5532 BPF_EXIT_INSN(),
5533 },
5534 INTERNAL,
5535 { },
5536 { { 0, 0x1 } },
5537 },
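	/*
	 * For BPF_ALU64 immediate operations the 32-bit immediate is
	 * sign-extended to 64 bits before use, so 0x0f0f0f0f is OR-ed in
	 * as-is while 0xf0f0f0f0 becomes 0xfffffffff0f0f0f0. The two
	 * "Sign extension" cases below cover both possibilities.
	 */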
Johan Almbladhba89bcf2021-08-09 11:18:19 +02005538 {
5539 "ALU64_OR_K: Sign extension 1",
5540 .u.insns_int = {
5541 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5542 BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
5543 BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
5544 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5545 BPF_MOV32_IMM(R0, 2),
5546 BPF_EXIT_INSN(),
5547 BPF_MOV32_IMM(R0, 1),
5548 BPF_EXIT_INSN(),
5549 },
5550 INTERNAL,
5551 { },
5552 { { 0, 1 } }
5553 },
5554 {
5555 "ALU64_OR_K: Sign extension 2",
5556 .u.insns_int = {
5557 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5558 BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
5559 BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
5560 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5561 BPF_MOV32_IMM(R0, 2),
5562 BPF_EXIT_INSN(),
5563 BPF_MOV32_IMM(R0, 1),
5564 BPF_EXIT_INSN(),
5565 },
5566 INTERNAL,
5567 { },
5568 { { 0, 1 } }
5569 },
Michael Holzheucffc6422015-05-11 22:22:44 -07005570 /* BPF_ALU | BPF_XOR | BPF_X */
5571 {
5572 "ALU_XOR_X: 5 ^ 6 = 3",
5573 .u.insns_int = {
5574 BPF_LD_IMM64(R0, 5),
5575 BPF_ALU32_IMM(BPF_MOV, R1, 6),
5576 BPF_ALU32_REG(BPF_XOR, R0, R1),
5577 BPF_EXIT_INSN(),
5578 },
5579 INTERNAL,
5580 { },
5581 { { 0, 3 } },
5582 },
5583 {
5584 "ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
5585 .u.insns_int = {
5586 BPF_LD_IMM64(R0, 1),
5587 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
5588 BPF_ALU32_REG(BPF_XOR, R0, R1),
5589 BPF_EXIT_INSN(),
5590 },
5591 INTERNAL,
5592 { },
5593 { { 0, 0xfffffffe } },
5594 },
5595 {
5596 "ALU64_XOR_X: 5 ^ 6 = 3",
5597 .u.insns_int = {
5598 BPF_LD_IMM64(R0, 5),
5599 BPF_ALU32_IMM(BPF_MOV, R1, 6),
5600 BPF_ALU64_REG(BPF_XOR, R0, R1),
5601 BPF_EXIT_INSN(),
5602 },
5603 INTERNAL,
5604 { },
5605 { { 0, 3 } },
5606 },
5607 {
5608 "ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
5609 .u.insns_int = {
5610 BPF_LD_IMM64(R0, 1),
5611 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
5612 BPF_ALU64_REG(BPF_XOR, R0, R1),
5613 BPF_EXIT_INSN(),
5614 },
5615 INTERNAL,
5616 { },
5617 { { 0, 0xfffffffe } },
5618 },
5619 /* BPF_ALU | BPF_XOR | BPF_K */
5620 {
5621 "ALU_XOR_K: 5 ^ 6 = 3",
5622 .u.insns_int = {
5623 BPF_LD_IMM64(R0, 5),
5624 BPF_ALU32_IMM(BPF_XOR, R0, 6),
5625 BPF_EXIT_INSN(),
5626 },
5627 INTERNAL,
5628 { },
5629 { { 0, 3 } },
5630 },
5631 {
5632 "ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
5633 .u.insns_int = {
5634 BPF_LD_IMM64(R0, 1),
5635 BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
5636 BPF_EXIT_INSN(),
5637 },
5638 INTERNAL,
5639 { },
5640 { { 0, 0xfffffffe } },
5641 },
5642 {
Johan Almbladhba89bcf2021-08-09 11:18:19 +02005643 "ALU_XOR_K: Small immediate",
5644 .u.insns_int = {
5645 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
5646 BPF_ALU32_IMM(BPF_XOR, R0, 15),
5647 BPF_EXIT_INSN(),
5648 },
5649 INTERNAL,
5650 { },
5651 { { 0, 0x0102030b } }
5652 },
5653 {
5654 "ALU_XOR_K: Large immediate",
5655 .u.insns_int = {
5656 BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
5657 BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
5658 BPF_EXIT_INSN(),
5659 },
5660 INTERNAL,
5661 { },
5662 { { 0, 0x5e4d3c2b } }
5663 },
5664 {
5665 "ALU_XOR_K: Zero extension",
5666 .u.insns_int = {
5667 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5668 BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
5669 BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
5670 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5671 BPF_MOV32_IMM(R0, 2),
5672 BPF_EXIT_INSN(),
5673 BPF_MOV32_IMM(R0, 1),
5674 BPF_EXIT_INSN(),
5675 },
5676 INTERNAL,
5677 { },
5678 { { 0, 1 } }
5679 },
5680 {
Michael Holzheucffc6422015-05-11 22:22:44 -07005681 "ALU64_XOR_K: 5 ^ 6 = 3",
5682 .u.insns_int = {
5683 BPF_LD_IMM64(R0, 5),
5684 BPF_ALU64_IMM(BPF_XOR, R0, 6),
5685 BPF_EXIT_INSN(),
5686 },
5687 INTERNAL,
5688 { },
5689 { { 0, 3 } },
5690 },
5691 {
Johan Almbladhe92c8132021-08-09 11:18:18 +02005692 "ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
Michael Holzheucffc6422015-05-11 22:22:44 -07005693 .u.insns_int = {
5694 BPF_LD_IMM64(R0, 1),
5695 BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
5696 BPF_EXIT_INSN(),
5697 },
5698 INTERNAL,
5699 { },
5700 { { 0, 0xfffffffe } },
5701 },
5702 {
5703 "ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
5704 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005705 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5706 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005707 BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
5708 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5709 BPF_MOV32_IMM(R0, 2),
5710 BPF_EXIT_INSN(),
5711 BPF_MOV32_IMM(R0, 1),
5712 BPF_EXIT_INSN(),
5713 },
5714 INTERNAL,
5715 { },
5716 { { 0, 0x1 } },
5717 },
5718 {
5719 "ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
5720 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005721 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5722 BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005723 BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
5724 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5725 BPF_MOV32_IMM(R0, 2),
5726 BPF_EXIT_INSN(),
5727 BPF_MOV32_IMM(R0, 1),
5728 BPF_EXIT_INSN(),
5729 },
5730 INTERNAL,
5731 { },
5732 { { 0, 0x1 } },
5733 },
5734 {
 5735 		"ALU64_XOR_K: 0x0000000000000000 ^ -1 = 0xffffffffffffffff",
5736 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07005737 BPF_LD_IMM64(R2, 0x0000000000000000LL),
5738 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07005739 BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
5740 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5741 BPF_MOV32_IMM(R0, 2),
5742 BPF_EXIT_INSN(),
5743 BPF_MOV32_IMM(R0, 1),
5744 BPF_EXIT_INSN(),
5745 },
5746 INTERNAL,
5747 { },
5748 { { 0, 0x1 } },
5749 },
Johan Almbladhba89bcf2021-08-09 11:18:19 +02005750 {
5751 "ALU64_XOR_K: Sign extension 1",
5752 .u.insns_int = {
5753 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5754 BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
5755 BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
5756 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5757 BPF_MOV32_IMM(R0, 2),
5758 BPF_EXIT_INSN(),
5759 BPF_MOV32_IMM(R0, 1),
5760 BPF_EXIT_INSN(),
5761 },
5762 INTERNAL,
5763 { },
5764 { { 0, 1 } }
5765 },
5766 {
5767 "ALU64_XOR_K: Sign extension 2",
5768 .u.insns_int = {
5769 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5770 BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
5771 BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
5772 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5773 BPF_MOV32_IMM(R0, 2),
5774 BPF_EXIT_INSN(),
5775 BPF_MOV32_IMM(R0, 1),
5776 BPF_EXIT_INSN(),
5777 },
5778 INTERNAL,
5779 { },
5780 { { 0, 1 } }
5781 },
Michael Holzheucffc6422015-05-11 22:22:44 -07005782 /* BPF_ALU | BPF_LSH | BPF_X */
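	/*
	 * The 64-bit shift-by-register tests below probe shift counts
	 * below, equal to and above 32 as well as zero shifts, checking
	 * the low and high words of the result separately; these are the
	 * ranges where 32-bit JITs typically have to emit different code.
	 */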
5783 {
5784 "ALU_LSH_X: 1 << 1 = 2",
5785 .u.insns_int = {
5786 BPF_LD_IMM64(R0, 1),
5787 BPF_ALU32_IMM(BPF_MOV, R1, 1),
5788 BPF_ALU32_REG(BPF_LSH, R0, R1),
5789 BPF_EXIT_INSN(),
5790 },
5791 INTERNAL,
5792 { },
5793 { { 0, 2 } },
5794 },
5795 {
5796 "ALU_LSH_X: 1 << 31 = 0x80000000",
5797 .u.insns_int = {
5798 BPF_LD_IMM64(R0, 1),
5799 BPF_ALU32_IMM(BPF_MOV, R1, 31),
5800 BPF_ALU32_REG(BPF_LSH, R0, R1),
5801 BPF_EXIT_INSN(),
5802 },
5803 INTERNAL,
5804 { },
5805 { { 0, 0x80000000 } },
5806 },
5807 {
Johan Almbladh0f2fca12021-08-09 11:18:20 +02005808 "ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
5809 .u.insns_int = {
5810 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
5811 BPF_ALU32_IMM(BPF_MOV, R1, 12),
5812 BPF_ALU32_REG(BPF_LSH, R0, R1),
5813 BPF_EXIT_INSN(),
5814 },
5815 INTERNAL,
5816 { },
5817 { { 0, 0x45678000 } }
5818 },
5819 {
Michael Holzheucffc6422015-05-11 22:22:44 -07005820 "ALU64_LSH_X: 1 << 1 = 2",
5821 .u.insns_int = {
5822 BPF_LD_IMM64(R0, 1),
5823 BPF_ALU32_IMM(BPF_MOV, R1, 1),
5824 BPF_ALU64_REG(BPF_LSH, R0, R1),
5825 BPF_EXIT_INSN(),
5826 },
5827 INTERNAL,
5828 { },
5829 { { 0, 2 } },
5830 },
5831 {
5832 "ALU64_LSH_X: 1 << 31 = 0x80000000",
5833 .u.insns_int = {
5834 BPF_LD_IMM64(R0, 1),
5835 BPF_ALU32_IMM(BPF_MOV, R1, 31),
5836 BPF_ALU64_REG(BPF_LSH, R0, R1),
5837 BPF_EXIT_INSN(),
5838 },
5839 INTERNAL,
5840 { },
5841 { { 0, 0x80000000 } },
5842 },
Johan Almbladh3b9890e2021-08-09 11:18:21 +02005843 {
5844 "ALU64_LSH_X: Shift < 32, low word",
5845 .u.insns_int = {
5846 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5847 BPF_ALU32_IMM(BPF_MOV, R1, 12),
5848 BPF_ALU64_REG(BPF_LSH, R0, R1),
5849 BPF_EXIT_INSN(),
5850 },
5851 INTERNAL,
5852 { },
5853 { { 0, 0xbcdef000 } }
5854 },
5855 {
5856 "ALU64_LSH_X: Shift < 32, high word",
5857 .u.insns_int = {
5858 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5859 BPF_ALU32_IMM(BPF_MOV, R1, 12),
5860 BPF_ALU64_REG(BPF_LSH, R0, R1),
5861 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5862 BPF_EXIT_INSN(),
5863 },
5864 INTERNAL,
5865 { },
5866 { { 0, 0x3456789a } }
5867 },
5868 {
5869 "ALU64_LSH_X: Shift > 32, low word",
5870 .u.insns_int = {
5871 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5872 BPF_ALU32_IMM(BPF_MOV, R1, 36),
5873 BPF_ALU64_REG(BPF_LSH, R0, R1),
5874 BPF_EXIT_INSN(),
5875 },
5876 INTERNAL,
5877 { },
5878 { { 0, 0 } }
5879 },
5880 {
5881 "ALU64_LSH_X: Shift > 32, high word",
5882 .u.insns_int = {
5883 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5884 BPF_ALU32_IMM(BPF_MOV, R1, 36),
5885 BPF_ALU64_REG(BPF_LSH, R0, R1),
5886 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5887 BPF_EXIT_INSN(),
5888 },
5889 INTERNAL,
5890 { },
5891 { { 0, 0x9abcdef0 } }
5892 },
5893 {
5894 "ALU64_LSH_X: Shift == 32, low word",
5895 .u.insns_int = {
5896 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5897 BPF_ALU32_IMM(BPF_MOV, R1, 32),
5898 BPF_ALU64_REG(BPF_LSH, R0, R1),
5899 BPF_EXIT_INSN(),
5900 },
5901 INTERNAL,
5902 { },
5903 { { 0, 0 } }
5904 },
5905 {
5906 "ALU64_LSH_X: Shift == 32, high word",
5907 .u.insns_int = {
5908 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5909 BPF_ALU32_IMM(BPF_MOV, R1, 32),
5910 BPF_ALU64_REG(BPF_LSH, R0, R1),
5911 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5912 BPF_EXIT_INSN(),
5913 },
5914 INTERNAL,
5915 { },
5916 { { 0, 0x89abcdef } }
5917 },
5918 {
5919 "ALU64_LSH_X: Zero shift, low word",
5920 .u.insns_int = {
5921 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5922 BPF_ALU32_IMM(BPF_MOV, R1, 0),
5923 BPF_ALU64_REG(BPF_LSH, R0, R1),
5924 BPF_EXIT_INSN(),
5925 },
5926 INTERNAL,
5927 { },
5928 { { 0, 0x89abcdef } }
5929 },
5930 {
5931 "ALU64_LSH_X: Zero shift, high word",
5932 .u.insns_int = {
5933 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5934 BPF_ALU32_IMM(BPF_MOV, R1, 0),
5935 BPF_ALU64_REG(BPF_LSH, R0, R1),
5936 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5937 BPF_EXIT_INSN(),
5938 },
5939 INTERNAL,
5940 { },
5941 { { 0, 0x01234567 } }
5942 },
Michael Holzheucffc6422015-05-11 22:22:44 -07005943 /* BPF_ALU | BPF_LSH | BPF_K */
5944 {
5945 "ALU_LSH_K: 1 << 1 = 2",
5946 .u.insns_int = {
5947 BPF_LD_IMM64(R0, 1),
5948 BPF_ALU32_IMM(BPF_LSH, R0, 1),
5949 BPF_EXIT_INSN(),
5950 },
5951 INTERNAL,
5952 { },
5953 { { 0, 2 } },
5954 },
5955 {
5956 "ALU_LSH_K: 1 << 31 = 0x80000000",
5957 .u.insns_int = {
5958 BPF_LD_IMM64(R0, 1),
5959 BPF_ALU32_IMM(BPF_LSH, R0, 31),
5960 BPF_EXIT_INSN(),
5961 },
5962 INTERNAL,
5963 { },
5964 { { 0, 0x80000000 } },
5965 },
5966 {
Johan Almbladh0f2fca12021-08-09 11:18:20 +02005967 "ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
5968 .u.insns_int = {
5969 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
5970 BPF_ALU32_IMM(BPF_LSH, R0, 12),
5971 BPF_EXIT_INSN(),
5972 },
5973 INTERNAL,
5974 { },
5975 { { 0, 0x45678000 } }
5976 },
5977 {
5978 "ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
5979 .u.insns_int = {
5980 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
5981 BPF_ALU32_IMM(BPF_LSH, R0, 0),
5982 BPF_EXIT_INSN(),
5983 },
5984 INTERNAL,
5985 { },
5986 { { 0, 0x12345678 } }
5987 },
5988 {
Michael Holzheucffc6422015-05-11 22:22:44 -07005989 "ALU64_LSH_K: 1 << 1 = 2",
5990 .u.insns_int = {
5991 BPF_LD_IMM64(R0, 1),
5992 BPF_ALU64_IMM(BPF_LSH, R0, 1),
5993 BPF_EXIT_INSN(),
5994 },
5995 INTERNAL,
5996 { },
5997 { { 0, 2 } },
5998 },
5999 {
6000 "ALU64_LSH_K: 1 << 31 = 0x80000000",
6001 .u.insns_int = {
6002 BPF_LD_IMM64(R0, 1),
6003 BPF_ALU64_IMM(BPF_LSH, R0, 31),
6004 BPF_EXIT_INSN(),
6005 },
6006 INTERNAL,
6007 { },
6008 { { 0, 0x80000000 } },
6009 },
Johan Almbladh3b9890e2021-08-09 11:18:21 +02006010 {
6011 "ALU64_LSH_K: Shift < 32, low word",
6012 .u.insns_int = {
6013 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6014 BPF_ALU64_IMM(BPF_LSH, R0, 12),
6015 BPF_EXIT_INSN(),
6016 },
6017 INTERNAL,
6018 { },
6019 { { 0, 0xbcdef000 } }
6020 },
6021 {
6022 "ALU64_LSH_K: Shift < 32, high word",
6023 .u.insns_int = {
6024 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6025 BPF_ALU64_IMM(BPF_LSH, R0, 12),
6026 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6027 BPF_EXIT_INSN(),
6028 },
6029 INTERNAL,
6030 { },
6031 { { 0, 0x3456789a } }
6032 },
6033 {
6034 "ALU64_LSH_K: Shift > 32, low word",
6035 .u.insns_int = {
6036 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6037 BPF_ALU64_IMM(BPF_LSH, R0, 36),
6038 BPF_EXIT_INSN(),
6039 },
6040 INTERNAL,
6041 { },
6042 { { 0, 0 } }
6043 },
6044 {
6045 "ALU64_LSH_K: Shift > 32, high word",
6046 .u.insns_int = {
6047 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6048 BPF_ALU64_IMM(BPF_LSH, R0, 36),
6049 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6050 BPF_EXIT_INSN(),
6051 },
6052 INTERNAL,
6053 { },
6054 { { 0, 0x9abcdef0 } }
6055 },
6056 {
6057 "ALU64_LSH_K: Shift == 32, low word",
6058 .u.insns_int = {
6059 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6060 BPF_ALU64_IMM(BPF_LSH, R0, 32),
6061 BPF_EXIT_INSN(),
6062 },
6063 INTERNAL,
6064 { },
6065 { { 0, 0 } }
6066 },
6067 {
6068 "ALU64_LSH_K: Shift == 32, high word",
6069 .u.insns_int = {
6070 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6071 BPF_ALU64_IMM(BPF_LSH, R0, 32),
6072 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6073 BPF_EXIT_INSN(),
6074 },
6075 INTERNAL,
6076 { },
6077 { { 0, 0x89abcdef } }
6078 },
6079 {
6080 "ALU64_LSH_K: Zero shift",
6081 .u.insns_int = {
6082 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6083 BPF_ALU64_IMM(BPF_LSH, R0, 0),
6084 BPF_EXIT_INSN(),
6085 },
6086 INTERNAL,
6087 { },
6088 { { 0, 0x89abcdef } }
6089 },
Michael Holzheucffc6422015-05-11 22:22:44 -07006090 /* BPF_ALU | BPF_RSH | BPF_X */
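	/*
	 * BPF_RSH is a logical right shift: vacated high-order bits are
	 * filled with zeroes, in contrast to BPF_ARSH further below.
	 */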
6091 {
6092 "ALU_RSH_X: 2 >> 1 = 1",
6093 .u.insns_int = {
6094 BPF_LD_IMM64(R0, 2),
6095 BPF_ALU32_IMM(BPF_MOV, R1, 1),
6096 BPF_ALU32_REG(BPF_RSH, R0, R1),
6097 BPF_EXIT_INSN(),
6098 },
6099 INTERNAL,
6100 { },
6101 { { 0, 1 } },
6102 },
6103 {
6104 "ALU_RSH_X: 0x80000000 >> 31 = 1",
6105 .u.insns_int = {
6106 BPF_LD_IMM64(R0, 0x80000000),
6107 BPF_ALU32_IMM(BPF_MOV, R1, 31),
6108 BPF_ALU32_REG(BPF_RSH, R0, R1),
6109 BPF_EXIT_INSN(),
6110 },
6111 INTERNAL,
6112 { },
6113 { { 0, 1 } },
6114 },
6115 {
Johan Almbladh0f2fca12021-08-09 11:18:20 +02006116 "ALU_RSH_X: 0x12345678 >> 20 = 0x123",
6117 .u.insns_int = {
6118 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6119 BPF_ALU32_IMM(BPF_MOV, R1, 20),
6120 BPF_ALU32_REG(BPF_RSH, R0, R1),
6121 BPF_EXIT_INSN(),
6122 },
6123 INTERNAL,
6124 { },
6125 { { 0, 0x123 } }
6126 },
6127 {
Michael Holzheucffc6422015-05-11 22:22:44 -07006128 "ALU64_RSH_X: 2 >> 1 = 1",
6129 .u.insns_int = {
6130 BPF_LD_IMM64(R0, 2),
6131 BPF_ALU32_IMM(BPF_MOV, R1, 1),
6132 BPF_ALU64_REG(BPF_RSH, R0, R1),
6133 BPF_EXIT_INSN(),
6134 },
6135 INTERNAL,
6136 { },
6137 { { 0, 1 } },
6138 },
6139 {
6140 "ALU64_RSH_X: 0x80000000 >> 31 = 1",
6141 .u.insns_int = {
6142 BPF_LD_IMM64(R0, 0x80000000),
6143 BPF_ALU32_IMM(BPF_MOV, R1, 31),
6144 BPF_ALU64_REG(BPF_RSH, R0, R1),
6145 BPF_EXIT_INSN(),
6146 },
6147 INTERNAL,
6148 { },
6149 { { 0, 1 } },
6150 },
Johan Almbladh3b9890e2021-08-09 11:18:21 +02006151 {
6152 "ALU64_RSH_X: Shift < 32, low word",
6153 .u.insns_int = {
6154 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6155 BPF_ALU32_IMM(BPF_MOV, R1, 12),
6156 BPF_ALU64_REG(BPF_RSH, R0, R1),
6157 BPF_EXIT_INSN(),
6158 },
6159 INTERNAL,
6160 { },
6161 { { 0, 0x56789abc } }
6162 },
6163 {
6164 "ALU64_RSH_X: Shift < 32, high word",
6165 .u.insns_int = {
6166 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6167 BPF_ALU32_IMM(BPF_MOV, R1, 12),
6168 BPF_ALU64_REG(BPF_RSH, R0, R1),
6169 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6170 BPF_EXIT_INSN(),
6171 },
6172 INTERNAL,
6173 { },
6174 { { 0, 0x00081234 } }
6175 },
6176 {
6177 "ALU64_RSH_X: Shift > 32, low word",
6178 .u.insns_int = {
6179 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6180 BPF_ALU32_IMM(BPF_MOV, R1, 36),
6181 BPF_ALU64_REG(BPF_RSH, R0, R1),
6182 BPF_EXIT_INSN(),
6183 },
6184 INTERNAL,
6185 { },
6186 { { 0, 0x08123456 } }
6187 },
6188 {
6189 "ALU64_RSH_X: Shift > 32, high word",
6190 .u.insns_int = {
6191 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6192 BPF_ALU32_IMM(BPF_MOV, R1, 36),
6193 BPF_ALU64_REG(BPF_RSH, R0, R1),
6194 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6195 BPF_EXIT_INSN(),
6196 },
6197 INTERNAL,
6198 { },
6199 { { 0, 0 } }
6200 },
6201 {
6202 "ALU64_RSH_X: Shift == 32, low word",
6203 .u.insns_int = {
6204 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6205 BPF_ALU32_IMM(BPF_MOV, R1, 32),
6206 BPF_ALU64_REG(BPF_RSH, R0, R1),
6207 BPF_EXIT_INSN(),
6208 },
6209 INTERNAL,
6210 { },
6211 { { 0, 0x81234567 } }
6212 },
6213 {
6214 "ALU64_RSH_X: Shift == 32, high word",
6215 .u.insns_int = {
6216 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6217 BPF_ALU32_IMM(BPF_MOV, R1, 32),
6218 BPF_ALU64_REG(BPF_RSH, R0, R1),
6219 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6220 BPF_EXIT_INSN(),
6221 },
6222 INTERNAL,
6223 { },
6224 { { 0, 0 } }
6225 },
6226 {
6227 "ALU64_RSH_X: Zero shift, low word",
6228 .u.insns_int = {
6229 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6230 BPF_ALU32_IMM(BPF_MOV, R1, 0),
6231 BPF_ALU64_REG(BPF_RSH, R0, R1),
6232 BPF_EXIT_INSN(),
6233 },
6234 INTERNAL,
6235 { },
6236 { { 0, 0x89abcdef } }
6237 },
6238 {
6239 "ALU64_RSH_X: Zero shift, high word",
6240 .u.insns_int = {
6241 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6242 BPF_ALU32_IMM(BPF_MOV, R1, 0),
6243 BPF_ALU64_REG(BPF_RSH, R0, R1),
6244 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6245 BPF_EXIT_INSN(),
6246 },
6247 INTERNAL,
6248 { },
6249 { { 0, 0x81234567 } }
6250 },
Michael Holzheucffc6422015-05-11 22:22:44 -07006251 /* BPF_ALU | BPF_RSH | BPF_K */
6252 {
6253 "ALU_RSH_K: 2 >> 1 = 1",
6254 .u.insns_int = {
6255 BPF_LD_IMM64(R0, 2),
6256 BPF_ALU32_IMM(BPF_RSH, R0, 1),
6257 BPF_EXIT_INSN(),
6258 },
6259 INTERNAL,
6260 { },
6261 { { 0, 1 } },
6262 },
6263 {
6264 "ALU_RSH_K: 0x80000000 >> 31 = 1",
6265 .u.insns_int = {
6266 BPF_LD_IMM64(R0, 0x80000000),
6267 BPF_ALU32_IMM(BPF_RSH, R0, 31),
6268 BPF_EXIT_INSN(),
6269 },
6270 INTERNAL,
6271 { },
6272 { { 0, 1 } },
6273 },
6274 {
Johan Almbladh0f2fca12021-08-09 11:18:20 +02006275 "ALU_RSH_K: 0x12345678 >> 20 = 0x123",
6276 .u.insns_int = {
6277 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6278 BPF_ALU32_IMM(BPF_RSH, R0, 20),
6279 BPF_EXIT_INSN(),
6280 },
6281 INTERNAL,
6282 { },
6283 { { 0, 0x123 } }
6284 },
6285 {
6286 "ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
6287 .u.insns_int = {
6288 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6289 BPF_ALU32_IMM(BPF_RSH, R0, 0),
6290 BPF_EXIT_INSN(),
6291 },
6292 INTERNAL,
6293 { },
6294 { { 0, 0x12345678 } }
6295 },
6296 {
Michael Holzheucffc6422015-05-11 22:22:44 -07006297 "ALU64_RSH_K: 2 >> 1 = 1",
6298 .u.insns_int = {
6299 BPF_LD_IMM64(R0, 2),
6300 BPF_ALU64_IMM(BPF_RSH, R0, 1),
6301 BPF_EXIT_INSN(),
6302 },
6303 INTERNAL,
6304 { },
6305 { { 0, 1 } },
6306 },
6307 {
6308 "ALU64_RSH_K: 0x80000000 >> 31 = 1",
6309 .u.insns_int = {
6310 BPF_LD_IMM64(R0, 0x80000000),
6311 BPF_ALU64_IMM(BPF_RSH, R0, 31),
6312 BPF_EXIT_INSN(),
6313 },
6314 INTERNAL,
6315 { },
6316 { { 0, 1 } },
6317 },
Johan Almbladh3b9890e2021-08-09 11:18:21 +02006318 {
6319 "ALU64_RSH_K: Shift < 32, low word",
6320 .u.insns_int = {
6321 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6322 BPF_ALU64_IMM(BPF_RSH, R0, 12),
6323 BPF_EXIT_INSN(),
6324 },
6325 INTERNAL,
6326 { },
6327 { { 0, 0x56789abc } }
6328 },
6329 {
6330 "ALU64_RSH_K: Shift < 32, high word",
6331 .u.insns_int = {
6332 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6333 BPF_ALU64_IMM(BPF_RSH, R0, 12),
6334 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6335 BPF_EXIT_INSN(),
6336 },
6337 INTERNAL,
6338 { },
6339 { { 0, 0x00081234 } }
6340 },
6341 {
6342 "ALU64_RSH_K: Shift > 32, low word",
6343 .u.insns_int = {
6344 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6345 BPF_ALU64_IMM(BPF_RSH, R0, 36),
6346 BPF_EXIT_INSN(),
6347 },
6348 INTERNAL,
6349 { },
6350 { { 0, 0x08123456 } }
6351 },
6352 {
6353 "ALU64_RSH_K: Shift > 32, high word",
6354 .u.insns_int = {
6355 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6356 BPF_ALU64_IMM(BPF_RSH, R0, 36),
6357 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6358 BPF_EXIT_INSN(),
6359 },
6360 INTERNAL,
6361 { },
6362 { { 0, 0 } }
6363 },
6364 {
6365 "ALU64_RSH_K: Shift == 32, low word",
6366 .u.insns_int = {
6367 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6368 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6369 BPF_EXIT_INSN(),
6370 },
6371 INTERNAL,
6372 { },
6373 { { 0, 0x81234567 } }
6374 },
6375 {
6376 "ALU64_RSH_K: Shift == 32, high word",
6377 .u.insns_int = {
6378 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6379 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6380 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6381 BPF_EXIT_INSN(),
6382 },
6383 INTERNAL,
6384 { },
6385 { { 0, 0 } }
6386 },
6387 {
6388 "ALU64_RSH_K: Zero shift",
6389 .u.insns_int = {
6390 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6391 BPF_ALU64_IMM(BPF_RSH, R0, 0),
6392 BPF_EXIT_INSN(),
6393 },
6394 INTERNAL,
6395 { },
6396 { { 0, 0x89abcdef } }
6397 },
Michael Holzheucffc6422015-05-11 22:22:44 -07006398 /* BPF_ALU | BPF_ARSH | BPF_X */
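	/*
	 * BPF_ARSH is an arithmetic right shift that replicates the sign
	 * bit. The 32-bit form operates on the low subregister only; the
	 * 64-bit form shifts the full register, so 0x8123456789abcdef
	 * (bit 63 set) is filled with ones from the top.
	 */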
6399 {
Johan Almbladh0f2fca12021-08-09 11:18:20 +02006400 "ALU32_ARSH_X: -1234 >> 7 = -10",
6401 .u.insns_int = {
6402 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
6403 BPF_ALU32_IMM(BPF_MOV, R1, 7),
6404 BPF_ALU32_REG(BPF_ARSH, R0, R1),
6405 BPF_EXIT_INSN(),
6406 },
6407 INTERNAL,
6408 { },
6409 { { 0, -10 } }
6410 },
6411 {
Johan Almbladh3b9890e2021-08-09 11:18:21 +02006412 "ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
Michael Holzheucffc6422015-05-11 22:22:44 -07006413 .u.insns_int = {
6414 BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
6415 BPF_ALU32_IMM(BPF_MOV, R1, 40),
6416 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6417 BPF_EXIT_INSN(),
6418 },
6419 INTERNAL,
6420 { },
6421 { { 0, 0xffff00ff } },
6422 },
Johan Almbladh3b9890e2021-08-09 11:18:21 +02006423 {
6424 "ALU64_ARSH_X: Shift < 32, low word",
6425 .u.insns_int = {
6426 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6427 BPF_ALU32_IMM(BPF_MOV, R1, 12),
6428 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6429 BPF_EXIT_INSN(),
6430 },
6431 INTERNAL,
6432 { },
6433 { { 0, 0x56789abc } }
6434 },
6435 {
6436 "ALU64_ARSH_X: Shift < 32, high word",
6437 .u.insns_int = {
6438 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6439 BPF_ALU32_IMM(BPF_MOV, R1, 12),
6440 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6441 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6442 BPF_EXIT_INSN(),
6443 },
6444 INTERNAL,
6445 { },
6446 { { 0, 0xfff81234 } }
6447 },
6448 {
6449 "ALU64_ARSH_X: Shift > 32, low word",
6450 .u.insns_int = {
6451 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6452 BPF_ALU32_IMM(BPF_MOV, R1, 36),
6453 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6454 BPF_EXIT_INSN(),
6455 },
6456 INTERNAL,
6457 { },
6458 { { 0, 0xf8123456 } }
6459 },
6460 {
6461 "ALU64_ARSH_X: Shift > 32, high word",
6462 .u.insns_int = {
6463 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6464 BPF_ALU32_IMM(BPF_MOV, R1, 36),
6465 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6466 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6467 BPF_EXIT_INSN(),
6468 },
6469 INTERNAL,
6470 { },
6471 { { 0, -1 } }
6472 },
6473 {
6474 "ALU64_ARSH_X: Shift == 32, low word",
6475 .u.insns_int = {
6476 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6477 BPF_ALU32_IMM(BPF_MOV, R1, 32),
6478 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6479 BPF_EXIT_INSN(),
6480 },
6481 INTERNAL,
6482 { },
6483 { { 0, 0x81234567 } }
6484 },
6485 {
6486 "ALU64_ARSH_X: Shift == 32, high word",
6487 .u.insns_int = {
6488 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6489 BPF_ALU32_IMM(BPF_MOV, R1, 32),
6490 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6491 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6492 BPF_EXIT_INSN(),
6493 },
6494 INTERNAL,
6495 { },
6496 { { 0, -1 } }
6497 },
6498 {
6499 "ALU64_ARSH_X: Zero shift, low word",
6500 .u.insns_int = {
6501 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6502 BPF_ALU32_IMM(BPF_MOV, R1, 0),
6503 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6504 BPF_EXIT_INSN(),
6505 },
6506 INTERNAL,
6507 { },
6508 { { 0, 0x89abcdef } }
6509 },
6510 {
6511 "ALU64_ARSH_X: Zero shift, high word",
6512 .u.insns_int = {
6513 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6514 BPF_ALU32_IMM(BPF_MOV, R1, 0),
6515 BPF_ALU64_REG(BPF_ARSH, R0, R1),
6516 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6517 BPF_EXIT_INSN(),
6518 },
6519 INTERNAL,
6520 { },
6521 { { 0, 0x81234567 } }
6522 },
Michael Holzheucffc6422015-05-11 22:22:44 -07006523 /* BPF_ALU | BPF_ARSH | BPF_K */
6524 {
Johan Almbladh0f2fca12021-08-09 11:18:20 +02006525 "ALU32_ARSH_K: -1234 >> 7 = -10",
6526 .u.insns_int = {
6527 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
6528 BPF_ALU32_IMM(BPF_ARSH, R0, 7),
6529 BPF_EXIT_INSN(),
6530 },
6531 INTERNAL,
6532 { },
6533 { { 0, -10 } }
6534 },
6535 {
6536 "ALU32_ARSH_K: -1234 >> 0 = -1234",
6537 .u.insns_int = {
6538 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
6539 BPF_ALU32_IMM(BPF_ARSH, R0, 0),
6540 BPF_EXIT_INSN(),
6541 },
6542 INTERNAL,
6543 { },
6544 { { 0, -1234 } }
6545 },
6546 {
Johan Almbladh3b9890e2021-08-09 11:18:21 +02006547 "ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
Michael Holzheucffc6422015-05-11 22:22:44 -07006548 .u.insns_int = {
6549 BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
6550 BPF_ALU64_IMM(BPF_ARSH, R0, 40),
6551 BPF_EXIT_INSN(),
6552 },
6553 INTERNAL,
6554 { },
6555 { { 0, 0xffff00ff } },
6556 },
Johan Almbladh3b9890e2021-08-09 11:18:21 +02006557 {
6558 "ALU64_ARSH_K: Shift < 32, low word",
6559 .u.insns_int = {
6560 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
 6561 			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
6562 BPF_EXIT_INSN(),
6563 },
6564 INTERNAL,
6565 { },
6566 { { 0, 0x56789abc } }
6567 },
6568 {
6569 "ALU64_ARSH_K: Shift < 32, high word",
6570 .u.insns_int = {
6571 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6572 BPF_ALU64_IMM(BPF_ARSH, R0, 12),
6573 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6574 BPF_EXIT_INSN(),
6575 },
6576 INTERNAL,
6577 { },
6578 { { 0, 0xfff81234 } }
6579 },
6580 {
6581 "ALU64_ARSH_K: Shift > 32, low word",
6582 .u.insns_int = {
6583 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6584 BPF_ALU64_IMM(BPF_ARSH, R0, 36),
6585 BPF_EXIT_INSN(),
6586 },
6587 INTERNAL,
6588 { },
6589 { { 0, 0xf8123456 } }
6590 },
6591 {
6592 "ALU64_ARSH_K: Shift > 32, high word",
6593 .u.insns_int = {
6594 BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
6595 BPF_ALU64_IMM(BPF_ARSH, R0, 36),
6596 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6597 BPF_EXIT_INSN(),
6598 },
6599 INTERNAL,
6600 { },
6601 { { 0, -1 } }
6602 },
6603 {
6604 "ALU64_ARSH_K: Shift == 32, low word",
6605 .u.insns_int = {
6606 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6607 BPF_ALU64_IMM(BPF_ARSH, R0, 32),
6608 BPF_EXIT_INSN(),
6609 },
6610 INTERNAL,
6611 { },
6612 { { 0, 0x81234567 } }
6613 },
6614 {
6615 "ALU64_ARSH_K: Shift == 32, high word",
6616 .u.insns_int = {
6617 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6618 BPF_ALU64_IMM(BPF_ARSH, R0, 32),
6619 BPF_ALU64_IMM(BPF_RSH, R0, 32),
6620 BPF_EXIT_INSN(),
6621 },
6622 INTERNAL,
6623 { },
6624 { { 0, -1 } }
6625 },
6626 {
Colin Ian King1bda52f2021-08-15 22:39:50 +01006627 "ALU64_ARSH_K: Zero shift",
Johan Almbladh3b9890e2021-08-09 11:18:21 +02006628 .u.insns_int = {
6629 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
6630 BPF_ALU64_IMM(BPF_ARSH, R0, 0),
6631 BPF_EXIT_INSN(),
6632 },
6633 INTERNAL,
6634 { },
6635 { { 0, 0x89abcdef } }
6636 },
Michael Holzheucffc6422015-05-11 22:22:44 -07006637 /* BPF_ALU | BPF_NEG */
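	/*
	 * BPF_NEG takes no source operand: the immediate is ignored and the
	 * destination is replaced by its two's-complement negation, 32-bit
	 * or 64-bit wide depending on the instruction class.
	 */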
6638 {
6639 "ALU_NEG: -(3) = -3",
6640 .u.insns_int = {
6641 BPF_ALU32_IMM(BPF_MOV, R0, 3),
6642 BPF_ALU32_IMM(BPF_NEG, R0, 0),
6643 BPF_EXIT_INSN(),
6644 },
6645 INTERNAL,
6646 { },
6647 { { 0, -3 } },
6648 },
6649 {
6650 "ALU_NEG: -(-3) = 3",
6651 .u.insns_int = {
6652 BPF_ALU32_IMM(BPF_MOV, R0, -3),
6653 BPF_ALU32_IMM(BPF_NEG, R0, 0),
6654 BPF_EXIT_INSN(),
6655 },
6656 INTERNAL,
6657 { },
6658 { { 0, 3 } },
6659 },
6660 {
6661 "ALU64_NEG: -(3) = -3",
6662 .u.insns_int = {
6663 BPF_LD_IMM64(R0, 3),
6664 BPF_ALU64_IMM(BPF_NEG, R0, 0),
6665 BPF_EXIT_INSN(),
6666 },
6667 INTERNAL,
6668 { },
6669 { { 0, -3 } },
6670 },
6671 {
6672 "ALU64_NEG: -(-3) = 3",
6673 .u.insns_int = {
6674 BPF_LD_IMM64(R0, -3),
6675 BPF_ALU64_IMM(BPF_NEG, R0, 0),
6676 BPF_EXIT_INSN(),
6677 },
6678 INTERNAL,
6679 { },
6680 { { 0, 3 } },
6681 },
6682 /* BPF_ALU | BPF_END | BPF_FROM_BE */
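	/*
	 * BPF_END converts between the named byte order and host order.
	 * BPF_FROM_BE/BPF_FROM_LE are no-ops with respect to byte swapping
	 * on big-/little-endian hosts respectively, but the 16- and 32-bit
	 * forms still clear the bits above the chosen width; the 32-bit
	 * cases below verify that by adding in the (expected zero) high
	 * word of the result.
	 */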
6683 {
6684 "ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
6685 .u.insns_int = {
6686 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6687 BPF_ENDIAN(BPF_FROM_BE, R0, 16),
6688 BPF_EXIT_INSN(),
6689 },
6690 INTERNAL,
6691 { },
6692 { { 0, cpu_to_be16(0xcdef) } },
6693 },
6694 {
6695 "ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
6696 .u.insns_int = {
6697 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6698 BPF_ENDIAN(BPF_FROM_BE, R0, 32),
Xi Wangba29bec2015-07-08 14:00:56 -07006699 BPF_ALU64_REG(BPF_MOV, R1, R0),
6700 BPF_ALU64_IMM(BPF_RSH, R1, 32),
6701 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
Michael Holzheucffc6422015-05-11 22:22:44 -07006702 BPF_EXIT_INSN(),
6703 },
6704 INTERNAL,
6705 { },
6706 { { 0, cpu_to_be32(0x89abcdef) } },
6707 },
6708 {
6709 "ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
6710 .u.insns_int = {
6711 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6712 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
6713 BPF_EXIT_INSN(),
6714 },
6715 INTERNAL,
6716 { },
6717 { { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
6718 },
6719 /* BPF_ALU | BPF_END | BPF_FROM_LE */
6720 {
6721 "ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
6722 .u.insns_int = {
6723 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6724 BPF_ENDIAN(BPF_FROM_LE, R0, 16),
6725 BPF_EXIT_INSN(),
6726 },
6727 INTERNAL,
6728 { },
6729 { { 0, cpu_to_le16(0xcdef) } },
6730 },
6731 {
6732 "ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
6733 .u.insns_int = {
6734 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6735 BPF_ENDIAN(BPF_FROM_LE, R0, 32),
Xi Wangba29bec2015-07-08 14:00:56 -07006736 BPF_ALU64_REG(BPF_MOV, R1, R0),
6737 BPF_ALU64_IMM(BPF_RSH, R1, 32),
6738 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
Michael Holzheucffc6422015-05-11 22:22:44 -07006739 BPF_EXIT_INSN(),
6740 },
6741 INTERNAL,
6742 { },
6743 { { 0, cpu_to_le32(0x89abcdef) } },
6744 },
6745 {
6746 "ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
6747 .u.insns_int = {
6748 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6749 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
6750 BPF_EXIT_INSN(),
6751 },
6752 INTERNAL,
6753 { },
6754 { { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
6755 },
6756 /* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
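	/*
	 * BPF_ST stores an immediate, BPF_STX stores a source register.
	 * The tests below write through the frame pointer R10 at negative
	 * offsets and load the value back, with .stack_depth sized to
	 * cover the accessed slot.
	 */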
6757 {
6758 "ST_MEM_B: Store/Load byte: max negative",
6759 .u.insns_int = {
6760 BPF_ALU32_IMM(BPF_MOV, R0, 1),
6761 BPF_ST_MEM(BPF_B, R10, -40, 0xff),
6762 BPF_LDX_MEM(BPF_B, R0, R10, -40),
6763 BPF_EXIT_INSN(),
6764 },
6765 INTERNAL,
6766 { },
6767 { { 0, 0xff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006768 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006769 },
6770 {
6771 "ST_MEM_B: Store/Load byte: max positive",
6772 .u.insns_int = {
6773 BPF_ALU32_IMM(BPF_MOV, R0, 1),
 6774 			BPF_ST_MEM(BPF_B, R10, -40, 0x7f),
 6775 			BPF_LDX_MEM(BPF_B, R0, R10, -40),
6776 BPF_EXIT_INSN(),
6777 },
6778 INTERNAL,
6779 { },
6780 { { 0, 0x7f } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006781 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006782 },
6783 {
6784 "STX_MEM_B: Store/Load byte: max negative",
6785 .u.insns_int = {
6786 BPF_LD_IMM64(R0, 0),
6787 BPF_LD_IMM64(R1, 0xffLL),
6788 BPF_STX_MEM(BPF_B, R10, R1, -40),
6789 BPF_LDX_MEM(BPF_B, R0, R10, -40),
6790 BPF_EXIT_INSN(),
6791 },
6792 INTERNAL,
6793 { },
6794 { { 0, 0xff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006795 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006796 },
6797 {
6798 "ST_MEM_H: Store/Load half word: max negative",
6799 .u.insns_int = {
6800 BPF_ALU32_IMM(BPF_MOV, R0, 1),
6801 BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
6802 BPF_LDX_MEM(BPF_H, R0, R10, -40),
6803 BPF_EXIT_INSN(),
6804 },
6805 INTERNAL,
6806 { },
6807 { { 0, 0xffff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006808 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006809 },
6810 {
6811 "ST_MEM_H: Store/Load half word: max positive",
6812 .u.insns_int = {
6813 BPF_ALU32_IMM(BPF_MOV, R0, 1),
6814 BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
6815 BPF_LDX_MEM(BPF_H, R0, R10, -40),
6816 BPF_EXIT_INSN(),
6817 },
6818 INTERNAL,
6819 { },
6820 { { 0, 0x7fff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006821 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006822 },
6823 {
6824 "STX_MEM_H: Store/Load half word: max negative",
6825 .u.insns_int = {
6826 BPF_LD_IMM64(R0, 0),
6827 BPF_LD_IMM64(R1, 0xffffLL),
6828 BPF_STX_MEM(BPF_H, R10, R1, -40),
6829 BPF_LDX_MEM(BPF_H, R0, R10, -40),
6830 BPF_EXIT_INSN(),
6831 },
6832 INTERNAL,
6833 { },
6834 { { 0, 0xffff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006835 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006836 },
6837 {
6838 "ST_MEM_W: Store/Load word: max negative",
6839 .u.insns_int = {
6840 BPF_ALU32_IMM(BPF_MOV, R0, 1),
6841 BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
6842 BPF_LDX_MEM(BPF_W, R0, R10, -40),
6843 BPF_EXIT_INSN(),
6844 },
6845 INTERNAL,
6846 { },
6847 { { 0, 0xffffffff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006848 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006849 },
6850 {
6851 "ST_MEM_W: Store/Load word: max positive",
6852 .u.insns_int = {
6853 BPF_ALU32_IMM(BPF_MOV, R0, 1),
6854 BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
6855 BPF_LDX_MEM(BPF_W, R0, R10, -40),
6856 BPF_EXIT_INSN(),
6857 },
6858 INTERNAL,
6859 { },
6860 { { 0, 0x7fffffff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006861 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006862 },
6863 {
6864 "STX_MEM_W: Store/Load word: max negative",
6865 .u.insns_int = {
6866 BPF_LD_IMM64(R0, 0),
6867 BPF_LD_IMM64(R1, 0xffffffffLL),
6868 BPF_STX_MEM(BPF_W, R10, R1, -40),
6869 BPF_LDX_MEM(BPF_W, R0, R10, -40),
6870 BPF_EXIT_INSN(),
6871 },
6872 INTERNAL,
6873 { },
6874 { { 0, 0xffffffff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006875 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006876 },
6877 {
6878 "ST_MEM_DW: Store/Load double word: max negative",
6879 .u.insns_int = {
6880 BPF_ALU32_IMM(BPF_MOV, R0, 1),
6881 BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
6882 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
6883 BPF_EXIT_INSN(),
6884 },
6885 INTERNAL,
6886 { },
6887 { { 0, 0xffffffff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006888 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006889 },
6890 {
6891 "ST_MEM_DW: Store/Load double word: max negative 2",
6892 .u.insns_int = {
Michael Holzheu56cbaa42015-05-13 20:40:39 -07006893 BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
6894 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
Michael Holzheucffc6422015-05-11 22:22:44 -07006895 BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
6896 BPF_LDX_MEM(BPF_DW, R2, R10, -40),
6897 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6898 BPF_MOV32_IMM(R0, 2),
6899 BPF_EXIT_INSN(),
6900 BPF_MOV32_IMM(R0, 1),
6901 BPF_EXIT_INSN(),
6902 },
6903 INTERNAL,
6904 { },
6905 { { 0, 0x1 } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006906 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006907 },
6908 {
6909 "ST_MEM_DW: Store/Load double word: max positive",
6910 .u.insns_int = {
6911 BPF_ALU32_IMM(BPF_MOV, R0, 1),
6912 BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
6913 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
6914 BPF_EXIT_INSN(),
6915 },
6916 INTERNAL,
6917 { },
6918 { { 0, 0x7fffffff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006919 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006920 },
6921 {
6922 "STX_MEM_DW: Store/Load double word: max negative",
6923 .u.insns_int = {
6924 BPF_LD_IMM64(R0, 0),
6925 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
Johan Almbladhae7f4702021-07-21 12:40:58 +02006926 BPF_STX_MEM(BPF_DW, R10, R1, -40),
6927 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
Michael Holzheucffc6422015-05-11 22:22:44 -07006928 BPF_EXIT_INSN(),
6929 },
6930 INTERNAL,
6931 { },
6932 { { 0, 0xffffffff } },
Alexei Starovoitov105c0362017-05-30 13:31:32 -07006933 .stack_depth = 40,
Michael Holzheucffc6422015-05-11 22:22:44 -07006934 },
Johan Almbladhe5009b42021-08-09 11:18:24 +02006935 {
6936 "STX_MEM_DW: Store double word: first word in memory",
6937 .u.insns_int = {
6938 BPF_LD_IMM64(R0, 0),
6939 BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
6940 BPF_STX_MEM(BPF_DW, R10, R1, -40),
6941 BPF_LDX_MEM(BPF_W, R0, R10, -40),
6942 BPF_EXIT_INSN(),
6943 },
6944 INTERNAL,
6945 { },
6946#ifdef __BIG_ENDIAN
6947 { { 0, 0x01234567 } },
6948#else
6949 { { 0, 0x89abcdef } },
6950#endif
6951 .stack_depth = 40,
6952 },
6953 {
6954 "STX_MEM_DW: Store double word: second word in memory",
6955 .u.insns_int = {
6956 BPF_LD_IMM64(R0, 0),
6957 BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
6958 BPF_STX_MEM(BPF_DW, R10, R1, -40),
6959 BPF_LDX_MEM(BPF_W, R0, R10, -36),
6960 BPF_EXIT_INSN(),
6961 },
6962 INTERNAL,
6963 { },
6964#ifdef __BIG_ENDIAN
6965 { { 0, 0x89abcdef } },
6966#else
6967 { { 0, 0x01234567 } },
6968#endif
6969 .stack_depth = 40,
6970 },
Brendan Jackman91c960b2021-01-14 18:17:44 +00006971 /* BPF_STX | BPF_ATOMIC | BPF_W/DW */
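	/*
	 * The two XADD tests below are generated at run time by their
	 * .fill_helper callbacks rather than from a static instruction
	 * array; each builds a long sequence of XADD (atomic add)
	 * instructions on a single stack slot and checks the accumulated
	 * value.
	 */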
Michael Holzheucffc6422015-05-11 22:22:44 -07006972 {
Daniel Borkmann85f68fe2017-05-01 02:57:20 +02006973 "STX_XADD_W: X + 1 + 1 + 1 + ...",
6974 { },
6975 INTERNAL,
6976 { },
6977 { { 0, 4134 } },
6978 .fill_helper = bpf_fill_stxw,
6979 },
6980 {
Daniel Borkmann85f68fe2017-05-01 02:57:20 +02006981 "STX_XADD_DW: X + 1 + 1 + 1 + ...",
6982 { },
6983 INTERNAL,
6984 { },
6985 { { 0, 4134 } },
6986 .fill_helper = bpf_fill_stxdw,
6987 },
Johan Almbladhe4517b32021-08-09 11:18:27 +02006988 /*
6989 * Exhaustive tests of atomic operation variants.
6990 * Individual tests are expanded from template macros for all
6991 * combinations of ALU operation, word size and fetching.
6992 */
6993#define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result) \
6994{ \
6995 "BPF_ATOMIC | " #width ", " #op ": Test: " \
6996 #old " " #logic " " #update " = " #result, \
6997 .u.insns_int = { \
6998 BPF_ALU32_IMM(BPF_MOV, R5, update), \
6999 BPF_ST_MEM(width, R10, -40, old), \
7000 BPF_ATOMIC_OP(width, op, R10, R5, -40), \
7001 BPF_LDX_MEM(width, R0, R10, -40), \
7002 BPF_EXIT_INSN(), \
7003 }, \
7004 INTERNAL, \
7005 { }, \
7006 { { 0, result } }, \
7007 .stack_depth = 40, \
7008}
7009#define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result) \
7010{ \
7011 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: " \
7012 #old " " #logic " " #update " = " #result, \
7013 .u.insns_int = { \
7014 BPF_ALU64_REG(BPF_MOV, R1, R10), \
7015 BPF_ALU32_IMM(BPF_MOV, R0, update), \
 7016 		BPF_ST_MEM(width, R10, -40, old),			\
7017 BPF_ATOMIC_OP(width, op, R10, R0, -40), \
7018 BPF_ALU64_REG(BPF_MOV, R0, R10), \
7019 BPF_ALU64_REG(BPF_SUB, R0, R1), \
7020 BPF_EXIT_INSN(), \
7021 }, \
7022 INTERNAL, \
7023 { }, \
7024 { { 0, 0 } }, \
7025 .stack_depth = 40, \
7026}
7027#define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result) \
7028{ \
7029 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: " \
7030 #old " " #logic " " #update " = " #result, \
7031 .u.insns_int = { \
7032 BPF_ALU64_REG(BPF_MOV, R0, R10), \
7033 BPF_ALU32_IMM(BPF_MOV, R1, update), \
7034 BPF_ST_MEM(width, R10, -40, old), \
7035 BPF_ATOMIC_OP(width, op, R10, R1, -40), \
7036 BPF_ALU64_REG(BPF_SUB, R0, R10), \
7037 BPF_EXIT_INSN(), \
7038 }, \
7039 INTERNAL, \
7040 { }, \
7041 { { 0, 0 } }, \
7042 .stack_depth = 40, \
7043}
7044#define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result) \
7045{ \
7046 "BPF_ATOMIC | " #width ", " #op ": Test fetch: " \
7047 #old " " #logic " " #update " = " #result, \
7048 .u.insns_int = { \
7049 BPF_ALU32_IMM(BPF_MOV, R3, update), \
7050 BPF_ST_MEM(width, R10, -40, old), \
7051 BPF_ATOMIC_OP(width, op, R10, R3, -40), \
7052 BPF_ALU64_REG(BPF_MOV, R0, R3), \
7053 BPF_EXIT_INSN(), \
7054 }, \
7055 INTERNAL, \
7056 { }, \
7057 { { 0, (op) & BPF_FETCH ? old : update } }, \
7058 .stack_depth = 40, \
7059}
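	/*
	 * Each template checks a different aspect of the same operation:
	 * TEST1 checks the value left in memory, TEST2 and TEST3 check that
	 * the address register (R10) and the scratch register (R0) are not
	 * clobbered, and TEST4 checks that the source register receives the
	 * old memory value only when BPF_FETCH is set.
	 */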
7060 /* BPF_ATOMIC | BPF_W: BPF_ADD */
7061 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
7062 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
7063 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
7064 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
7065 /* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
7066 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
7067 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
7068 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
7069 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
7070 /* BPF_ATOMIC | BPF_DW: BPF_ADD */
7071 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
7072 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
7073 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
7074 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
7075 /* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
7076 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
7077 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
7078 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
7079 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
7080 /* BPF_ATOMIC | BPF_W: BPF_AND */
7081 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
7082 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
7083 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
7084 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
7085 /* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
7086 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
7087 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
7088 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
7089 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
7090 /* BPF_ATOMIC | BPF_DW: BPF_AND */
7091 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
7092 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
7093 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
7094 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
7095 /* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
7096 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
7097 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
7098 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
7099 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
7100 /* BPF_ATOMIC | BPF_W: BPF_OR */
7101 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
7102 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
7103 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
7104 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
7105 /* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
7106 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
7107 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
7108 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
7109 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
7110 /* BPF_ATOMIC | BPF_DW: BPF_OR */
7111 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
7112 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
7113 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
7114 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
7115 /* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
7116 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
7117 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
7118 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
7119 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
7120 /* BPF_ATOMIC | BPF_W: BPF_XOR */
7121 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
7122 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
7123 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
7124 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
7125 /* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
7126 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
7127 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
7128 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
7129 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
7130 /* BPF_ATOMIC | BPF_DW: BPF_XOR */
7131 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
7132 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
7133 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
7134 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
7135 /* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
7136 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
7137 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
7138 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
7139 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
7140 /* BPF_ATOMIC | BPF_W: BPF_XCHG */
7141 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
7142 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
7143 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
7144 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
7145 /* BPF_ATOMIC | BPF_DW: BPF_XCHG */
7146 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
7147 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
7148 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
7149 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
7150#undef BPF_ATOMIC_OP_TEST1
7151#undef BPF_ATOMIC_OP_TEST2
7152#undef BPF_ATOMIC_OP_TEST3
7153#undef BPF_ATOMIC_OP_TEST4
Johan Almbladh6a3b24c2021-08-09 11:18:28 +02007154 /* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
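	/*
	 * BPF_CMPXCHG uses R0 implicitly: the value at dst + off is
	 * compared with R0 and, if they are equal, the source register is
	 * stored there; in both cases R0 is loaded with the value that was
	 * in memory. The tests below distinguish the returned value, the
	 * stored value and the side effects on the source register.
	 */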
7155 {
7156 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
7157 .u.insns_int = {
7158 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
7159 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
7160 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
7161 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
7162 BPF_EXIT_INSN(),
7163 },
7164 INTERNAL,
7165 { },
7166 { { 0, 0x01234567 } },
7167 .stack_depth = 40,
7168 },
7169 {
7170 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
7171 .u.insns_int = {
7172 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
7173 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
7174 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
7175 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
7176 BPF_LDX_MEM(BPF_W, R0, R10, -40),
7177 BPF_EXIT_INSN(),
7178 },
7179 INTERNAL,
7180 { },
7181 { { 0, 0x89abcdef } },
7182 .stack_depth = 40,
7183 },
7184 {
7185 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
7186 .u.insns_int = {
7187 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
7188 BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
7189 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
7190 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
7191 BPF_EXIT_INSN(),
7192 },
7193 INTERNAL,
7194 { },
7195 { { 0, 0x01234567 } },
7196 .stack_depth = 40,
7197 },
7198 {
7199 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
7200 .u.insns_int = {
7201 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
7202 BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
7203 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
7204 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
7205 BPF_LDX_MEM(BPF_W, R0, R10, -40),
7206 BPF_EXIT_INSN(),
7207 },
7208 INTERNAL,
7209 { },
7210 { { 0, 0x01234567 } },
7211 .stack_depth = 40,
7212 },
7213 {
7214 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
7215 .u.insns_int = {
7216 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
7217 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
7218 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
7219 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
7220 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
7221 BPF_ALU32_REG(BPF_MOV, R0, R3),
7222 BPF_EXIT_INSN(),
7223 },
7224 INTERNAL,
7225 { },
7226 { { 0, 0x89abcdef } },
7227 .stack_depth = 40,
7228 },
7229 /* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
7230 {
7231 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
7232 .u.insns_int = {
7233 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
7234 BPF_LD_IMM64(R2, 0xfecdba9876543210ULL),
7235 BPF_ALU64_REG(BPF_MOV, R0, R1),
7236 BPF_STX_MEM(BPF_DW, R10, R1, -40),
7237 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
7238 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
7239 BPF_ALU64_REG(BPF_SUB, R0, R1),
7240 BPF_EXIT_INSN(),
7241 },
7242 INTERNAL,
7243 { },
7244 { { 0, 0 } },
7245 .stack_depth = 40,
7246 },
7247 {
7248 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
7249 .u.insns_int = {
7250 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
7251 BPF_LD_IMM64(R2, 0xfecdba9876543210ULL),
7252 BPF_ALU64_REG(BPF_MOV, R0, R1),
7253 BPF_STX_MEM(BPF_DW, R10, R0, -40),
7254 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
7255 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
7256 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7257 BPF_ALU64_REG(BPF_SUB, R0, R2),
7258 BPF_EXIT_INSN(),
7259 },
7260 INTERNAL,
7261 { },
7262 { { 0, 0 } },
7263 .stack_depth = 40,
7264 },
7265 {
7266 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
7267 .u.insns_int = {
7268 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
7269 BPF_LD_IMM64(R2, 0xfecdba9876543210ULL),
7270 BPF_ALU64_REG(BPF_MOV, R0, R1),
7271 BPF_ALU64_IMM(BPF_ADD, R0, 1),
7272 BPF_STX_MEM(BPF_DW, R10, R1, -40),
7273 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
7274 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
7275 BPF_ALU64_REG(BPF_SUB, R0, R1),
7276 BPF_EXIT_INSN(),
7277 },
7278 INTERNAL,
7279 { },
7280 { { 0, 0 } },
7281 .stack_depth = 40,
7282 },
7283 {
7284 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
7285 .u.insns_int = {
7286 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
7287 BPF_LD_IMM64(R2, 0xfecdba9876543210ULL),
7288 BPF_ALU64_REG(BPF_MOV, R0, R1),
7289 BPF_ALU64_IMM(BPF_ADD, R0, 1),
7290 BPF_STX_MEM(BPF_DW, R10, R1, -40),
7291 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
7292 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
7293 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
7294 BPF_ALU64_REG(BPF_SUB, R0, R1),
7295 BPF_EXIT_INSN(),
7296 },
7297 INTERNAL,
7298 { },
7299 { { 0, 0 } },
7300 .stack_depth = 40,
7301 },
7302 {
7303 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
7304 .u.insns_int = {
7305 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
7306 BPF_LD_IMM64(R2, 0xfecdba9876543210ULL),
7307 BPF_ALU64_REG(BPF_MOV, R0, R1),
7308 BPF_STX_MEM(BPF_DW, R10, R1, -40),
7309 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
7310 BPF_LD_IMM64(R0, 0xfecdba9876543210ULL),
7311 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
7312 BPF_ALU64_REG(BPF_SUB, R0, R2),
7313 BPF_EXIT_INSN(),
7314 },
7315 INTERNAL,
7316 { },
7317 { { 0, 0 } },
7318 .stack_depth = 40,
7319 },
Johan Almbladhb55dfa82021-08-09 11:18:16 +02007320 /* BPF_JMP32 | BPF_JEQ | BPF_K */
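	/*
	 * The JMP32 conditional jumps compare only the low 32 bits of
	 * their operands. The "Small immediate", "Large immediate" and
	 * "negative immediate" variants are meant to exercise encodings
	 * where the constant does or does not fit the shorter immediate
	 * fields commonly used by JITs.
	 */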
7321 {
7322 "JMP32_JEQ_K: Small immediate",
7323 .u.insns_int = {
7324 BPF_ALU32_IMM(BPF_MOV, R0, 123),
7325 BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
7326 BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
7327 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7328 BPF_EXIT_INSN(),
7329 },
7330 INTERNAL,
7331 { },
7332 { { 0, 123 } }
7333 },
7334 {
7335 "JMP32_JEQ_K: Large immediate",
7336 .u.insns_int = {
7337 BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
7338 BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
7339 BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
7340 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7341 BPF_EXIT_INSN(),
7342 },
7343 INTERNAL,
7344 { },
7345 { { 0, 12345678 } }
7346 },
7347 {
7348 "JMP32_JEQ_K: negative immediate",
7349 .u.insns_int = {
7350 BPF_ALU32_IMM(BPF_MOV, R0, -123),
7351 BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
7352 BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
7353 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7354 BPF_EXIT_INSN(),
7355 },
7356 INTERNAL,
7357 { },
7358 { { 0, -123 } }
7359 },
7360 /* BPF_JMP32 | BPF_JEQ | BPF_X */
7361 {
7362 "JMP32_JEQ_X",
7363 .u.insns_int = {
7364 BPF_ALU32_IMM(BPF_MOV, R0, 1234),
7365 BPF_ALU32_IMM(BPF_MOV, R1, 4321),
7366 BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
7367 BPF_ALU32_IMM(BPF_MOV, R1, 1234),
7368 BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
7369 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7370 BPF_EXIT_INSN(),
7371 },
7372 INTERNAL,
7373 { },
7374 { { 0, 1234 } }
7375 },
7376 /* BPF_JMP32 | BPF_JNE | BPF_K */
7377 {
7378 "JMP32_JNE_K: Small immediate",
7379 .u.insns_int = {
7380 BPF_ALU32_IMM(BPF_MOV, R0, 123),
7381 BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
7382 BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
7383 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7384 BPF_EXIT_INSN(),
7385 },
7386 INTERNAL,
7387 { },
7388 { { 0, 123 } }
7389 },
7390 {
7391 "JMP32_JNE_K: Large immediate",
7392 .u.insns_int = {
7393 BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
7394 BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
7395 BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
7396 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7397 BPF_EXIT_INSN(),
7398 },
7399 INTERNAL,
7400 { },
7401 { { 0, 12345678 } }
7402 },
7403 {
7404 "JMP32_JNE_K: negative immediate",
7405 .u.insns_int = {
7406 BPF_ALU32_IMM(BPF_MOV, R0, -123),
7407 BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
7408 BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
7409 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7410 BPF_EXIT_INSN(),
7411 },
7412 INTERNAL,
7413 { },
7414 { { 0, -123 } }
7415 },
7416 /* BPF_JMP32 | BPF_JNE | BPF_X */
7417 {
7418 "JMP32_JNE_X",
7419 .u.insns_int = {
7420 BPF_ALU32_IMM(BPF_MOV, R0, 1234),
7421 BPF_ALU32_IMM(BPF_MOV, R1, 1234),
7422 BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
7423 BPF_ALU32_IMM(BPF_MOV, R1, 4321),
7424 BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
7425 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7426 BPF_EXIT_INSN(),
7427 },
7428 INTERNAL,
7429 { },
7430 { { 0, 1234 } }
7431 },
7432 /* BPF_JMP32 | BPF_JSET | BPF_K */
7433 {
7434 "JMP32_JSET_K: Small immediate",
7435 .u.insns_int = {
7436 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7437 BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
7438 BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
7439 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7440 BPF_EXIT_INSN(),
7441 },
7442 INTERNAL,
7443 { },
7444 { { 0, 1 } }
7445 },
7446 {
7447 "JMP32_JSET_K: Large immediate",
7448 .u.insns_int = {
7449 BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
7450 BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
7451 BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
7452 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7453 BPF_EXIT_INSN(),
7454 },
7455 INTERNAL,
7456 { },
7457 { { 0, 0x40000000 } }
7458 },
7459 {
7460 "JMP32_JSET_K: negative immediate",
7461 .u.insns_int = {
7462 BPF_ALU32_IMM(BPF_MOV, R0, -123),
7463 BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
7464 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7465 BPF_EXIT_INSN(),
7466 },
7467 INTERNAL,
7468 { },
7469 { { 0, -123 } }
7470 },
7471 /* BPF_JMP32 | BPF_JSET | BPF_X */
7472 {
7473 "JMP32_JSET_X",
7474 .u.insns_int = {
7475 BPF_ALU32_IMM(BPF_MOV, R0, 8),
7476 BPF_ALU32_IMM(BPF_MOV, R1, 7),
7477 BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
7478 BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
 7479 			BPF_JMP32_REG(BPF_JSET, R0, R1, 1),
7480 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7481 BPF_EXIT_INSN(),
7482 },
7483 INTERNAL,
7484 { },
7485 { { 0, 8 } }
7486 },
7487 /* BPF_JMP32 | BPF_JGT | BPF_K */
7488 {
7489 "JMP32_JGT_K: Small immediate",
7490 .u.insns_int = {
7491 BPF_ALU32_IMM(BPF_MOV, R0, 123),
7492 BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
7493 BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
7494 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7495 BPF_EXIT_INSN(),
7496 },
7497 INTERNAL,
7498 { },
7499 { { 0, 123 } }
7500 },
7501 {
7502 "JMP32_JGT_K: Large immediate",
7503 .u.insns_int = {
7504 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
7505 BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
7506 BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
7507 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7508 BPF_EXIT_INSN(),
7509 },
7510 INTERNAL,
7511 { },
7512 { { 0, 0xfffffffe } }
7513 },
7514 /* BPF_JMP32 | BPF_JGT | BPF_X */
7515 {
7516 "JMP32_JGT_X",
7517 .u.insns_int = {
7518 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
7519 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
7520 BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
7521 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
7522 BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
7523 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7524 BPF_EXIT_INSN(),
7525 },
7526 INTERNAL,
7527 { },
7528 { { 0, 0xfffffffe } }
7529 },
7530 /* BPF_JMP32 | BPF_JGE | BPF_K */
7531 {
7532 "JMP32_JGE_K: Small immediate",
7533 .u.insns_int = {
7534 BPF_ALU32_IMM(BPF_MOV, R0, 123),
7535 BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
7536 BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
7537 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7538 BPF_EXIT_INSN(),
7539 },
7540 INTERNAL,
7541 { },
7542 { { 0, 123 } }
7543 },
7544 {
7545 "JMP32_JGE_K: Large immediate",
7546 .u.insns_int = {
7547 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
7548 BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
7549 BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
7550 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7551 BPF_EXIT_INSN(),
7552 },
7553 INTERNAL,
7554 { },
7555 { { 0, 0xfffffffe } }
7556 },
7557 /* BPF_JMP32 | BPF_JGE | BPF_X */
7558 {
7559 "JMP32_JGE_X",
7560 .u.insns_int = {
7561 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
7562 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
7563 BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
7564 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
7565 BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
7566 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7567 BPF_EXIT_INSN(),
7568 },
7569 INTERNAL,
7570 { },
7571 { { 0, 0xfffffffe } }
7572 },
7573 /* BPF_JMP32 | BPF_JLT | BPF_K */
7574 {
7575 "JMP32_JLT_K: Small immediate",
7576 .u.insns_int = {
7577 BPF_ALU32_IMM(BPF_MOV, R0, 123),
7578 BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
7579 BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
7580 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7581 BPF_EXIT_INSN(),
7582 },
7583 INTERNAL,
7584 { },
7585 { { 0, 123 } }
7586 },
7587 {
7588 "JMP32_JLT_K: Large immediate",
7589 .u.insns_int = {
7590 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
7591 BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
7592 BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
7593 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7594 BPF_EXIT_INSN(),
7595 },
7596 INTERNAL,
7597 { },
7598 { { 0, 0xfffffffe } }
7599 },
7600 /* BPF_JMP32 | BPF_JLT | BPF_X */
7601 {
7602 "JMP32_JLT_X",
7603 .u.insns_int = {
7604 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
7605 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
7606 BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
7607 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
7608 BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
7609 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7610 BPF_EXIT_INSN(),
7611 },
7612 INTERNAL,
7613 { },
7614 { { 0, 0xfffffffe } }
7615 },
7616 /* BPF_JMP32 | BPF_JLE | BPF_K */
7617 {
7618 "JMP32_JLE_K: Small immediate",
7619 .u.insns_int = {
7620 BPF_ALU32_IMM(BPF_MOV, R0, 123),
7621 BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
7622 BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
7623 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7624 BPF_EXIT_INSN(),
7625 },
7626 INTERNAL,
7627 { },
7628 { { 0, 123 } }
7629 },
7630 {
7631 "JMP32_JLE_K: Large immediate",
7632 .u.insns_int = {
7633 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
7634 BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
7635 BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
7636 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7637 BPF_EXIT_INSN(),
7638 },
7639 INTERNAL,
7640 { },
7641 { { 0, 0xfffffffe } }
7642 },
7643 /* BPF_JMP32 | BPF_JLE | BPF_X */
7644 {
7645 "JMP32_JLE_X",
7646 .u.insns_int = {
7647 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
7648 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
7649 BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
7650 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
7651 BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
7652 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7653 BPF_EXIT_INSN(),
7654 },
7655 INTERNAL,
7656 { },
7657 { { 0, 0xfffffffe } }
7658 },
7659 /* BPF_JMP32 | BPF_JSGT | BPF_K */
7660 {
7661 "JMP32_JSGT_K: Small immediate",
7662 .u.insns_int = {
7663 BPF_ALU32_IMM(BPF_MOV, R0, -123),
7664 BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
7665 BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
7666 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7667 BPF_EXIT_INSN(),
7668 },
7669 INTERNAL,
7670 { },
7671 { { 0, -123 } }
7672 },
7673 {
7674 "JMP32_JSGT_K: Large immediate",
7675 .u.insns_int = {
7676 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
7677 BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
7678 BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
7679 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7680 BPF_EXIT_INSN(),
7681 },
7682 INTERNAL,
7683 { },
7684 { { 0, -12345678 } }
7685 },
7686 /* BPF_JMP32 | BPF_JSGT | BPF_X */
7687 {
7688 "JMP32_JSGT_X",
7689 .u.insns_int = {
7690 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
7691 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
7692 BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
7693 BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
7694 BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
7695 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7696 BPF_EXIT_INSN(),
7697 },
7698 INTERNAL,
7699 { },
7700 { { 0, -12345678 } }
7701 },
7702 /* BPF_JMP32 | BPF_JSGE | BPF_K */
7703 {
7704 "JMP32_JSGE_K: Small immediate",
7705 .u.insns_int = {
7706 BPF_ALU32_IMM(BPF_MOV, R0, -123),
7707 BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
7708 BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
7709 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7710 BPF_EXIT_INSN(),
7711 },
7712 INTERNAL,
7713 { },
7714 { { 0, -123 } }
7715 },
7716 {
7717 "JMP32_JSGE_K: Large immediate",
7718 .u.insns_int = {
7719 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
7720 BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
7721 BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
7722 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7723 BPF_EXIT_INSN(),
7724 },
7725 INTERNAL,
7726 { },
7727 { { 0, -12345678 } }
7728 },
7729 /* BPF_JMP32 | BPF_JSGE | BPF_X */
7730 {
7731 "JMP32_JSGE_X",
7732 .u.insns_int = {
7733 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
7734 BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
7735 BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
7736 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
7737 BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
7738 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7739 BPF_EXIT_INSN(),
7740 },
7741 INTERNAL,
7742 { },
7743 { { 0, -12345678 } }
7744 },
7745 /* BPF_JMP32 | BPF_JSLT | BPF_K */
7746 {
7747 "JMP32_JSLT_K: Small immediate",
7748 .u.insns_int = {
7749 BPF_ALU32_IMM(BPF_MOV, R0, -123),
7750 BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
7751 BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
7752 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7753 BPF_EXIT_INSN(),
7754 },
7755 INTERNAL,
7756 { },
7757 { { 0, -123 } }
7758 },
7759 {
7760 "JMP32_JSLT_K: Large immediate",
7761 .u.insns_int = {
7762 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
7763 BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
7764 BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
7765 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7766 BPF_EXIT_INSN(),
7767 },
7768 INTERNAL,
7769 { },
7770 { { 0, -12345678 } }
7771 },
7772 /* BPF_JMP32 | BPF_JSLT | BPF_X */
7773 {
7774 "JMP32_JSLT_X",
7775 .u.insns_int = {
7776 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
7777 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
7778 BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
7779 BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
7780 BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
7781 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7782 BPF_EXIT_INSN(),
7783 },
7784 INTERNAL,
7785 { },
7786 { { 0, -12345678 } }
7787 },
7788 /* BPF_JMP32 | BPF_JSLE | BPF_K */
7789 {
7790 "JMP32_JSLE_K: Small immediate",
7791 .u.insns_int = {
7792 BPF_ALU32_IMM(BPF_MOV, R0, -123),
7793 BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
7794 BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
7795 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7796 BPF_EXIT_INSN(),
7797 },
7798 INTERNAL,
7799 { },
7800 { { 0, -123 } }
7801 },
7802 {
7803 "JMP32_JSLE_K: Large immediate",
7804 .u.insns_int = {
7805 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
7806 BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
7807 BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
7808 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7809 BPF_EXIT_INSN(),
7810 },
7811 INTERNAL,
7812 { },
7813 { { 0, -12345678 } }
7814 },
7815	/* BPF_JMP32 | BPF_JSLE | BPF_X */
7816 {
7817 "JMP32_JSLE_X",
7818 .u.insns_int = {
7819 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
7820 BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
7821 BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
7822 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
7823 BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
7824 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7825 BPF_EXIT_INSN(),
7826 },
7827 INTERNAL,
7828 { },
7829 { { 0, -12345678 } }
7830 },
7831	/* BPF_JMP | BPF_EXIT */
7832 {
7833 "JMP_EXIT",
7834 .u.insns_int = {
7835 BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
7836 BPF_EXIT_INSN(),
7837 BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
7838 },
7839 INTERNAL,
7840 { },
7841 { { 0, 0x4711 } },
7842 },
7843 /* BPF_JMP | BPF_JA */
7844 {
7845 "JMP_JA: Unconditional jump: if (true) return 1",
7846 .u.insns_int = {
7847 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7848 BPF_JMP_IMM(BPF_JA, 0, 0, 1),
7849 BPF_EXIT_INSN(),
7850 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7851 BPF_EXIT_INSN(),
7852 },
7853 INTERNAL,
7854 { },
7855 { { 0, 1 } },
7856 },
7857	/* BPF_JMP | BPF_JSLT | BPF_K */
7858 {
7859 "JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
7860 .u.insns_int = {
7861 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7862 BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
7863 BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
7864 BPF_EXIT_INSN(),
7865 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7866 BPF_EXIT_INSN(),
7867 },
7868 INTERNAL,
7869 { },
7870 { { 0, 1 } },
7871 },
7872 {
7873 "JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
7874 .u.insns_int = {
7875 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7876 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
7877 BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
7878 BPF_EXIT_INSN(),
7879 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7880 BPF_EXIT_INSN(),
7881 },
7882 INTERNAL,
7883 { },
7884 { { 0, 1 } },
7885 },
7886	/* BPF_JMP | BPF_JSGT | BPF_K */
7887 {
7888 "JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
7889 .u.insns_int = {
7890 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7891 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
7892 BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
7893 BPF_EXIT_INSN(),
7894 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7895 BPF_EXIT_INSN(),
7896 },
7897 INTERNAL,
7898 { },
7899 { { 0, 1 } },
7900 },
7901 {
7902 "JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
7903 .u.insns_int = {
7904 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7905 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
7906 BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
7907 BPF_EXIT_INSN(),
7908 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7909 BPF_EXIT_INSN(),
7910 },
7911 INTERNAL,
7912 { },
7913 { { 0, 1 } },
7914 },
7915	/* BPF_JMP | BPF_JSLE | BPF_K */
7916 {
7917 "JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
7918 .u.insns_int = {
7919 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7920 BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
7921 BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
7922 BPF_EXIT_INSN(),
7923 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7924 BPF_EXIT_INSN(),
7925 },
7926 INTERNAL,
7927 { },
7928 { { 0, 1 } },
7929 },
7930 {
7931 "JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
7932 .u.insns_int = {
7933 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7934 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
7935 BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
7936 BPF_EXIT_INSN(),
7937 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7938 BPF_EXIT_INSN(),
7939 },
7940 INTERNAL,
7941 { },
7942 { { 0, 1 } },
7943 },
7944 {
7945 "JMP_JSLE_K: Signed jump: value walk 1",
7946 .u.insns_int = {
7947 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7948 BPF_LD_IMM64(R1, 3),
7949 BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
7950 BPF_ALU64_IMM(BPF_SUB, R1, 1),
7951 BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
7952 BPF_ALU64_IMM(BPF_SUB, R1, 1),
7953 BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
7954 BPF_ALU64_IMM(BPF_SUB, R1, 1),
7955 BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
7956 BPF_EXIT_INSN(), /* bad exit */
7957 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
7958 BPF_EXIT_INSN(),
7959 },
7960 INTERNAL,
7961 { },
7962 { { 0, 1 } },
7963 },
7964 {
7965 "JMP_JSLE_K: Signed jump: value walk 2",
7966 .u.insns_int = {
7967 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7968 BPF_LD_IMM64(R1, 3),
7969 BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
7970 BPF_ALU64_IMM(BPF_SUB, R1, 2),
7971 BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
7972 BPF_ALU64_IMM(BPF_SUB, R1, 2),
7973 BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
7974 BPF_EXIT_INSN(), /* bad exit */
7975 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
7976 BPF_EXIT_INSN(),
7977 },
7978 INTERNAL,
7979 { },
7980 { { 0, 1 } },
7981 },
7982	/* BPF_JMP | BPF_JSGE | BPF_K */
7983 {
7984 "JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
7985 .u.insns_int = {
7986 BPF_ALU32_IMM(BPF_MOV, R0, 0),
7987 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
7988 BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
7989 BPF_EXIT_INSN(),
7990 BPF_ALU32_IMM(BPF_MOV, R0, 1),
7991 BPF_EXIT_INSN(),
7992 },
7993 INTERNAL,
7994 { },
7995 { { 0, 1 } },
7996 },
7997 {
7998 "JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
7999 .u.insns_int = {
8000 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8001 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
8002 BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
8003 BPF_EXIT_INSN(),
8004 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8005 BPF_EXIT_INSN(),
8006 },
8007 INTERNAL,
8008 { },
8009 { { 0, 1 } },
8010 },
8011	{
8012 "JMP_JSGE_K: Signed jump: value walk 1",
8013 .u.insns_int = {
8014 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8015 BPF_LD_IMM64(R1, -3),
8016 BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
8017 BPF_ALU64_IMM(BPF_ADD, R1, 1),
8018 BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
8019 BPF_ALU64_IMM(BPF_ADD, R1, 1),
8020 BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
8021 BPF_ALU64_IMM(BPF_ADD, R1, 1),
8022 BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
8023 BPF_EXIT_INSN(), /* bad exit */
8024 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
8025 BPF_EXIT_INSN(),
8026 },
8027 INTERNAL,
8028 { },
8029 { { 0, 1 } },
8030 },
8031 {
8032 "JMP_JSGE_K: Signed jump: value walk 2",
8033 .u.insns_int = {
8034 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8035 BPF_LD_IMM64(R1, -3),
8036 BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
8037 BPF_ALU64_IMM(BPF_ADD, R1, 2),
8038 BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
8039 BPF_ALU64_IMM(BPF_ADD, R1, 2),
8040 BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
8041 BPF_EXIT_INSN(), /* bad exit */
8042 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
8043 BPF_EXIT_INSN(),
8044 },
8045 INTERNAL,
8046 { },
8047 { { 0, 1 } },
8048 },
8049	/* BPF_JMP | BPF_JGT | BPF_K */
8050 {
8051 "JMP_JGT_K: if (3 > 2) return 1",
8052 .u.insns_int = {
8053 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8054 BPF_LD_IMM64(R1, 3),
8055 BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
8056 BPF_EXIT_INSN(),
8057 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8058 BPF_EXIT_INSN(),
8059 },
8060 INTERNAL,
8061 { },
8062 { { 0, 1 } },
8063 },
8064	{
8065 "JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
8066 .u.insns_int = {
8067 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8068 BPF_LD_IMM64(R1, -1),
8069 BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
8070 BPF_EXIT_INSN(),
8071 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8072 BPF_EXIT_INSN(),
8073 },
8074 INTERNAL,
8075 { },
8076 { { 0, 1 } },
8077 },
8078	/* BPF_JMP | BPF_JLT | BPF_K */
8079 {
8080 "JMP_JLT_K: if (2 < 3) return 1",
8081 .u.insns_int = {
8082 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8083 BPF_LD_IMM64(R1, 2),
8084 BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
8085 BPF_EXIT_INSN(),
8086 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8087 BPF_EXIT_INSN(),
8088 },
8089 INTERNAL,
8090 { },
8091 { { 0, 1 } },
8092 },
8093	"JMP_JLT_K: Unsigned jump: if (1 < -1) return 1",
8094 "JMP_JGT_K: Unsigned jump: if (1 < -1) return 1",
8095 .u.insns_int = {
8096 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8097 BPF_LD_IMM64(R1, 1),
8098 BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
8099 BPF_EXIT_INSN(),
8100 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8101 BPF_EXIT_INSN(),
8102 },
8103 INTERNAL,
8104 { },
8105 { { 0, 1 } },
8106 },
8107	/* BPF_JMP | BPF_JGE | BPF_K */
8108 {
8109 "JMP_JGE_K: if (3 >= 2) return 1",
8110 .u.insns_int = {
8111 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8112 BPF_LD_IMM64(R1, 3),
8113 BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
8114 BPF_EXIT_INSN(),
8115 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8116 BPF_EXIT_INSN(),
8117 },
8118 INTERNAL,
8119 { },
8120 { { 0, 1 } },
8121 },
8122	/* BPF_JMP | BPF_JLE | BPF_K */
8123 {
8124 "JMP_JLE_K: if (2 <= 3) return 1",
8125 .u.insns_int = {
8126 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8127 BPF_LD_IMM64(R1, 2),
8128 BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
8129 BPF_EXIT_INSN(),
8130 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8131 BPF_EXIT_INSN(),
8132 },
8133 INTERNAL,
8134 { },
8135 { { 0, 1 } },
8136 },
8137	/* BPF_JMP | BPF_JGT | BPF_K jump backwards */
8138 {
8139 "JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
8140 .u.insns_int = {
8141 BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
8142 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
8143 BPF_EXIT_INSN(),
8144 BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
8145 BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
8146 BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
8147 BPF_EXIT_INSN(),
8148 },
8149 INTERNAL,
8150 { },
8151 { { 0, 1 } },
8152 },
8153	{
8154 "JMP_JGE_K: if (3 >= 3) return 1",
8155 .u.insns_int = {
8156 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8157 BPF_LD_IMM64(R1, 3),
8158 BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
8159 BPF_EXIT_INSN(),
8160 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8161 BPF_EXIT_INSN(),
8162 },
8163 INTERNAL,
8164 { },
8165 { { 0, 1 } },
8166 },
8167	/* BPF_JMP | BPF_JLT | BPF_K jump backwards */
8168 {
8169	"JMP_JLT_K: if (2 < 3) return 1 (jump backwards)",
8170 .u.insns_int = {
8171 BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
8172 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
8173 BPF_EXIT_INSN(),
8174 BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
8175 BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
8176 BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
8177 BPF_EXIT_INSN(),
8178 },
8179 INTERNAL,
8180 { },
8181 { { 0, 1 } },
8182 },
8183 {
8184 "JMP_JLE_K: if (3 <= 3) return 1",
8185 .u.insns_int = {
8186 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8187 BPF_LD_IMM64(R1, 3),
8188 BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
8189 BPF_EXIT_INSN(),
8190 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8191 BPF_EXIT_INSN(),
8192 },
8193 INTERNAL,
8194 { },
8195 { { 0, 1 } },
8196 },
8197	/* BPF_JMP | BPF_JNE | BPF_K */
8198 {
8199 "JMP_JNE_K: if (3 != 2) return 1",
8200 .u.insns_int = {
8201 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8202 BPF_LD_IMM64(R1, 3),
8203 BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
8204 BPF_EXIT_INSN(),
8205 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8206 BPF_EXIT_INSN(),
8207 },
8208 INTERNAL,
8209 { },
8210 { { 0, 1 } },
8211 },
8212 /* BPF_JMP | BPF_JEQ | BPF_K */
8213 {
8214 "JMP_JEQ_K: if (3 == 3) return 1",
8215 .u.insns_int = {
8216 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8217 BPF_LD_IMM64(R1, 3),
8218 BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
8219 BPF_EXIT_INSN(),
8220 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8221 BPF_EXIT_INSN(),
8222 },
8223 INTERNAL,
8224 { },
8225 { { 0, 1 } },
8226 },
8227 /* BPF_JMP | BPF_JSET | BPF_K */
8228 {
8229 "JMP_JSET_K: if (0x3 & 0x2) return 1",
8230 .u.insns_int = {
8231 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8232 BPF_LD_IMM64(R1, 3),
8233	BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
8234	BPF_EXIT_INSN(),
8235 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8236 BPF_EXIT_INSN(),
8237 },
8238 INTERNAL,
8239 { },
8240 { { 0, 1 } },
8241 },
8242 {
8243 "JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
8244 .u.insns_int = {
8245 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8246 BPF_LD_IMM64(R1, 3),
8247	BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
8248	BPF_EXIT_INSN(),
8249 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8250 BPF_EXIT_INSN(),
8251 },
8252 INTERNAL,
8253 { },
8254 { { 0, 1 } },
8255 },
8256 /* BPF_JMP | BPF_JSGT | BPF_X */
8257 {
8258 "JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
8259 .u.insns_int = {
8260 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8261 BPF_LD_IMM64(R1, -1),
8262 BPF_LD_IMM64(R2, -2),
8263 BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
8264 BPF_EXIT_INSN(),
8265 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8266 BPF_EXIT_INSN(),
8267 },
8268 INTERNAL,
8269 { },
8270 { { 0, 1 } },
8271 },
8272 {
8273 "JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
8274 .u.insns_int = {
8275 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8276 BPF_LD_IMM64(R1, -1),
8277 BPF_LD_IMM64(R2, -1),
8278 BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
8279 BPF_EXIT_INSN(),
8280 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8281 BPF_EXIT_INSN(),
8282 },
8283 INTERNAL,
8284 { },
8285 { { 0, 1 } },
8286 },
8287	/* BPF_JMP | BPF_JSLT | BPF_X */
8288 {
8289 "JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
8290 .u.insns_int = {
8291 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8292 BPF_LD_IMM64(R1, -1),
8293 BPF_LD_IMM64(R2, -2),
8294 BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
8295 BPF_EXIT_INSN(),
8296 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8297 BPF_EXIT_INSN(),
8298 },
8299 INTERNAL,
8300 { },
8301 { { 0, 1 } },
8302 },
8303 {
8304 "JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
8305 .u.insns_int = {
8306 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8307 BPF_LD_IMM64(R1, -1),
8308 BPF_LD_IMM64(R2, -1),
8309 BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
8310 BPF_EXIT_INSN(),
8311 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8312 BPF_EXIT_INSN(),
8313 },
8314 INTERNAL,
8315 { },
8316 { { 0, 1 } },
8317 },
8318	/* BPF_JMP | BPF_JSGE | BPF_X */
8319 {
8320 "JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
8321 .u.insns_int = {
8322 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8323 BPF_LD_IMM64(R1, -1),
8324 BPF_LD_IMM64(R2, -2),
8325 BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
8326 BPF_EXIT_INSN(),
8327 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8328 BPF_EXIT_INSN(),
8329 },
8330 INTERNAL,
8331 { },
8332 { { 0, 1 } },
8333 },
8334 {
8335 "JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
8336 .u.insns_int = {
8337 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8338 BPF_LD_IMM64(R1, -1),
8339 BPF_LD_IMM64(R2, -1),
8340 BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
8341 BPF_EXIT_INSN(),
8342 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8343 BPF_EXIT_INSN(),
8344 },
8345 INTERNAL,
8346 { },
8347 { { 0, 1 } },
8348 },
8349	/* BPF_JMP | BPF_JSLE | BPF_X */
8350 {
8351 "JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
8352 .u.insns_int = {
8353 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8354 BPF_LD_IMM64(R1, -1),
8355 BPF_LD_IMM64(R2, -2),
8356 BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
8357 BPF_EXIT_INSN(),
8358 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8359 BPF_EXIT_INSN(),
8360 },
8361 INTERNAL,
8362 { },
8363 { { 0, 1 } },
8364 },
8365 {
8366 "JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
8367 .u.insns_int = {
8368 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8369 BPF_LD_IMM64(R1, -1),
8370 BPF_LD_IMM64(R2, -1),
8371 BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
8372 BPF_EXIT_INSN(),
8373 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8374 BPF_EXIT_INSN(),
8375 },
8376 INTERNAL,
8377 { },
8378 { { 0, 1 } },
8379 },
8380	/* BPF_JMP | BPF_JGT | BPF_X */
8381 {
8382 "JMP_JGT_X: if (3 > 2) return 1",
8383 .u.insns_int = {
8384 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8385 BPF_LD_IMM64(R1, 3),
8386 BPF_LD_IMM64(R2, 2),
8387 BPF_JMP_REG(BPF_JGT, R1, R2, 1),
8388 BPF_EXIT_INSN(),
8389 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8390 BPF_EXIT_INSN(),
8391 },
8392 INTERNAL,
8393 { },
8394 { { 0, 1 } },
8395 },
8396	{
8397 "JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
8398 .u.insns_int = {
8399 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8400 BPF_LD_IMM64(R1, -1),
8401 BPF_LD_IMM64(R2, 1),
8402 BPF_JMP_REG(BPF_JGT, R1, R2, 1),
8403 BPF_EXIT_INSN(),
8404 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8405 BPF_EXIT_INSN(),
8406 },
8407 INTERNAL,
8408 { },
8409 { { 0, 1 } },
8410 },
8411	/* BPF_JMP | BPF_JLT | BPF_X */
8412 {
8413 "JMP_JLT_X: if (2 < 3) return 1",
8414 .u.insns_int = {
8415 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8416 BPF_LD_IMM64(R1, 3),
8417 BPF_LD_IMM64(R2, 2),
8418 BPF_JMP_REG(BPF_JLT, R2, R1, 1),
8419 BPF_EXIT_INSN(),
8420 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8421 BPF_EXIT_INSN(),
8422 },
8423 INTERNAL,
8424 { },
8425 { { 0, 1 } },
8426 },
8427 {
8428 "JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
8429 .u.insns_int = {
8430 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8431 BPF_LD_IMM64(R1, -1),
8432 BPF_LD_IMM64(R2, 1),
8433 BPF_JMP_REG(BPF_JLT, R2, R1, 1),
8434 BPF_EXIT_INSN(),
8435 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8436 BPF_EXIT_INSN(),
8437 },
8438 INTERNAL,
8439 { },
8440 { { 0, 1 } },
8441 },
8442	/* BPF_JMP | BPF_JGE | BPF_X */
8443 {
8444 "JMP_JGE_X: if (3 >= 2) return 1",
8445 .u.insns_int = {
8446 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8447 BPF_LD_IMM64(R1, 3),
8448 BPF_LD_IMM64(R2, 2),
8449 BPF_JMP_REG(BPF_JGE, R1, R2, 1),
8450 BPF_EXIT_INSN(),
8451 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8452 BPF_EXIT_INSN(),
8453 },
8454 INTERNAL,
8455 { },
8456 { { 0, 1 } },
8457 },
8458 {
8459 "JMP_JGE_X: if (3 >= 3) return 1",
8460 .u.insns_int = {
8461 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8462 BPF_LD_IMM64(R1, 3),
8463 BPF_LD_IMM64(R2, 3),
8464 BPF_JMP_REG(BPF_JGE, R1, R2, 1),
8465 BPF_EXIT_INSN(),
8466 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8467 BPF_EXIT_INSN(),
8468 },
8469 INTERNAL,
8470 { },
8471 { { 0, 1 } },
8472 },
8473	/* BPF_JMP | BPF_JLE | BPF_X */
8474 {
8475 "JMP_JLE_X: if (2 <= 3) return 1",
8476 .u.insns_int = {
8477 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8478 BPF_LD_IMM64(R1, 3),
8479 BPF_LD_IMM64(R2, 2),
8480 BPF_JMP_REG(BPF_JLE, R2, R1, 1),
8481 BPF_EXIT_INSN(),
8482 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8483 BPF_EXIT_INSN(),
8484 },
8485 INTERNAL,
8486 { },
8487 { { 0, 1 } },
8488 },
8489 {
8490 "JMP_JLE_X: if (3 <= 3) return 1",
8491 .u.insns_int = {
8492 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8493 BPF_LD_IMM64(R1, 3),
8494 BPF_LD_IMM64(R2, 3),
8495 BPF_JMP_REG(BPF_JLE, R1, R2, 1),
8496 BPF_EXIT_INSN(),
8497 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8498 BPF_EXIT_INSN(),
8499 },
8500 INTERNAL,
8501 { },
8502 { { 0, 1 } },
8503 },
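	/*
	 * Note: BPF_LD_IMM64 occupies two instruction slots, which is why
	 * the ldimm64 tests below jump by 2 or 4 to skip over one or two
	 * of these loads.
	 */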
8504	{
8505 /* Mainly testing JIT + imm64 here. */
8506 "JMP_JGE_X: ldimm64 test 1",
8507 .u.insns_int = {
8508 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8509 BPF_LD_IMM64(R1, 3),
8510 BPF_LD_IMM64(R2, 2),
8511 BPF_JMP_REG(BPF_JGE, R1, R2, 2),
8512	BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
8513 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
8514	BPF_EXIT_INSN(),
8515 },
8516 INTERNAL,
8517 { },
8518 { { 0, 0xeeeeeeeeU } },
8519 },
8520 {
8521 "JMP_JGE_X: ldimm64 test 2",
8522 .u.insns_int = {
8523 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8524 BPF_LD_IMM64(R1, 3),
8525 BPF_LD_IMM64(R2, 2),
8526 BPF_JMP_REG(BPF_JGE, R1, R2, 0),
8527	BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
8528	BPF_EXIT_INSN(),
8529 },
8530 INTERNAL,
8531 { },
8532 { { 0, 0xffffffffU } },
8533 },
8534 {
8535 "JMP_JGE_X: ldimm64 test 3",
8536 .u.insns_int = {
8537 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8538 BPF_LD_IMM64(R1, 3),
8539 BPF_LD_IMM64(R2, 2),
8540 BPF_JMP_REG(BPF_JGE, R1, R2, 4),
8541	BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
8542 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
8543	BPF_EXIT_INSN(),
8544 },
8545 INTERNAL,
8546 { },
8547 { { 0, 1 } },
8548 },
8549	{
8550 "JMP_JLE_X: ldimm64 test 1",
8551 .u.insns_int = {
8552 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8553 BPF_LD_IMM64(R1, 3),
8554 BPF_LD_IMM64(R2, 2),
8555 BPF_JMP_REG(BPF_JLE, R2, R1, 2),
8556 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
8557 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
8558 BPF_EXIT_INSN(),
8559 },
8560 INTERNAL,
8561 { },
8562 { { 0, 0xeeeeeeeeU } },
8563 },
8564 {
8565 "JMP_JLE_X: ldimm64 test 2",
8566 .u.insns_int = {
8567 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8568 BPF_LD_IMM64(R1, 3),
8569 BPF_LD_IMM64(R2, 2),
8570 BPF_JMP_REG(BPF_JLE, R2, R1, 0),
8571 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
8572 BPF_EXIT_INSN(),
8573 },
8574 INTERNAL,
8575 { },
8576 { { 0, 0xffffffffU } },
8577 },
8578 {
8579 "JMP_JLE_X: ldimm64 test 3",
8580 .u.insns_int = {
8581 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8582 BPF_LD_IMM64(R1, 3),
8583 BPF_LD_IMM64(R2, 2),
8584 BPF_JMP_REG(BPF_JLE, R2, R1, 4),
8585 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
8586 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
8587 BPF_EXIT_INSN(),
8588 },
8589 INTERNAL,
8590 { },
8591 { { 0, 1 } },
8592 },
8593	/* BPF_JMP | BPF_JNE | BPF_X */
8594 {
8595 "JMP_JNE_X: if (3 != 2) return 1",
8596 .u.insns_int = {
8597 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8598 BPF_LD_IMM64(R1, 3),
8599 BPF_LD_IMM64(R2, 2),
8600 BPF_JMP_REG(BPF_JNE, R1, R2, 1),
8601 BPF_EXIT_INSN(),
8602 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8603 BPF_EXIT_INSN(),
8604 },
8605 INTERNAL,
8606 { },
8607 { { 0, 1 } },
8608 },
8609 /* BPF_JMP | BPF_JEQ | BPF_X */
8610 {
8611 "JMP_JEQ_X: if (3 == 3) return 1",
8612 .u.insns_int = {
8613 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8614 BPF_LD_IMM64(R1, 3),
8615 BPF_LD_IMM64(R2, 3),
8616 BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
8617 BPF_EXIT_INSN(),
8618 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8619 BPF_EXIT_INSN(),
8620 },
8621 INTERNAL,
8622 { },
8623 { { 0, 1 } },
8624 },
8625 /* BPF_JMP | BPF_JSET | BPF_X */
8626 {
8627 "JMP_JSET_X: if (0x3 & 0x2) return 1",
8628 .u.insns_int = {
8629 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8630 BPF_LD_IMM64(R1, 3),
8631 BPF_LD_IMM64(R2, 2),
8632	BPF_JMP_REG(BPF_JSET, R1, R2, 1),
8633	BPF_EXIT_INSN(),
8634 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8635 BPF_EXIT_INSN(),
8636 },
8637 INTERNAL,
8638 { },
8639 { { 0, 1 } },
8640 },
8641 {
8642 "JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
8643 .u.insns_int = {
8644 BPF_ALU32_IMM(BPF_MOV, R0, 0),
8645 BPF_LD_IMM64(R1, 3),
8646 BPF_LD_IMM64(R2, 0xffffffff),
8647	BPF_JMP_REG(BPF_JSET, R1, R2, 1),
8648	BPF_EXIT_INSN(),
8649 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8650 BPF_EXIT_INSN(),
8651 },
8652 INTERNAL,
8653 { },
8654 { { 0, 1 } },
8655 },
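	/*
	 * The tests below carry no inline instructions; each one generates
	 * its program at runtime through its .fill_helper callback, mostly
	 * at or near the maximum program size, to stress the JIT.
	 */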
8656	{ /* Mainly checking JIT here. */
8657 "BPF_MAXINSNS: Very long conditional jump",
8658 { },
8659 INTERNAL | FLAG_NO_DATA,
8660 { },
8661 { { 0, 1 } },
8662 .fill_helper = bpf_fill_long_jmp,
8663 },
8664	{
8665 "JMP_JA: Jump, gap, jump, ...",
8666 { },
8667 CLASSIC | FLAG_NO_DATA,
8668 { },
8669 { { 0, 0xababcbac } },
8670 .fill_helper = bpf_fill_ja,
8671 },
8672	{ /* Mainly checking JIT here. */
8673 "BPF_MAXINSNS: Maximum possible literals",
8674 { },
8675 CLASSIC | FLAG_NO_DATA,
8676 { },
8677 { { 0, 0xffffffff } },
8678 .fill_helper = bpf_fill_maxinsns1,
8679 },
8680 { /* Mainly checking JIT here. */
8681 "BPF_MAXINSNS: Single literal",
8682 { },
8683 CLASSIC | FLAG_NO_DATA,
8684 { },
8685 { { 0, 0xfefefefe } },
8686 .fill_helper = bpf_fill_maxinsns2,
8687 },
8688 { /* Mainly checking JIT here. */
8689 "BPF_MAXINSNS: Run/add until end",
8690 { },
8691 CLASSIC | FLAG_NO_DATA,
8692 { },
8693 { { 0, 0x947bf368 } },
8694 .fill_helper = bpf_fill_maxinsns3,
8695 },
8696 {
8697 "BPF_MAXINSNS: Too many instructions",
8698 { },
8699 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
8700 { },
8701 { },
8702 .fill_helper = bpf_fill_maxinsns4,
8703	.expected_errcode = -EINVAL,
8704	},
8705 { /* Mainly checking JIT here. */
8706 "BPF_MAXINSNS: Very long jump",
8707 { },
8708 CLASSIC | FLAG_NO_DATA,
8709 { },
8710 { { 0, 0xabababab } },
8711 .fill_helper = bpf_fill_maxinsns5,
8712 },
8713 { /* Mainly checking JIT here. */
8714 "BPF_MAXINSNS: Ctx heavy transformations",
8715 { },
8716 CLASSIC,
8717 { },
8718 {
8719	{ 1, SKB_VLAN_PRESENT },
8720 { 10, SKB_VLAN_PRESENT }
8721	},
8722 .fill_helper = bpf_fill_maxinsns6,
8723 },
8724 { /* Mainly checking JIT here. */
8725 "BPF_MAXINSNS: Call heavy transformations",
8726 { },
8727 CLASSIC | FLAG_NO_DATA,
8728 { },
8729 { { 1, 0 }, { 10, 0 } },
8730 .fill_helper = bpf_fill_maxinsns7,
8731 },
8732 { /* Mainly checking JIT here. */
8733 "BPF_MAXINSNS: Jump heavy test",
8734 { },
8735 CLASSIC | FLAG_NO_DATA,
8736 { },
8737 { { 0, 0xffffffff } },
8738 .fill_helper = bpf_fill_maxinsns8,
8739 },
8740	{ /* Mainly checking JIT here. */
8741 "BPF_MAXINSNS: Very long jump backwards",
8742 { },
8743 INTERNAL | FLAG_NO_DATA,
8744 { },
8745 { { 0, 0xcbababab } },
8746 .fill_helper = bpf_fill_maxinsns9,
8747 },
8748 { /* Mainly checking JIT here. */
8749 "BPF_MAXINSNS: Edge hopping nuthouse",
8750 { },
8751 INTERNAL | FLAG_NO_DATA,
8752 { },
8753 { { 0, 0xabababac } },
8754 .fill_helper = bpf_fill_maxinsns10,
8755 },
8756	{
8757 "BPF_MAXINSNS: Jump, gap, jump, ...",
8758 { },
8759 CLASSIC | FLAG_NO_DATA,
8760 { },
8761 { { 0, 0xababcbac } },
8762 .fill_helper = bpf_fill_maxinsns11,
8763 },
8764	{
8765	"BPF_MAXINSNS: jump over MSH",
8766 { },
8767 CLASSIC | FLAG_EXPECTED_FAIL,
8768 { 0xfa, 0xfb, 0xfc, 0xfd, },
8769 { { 4, 0xabababab } },
8770 .fill_helper = bpf_fill_maxinsns12,
8771 .expected_errcode = -EINVAL,
8772 },
8773 {
8774 "BPF_MAXINSNS: exec all MSH",
8775 { },
8776 CLASSIC,
8777 { 0xfa, 0xfb, 0xfc, 0xfd, },
8778 { { 4, 0xababab83 } },
8779 .fill_helper = bpf_fill_maxinsns13,
8780 },
8781 {
8782	"BPF_MAXINSNS: ld_abs+get_processor_id",
8783 { },
8784 CLASSIC,
8785 { },
8786 { { 1, 0xbee } },
8787 .fill_helper = bpf_fill_ld_abs_get_processor_id,
8788 },
8789	/*
8790 * LD_IND / LD_ABS on fragmented SKBs
8791 */
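	/*
	 * The FLAG_SKB_FRAG tests place .frag_data into a paged fragment of
	 * the test skb, so these loads have to read past the linear head.
	 */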
8792 {
8793 "LD_IND byte frag",
8794 .u.insns = {
8795 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
8796 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
8797 BPF_STMT(BPF_RET | BPF_A, 0x0),
8798 },
8799 CLASSIC | FLAG_SKB_FRAG,
8800 { },
8801 { {0x40, 0x42} },
8802 .frag_data = {
8803 0x42, 0x00, 0x00, 0x00,
8804 0x43, 0x44, 0x00, 0x00,
8805 0x21, 0x07, 0x19, 0x83,
8806 },
8807 },
8808 {
8809 "LD_IND halfword frag",
8810 .u.insns = {
8811 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
8812 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
8813 BPF_STMT(BPF_RET | BPF_A, 0x0),
8814 },
8815 CLASSIC | FLAG_SKB_FRAG,
8816 { },
8817 { {0x40, 0x4344} },
8818 .frag_data = {
8819 0x42, 0x00, 0x00, 0x00,
8820 0x43, 0x44, 0x00, 0x00,
8821 0x21, 0x07, 0x19, 0x83,
8822 },
8823 },
8824 {
8825 "LD_IND word frag",
8826 .u.insns = {
8827 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
8828 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
8829 BPF_STMT(BPF_RET | BPF_A, 0x0),
8830 },
8831 CLASSIC | FLAG_SKB_FRAG,
8832 { },
8833 { {0x40, 0x21071983} },
8834 .frag_data = {
8835 0x42, 0x00, 0x00, 0x00,
8836 0x43, 0x44, 0x00, 0x00,
8837 0x21, 0x07, 0x19, 0x83,
8838 },
8839 },
8840 {
8841 "LD_IND halfword mixed head/frag",
8842 .u.insns = {
8843 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
8844 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
8845 BPF_STMT(BPF_RET | BPF_A, 0x0),
8846 },
8847 CLASSIC | FLAG_SKB_FRAG,
8848 { [0x3e] = 0x25, [0x3f] = 0x05, },
8849 { {0x40, 0x0519} },
8850 .frag_data = { 0x19, 0x82 },
8851 },
8852 {
8853 "LD_IND word mixed head/frag",
8854 .u.insns = {
8855 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
8856 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
8857 BPF_STMT(BPF_RET | BPF_A, 0x0),
8858 },
8859 CLASSIC | FLAG_SKB_FRAG,
8860 { [0x3e] = 0x25, [0x3f] = 0x05, },
8861 { {0x40, 0x25051982} },
8862 .frag_data = { 0x19, 0x82 },
8863 },
8864 {
8865 "LD_ABS byte frag",
8866 .u.insns = {
8867 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
8868 BPF_STMT(BPF_RET | BPF_A, 0x0),
8869 },
8870 CLASSIC | FLAG_SKB_FRAG,
8871 { },
8872 { {0x40, 0x42} },
8873 .frag_data = {
8874 0x42, 0x00, 0x00, 0x00,
8875 0x43, 0x44, 0x00, 0x00,
8876 0x21, 0x07, 0x19, 0x83,
8877 },
8878 },
8879 {
8880 "LD_ABS halfword frag",
8881 .u.insns = {
8882 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
8883 BPF_STMT(BPF_RET | BPF_A, 0x0),
8884 },
8885 CLASSIC | FLAG_SKB_FRAG,
8886 { },
8887 { {0x40, 0x4344} },
8888 .frag_data = {
8889 0x42, 0x00, 0x00, 0x00,
8890 0x43, 0x44, 0x00, 0x00,
8891 0x21, 0x07, 0x19, 0x83,
8892 },
8893 },
8894 {
8895 "LD_ABS word frag",
8896 .u.insns = {
8897 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
8898 BPF_STMT(BPF_RET | BPF_A, 0x0),
8899 },
8900 CLASSIC | FLAG_SKB_FRAG,
8901 { },
8902 { {0x40, 0x21071983} },
8903 .frag_data = {
8904 0x42, 0x00, 0x00, 0x00,
8905 0x43, 0x44, 0x00, 0x00,
8906 0x21, 0x07, 0x19, 0x83,
8907 },
8908 },
8909 {
8910 "LD_ABS halfword mixed head/frag",
8911 .u.insns = {
8912 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
8913 BPF_STMT(BPF_RET | BPF_A, 0x0),
8914 },
8915 CLASSIC | FLAG_SKB_FRAG,
8916 { [0x3e] = 0x25, [0x3f] = 0x05, },
8917 { {0x40, 0x0519} },
8918 .frag_data = { 0x19, 0x82 },
8919 },
8920 {
8921 "LD_ABS word mixed head/frag",
8922 .u.insns = {
8923 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
8924 BPF_STMT(BPF_RET | BPF_A, 0x0),
8925 },
8926 CLASSIC | FLAG_SKB_FRAG,
8927 { [0x3e] = 0x25, [0x3f] = 0x05, },
8928 { {0x40, 0x25051982} },
8929 .frag_data = { 0x19, 0x82 },
8930 },
8931	/*
8932	 * LD_IND / LD_ABS on non-fragmented SKBs
8933 */
8934 {
8935 /*
8936 * this tests that the JIT/interpreter correctly resets X
8937 * before using it in an LD_IND instruction.
8938 */
8939 "LD_IND byte default X",
8940 .u.insns = {
8941 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
8942 BPF_STMT(BPF_RET | BPF_A, 0x0),
8943 },
8944 CLASSIC,
8945 { [0x1] = 0x42 },
8946 { {0x40, 0x42 } },
8947 },
8948 {
8949 "LD_IND byte positive offset",
8950 .u.insns = {
8951 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
8952 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
8953 BPF_STMT(BPF_RET | BPF_A, 0x0),
8954 },
8955 CLASSIC,
8956 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
8957 { {0x40, 0x82 } },
8958 },
8959 {
8960 "LD_IND byte negative offset",
8961 .u.insns = {
8962 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
8963 BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
8964 BPF_STMT(BPF_RET | BPF_A, 0x0),
8965 },
8966 CLASSIC,
8967 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
8968 { {0x40, 0x05 } },
8969 },
8970 {
8971	"LD_IND byte positive offset, all ff",
8972 .u.insns = {
8973 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
8974 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
8975 BPF_STMT(BPF_RET | BPF_A, 0x0),
8976 },
8977 CLASSIC,
8978 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
8979 { {0x40, 0xff } },
8980 },
8981 {
8982 "LD_IND byte positive offset, out of bounds",
8983 .u.insns = {
8984 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
8985 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
8986 BPF_STMT(BPF_RET | BPF_A, 0x0),
8987 },
8988 CLASSIC,
8989 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
8990 { {0x3f, 0 }, },
8991 },
8992 {
8993 "LD_IND byte negative offset, out of bounds",
8994 .u.insns = {
8995 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
8996 BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
8997 BPF_STMT(BPF_RET | BPF_A, 0x0),
8998 },
8999 CLASSIC,
9000 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9001 { {0x3f, 0 } },
9002 },
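	/*
	 * SKF_LL_OFF makes the load relative to the link-layer (MAC) header,
	 * so the negative offsets below still land inside the test packet.
	 */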
9003 {
9004 "LD_IND byte negative offset, multiple calls",
9005 .u.insns = {
9006 BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
9007 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
9008 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
9009 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
9010 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
9011 BPF_STMT(BPF_RET | BPF_A, 0x0),
9012 },
9013 CLASSIC,
9014 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9015 { {0x40, 0x82 }, },
9016 },
9017 {
9018	"LD_IND halfword positive offset",
9019 .u.insns = {
9020 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
9021 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
9022 BPF_STMT(BPF_RET | BPF_A, 0x0),
9023 },
9024 CLASSIC,
9025 {
9026 [0x1c] = 0xaa, [0x1d] = 0x55,
9027 [0x1e] = 0xbb, [0x1f] = 0x66,
9028 [0x20] = 0xcc, [0x21] = 0x77,
9029 [0x22] = 0xdd, [0x23] = 0x88,
9030 },
9031 { {0x40, 0xdd88 } },
9032 },
9033 {
9034 "LD_IND halfword negative offset",
9035 .u.insns = {
9036 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
9037 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
9038 BPF_STMT(BPF_RET | BPF_A, 0x0),
9039 },
9040 CLASSIC,
9041 {
9042 [0x1c] = 0xaa, [0x1d] = 0x55,
9043 [0x1e] = 0xbb, [0x1f] = 0x66,
9044 [0x20] = 0xcc, [0x21] = 0x77,
9045 [0x22] = 0xdd, [0x23] = 0x88,
9046 },
9047 { {0x40, 0xbb66 } },
9048 },
9049 {
9050 "LD_IND halfword unaligned",
9051 .u.insns = {
9052 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
9053 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
9054 BPF_STMT(BPF_RET | BPF_A, 0x0),
9055 },
9056 CLASSIC,
9057 {
9058 [0x1c] = 0xaa, [0x1d] = 0x55,
9059 [0x1e] = 0xbb, [0x1f] = 0x66,
9060 [0x20] = 0xcc, [0x21] = 0x77,
9061 [0x22] = 0xdd, [0x23] = 0x88,
9062 },
9063 { {0x40, 0x66cc } },
9064 },
9065 {
9066	"LD_IND halfword positive offset, all ff",
9067 .u.insns = {
9068 BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
9069 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
9070 BPF_STMT(BPF_RET | BPF_A, 0x0),
9071 },
9072 CLASSIC,
9073 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
9074 { {0x40, 0xffff } },
9075 },
9076 {
9077 "LD_IND halfword positive offset, out of bounds",
9078 .u.insns = {
9079 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
9080 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
9081 BPF_STMT(BPF_RET | BPF_A, 0x0),
9082 },
9083 CLASSIC,
9084 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9085 { {0x3f, 0 }, },
9086 },
9087 {
9088 "LD_IND halfword negative offset, out of bounds",
9089 .u.insns = {
9090 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
9091 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
9092 BPF_STMT(BPF_RET | BPF_A, 0x0),
9093 },
9094 CLASSIC,
9095 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9096 { {0x3f, 0 } },
9097 },
9098 {
9099	"LD_IND word positive offset",
9100 .u.insns = {
9101 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
9102 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
9103 BPF_STMT(BPF_RET | BPF_A, 0x0),
9104 },
9105 CLASSIC,
9106 {
9107 [0x1c] = 0xaa, [0x1d] = 0x55,
9108 [0x1e] = 0xbb, [0x1f] = 0x66,
9109 [0x20] = 0xcc, [0x21] = 0x77,
9110 [0x22] = 0xdd, [0x23] = 0x88,
9111 [0x24] = 0xee, [0x25] = 0x99,
9112 [0x26] = 0xff, [0x27] = 0xaa,
9113 },
9114 { {0x40, 0xee99ffaa } },
9115 },
9116 {
9117 "LD_IND word negative offset",
9118 .u.insns = {
9119 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
9120 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
9121 BPF_STMT(BPF_RET | BPF_A, 0x0),
9122 },
9123 CLASSIC,
9124 {
9125 [0x1c] = 0xaa, [0x1d] = 0x55,
9126 [0x1e] = 0xbb, [0x1f] = 0x66,
9127 [0x20] = 0xcc, [0x21] = 0x77,
9128 [0x22] = 0xdd, [0x23] = 0x88,
9129 [0x24] = 0xee, [0x25] = 0x99,
9130 [0x26] = 0xff, [0x27] = 0xaa,
9131 },
9132 { {0x40, 0xaa55bb66 } },
9133 },
9134 {
9135 "LD_IND word unaligned (addr & 3 == 2)",
9136 .u.insns = {
9137 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
9138 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
9139 BPF_STMT(BPF_RET | BPF_A, 0x0),
9140 },
9141 CLASSIC,
9142 {
9143 [0x1c] = 0xaa, [0x1d] = 0x55,
9144 [0x1e] = 0xbb, [0x1f] = 0x66,
9145 [0x20] = 0xcc, [0x21] = 0x77,
9146 [0x22] = 0xdd, [0x23] = 0x88,
9147 [0x24] = 0xee, [0x25] = 0x99,
9148 [0x26] = 0xff, [0x27] = 0xaa,
9149 },
9150 { {0x40, 0xbb66cc77 } },
9151 },
9152 {
9153 "LD_IND word unaligned (addr & 3 == 1)",
9154 .u.insns = {
9155 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
9156 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
9157 BPF_STMT(BPF_RET | BPF_A, 0x0),
9158 },
9159 CLASSIC,
9160 {
9161 [0x1c] = 0xaa, [0x1d] = 0x55,
9162 [0x1e] = 0xbb, [0x1f] = 0x66,
9163 [0x20] = 0xcc, [0x21] = 0x77,
9164 [0x22] = 0xdd, [0x23] = 0x88,
9165 [0x24] = 0xee, [0x25] = 0x99,
9166 [0x26] = 0xff, [0x27] = 0xaa,
9167 },
9168 { {0x40, 0x55bb66cc } },
9169 },
9170 {
9171 "LD_IND word unaligned (addr & 3 == 3)",
9172 .u.insns = {
9173 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
9174 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
9175 BPF_STMT(BPF_RET | BPF_A, 0x0),
9176 },
9177 CLASSIC,
9178 {
9179 [0x1c] = 0xaa, [0x1d] = 0x55,
9180 [0x1e] = 0xbb, [0x1f] = 0x66,
9181 [0x20] = 0xcc, [0x21] = 0x77,
9182 [0x22] = 0xdd, [0x23] = 0x88,
9183 [0x24] = 0xee, [0x25] = 0x99,
9184 [0x26] = 0xff, [0x27] = 0xaa,
9185 },
9186 { {0x40, 0x66cc77dd } },
9187 },
9188 {
9189	"LD_IND word positive offset, all ff",
9190 .u.insns = {
9191 BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
9192 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
9193 BPF_STMT(BPF_RET | BPF_A, 0x0),
9194 },
9195 CLASSIC,
9196 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
9197 { {0x40, 0xffffffff } },
9198 },
9199 {
9200 "LD_IND word positive offset, out of bounds",
9201 .u.insns = {
9202 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
9203 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
9204 BPF_STMT(BPF_RET | BPF_A, 0x0),
9205 },
9206 CLASSIC,
9207 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9208 { {0x3f, 0 }, },
9209 },
9210 {
9211 "LD_IND word negative offset, out of bounds",
9212 .u.insns = {
9213 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
9214 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
9215 BPF_STMT(BPF_RET | BPF_A, 0x0),
9216 },
9217 CLASSIC,
9218 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9219 { {0x3f, 0 } },
9220 },
9221 {
9222	"LD_ABS byte",
9223 .u.insns = {
9224 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
9225 BPF_STMT(BPF_RET | BPF_A, 0x0),
9226 },
9227 CLASSIC,
9228 {
9229 [0x1c] = 0xaa, [0x1d] = 0x55,
9230 [0x1e] = 0xbb, [0x1f] = 0x66,
9231 [0x20] = 0xcc, [0x21] = 0x77,
9232 [0x22] = 0xdd, [0x23] = 0x88,
9233 [0x24] = 0xee, [0x25] = 0x99,
9234 [0x26] = 0xff, [0x27] = 0xaa,
9235 },
9236 { {0x40, 0xcc } },
9237 },
9238 {
9239	"LD_ABS byte positive offset, all ff",
9240 .u.insns = {
9241 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
9242 BPF_STMT(BPF_RET | BPF_A, 0x0),
9243 },
9244 CLASSIC,
9245 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
9246 { {0x40, 0xff } },
9247 },
9248 {
9249 "LD_ABS byte positive offset, out of bounds",
9250 .u.insns = {
9251 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
9252 BPF_STMT(BPF_RET | BPF_A, 0x0),
9253 },
9254 CLASSIC,
9255 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9256 { {0x3f, 0 }, },
9257 },
9258 {
9259 "LD_ABS byte negative offset, out of bounds load",
9260 .u.insns = {
9261 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
9262 BPF_STMT(BPF_RET | BPF_A, 0x0),
9263 },
9264 CLASSIC | FLAG_EXPECTED_FAIL,
9265 .expected_errcode = -EINVAL,
9266 },
9267 {
9268 "LD_ABS byte negative offset, in bounds",
9269 .u.insns = {
9270 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
9271 BPF_STMT(BPF_RET | BPF_A, 0x0),
9272 },
9273 CLASSIC,
9274 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9275 { {0x40, 0x82 }, },
9276 },
9277 {
9278 "LD_ABS byte negative offset, out of bounds",
9279 .u.insns = {
9280 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
9281 BPF_STMT(BPF_RET | BPF_A, 0x0),
9282 },
9283 CLASSIC,
9284 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9285 { {0x3f, 0 }, },
9286 },
9287 {
9288 "LD_ABS byte negative offset, multiple calls",
9289 .u.insns = {
9290 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
9291 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
9292 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
9293 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
9294 BPF_STMT(BPF_RET | BPF_A, 0x0),
9295 },
9296 CLASSIC,
9297 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9298 { {0x40, 0x82 }, },
9299 },
9300 {
9301	"LD_ABS halfword",
9302 .u.insns = {
9303 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
9304 BPF_STMT(BPF_RET | BPF_A, 0x0),
9305 },
9306 CLASSIC,
9307 {
9308 [0x1c] = 0xaa, [0x1d] = 0x55,
9309 [0x1e] = 0xbb, [0x1f] = 0x66,
9310 [0x20] = 0xcc, [0x21] = 0x77,
9311 [0x22] = 0xdd, [0x23] = 0x88,
9312 [0x24] = 0xee, [0x25] = 0x99,
9313 [0x26] = 0xff, [0x27] = 0xaa,
9314 },
9315 { {0x40, 0xdd88 } },
9316 },
9317 {
9318 "LD_ABS halfword unaligned",
9319 .u.insns = {
9320 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
9321 BPF_STMT(BPF_RET | BPF_A, 0x0),
9322 },
9323 CLASSIC,
9324 {
9325 [0x1c] = 0xaa, [0x1d] = 0x55,
9326 [0x1e] = 0xbb, [0x1f] = 0x66,
9327 [0x20] = 0xcc, [0x21] = 0x77,
9328 [0x22] = 0xdd, [0x23] = 0x88,
9329 [0x24] = 0xee, [0x25] = 0x99,
9330 [0x26] = 0xff, [0x27] = 0xaa,
9331 },
9332 { {0x40, 0x99ff } },
9333 },
9334 {
9335	"LD_ABS halfword positive offset, all ff",
9336 .u.insns = {
9337 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
9338 BPF_STMT(BPF_RET | BPF_A, 0x0),
9339 },
9340 CLASSIC,
9341 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
9342 { {0x40, 0xffff } },
9343 },
9344 {
9345 "LD_ABS halfword positive offset, out of bounds",
9346 .u.insns = {
9347 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
9348 BPF_STMT(BPF_RET | BPF_A, 0x0),
9349 },
9350 CLASSIC,
9351 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9352 { {0x3f, 0 }, },
9353 },
9354 {
9355 "LD_ABS halfword negative offset, out of bounds load",
9356 .u.insns = {
9357 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
9358 BPF_STMT(BPF_RET | BPF_A, 0x0),
9359 },
9360 CLASSIC | FLAG_EXPECTED_FAIL,
9361 .expected_errcode = -EINVAL,
9362 },
9363 {
9364 "LD_ABS halfword negative offset, in bounds",
9365 .u.insns = {
9366 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
9367 BPF_STMT(BPF_RET | BPF_A, 0x0),
9368 },
9369 CLASSIC,
9370 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9371 { {0x40, 0x1982 }, },
9372 },
9373 {
9374 "LD_ABS halfword negative offset, out of bounds",
9375 .u.insns = {
9376 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
9377 BPF_STMT(BPF_RET | BPF_A, 0x0),
9378 },
9379 CLASSIC,
9380 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9381 { {0x3f, 0 }, },
9382 },
9383 {
9384	"LD_ABS word",
9385 .u.insns = {
9386 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
9387 BPF_STMT(BPF_RET | BPF_A, 0x0),
9388 },
9389 CLASSIC,
9390 {
9391 [0x1c] = 0xaa, [0x1d] = 0x55,
9392 [0x1e] = 0xbb, [0x1f] = 0x66,
9393 [0x20] = 0xcc, [0x21] = 0x77,
9394 [0x22] = 0xdd, [0x23] = 0x88,
9395 [0x24] = 0xee, [0x25] = 0x99,
9396 [0x26] = 0xff, [0x27] = 0xaa,
9397 },
9398 { {0x40, 0xaa55bb66 } },
9399 },
9400 {
9401 "LD_ABS word unaligned (addr & 3 == 2)",
9402 .u.insns = {
9403 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
9404 BPF_STMT(BPF_RET | BPF_A, 0x0),
9405 },
9406 CLASSIC,
9407 {
9408 [0x1c] = 0xaa, [0x1d] = 0x55,
9409 [0x1e] = 0xbb, [0x1f] = 0x66,
9410 [0x20] = 0xcc, [0x21] = 0x77,
9411 [0x22] = 0xdd, [0x23] = 0x88,
9412 [0x24] = 0xee, [0x25] = 0x99,
9413 [0x26] = 0xff, [0x27] = 0xaa,
9414 },
9415 { {0x40, 0xdd88ee99 } },
9416 },
9417 {
9418 "LD_ABS word unaligned (addr & 3 == 1)",
9419 .u.insns = {
9420 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
9421 BPF_STMT(BPF_RET | BPF_A, 0x0),
9422 },
9423 CLASSIC,
9424 {
9425 [0x1c] = 0xaa, [0x1d] = 0x55,
9426 [0x1e] = 0xbb, [0x1f] = 0x66,
9427 [0x20] = 0xcc, [0x21] = 0x77,
9428 [0x22] = 0xdd, [0x23] = 0x88,
9429 [0x24] = 0xee, [0x25] = 0x99,
9430 [0x26] = 0xff, [0x27] = 0xaa,
9431 },
9432 { {0x40, 0x77dd88ee } },
9433 },
9434 {
9435 "LD_ABS word unaligned (addr & 3 == 3)",
9436 .u.insns = {
9437 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
9438 BPF_STMT(BPF_RET | BPF_A, 0x0),
9439 },
9440 CLASSIC,
9441 {
9442 [0x1c] = 0xaa, [0x1d] = 0x55,
9443 [0x1e] = 0xbb, [0x1f] = 0x66,
9444 [0x20] = 0xcc, [0x21] = 0x77,
9445 [0x22] = 0xdd, [0x23] = 0x88,
9446 [0x24] = 0xee, [0x25] = 0x99,
9447 [0x26] = 0xff, [0x27] = 0xaa,
9448 },
9449 { {0x40, 0x88ee99ff } },
9450 },
9451	{
9452 "LD_ABS word positive offset, all ff",
9453 .u.insns = {
9454 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
9455 BPF_STMT(BPF_RET | BPF_A, 0x0),
9456 },
9457 CLASSIC,
9458 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
9459 { {0x40, 0xffffffff } },
9460 },
9461 {
9462 "LD_ABS word positive offset, out of bounds",
9463 .u.insns = {
9464 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
9465 BPF_STMT(BPF_RET | BPF_A, 0x0),
9466 },
9467 CLASSIC,
9468 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9469 { {0x3f, 0 }, },
9470 },
9471 {
9472 "LD_ABS word negative offset, out of bounds load",
9473 .u.insns = {
9474 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
9475 BPF_STMT(BPF_RET | BPF_A, 0x0),
9476 },
9477 CLASSIC | FLAG_EXPECTED_FAIL,
9478 .expected_errcode = -EINVAL,
9479 },
9480 {
9481 "LD_ABS word negative offset, in bounds",
9482 .u.insns = {
9483 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
9484 BPF_STMT(BPF_RET | BPF_A, 0x0),
9485 },
9486 CLASSIC,
9487 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9488 { {0x40, 0x25051982 }, },
9489 },
9490 {
9491 "LD_ABS word negative offset, out of bounds",
9492 .u.insns = {
9493 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
9494 BPF_STMT(BPF_RET | BPF_A, 0x0),
9495 },
9496 CLASSIC,
9497 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9498 { {0x3f, 0 }, },
9499 },
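	/*
	 * LDX_MSH loads X = 4 * (P[k] & 0xf), the classic BPF idiom for
	 * extracting the IP header length, and must leave A untouched.
	 */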
9500 {
9501 "LDX_MSH standalone, preserved A",
9502 .u.insns = {
9503 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
9504 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
9505 BPF_STMT(BPF_RET | BPF_A, 0x0),
9506 },
9507 CLASSIC,
9508 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9509 { {0x40, 0xffeebbaa }, },
9510 },
9511 {
9512 "LDX_MSH standalone, preserved A 2",
9513 .u.insns = {
9514 BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
9515 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
9516 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
9517 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
9518 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
9519 BPF_STMT(BPF_RET | BPF_A, 0x0),
9520 },
9521 CLASSIC,
9522 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9523 { {0x40, 0x175e9d63 }, },
9524 },
9525 {
9526 "LDX_MSH standalone, test result 1",
9527 .u.insns = {
9528 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
9529 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
9530 BPF_STMT(BPF_MISC | BPF_TXA, 0),
9531 BPF_STMT(BPF_RET | BPF_A, 0x0),
9532 },
9533 CLASSIC,
9534 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9535 { {0x40, 0x14 }, },
9536 },
9537 {
9538 "LDX_MSH standalone, test result 2",
9539 .u.insns = {
9540 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
9541 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
9542 BPF_STMT(BPF_MISC | BPF_TXA, 0),
9543 BPF_STMT(BPF_RET | BPF_A, 0x0),
9544 },
9545 CLASSIC,
9546 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9547 { {0x40, 0x24 }, },
9548 },
9549 {
9550 "LDX_MSH standalone, negative offset",
9551 .u.insns = {
9552 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
9553 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
9554 BPF_STMT(BPF_MISC | BPF_TXA, 0),
9555 BPF_STMT(BPF_RET | BPF_A, 0x0),
9556 },
9557 CLASSIC,
9558 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9559 { {0x40, 0 }, },
9560 },
9561 {
9562 "LDX_MSH standalone, negative offset 2",
9563 .u.insns = {
9564 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
9565 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
9566 BPF_STMT(BPF_MISC | BPF_TXA, 0),
9567 BPF_STMT(BPF_RET | BPF_A, 0x0),
9568 },
9569 CLASSIC,
9570 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9571 { {0x40, 0x24 }, },
9572 },
9573 {
9574 "LDX_MSH standalone, out of bounds",
9575 .u.insns = {
9576 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
9577 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
9578 BPF_STMT(BPF_MISC | BPF_TXA, 0),
9579 BPF_STMT(BPF_RET | BPF_A, 0x0),
9580 },
9581 CLASSIC,
9582 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
9583 { {0x40, 0 }, },
9584 },
9585	/*
9586 * verify that the interpreter or JIT correctly sets A and X
9587 * to 0.
9588 */
9589 {
9590 "ADD default X",
9591 .u.insns = {
9592 /*
9593 * A = 0x42
9594 * A = A + X
9595 * ret A
9596 */
9597 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
9598 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
9599 BPF_STMT(BPF_RET | BPF_A, 0x0),
9600 },
9601 CLASSIC | FLAG_NO_DATA,
9602 {},
9603 { {0x1, 0x42 } },
9604 },
9605 {
9606 "ADD default A",
9607 .u.insns = {
9608 /*
9609 * A = A + 0x42
9610 * ret A
9611 */
9612 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
9613 BPF_STMT(BPF_RET | BPF_A, 0x0),
9614 },
9615 CLASSIC | FLAG_NO_DATA,
9616 {},
9617 { {0x1, 0x42 } },
9618 },
9619 {
9620 "SUB default X",
9621 .u.insns = {
9622 /*
9623 * A = 0x66
9624 * A = A - X
9625 * ret A
9626 */
9627 BPF_STMT(BPF_LD | BPF_IMM, 0x66),
9628 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
9629 BPF_STMT(BPF_RET | BPF_A, 0x0),
9630 },
9631 CLASSIC | FLAG_NO_DATA,
9632 {},
9633 { {0x1, 0x66 } },
9634 },
9635 {
9636 "SUB default A",
9637 .u.insns = {
9638 /*
9639 * A = A - -0x66
9640 * ret A
9641 */
9642 BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
9643 BPF_STMT(BPF_RET | BPF_A, 0x0),
9644 },
9645 CLASSIC | FLAG_NO_DATA,
9646 {},
9647 { {0x1, 0x66 } },
9648 },
9649 {
9650 "MUL default X",
9651 .u.insns = {
9652 /*
9653 * A = 0x42
9654 * A = A * X
9655 * ret A
9656 */
9657 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
9658 BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
9659 BPF_STMT(BPF_RET | BPF_A, 0x0),
9660 },
9661 CLASSIC | FLAG_NO_DATA,
9662 {},
9663 { {0x1, 0x0 } },
9664 },
9665 {
9666 "MUL default A",
9667 .u.insns = {
9668 /*
9669 * A = A * 0x66
9670 * ret A
9671 */
9672 BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
9673 BPF_STMT(BPF_RET | BPF_A, 0x0),
9674 },
9675 CLASSIC | FLAG_NO_DATA,
9676 {},
9677 { {0x1, 0x0 } },
9678 },
9679 {
9680 "DIV default X",
9681 .u.insns = {
9682 /*
9683 * A = 0x42
9684	 * A = A / X ; this halts the filter execution if X is 0
9685 * ret 0x42
9686 */
9687 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
9688 BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
9689 BPF_STMT(BPF_RET | BPF_K, 0x42),
9690 },
9691 CLASSIC | FLAG_NO_DATA,
9692 {},
9693 { {0x1, 0x0 } },
9694 },
9695 {
9696 "DIV default A",
9697 .u.insns = {
9698 /*
9699 * A = A / 1
9700 * ret A
9701 */
9702 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
9703 BPF_STMT(BPF_RET | BPF_A, 0x0),
9704 },
9705 CLASSIC | FLAG_NO_DATA,
9706 {},
9707 { {0x1, 0x0 } },
9708 },
9709 {
Yang Shid4e4bc12015-11-04 11:36:37 -08009710 "MOD default X",
9711 .u.insns = {
9712 /*
9713 * A = 0x42
9714	 * A = A mod X ; this halts the filter execution if X is 0
9715 * ret 0x42
9716 */
9717 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
9718 BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
9719 BPF_STMT(BPF_RET | BPF_K, 0x42),
9720 },
9721 CLASSIC | FLAG_NO_DATA,
9722 {},
9723 { {0x1, 0x0 } },
9724 },
9725 {
9726 "MOD default A",
9727 .u.insns = {
9728 /*
9729 * A = A mod 1
9730 * ret A
9731 */
9732 BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
9733 BPF_STMT(BPF_RET | BPF_A, 0x0),
9734 },
9735 CLASSIC | FLAG_NO_DATA,
9736 {},
9737 { {0x1, 0x0 } },
9738 },
9739 {
Nicolas Schichan86bf1722015-08-04 15:19:12 +02009740 "JMP EQ default A",
9741 .u.insns = {
9742 /*
9743 * cmp A, 0x0, 0, 1
9744 * ret 0x42
9745 * ret 0x66
9746 */
9747 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
9748 BPF_STMT(BPF_RET | BPF_K, 0x42),
9749 BPF_STMT(BPF_RET | BPF_K, 0x66),
9750 },
9751 CLASSIC | FLAG_NO_DATA,
9752 {},
9753 { {0x1, 0x42 } },
9754 },
9755 {
9756 "JMP EQ default X",
9757 .u.insns = {
9758 /*
9759 * A = 0x0
9760 * cmp A, X, 0, 1
9761 * ret 0x42
9762 * ret 0x66
9763 */
9764 BPF_STMT(BPF_LD | BPF_IMM, 0x0),
9765 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
9766 BPF_STMT(BPF_RET | BPF_K, 0x42),
9767 BPF_STMT(BPF_RET | BPF_K, 0x66),
9768 },
9769 CLASSIC | FLAG_NO_DATA,
9770 {},
9771 { {0x1, 0x42 } },
9772 },
Daniel Borkmannfcd1c912018-01-20 01:24:31 +01009773	/* Checking interpreter vs JIT handling of sign-extended imms. */
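	/*
	 * The 32-bit immediate of a 64-bit JMP instruction is sign-extended
	 * before the comparison, whereas a 32-bit ALU MOV zero-extends its
	 * destination register. Hence 0xfefb0000 loaded via LD_IMM64 does
	 * not equal the JNE immediate 0xfefb0000, which becomes
	 * 0xfffffffffefb0000 after sign extension.
	 */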
9774 {
9775 "JNE signed compare, test 1",
9776 .u.insns_int = {
9777 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
9778 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
9779 BPF_MOV64_REG(R2, R1),
9780 BPF_ALU64_REG(BPF_AND, R2, R3),
9781 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9782 BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
9783 BPF_ALU32_IMM(BPF_MOV, R0, 2),
9784 BPF_EXIT_INSN(),
9785 },
9786 INTERNAL,
9787 { },
9788 { { 0, 1 } },
9789 },
9790 {
9791 "JNE signed compare, test 2",
9792 .u.insns_int = {
9793 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
9794 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
9795 BPF_MOV64_REG(R2, R1),
9796 BPF_ALU64_REG(BPF_AND, R2, R3),
9797 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9798 BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
9799 BPF_ALU32_IMM(BPF_MOV, R0, 2),
9800 BPF_EXIT_INSN(),
9801 },
9802 INTERNAL,
9803 { },
9804 { { 0, 1 } },
9805 },
9806 {
9807 "JNE signed compare, test 3",
9808 .u.insns_int = {
9809 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
9810 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
9811 BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
9812 BPF_MOV64_REG(R2, R1),
9813 BPF_ALU64_REG(BPF_AND, R2, R3),
9814 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9815 BPF_JMP_REG(BPF_JNE, R2, R4, 1),
9816 BPF_ALU32_IMM(BPF_MOV, R0, 2),
9817 BPF_EXIT_INSN(),
9818 },
9819 INTERNAL,
9820 { },
9821 { { 0, 2 } },
9822 },
9823 {
9824 "JNE signed compare, test 4",
9825 .u.insns_int = {
9826 BPF_LD_IMM64(R1, -17104896),
9827 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9828 BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
9829 BPF_ALU32_IMM(BPF_MOV, R0, 2),
9830 BPF_EXIT_INSN(),
9831 },
9832 INTERNAL,
9833 { },
9834 { { 0, 2 } },
9835 },
9836 {
9837 "JNE signed compare, test 5",
9838 .u.insns_int = {
9839 BPF_LD_IMM64(R1, 0xfefb0000),
9840 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9841 BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
9842 BPF_ALU32_IMM(BPF_MOV, R0, 2),
9843 BPF_EXIT_INSN(),
9844 },
9845 INTERNAL,
9846 { },
9847 { { 0, 1 } },
9848 },
9849 {
9850 "JNE signed compare, test 6",
9851 .u.insns_int = {
9852 BPF_LD_IMM64(R1, 0x7efb0000),
9853 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9854 BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
9855 BPF_ALU32_IMM(BPF_MOV, R0, 2),
9856 BPF_EXIT_INSN(),
9857 },
9858 INTERNAL,
9859 { },
9860 { { 0, 2 } },
9861 },
9862 {
9863 "JNE signed compare, test 7",
9864 .u.insns = {
9865 BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
9866 BPF_STMT(BPF_MISC | BPF_TAX, 0),
9867 BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
9868 BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
9869 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
9870 BPF_STMT(BPF_RET | BPF_K, 1),
9871 BPF_STMT(BPF_RET | BPF_K, 2),
9872 },
9873 CLASSIC | FLAG_NO_DATA,
9874 {},
9875 { { 0, 2 } },
9876 },
Johan Almbladh68c956f2021-09-14 11:18:31 +02009877 /* Exhaustive test of ALU64 shift operations */
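	/*
	 * The following entries carry no static instructions; the program
	 * is generated at run time by the bpf_fill_*() helpers defined
	 * earlier in this file and is expected to return 1 on success.
	 */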
9878 {
9879 "ALU64_LSH_K: all shift values",
9880 { },
9881 INTERNAL | FLAG_NO_DATA,
9882 { },
9883 { { 0, 1 } },
9884 .fill_helper = bpf_fill_alu_lsh_imm,
9885 },
9886 {
9887 "ALU64_RSH_K: all shift values",
9888 { },
9889 INTERNAL | FLAG_NO_DATA,
9890 { },
9891 { { 0, 1 } },
9892 .fill_helper = bpf_fill_alu_rsh_imm,
9893 },
9894 {
9895 "ALU64_ARSH_K: all shift values",
9896 { },
9897 INTERNAL | FLAG_NO_DATA,
9898 { },
9899 { { 0, 1 } },
9900 .fill_helper = bpf_fill_alu_arsh_imm,
9901 },
9902 {
9903 "ALU64_LSH_X: all shift values",
9904 { },
9905 INTERNAL | FLAG_NO_DATA,
9906 { },
9907 { { 0, 1 } },
9908 .fill_helper = bpf_fill_alu_lsh_reg,
9909 },
9910 {
9911 "ALU64_RSH_X: all shift values",
9912 { },
9913 INTERNAL | FLAG_NO_DATA,
9914 { },
9915 { { 0, 1 } },
9916 .fill_helper = bpf_fill_alu_rsh_reg,
9917 },
9918 {
9919 "ALU64_ARSH_X: all shift values",
9920 { },
9921 INTERNAL | FLAG_NO_DATA,
9922 { },
9923 { { 0, 1 } },
9924 .fill_helper = bpf_fill_alu_arsh_reg,
9925 },
9926 /* Exhaustive test of ALU32 shift operations */
9927 {
9928 "ALU32_LSH_K: all shift values",
9929 { },
9930 INTERNAL | FLAG_NO_DATA,
9931 { },
9932 { { 0, 1 } },
9933 .fill_helper = bpf_fill_alu32_lsh_imm,
9934 },
9935 {
9936 "ALU32_RSH_K: all shift values",
9937 { },
9938 INTERNAL | FLAG_NO_DATA,
9939 { },
9940 { { 0, 1 } },
9941 .fill_helper = bpf_fill_alu32_rsh_imm,
9942 },
9943 {
9944 "ALU32_ARSH_K: all shift values",
9945 { },
9946 INTERNAL | FLAG_NO_DATA,
9947 { },
9948 { { 0, 1 } },
9949 .fill_helper = bpf_fill_alu32_arsh_imm,
9950 },
9951 {
9952 "ALU32_LSH_X: all shift values",
9953 { },
9954 INTERNAL | FLAG_NO_DATA,
9955 { },
9956 { { 0, 1 } },
9957 .fill_helper = bpf_fill_alu32_lsh_reg,
9958 },
9959 {
9960 "ALU32_RSH_X: all shift values",
9961 { },
9962 INTERNAL | FLAG_NO_DATA,
9963 { },
9964 { { 0, 1 } },
9965 .fill_helper = bpf_fill_alu32_rsh_reg,
9966 },
9967 {
9968 "ALU32_ARSH_X: all shift values",
9969 { },
9970 INTERNAL | FLAG_NO_DATA,
9971 { },
9972 { { 0, 1 } },
9973 .fill_helper = bpf_fill_alu32_arsh_reg,
9974 },
Johan Almbladh9298e632021-09-14 11:18:32 +02009975 /* ALU64 immediate magnitudes */
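	/*
	 * Pattern tests: nr_testruns overrides the default MAX_TESTRUNS run
	 * count (clamped to at most MAX_TESTRUNS); see the runs calculation
	 * in run_one() below.
	 */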
9976 {
9977 "ALU64_MOV_K: all immediate value magnitudes",
9978 { },
9979 INTERNAL | FLAG_NO_DATA,
9980 { },
9981 { { 0, 1 } },
9982 .fill_helper = bpf_fill_alu64_mov_imm,
9983 .nr_testruns = NR_PATTERN_RUNS,
9984 },
9985 {
9986 "ALU64_AND_K: all immediate value magnitudes",
9987 { },
9988 INTERNAL | FLAG_NO_DATA,
9989 { },
9990 { { 0, 1 } },
9991 .fill_helper = bpf_fill_alu64_and_imm,
9992 .nr_testruns = NR_PATTERN_RUNS,
9993 },
9994 {
9995 "ALU64_OR_K: all immediate value magnitudes",
9996 { },
9997 INTERNAL | FLAG_NO_DATA,
9998 { },
9999 { { 0, 1 } },
10000 .fill_helper = bpf_fill_alu64_or_imm,
10001 .nr_testruns = NR_PATTERN_RUNS,
10002 },
10003 {
10004 "ALU64_XOR_K: all immediate value magnitudes",
10005 { },
10006 INTERNAL | FLAG_NO_DATA,
10007 { },
10008 { { 0, 1 } },
10009 .fill_helper = bpf_fill_alu64_xor_imm,
10010 .nr_testruns = NR_PATTERN_RUNS,
10011 },
10012 {
10013 "ALU64_ADD_K: all immediate value magnitudes",
10014 { },
10015 INTERNAL | FLAG_NO_DATA,
10016 { },
10017 { { 0, 1 } },
10018 .fill_helper = bpf_fill_alu64_add_imm,
10019 .nr_testruns = NR_PATTERN_RUNS,
10020 },
10021 {
10022 "ALU64_SUB_K: all immediate value magnitudes",
10023 { },
10024 INTERNAL | FLAG_NO_DATA,
10025 { },
10026 { { 0, 1 } },
10027 .fill_helper = bpf_fill_alu64_sub_imm,
10028 .nr_testruns = NR_PATTERN_RUNS,
10029 },
10030 {
10031 "ALU64_MUL_K: all immediate value magnitudes",
10032 { },
10033 INTERNAL | FLAG_NO_DATA,
10034 { },
10035 { { 0, 1 } },
10036 .fill_helper = bpf_fill_alu64_mul_imm,
10037 .nr_testruns = NR_PATTERN_RUNS,
10038 },
10039 {
10040 "ALU64_DIV_K: all immediate value magnitudes",
10041 { },
10042 INTERNAL | FLAG_NO_DATA,
10043 { },
10044 { { 0, 1 } },
10045 .fill_helper = bpf_fill_alu64_div_imm,
10046 .nr_testruns = NR_PATTERN_RUNS,
10047 },
10048 {
10049 "ALU64_MOD_K: all immediate value magnitudes",
10050 { },
10051 INTERNAL | FLAG_NO_DATA,
10052 { },
10053 { { 0, 1 } },
10054 .fill_helper = bpf_fill_alu64_mod_imm,
10055 .nr_testruns = NR_PATTERN_RUNS,
10056 },
10057 /* ALU32 immediate magnitudes */
10058 {
10059 "ALU32_MOV_K: all immediate value magnitudes",
10060 { },
10061 INTERNAL | FLAG_NO_DATA,
10062 { },
10063 { { 0, 1 } },
10064 .fill_helper = bpf_fill_alu32_mov_imm,
10065 .nr_testruns = NR_PATTERN_RUNS,
10066 },
10067 {
10068 "ALU32_AND_K: all immediate value magnitudes",
10069 { },
10070 INTERNAL | FLAG_NO_DATA,
10071 { },
10072 { { 0, 1 } },
10073 .fill_helper = bpf_fill_alu32_and_imm,
10074 .nr_testruns = NR_PATTERN_RUNS,
10075 },
10076 {
10077 "ALU32_OR_K: all immediate value magnitudes",
10078 { },
10079 INTERNAL | FLAG_NO_DATA,
10080 { },
10081 { { 0, 1 } },
10082 .fill_helper = bpf_fill_alu32_or_imm,
10083 .nr_testruns = NR_PATTERN_RUNS,
10084 },
10085 {
10086 "ALU32_XOR_K: all immediate value magnitudes",
10087 { },
10088 INTERNAL | FLAG_NO_DATA,
10089 { },
10090 { { 0, 1 } },
10091 .fill_helper = bpf_fill_alu32_xor_imm,
10092 .nr_testruns = NR_PATTERN_RUNS,
10093 },
10094 {
10095 "ALU32_ADD_K: all immediate value magnitudes",
10096 { },
10097 INTERNAL | FLAG_NO_DATA,
10098 { },
10099 { { 0, 1 } },
10100 .fill_helper = bpf_fill_alu32_add_imm,
10101 .nr_testruns = NR_PATTERN_RUNS,
10102 },
10103 {
10104 "ALU32_SUB_K: all immediate value magnitudes",
10105 { },
10106 INTERNAL | FLAG_NO_DATA,
10107 { },
10108 { { 0, 1 } },
10109 .fill_helper = bpf_fill_alu32_sub_imm,
10110 .nr_testruns = NR_PATTERN_RUNS,
10111 },
10112 {
10113 "ALU32_MUL_K: all immediate value magnitudes",
10114 { },
10115 INTERNAL | FLAG_NO_DATA,
10116 { },
10117 { { 0, 1 } },
10118 .fill_helper = bpf_fill_alu32_mul_imm,
10119 .nr_testruns = NR_PATTERN_RUNS,
10120 },
10121 {
10122 "ALU32_DIV_K: all immediate value magnitudes",
10123 { },
10124 INTERNAL | FLAG_NO_DATA,
10125 { },
10126 { { 0, 1 } },
10127 .fill_helper = bpf_fill_alu32_div_imm,
10128 .nr_testruns = NR_PATTERN_RUNS,
10129 },
10130 {
10131 "ALU32_MOD_K: all immediate value magnitudes",
10132 { },
10133 INTERNAL | FLAG_NO_DATA,
10134 { },
10135 { { 0, 1 } },
10136 .fill_helper = bpf_fill_alu32_mod_imm,
Johan Almbladha5a36542021-09-14 11:18:33 +020010137 .nr_testruns = NR_PATTERN_RUNS,
Johan Almbladh9298e632021-09-14 11:18:32 +020010138 },
10139 /* ALU64 register magnitudes */
10140 {
10141 "ALU64_MOV_X: all register value magnitudes",
10142 { },
10143 INTERNAL | FLAG_NO_DATA,
10144 { },
10145 { { 0, 1 } },
10146 .fill_helper = bpf_fill_alu64_mov_reg,
10147 .nr_testruns = NR_PATTERN_RUNS,
10148 },
10149 {
10150 "ALU64_AND_X: all register value magnitudes",
10151 { },
10152 INTERNAL | FLAG_NO_DATA,
10153 { },
10154 { { 0, 1 } },
10155 .fill_helper = bpf_fill_alu64_and_reg,
10156 .nr_testruns = NR_PATTERN_RUNS,
10157 },
10158 {
10159 "ALU64_OR_X: all register value magnitudes",
10160 { },
10161 INTERNAL | FLAG_NO_DATA,
10162 { },
10163 { { 0, 1 } },
10164 .fill_helper = bpf_fill_alu64_or_reg,
10165 .nr_testruns = NR_PATTERN_RUNS,
10166 },
10167 {
10168 "ALU64_XOR_X: all register value magnitudes",
10169 { },
10170 INTERNAL | FLAG_NO_DATA,
10171 { },
10172 { { 0, 1 } },
10173 .fill_helper = bpf_fill_alu64_xor_reg,
10174 .nr_testruns = NR_PATTERN_RUNS,
10175 },
10176 {
10177 "ALU64_ADD_X: all register value magnitudes",
10178 { },
10179 INTERNAL | FLAG_NO_DATA,
10180 { },
10181 { { 0, 1 } },
10182 .fill_helper = bpf_fill_alu64_add_reg,
10183 .nr_testruns = NR_PATTERN_RUNS,
10184 },
10185 {
10186 "ALU64_SUB_X: all register value magnitudes",
10187 { },
10188 INTERNAL | FLAG_NO_DATA,
10189 { },
10190 { { 0, 1 } },
10191 .fill_helper = bpf_fill_alu64_sub_reg,
10192 .nr_testruns = NR_PATTERN_RUNS,
10193 },
10194 {
10195 "ALU64_MUL_X: all register value magnitudes",
10196 { },
10197 INTERNAL | FLAG_NO_DATA,
10198 { },
10199 { { 0, 1 } },
10200 .fill_helper = bpf_fill_alu64_mul_reg,
10201 .nr_testruns = NR_PATTERN_RUNS,
10202 },
10203 {
10204 "ALU64_DIV_X: all register value magnitudes",
10205 { },
10206 INTERNAL | FLAG_NO_DATA,
10207 { },
10208 { { 0, 1 } },
10209 .fill_helper = bpf_fill_alu64_div_reg,
10210 .nr_testruns = NR_PATTERN_RUNS,
10211 },
10212 {
10213 "ALU64_MOD_X: all register value magnitudes",
10214 { },
10215 INTERNAL | FLAG_NO_DATA,
10216 { },
10217 { { 0, 1 } },
10218 .fill_helper = bpf_fill_alu64_mod_reg,
10219 .nr_testruns = NR_PATTERN_RUNS,
10220 },
10221 /* ALU32 register magnitudes */
10222 {
10223 "ALU32_MOV_X: all register value magnitudes",
10224 { },
10225 INTERNAL | FLAG_NO_DATA,
10226 { },
10227 { { 0, 1 } },
10228 .fill_helper = bpf_fill_alu32_mov_reg,
10229 .nr_testruns = NR_PATTERN_RUNS,
10230 },
10231 {
10232 "ALU32_AND_X: all register value magnitudes",
10233 { },
10234 INTERNAL | FLAG_NO_DATA,
10235 { },
10236 { { 0, 1 } },
10237 .fill_helper = bpf_fill_alu32_and_reg,
10238 .nr_testruns = NR_PATTERN_RUNS,
10239 },
10240 {
10241 "ALU32_OR_X: all register value magnitudes",
10242 { },
10243 INTERNAL | FLAG_NO_DATA,
10244 { },
10245 { { 0, 1 } },
10246 .fill_helper = bpf_fill_alu32_or_reg,
10247 .nr_testruns = NR_PATTERN_RUNS,
10248 },
10249 {
10250 "ALU32_XOR_X: all register value magnitudes",
10251 { },
10252 INTERNAL | FLAG_NO_DATA,
10253 { },
10254 { { 0, 1 } },
10255 .fill_helper = bpf_fill_alu32_xor_reg,
10256 .nr_testruns = NR_PATTERN_RUNS,
10257 },
10258 {
10259 "ALU32_ADD_X: all register value magnitudes",
10260 { },
10261 INTERNAL | FLAG_NO_DATA,
10262 { },
10263 { { 0, 1 } },
10264 .fill_helper = bpf_fill_alu32_add_reg,
10265 .nr_testruns = NR_PATTERN_RUNS,
10266 },
10267 {
10268 "ALU32_SUB_X: all register value magnitudes",
10269 { },
10270 INTERNAL | FLAG_NO_DATA,
10271 { },
10272 { { 0, 1 } },
10273 .fill_helper = bpf_fill_alu32_sub_reg,
10274 .nr_testruns = NR_PATTERN_RUNS,
10275 },
10276 {
10277 "ALU32_MUL_X: all register value magnitudes",
10278 { },
10279 INTERNAL | FLAG_NO_DATA,
10280 { },
10281 { { 0, 1 } },
10282 .fill_helper = bpf_fill_alu32_mul_reg,
10283 .nr_testruns = NR_PATTERN_RUNS,
10284 },
10285 {
10286 "ALU32_DIV_X: all register value magnitudes",
10287 { },
10288 INTERNAL | FLAG_NO_DATA,
10289 { },
10290 { { 0, 1 } },
10291 .fill_helper = bpf_fill_alu32_div_reg,
10292 .nr_testruns = NR_PATTERN_RUNS,
10293 },
10294 {
10295 "ALU32_MOD_X: all register value magnitudes",
10296 { },
10297 INTERNAL | FLAG_NO_DATA,
10298 { },
10299 { { 0, 1 } },
10300 .fill_helper = bpf_fill_alu32_mod_reg,
10301 .nr_testruns = NR_PATTERN_RUNS,
10302 },
Johan Almbladh2e807612021-09-14 11:18:35 +020010303 /* LD_IMM64 immediate magnitudes */
10304 {
10305 "LD_IMM64: all immediate value magnitudes",
10306 { },
10307 INTERNAL | FLAG_NO_DATA,
10308 { },
10309 { { 0, 1 } },
10310 .fill_helper = bpf_fill_ld_imm64,
10311 },
Johan Almbladha5a36542021-09-14 11:18:33 +020010312 /* JMP immediate magnitudes */
10313 {
10314 "JMP_JSET_K: all immediate value magnitudes",
10315 { },
10316 INTERNAL | FLAG_NO_DATA,
10317 { },
10318 { { 0, 1 } },
10319 .fill_helper = bpf_fill_jmp_jset_imm,
10320 .nr_testruns = NR_PATTERN_RUNS,
10321 },
10322 {
10323 "JMP_JEQ_K: all immediate value magnitudes",
10324 { },
10325 INTERNAL | FLAG_NO_DATA,
10326 { },
10327 { { 0, 1 } },
10328 .fill_helper = bpf_fill_jmp_jeq_imm,
10329 .nr_testruns = NR_PATTERN_RUNS,
10330 },
10331 {
10332 "JMP_JNE_K: all immediate value magnitudes",
10333 { },
10334 INTERNAL | FLAG_NO_DATA,
10335 { },
10336 { { 0, 1 } },
10337 .fill_helper = bpf_fill_jmp_jne_imm,
10338 .nr_testruns = NR_PATTERN_RUNS,
10339 },
10340 {
10341 "JMP_JGT_K: all immediate value magnitudes",
10342 { },
10343 INTERNAL | FLAG_NO_DATA,
10344 { },
10345 { { 0, 1 } },
10346 .fill_helper = bpf_fill_jmp_jgt_imm,
10347 .nr_testruns = NR_PATTERN_RUNS,
10348 },
10349 {
10350 "JMP_JGE_K: all immediate value magnitudes",
10351 { },
10352 INTERNAL | FLAG_NO_DATA,
10353 { },
10354 { { 0, 1 } },
10355 .fill_helper = bpf_fill_jmp_jge_imm,
10356 .nr_testruns = NR_PATTERN_RUNS,
10357 },
10358 {
10359 "JMP_JLT_K: all immediate value magnitudes",
10360 { },
10361 INTERNAL | FLAG_NO_DATA,
10362 { },
10363 { { 0, 1 } },
10364 .fill_helper = bpf_fill_jmp_jlt_imm,
10365 .nr_testruns = NR_PATTERN_RUNS,
10366 },
10367 {
10368 "JMP_JLE_K: all immediate value magnitudes",
10369 { },
10370 INTERNAL | FLAG_NO_DATA,
10371 { },
10372 { { 0, 1 } },
10373 .fill_helper = bpf_fill_jmp_jle_imm,
10374 .nr_testruns = NR_PATTERN_RUNS,
10375 },
10376 {
10377 "JMP_JSGT_K: all immediate value magnitudes",
10378 { },
10379 INTERNAL | FLAG_NO_DATA,
10380 { },
10381 { { 0, 1 } },
10382 .fill_helper = bpf_fill_jmp_jsgt_imm,
10383 .nr_testruns = NR_PATTERN_RUNS,
10384 },
10385 {
10386 "JMP_JSGE_K: all immediate value magnitudes",
10387 { },
10388 INTERNAL | FLAG_NO_DATA,
10389 { },
10390 { { 0, 1 } },
10391 .fill_helper = bpf_fill_jmp_jsge_imm,
10392 .nr_testruns = NR_PATTERN_RUNS,
10393 },
10394 {
10395 "JMP_JSLT_K: all immediate value magnitudes",
10396 { },
10397 INTERNAL | FLAG_NO_DATA,
10398 { },
10399 { { 0, 1 } },
10400 .fill_helper = bpf_fill_jmp_jslt_imm,
10401 .nr_testruns = NR_PATTERN_RUNS,
10402 },
10403 {
10404 "JMP_JSLE_K: all immediate value magnitudes",
10405 { },
10406 INTERNAL | FLAG_NO_DATA,
10407 { },
10408 { { 0, 1 } },
10409 .fill_helper = bpf_fill_jmp_jsle_imm,
10410 .nr_testruns = NR_PATTERN_RUNS,
10411 },
10412 /* JMP register magnitudes */
10413 {
10414 "JMP_JSET_X: all register value magnitudes",
10415 { },
10416 INTERNAL | FLAG_NO_DATA,
10417 { },
10418 { { 0, 1 } },
10419 .fill_helper = bpf_fill_jmp_jset_reg,
10420 .nr_testruns = NR_PATTERN_RUNS,
10421 },
10422 {
10423 "JMP_JEQ_X: all register value magnitudes",
10424 { },
10425 INTERNAL | FLAG_NO_DATA,
10426 { },
10427 { { 0, 1 } },
10428 .fill_helper = bpf_fill_jmp_jeq_reg,
10429 .nr_testruns = NR_PATTERN_RUNS,
10430 },
10431 {
10432 "JMP_JNE_X: all register value magnitudes",
10433 { },
10434 INTERNAL | FLAG_NO_DATA,
10435 { },
10436 { { 0, 1 } },
10437 .fill_helper = bpf_fill_jmp_jne_reg,
10438 .nr_testruns = NR_PATTERN_RUNS,
10439 },
10440 {
10441 "JMP_JGT_X: all register value magnitudes",
10442 { },
10443 INTERNAL | FLAG_NO_DATA,
10444 { },
10445 { { 0, 1 } },
10446 .fill_helper = bpf_fill_jmp_jgt_reg,
10447 .nr_testruns = NR_PATTERN_RUNS,
10448 },
10449 {
10450 "JMP_JGE_X: all register value magnitudes",
10451 { },
10452 INTERNAL | FLAG_NO_DATA,
10453 { },
10454 { { 0, 1 } },
10455 .fill_helper = bpf_fill_jmp_jge_reg,
10456 .nr_testruns = NR_PATTERN_RUNS,
10457 },
10458 {
10459 "JMP_JLT_X: all register value magnitudes",
10460 { },
10461 INTERNAL | FLAG_NO_DATA,
10462 { },
10463 { { 0, 1 } },
10464 .fill_helper = bpf_fill_jmp_jlt_reg,
10465 .nr_testruns = NR_PATTERN_RUNS,
10466 },
10467 {
10468 "JMP_JLE_X: all register value magnitudes",
10469 { },
10470 INTERNAL | FLAG_NO_DATA,
10471 { },
10472 { { 0, 1 } },
10473 .fill_helper = bpf_fill_jmp_jle_reg,
10474 .nr_testruns = NR_PATTERN_RUNS,
10475 },
10476 {
10477 "JMP_JSGT_X: all register value magnitudes",
10478 { },
10479 INTERNAL | FLAG_NO_DATA,
10480 { },
10481 { { 0, 1 } },
10482 .fill_helper = bpf_fill_jmp_jsgt_reg,
10483 .nr_testruns = NR_PATTERN_RUNS,
10484 },
10485 {
10486 "JMP_JSGE_X: all register value magnitudes",
10487 { },
10488 INTERNAL | FLAG_NO_DATA,
10489 { },
10490 { { 0, 1 } },
10491 .fill_helper = bpf_fill_jmp_jsge_reg,
10492 .nr_testruns = NR_PATTERN_RUNS,
10493 },
10494 {
10495 "JMP_JSLT_X: all register value magnitudes",
10496 { },
10497 INTERNAL | FLAG_NO_DATA,
10498 { },
10499 { { 0, 1 } },
10500 .fill_helper = bpf_fill_jmp_jslt_reg,
10501 .nr_testruns = NR_PATTERN_RUNS,
10502 },
10503 {
10504 "JMP_JSLE_X: all register value magnitudes",
10505 { },
10506 INTERNAL | FLAG_NO_DATA,
10507 { },
10508 { { 0, 1 } },
10509 .fill_helper = bpf_fill_jmp_jsle_reg,
10510 .nr_testruns = NR_PATTERN_RUNS,
10511 },
10512 /* JMP32 immediate magnitudes */
10513 {
10514 "JMP32_JSET_K: all immediate value magnitudes",
10515 { },
10516 INTERNAL | FLAG_NO_DATA,
10517 { },
10518 { { 0, 1 } },
10519 .fill_helper = bpf_fill_jmp32_jset_imm,
10520 .nr_testruns = NR_PATTERN_RUNS,
10521 },
10522 {
10523 "JMP32_JEQ_K: all immediate value magnitudes",
10524 { },
10525 INTERNAL | FLAG_NO_DATA,
10526 { },
10527 { { 0, 1 } },
10528 .fill_helper = bpf_fill_jmp32_jeq_imm,
10529 .nr_testruns = NR_PATTERN_RUNS,
10530 },
10531 {
10532 "JMP32_JNE_K: all immediate value magnitudes",
10533 { },
10534 INTERNAL | FLAG_NO_DATA,
10535 { },
10536 { { 0, 1 } },
10537 .fill_helper = bpf_fill_jmp32_jne_imm,
10538 .nr_testruns = NR_PATTERN_RUNS,
10539 },
10540 {
10541 "JMP32_JGT_K: all immediate value magnitudes",
10542 { },
10543 INTERNAL | FLAG_NO_DATA,
10544 { },
10545 { { 0, 1 } },
10546 .fill_helper = bpf_fill_jmp32_jgt_imm,
10547 .nr_testruns = NR_PATTERN_RUNS,
10548 },
10549 {
10550 "JMP32_JGE_K: all immediate value magnitudes",
10551 { },
10552 INTERNAL | FLAG_NO_DATA,
10553 { },
10554 { { 0, 1 } },
10555 .fill_helper = bpf_fill_jmp32_jge_imm,
10556 .nr_testruns = NR_PATTERN_RUNS,
10557 },
10558 {
10559 "JMP32_JLT_K: all immediate value magnitudes",
10560 { },
10561 INTERNAL | FLAG_NO_DATA,
10562 { },
10563 { { 0, 1 } },
10564 .fill_helper = bpf_fill_jmp32_jlt_imm,
10565 .nr_testruns = NR_PATTERN_RUNS,
10566 },
10567 {
10568 "JMP32_JLE_K: all immediate value magnitudes",
10569 { },
10570 INTERNAL | FLAG_NO_DATA,
10571 { },
10572 { { 0, 1 } },
10573 .fill_helper = bpf_fill_jmp32_jle_imm,
10574 .nr_testruns = NR_PATTERN_RUNS,
10575 },
10576 {
10577 "JMP32_JSGT_K: all immediate value magnitudes",
10578 { },
10579 INTERNAL | FLAG_NO_DATA,
10580 { },
10581 { { 0, 1 } },
10582 .fill_helper = bpf_fill_jmp32_jsgt_imm,
10583 .nr_testruns = NR_PATTERN_RUNS,
10584 },
10585 {
10586 "JMP32_JSGE_K: all immediate value magnitudes",
10587 { },
10588 INTERNAL | FLAG_NO_DATA,
10589 { },
10590 { { 0, 1 } },
10591 .fill_helper = bpf_fill_jmp32_jsge_imm,
10592 .nr_testruns = NR_PATTERN_RUNS,
10593 },
10594 {
10595 "JMP32_JSLT_K: all immediate value magnitudes",
10596 { },
10597 INTERNAL | FLAG_NO_DATA,
10598 { },
10599 { { 0, 1 } },
10600 .fill_helper = bpf_fill_jmp32_jslt_imm,
10601 .nr_testruns = NR_PATTERN_RUNS,
10602 },
10603 {
10604 "JMP32_JSLE_K: all immediate value magnitudes",
10605 { },
10606 INTERNAL | FLAG_NO_DATA,
10607 { },
10608 { { 0, 1 } },
10609 .fill_helper = bpf_fill_jmp32_jsle_imm,
10610 .nr_testruns = NR_PATTERN_RUNS,
10611 },
10612 /* JMP32 register magnitudes */
10613 {
10614 "JMP32_JSET_X: all register value magnitudes",
10615 { },
10616 INTERNAL | FLAG_NO_DATA,
10617 { },
10618 { { 0, 1 } },
10619 .fill_helper = bpf_fill_jmp32_jset_reg,
10620 .nr_testruns = NR_PATTERN_RUNS,
10621 },
10622 {
10623 "JMP32_JEQ_X: all register value magnitudes",
10624 { },
10625 INTERNAL | FLAG_NO_DATA,
10626 { },
10627 { { 0, 1 } },
10628 .fill_helper = bpf_fill_jmp32_jeq_reg,
10629 .nr_testruns = NR_PATTERN_RUNS,
10630 },
10631 {
10632 "JMP32_JNE_X: all register value magnitudes",
10633 { },
10634 INTERNAL | FLAG_NO_DATA,
10635 { },
10636 { { 0, 1 } },
10637 .fill_helper = bpf_fill_jmp32_jne_reg,
10638 .nr_testruns = NR_PATTERN_RUNS,
10639 },
10640 {
10641 "JMP32_JGT_X: all register value magnitudes",
10642 { },
10643 INTERNAL | FLAG_NO_DATA,
10644 { },
10645 { { 0, 1 } },
10646 .fill_helper = bpf_fill_jmp32_jgt_reg,
10647 .nr_testruns = NR_PATTERN_RUNS,
10648 },
10649 {
10650 "JMP32_JGE_X: all register value magnitudes",
10651 { },
10652 INTERNAL | FLAG_NO_DATA,
10653 { },
10654 { { 0, 1 } },
10655 .fill_helper = bpf_fill_jmp32_jge_reg,
10656 .nr_testruns = NR_PATTERN_RUNS,
10657 },
10658 {
10659 "JMP32_JLT_X: all register value magnitudes",
10660 { },
10661 INTERNAL | FLAG_NO_DATA,
10662 { },
10663 { { 0, 1 } },
10664 .fill_helper = bpf_fill_jmp32_jlt_reg,
10665 .nr_testruns = NR_PATTERN_RUNS,
10666 },
10667 {
10668 "JMP32_JLE_X: all register value magnitudes",
10669 { },
10670 INTERNAL | FLAG_NO_DATA,
10671 { },
10672 { { 0, 1 } },
10673 .fill_helper = bpf_fill_jmp32_jle_reg,
10674 .nr_testruns = NR_PATTERN_RUNS,
10675 },
10676 {
10677 "JMP32_JSGT_X: all register value magnitudes",
10678 { },
10679 INTERNAL | FLAG_NO_DATA,
10680 { },
10681 { { 0, 1 } },
10682 .fill_helper = bpf_fill_jmp32_jsgt_reg,
10683 .nr_testruns = NR_PATTERN_RUNS,
10684 },
10685 {
10686 "JMP32_JSGE_X: all register value magnitudes",
10687 { },
10688 INTERNAL | FLAG_NO_DATA,
10689 { },
10690 { { 0, 1 } },
10691 .fill_helper = bpf_fill_jmp32_jsge_reg,
10692 .nr_testruns = NR_PATTERN_RUNS,
10693 },
10694 {
10695 "JMP32_JSLT_X: all register value magnitudes",
10696 { },
10697 INTERNAL | FLAG_NO_DATA,
10698 { },
10699 { { 0, 1 } },
10700 .fill_helper = bpf_fill_jmp32_jslt_reg,
10701 .nr_testruns = NR_PATTERN_RUNS,
10702 },
10703 {
10704 "JMP32_JSLE_X: all register value magnitudes",
10705 { },
10706 INTERNAL | FLAG_NO_DATA,
10707 { },
10708 { { 0, 1 } },
10709 .fill_helper = bpf_fill_jmp32_jsle_reg,
10710 .nr_testruns = NR_PATTERN_RUNS,
10711 },
Johan Almbladha7d2e752021-09-14 11:18:34 +020010712 /* Staggered jump sequences, immediate */
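	/*
	 * The staggered jump programs, built by the bpf_fill_staggered_*()
	 * helpers, chain conditional jumps that hop forwards and backwards
	 * across the program body. They are intended to exercise JIT branch
	 * offset handling for both short and long displacements.
	 */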
10713 {
10714 "Staggered jumps: JMP_JA",
10715 { },
10716 INTERNAL | FLAG_NO_DATA,
10717 { },
10718 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10719 .fill_helper = bpf_fill_staggered_ja,
10720 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10721 },
10722 {
10723 "Staggered jumps: JMP_JEQ_K",
10724 { },
10725 INTERNAL | FLAG_NO_DATA,
10726 { },
10727 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10728 .fill_helper = bpf_fill_staggered_jeq_imm,
10729 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10730 },
10731 {
10732 "Staggered jumps: JMP_JNE_K",
10733 { },
10734 INTERNAL | FLAG_NO_DATA,
10735 { },
10736 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10737 .fill_helper = bpf_fill_staggered_jne_imm,
10738 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10739 },
10740 {
10741 "Staggered jumps: JMP_JSET_K",
10742 { },
10743 INTERNAL | FLAG_NO_DATA,
10744 { },
10745 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10746 .fill_helper = bpf_fill_staggered_jset_imm,
10747 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10748 },
10749 {
10750 "Staggered jumps: JMP_JGT_K",
10751 { },
10752 INTERNAL | FLAG_NO_DATA,
10753 { },
10754 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10755 .fill_helper = bpf_fill_staggered_jgt_imm,
10756 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10757 },
10758 {
10759 "Staggered jumps: JMP_JGE_K",
10760 { },
10761 INTERNAL | FLAG_NO_DATA,
10762 { },
10763 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10764 .fill_helper = bpf_fill_staggered_jge_imm,
10765 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10766 },
10767 {
10768 "Staggered jumps: JMP_JLT_K",
10769 { },
10770 INTERNAL | FLAG_NO_DATA,
10771 { },
10772 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10773 .fill_helper = bpf_fill_staggered_jlt_imm,
10774 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10775 },
10776 {
10777 "Staggered jumps: JMP_JLE_K",
10778 { },
10779 INTERNAL | FLAG_NO_DATA,
10780 { },
10781 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10782 .fill_helper = bpf_fill_staggered_jle_imm,
10783 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10784 },
10785 {
10786 "Staggered jumps: JMP_JSGT_K",
10787 { },
10788 INTERNAL | FLAG_NO_DATA,
10789 { },
10790 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10791 .fill_helper = bpf_fill_staggered_jsgt_imm,
10792 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10793 },
10794 {
10795 "Staggered jumps: JMP_JSGE_K",
10796 { },
10797 INTERNAL | FLAG_NO_DATA,
10798 { },
10799 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10800 .fill_helper = bpf_fill_staggered_jsge_imm,
10801 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10802 },
10803 {
10804 "Staggered jumps: JMP_JSLT_K",
10805 { },
10806 INTERNAL | FLAG_NO_DATA,
10807 { },
10808 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10809 .fill_helper = bpf_fill_staggered_jslt_imm,
10810 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10811 },
10812 {
10813 "Staggered jumps: JMP_JSLE_K",
10814 { },
10815 INTERNAL | FLAG_NO_DATA,
10816 { },
10817 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10818 .fill_helper = bpf_fill_staggered_jsle_imm,
10819 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10820 },
10821 /* Staggered jump sequences, register */
10822 {
10823 "Staggered jumps: JMP_JEQ_X",
10824 { },
10825 INTERNAL | FLAG_NO_DATA,
10826 { },
10827 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10828 .fill_helper = bpf_fill_staggered_jeq_reg,
10829 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10830 },
10831 {
10832 "Staggered jumps: JMP_JNE_X",
10833 { },
10834 INTERNAL | FLAG_NO_DATA,
10835 { },
10836 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10837 .fill_helper = bpf_fill_staggered_jne_reg,
10838 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10839 },
10840 {
10841 "Staggered jumps: JMP_JSET_X",
10842 { },
10843 INTERNAL | FLAG_NO_DATA,
10844 { },
10845 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10846 .fill_helper = bpf_fill_staggered_jset_reg,
10847 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10848 },
10849 {
10850 "Staggered jumps: JMP_JGT_X",
10851 { },
10852 INTERNAL | FLAG_NO_DATA,
10853 { },
10854 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10855 .fill_helper = bpf_fill_staggered_jgt_reg,
10856 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10857 },
10858 {
10859 "Staggered jumps: JMP_JGE_X",
10860 { },
10861 INTERNAL | FLAG_NO_DATA,
10862 { },
10863 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10864 .fill_helper = bpf_fill_staggered_jge_reg,
10865 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10866 },
10867 {
10868 "Staggered jumps: JMP_JLT_X",
10869 { },
10870 INTERNAL | FLAG_NO_DATA,
10871 { },
10872 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10873 .fill_helper = bpf_fill_staggered_jlt_reg,
10874 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10875 },
10876 {
10877 "Staggered jumps: JMP_JLE_X",
10878 { },
10879 INTERNAL | FLAG_NO_DATA,
10880 { },
10881 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10882 .fill_helper = bpf_fill_staggered_jle_reg,
10883 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10884 },
10885 {
10886 "Staggered jumps: JMP_JSGT_X",
10887 { },
10888 INTERNAL | FLAG_NO_DATA,
10889 { },
10890 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10891 .fill_helper = bpf_fill_staggered_jsgt_reg,
10892 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10893 },
10894 {
10895 "Staggered jumps: JMP_JSGE_X",
10896 { },
10897 INTERNAL | FLAG_NO_DATA,
10898 { },
10899 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10900 .fill_helper = bpf_fill_staggered_jsge_reg,
10901 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10902 },
10903 {
10904 "Staggered jumps: JMP_JSLT_X",
10905 { },
10906 INTERNAL | FLAG_NO_DATA,
10907 { },
10908 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10909 .fill_helper = bpf_fill_staggered_jslt_reg,
10910 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10911 },
10912 {
10913 "Staggered jumps: JMP_JSLE_X",
10914 { },
10915 INTERNAL | FLAG_NO_DATA,
10916 { },
10917 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10918 .fill_helper = bpf_fill_staggered_jsle_reg,
10919 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10920 },
10921 /* Staggered jump sequences, JMP32 immediate */
10922 {
10923 "Staggered jumps: JMP32_JEQ_K",
10924 { },
10925 INTERNAL | FLAG_NO_DATA,
10926 { },
10927 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10928 .fill_helper = bpf_fill_staggered_jeq32_imm,
10929 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10930 },
10931 {
10932 "Staggered jumps: JMP32_JNE_K",
10933 { },
10934 INTERNAL | FLAG_NO_DATA,
10935 { },
10936 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10937 .fill_helper = bpf_fill_staggered_jne32_imm,
10938 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10939 },
10940 {
10941 "Staggered jumps: JMP32_JSET_K",
10942 { },
10943 INTERNAL | FLAG_NO_DATA,
10944 { },
10945 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10946 .fill_helper = bpf_fill_staggered_jset32_imm,
10947 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10948 },
10949 {
10950 "Staggered jumps: JMP32_JGT_K",
10951 { },
10952 INTERNAL | FLAG_NO_DATA,
10953 { },
10954 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10955 .fill_helper = bpf_fill_staggered_jgt32_imm,
10956 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10957 },
10958 {
10959 "Staggered jumps: JMP32_JGE_K",
10960 { },
10961 INTERNAL | FLAG_NO_DATA,
10962 { },
10963 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10964 .fill_helper = bpf_fill_staggered_jge32_imm,
10965 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10966 },
10967 {
10968 "Staggered jumps: JMP32_JLT_K",
10969 { },
10970 INTERNAL | FLAG_NO_DATA,
10971 { },
10972 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10973 .fill_helper = bpf_fill_staggered_jlt32_imm,
10974 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10975 },
10976 {
10977 "Staggered jumps: JMP32_JLE_K",
10978 { },
10979 INTERNAL | FLAG_NO_DATA,
10980 { },
10981 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10982 .fill_helper = bpf_fill_staggered_jle32_imm,
10983 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10984 },
10985 {
10986 "Staggered jumps: JMP32_JSGT_K",
10987 { },
10988 INTERNAL | FLAG_NO_DATA,
10989 { },
10990 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
10991 .fill_helper = bpf_fill_staggered_jsgt32_imm,
10992 .nr_testruns = NR_STAGGERED_JMP_RUNS,
10993 },
10994 {
10995 "Staggered jumps: JMP32_JSGE_K",
10996 { },
10997 INTERNAL | FLAG_NO_DATA,
10998 { },
10999 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11000 .fill_helper = bpf_fill_staggered_jsge32_imm,
11001 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11002 },
11003 {
11004 "Staggered jumps: JMP32_JSLT_K",
11005 { },
11006 INTERNAL | FLAG_NO_DATA,
11007 { },
11008 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11009 .fill_helper = bpf_fill_staggered_jslt32_imm,
11010 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11011 },
11012 {
11013 "Staggered jumps: JMP32_JSLE_K",
11014 { },
11015 INTERNAL | FLAG_NO_DATA,
11016 { },
11017 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11018 .fill_helper = bpf_fill_staggered_jsle32_imm,
11019 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11020 },
11021 /* Staggered jump sequences, JMP32 register */
11022 {
11023 "Staggered jumps: JMP32_JEQ_X",
11024 { },
11025 INTERNAL | FLAG_NO_DATA,
11026 { },
11027 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11028 .fill_helper = bpf_fill_staggered_jeq32_reg,
11029 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11030 },
11031 {
11032 "Staggered jumps: JMP32_JNE_X",
11033 { },
11034 INTERNAL | FLAG_NO_DATA,
11035 { },
11036 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11037 .fill_helper = bpf_fill_staggered_jne32_reg,
11038 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11039 },
11040 {
11041 "Staggered jumps: JMP32_JSET_X",
11042 { },
11043 INTERNAL | FLAG_NO_DATA,
11044 { },
11045 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11046 .fill_helper = bpf_fill_staggered_jset32_reg,
11047 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11048 },
11049 {
11050 "Staggered jumps: JMP32_JGT_X",
11051 { },
11052 INTERNAL | FLAG_NO_DATA,
11053 { },
11054 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11055 .fill_helper = bpf_fill_staggered_jgt32_reg,
11056 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11057 },
11058 {
11059 "Staggered jumps: JMP32_JGE_X",
11060 { },
11061 INTERNAL | FLAG_NO_DATA,
11062 { },
11063 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11064 .fill_helper = bpf_fill_staggered_jge32_reg,
11065 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11066 },
11067 {
11068 "Staggered jumps: JMP32_JLT_X",
11069 { },
11070 INTERNAL | FLAG_NO_DATA,
11071 { },
11072 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11073 .fill_helper = bpf_fill_staggered_jlt32_reg,
11074 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11075 },
11076 {
11077 "Staggered jumps: JMP32_JLE_X",
11078 { },
11079 INTERNAL | FLAG_NO_DATA,
11080 { },
11081 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11082 .fill_helper = bpf_fill_staggered_jle32_reg,
11083 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11084 },
11085 {
11086 "Staggered jumps: JMP32_JSGT_X",
11087 { },
11088 INTERNAL | FLAG_NO_DATA,
11089 { },
11090 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11091 .fill_helper = bpf_fill_staggered_jsgt32_reg,
11092 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11093 },
11094 {
11095 "Staggered jumps: JMP32_JSGE_X",
11096 { },
11097 INTERNAL | FLAG_NO_DATA,
11098 { },
11099 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11100 .fill_helper = bpf_fill_staggered_jsge32_reg,
11101 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11102 },
11103 {
11104 "Staggered jumps: JMP32_JSLT_X",
11105 { },
11106 INTERNAL | FLAG_NO_DATA,
11107 { },
11108 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11109 .fill_helper = bpf_fill_staggered_jslt32_reg,
11110 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11111 },
11112 {
11113 "Staggered jumps: JMP32_JSLE_X",
11114 { },
11115 INTERNAL | FLAG_NO_DATA,
11116 { },
11117 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
11118 .fill_helper = bpf_fill_staggered_jsle32_reg,
11119 .nr_testruns = NR_STAGGERED_JMP_RUNS,
11120 },
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011121};
11122
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011123static struct net_device dev;
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011124
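/*
 * Build a fake skb whose metadata fields (pkt_type, mark, hash,
 * queue_mapping, VLAN and device info) are set to fixed, well-known
 * values, so that test programs reading skb fields can be checked
 * against known results.
 */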
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011125static struct sk_buff *populate_skb(char *buf, int size)
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011126{
11127 struct sk_buff *skb;
11128
11129 if (size >= MAX_DATA)
11130 return NULL;
11131
11132 skb = alloc_skb(MAX_DATA, GFP_KERNEL);
11133 if (!skb)
11134 return NULL;
11135
yuan linyude77b962017-06-18 22:48:17 +080011136 __skb_put_data(skb, buf, size);
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011137
11138 /* Initialize a fake skb with test pattern. */
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011139 skb_reset_mac_header(skb);
11140 skb->protocol = htons(ETH_P_IP);
11141 skb->pkt_type = SKB_TYPE;
11142 skb->mark = SKB_MARK;
11143 skb->hash = SKB_HASH;
11144 skb->queue_mapping = SKB_QUEUE_MAP;
11145 skb->vlan_tci = SKB_VLAN_TCI;
Michał Mirosław0c4b2d32018-11-10 19:58:36 +010011146 skb->vlan_present = SKB_VLAN_PRESENT;
Jakub Kicinski5c0ca3f2016-09-12 13:04:57 +010011147 skb->vlan_proto = htons(ETH_P_IP);
Song Liu100811932018-09-27 09:34:41 -070011148 dev_net_set(&dev, &init_net);
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011149 skb->dev = &dev;
11150 skb->dev->ifindex = SKB_DEV_IFINDEX;
11151 skb->dev->type = SKB_DEV_TYPE;
11152 skb_set_network_header(skb, min(size, ETH_HLEN));
11153
11154 return skb;
11155}
11156
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011157static void *generate_test_data(struct bpf_test *test, int sub)
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011158{
Nicolas Schichanbac142a2015-08-04 15:19:08 +020011159 struct sk_buff *skb;
11160 struct page *page;
11161
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011162 if (test->aux & FLAG_NO_DATA)
11163 return NULL;
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011164
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011165 /* Test case expects an skb, so populate one. Various
11166 * subtests generate skbs of different sizes based on
11167 * the same data.
11168 */
Nicolas Schichanbac142a2015-08-04 15:19:08 +020011169 skb = populate_skb(test->data, test->test[sub].data_size);
11170 if (!skb)
11171 return NULL;
11172
11173 if (test->aux & FLAG_SKB_FRAG) {
11174 /*
11175 * when the test requires a fragmented skb, add a
11176 * single fragment to the skb, filled with
11177 * test->frag_data.
11178 */
11179 void *ptr;
11180
11181 page = alloc_page(GFP_KERNEL);
11182
11183 if (!page)
11184 goto err_kfree_skb;
11185
11186 ptr = kmap(page);
11187 if (!ptr)
11188 goto err_free_page;
11189 memcpy(ptr, test->frag_data, MAX_DATA);
11190 kunmap(page);
11191 skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
11192 }
11193
11194 return skb;
11195
11196err_free_page:
11197 __free_page(page);
11198err_kfree_skb:
11199 kfree_skb(skb);
11200 return NULL;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011201}
11202
11203static void release_test_data(const struct bpf_test *test, void *data)
11204{
11205 if (test->aux & FLAG_NO_DATA)
11206 return;
11207
11208 kfree_skb(data);
11209}
11210
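/*
 * A test program either comes from the statically initialized insns
 * array or from a buffer generated by a fill_helper. For static classic
 * filters, the length is found by scanning backwards for the last
 * non-zero instruction.
 */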
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011211static int filter_length(int which)
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011212{
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011213 struct sock_filter *fp;
11214 int len;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011215
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011216 if (tests[which].fill_helper)
11217 return tests[which].u.ptr.len;
11218
11219 fp = tests[which].u.insns;
Chema Gonzaleze9d94502014-05-30 10:15:12 -070011220 for (len = MAX_INSNS - 1; len > 0; --len)
11221 if (fp[len].code != 0 || fp[len].k != 0)
11222 break;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011223
Chema Gonzaleze9d94502014-05-30 10:15:12 -070011224 return len + 1;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011225}
11226
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011227static void *filter_pointer(int which)
11228{
11229 if (tests[which].fill_helper)
11230 return tests[which].u.ptr.insns;
11231 else
11232 return tests[which].u.insns;
11233}
11234
Alexei Starovoitov7ae457c2014-07-30 20:34:16 -070011235static struct bpf_prog *generate_filter(int which, int *err)
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011236{
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011237 __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011238 unsigned int flen = filter_length(which);
11239 void *fptr = filter_pointer(which);
11240 struct sock_fprog_kern fprog;
11241 struct bpf_prog *fp;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011242
11243 switch (test_type) {
11244 case CLASSIC:
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011245 fprog.filter = fptr;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011246 fprog.len = flen;
11247
Alexei Starovoitov7ae457c2014-07-30 20:34:16 -070011248 *err = bpf_prog_create(&fp, &fprog);
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011249 if (tests[which].aux & FLAG_EXPECTED_FAIL) {
Yonghong Song09584b42018-02-02 22:37:15 -080011250 if (*err == tests[which].expected_errcode) {
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011251 pr_cont("PASS\n");
11252 /* Verifier rejected filter as expected. */
11253 *err = 0;
11254 return NULL;
11255 } else {
11256 pr_cont("UNEXPECTED_PASS\n");
11257			/* Verifier did not reject a filter that was
11258			 * expected to fail; report the error and return.
11259 */
11260 *err = -EINVAL;
11261 return NULL;
11262 }
11263 }
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011264 if (*err) {
Alexei Starovoitov290af862018-01-09 10:04:29 -080011265 pr_cont("FAIL to prog_create err=%d len=%d\n",
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011266 *err, fprog.len);
11267 return NULL;
11268 }
11269 break;
11270
11271 case INTERNAL:
Daniel Borkmann60a3b222014-09-02 22:53:44 +020011272 fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011273 if (fp == NULL) {
11274 pr_cont("UNEXPECTED_FAIL no memory left\n");
11275 *err = -ENOMEM;
11276 return NULL;
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011277 }
11278
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011279 fp->len = flen;
Daniel Borkmann4962fa12015-07-30 12:42:46 +020011280 /* Type doesn't really matter here as long as it's not unspec. */
11281 fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011282 memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
Alexei Starovoitov105c0362017-05-30 13:31:32 -070011283 fp->aux->stack_depth = tests[which].stack_depth;
Johan Almbladh27cc6da2021-09-14 11:18:36 +020011284 fp->aux->verifier_zext = !!(tests[which].aux &
11285 FLAG_VERIFIER_ZEXT);
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011286
Daniel Borkmannd1c55ab2016-05-13 19:08:31 +020011287 /* We cannot error here as we don't need type compatibility
11288 * checks.
11289 */
11290 fp = bpf_prog_select_runtime(fp, err);
Alexei Starovoitov290af862018-01-09 10:04:29 -080011291 if (*err) {
11292 pr_cont("FAIL to select_runtime err=%d\n", *err);
11293 return NULL;
11294 }
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011295 break;
11296 }
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011297
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011298 *err = 0;
11299 return fp;
11300}
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011301
Alexei Starovoitov7ae457c2014-07-30 20:34:16 -070011302static void release_filter(struct bpf_prog *fp, int which)
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011303{
11304 __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011305
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011306 switch (test_type) {
11307 case CLASSIC:
Alexei Starovoitov7ae457c2014-07-30 20:34:16 -070011308 bpf_prog_destroy(fp);
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011309 break;
11310 case INTERNAL:
Alexei Starovoitov7ae457c2014-07-30 20:34:16 -070011311 bpf_prog_free(fp);
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011312 break;
11313 }
11314}
11315
Alexei Starovoitov7ae457c2014-07-30 20:34:16 -070011316static int __run_one(const struct bpf_prog *fp, const void *data,
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011317 int runs, u64 *duration)
11318{
11319 u64 start, finish;
Alexei Starovoitov25ee7322014-09-19 13:53:51 -070011320 int ret = 0, i;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011321
David Miller6eac7792020-02-24 15:01:44 +010011322 migrate_disable();
Alexei Starovoitov4d9c5c52015-07-20 20:34:19 -070011323 start = ktime_get_ns();
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011324
11325 for (i = 0; i < runs; i++)
Andrii Nakryikofb7dd8b2021-08-15 00:05:54 -070011326 ret = bpf_prog_run(fp, data);
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011327
Alexei Starovoitov4d9c5c52015-07-20 20:34:19 -070011328 finish = ktime_get_ns();
David Miller6eac7792020-02-24 15:01:44 +010011329 migrate_enable();
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011330
Alexei Starovoitov4d9c5c52015-07-20 20:34:19 -070011331 *duration = finish - start;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011332 do_div(*duration, runs);
11333
11334 return ret;
11335}
11336
Alexei Starovoitov7ae457c2014-07-30 20:34:16 -070011337static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011338{
11339 int err_cnt = 0, i, runs = MAX_TESTRUNS;
11340
Johan Almbladhc2a228d2021-09-14 11:18:29 +020011341 if (test->nr_testruns)
11342 runs = min(test->nr_testruns, MAX_TESTRUNS);
11343
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011344 for (i = 0; i < MAX_SUBTESTS; i++) {
11345 void *data;
11346 u64 duration;
11347 u32 ret;
11348
Johan Almbladh2b7e9f22021-07-21 12:38:22 +020011349 /*
11350 * NOTE: Several sub-tests may be present, in which case
11351 * a zero {data_size, result} tuple indicates the end of
11352 * the sub-test array. The first test is always run,
11353 * even if both data_size and result happen to be zero.
11354 */
11355 if (i > 0 &&
11356 test->test[i].data_size == 0 &&
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011357 test->test[i].result == 0)
11358 break;
11359
11360 data = generate_test_data(test, i);
Nicolas Schichane34684f2015-08-04 15:19:07 +020011361 if (!data && !(test->aux & FLAG_NO_DATA)) {
11362 pr_cont("data generation failed ");
11363 err_cnt++;
11364 break;
11365 }
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011366 ret = __run_one(fp, data, runs, &duration);
11367 release_test_data(test, data);
11368
11369 if (ret == test->test[i].result) {
11370 pr_cont("%lld ", duration);
11371 } else {
11372 pr_cont("ret %d != %d ", ret,
11373 test->test[i].result);
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011374 err_cnt++;
11375 }
11376 }
11377
11378 return err_cnt;
11379}
11380
Nicolas Schichand2648d42015-08-04 15:19:10 +020011381static char test_name[64];
11382module_param_string(test_name, test_name, sizeof(test_name), 0);
11383
11384static int test_id = -1;
11385module_param(test_id, int, 0);
11386
11387static int test_range[2] = { 0, ARRAY_SIZE(tests) - 1 };
11388module_param_array(test_range, int, NULL, 0);
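/*
 * Example usage (a sketch; the parameters are those declared above):
 *   insmod test_bpf.ko test_id=25
 *   insmod test_bpf.ko test_name="ADD default X"
 *   insmod test_bpf.ko test_range=10,20
 * With no parameters, all tests in the array are run.
 */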
11389
11390static __init int find_test_index(const char *test_name)
11391{
11392 int i;
11393
11394 for (i = 0; i < ARRAY_SIZE(tests); i++) {
11395 if (!strcmp(tests[i].descr, test_name))
11396 return i;
11397 }
11398 return -1;
11399}
11400
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011401static __init int prepare_bpf_tests(void)
11402{
Nicolas Schichand2648d42015-08-04 15:19:10 +020011403 if (test_id >= 0) {
11404 /*
11405 * if a test_id was specified, use test_range to
11406 * cover only that test.
11407 */
11408 if (test_id >= ARRAY_SIZE(tests)) {
11409 pr_err("test_bpf: invalid test_id specified.\n");
11410 return -EINVAL;
11411 }
11412
11413 test_range[0] = test_id;
11414 test_range[1] = test_id;
11415 } else if (*test_name) {
11416 /*
11417 * if a test_name was specified, find it and setup
11418 * test_range to cover only that test.
11419 */
11420 int idx = find_test_index(test_name);
11421
11422 if (idx < 0) {
11423 pr_err("test_bpf: no test named '%s' found.\n",
11424 test_name);
11425 return -EINVAL;
11426 }
11427 test_range[0] = idx;
11428 test_range[1] = idx;
11429 } else {
11430 /*
11431 * check that the supplied test_range is valid.
11432 */
11433 if (test_range[0] >= ARRAY_SIZE(tests) ||
11434 test_range[1] >= ARRAY_SIZE(tests) ||
11435 test_range[0] < 0 || test_range[1] < 0) {
11436 pr_err("test_bpf: test_range is out of bound.\n");
11437 return -EINVAL;
11438 }
11439
11440 if (test_range[1] < test_range[0]) {
11441 pr_err("test_bpf: test_range is ending before it starts.\n");
11442 return -EINVAL;
11443 }
11444 }
11445
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011446 return 0;
11447}
11448
11449static __init void destroy_bpf_tests(void)
11450{
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011451}
11452
Nicolas Schichand2648d42015-08-04 15:19:10 +020011453static bool exclude_test(int test_id)
11454{
11455 return test_id < test_range[0] || test_id > test_range[1];
11456}
11457
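/*
 * Construct a GSO skb for the skb_segment() tests below: two small skbs
 * linked via frag_list, each with one page fragment attached and
 * head_frag set.
 */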
Yonghong Song76db8082018-03-21 16:31:04 -070011458static __init struct sk_buff *build_test_skb(void)
11459{
11460 u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
11461 struct sk_buff *skb[2];
11462 struct page *page[2];
11463 int i, data_size = 8;
11464
11465 for (i = 0; i < 2; i++) {
11466 page[i] = alloc_page(GFP_KERNEL);
11467 if (!page[i]) {
11468 if (i == 0)
11469 goto err_page0;
11470 else
11471 goto err_page1;
11472 }
11473
11474 /* this will set skb[i]->head_frag */
11475 skb[i] = dev_alloc_skb(headroom + data_size);
11476 if (!skb[i]) {
11477 if (i == 0)
11478 goto err_skb0;
11479 else
11480 goto err_skb1;
11481 }
11482
11483 skb_reserve(skb[i], headroom);
11484 skb_put(skb[i], data_size);
11485 skb[i]->protocol = htons(ETH_P_IP);
11486 skb_reset_network_header(skb[i]);
11487 skb_set_mac_header(skb[i], -ETH_HLEN);
11488
11489 skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
11490 // skb_headlen(skb[i]): 8, skb[i]->head_frag = 1
11491 }
11492
11493 /* setup shinfo */
11494 skb_shinfo(skb[0])->gso_size = 1448;
11495 skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
11496 skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
11497 skb_shinfo(skb[0])->gso_segs = 0;
11498 skb_shinfo(skb[0])->frag_list = skb[1];
Vadim Fedorenko3384c7c2021-09-10 01:04:09 +030011499 skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
Yonghong Song76db8082018-03-21 16:31:04 -070011500
11501 /* adjust skb[0]'s len */
11502 skb[0]->len += skb[1]->len;
11503 skb[0]->data_len += skb[1]->data_len;
11504 skb[0]->truesize += skb[1]->truesize;
11505
11506 return skb[0];
11507
11508err_skb1:
11509 __free_page(page[1]);
11510err_page1:
11511 kfree_skb(skb[0]);
11512err_skb0:
11513 __free_page(page[0]);
11514err_page0:
11515 return NULL;
11516}
11517
Shmulik Ladkanicf204a72019-10-25 16:42:23 +030011518static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
11519{
11520 unsigned int alloc_size = 2000;
11521 unsigned int headroom = 102, doffset = 72, data_size = 1308;
11522 struct sk_buff *skb[2];
11523 int i;
11524
11525 /* skbs linked in a frag_list, both with linear data, with head_frag=0
11526 * (data allocated by kmalloc), both have tcp data of 1308 bytes
11527 * (total payload is 2616 bytes).
11528 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
11529 */
11530 for (i = 0; i < 2; i++) {
11531 skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
11532 if (!skb[i]) {
11533 if (i == 0)
11534 goto err_skb0;
11535 else
11536 goto err_skb1;
11537 }
11538
11539 skb[i]->protocol = htons(ETH_P_IPV6);
11540 skb_reserve(skb[i], headroom);
11541 skb_put(skb[i], doffset + data_size);
11542 skb_reset_network_header(skb[i]);
11543 if (i == 0)
11544 skb_reset_mac_header(skb[i]);
11545 else
11546 skb_set_mac_header(skb[i], -ETH_HLEN);
11547 __skb_pull(skb[i], doffset);
11548 }
11549
11550 /* setup shinfo.
11551 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
11552 * reduced gso_size.
11553 */
11554 skb_shinfo(skb[0])->gso_size = 1288;
11555 skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
11556 skb_shinfo(skb[0])->gso_segs = 0;
11557 skb_shinfo(skb[0])->frag_list = skb[1];
11558
11559 /* adjust skb[0]'s len */
11560 skb[0]->len += skb[1]->len;
11561 skb[0]->data_len += skb[1]->len;
11562 skb[0]->truesize += skb[1]->truesize;
11563
11564 return skb[0];
11565
11566err_skb1:
11567 kfree_skb(skb[0]);
11568err_skb0:
11569 return NULL;
11570}
11571
Shmulik Ladkaniaf21c712019-10-25 16:42:22 +030011572struct skb_segment_test {
11573 const char *descr;
11574 struct sk_buff *(*build_skb)(void);
Yonghong Song76db8082018-03-21 16:31:04 -070011575 netdev_features_t features;
Shmulik Ladkaniaf21c712019-10-25 16:42:22 +030011576};
11577
11578static struct skb_segment_test skb_segment_tests[] __initconst = {
11579 {
11580 .descr = "gso_with_rx_frags",
11581 .build_skb = build_test_skb,
11582 .features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
11583 NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
Shmulik Ladkanicf204a72019-10-25 16:42:23 +030011584 },
11585 {
11586 .descr = "gso_linear_no_head_frag",
11587 .build_skb = build_test_skb_linear_no_head_frag,
11588 .features = NETIF_F_SG | NETIF_F_FRAGLIST |
11589 NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
11590 NETIF_F_LLTX_BIT | NETIF_F_GRO |
11591 NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
11592 NETIF_F_HW_VLAN_STAG_TX_BIT
Shmulik Ladkaniaf21c712019-10-25 16:42:22 +030011593 }
11594};
11595
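/*
 * Build the skb described by the test case and feed it to skb_segment()
 * with the given feature set; the test passes as long as segmentation
 * completes without returning an error.
 */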
11596static __init int test_skb_segment_single(const struct skb_segment_test *test)
11597{
Yonghong Song76db8082018-03-21 16:31:04 -070011598 struct sk_buff *skb, *segs;
11599 int ret = -1;
11600
Shmulik Ladkaniaf21c712019-10-25 16:42:22 +030011601 skb = test->build_skb();
Yonghong Song76db8082018-03-21 16:31:04 -070011602 if (!skb) {
11603		pr_info("%s: failed to build test skb", __func__);
11604 goto done;
11605 }
11606
Shmulik Ladkaniaf21c712019-10-25 16:42:22 +030011607 segs = skb_segment(skb, test->features);
Dan Carpenter99fe29d2018-03-28 14:48:36 +030011608 if (!IS_ERR(segs)) {
Yonghong Song76db8082018-03-21 16:31:04 -070011609 kfree_skb_list(segs);
11610 ret = 0;
Yonghong Song76db8082018-03-21 16:31:04 -070011611 }
11612 kfree_skb(skb);
11613done:
11614 return ret;
11615}
11616
Shmulik Ladkaniaf21c712019-10-25 16:42:22 +030011617static __init int test_skb_segment(void)
11618{
11619 int i, err_cnt = 0, pass_cnt = 0;
11620
11621 for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
11622 const struct skb_segment_test *test = &skb_segment_tests[i];
11623
11624 pr_info("#%d %s ", i, test->descr);
11625
11626 if (test_skb_segment_single(test)) {
11627 pr_cont("FAIL\n");
11628 err_cnt++;
11629 } else {
11630 pr_cont("PASS\n");
11631 pass_cnt++;
11632 }
11633 }
11634
11635 pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
11636 pass_cnt, err_cnt);
11637 return err_cnt ? -EINVAL : 0;
11638}
11639
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011640static __init int test_bpf(void)
11641{
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011642 int i, err_cnt = 0, pass_cnt = 0;
Daniel Borkmann327941f2015-04-30 16:17:27 +020011643 int jit_cnt = 0, run_cnt = 0;
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011644
11645 for (i = 0; i < ARRAY_SIZE(tests); i++) {
Alexei Starovoitov7ae457c2014-07-30 20:34:16 -070011646 struct bpf_prog *fp;
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011647 int err;
11648
Eric Dumazetd40bc962018-02-26 10:52:46 -080011649 cond_resched();
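		/*
		 * exclude_test() is defined earlier in this file; it filters
		 * the test set according to module parameters (test_name,
		 * test_id and test_range in the mainline module), so single
		 * tests or ranges can be run in isolation.
		 */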
Nicolas Schichand2648d42015-08-04 15:19:10 +020011650 if (exclude_test(i))
11651 continue;
11652
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011653 pr_info("#%d %s ", i, tests[i].descr);
11654
Johan Almbladh4bc35412021-09-14 11:18:30 +020011655 if (tests[i].fill_helper &&
11656 tests[i].fill_helper(&tests[i]) < 0) {
11657 pr_cont("FAIL to prog_fill\n");
11658 continue;
11659 }
11660
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011661 fp = generate_filter(i, &err);
Johan Almbladh4bc35412021-09-14 11:18:30 +020011662
11663 if (tests[i].fill_helper) {
11664 kfree(tests[i].u.ptr.insns);
11665 tests[i].u.ptr.insns = NULL;
11666 }
11667
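		/*
		 * generate_filter() returns a NULL program with err == 0
		 * when a test flagged FLAG_EXPECTED_FAIL was rejected as
		 * intended, so that case is counted as a pass.
		 */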
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011668 if (fp == NULL) {
11669 if (err == 0) {
11670 pass_cnt++;
11671 continue;
11672 }
Alexei Starovoitov290af862018-01-09 10:04:29 -080011673 err_cnt++;
11674 continue;
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011675 }
Daniel Borkmann327941f2015-04-30 16:17:27 +020011676
11677 pr_cont("jited:%u ", fp->jited);
11678
11679 run_cnt++;
11680 if (fp->jited)
11681 jit_cnt++;
11682
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011683 err = run_one(fp, &tests[i]);
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011684 release_filter(fp, i);
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011685
11686 if (err) {
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011687 pr_cont("FAIL (%d times)\n", err);
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011688 err_cnt++;
11689 } else {
11690 pr_cont("PASS\n");
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011691 pass_cnt++;
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011692 }
11693 }
11694
Daniel Borkmann327941f2015-04-30 16:17:27 +020011695 pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
11696 pass_cnt, err_cnt, jit_cnt, run_cnt);
11697
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011698 return err_cnt ? -EINVAL : 0;
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011699}
11700
Johan Almbladh874be052021-08-09 11:18:29 +020011701struct tail_call_test {
11702 const char *descr;
11703 struct bpf_insn insns[MAX_INSNS];
11704 int result;
11705 int stack_depth;
11706};
11707
11708/*
11709 * Magic marker used in test snippets for tail calls below.
11710 * BPF_LD/MOV to R2 and R3 with this immediate value is replaced
11711 * with the proper values by the test runner.
11712 */
11713#define TAIL_CALL_MARKER 0x7a11ca11
11714
11715/* Special offset to indicate a NULL call target */
11716#define TAIL_CALL_NULL 0x7fff
11717
11718/* Special offset to indicate an out-of-range index */
11719#define TAIL_CALL_INVALID 0x7ffe
11720
11721#define TAIL_CALL(offset) \
11722 BPF_LD_IMM64(R2, TAIL_CALL_MARKER), \
11723 BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
11724 offset, TAIL_CALL_MARKER), \
11725 BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
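/*
 * For reference, TAIL_CALL(offset) expands to three instructions:
 *
 *	BPF_LD_IMM64(R2, TAIL_CALL_MARKER)            <- patched to R2 = &progs
 *	BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K,
 *		     R3, 0, offset, TAIL_CALL_MARKER) <- patched to R3 = index
 *	BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)           <- bpf_tail_call(ctx, R2, R3)
 *
 * prepare_tail_call_tests() below rewrites both TAIL_CALL_MARKER
 * immediates with the real map pointer and program index.
 */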
11726
11727/*
11728 * Tail call tests. Each test case may call any other test in the table,
11729 * including itself, specified as a relative index offset from the calling
11730 * test. The index TAIL_CALL_NULL can be used to specify a NULL target
11731 * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
11732 * results in a target index that is out of range.
11733 */
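/*
 * Worked example for the expected results below (the runner passes a NULL
 * context, so R1 starts out as 0):
 *
 *	"Tail call leaf": R0 = R1 + 1                     -> 1
 *	"Tail call 2":    R1 += 2, then leaf: R0 = 2 + 1  -> 3
 *	"Tail call 3":    R1 += 3, then "2", then leaf    -> 6
 *	"Tail call 4":    R1 += 4, then "3", "2", leaf    -> 10
 *
 * The max-count test tail-calls itself, bumping R1 on every pass; once the
 * tail-call limit is exhausted the call falls through to the exit, so R0
 * ends up as MAX_TAIL_CALL_CNT + 1. The NULL and out-of-range cases fall
 * through immediately, leaving R0 = 1.
 */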
11734static struct tail_call_test tail_call_tests[] = {
11735 {
11736 "Tail call leaf",
11737 .insns = {
11738 BPF_ALU64_REG(BPF_MOV, R0, R1),
11739 BPF_ALU64_IMM(BPF_ADD, R0, 1),
11740 BPF_EXIT_INSN(),
11741 },
11742 .result = 1,
11743 },
11744 {
11745 "Tail call 2",
11746 .insns = {
11747 BPF_ALU64_IMM(BPF_ADD, R1, 2),
11748 TAIL_CALL(-1),
11749 BPF_ALU64_IMM(BPF_MOV, R0, -1),
11750 BPF_EXIT_INSN(),
11751 },
11752 .result = 3,
11753 },
11754 {
11755 "Tail call 3",
11756 .insns = {
11757 BPF_ALU64_IMM(BPF_ADD, R1, 3),
11758 TAIL_CALL(-1),
11759 BPF_ALU64_IMM(BPF_MOV, R0, -1),
11760 BPF_EXIT_INSN(),
11761 },
11762 .result = 6,
11763 },
11764 {
11765 "Tail call 4",
11766 .insns = {
11767 BPF_ALU64_IMM(BPF_ADD, R1, 4),
11768 TAIL_CALL(-1),
11769 BPF_ALU64_IMM(BPF_MOV, R0, -1),
11770 BPF_EXIT_INSN(),
11771 },
11772 .result = 10,
11773 },
11774 {
11775 "Tail call error path, max count reached",
11776 .insns = {
11777 BPF_ALU64_IMM(BPF_ADD, R1, 1),
11778 BPF_ALU64_REG(BPF_MOV, R0, R1),
11779 TAIL_CALL(0),
11780 BPF_EXIT_INSN(),
11781 },
11782 .result = MAX_TAIL_CALL_CNT + 1,
11783 },
11784 {
11785 "Tail call error path, NULL target",
11786 .insns = {
11787 BPF_ALU64_IMM(BPF_MOV, R0, -1),
11788 TAIL_CALL(TAIL_CALL_NULL),
11789 BPF_ALU64_IMM(BPF_MOV, R0, 1),
11790 BPF_EXIT_INSN(),
11791 },
11792 .result = 1,
11793 },
11794 {
11795 "Tail call error path, index out of range",
11796 .insns = {
11797 BPF_ALU64_IMM(BPF_MOV, R0, -1),
11798 TAIL_CALL(TAIL_CALL_INVALID),
11799 BPF_ALU64_IMM(BPF_MOV, R0, 1),
11800 BPF_EXIT_INSN(),
11801 },
11802 .result = 1,
11803 },
11804};
11805
11806static void __init destroy_tail_call_tests(struct bpf_array *progs)
11807{
11808 int i;
11809
11810 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
11811 if (progs->ptrs[i])
11812 bpf_prog_free(progs->ptrs[i]);
11813 kfree(progs);
11814}
11815
11816static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
11817{
11818 int ntests = ARRAY_SIZE(tail_call_tests);
11819 struct bpf_array *progs;
11820 int which, err;
11821
11822	/* Allocate the table of programs to be used for tail calls */
11823 progs = kzalloc(sizeof(*progs) + (ntests + 1) * sizeof(progs->ptrs[0]),
11824 GFP_KERNEL);
11825 if (!progs)
11826 goto out_nomem;
11827
11828 /* Create all eBPF programs and populate the table */
11829 for (which = 0; which < ntests; which++) {
11830 struct tail_call_test *test = &tail_call_tests[which];
11831 struct bpf_prog *fp;
11832 int len, i;
11833
11834 /* Compute the number of program instructions */
11835 for (len = 0; len < MAX_INSNS; len++) {
11836 struct bpf_insn *insn = &test->insns[len];
11837
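			/*
			 * A 64-bit immediate load occupies two bpf_insn
			 * slots, and the second slot has code == 0; skip it
			 * so the terminator check below does not end the
			 * count early.
			 */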
11838 if (len < MAX_INSNS - 1 &&
11839 insn->code == (BPF_LD | BPF_DW | BPF_IMM))
11840 len++;
11841 if (insn->code == 0)
11842 break;
11843 }
11844
11845 /* Allocate and initialize the program */
11846 fp = bpf_prog_alloc(bpf_prog_size(len), 0);
11847 if (!fp)
11848 goto out_nomem;
11849
11850 fp->len = len;
11851 fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
11852 fp->aux->stack_depth = test->stack_depth;
11853 memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
11854
11855 /* Relocate runtime tail call offsets and addresses */
11856 for (i = 0; i < len; i++) {
11857 struct bpf_insn *insn = &fp->insnsi[i];
11858
11859 if (insn->imm != TAIL_CALL_MARKER)
11860 continue;
11861
11862 switch (insn->code) {
11863 case BPF_LD | BPF_DW | BPF_IMM:
11864 insn[0].imm = (u32)(long)progs;
11865 insn[1].imm = ((u64)(long)progs) >> 32;
11866 break;
11867
11868 case BPF_ALU | BPF_MOV | BPF_K:
11869 if (insn->off == TAIL_CALL_NULL)
11870 insn->imm = ntests;
11871 else if (insn->off == TAIL_CALL_INVALID)
11872 insn->imm = ntests + 1;
11873 else
11874 insn->imm = which + insn->off;
11875 insn->off = 0;
11876 }
11877 }
11878
11879 fp = bpf_prog_select_runtime(fp, &err);
11880 if (err)
11881 goto out_err;
11882
11883 progs->ptrs[which] = fp;
11884 }
11885
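	/*
	 * With the relocations above, TAIL_CALL_NULL resolves to index
	 * ntests (inside the map, but left NULL) while TAIL_CALL_INVALID
	 * resolves to ntests + 1, which is >= max_entries and therefore
	 * out of range at run time.
	 */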
11886 /* The last entry contains a NULL program pointer */
11887 progs->map.max_entries = ntests + 1;
11888 *pprogs = progs;
11889 return 0;
11890
11891out_nomem:
11892 err = -ENOMEM;
11893
11894out_err:
11895 if (progs)
11896 destroy_tail_call_tests(progs);
11897 return err;
11898}
11899
11900static __init int test_tail_calls(struct bpf_array *progs)
11901{
11902 int i, err_cnt = 0, pass_cnt = 0;
11903 int jit_cnt = 0, run_cnt = 0;
11904
11905 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
11906 struct tail_call_test *test = &tail_call_tests[i];
11907 struct bpf_prog *fp = progs->ptrs[i];
11908 u64 duration;
11909 int ret;
11910
11911 cond_resched();
11912
11913 pr_info("#%d %s ", i, test->descr);
11914 if (!fp) {
11915 err_cnt++;
11916 continue;
11917 }
11918 pr_cont("jited:%u ", fp->jited);
11919
11920 run_cnt++;
11921 if (fp->jited)
11922 jit_cnt++;
11923
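		/*
		 * __run_one() is defined earlier in this file: it runs the
		 * program MAX_TESTRUNS times with a NULL context and reports
		 * the average runtime in nanoseconds, which is the value
		 * printed before PASS below.
		 */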
11924 ret = __run_one(fp, NULL, MAX_TESTRUNS, &duration);
11925 if (ret == test->result) {
11926 pr_cont("%lld PASS", duration);
11927 pass_cnt++;
11928 } else {
11929 pr_cont("ret %d != %d FAIL", ret, test->result);
11930 err_cnt++;
11931 }
11932 }
11933
11934 pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
11935 __func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
11936
11937 return err_cnt ? -EINVAL : 0;
11938}
11939
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011940static int __init test_bpf_init(void)
11941{
Johan Almbladh874be052021-08-09 11:18:29 +020011942 struct bpf_array *progs = NULL;
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011943 int ret;
11944
11945 ret = prepare_bpf_tests();
11946 if (ret < 0)
11947 return ret;
11948
11949 ret = test_bpf();
Daniel Borkmanna4afd37b2015-05-13 13:12:43 +020011950 destroy_bpf_tests();
Yonghong Song76db8082018-03-21 16:31:04 -070011951 if (ret)
11952 return ret;
11953
Johan Almbladh874be052021-08-09 11:18:29 +020011954 ret = prepare_tail_call_tests(&progs);
11955 if (ret)
11956 return ret;
11957 ret = test_tail_calls(progs);
11958 destroy_tail_call_tests(progs);
11959 if (ret)
11960 return ret;
11961
Yonghong Song76db8082018-03-21 16:31:04 -070011962 return test_skb_segment();
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011963}
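/*
 * Typical usage (a sketch; details depend on the kernel configuration):
 * build with CONFIG_TEST_BPF=m, load with "modprobe test_bpf" and read the
 * per-test results from the kernel log. A failing run makes this init
 * function return -EINVAL, so module insertion itself reports an error.
 */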
11964
11965static void __exit test_bpf_exit(void)
11966{
11967}
11968
11969module_init(test_bpf_init);
11970module_exit(test_bpf_exit);
Daniel Borkmann10f18e02014-05-23 18:44:00 +020011971
Alexei Starovoitov64a89462014-05-08 14:10:52 -070011972MODULE_LICENSE("GPL");