// SPDX-License-Identifier: GPL-2.0-only
/*
 * bpf_jit_comp64.c: eBPF JIT compiler
 *
 * Copyright 2016 Naveen N. Rao <naveen.n.rao@linux.vnet.ibm.com>
 *		  IBM Corporation
 *
 * Based on the powerpc classic BPF JIT compiler by Matt Evans
 */
#include <linux/moduleloader.h>
#include <asm/cacheflush.h>
#include <asm/asm-compat.h>
#include <linux/netdevice.h>
#include <linux/filter.h>
#include <linux/if_vlan.h>
#include <asm/kprobes.h>
#include <linux/bpf.h>

#include "bpf_jit64.h"

static inline bool bpf_has_stack_frame(struct codegen_context *ctx)
{
	/*
	 * We only need a stack frame if:
	 * - we call other functions (kernel helpers), or
	 * - the bpf program uses its stack area
	 * The latter condition is deduced from the usage of BPF_REG_FP
	 */
	return ctx->seen & SEEN_FUNC || bpf_is_seen_register(ctx, b2p[BPF_REG_FP]);
}

/*
 * When not setting up our own stackframe, the redzone usage is:
 *
 *		[	prev sp		] <-------------
 *		[	  ...		]		|
 * sp (r1) --->	[    stack pointer	] --------------
 *		[   nv gpr save area	] 6*8
 *		[    tail_call_cnt	] 8
 *		[    local_tmp_var	] 8
 *		[   unused red zone	] 208 bytes protected
 */
static int bpf_jit_stack_local(struct codegen_context *ctx)
{
	if (bpf_has_stack_frame(ctx))
		return STACK_FRAME_MIN_SIZE + ctx->stack_size;
	else
		return -(BPF_PPC_STACK_SAVE + 16);
}

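/*
 * tail_call_cnt sits 8 bytes above local_tmp_var in both layouts (see the
 * redzone diagram above): without a stack frame, local_tmp_var is at
 * r1 - (BPF_PPC_STACK_SAVE + 16) and tail_call_cnt at r1 - (BPF_PPC_STACK_SAVE + 8).
 */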
static int bpf_jit_stack_tailcallcnt(struct codegen_context *ctx)
{
	return bpf_jit_stack_local(ctx) + 8;
}

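/*
 * Non-volatile GPRs are saved at the top of our stack frame: r(reg) lives
 * 8 * (32 - reg) bytes below the frame top, e.g. r31 at -8, r30 at -16.
 * Without our own frame, the same offsets index into the caller's redzone.
 */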
static int bpf_jit_stack_offsetof(struct codegen_context *ctx, int reg)
{
	if (reg >= BPF_PPC_NVR_MIN && reg < 32)
		return (bpf_has_stack_frame(ctx) ?
			(BPF_PPC_STACKFRAME + ctx->stack_size) : 0)
				- (8 * (32 - reg));

	pr_err("BPF JIT is asking about unknown registers");
	BUG();
}

void bpf_jit_realloc_regs(struct codegen_context *ctx)
{
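	/*
	 * Nothing to remap on ppc64 -- this hook exists for the common JIT
	 * code and is only meaningful for the 32-bit JIT.
	 */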
}

void bpf_jit_build_prologue(u32 *image, struct codegen_context *ctx)
{
	int i;

	/*
	 * Initialize tail_call_cnt if we do tail calls.
	 * Otherwise, put in NOPs so that it can be skipped when we are
	 * invoked through a tail call.
	 */
	if (ctx->seen & SEEN_TAILCALL) {
		EMIT(PPC_RAW_LI(b2p[TMP_REG_1], 0));
		/* this goes in the redzone */
		PPC_BPF_STL(b2p[TMP_REG_1], 1, -(BPF_PPC_STACK_SAVE + 8));
	} else {
		EMIT(PPC_RAW_NOP());
		EMIT(PPC_RAW_NOP());
	}

#define BPF_TAILCALL_PROLOGUE_SIZE	8

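	/*
	 * Either branch above emits exactly two instructions (8 bytes),
	 * which is what a tail call skips when jumping to
	 * bpf_func + BPF_TAILCALL_PROLOGUE_SIZE (see bpf_jit_emit_tail_call()).
	 */
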
	if (bpf_has_stack_frame(ctx)) {
		/*
		 * We need a stack frame, but we don't necessarily need to
		 * save/restore LR unless we call other functions
		 */
		if (ctx->seen & SEEN_FUNC) {
			EMIT(PPC_RAW_MFLR(_R0));
			PPC_BPF_STL(0, 1, PPC_LR_STKOFF);
		}

		PPC_BPF_STLU(1, 1, -(BPF_PPC_STACKFRAME + ctx->stack_size));
	}

	/*
	 * Back up non-volatile regs -- BPF registers 6-10
	 * If we haven't created our own stack frame, we save these
	 * in the protected zone below the previous stack frame
	 */
	for (i = BPF_REG_6; i <= BPF_REG_10; i++)
		if (bpf_is_seen_register(ctx, b2p[i]))
			PPC_BPF_STL(b2p[i], 1, bpf_jit_stack_offsetof(ctx, b2p[i]));

	/* Setup frame pointer to point to the bpf stack area */
	if (bpf_is_seen_register(ctx, b2p[BPF_REG_FP]))
		EMIT(PPC_RAW_ADDI(b2p[BPF_REG_FP], 1,
				STACK_FRAME_MIN_SIZE + ctx->stack_size));
}

static void bpf_jit_emit_common_epilogue(u32 *image, struct codegen_context *ctx)
{
	int i;

	/* Restore NVRs */
	for (i = BPF_REG_6; i <= BPF_REG_10; i++)
		if (bpf_is_seen_register(ctx, b2p[i]))
			PPC_BPF_LL(b2p[i], 1, bpf_jit_stack_offsetof(ctx, b2p[i]));

	/* Tear down our stack frame */
	if (bpf_has_stack_frame(ctx)) {
		EMIT(PPC_RAW_ADDI(1, 1, BPF_PPC_STACKFRAME + ctx->stack_size));
		if (ctx->seen & SEEN_FUNC) {
			PPC_BPF_LL(0, 1, PPC_LR_STKOFF);
			EMIT(PPC_RAW_MTLR(0));
		}
	}
}

void bpf_jit_build_epilogue(u32 *image, struct codegen_context *ctx)
{
	bpf_jit_emit_common_epilogue(image, ctx);

	/* Move result to r3 */
	EMIT(PPC_RAW_MR(3, b2p[BPF_REG_0]));

	EMIT(PPC_RAW_BLR());
}

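/*
 * With the ELF v1 ABI, a function pointer points to a function descriptor:
 * the actual entry point is at offset 0 and the callee's TOC pointer at
 * offset 8. Under ELF v2 there are no descriptors, so the address can be
 * moved to CTR directly via r12.
 */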
static void bpf_jit_emit_func_call_hlp(u32 *image, struct codegen_context *ctx,
				       u64 func)
{
#ifdef PPC64_ELF_ABI_v1
	/* func points to the function descriptor */
	PPC_LI64(b2p[TMP_REG_2], func);
	/* Load actual entry point from function descriptor */
	PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_2], 0);
	/* ... and move it to CTR */
	EMIT(PPC_RAW_MTCTR(b2p[TMP_REG_1]));
	/*
	 * Load TOC from function descriptor at offset 8.
	 * We can clobber r2 since we get called through a
	 * function pointer (so caller will save/restore r2)
	 * and since we don't use a TOC ourself.
	 */
	PPC_BPF_LL(2, b2p[TMP_REG_2], 8);
#else
	/* We can clobber r12 */
	PPC_FUNC_ADDR(12, func);
	EMIT(PPC_RAW_MTCTR(12));
#endif
	EMIT(PPC_RAW_BCTRL());
}

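/*
 * For bpf-to-bpf calls the target address can change between passes, so the
 * emitted sequence must have a fixed length. The worst case that PPC_LI64()
 * can emit is (roughly, illustrative):
 *
 *	lis  r12, imm[63:48]
 *	ori  r12, r12, imm[47:32]
 *	sldi r12, r12, 32
 *	oris r12, r12, imm[31:16]
 *	ori  r12, r12, imm[15:0]
 *
 * which is why shorter expansions are padded with NOPs below.
 */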
void bpf_jit_emit_func_call_rel(u32 *image, struct codegen_context *ctx, u64 func)
{
	unsigned int i, ctx_idx = ctx->idx;

	/* Load function address into r12 */
	PPC_LI64(12, func);

	/* For bpf-to-bpf function calls, the callee's address is unknown
	 * until the last extra pass. As seen above, we use PPC_LI64() to
	 * load the callee's address, but this may optimize the number of
	 * instructions required based on the nature of the address.
	 *
	 * Since we don't want the number of instructions emitted to change,
	 * we pad the optimized PPC_LI64() call with NOPs to guarantee that
	 * we always have a five-instruction sequence, which is the maximum
	 * that PPC_LI64() can emit.
	 */
	for (i = ctx->idx - ctx_idx; i < 5; i++)
		EMIT(PPC_RAW_NOP());

#ifdef PPC64_ELF_ABI_v1
	/*
	 * Load TOC from function descriptor at offset 8.
	 * We can clobber r2 since we get called through a
	 * function pointer (so caller will save/restore r2)
	 * and since we don't use a TOC ourself.
	 */
	PPC_BPF_LL(2, 12, 8);
	/* Load actual entry point from function descriptor */
	PPC_BPF_LL(12, 12, 0);
#endif

	EMIT(PPC_RAW_MTCTR(12));
	EMIT(PPC_RAW_BCTRL());
}

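/*
 * Tail call: jump into another BPF program without returning. The emitted
 * sequence implements, in pseudocode:
 *
 *	if (index >= array->map.max_entries ||
 *	    tail_call_cnt > MAX_TAIL_CALL_CNT)
 *		goto out;
 *	tail_call_cnt++;
 *	prog = array->ptrs[index];
 *	if (prog == NULL)
 *		goto out;
 *	goto *(prog->bpf_func + BPF_TAILCALL_PROLOGUE_SIZE);
 */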
static void bpf_jit_emit_tail_call(u32 *image, struct codegen_context *ctx, u32 out)
{
	/*
	 * By now, the eBPF program has already setup parameters in r3, r4 and r5
	 * r3/BPF_REG_1 - pointer to ctx -- passed as is to the next bpf program
	 * r4/BPF_REG_2 - pointer to bpf_array
	 * r5/BPF_REG_3 - index in bpf_array
	 */
	int b2p_bpf_array = b2p[BPF_REG_2];
	int b2p_index = b2p[BPF_REG_3];

	/*
	 * if (index >= array->map.max_entries)
	 *   goto out;
	 */
	EMIT(PPC_RAW_LWZ(b2p[TMP_REG_1], b2p_bpf_array, offsetof(struct bpf_array, map.max_entries)));
	EMIT(PPC_RAW_RLWINM(b2p_index, b2p_index, 0, 0, 31));
	EMIT(PPC_RAW_CMPLW(b2p_index, b2p[TMP_REG_1]));
	PPC_BCC(COND_GE, out);

	/*
	 * if (tail_call_cnt > MAX_TAIL_CALL_CNT)
	 *   goto out;
	 */
	PPC_BPF_LL(b2p[TMP_REG_1], 1, bpf_jit_stack_tailcallcnt(ctx));
	EMIT(PPC_RAW_CMPLWI(b2p[TMP_REG_1], MAX_TAIL_CALL_CNT));
	PPC_BCC(COND_GT, out);

	/*
	 * tail_call_cnt++;
	 */
	EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], 1));
	PPC_BPF_STL(b2p[TMP_REG_1], 1, bpf_jit_stack_tailcallcnt(ctx));

	/* prog = array->ptrs[index]; */
	EMIT(PPC_RAW_MULI(b2p[TMP_REG_1], b2p_index, 8));
	EMIT(PPC_RAW_ADD(b2p[TMP_REG_1], b2p[TMP_REG_1], b2p_bpf_array));
	PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_1], offsetof(struct bpf_array, ptrs));

	/*
	 * if (prog == NULL)
	 *   goto out;
	 */
	EMIT(PPC_RAW_CMPLDI(b2p[TMP_REG_1], 0));
	PPC_BCC(COND_EQ, out);

	/* goto *(prog->bpf_func + prologue_size); */
	PPC_BPF_LL(b2p[TMP_REG_1], b2p[TMP_REG_1], offsetof(struct bpf_prog, bpf_func));
#ifdef PPC64_ELF_ABI_v1
	/* skip past the function descriptor */
	EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1],
			FUNCTION_DESCR_SIZE + BPF_TAILCALL_PROLOGUE_SIZE));
#else
	EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], b2p[TMP_REG_1], BPF_TAILCALL_PROLOGUE_SIZE));
#endif
	EMIT(PPC_RAW_MTCTR(b2p[TMP_REG_1]));

	/* tear down stack, restore NVRs, ... */
	bpf_jit_emit_common_epilogue(image, ctx);

	EMIT(PPC_RAW_BCTR());
	/* out: */
}

/* Assemble the body code between the prologue & epilogue */
int bpf_jit_build_body(struct bpf_prog *fp, u32 *image, struct codegen_context *ctx,
		       u32 *addrs, bool extra_pass)
{
	const struct bpf_insn *insn = fp->insnsi;
	int flen = fp->len;
	int i, ret;

	/* Start of epilogue code - will only be valid 2nd pass onwards */
	u32 exit_addr = addrs[flen];

	for (i = 0; i < flen; i++) {
		u32 code = insn[i].code;
		u32 dst_reg = b2p[insn[i].dst_reg];
		u32 src_reg = b2p[insn[i].src_reg];
		s16 off = insn[i].off;
		s32 imm = insn[i].imm;
		bool func_addr_fixed;
		u64 func_addr;
		u64 imm64;
		u32 true_cond;
		u32 tmp_idx;

		/*
		 * addrs[] maps a BPF bytecode address into a real offset from
		 * the start of the body code.
		 */
		addrs[i] = ctx->idx * 4;

		/*
		 * As an optimization, we note down which non-volatile registers
		 * are used so that we can only save/restore those in our
		 * prologue and epilogue. We do this here regardless of whether
		 * the actual BPF instruction uses src/dst registers or not
		 * (for instance, BPF_CALL does not use them). The expectation
		 * is that those instructions will have src_reg/dst_reg set to
		 * 0. Even otherwise, we just lose some prologue/epilogue
		 * optimization but everything else should work without
		 * any issues.
		 */
		if (dst_reg >= BPF_PPC_NVR_MIN && dst_reg < 32)
			bpf_set_seen_register(ctx, dst_reg);
		if (src_reg >= BPF_PPC_NVR_MIN && src_reg < 32)
			bpf_set_seen_register(ctx, src_reg);

		switch (code) {
		/*
		 * Arithmetic operations: ADD/SUB/MUL/DIV/MOD/NEG
		 */
		case BPF_ALU | BPF_ADD | BPF_X: /* (u32) dst += (u32) src */
		case BPF_ALU64 | BPF_ADD | BPF_X: /* dst += src */
			EMIT(PPC_RAW_ADD(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_SUB | BPF_X: /* (u32) dst -= (u32) src */
		case BPF_ALU64 | BPF_SUB | BPF_X: /* dst -= src */
			EMIT(PPC_RAW_SUB(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_ADD | BPF_K: /* (u32) dst += (u32) imm */
		case BPF_ALU | BPF_SUB | BPF_K: /* (u32) dst -= (u32) imm */
		case BPF_ALU64 | BPF_ADD | BPF_K: /* dst += imm */
		case BPF_ALU64 | BPF_SUB | BPF_K: /* dst -= imm */
			if (BPF_OP(code) == BPF_SUB)
				imm = -imm;
			if (imm) {
				if (imm >= -32768 && imm < 32768)
					EMIT(PPC_RAW_ADDI(dst_reg, dst_reg, IMM_L(imm)));
				else {
					PPC_LI32(b2p[TMP_REG_1], imm);
					EMIT(PPC_RAW_ADD(dst_reg, dst_reg, b2p[TMP_REG_1]));
				}
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_MUL | BPF_X: /* (u32) dst *= (u32) src */
		case BPF_ALU64 | BPF_MUL | BPF_X: /* dst *= src */
			if (BPF_CLASS(code) == BPF_ALU)
				EMIT(PPC_RAW_MULW(dst_reg, dst_reg, src_reg));
			else
				EMIT(PPC_RAW_MULD(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_MUL | BPF_K: /* (u32) dst *= (u32) imm */
		case BPF_ALU64 | BPF_MUL | BPF_K: /* dst *= imm */
			if (imm >= -32768 && imm < 32768)
				EMIT(PPC_RAW_MULI(dst_reg, dst_reg, IMM_L(imm)));
			else {
				PPC_LI32(b2p[TMP_REG_1], imm);
				if (BPF_CLASS(code) == BPF_ALU)
					EMIT(PPC_RAW_MULW(dst_reg, dst_reg,
							  b2p[TMP_REG_1]));
				else
					EMIT(PPC_RAW_MULD(dst_reg, dst_reg,
							  b2p[TMP_REG_1]));
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_DIV | BPF_X: /* (u32) dst /= (u32) src */
		case BPF_ALU | BPF_MOD | BPF_X: /* (u32) dst %= (u32) src */
			if (BPF_OP(code) == BPF_MOD) {
				EMIT(PPC_RAW_DIVWU(b2p[TMP_REG_1], dst_reg, src_reg));
				EMIT(PPC_RAW_MULW(b2p[TMP_REG_1], src_reg,
						  b2p[TMP_REG_1]));
				EMIT(PPC_RAW_SUB(dst_reg, dst_reg, b2p[TMP_REG_1]));
			} else
				EMIT(PPC_RAW_DIVWU(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU64 | BPF_DIV | BPF_X: /* dst /= src */
		case BPF_ALU64 | BPF_MOD | BPF_X: /* dst %= src */
			if (BPF_OP(code) == BPF_MOD) {
				EMIT(PPC_RAW_DIVDU(b2p[TMP_REG_1], dst_reg, src_reg));
				EMIT(PPC_RAW_MULD(b2p[TMP_REG_1], src_reg,
						  b2p[TMP_REG_1]));
				EMIT(PPC_RAW_SUB(dst_reg, dst_reg, b2p[TMP_REG_1]));
			} else
				EMIT(PPC_RAW_DIVDU(dst_reg, dst_reg, src_reg));
			break;
		case BPF_ALU | BPF_MOD | BPF_K: /* (u32) dst %= (u32) imm */
		case BPF_ALU | BPF_DIV | BPF_K: /* (u32) dst /= (u32) imm */
		case BPF_ALU64 | BPF_MOD | BPF_K: /* dst %= imm */
		case BPF_ALU64 | BPF_DIV | BPF_K: /* dst /= imm */
			if (imm == 0)
				return -EINVAL;
			else if (imm == 1)
				goto bpf_alu32_trunc;

			PPC_LI32(b2p[TMP_REG_1], imm);
			switch (BPF_CLASS(code)) {
			case BPF_ALU:
				if (BPF_OP(code) == BPF_MOD) {
					EMIT(PPC_RAW_DIVWU(b2p[TMP_REG_2],
							   dst_reg,
							   b2p[TMP_REG_1]));
					EMIT(PPC_RAW_MULW(b2p[TMP_REG_1],
							  b2p[TMP_REG_1],
							  b2p[TMP_REG_2]));
					EMIT(PPC_RAW_SUB(dst_reg, dst_reg,
							 b2p[TMP_REG_1]));
				} else
					EMIT(PPC_RAW_DIVWU(dst_reg, dst_reg,
							   b2p[TMP_REG_1]));
				break;
			case BPF_ALU64:
				if (BPF_OP(code) == BPF_MOD) {
					EMIT(PPC_RAW_DIVDU(b2p[TMP_REG_2],
							   dst_reg,
							   b2p[TMP_REG_1]));
					EMIT(PPC_RAW_MULD(b2p[TMP_REG_1],
							  b2p[TMP_REG_1],
							  b2p[TMP_REG_2]));
					EMIT(PPC_RAW_SUB(dst_reg, dst_reg,
							 b2p[TMP_REG_1]));
				} else
					EMIT(PPC_RAW_DIVDU(dst_reg, dst_reg,
							   b2p[TMP_REG_1]));
				break;
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_NEG: /* (u32) dst = -dst */
		case BPF_ALU64 | BPF_NEG: /* dst = -dst */
			EMIT(PPC_RAW_NEG(dst_reg, dst_reg));
			goto bpf_alu32_trunc;

		/*
		 * Logical operations: AND/OR/XOR/[A]LSH/[A]RSH
		 */
		case BPF_ALU | BPF_AND | BPF_X: /* (u32) dst = dst & src */
		case BPF_ALU64 | BPF_AND | BPF_X: /* dst = dst & src */
			EMIT(PPC_RAW_AND(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_AND | BPF_K: /* (u32) dst = dst & imm */
		case BPF_ALU64 | BPF_AND | BPF_K: /* dst = dst & imm */
			if (!IMM_H(imm))
				EMIT(PPC_RAW_ANDI(dst_reg, dst_reg, IMM_L(imm)));
			else {
				/* Sign-extended */
				PPC_LI32(b2p[TMP_REG_1], imm);
				EMIT(PPC_RAW_AND(dst_reg, dst_reg, b2p[TMP_REG_1]));
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_OR | BPF_X: /* dst = (u32) dst | (u32) src */
		case BPF_ALU64 | BPF_OR | BPF_X: /* dst = dst | src */
			EMIT(PPC_RAW_OR(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_OR | BPF_K:/* dst = (u32) dst | (u32) imm */
		case BPF_ALU64 | BPF_OR | BPF_K:/* dst = dst | imm */
			if (imm < 0 && BPF_CLASS(code) == BPF_ALU64) {
				/* Sign-extended */
				PPC_LI32(b2p[TMP_REG_1], imm);
				EMIT(PPC_RAW_OR(dst_reg, dst_reg, b2p[TMP_REG_1]));
			} else {
				if (IMM_L(imm))
					EMIT(PPC_RAW_ORI(dst_reg, dst_reg, IMM_L(imm)));
				if (IMM_H(imm))
					EMIT(PPC_RAW_ORIS(dst_reg, dst_reg, IMM_H(imm)));
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_XOR | BPF_X: /* (u32) dst ^= src */
		case BPF_ALU64 | BPF_XOR | BPF_X: /* dst ^= src */
			EMIT(PPC_RAW_XOR(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_XOR | BPF_K: /* (u32) dst ^= (u32) imm */
		case BPF_ALU64 | BPF_XOR | BPF_K: /* dst ^= imm */
			if (imm < 0 && BPF_CLASS(code) == BPF_ALU64) {
				/* Sign-extended */
				PPC_LI32(b2p[TMP_REG_1], imm);
				EMIT(PPC_RAW_XOR(dst_reg, dst_reg, b2p[TMP_REG_1]));
			} else {
				if (IMM_L(imm))
					EMIT(PPC_RAW_XORI(dst_reg, dst_reg, IMM_L(imm)));
				if (IMM_H(imm))
					EMIT(PPC_RAW_XORIS(dst_reg, dst_reg, IMM_H(imm)));
			}
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_LSH | BPF_X: /* (u32) dst <<= (u32) src */
			/* slw clears top 32 bits */
			EMIT(PPC_RAW_SLW(dst_reg, dst_reg, src_reg));
			/* skip zero extension move, but set address map. */
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		case BPF_ALU64 | BPF_LSH | BPF_X: /* dst <<= src; */
			EMIT(PPC_RAW_SLD(dst_reg, dst_reg, src_reg));
			break;
		case BPF_ALU | BPF_LSH | BPF_K: /* (u32) dst <<= (u32) imm */
			/* with imm 0, we still need to clear top 32 bits */
			EMIT(PPC_RAW_SLWI(dst_reg, dst_reg, imm));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		case BPF_ALU64 | BPF_LSH | BPF_K: /* dst <<= imm */
			if (imm != 0)
				EMIT(PPC_RAW_SLDI(dst_reg, dst_reg, imm));
			break;
		case BPF_ALU | BPF_RSH | BPF_X: /* (u32) dst >>= (u32) src */
			EMIT(PPC_RAW_SRW(dst_reg, dst_reg, src_reg));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		case BPF_ALU64 | BPF_RSH | BPF_X: /* dst >>= src */
			EMIT(PPC_RAW_SRD(dst_reg, dst_reg, src_reg));
			break;
		case BPF_ALU | BPF_RSH | BPF_K: /* (u32) dst >>= (u32) imm */
			EMIT(PPC_RAW_SRWI(dst_reg, dst_reg, imm));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		case BPF_ALU64 | BPF_RSH | BPF_K: /* dst >>= imm */
			if (imm != 0)
				EMIT(PPC_RAW_SRDI(dst_reg, dst_reg, imm));
			break;
		case BPF_ALU | BPF_ARSH | BPF_X: /* (s32) dst >>= src */
			EMIT(PPC_RAW_SRAW(dst_reg, dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU64 | BPF_ARSH | BPF_X: /* (s64) dst >>= src */
			EMIT(PPC_RAW_SRAD(dst_reg, dst_reg, src_reg));
			break;
		case BPF_ALU | BPF_ARSH | BPF_K: /* (s32) dst >>= imm */
			EMIT(PPC_RAW_SRAWI(dst_reg, dst_reg, imm));
			goto bpf_alu32_trunc;
		case BPF_ALU64 | BPF_ARSH | BPF_K: /* (s64) dst >>= imm */
			if (imm != 0)
				EMIT(PPC_RAW_SRADI(dst_reg, dst_reg, imm));
			break;

		/*
		 * MOV
		 */
		case BPF_ALU | BPF_MOV | BPF_X: /* (u32) dst = src */
		case BPF_ALU64 | BPF_MOV | BPF_X: /* dst = src */
			if (imm == 1) {
				/* special mov32 for zext */
				EMIT(PPC_RAW_RLWINM(dst_reg, dst_reg, 0, 0, 31));
				break;
			}
			EMIT(PPC_RAW_MR(dst_reg, src_reg));
			goto bpf_alu32_trunc;
		case BPF_ALU | BPF_MOV | BPF_K: /* (u32) dst = imm */
		case BPF_ALU64 | BPF_MOV | BPF_K: /* dst = (s64) imm */
			PPC_LI32(dst_reg, imm);
			if (imm < 0)
				goto bpf_alu32_trunc;
			else if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;

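		/*
		 * rlwinm rd, rs, 0, 0, 31 rotates by zero and masks with
		 * 0xffffffff, clearing the upper 32 bits -- the same idiom
		 * as the "special mov32 for zext" above.
		 */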
bpf_alu32_trunc:
		/* Truncate to 32-bits */
		if (BPF_CLASS(code) == BPF_ALU && !fp->aux->verifier_zext)
			EMIT(PPC_RAW_RLWINM(dst_reg, dst_reg, 0, 0, 31));
		break;

		/*
		 * BPF_FROM_BE/LE
		 */
		case BPF_ALU | BPF_END | BPF_FROM_LE:
		case BPF_ALU | BPF_END | BPF_FROM_BE:
#ifdef __BIG_ENDIAN__
			if (BPF_SRC(code) == BPF_FROM_BE)
				goto emit_clear;
#else /* !__BIG_ENDIAN__ */
			if (BPF_SRC(code) == BPF_FROM_LE)
				goto emit_clear;
#endif
			switch (imm) {
			case 16:
				/* Rotate 8 bits left & mask with 0x0000ff00 */
				EMIT(PPC_RAW_RLWINM(b2p[TMP_REG_1], dst_reg, 8, 16, 23));
				/* Rotate 8 bits right & insert LSB to reg */
				EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 24, 31));
				/* Move result back to dst_reg */
				EMIT(PPC_RAW_MR(dst_reg, b2p[TMP_REG_1]));
				break;
			case 32:
				/*
				 * Rotate word left by 8 bits:
				 * 2 bytes are already in their final position
				 * -- byte 2 and 4 (of bytes 1, 2, 3 and 4)
				 */
				EMIT(PPC_RAW_RLWINM(b2p[TMP_REG_1], dst_reg, 8, 0, 31));
				/* Rotate 24 bits and insert byte 1 */
				EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 0, 7));
				/* Rotate 24 bits and insert byte 3 */
				EMIT(PPC_RAW_RLWIMI(b2p[TMP_REG_1], dst_reg, 24, 16, 23));
				EMIT(PPC_RAW_MR(dst_reg, b2p[TMP_REG_1]));
				break;
			case 64:
				/*
				 * Way easier and faster(?) to store the value
				 * into stack and then use ldbrx
				 *
				 * ctx->seen will be reliable in pass2, but
				 * the instructions generated will remain the
				 * same across all passes
				 */
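				/*
				 * ldbrx (load doubleword byte-reverse
				 * indexed) loads the eight bytes in reversed
				 * order, so store + byte-reversed load
				 * performs the full swap.
				 */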
				PPC_BPF_STL(dst_reg, 1, bpf_jit_stack_local(ctx));
				EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], 1, bpf_jit_stack_local(ctx)));
				EMIT(PPC_RAW_LDBRX(dst_reg, 0, b2p[TMP_REG_1]));
				break;
			}
			break;

emit_clear:
			switch (imm) {
			case 16:
				/* zero-extend 16 bits into 64 bits */
				EMIT(PPC_RAW_RLDICL(dst_reg, dst_reg, 0, 48));
				if (insn_is_zext(&insn[i + 1]))
					addrs[++i] = ctx->idx * 4;
				break;
			case 32:
				if (!fp->aux->verifier_zext)
					/* zero-extend 32 bits into 64 bits */
					EMIT(PPC_RAW_RLDICL(dst_reg, dst_reg, 0, 32));
				break;
			case 64:
				/* nop */
				break;
			}
			break;

		/*
		 * BPF_ST NOSPEC (speculation barrier)
		 */
		case BPF_ST | BPF_NOSPEC:
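			/*
			 * Note: no barrier instruction is emitted here; the
			 * speculation barrier is currently a no-op in this JIT.
			 */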
			break;

		/*
		 * BPF_ST(X)
		 */
		case BPF_STX | BPF_MEM | BPF_B: /* *(u8 *)(dst + off) = src */
		case BPF_ST | BPF_MEM | BPF_B: /* *(u8 *)(dst + off) = imm */
			if (BPF_CLASS(code) == BPF_ST) {
				EMIT(PPC_RAW_LI(b2p[TMP_REG_1], imm));
				src_reg = b2p[TMP_REG_1];
			}
			EMIT(PPC_RAW_STB(src_reg, dst_reg, off));
			break;
		case BPF_STX | BPF_MEM | BPF_H: /* *(u16 *)(dst + off) = src */
		case BPF_ST | BPF_MEM | BPF_H: /* *(u16 *)(dst + off) = imm */
			if (BPF_CLASS(code) == BPF_ST) {
				EMIT(PPC_RAW_LI(b2p[TMP_REG_1], imm));
				src_reg = b2p[TMP_REG_1];
			}
			EMIT(PPC_RAW_STH(src_reg, dst_reg, off));
			break;
		case BPF_STX | BPF_MEM | BPF_W: /* *(u32 *)(dst + off) = src */
		case BPF_ST | BPF_MEM | BPF_W: /* *(u32 *)(dst + off) = imm */
			if (BPF_CLASS(code) == BPF_ST) {
				PPC_LI32(b2p[TMP_REG_1], imm);
				src_reg = b2p[TMP_REG_1];
			}
			EMIT(PPC_RAW_STW(src_reg, dst_reg, off));
			break;
		case BPF_STX | BPF_MEM | BPF_DW: /* *(u64 *)(dst + off) = src */
		case BPF_ST | BPF_MEM | BPF_DW: /* *(u64 *)(dst + off) = imm */
			if (BPF_CLASS(code) == BPF_ST) {
				PPC_LI32(b2p[TMP_REG_1], imm);
				src_reg = b2p[TMP_REG_1];
			}
			PPC_BPF_STL(src_reg, dst_reg, off);
			break;

		/*
		 * BPF_STX ATOMIC (atomic ops)
		 */
		case BPF_STX | BPF_ATOMIC | BPF_W:
			if (imm != BPF_ADD) {
				pr_err_ratelimited(
					"eBPF filter atomic op code %02x (@%d) unsupported\n",
					code, i);
				return -ENOTSUPP;
			}

			/* *(u32 *)(dst + off) += src */

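			/*
			 * Load-and-reserve/store-conditional retry loop:
			 * lwarx places a reservation on the word; stwcx. only
			 * succeeds if the reservation is still held, otherwise
			 * CR0[EQ] is clear and we branch back and retry.
			 */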
			/* Get EA into TMP_REG_1 */
			EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], dst_reg, off));
			tmp_idx = ctx->idx * 4;
			/* load value from memory into TMP_REG_2 */
			EMIT(PPC_RAW_LWARX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1], 0));
			/* add value from src_reg into this */
			EMIT(PPC_RAW_ADD(b2p[TMP_REG_2], b2p[TMP_REG_2], src_reg));
			/* store result back */
			EMIT(PPC_RAW_STWCX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1]));
			/* we're done if this succeeded */
			PPC_BCC_SHORT(COND_NE, tmp_idx);
			break;
		case BPF_STX | BPF_ATOMIC | BPF_DW:
			if (imm != BPF_ADD) {
				pr_err_ratelimited(
					"eBPF filter atomic op code %02x (@%d) unsupported\n",
					code, i);
				return -ENOTSUPP;
			}
			/* *(u64 *)(dst + off) += src */

			EMIT(PPC_RAW_ADDI(b2p[TMP_REG_1], dst_reg, off));
			tmp_idx = ctx->idx * 4;
			EMIT(PPC_RAW_LDARX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1], 0));
			EMIT(PPC_RAW_ADD(b2p[TMP_REG_2], b2p[TMP_REG_2], src_reg));
			EMIT(PPC_RAW_STDCX(b2p[TMP_REG_2], 0, b2p[TMP_REG_1]));
			PPC_BCC_SHORT(COND_NE, tmp_idx);
			break;

		/*
		 * BPF_LDX
		 */
		/* dst = *(u8 *)(ul) (src + off) */
		case BPF_LDX | BPF_MEM | BPF_B:
			EMIT(PPC_RAW_LBZ(dst_reg, src_reg, off));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		/* dst = *(u16 *)(ul) (src + off) */
		case BPF_LDX | BPF_MEM | BPF_H:
			EMIT(PPC_RAW_LHZ(dst_reg, src_reg, off));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		/* dst = *(u32 *)(ul) (src + off) */
		case BPF_LDX | BPF_MEM | BPF_W:
			EMIT(PPC_RAW_LWZ(dst_reg, src_reg, off));
			if (insn_is_zext(&insn[i + 1]))
				addrs[++i] = ctx->idx * 4;
			break;
		/* dst = *(u64 *)(ul) (src + off) */
		case BPF_LDX | BPF_MEM | BPF_DW:
			PPC_BPF_LL(dst_reg, src_reg, off);
			break;

		/*
		 * Doubleword load
		 * 16 byte instruction that uses two 'struct bpf_insn'
		 */
		case BPF_LD | BPF_IMM | BPF_DW: /* dst = (u64) imm */
			imm64 = ((u64)(u32) insn[i].imm) |
				    (((u64)(u32) insn[i+1].imm) << 32);
			/* Adjust for two bpf instructions */
			addrs[++i] = ctx->idx * 4;
			PPC_LI64(dst_reg, imm64);
			break;

		/*
		 * Return/Exit
		 */
		case BPF_JMP | BPF_EXIT:
			/*
			 * If this isn't the very last instruction, branch to
			 * the epilogue. If we _are_ the last instruction,
			 * we'll just fall through to the epilogue.
			 */
			if (i != flen - 1)
				PPC_JMP(exit_addr);
			/* else fall through to the epilogue */
			break;

		/*
		 * Call kernel helper or bpf function
		 */
		case BPF_JMP | BPF_CALL:
			ctx->seen |= SEEN_FUNC;

			ret = bpf_jit_get_func_addr(fp, &insn[i], extra_pass,
						    &func_addr, &func_addr_fixed);
			if (ret < 0)
				return ret;

			if (func_addr_fixed)
				bpf_jit_emit_func_call_hlp(image, ctx, func_addr);
			else
				bpf_jit_emit_func_call_rel(image, ctx, func_addr);
			/* move return value from r3 to BPF_REG_0 */
			EMIT(PPC_RAW_MR(b2p[BPF_REG_0], 3));
			break;

		/*
		 * Jumps and branches
		 */
		case BPF_JMP | BPF_JA:
			PPC_JMP(addrs[i + 1 + off]);
			break;

		case BPF_JMP | BPF_JGT | BPF_K:
		case BPF_JMP | BPF_JGT | BPF_X:
		case BPF_JMP | BPF_JSGT | BPF_K:
		case BPF_JMP | BPF_JSGT | BPF_X:
		case BPF_JMP32 | BPF_JGT | BPF_K:
		case BPF_JMP32 | BPF_JGT | BPF_X:
		case BPF_JMP32 | BPF_JSGT | BPF_K:
		case BPF_JMP32 | BPF_JSGT | BPF_X:
			true_cond = COND_GT;
			goto cond_branch;
		case BPF_JMP | BPF_JLT | BPF_K:
		case BPF_JMP | BPF_JLT | BPF_X:
		case BPF_JMP | BPF_JSLT | BPF_K:
		case BPF_JMP | BPF_JSLT | BPF_X:
		case BPF_JMP32 | BPF_JLT | BPF_K:
		case BPF_JMP32 | BPF_JLT | BPF_X:
		case BPF_JMP32 | BPF_JSLT | BPF_K:
		case BPF_JMP32 | BPF_JSLT | BPF_X:
			true_cond = COND_LT;
			goto cond_branch;
		case BPF_JMP | BPF_JGE | BPF_K:
		case BPF_JMP | BPF_JGE | BPF_X:
		case BPF_JMP | BPF_JSGE | BPF_K:
		case BPF_JMP | BPF_JSGE | BPF_X:
		case BPF_JMP32 | BPF_JGE | BPF_K:
		case BPF_JMP32 | BPF_JGE | BPF_X:
		case BPF_JMP32 | BPF_JSGE | BPF_K:
		case BPF_JMP32 | BPF_JSGE | BPF_X:
			true_cond = COND_GE;
			goto cond_branch;
		case BPF_JMP | BPF_JLE | BPF_K:
		case BPF_JMP | BPF_JLE | BPF_X:
		case BPF_JMP | BPF_JSLE | BPF_K:
		case BPF_JMP | BPF_JSLE | BPF_X:
		case BPF_JMP32 | BPF_JLE | BPF_K:
		case BPF_JMP32 | BPF_JLE | BPF_X:
		case BPF_JMP32 | BPF_JSLE | BPF_K:
		case BPF_JMP32 | BPF_JSLE | BPF_X:
			true_cond = COND_LE;
			goto cond_branch;
		case BPF_JMP | BPF_JEQ | BPF_K:
		case BPF_JMP | BPF_JEQ | BPF_X:
		case BPF_JMP32 | BPF_JEQ | BPF_K:
		case BPF_JMP32 | BPF_JEQ | BPF_X:
			true_cond = COND_EQ;
			goto cond_branch;
		case BPF_JMP | BPF_JNE | BPF_K:
		case BPF_JMP | BPF_JNE | BPF_X:
		case BPF_JMP32 | BPF_JNE | BPF_K:
		case BPF_JMP32 | BPF_JNE | BPF_X:
			true_cond = COND_NE;
			goto cond_branch;
		case BPF_JMP | BPF_JSET | BPF_K:
		case BPF_JMP | BPF_JSET | BPF_X:
		case BPF_JMP32 | BPF_JSET | BPF_K:
		case BPF_JMP32 | BPF_JSET | BPF_X:
			true_cond = COND_NE;
			/* Fall through */

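		/*
		 * All conditional branches funnel here: first emit a compare
		 * (or and./rlwinm. for JSET) that sets CR0, then a single
		 * conditional branch on true_cond to the target.
		 */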
cond_branch:
		switch (code) {
		case BPF_JMP | BPF_JGT | BPF_X:
		case BPF_JMP | BPF_JLT | BPF_X:
		case BPF_JMP | BPF_JGE | BPF_X:
		case BPF_JMP | BPF_JLE | BPF_X:
		case BPF_JMP | BPF_JEQ | BPF_X:
		case BPF_JMP | BPF_JNE | BPF_X:
		case BPF_JMP32 | BPF_JGT | BPF_X:
		case BPF_JMP32 | BPF_JLT | BPF_X:
		case BPF_JMP32 | BPF_JGE | BPF_X:
		case BPF_JMP32 | BPF_JLE | BPF_X:
		case BPF_JMP32 | BPF_JEQ | BPF_X:
		case BPF_JMP32 | BPF_JNE | BPF_X:
			/* unsigned comparison */
			if (BPF_CLASS(code) == BPF_JMP32)
				EMIT(PPC_RAW_CMPLW(dst_reg, src_reg));
			else
				EMIT(PPC_RAW_CMPLD(dst_reg, src_reg));
			break;
		case BPF_JMP | BPF_JSGT | BPF_X:
		case BPF_JMP | BPF_JSLT | BPF_X:
		case BPF_JMP | BPF_JSGE | BPF_X:
		case BPF_JMP | BPF_JSLE | BPF_X:
		case BPF_JMP32 | BPF_JSGT | BPF_X:
		case BPF_JMP32 | BPF_JSLT | BPF_X:
		case BPF_JMP32 | BPF_JSGE | BPF_X:
		case BPF_JMP32 | BPF_JSLE | BPF_X:
			/* signed comparison */
			if (BPF_CLASS(code) == BPF_JMP32)
				EMIT(PPC_RAW_CMPW(dst_reg, src_reg));
			else
				EMIT(PPC_RAW_CMPD(dst_reg, src_reg));
			break;
		case BPF_JMP | BPF_JSET | BPF_X:
		case BPF_JMP32 | BPF_JSET | BPF_X:
			if (BPF_CLASS(code) == BPF_JMP) {
				EMIT(PPC_RAW_AND_DOT(b2p[TMP_REG_1], dst_reg,
						     src_reg));
			} else {
				int tmp_reg = b2p[TMP_REG_1];

				EMIT(PPC_RAW_AND(tmp_reg, dst_reg, src_reg));
				EMIT(PPC_RAW_RLWINM_DOT(tmp_reg, tmp_reg, 0, 0,
							31));
			}
			break;
		case BPF_JMP | BPF_JNE | BPF_K:
		case BPF_JMP | BPF_JEQ | BPF_K:
		case BPF_JMP | BPF_JGT | BPF_K:
		case BPF_JMP | BPF_JLT | BPF_K:
		case BPF_JMP | BPF_JGE | BPF_K:
		case BPF_JMP | BPF_JLE | BPF_K:
		case BPF_JMP32 | BPF_JNE | BPF_K:
		case BPF_JMP32 | BPF_JEQ | BPF_K:
		case BPF_JMP32 | BPF_JGT | BPF_K:
		case BPF_JMP32 | BPF_JLT | BPF_K:
		case BPF_JMP32 | BPF_JGE | BPF_K:
		case BPF_JMP32 | BPF_JLE | BPF_K:
		{
			bool is_jmp32 = BPF_CLASS(code) == BPF_JMP32;

			/*
			 * Need sign-extended load, so only positive
			 * values can be used as imm in cmpldi
			 */
			if (imm >= 0 && imm < 32768) {
				if (is_jmp32)
					EMIT(PPC_RAW_CMPLWI(dst_reg, imm));
				else
					EMIT(PPC_RAW_CMPLDI(dst_reg, imm));
			} else {
				/* sign-extending load */
				PPC_LI32(b2p[TMP_REG_1], imm);
				/* ... but unsigned comparison */
				if (is_jmp32)
					EMIT(PPC_RAW_CMPLW(dst_reg,
							   b2p[TMP_REG_1]));
				else
					EMIT(PPC_RAW_CMPLD(dst_reg,
							   b2p[TMP_REG_1]));
			}
			break;
		}
		case BPF_JMP | BPF_JSGT | BPF_K:
		case BPF_JMP | BPF_JSLT | BPF_K:
		case BPF_JMP | BPF_JSGE | BPF_K:
		case BPF_JMP | BPF_JSLE | BPF_K:
		case BPF_JMP32 | BPF_JSGT | BPF_K:
		case BPF_JMP32 | BPF_JSLT | BPF_K:
		case BPF_JMP32 | BPF_JSGE | BPF_K:
		case BPF_JMP32 | BPF_JSLE | BPF_K:
		{
			bool is_jmp32 = BPF_CLASS(code) == BPF_JMP32;

			/*
			 * signed comparison, so any 16-bit value
			 * can be used in cmpdi
			 */
			if (imm >= -32768 && imm < 32768) {
				if (is_jmp32)
					EMIT(PPC_RAW_CMPWI(dst_reg, imm));
				else
					EMIT(PPC_RAW_CMPDI(dst_reg, imm));
			} else {
				PPC_LI32(b2p[TMP_REG_1], imm);
				if (is_jmp32)
					EMIT(PPC_RAW_CMPW(dst_reg,
							  b2p[TMP_REG_1]));
				else
					EMIT(PPC_RAW_CMPD(dst_reg,
							  b2p[TMP_REG_1]));
			}
			break;
		}
		case BPF_JMP | BPF_JSET | BPF_K:
		case BPF_JMP32 | BPF_JSET | BPF_K:
			/* andi does not sign-extend the immediate */
			if (imm >= 0 && imm < 32768)
				/* PPC_ANDI is _only/always_ dot-form */
				EMIT(PPC_RAW_ANDI(b2p[TMP_REG_1], dst_reg, imm));
			else {
				int tmp_reg = b2p[TMP_REG_1];

				PPC_LI32(tmp_reg, imm);
				if (BPF_CLASS(code) == BPF_JMP) {
					EMIT(PPC_RAW_AND_DOT(tmp_reg, dst_reg,
							     tmp_reg));
				} else {
					EMIT(PPC_RAW_AND(tmp_reg, dst_reg,
							 tmp_reg));
					EMIT(PPC_RAW_RLWINM_DOT(tmp_reg, tmp_reg,
								0, 0, 31));
				}
			}
			break;
		}
		PPC_BCC(true_cond, addrs[i + 1 + off]);
		break;

		/*
		 * Tail call
		 */
		case BPF_JMP | BPF_TAIL_CALL:
			ctx->seen |= SEEN_TAILCALL;
			bpf_jit_emit_tail_call(image, ctx, addrs[i + 1]);
			break;

		default:
			/*
			 * The filter contains something cruel & unusual.
			 * We don't handle it, but also there shouldn't be
			 * anything missing from our list.
			 */
			pr_err_ratelimited("eBPF filter opcode %04x (@%d) unsupported\n",
					   code, i);
			return -ENOTSUPP;
		}
	}

	/* Set end-of-body-code address for exit. */
	addrs[i] = ctx->idx * 4;

	return 0;
}