// SPDX-License-Identifier: GPL-2.0
/* BPF JIT compiler for RV64G
 *
 * Copyright(c) 2019 Björn Töpel <bjorn.topel@gmail.com>
 *
 */

#include <linux/bpf.h>
#include <linux/filter.h>
#include "bpf_jit.h"

#define RV_REG_TCC RV_REG_A6
#define RV_REG_TCC_SAVED RV_REG_S6 /* Store A6 in S6 if the program makes calls */

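/* BPF registers are mapped onto RISC-V registers so that the RISC-V
 * calling convention is preserved: the BPF arguments R1-R5 land in the
 * argument registers a0-a4, the callee-saved BPF registers R6-R9 and the
 * frame pointer land in callee-saved s-registers, R0 lives in a5 (and is
 * moved to a0 on exit, see the epilogue), and the verifier's scratch
 * register AX uses t0.
 */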
static const int regmap[] = {
	[BPF_REG_0] = RV_REG_A5,
	[BPF_REG_1] = RV_REG_A0,
	[BPF_REG_2] = RV_REG_A1,
	[BPF_REG_3] = RV_REG_A2,
	[BPF_REG_4] = RV_REG_A3,
	[BPF_REG_5] = RV_REG_A4,
	[BPF_REG_6] = RV_REG_S1,
	[BPF_REG_7] = RV_REG_S2,
	[BPF_REG_8] = RV_REG_S3,
	[BPF_REG_9] = RV_REG_S4,
	[BPF_REG_FP] = RV_REG_S5,
	[BPF_REG_AX] = RV_REG_T0,
};

enum {
	RV_CTX_F_SEEN_TAIL_CALL = 0,
	RV_CTX_F_SEEN_CALL = RV_REG_RA,
	RV_CTX_F_SEEN_S1 = RV_REG_S1,
	RV_CTX_F_SEEN_S2 = RV_REG_S2,
	RV_CTX_F_SEEN_S3 = RV_REG_S3,
	RV_CTX_F_SEEN_S4 = RV_REG_S4,
	RV_CTX_F_SEEN_S5 = RV_REG_S5,
	RV_CTX_F_SEEN_S6 = RV_REG_S6,
};

static u8 bpf_to_rv_reg(int bpf_reg, struct rv_jit_context *ctx)
{
	u8 reg = regmap[bpf_reg];

	switch (reg) {
	case RV_CTX_F_SEEN_S1:
	case RV_CTX_F_SEEN_S2:
	case RV_CTX_F_SEEN_S3:
	case RV_CTX_F_SEEN_S4:
	case RV_CTX_F_SEEN_S5:
	case RV_CTX_F_SEEN_S6:
		__set_bit(reg, &ctx->flags);
	}
	return reg;
}

static bool seen_reg(int reg, struct rv_jit_context *ctx)
{
	switch (reg) {
	case RV_CTX_F_SEEN_CALL:
	case RV_CTX_F_SEEN_S1:
	case RV_CTX_F_SEEN_S2:
	case RV_CTX_F_SEEN_S3:
	case RV_CTX_F_SEEN_S4:
	case RV_CTX_F_SEEN_S5:
	case RV_CTX_F_SEEN_S6:
		return test_bit(reg, &ctx->flags);
	}
	return false;
}

static void mark_fp(struct rv_jit_context *ctx)
{
	__set_bit(RV_CTX_F_SEEN_S5, &ctx->flags);
}

static void mark_call(struct rv_jit_context *ctx)
{
	__set_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);
}

static bool seen_call(struct rv_jit_context *ctx)
{
	return test_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);
}

static void mark_tail_call(struct rv_jit_context *ctx)
{
	__set_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags);
}

static bool seen_tail_call(struct rv_jit_context *ctx)
{
	return test_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags);
}

static u8 rv_tail_call_reg(struct rv_jit_context *ctx)
{
	mark_tail_call(ctx);

	if (seen_call(ctx)) {
		__set_bit(RV_CTX_F_SEEN_S6, &ctx->flags);
		return RV_REG_S6;
	}
	return RV_REG_A6;
}

static bool is_32b_int(s64 val)
{
	return -(1L << 31) <= val && val < (1L << 31);
}

static bool in_auipc_jalr_range(s64 val)
{
	/*
	 * auipc+jalr can reach any signed PC-relative offset in the range
	 * [-2^31 - 2^11, 2^31 - 2^11).
	 */
	return (-(1L << 31) - (1L << 11)) <= val &&
	       val < ((1L << 31) - (1L << 11));
}
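
/* A sketch of where that range comes from: auipc sets t1 = PC + (imm20 << 12)
 * with imm20 in [-2^19, 2^19), i.e. an upper offset in [-2^31, 2^31 - 2^12],
 * and jalr then adds a sign-extended 12-bit immediate in [-2^11, 2^11). The
 * smallest reachable offset is thus -2^31 - 2^11, and the largest is
 * 2^31 - 2^12 + 2^11 - 1 = 2^31 - 2^11 - 1.
 */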

static void emit_imm(u8 rd, s64 val, struct rv_jit_context *ctx)
{
	/* Note that the immediate from the add is sign-extended,
	 * which means that we need to compensate for this by adding 2^12
	 * when the 12th bit is set. A simpler way of doing this, and
	 * getting rid of the check, is to just add 2^11 before the
	 * shift. The "Loading a 32-Bit constant" example from the
	 * "Computer Organization and Design, RISC-V edition" book by
	 * Patterson/Hennessy highlights this fact.
	 *
	 * This also means that we need to process LSB to MSB.
	 */
	s64 upper = (val + (1 << 11)) >> 12, lower = val & 0xfff;
	int shift;

	if (is_32b_int(val)) {
		if (upper)
			emit(rv_lui(rd, upper), ctx);

		if (!upper) {
			emit(rv_addi(rd, RV_REG_ZERO, lower), ctx);
			return;
		}

		emit(rv_addiw(rd, rd, lower), ctx);
		return;
	}

	shift = __ffs(upper);
	upper >>= shift;
	shift += 12;

	emit_imm(rd, upper, ctx);

	emit(rv_slli(rd, rd, shift), ctx);
	if (lower)
		emit(rv_addi(rd, rd, lower), ctx);
}
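
/* An illustrative trace (not from the original source): emit_imm for the
 * 32-bit value 0x12345fff, whose 12th bit is set. upper = (0x12345fff +
 * 0x800) >> 12 = 0x12346 and lower = 0xfff, so the JIT emits
 *
 *	lui   rd, 0x12346	// rd = 0x12346000
 *	addiw rd, rd, 0xfff	// sign-extended: rd += -1 -> 0x12345fff
 *
 * The pre-added 2^11 in upper exactly cancels the sign-extension of the
 * 12-bit immediate. Values wider than 32 bits recurse on the upper part
 * and shift it into place.
 */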

static void __build_epilogue(bool is_tail_call, struct rv_jit_context *ctx)
{
	int stack_adjust = ctx->stack_size, store_offset = stack_adjust - 8;

	if (seen_reg(RV_REG_RA, ctx)) {
		emit(rv_ld(RV_REG_RA, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	emit(rv_ld(RV_REG_FP, store_offset, RV_REG_SP), ctx);
	store_offset -= 8;
	if (seen_reg(RV_REG_S1, ctx)) {
		emit(rv_ld(RV_REG_S1, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S2, ctx)) {
		emit(rv_ld(RV_REG_S2, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S3, ctx)) {
		emit(rv_ld(RV_REG_S3, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S4, ctx)) {
		emit(rv_ld(RV_REG_S4, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S5, ctx)) {
		emit(rv_ld(RV_REG_S5, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S6, ctx)) {
		emit(rv_ld(RV_REG_S6, store_offset, RV_REG_SP), ctx);
		store_offset -= 8;
	}

	emit(rv_addi(RV_REG_SP, RV_REG_SP, stack_adjust), ctx);
	/* Set return value. */
	if (!is_tail_call)
		emit(rv_addi(RV_REG_A0, RV_REG_A5, 0), ctx);
	emit(rv_jalr(RV_REG_ZERO, is_tail_call ? RV_REG_T3 : RV_REG_RA,
		     is_tail_call ? 4 : 0), /* skip TCC init */
	     ctx);
}

static void emit_bcc(u8 cond, u8 rd, u8 rs, int rvoff,
		     struct rv_jit_context *ctx)
{
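	/* RISC-V branch immediates encode a PC-relative byte offset in
	 * units of two bytes (the low bit is implicitly zero), which is
	 * why the rv_b* helpers below are handed rvoff >> 1.
	 */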
	switch (cond) {
	case BPF_JEQ:
		emit(rv_beq(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JGT:
		emit(rv_bltu(rs, rd, rvoff >> 1), ctx);
		return;
	case BPF_JLT:
		emit(rv_bltu(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JGE:
		emit(rv_bgeu(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JLE:
		emit(rv_bgeu(rs, rd, rvoff >> 1), ctx);
		return;
	case BPF_JNE:
		emit(rv_bne(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JSGT:
		emit(rv_blt(rs, rd, rvoff >> 1), ctx);
		return;
	case BPF_JSLT:
		emit(rv_blt(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JSGE:
		emit(rv_bge(rd, rs, rvoff >> 1), ctx);
		return;
	case BPF_JSLE:
		emit(rv_bge(rs, rd, rvoff >> 1), ctx);
	}
}

static void emit_branch(u8 cond, u8 rd, u8 rs, int rvoff,
			struct rv_jit_context *ctx)
{
	s64 upper, lower;

	if (is_13b_int(rvoff)) {
		emit_bcc(cond, rd, rs, rvoff, ctx);
		return;
	}

	/* Adjust for jal */
	rvoff -= 4;

	/* Transform, e.g.:
	 * bne rd,rs,foo
	 * to
	 * beq rd,rs,<.L1>
	 * (auipc foo)
	 * jal(r) foo
	 * .L1
	 */
	cond = invert_bpf_cond(cond);
	if (is_21b_int(rvoff)) {
		emit_bcc(cond, rd, rs, 8, ctx);
		emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx);
		return;
	}

	/* 32b. No need for an additional rvoff adjustment, since we
	 * get that from the auipc at PC', where PC = PC' + 4.
	 */
	upper = (rvoff + (1 << 11)) >> 12;
	lower = rvoff & 0xfff;

	emit_bcc(cond, rd, rs, 12, ctx);
	emit(rv_auipc(RV_REG_T1, upper), ctx);
	emit(rv_jalr(RV_REG_ZERO, RV_REG_T1, lower), ctx);
}
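
/* An illustrative expansion (not emitted verbatim): a conditional branch
 * whose target lies outside the +/-4 KiB range of a bcc is rewritten with
 * the condition inverted, hopping over an unconditional jump:
 *
 *	beq  rd, rs, .L1	// inverted condition, skips 8 bytes
 *	jal  zero, rvoff	// +/-1 MiB; auipc+jalr if even that is short
 * .L1:
 *
 * rvoff is reduced by 4 first because the jal sits one instruction after
 * the point the original offset was measured from.
 */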

static void emit_zext_32(u8 reg, struct rv_jit_context *ctx)
{
	emit(rv_slli(reg, reg, 32), ctx);
	emit(rv_srli(reg, reg, 32), ctx);
}

static int emit_bpf_tail_call(int insn, struct rv_jit_context *ctx)
{
	int tc_ninsn, off, start_insn = ctx->ninsns;
	u8 tcc = rv_tail_call_reg(ctx);

	/* a0: &ctx
	 * a1: &array
	 * a2: index
	 *
	 * if (index >= array->map.max_entries)
	 *	goto out;
	 */
	tc_ninsn = insn ? ctx->offset[insn] - ctx->offset[insn - 1] :
		   ctx->offset[0];
	emit_zext_32(RV_REG_A2, ctx);

	off = offsetof(struct bpf_array, map.max_entries);
	if (is_12b_check(off, insn))
		return -1;
	emit(rv_lwu(RV_REG_T1, off, RV_REG_A1), ctx);
	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
	emit_branch(BPF_JGE, RV_REG_A2, RV_REG_T1, off, ctx);

	/* if (TCC-- < 0)
	 *	goto out;
	 */
	emit(rv_addi(RV_REG_T1, tcc, -1), ctx);
	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
	emit_branch(BPF_JSLT, tcc, RV_REG_ZERO, off, ctx);

	/* prog = array->ptrs[index];
	 * if (!prog)
	 *	goto out;
	 */
	emit(rv_slli(RV_REG_T2, RV_REG_A2, 3), ctx);
	emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_A1), ctx);
	off = offsetof(struct bpf_array, ptrs);
	if (is_12b_check(off, insn))
		return -1;
	emit(rv_ld(RV_REG_T2, off, RV_REG_T2), ctx);
	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
	emit_branch(BPF_JEQ, RV_REG_T2, RV_REG_ZERO, off, ctx);

	/* goto *(prog->bpf_func + 4); */
	off = offsetof(struct bpf_prog, bpf_func);
	if (is_12b_check(off, insn))
		return -1;
	emit(rv_ld(RV_REG_T3, off, RV_REG_T2), ctx);
	emit(rv_addi(RV_REG_TCC, RV_REG_T1, 0), ctx);
	__build_epilogue(true, ctx);
	return 0;
}
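
/* Roughly, the tail call above expands to the following RV64 sketch
 * (register choices and the "out" target follow the emits above):
 *
 *	lwu  t1, max_entries(a1)
 *	bgeu a2, t1, out		// index out of bounds
 *	addi t1, tcc, -1
 *	blt  tcc, zero, out		// tail-call count exhausted
 *	slli t2, a2, 3
 *	add  t2, t2, a1
 *	ld   t2, ptrs(t2)
 *	beq  t2, zero, out		// empty slot
 *	ld   t3, bpf_func(t2)
 *	mv   tcc, t1			// commit the decrement
 *	<epilogue>; jalr zero, t3, 4	// skip the target's TCC init
 */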

static void init_regs(u8 *rd, u8 *rs, const struct bpf_insn *insn,
		      struct rv_jit_context *ctx)
{
	u8 code = insn->code;

	switch (code) {
	case BPF_JMP | BPF_JA:
	case BPF_JMP | BPF_CALL:
	case BPF_JMP | BPF_EXIT:
	case BPF_JMP | BPF_TAIL_CALL:
		break;
	default:
		*rd = bpf_to_rv_reg(insn->dst_reg, ctx);
	}

	if (code & (BPF_ALU | BPF_X) || code & (BPF_ALU64 | BPF_X) ||
	    code & (BPF_JMP | BPF_X) || code & (BPF_JMP32 | BPF_X) ||
	    code & BPF_LDX || code & BPF_STX)
		*rs = bpf_to_rv_reg(insn->src_reg, ctx);
}

static void emit_zext_32_rd_rs(u8 *rd, u8 *rs, struct rv_jit_context *ctx)
{
	emit(rv_addi(RV_REG_T2, *rd, 0), ctx);
	emit_zext_32(RV_REG_T2, ctx);
	emit(rv_addi(RV_REG_T1, *rs, 0), ctx);
	emit_zext_32(RV_REG_T1, ctx);
	*rd = RV_REG_T2;
	*rs = RV_REG_T1;
}

static void emit_sext_32_rd_rs(u8 *rd, u8 *rs, struct rv_jit_context *ctx)
{
	emit(rv_addiw(RV_REG_T2, *rd, 0), ctx);
	emit(rv_addiw(RV_REG_T1, *rs, 0), ctx);
	*rd = RV_REG_T2;
	*rs = RV_REG_T1;
}

static void emit_zext_32_rd_t1(u8 *rd, struct rv_jit_context *ctx)
{
	emit(rv_addi(RV_REG_T2, *rd, 0), ctx);
	emit_zext_32(RV_REG_T2, ctx);
	emit_zext_32(RV_REG_T1, ctx);
	*rd = RV_REG_T2;
}

static void emit_sext_32_rd(u8 *rd, struct rv_jit_context *ctx)
{
	emit(rv_addiw(RV_REG_T2, *rd, 0), ctx);
	*rd = RV_REG_T2;
}

static int emit_jump_and_link(u8 rd, s64 rvoff, bool force_jalr,
			      struct rv_jit_context *ctx)
{
	s64 upper, lower;

	if (rvoff && is_21b_int(rvoff) && !force_jalr) {
		emit(rv_jal(rd, rvoff >> 1), ctx);
		return 0;
	} else if (in_auipc_jalr_range(rvoff)) {
		upper = (rvoff + (1 << 11)) >> 12;
		lower = rvoff & 0xfff;
		emit(rv_auipc(RV_REG_T1, upper), ctx);
		emit(rv_jalr(rd, RV_REG_T1, lower), ctx);
		return 0;
	}

	pr_err("bpf-jit: target offset 0x%llx is out of range\n", rvoff);
	return -ERANGE;
}

static bool is_signed_bpf_cond(u8 cond)
{
	return cond == BPF_JSGT || cond == BPF_JSLT ||
	       cond == BPF_JSGE || cond == BPF_JSLE;
}

static int emit_call(bool fixed, u64 addr, struct rv_jit_context *ctx)
{
	s64 off = 0;
	u64 ip;
	u8 rd;
	int ret;

	if (addr && ctx->insns) {
		ip = (u64)(long)(ctx->insns + ctx->ninsns);
		off = addr - ip;
	}

	ret = emit_jump_and_link(RV_REG_RA, off, !fixed, ctx);
	if (ret)
		return ret;
	rd = bpf_to_rv_reg(BPF_REG_0, ctx);
	emit(rv_addi(rd, RV_REG_A0, 0), ctx);
	return 0;
}
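
/* Note: on passes before the image is allocated, ctx->insns is NULL, so
 * off stays 0 here, and emit_jump_and_link() rejects rvoff == 0 for the
 * one-instruction jal form. That forces the two-instruction auipc+jalr
 * pair, presumably reserving worst-case space so the image cannot grow
 * once real offsets are resolved in the final pass.
 */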

int bpf_jit_emit_insn(const struct bpf_insn *insn, struct rv_jit_context *ctx,
		      bool extra_pass)
{
	bool is64 = BPF_CLASS(insn->code) == BPF_ALU64 ||
		    BPF_CLASS(insn->code) == BPF_JMP;
	int s, e, rvoff, ret, i = insn - ctx->prog->insnsi;
	struct bpf_prog_aux *aux = ctx->prog->aux;
	u8 rd = -1, rs = -1, code = insn->code;
	s16 off = insn->off;
	s32 imm = insn->imm;

	init_regs(&rd, &rs, insn, ctx);

	switch (code) {
	/* dst = src */
	case BPF_ALU | BPF_MOV | BPF_X:
	case BPF_ALU64 | BPF_MOV | BPF_X:
		if (imm == 1) {
			/* Special mov32 for zext */
			emit_zext_32(rd, ctx);
			break;
		}
		emit(is64 ? rv_addi(rd, rs, 0) : rv_addiw(rd, rs, 0), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;

	/* dst = dst OP src */
	case BPF_ALU | BPF_ADD | BPF_X:
	case BPF_ALU64 | BPF_ADD | BPF_X:
		emit(is64 ? rv_add(rd, rd, rs) : rv_addw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_SUB | BPF_X:
	case BPF_ALU64 | BPF_SUB | BPF_X:
		emit(is64 ? rv_sub(rd, rd, rs) : rv_subw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_AND | BPF_X:
	case BPF_ALU64 | BPF_AND | BPF_X:
		emit(rv_and(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_OR | BPF_X:
	case BPF_ALU64 | BPF_OR | BPF_X:
		emit(rv_or(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_XOR | BPF_X:
	case BPF_ALU64 | BPF_XOR | BPF_X:
		emit(rv_xor(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MUL | BPF_X:
	case BPF_ALU64 | BPF_MUL | BPF_X:
		emit(is64 ? rv_mul(rd, rd, rs) : rv_mulw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_DIV | BPF_X:
	case BPF_ALU64 | BPF_DIV | BPF_X:
		emit(is64 ? rv_divu(rd, rd, rs) : rv_divuw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MOD | BPF_X:
	case BPF_ALU64 | BPF_MOD | BPF_X:
		emit(is64 ? rv_remu(rd, rd, rs) : rv_remuw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_LSH | BPF_X:
	case BPF_ALU64 | BPF_LSH | BPF_X:
		emit(is64 ? rv_sll(rd, rd, rs) : rv_sllw(rd, rd, rs), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_RSH | BPF_X:
	case BPF_ALU64 | BPF_RSH | BPF_X:
		emit(is64 ? rv_srl(rd, rd, rs) : rv_srlw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_ARSH | BPF_X:
	case BPF_ALU64 | BPF_ARSH | BPF_X:
		emit(is64 ? rv_sra(rd, rd, rs) : rv_sraw(rd, rd, rs), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;

	/* dst = -dst */
	case BPF_ALU | BPF_NEG:
	case BPF_ALU64 | BPF_NEG:
		emit(is64 ? rv_sub(rd, RV_REG_ZERO, rd) :
		     rv_subw(rd, RV_REG_ZERO, rd), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;

	/* dst = BSWAP##imm(dst) */
	case BPF_ALU | BPF_END | BPF_FROM_LE:
	{
		int shift = 64 - imm;

		emit(rv_slli(rd, rd, shift), ctx);
		emit(rv_srli(rd, rd, shift), ctx);
		break;
	}
	case BPF_ALU | BPF_END | BPF_FROM_BE:
		emit(rv_addi(RV_REG_T2, RV_REG_ZERO, 0), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);
		if (imm == 16)
			goto out_be;

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);
		if (imm == 32)
			goto out_be;

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);

		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
		emit(rv_srli(rd, rd, 8), ctx);
out_be:
		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);

		emit(rv_addi(rd, RV_REG_T2, 0), ctx);
		break;
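
	/* An illustrative 16-bit trace of the unrolled byte swap above
	 * (not from the original source): with rd = 0x...beef, the first
	 * round leaves t2 = 0xef << 8 = 0xef00 and shifts rd right by 8;
	 * "imm == 16" then jumps to out_be, which adds the remaining low
	 * byte: t2 = 0xef00 + 0xbe = 0xefbe, finally copied back to rd.
	 * Each unrolled round moves one more byte, so imm = 32 and 64
	 * simply run more rounds before falling through to out_be.
	 */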

	/* dst = imm */
	case BPF_ALU | BPF_MOV | BPF_K:
	case BPF_ALU64 | BPF_MOV | BPF_K:
		emit_imm(rd, imm, ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;

	/* dst = dst OP imm */
	case BPF_ALU | BPF_ADD | BPF_K:
	case BPF_ALU64 | BPF_ADD | BPF_K:
		if (is_12b_int(imm)) {
			emit(is64 ? rv_addi(rd, rd, imm) :
			     rv_addiw(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(is64 ? rv_add(rd, rd, RV_REG_T1) :
			     rv_addw(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_SUB | BPF_K:
	case BPF_ALU64 | BPF_SUB | BPF_K:
		if (is_12b_int(-imm)) {
			emit(is64 ? rv_addi(rd, rd, -imm) :
			     rv_addiw(rd, rd, -imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(is64 ? rv_sub(rd, rd, RV_REG_T1) :
			     rv_subw(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_AND | BPF_K:
	case BPF_ALU64 | BPF_AND | BPF_K:
		if (is_12b_int(imm)) {
			emit(rv_andi(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(rv_and(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_OR | BPF_K:
	case BPF_ALU64 | BPF_OR | BPF_K:
		if (is_12b_int(imm)) {
			emit(rv_ori(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(rv_or(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_XOR | BPF_K:
	case BPF_ALU64 | BPF_XOR | BPF_K:
		if (is_12b_int(imm)) {
			emit(rv_xori(rd, rd, imm), ctx);
		} else {
			emit_imm(RV_REG_T1, imm, ctx);
			emit(rv_xor(rd, rd, RV_REG_T1), ctx);
		}
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MUL | BPF_K:
	case BPF_ALU64 | BPF_MUL | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_mul(rd, rd, RV_REG_T1) :
		     rv_mulw(rd, rd, RV_REG_T1), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_DIV | BPF_K:
	case BPF_ALU64 | BPF_DIV | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_divu(rd, rd, RV_REG_T1) :
		     rv_divuw(rd, rd, RV_REG_T1), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_MOD | BPF_K:
	case BPF_ALU64 | BPF_MOD | BPF_K:
		emit_imm(RV_REG_T1, imm, ctx);
		emit(is64 ? rv_remu(rd, rd, RV_REG_T1) :
		     rv_remuw(rd, rd, RV_REG_T1), ctx);
		if (!is64 && !aux->verifier_zext)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_LSH | BPF_K:
	case BPF_ALU64 | BPF_LSH | BPF_K:
		emit(is64 ? rv_slli(rd, rd, imm) : rv_slliw(rd, rd, imm), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_RSH | BPF_K:
	case BPF_ALU64 | BPF_RSH | BPF_K:
		emit(is64 ? rv_srli(rd, rd, imm) : rv_srliw(rd, rd, imm), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;
	case BPF_ALU | BPF_ARSH | BPF_K:
	case BPF_ALU64 | BPF_ARSH | BPF_K:
		emit(is64 ? rv_srai(rd, rd, imm) : rv_sraiw(rd, rd, imm), ctx);
		if (!is64)
			emit_zext_32(rd, ctx);
		break;

	/* JUMP off */
	case BPF_JMP | BPF_JA:
		rvoff = rv_offset(i, off, ctx);
		ret = emit_jump_and_link(RV_REG_ZERO, rvoff, false, ctx);
		if (ret)
			return ret;
		break;

	/* IF (dst COND src) JUMP off */
	case BPF_JMP | BPF_JEQ | BPF_X:
	case BPF_JMP32 | BPF_JEQ | BPF_X:
	case BPF_JMP | BPF_JGT | BPF_X:
	case BPF_JMP32 | BPF_JGT | BPF_X:
	case BPF_JMP | BPF_JLT | BPF_X:
	case BPF_JMP32 | BPF_JLT | BPF_X:
	case BPF_JMP | BPF_JGE | BPF_X:
	case BPF_JMP32 | BPF_JGE | BPF_X:
	case BPF_JMP | BPF_JLE | BPF_X:
	case BPF_JMP32 | BPF_JLE | BPF_X:
	case BPF_JMP | BPF_JNE | BPF_X:
	case BPF_JMP32 | BPF_JNE | BPF_X:
	case BPF_JMP | BPF_JSGT | BPF_X:
	case BPF_JMP32 | BPF_JSGT | BPF_X:
	case BPF_JMP | BPF_JSLT | BPF_X:
	case BPF_JMP32 | BPF_JSLT | BPF_X:
	case BPF_JMP | BPF_JSGE | BPF_X:
	case BPF_JMP32 | BPF_JSGE | BPF_X:
	case BPF_JMP | BPF_JSLE | BPF_X:
	case BPF_JMP32 | BPF_JSLE | BPF_X:
	case BPF_JMP | BPF_JSET | BPF_X:
	case BPF_JMP32 | BPF_JSET | BPF_X:
		rvoff = rv_offset(i, off, ctx);
		if (!is64) {
			s = ctx->ninsns;
			if (is_signed_bpf_cond(BPF_OP(code)))
				emit_sext_32_rd_rs(&rd, &rs, ctx);
			else
				emit_zext_32_rd_rs(&rd, &rs, ctx);
			e = ctx->ninsns;

			/* Adjust for extra insns */
			rvoff -= (e - s) << 2;
		}

		if (BPF_OP(code) == BPF_JSET) {
			/* Adjust for and */
			rvoff -= 4;
			emit(rv_and(RV_REG_T1, rd, rs), ctx);
			emit_branch(BPF_JNE, RV_REG_T1, RV_REG_ZERO, rvoff,
				    ctx);
		} else {
			emit_branch(BPF_OP(code), rd, rs, rvoff, ctx);
		}
		break;

	/* IF (dst COND imm) JUMP off */
	case BPF_JMP | BPF_JEQ | BPF_K:
	case BPF_JMP32 | BPF_JEQ | BPF_K:
	case BPF_JMP | BPF_JGT | BPF_K:
	case BPF_JMP32 | BPF_JGT | BPF_K:
	case BPF_JMP | BPF_JLT | BPF_K:
	case BPF_JMP32 | BPF_JLT | BPF_K:
	case BPF_JMP | BPF_JGE | BPF_K:
	case BPF_JMP32 | BPF_JGE | BPF_K:
	case BPF_JMP | BPF_JLE | BPF_K:
	case BPF_JMP32 | BPF_JLE | BPF_K:
	case BPF_JMP | BPF_JNE | BPF_K:
	case BPF_JMP32 | BPF_JNE | BPF_K:
	case BPF_JMP | BPF_JSGT | BPF_K:
	case BPF_JMP32 | BPF_JSGT | BPF_K:
	case BPF_JMP | BPF_JSLT | BPF_K:
	case BPF_JMP32 | BPF_JSLT | BPF_K:
	case BPF_JMP | BPF_JSGE | BPF_K:
	case BPF_JMP32 | BPF_JSGE | BPF_K:
	case BPF_JMP | BPF_JSLE | BPF_K:
	case BPF_JMP32 | BPF_JSLE | BPF_K:
	case BPF_JMP | BPF_JSET | BPF_K:
	case BPF_JMP32 | BPF_JSET | BPF_K:
		rvoff = rv_offset(i, off, ctx);
		s = ctx->ninsns;
		emit_imm(RV_REG_T1, imm, ctx);
		if (!is64) {
			if (is_signed_bpf_cond(BPF_OP(code)))
				emit_sext_32_rd(&rd, ctx);
			else
				emit_zext_32_rd_t1(&rd, ctx);
		}
		e = ctx->ninsns;

		/* Adjust for extra insns */
		rvoff -= (e - s) << 2;

		if (BPF_OP(code) == BPF_JSET) {
			/* Adjust for and */
			rvoff -= 4;
			emit(rv_and(RV_REG_T1, rd, RV_REG_T1), ctx);
			emit_branch(BPF_JNE, RV_REG_T1, RV_REG_ZERO, rvoff,
				    ctx);
		} else {
			emit_branch(BPF_OP(code), rd, RV_REG_T1, rvoff, ctx);
		}
		break;

	/* function call */
	case BPF_JMP | BPF_CALL:
	{
		bool fixed;
		u64 addr;

		mark_call(ctx);
		ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass, &addr,
					    &fixed);
		if (ret < 0)
			return ret;
		ret = emit_call(fixed, addr, ctx);
		if (ret)
			return ret;
		break;
	}
	/* tail call */
	case BPF_JMP | BPF_TAIL_CALL:
		if (emit_bpf_tail_call(i, ctx))
			return -1;
		break;

	/* function return */
	case BPF_JMP | BPF_EXIT:
		if (i == ctx->prog->len - 1)
			break;

		rvoff = epilogue_offset(ctx);
		ret = emit_jump_and_link(RV_REG_ZERO, rvoff, false, ctx);
		if (ret)
			return ret;
		break;

	/* dst = imm64 */
	case BPF_LD | BPF_IMM | BPF_DW:
	{
		struct bpf_insn insn1 = insn[1];
		u64 imm64;

		imm64 = (u64)insn1.imm << 32 | (u32)imm;
		emit_imm(rd, imm64, ctx);
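		/* A 64-bit BPF immediate spans two 8-byte BPF insns; the
		 * second half was consumed above, so return 1 to have the
		 * caller skip it.
		 */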
		return 1;
	}

	/* LDX: dst = *(size *)(src + off) */
	case BPF_LDX | BPF_MEM | BPF_B:
		if (is_12b_int(off)) {
			emit(rv_lbu(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
		emit(rv_lbu(rd, 0, RV_REG_T1), ctx);
		if (insn_is_zext(&insn[1]))
			return 1;
		break;
	case BPF_LDX | BPF_MEM | BPF_H:
		if (is_12b_int(off)) {
			emit(rv_lhu(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
		emit(rv_lhu(rd, 0, RV_REG_T1), ctx);
		if (insn_is_zext(&insn[1]))
			return 1;
		break;
	case BPF_LDX | BPF_MEM | BPF_W:
		if (is_12b_int(off)) {
			emit(rv_lwu(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
		emit(rv_lwu(rd, 0, RV_REG_T1), ctx);
		if (insn_is_zext(&insn[1]))
			return 1;
		break;
	case BPF_LDX | BPF_MEM | BPF_DW:
		if (is_12b_int(off)) {
			emit(rv_ld(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
		emit(rv_ld(rd, 0, RV_REG_T1), ctx);
		break;

	/* ST: *(size *)(dst + off) = imm */
	case BPF_ST | BPF_MEM | BPF_B:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sb(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
		emit(rv_sb(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;

	case BPF_ST | BPF_MEM | BPF_H:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sh(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
		emit(rv_sh(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;
	case BPF_ST | BPF_MEM | BPF_W:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sw(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
		emit(rv_sw(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;
	case BPF_ST | BPF_MEM | BPF_DW:
		emit_imm(RV_REG_T1, imm, ctx);
		if (is_12b_int(off)) {
			emit(rv_sd(rd, off, RV_REG_T1), ctx);
			break;
		}

		emit_imm(RV_REG_T2, off, ctx);
		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
		emit(rv_sd(RV_REG_T2, 0, RV_REG_T1), ctx);
		break;

	/* STX: *(size *)(dst + off) = src */
	case BPF_STX | BPF_MEM | BPF_B:
		if (is_12b_int(off)) {
			emit(rv_sb(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
		emit(rv_sb(RV_REG_T1, 0, rs), ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_H:
		if (is_12b_int(off)) {
			emit(rv_sh(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
		emit(rv_sh(RV_REG_T1, 0, rs), ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_W:
		if (is_12b_int(off)) {
			emit(rv_sw(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
		emit(rv_sw(RV_REG_T1, 0, rs), ctx);
		break;
	case BPF_STX | BPF_MEM | BPF_DW:
		if (is_12b_int(off)) {
			emit(rv_sd(rd, off, rs), ctx);
			break;
		}

		emit_imm(RV_REG_T1, off, ctx);
		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
		emit(rv_sd(RV_REG_T1, 0, rs), ctx);
		break;
	/* STX XADD: lock *(u32 *)(dst + off) += src */
	case BPF_STX | BPF_XADD | BPF_W:
	/* STX XADD: lock *(u64 *)(dst + off) += src */
	case BPF_STX | BPF_XADD | BPF_DW:
		if (off) {
			if (is_12b_int(off)) {
				emit(rv_addi(RV_REG_T1, rd, off), ctx);
			} else {
				emit_imm(RV_REG_T1, off, ctx);
				emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
			}

			rd = RV_REG_T1;
		}

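		/* amoadd.w/d atomically adds rs to memory at address rd;
		 * the old value goes to the destination register, which is
		 * x0 here since BPF's XADD does not fetch. Both the aq and
		 * rl ordering bits are left clear.
		 */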
		emit(BPF_SIZE(code) == BPF_W ?
		     rv_amoadd_w(RV_REG_ZERO, rs, rd, 0, 0) :
		     rv_amoadd_d(RV_REG_ZERO, rs, rd, 0, 0), ctx);
		break;
	default:
		pr_err("bpf-jit: unknown opcode %02x\n", code);
		return -EINVAL;
	}

	return 0;
}

void bpf_jit_build_prologue(struct rv_jit_context *ctx)
{
	int stack_adjust = 0, store_offset, bpf_stack_adjust;

	bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, 16);
	if (bpf_stack_adjust)
		mark_fp(ctx);

	if (seen_reg(RV_REG_RA, ctx))
		stack_adjust += 8;
	stack_adjust += 8; /* RV_REG_FP */
	if (seen_reg(RV_REG_S1, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S2, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S3, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S4, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S5, ctx))
		stack_adjust += 8;
	if (seen_reg(RV_REG_S6, ctx))
		stack_adjust += 8;

	stack_adjust = round_up(stack_adjust, 16);
	stack_adjust += bpf_stack_adjust;

	store_offset = stack_adjust - 8;

	/* The first instruction always sets the tail-call-counter
	 * (TCC) register. This instruction is skipped for tail calls.
	 */
	emit(rv_addi(RV_REG_TCC, RV_REG_ZERO, MAX_TAIL_CALL_CNT), ctx);

	emit(rv_addi(RV_REG_SP, RV_REG_SP, -stack_adjust), ctx);

	if (seen_reg(RV_REG_RA, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_RA), ctx);
		store_offset -= 8;
	}
	emit(rv_sd(RV_REG_SP, store_offset, RV_REG_FP), ctx);
	store_offset -= 8;
	if (seen_reg(RV_REG_S1, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S1), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S2, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S2), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S3, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S3), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S4, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S4), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S5, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S5), ctx);
		store_offset -= 8;
	}
	if (seen_reg(RV_REG_S6, ctx)) {
		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S6), ctx);
		store_offset -= 8;
	}

	emit(rv_addi(RV_REG_FP, RV_REG_SP, stack_adjust), ctx);

	if (bpf_stack_adjust)
		emit(rv_addi(RV_REG_S5, RV_REG_SP, bpf_stack_adjust), ctx);

	/* If the program contains both calls and tail calls, RV_REG_TCC
	 * needs to be saved across calls.
	 */
	if (seen_tail_call(ctx) && seen_call(ctx))
		emit(rv_addi(RV_REG_TCC_SAVED, RV_REG_TCC, 0), ctx);

	ctx->stack_size = stack_adjust;
}
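
/* A rough picture of the frame the prologue builds, inferred from the
 * stores above (padding from the 16-byte rounding not shown):
 *
 *	fp (= old sp) ->	+----------------------+
 *				| ra        (if seen)  |
 *				| old fp               |
 *				| s1..s6    (if seen)  |
 *	s5 (BPF FP)   ->	+----------------------+
 *				| BPF program stack    |
 *	sp            ->	+----------------------+
 */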

void bpf_jit_build_epilogue(struct rv_jit_context *ctx)
{
	__build_epilogue(false, ctx);
}

void *bpf_jit_alloc_exec(unsigned long size)
{
	return __vmalloc_node_range(size, PAGE_SIZE, BPF_JIT_REGION_START,
				    BPF_JIT_REGION_END, GFP_KERNEL,
				    PAGE_KERNEL_EXEC, 0, NUMA_NO_NODE,
				    __builtin_return_address(0));
}

void bpf_jit_free_exec(void *addr)
{
	return vfree(addr);
}