Lines Matching refs:meta (drivers/net/ethernet/netronome/nfp/bpf/jit.c)

42 nfp_meta_has_prev(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta)  in nfp_meta_has_prev()  argument
44 return meta->l.prev != &nfp_prog->insns; in nfp_meta_has_prev()
626 wrp_zext(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, u8 dst) in wrp_zext() argument
628 if (meta->flags & FLAG_INSN_DO_ZEXT) in wrp_zext()
733 static int nfp_cpp_memcpy(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in nfp_cpp_memcpy() argument
735 bool descending_seq = meta->ldst_gather_len < 0; in nfp_cpp_memcpy()
736 s16 len = abs(meta->ldst_gather_len); in nfp_cpp_memcpy()
742 off = re_load_imm_any(nfp_prog, meta->insn.off, imm_b(nfp_prog)); in nfp_cpp_memcpy()
743 src_40bit_addr = meta->ptr.type == PTR_TO_MAP_VALUE; in nfp_cpp_memcpy()
744 src_base = reg_a(meta->insn.src_reg * 2); in nfp_cpp_memcpy()
748 addr40_offset(nfp_prog, meta->insn.src_reg * 2, off, &src_base, in nfp_cpp_memcpy()
765 off = re_load_imm_any(nfp_prog, meta->paired_st->off, imm_b(nfp_prog)); in nfp_cpp_memcpy()
770 reg_a(meta->paired_st->dst_reg * 2), off, len - 1, in nfp_cpp_memcpy()
775 reg_a(meta->paired_st->dst_reg * 2), off, xfer_num - 1, in nfp_cpp_memcpy()
782 reg_a(meta->paired_st->dst_reg * 2), off, in nfp_cpp_memcpy()
789 reg_a(meta->paired_st->dst_reg * 2), off, in nfp_cpp_memcpy()
796 reg_a(meta->paired_st->dst_reg * 2), off, 7, in nfp_cpp_memcpy()
799 off = re_load_imm_any(nfp_prog, meta->paired_st->off + 32, in nfp_cpp_memcpy()
802 reg_a(meta->paired_st->dst_reg * 2), off, len - 33, in nfp_cpp_memcpy()
813 reg_a(meta->paired_st->dst_reg * 2), off, in nfp_cpp_memcpy()
815 new_off = meta->paired_st->off + (xfer_num - 1) * 4; in nfp_cpp_memcpy()
818 xfer_num - 1, reg_a(meta->paired_st->dst_reg * 2), off, in nfp_cpp_memcpy()
834 else if (BPF_SIZE(meta->insn.code) != BPF_DW) in nfp_cpp_memcpy()
839 switch (BPF_SIZE(meta->insn.code)) { in nfp_cpp_memcpy()
841 wrp_reg_subpart(nfp_prog, reg_both(meta->insn.dst_reg * 2), in nfp_cpp_memcpy()
846 wrp_reg_subpart(nfp_prog, reg_both(meta->insn.dst_reg * 2), in nfp_cpp_memcpy()
850 wrp_mov(nfp_prog, reg_both(meta->insn.dst_reg * 2), in nfp_cpp_memcpy()
854 wrp_mov(nfp_prog, reg_both(meta->insn.dst_reg * 2), in nfp_cpp_memcpy()
856 wrp_mov(nfp_prog, reg_both(meta->insn.dst_reg * 2 + 1), in nfp_cpp_memcpy()
861 if (BPF_SIZE(meta->insn.code) != BPF_DW) in nfp_cpp_memcpy()
862 wrp_immed(nfp_prog, reg_both(meta->insn.dst_reg * 2 + 1), 0); in nfp_cpp_memcpy()
868 data_ld(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, swreg offset, in data_ld() argument
892 wrp_zext(nfp_prog, meta, dst_gpr); in data_ld()
898 data_ld_host_order(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in data_ld_host_order() argument
923 wrp_zext(nfp_prog, meta, dst_gpr); in data_ld_host_order()
929 data_ld_host_order_addr32(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in data_ld_host_order_addr32() argument
932 return data_ld_host_order(nfp_prog, meta, dst_gpr, reg_a(src_gpr), in data_ld_host_order_addr32()
937 data_ld_host_order_addr40(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in data_ld_host_order_addr40() argument
944 return data_ld_host_order(nfp_prog, meta, dst_gpr, rega, regb, in data_ld_host_order_addr40()
949 construct_data_ind_ld(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in construct_data_ind_ld() argument
966 return data_ld(nfp_prog, meta, imm_b(nfp_prog), 0, size); in construct_data_ind_ld()
970 construct_data_ld(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in construct_data_ld() argument
982 return data_ld(nfp_prog, meta, tmp_reg, 0, size); in construct_data_ld()
1161 mem_op_stack(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_op_stack() argument
1165 s32 off = nfp_prog->stack_frame_depth + meta->insn.off + ptr_off; in mem_op_stack()
1174 if (meta->ptr_not_const || in mem_op_stack()
1175 meta->flags & FLAG_INSN_PTR_CALLER_STACK_FRAME) { in mem_op_stack()
1180 stack_off_reg = ur_load_imm_any(nfp_prog, meta->insn.off, in mem_op_stack()
1223 nop_cnt = narrow_ld && meta->flags & FLAG_INSN_DO_ZEXT ? 2 : 3; in mem_op_stack()
1228 wrp_zext(nfp_prog, meta, gpr); in mem_op_stack()
1294 wrp_alu64_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in wrp_alu64_imm() argument
1297 const struct bpf_insn *insn = &meta->insn; in wrp_alu64_imm()
1301 meta->flags |= FLAG_INSN_SKIP_NOOP; in wrp_alu64_imm()
1312 wrp_alu64_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in wrp_alu64_reg() argument
1315 u8 dst = meta->insn.dst_reg * 2, src = meta->insn.src_reg * 2; in wrp_alu64_reg()
1325 wrp_alu32_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in wrp_alu32_imm() argument
1328 const struct bpf_insn *insn = &meta->insn; in wrp_alu32_imm()
1332 wrp_zext(nfp_prog, meta, dst); in wrp_alu32_imm()
1338 wrp_alu32_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in wrp_alu32_reg() argument
1341 u8 dst = meta->insn.dst_reg * 2, src = meta->insn.src_reg * 2; in wrp_alu32_reg()
1344 wrp_zext(nfp_prog, meta, dst); in wrp_alu32_reg()
1358 wrp_test_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in wrp_test_reg() argument
1361 const struct bpf_insn *insn = &meta->insn; in wrp_test_reg()
1365 if (is_mbpf_jmp64(meta)) in wrp_test_reg()
1386 static const struct jmp_code_map *nfp_jmp_code_get(struct nfp_insn_meta *meta) in nfp_jmp_code_get() argument
1390 op = BPF_OP(meta->insn.code) >> 4; in nfp_jmp_code_get()
1400 static int cmp_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in cmp_imm() argument
1402 const struct bpf_insn *insn = &meta->insn; in cmp_imm()
1409 code = nfp_jmp_code_get(meta); in cmp_imm()
1413 alu_op = meta->jump_neg_op ? ALU_OP_ADD : ALU_OP_SUB; in cmp_imm()
1414 carry_op = meta->jump_neg_op ? ALU_OP_ADD_C : ALU_OP_SUB_C; in cmp_imm()
1422 if (is_mbpf_jmp64(meta)) { in cmp_imm()
1437 static int cmp_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in cmp_reg() argument
1439 const struct bpf_insn *insn = &meta->insn; in cmp_reg()
1443 code = nfp_jmp_code_get(meta); in cmp_reg()
1457 if (is_mbpf_jmp64(meta)) in cmp_reg()
1501 wrp_mul(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in wrp_mul() argument
1505 const struct bpf_insn *insn = &meta->insn; in wrp_mul()
1513 lopnd_max = meta->umax_dst; in wrp_mul()
1516 ropnd_max = meta->umax_src; in wrp_mul()
1600 static int adjust_head(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in adjust_head() argument
1610 if (WARN_ON_ONCE(nfp_prog->adjust_head_location != meta->n)) in adjust_head()
1680 static int adjust_tail(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in adjust_tail() argument
1726 map_call_stack_common(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in map_call_stack_common() argument
1734 lm_off += meta->arg2.reg.var_off.value + meta->arg2.reg.off; in map_call_stack_common()
1735 load_lm_ptr = meta->arg2.var_off || lm_off; in map_call_stack_common()
1740 if (meta->func_id == BPF_FUNC_map_update_elem) in map_call_stack_common()
1743 emit_br_relo(nfp_prog, BR_UNC, BR_OFF_RELO + meta->func_id, in map_call_stack_common()
1767 nfp_get_prandom_u32(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in nfp_get_prandom_u32() argument
1779 nfp_perf_event_output(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in nfp_perf_event_output() argument
1784 ptr_type = ur_load_imm_any(nfp_prog, meta->arg1.type, imm_a(nfp_prog)); in nfp_perf_event_output()
1788 emit_br_relo(nfp_prog, BR_UNC, BR_OFF_RELO + meta->func_id, in nfp_perf_event_output()
1804 nfp_queue_select(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in nfp_queue_select() argument
1811 emit_alu(nfp_prog, reg_none(), reg_a(meta->insn.src_reg * 2), in nfp_queue_select()
1820 pv_qsel_val(nfp_prog), 0x1, reg_b(meta->insn.src_reg * 2), in nfp_queue_select()
1836 static int mov_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mov_reg64() argument
1838 const struct bpf_insn *insn = &meta->insn; in mov_reg64()
1859 static int mov_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mov_imm64() argument
1861 u64 imm = meta->insn.imm; /* sign extend */ in mov_imm64()
1863 wrp_immed(nfp_prog, reg_both(meta->insn.dst_reg * 2), imm & ~0U); in mov_imm64()
1864 wrp_immed(nfp_prog, reg_both(meta->insn.dst_reg * 2 + 1), imm >> 32); in mov_imm64()
1869 static int xor_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in xor_reg64() argument
1871 return wrp_alu64_reg(nfp_prog, meta, ALU_OP_XOR); in xor_reg64()
1874 static int xor_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in xor_imm64() argument
1876 return wrp_alu64_imm(nfp_prog, meta, ALU_OP_XOR, !meta->insn.imm); in xor_imm64()
1879 static int and_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in and_reg64() argument
1881 return wrp_alu64_reg(nfp_prog, meta, ALU_OP_AND); in and_reg64()
1884 static int and_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in and_imm64() argument
1886 return wrp_alu64_imm(nfp_prog, meta, ALU_OP_AND, !~meta->insn.imm); in and_imm64()
1889 static int or_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in or_reg64() argument
1891 return wrp_alu64_reg(nfp_prog, meta, ALU_OP_OR); in or_reg64()
1894 static int or_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in or_imm64() argument
1896 return wrp_alu64_imm(nfp_prog, meta, ALU_OP_OR, !meta->insn.imm); in or_imm64()
1899 static int add_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in add_reg64() argument
1901 const struct bpf_insn *insn = &meta->insn; in add_reg64()
1913 static int add_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in add_imm64() argument
1915 const struct bpf_insn *insn = &meta->insn; in add_imm64()
1924 static int sub_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in sub_reg64() argument
1926 const struct bpf_insn *insn = &meta->insn; in sub_reg64()
1938 static int sub_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in sub_imm64() argument
1940 const struct bpf_insn *insn = &meta->insn; in sub_imm64()
1949 static int mul_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mul_reg64() argument
1951 return wrp_mul(nfp_prog, meta, true, true); in mul_reg64()
1954 static int mul_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mul_imm64() argument
1956 return wrp_mul(nfp_prog, meta, true, false); in mul_imm64()
1959 static int div_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in div_imm64() argument
1961 const struct bpf_insn *insn = &meta->insn; in div_imm64()
1966 static int div_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in div_reg64() argument
1971 return wrp_div_imm(nfp_prog, meta->insn.dst_reg * 2, meta->umin_src); in div_reg64()
1974 static int neg_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in neg_reg64() argument
1976 const struct bpf_insn *insn = &meta->insn; in neg_reg64()
2019 static int shl_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in shl_imm64() argument
2021 const struct bpf_insn *insn = &meta->insn; in shl_imm64()
2058 static int shl_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in shl_reg64() argument
2060 const struct bpf_insn *insn = &meta->insn; in shl_reg64()
2065 umin = meta->umin_src; in shl_reg64()
2066 umax = meta->umax_src; in shl_reg64()
2133 static int shr_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in shr_imm64() argument
2135 const struct bpf_insn *insn = &meta->insn; in shr_imm64()
2170 static int shr_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in shr_reg64() argument
2172 const struct bpf_insn *insn = &meta->insn; in shr_reg64()
2177 umin = meta->umin_src; in shr_reg64()
2178 umax = meta->umax_src; in shr_reg64()
2245 static int ashr_imm64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in ashr_imm64() argument
2247 const struct bpf_insn *insn = &meta->insn; in ashr_imm64()
2287 static int ashr_reg64(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in ashr_reg64() argument
2289 const struct bpf_insn *insn = &meta->insn; in ashr_reg64()
2294 umin = meta->umin_src; in ashr_reg64()
2295 umax = meta->umax_src; in ashr_reg64()
2326 static int mov_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mov_reg() argument
2328 const struct bpf_insn *insn = &meta->insn; in mov_reg()
2336 static int mov_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mov_imm() argument
2338 const struct bpf_insn *insn = &meta->insn; in mov_imm()
2346 static int xor_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in xor_reg() argument
2348 return wrp_alu32_reg(nfp_prog, meta, ALU_OP_XOR); in xor_reg()
2351 static int xor_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in xor_imm() argument
2353 return wrp_alu32_imm(nfp_prog, meta, ALU_OP_XOR); in xor_imm()
2356 static int and_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in and_reg() argument
2358 return wrp_alu32_reg(nfp_prog, meta, ALU_OP_AND); in and_reg()
2361 static int and_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in and_imm() argument
2363 return wrp_alu32_imm(nfp_prog, meta, ALU_OP_AND); in and_imm()
2366 static int or_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in or_reg() argument
2368 return wrp_alu32_reg(nfp_prog, meta, ALU_OP_OR); in or_reg()
2371 static int or_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in or_imm() argument
2373 return wrp_alu32_imm(nfp_prog, meta, ALU_OP_OR); in or_imm()
2376 static int add_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in add_reg() argument
2378 return wrp_alu32_reg(nfp_prog, meta, ALU_OP_ADD); in add_reg()
2381 static int add_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in add_imm() argument
2383 return wrp_alu32_imm(nfp_prog, meta, ALU_OP_ADD); in add_imm()
2386 static int sub_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in sub_reg() argument
2388 return wrp_alu32_reg(nfp_prog, meta, ALU_OP_SUB); in sub_reg()
2391 static int sub_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in sub_imm() argument
2393 return wrp_alu32_imm(nfp_prog, meta, ALU_OP_SUB); in sub_imm()
2396 static int mul_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mul_reg() argument
2398 return wrp_mul(nfp_prog, meta, false, true); in mul_reg()
2401 static int mul_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mul_imm() argument
2403 return wrp_mul(nfp_prog, meta, false, false); in mul_imm()
2406 static int div_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in div_reg() argument
2408 return div_reg64(nfp_prog, meta); in div_reg()
2411 static int div_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in div_imm() argument
2413 return div_imm64(nfp_prog, meta); in div_imm()
2416 static int neg_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in neg_reg() argument
2418 u8 dst = meta->insn.dst_reg * 2; in neg_reg()
2421 wrp_zext(nfp_prog, meta, dst); in neg_reg()
2427 __ashr_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, u8 dst, in __ashr_imm() argument
2437 wrp_zext(nfp_prog, meta, dst); in __ashr_imm()
2442 static int ashr_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in ashr_reg() argument
2444 const struct bpf_insn *insn = &meta->insn; in ashr_reg()
2449 umin = meta->umin_src; in ashr_reg()
2450 umax = meta->umax_src; in ashr_reg()
2452 return __ashr_imm(nfp_prog, meta, dst, umin); in ashr_reg()
2461 wrp_zext(nfp_prog, meta, dst); in ashr_reg()
2466 static int ashr_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in ashr_imm() argument
2468 const struct bpf_insn *insn = &meta->insn; in ashr_imm()
2471 return __ashr_imm(nfp_prog, meta, dst, insn->imm); in ashr_imm()
2475 __shr_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, u8 dst, in __shr_imm() argument
2481 wrp_zext(nfp_prog, meta, dst); in __shr_imm()
2485 static int shr_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in shr_imm() argument
2487 const struct bpf_insn *insn = &meta->insn; in shr_imm()
2490 return __shr_imm(nfp_prog, meta, dst, insn->imm); in shr_imm()
2493 static int shr_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in shr_reg() argument
2495 const struct bpf_insn *insn = &meta->insn; in shr_reg()
2500 umin = meta->umin_src; in shr_reg()
2501 umax = meta->umax_src; in shr_reg()
2503 return __shr_imm(nfp_prog, meta, dst, umin); in shr_reg()
2509 wrp_zext(nfp_prog, meta, dst); in shr_reg()
2514 __shl_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, u8 dst, in __shl_imm() argument
2520 wrp_zext(nfp_prog, meta, dst); in __shl_imm()
2524 static int shl_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in shl_imm() argument
2526 const struct bpf_insn *insn = &meta->insn; in shl_imm()
2529 return __shl_imm(nfp_prog, meta, dst, insn->imm); in shl_imm()
2532 static int shl_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in shl_reg() argument
2534 const struct bpf_insn *insn = &meta->insn; in shl_reg()
2539 umin = meta->umin_src; in shl_reg()
2540 umax = meta->umax_src; in shl_reg()
2542 return __shl_imm(nfp_prog, meta, dst, umin); in shl_reg()
2546 wrp_zext(nfp_prog, meta, dst); in shl_reg()
2550 static int end_reg32(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in end_reg32() argument
2552 const struct bpf_insn *insn = &meta->insn; in end_reg32()
2579 static int imm_ld8_part2(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in imm_ld8_part2() argument
2581 struct nfp_insn_meta *prev = nfp_meta_prev(meta); in imm_ld8_part2()
2587 imm_hi = meta->insn.imm; in imm_ld8_part2()
2600 static int imm_ld8(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in imm_ld8() argument
2602 meta->double_cb = imm_ld8_part2; in imm_ld8()
2606 static int data_ld1(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in data_ld1() argument
2608 return construct_data_ld(nfp_prog, meta, meta->insn.imm, 1); in data_ld1()
2611 static int data_ld2(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in data_ld2() argument
2613 return construct_data_ld(nfp_prog, meta, meta->insn.imm, 2); in data_ld2()
2616 static int data_ld4(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in data_ld4() argument
2618 return construct_data_ld(nfp_prog, meta, meta->insn.imm, 4); in data_ld4()
2621 static int data_ind_ld1(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in data_ind_ld1() argument
2623 return construct_data_ind_ld(nfp_prog, meta, meta->insn.imm, in data_ind_ld1()
2624 meta->insn.src_reg * 2, 1); in data_ind_ld1()
2627 static int data_ind_ld2(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in data_ind_ld2() argument
2629 return construct_data_ind_ld(nfp_prog, meta, meta->insn.imm, in data_ind_ld2()
2630 meta->insn.src_reg * 2, 2); in data_ind_ld2()
2633 static int data_ind_ld4(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in data_ind_ld4() argument
2635 return construct_data_ind_ld(nfp_prog, meta, meta->insn.imm, in data_ind_ld4()
2636 meta->insn.src_reg * 2, 4); in data_ind_ld4()
2640 mem_ldx_stack(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_ldx_stack() argument
2643 return mem_op_stack(nfp_prog, meta, size, ptr_off, in mem_ldx_stack()
2644 meta->insn.dst_reg * 2, meta->insn.src_reg * 2, in mem_ldx_stack()
2648 static int mem_ldx_skb(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_ldx_skb() argument
2651 swreg dst = reg_both(meta->insn.dst_reg * 2); in mem_ldx_skb()
2653 switch (meta->insn.off) { in mem_ldx_skb()
2674 wrp_immed(nfp_prog, reg_both(meta->insn.dst_reg * 2 + 1), 0); in mem_ldx_skb()
2679 static int mem_ldx_xdp(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_ldx_xdp() argument
2682 swreg dst = reg_both(meta->insn.dst_reg * 2); in mem_ldx_xdp()
2684 switch (meta->insn.off) { in mem_ldx_xdp()
2700 wrp_immed(nfp_prog, reg_both(meta->insn.dst_reg * 2 + 1), 0); in mem_ldx_xdp()
2706 mem_ldx_data(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_ldx_data() argument
2711 tmp_reg = re_load_imm_any(nfp_prog, meta->insn.off, imm_b(nfp_prog)); in mem_ldx_data()
2713 return data_ld_host_order_addr32(nfp_prog, meta, meta->insn.src_reg * 2, in mem_ldx_data()
2714 tmp_reg, meta->insn.dst_reg * 2, size); in mem_ldx_data()
2718 mem_ldx_emem(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_ldx_emem() argument
2723 tmp_reg = re_load_imm_any(nfp_prog, meta->insn.off, imm_b(nfp_prog)); in mem_ldx_emem()
2725 return data_ld_host_order_addr40(nfp_prog, meta, meta->insn.src_reg * 2, in mem_ldx_emem()
2726 tmp_reg, meta->insn.dst_reg * 2, size); in mem_ldx_emem()
2731 struct nfp_insn_meta *meta) in mem_ldx_data_init_pktcache() argument
2733 s16 range_start = meta->pkt_cache.range_start; in mem_ldx_data_init_pktcache()
2734 s16 range_end = meta->pkt_cache.range_end; in mem_ldx_data_init_pktcache()
2740 src_base = reg_a(meta->insn.src_reg * 2); in mem_ldx_data_init_pktcache()
2757 struct nfp_insn_meta *meta, in mem_ldx_data_from_pktcache_unaligned() argument
2760 s16 range_start = meta->pkt_cache.range_start; in mem_ldx_data_from_pktcache_unaligned()
2761 s16 insn_off = meta->insn.off - range_start; in mem_ldx_data_from_pktcache_unaligned()
2763 u8 dst_gpr = meta->insn.dst_reg * 2; in mem_ldx_data_from_pktcache_unaligned()
2786 wrp_zext(nfp_prog, meta, dst_gpr); in mem_ldx_data_from_pktcache_unaligned()
2794 wrp_zext(nfp_prog, meta, dst_gpr); in mem_ldx_data_from_pktcache_unaligned()
2811 struct nfp_insn_meta *meta, in mem_ldx_data_from_pktcache_aligned() argument
2817 idx = (meta->insn.off - meta->pkt_cache.range_start) / REG_WIDTH; in mem_ldx_data_from_pktcache_aligned()
2818 dst_gpr = meta->insn.dst_reg * 2; in mem_ldx_data_from_pktcache_aligned()
2825 wrp_zext(nfp_prog, meta, dst_gpr); in mem_ldx_data_from_pktcache_aligned()
2828 wrp_zext(nfp_prog, meta, dst_gpr); in mem_ldx_data_from_pktcache_aligned()
2841 struct nfp_insn_meta *meta, unsigned int size) in mem_ldx_data_from_pktcache() argument
2843 u8 off = meta->insn.off - meta->pkt_cache.range_start; in mem_ldx_data_from_pktcache()
2846 return mem_ldx_data_from_pktcache_aligned(nfp_prog, meta, size); in mem_ldx_data_from_pktcache()
2848 return mem_ldx_data_from_pktcache_unaligned(nfp_prog, meta, size); in mem_ldx_data_from_pktcache()
2852 mem_ldx(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_ldx() argument
2855 if (meta->ldst_gather_len) in mem_ldx()
2856 return nfp_cpp_memcpy(nfp_prog, meta); in mem_ldx()
2858 if (meta->ptr.type == PTR_TO_CTX) { in mem_ldx()
2860 return mem_ldx_xdp(nfp_prog, meta, size); in mem_ldx()
2862 return mem_ldx_skb(nfp_prog, meta, size); in mem_ldx()
2865 if (meta->ptr.type == PTR_TO_PACKET) { in mem_ldx()
2866 if (meta->pkt_cache.range_end) { in mem_ldx()
2867 if (meta->pkt_cache.do_init) in mem_ldx()
2868 mem_ldx_data_init_pktcache(nfp_prog, meta); in mem_ldx()
2870 return mem_ldx_data_from_pktcache(nfp_prog, meta, size); in mem_ldx()
2872 return mem_ldx_data(nfp_prog, meta, size); in mem_ldx()
2876 if (meta->ptr.type == PTR_TO_STACK) in mem_ldx()
2877 return mem_ldx_stack(nfp_prog, meta, size, in mem_ldx()
2878 meta->ptr.off + meta->ptr.var_off.value); in mem_ldx()
2880 if (meta->ptr.type == PTR_TO_MAP_VALUE) in mem_ldx()
2881 return mem_ldx_emem(nfp_prog, meta, size); in mem_ldx()
2886 static int mem_ldx1(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_ldx1() argument
2888 return mem_ldx(nfp_prog, meta, 1); in mem_ldx1()
2891 static int mem_ldx2(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_ldx2() argument
2893 return mem_ldx(nfp_prog, meta, 2); in mem_ldx2()
2896 static int mem_ldx4(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_ldx4() argument
2898 return mem_ldx(nfp_prog, meta, 4); in mem_ldx4()
2901 static int mem_ldx8(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_ldx8() argument
2903 return mem_ldx(nfp_prog, meta, 8); in mem_ldx8()
2907 mem_st_data(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_st_data() argument
2910 u64 imm = meta->insn.imm; /* sign extend */ in mem_st_data()
2913 off_reg = re_load_imm_any(nfp_prog, meta->insn.off, imm_b(nfp_prog)); in mem_st_data()
2915 return data_st_host_order(nfp_prog, meta->insn.dst_reg * 2, off_reg, in mem_st_data()
2919 static int mem_st(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_st() argument
2922 if (meta->ptr.type == PTR_TO_PACKET) in mem_st()
2923 return mem_st_data(nfp_prog, meta, size); in mem_st()
2928 static int mem_st1(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_st1() argument
2930 return mem_st(nfp_prog, meta, 1); in mem_st1()
2933 static int mem_st2(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_st2() argument
2935 return mem_st(nfp_prog, meta, 2); in mem_st2()
2938 static int mem_st4(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_st4() argument
2940 return mem_st(nfp_prog, meta, 4); in mem_st4()
2943 static int mem_st8(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_st8() argument
2945 return mem_st(nfp_prog, meta, 8); in mem_st8()
2949 mem_stx_data(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_stx_data() argument
2954 off_reg = re_load_imm_any(nfp_prog, meta->insn.off, imm_b(nfp_prog)); in mem_stx_data()
2956 return data_stx_host_order(nfp_prog, meta->insn.dst_reg * 2, off_reg, in mem_stx_data()
2957 meta->insn.src_reg * 2, size); in mem_stx_data()
2961 mem_stx_stack(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_stx_stack() argument
2964 return mem_op_stack(nfp_prog, meta, size, ptr_off, in mem_stx_stack()
2965 meta->insn.src_reg * 2, meta->insn.dst_reg * 2, in mem_stx_stack()
2969 static int mem_stx_xdp(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_stx_xdp() argument
2971 switch (meta->insn.off) { in mem_stx_xdp()
2973 return nfp_queue_select(nfp_prog, meta); in mem_stx_xdp()
2981 mem_stx(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in mem_stx() argument
2984 if (meta->ptr.type == PTR_TO_PACKET) in mem_stx()
2985 return mem_stx_data(nfp_prog, meta, size); in mem_stx()
2987 if (meta->ptr.type == PTR_TO_STACK) in mem_stx()
2988 return mem_stx_stack(nfp_prog, meta, size, in mem_stx()
2989 meta->ptr.off + meta->ptr.var_off.value); in mem_stx()
2994 static int mem_stx1(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_stx1() argument
2996 return mem_stx(nfp_prog, meta, 1); in mem_stx1()
2999 static int mem_stx2(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_stx2() argument
3001 return mem_stx(nfp_prog, meta, 2); in mem_stx2()
3004 static int mem_stx4(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_stx4() argument
3006 if (meta->ptr.type == PTR_TO_CTX) in mem_stx4()
3008 return mem_stx_xdp(nfp_prog, meta); in mem_stx4()
3009 return mem_stx(nfp_prog, meta, 4); in mem_stx4()
3012 static int mem_stx8(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_stx8() argument
3014 return mem_stx(nfp_prog, meta, 8); in mem_stx8()
3018 mem_xadd(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, bool is64) in mem_xadd() argument
3020 u8 dst_gpr = meta->insn.dst_reg * 2; in mem_xadd()
3021 u8 src_gpr = meta->insn.src_reg * 2; in mem_xadd()
3025 off = ur_load_imm_any(nfp_prog, meta->insn.off, imm_b(nfp_prog)); in mem_xadd()
3035 if (meta->insn.off) { in mem_xadd()
3039 if (meta->xadd_maybe_16bit) { in mem_xadd()
3043 if (meta->xadd_over_16bit) in mem_xadd()
3045 if (meta->xadd_maybe_16bit && meta->xadd_over_16bit) { in mem_xadd()
3051 if (meta->xadd_maybe_16bit && meta->xadd_over_16bit) { in mem_xadd()
3059 emit_br(nfp_prog, BR_BLO, full_add, meta->insn.off ? 2 : 0); in mem_xadd()
3064 if (!meta->insn.off) { in mem_xadd()
3077 if (meta->xadd_maybe_16bit) { in mem_xadd()
3088 if (meta->xadd_over_16bit) in mem_xadd()
3096 if (meta->xadd_over_16bit) { in mem_xadd()
3112 static int mem_atomic4(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_atomic4() argument
3114 if (meta->insn.imm != BPF_ADD) in mem_atomic4()
3117 return mem_xadd(nfp_prog, meta, false); in mem_atomic4()
3120 static int mem_atomic8(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in mem_atomic8() argument
3122 if (meta->insn.imm != BPF_ADD) in mem_atomic8()
3125 return mem_xadd(nfp_prog, meta, true); in mem_atomic8()
3128 static int jump(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jump() argument
3130 emit_br(nfp_prog, BR_UNC, meta->insn.off, 0); in jump()
3135 static int jeq_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jeq_imm() argument
3137 const struct bpf_insn *insn = &meta->insn; in jeq_imm()
3164 static int jeq32_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jeq32_imm() argument
3166 const struct bpf_insn *insn = &meta->insn; in jeq32_imm()
3177 static int jset_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jset_imm() argument
3179 const struct bpf_insn *insn = &meta->insn; in jset_imm()
3190 if (is_mbpf_jmp64(meta) && imm >> 32) { in jset_imm()
3199 static int jne_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jne_imm() argument
3201 const struct bpf_insn *insn = &meta->insn; in jne_imm()
3203 bool is_jmp32 = is_mbpf_jmp32(meta); in jne_imm()
3233 static int jeq_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jeq_reg() argument
3235 const struct bpf_insn *insn = &meta->insn; in jeq_reg()
3239 if (is_mbpf_jmp64(meta)) { in jeq_reg()
3251 static int jset_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jset_reg() argument
3253 return wrp_test_reg(nfp_prog, meta, ALU_OP_AND, BR_BNE); in jset_reg()
3256 static int jne_reg(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jne_reg() argument
3258 return wrp_test_reg(nfp_prog, meta, ALU_OP_XOR, BR_BNE); in jne_reg()
3262 bpf_to_bpf_call(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in bpf_to_bpf_call() argument
3311 if (!meta->jmp_dst) { in bpf_to_bpf_call()
3315 if (nfp_prog->subprog[meta->jmp_dst->subprog_idx].needs_reg_push) { in bpf_to_bpf_call()
3323 emit_br(nfp_prog, BR_UNC, meta->insn.imm, 1); in bpf_to_bpf_call()
3341 meta->num_insns_after_br = nfp_prog_current_offset(nfp_prog); in bpf_to_bpf_call()
3342 meta->num_insns_after_br -= offset_br; in bpf_to_bpf_call()
3347 static int helper_call(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in helper_call() argument
3349 switch (meta->insn.imm) { in helper_call()
3351 return adjust_head(nfp_prog, meta); in helper_call()
3353 return adjust_tail(nfp_prog, meta); in helper_call()
3357 return map_call_stack_common(nfp_prog, meta); in helper_call()
3359 return nfp_get_prandom_u32(nfp_prog, meta); in helper_call()
3361 return nfp_perf_event_output(nfp_prog, meta); in helper_call()
3368 static int call(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in call() argument
3370 if (is_mbpf_pseudo_call(meta)) in call()
3371 return bpf_to_bpf_call(nfp_prog, meta); in call()
3373 return helper_call(nfp_prog, meta); in call()
3376 static bool nfp_is_main_function(struct nfp_insn_meta *meta) in nfp_is_main_function() argument
3378 return meta->subprog_idx == 0; in nfp_is_main_function()
3381 static int goto_out(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in goto_out() argument
3389 nfp_subprog_epilogue(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in nfp_subprog_epilogue() argument
3391 if (nfp_prog->subprog[meta->subprog_idx].needs_reg_push) { in nfp_subprog_epilogue()
3413 static int jmp_exit(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in jmp_exit() argument
3415 if (nfp_is_main_function(meta)) in jmp_exit()
3416 return goto_out(nfp_prog, meta); in jmp_exit()
3418 return nfp_subprog_epilogue(nfp_prog, meta); in jmp_exit()
3541 nfp_fixup_immed_relo(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in nfp_fixup_immed_relo() argument
3556 struct nfp_insn_meta *meta, *jmp_dst; in nfp_fixup_branches() local
3560 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_fixup_branches()
3561 if (meta->flags & FLAG_INSN_SKIP_MASK) in nfp_fixup_branches()
3563 if (!is_mbpf_jmp(meta)) in nfp_fixup_branches()
3565 if (meta->insn.code == (BPF_JMP | BPF_EXIT) && in nfp_fixup_branches()
3566 !nfp_is_main_function(meta)) in nfp_fixup_branches()
3568 if (is_mbpf_helper_call(meta)) in nfp_fixup_branches()
3571 if (list_is_last(&meta->l, &nfp_prog->insns)) in nfp_fixup_branches()
3574 br_idx = list_next_entry(meta, l)->off - 1; in nfp_fixup_branches()
3581 if (is_mbpf_pseudo_call(meta)) in nfp_fixup_branches()
3582 br_idx -= meta->num_insns_after_br; in nfp_fixup_branches()
3586 br_idx, meta->insn.code, nfp_prog->prog[br_idx]); in nfp_fixup_branches()
3590 if (meta->insn.code == (BPF_JMP | BPF_EXIT)) in nfp_fixup_branches()
3595 RELO_BR_REL && !is_mbpf_pseudo_call(meta)) in nfp_fixup_branches()
3598 if (!meta->jmp_dst) { in nfp_fixup_branches()
3603 jmp_dst = meta->jmp_dst; in nfp_fixup_branches()
3610 if (is_mbpf_pseudo_call(meta) && in nfp_fixup_branches()
3612 err = nfp_fixup_immed_relo(nfp_prog, meta, in nfp_fixup_branches()
3622 for (idx = meta->off; idx <= br_idx; idx++) { in nfp_fixup_branches()
3640 nfp_subprog_prologue(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in nfp_subprog_prologue() argument
3647 nfp_start_subprog(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta) in nfp_start_subprog() argument
3649 unsigned int depth = nfp_prog->subprog[meta->subprog_idx].stack_depth; in nfp_start_subprog()
3652 nfp_subprog_prologue(nfp_prog, meta); in nfp_start_subprog()
3655 bool nfp_is_subprog_start(struct nfp_insn_meta *meta) in nfp_is_subprog_start() argument
3657 return meta->flags & FLAG_INSN_IS_SUBPROG_START; in nfp_is_subprog_start()
3833 struct nfp_insn_meta *meta; in nfp_translate() local
3844 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_translate()
3845 instr_cb_t cb = instr_cb[meta->insn.code]; in nfp_translate()
3847 meta->off = nfp_prog_current_offset(nfp_prog); in nfp_translate()
3849 if (nfp_is_subprog_start(meta)) { in nfp_translate()
3850 nfp_start_subprog(nfp_prog, meta); in nfp_translate()
3855 if (meta->flags & FLAG_INSN_SKIP_MASK) { in nfp_translate()
3860 if (nfp_meta_has_prev(nfp_prog, meta) && in nfp_translate()
3861 nfp_meta_prev(meta)->double_cb) in nfp_translate()
3862 cb = nfp_meta_prev(meta)->double_cb; in nfp_translate()
3865 err = cb(nfp_prog, meta); in nfp_translate()
3890 struct nfp_insn_meta *meta; in nfp_bpf_opt_reg_init() local
3892 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_bpf_opt_reg_init()
3893 struct bpf_insn insn = meta->insn; in nfp_bpf_opt_reg_init()
3903 meta->flags |= FLAG_INSN_SKIP_PREC_DEPENDENT; in nfp_bpf_opt_reg_init()
3906 if (!(meta->flags & FLAG_INSN_SKIP_MASK)) in nfp_bpf_opt_reg_init()
3916 struct nfp_insn_meta *meta; in nfp_bpf_opt_neg_add_sub() local
3918 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_bpf_opt_neg_add_sub()
3919 struct bpf_insn insn = meta->insn; in nfp_bpf_opt_neg_add_sub()
3921 if (meta->flags & FLAG_INSN_SKIP_MASK) in nfp_bpf_opt_neg_add_sub()
3924 if (!is_mbpf_alu(meta) && !is_mbpf_jmp(meta)) in nfp_bpf_opt_neg_add_sub()
3931 if (is_mbpf_jmp(meta)) { in nfp_bpf_opt_neg_add_sub()
3937 meta->jump_neg_op = true; in nfp_bpf_opt_neg_add_sub()
3950 meta->insn.code = insn.code | BPF_K; in nfp_bpf_opt_neg_add_sub()
3953 meta->insn.imm = -insn.imm; in nfp_bpf_opt_neg_add_sub()
4278 struct nfp_insn_meta *meta, *range_node = NULL; in nfp_bpf_opt_pkt_cache() local
4285 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_bpf_opt_pkt_cache()
4286 if (meta->flags & FLAG_INSN_IS_JUMP_DST) in nfp_bpf_opt_pkt_cache()
4289 if (meta->flags & FLAG_INSN_SKIP_MASK) in nfp_bpf_opt_pkt_cache()
4292 insn = &meta->insn; in nfp_bpf_opt_pkt_cache()
4294 if (is_mbpf_store_pkt(meta) || in nfp_bpf_opt_pkt_cache()
4296 is_mbpf_classic_store_pkt(meta) || in nfp_bpf_opt_pkt_cache()
4297 is_mbpf_classic_load(meta)) { in nfp_bpf_opt_pkt_cache()
4302 if (!is_mbpf_load(meta)) in nfp_bpf_opt_pkt_cache()
4305 if (meta->ptr.type != PTR_TO_PACKET || meta->ldst_gather_len) { in nfp_bpf_opt_pkt_cache()
4328 if (meta->ptr.id == range_ptr_id && in nfp_bpf_opt_pkt_cache()
4329 meta->ptr.off == range_ptr_off) { in nfp_bpf_opt_pkt_cache()
4361 range_node = meta; in nfp_bpf_opt_pkt_cache()
4374 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_bpf_opt_pkt_cache()
4375 if (meta->flags & FLAG_INSN_SKIP_MASK) in nfp_bpf_opt_pkt_cache()
4378 if (is_mbpf_load_pkt(meta) && !meta->ldst_gather_len) { in nfp_bpf_opt_pkt_cache()
4379 if (meta->pkt_cache.do_init) { in nfp_bpf_opt_pkt_cache()
4380 range_start = meta->pkt_cache.range_start; in nfp_bpf_opt_pkt_cache()
4381 range_end = meta->pkt_cache.range_end; in nfp_bpf_opt_pkt_cache()
4383 meta->pkt_cache.range_start = range_start; in nfp_bpf_opt_pkt_cache()
4384 meta->pkt_cache.range_end = range_end; in nfp_bpf_opt_pkt_cache()
4493 struct nfp_insn_meta *meta; in nfp_bpf_jit_prepare() local
4496 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_bpf_jit_prepare()
4498 u64 code = meta->insn.code; in nfp_bpf_jit_prepare()
4502 if (!is_mbpf_jmp(meta)) in nfp_bpf_jit_prepare()
4506 if (is_mbpf_helper_call(meta)) in nfp_bpf_jit_prepare()
4515 dst_idx = meta->n + 1 + meta->insn.imm; in nfp_bpf_jit_prepare()
4517 dst_idx = meta->n + 1 + meta->insn.off; in nfp_bpf_jit_prepare()
4519 dst_meta = nfp_bpf_goto_meta(nfp_prog, meta, dst_idx); in nfp_bpf_jit_prepare()
4525 meta->jmp_dst = dst_meta; in nfp_bpf_jit_prepare()