Lines Matching full:emit

92 static inline void emit(const u32 insn, struct jit_ctx *ctx) in emit()
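
These hits come from the arm64 BPF JIT compiler (arch/arm64/net/bpf_jit_comp.c). The definition at line 92 is the primitive every other match calls: it appends one 32-bit AArch64 instruction to the program being generated. A minimal sketch of its body, matching recent kernels (the exact revision searched here may differ in detail):

    static inline void emit(const u32 insn, struct jit_ctx *ctx)
    {
            /* The first JIT pass runs with ctx->image == NULL purely to
             * count instructions; the second pass writes the opcode. */
            if (ctx->image != NULL)
                    ctx->image[ctx->idx] = cpu_to_le32(insn);

            ctx->idx++;
    }
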
108 emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx); in emit_a64_mov_i()
110 emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx); in emit_a64_mov_i()
112 emit(A64_MOVK(is64, reg, lo, 0), ctx); in emit_a64_mov_i()
115 emit(A64_MOVZ(is64, reg, lo, 0), ctx); in emit_a64_mov_i()
117 emit(A64_MOVK(is64, reg, hi, 16), ctx); in emit_a64_mov_i()
143 emit(A64_MOVN(1, reg, (rev_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
145 emit(A64_MOVZ(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
149 emit(A64_MOVK(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
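
The MOVN/MOVZ/MOVK hits above belong to the immediate loaders: a constant is split into 16-bit chunks, the first chunk is set with MOVZ (or MOVN when the value is mostly ones), and any remaining non-trivial chunks are patched in with MOVK. A sketch of the 32-bit variant, shaped like the code these lines come from (details may vary by kernel version); emit_a64_mov_i64() extends the same idea across up to four chunks:

    static void emit_a64_mov_i(const int is64, const int reg, const s32 val,
                               struct jit_ctx *ctx)
    {
            u16 hi = val >> 16;
            u16 lo = val & 0xffff;

            if (hi & 0x8000) {                      /* top bit set: prefer MOVN */
                    if (hi == 0xffff) {
                            emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx);
                    } else {
                            emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx);
                            if (lo != 0xffff)
                                    emit(A64_MOVK(is64, reg, lo, 0), ctx);
                    }
            } else {
                    emit(A64_MOVZ(is64, reg, lo, 0), ctx);
                    if (hi)
                            emit(A64_MOVK(is64, reg, hi, 16), ctx);
            }
    }
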
157 emit(insn, ctx); in emit_bti()
171 emit(A64_MOVN(1, reg, ~tmp & 0xffff, shift), ctx); in emit_addr_mov_i64()
175 emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx); in emit_addr_mov_i64()
184 emit(A64_BLR(tmp), ctx); in emit_call()
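
The lone hit in emit_call() is the second half of a two-step indirect call: the 64-bit target address is first materialized into a temporary register (via the address loader above, lines 171/175), then called with BLR. A sketch, assuming the surrounding code looks like recent kernels:

    static void emit_call(u64 target, struct jit_ctx *ctx)
    {
            u8 tmp = bpf2a64[TMP_REG_1];

            emit_addr_mov_i64(tmp, target, ctx);    /* tmp = call target */
            emit(A64_BLR(tmp), ctx);                /* branch with link  */
    }
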
333 emit(A64_MOV(1, A64_R(9), A64_LR), ctx); in build_prologue()
334 emit(A64_NOP, ctx); in build_prologue()
338 emit(A64_PACIASP, ctx); in build_prologue()
341 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx); in build_prologue()
342 emit(A64_MOV(1, A64_FP, A64_SP), ctx); in build_prologue()
345 emit(A64_PUSH(r6, r7, A64_SP), ctx); in build_prologue()
346 emit(A64_PUSH(r8, r9, A64_SP), ctx); in build_prologue()
347 emit(A64_PUSH(fp, tcc, A64_SP), ctx); in build_prologue()
348 emit(A64_PUSH(fpb, A64_R(28), A64_SP), ctx); in build_prologue()
351 emit(A64_MOV(1, fp, A64_SP), ctx); in build_prologue()
355 emit(A64_MOVZ(1, tcc, 0, 0), ctx); in build_prologue()
368 emit(A64_SUB_I(1, fpb, fp, ctx->fpb_offset), ctx); in build_prologue()
374 emit(A64_SUB_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in build_prologue()
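
The PUSH pairs in build_prologue() save the AArch64 frame pointer/link register and then the registers backing BPF's callee-saved state. The names r6-r9, fp, tcc and fpb are locals taken from the file's bpf2a64[] register map; an abbreviated excerpt of that map as it appears in recent kernels (a sketch, not the full table):

    static const int bpf2a64[] = {
            [BPF_REG_0] = A64_R(7),         /* return value                 */
            /* ... BPF argument registers r1-r5 map to x0-x4 ...            */
            [BPF_REG_6] = A64_R(19),        /* r6-r9: callee saved          */
            [BPF_REG_7] = A64_R(20),
            [BPF_REG_8] = A64_R(21),
            [BPF_REG_9] = A64_R(22),
            [BPF_REG_FP] = A64_R(25),       /* fp: read-only frame pointer  */
            [TCALL_CNT] = A64_R(26),        /* tcc: tail-call counter       */
            [FP_BOTTOM] = A64_R(27),        /* fpb: base for negative stack offsets */
    };

After the pushes, fp (x25) is pointed at the new frame (line 351), the tail-call counter is zeroed (line 355), fpb is set below the frame (line 368), and SP is dropped by ctx->stack_size to carve out the BPF stack (line 374).
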
398 emit(A64_LDR32(tmp, r2, tmp), ctx); in emit_bpf_tail_call()
399 emit(A64_MOV(0, r3, r3), ctx); in emit_bpf_tail_call()
400 emit(A64_CMP(0, r3, tmp), ctx); in emit_bpf_tail_call()
401 emit(A64_B_(A64_COND_CS, jmp_offset), ctx); in emit_bpf_tail_call()
409 emit(A64_CMP(1, tcc, tmp), ctx); in emit_bpf_tail_call()
410 emit(A64_B_(A64_COND_CS, jmp_offset), ctx); in emit_bpf_tail_call()
411 emit(A64_ADD_I(1, tcc, tcc, 1), ctx); in emit_bpf_tail_call()
419 emit(A64_ADD(1, tmp, r2, tmp), ctx); in emit_bpf_tail_call()
420 emit(A64_LSL(1, prg, r3, 3), ctx); in emit_bpf_tail_call()
421 emit(A64_LDR64(prg, tmp, prg), ctx); in emit_bpf_tail_call()
422 emit(A64_CBZ(1, prg, jmp_offset), ctx); in emit_bpf_tail_call()
427 emit(A64_LDR64(tmp, prg, tmp), ctx); in emit_bpf_tail_call()
428 emit(A64_ADD_I(1, tmp, tmp, sizeof(u32) * PROLOGUE_OFFSET), ctx); in emit_bpf_tail_call()
429 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in emit_bpf_tail_call()
430 emit(A64_BR(tmp), ctx); in emit_bpf_tail_call()
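
Read as a whole, the emit_bpf_tail_call() sequence encodes the standard BPF tail-call contract; the kernel source carries essentially this pseudo-code in its comments (paraphrased here, where "out" means fall through to the next BPF instruction):

    if (index >= array->map.max_entries)            /* lines 398-401 */
            goto out;
    if (tail_call_cnt >= MAX_TAIL_CALL_CNT)         /* lines 409-410 */
            goto out;
    tail_call_cnt++;                                /* line 411      */
    prog = array->ptrs[index];                      /* lines 419-421 */
    if (prog == NULL)                               /* line 422      */
            goto out;
    goto *(prog->bpf_func + prologue_offset);       /* lines 427-430 */

The final lines add sizeof(u32) * PROLOGUE_OFFSET to the target address so the callee's prologue is skipped, and restore SP by ctx->stack_size (line 429) before the BR, since the target program sets up its own BPF stack.
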
461 emit(A64_ADD(1, tmp, tmp, dst), ctx); in emit_lse_atomic()
468 emit(A64_STADD(isdw, reg, src), ctx); in emit_lse_atomic()
471 emit(A64_MVN(isdw, tmp2, src), ctx); in emit_lse_atomic()
472 emit(A64_STCLR(isdw, reg, tmp2), ctx); in emit_lse_atomic()
475 emit(A64_STSET(isdw, reg, src), ctx); in emit_lse_atomic()
478 emit(A64_STEOR(isdw, reg, src), ctx); in emit_lse_atomic()
482 emit(A64_LDADDAL(isdw, src, reg, src), ctx); in emit_lse_atomic()
485 emit(A64_MVN(isdw, tmp2, src), ctx); in emit_lse_atomic()
486 emit(A64_LDCLRAL(isdw, src, reg, tmp2), ctx); in emit_lse_atomic()
489 emit(A64_LDSETAL(isdw, src, reg, src), ctx); in emit_lse_atomic()
492 emit(A64_LDEORAL(isdw, src, reg, src), ctx); in emit_lse_atomic()
496 emit(A64_SWPAL(isdw, src, reg, src), ctx); in emit_lse_atomic()
500 emit(A64_CASAL(isdw, src, reg, bpf2a64[BPF_REG_0]), ctx); in emit_lse_atomic()
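
emit_lse_atomic() maps each BPF atomic op onto a single ARMv8.1 LSE instruction: STADD/STSET/STEOR for the fetch-less add/or/xor, the LD*AL forms when BPF_FETCH is set, SWPAL for BPF_XCHG and CASAL for BPF_CMPXCHG. The one detour is BPF_AND: LSE has no atomic AND, so the mask is inverted with MVN and the clear instruction (STCLR/LDCLRAL) is used instead, since AND-ing with a mask is the same as clearing the bits of its complement. A trivial user-space check of that identity (plain C, not kernel code):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t a = 0xdeadbeefcafef00dULL, b = a;
            uint64_t mask = 0x00ff00ff00ff00ffULL;
            uint64_t not_mask = ~mask;              /* what MVN computes    */

            a &= mask;                              /* semantics of BPF_AND */
            b &= ~not_mask;                         /* what STCLR performs  */

            printf("%d\n", a == b);                 /* prints 1             */
            return 0;
    }
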
535 emit(A64_ADD(1, tmp, tmp, dst), ctx); in emit_ll_sc_atomic()
542 emit(A64_LDXR(isdw, tmp2, reg), ctx); in emit_ll_sc_atomic()
544 emit(A64_ADD(isdw, tmp2, tmp2, src), ctx); in emit_ll_sc_atomic()
546 emit(A64_AND(isdw, tmp2, tmp2, src), ctx); in emit_ll_sc_atomic()
548 emit(A64_ORR(isdw, tmp2, tmp2, src), ctx); in emit_ll_sc_atomic()
550 emit(A64_EOR(isdw, tmp2, tmp2, src), ctx); in emit_ll_sc_atomic()
551 emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx); in emit_ll_sc_atomic()
554 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx); in emit_ll_sc_atomic()
562 emit(A64_MOV(isdw, ax, src), ctx); in emit_ll_sc_atomic()
563 emit(A64_LDXR(isdw, src, reg), ctx); in emit_ll_sc_atomic()
565 emit(A64_ADD(isdw, tmp2, src, ax), ctx); in emit_ll_sc_atomic()
567 emit(A64_AND(isdw, tmp2, src, ax), ctx); in emit_ll_sc_atomic()
569 emit(A64_ORR(isdw, tmp2, src, ax), ctx); in emit_ll_sc_atomic()
571 emit(A64_EOR(isdw, tmp2, src, ax), ctx); in emit_ll_sc_atomic()
572 emit(A64_STLXR(isdw, tmp2, reg, tmp3), ctx); in emit_ll_sc_atomic()
575 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx); in emit_ll_sc_atomic()
576 emit(A64_DMB_ISH, ctx); in emit_ll_sc_atomic()
579 emit(A64_MOV(isdw, tmp2, src), ctx); in emit_ll_sc_atomic()
580 emit(A64_LDXR(isdw, src, reg), ctx); in emit_ll_sc_atomic()
581 emit(A64_STLXR(isdw, tmp2, reg, tmp3), ctx); in emit_ll_sc_atomic()
584 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx); in emit_ll_sc_atomic()
585 emit(A64_DMB_ISH, ctx); in emit_ll_sc_atomic()
590 emit(A64_MOV(isdw, tmp2, r0), ctx); in emit_ll_sc_atomic()
591 emit(A64_LDXR(isdw, r0, reg), ctx); in emit_ll_sc_atomic()
592 emit(A64_EOR(isdw, tmp3, r0, tmp2), ctx); in emit_ll_sc_atomic()
595 emit(A64_CBNZ(isdw, tmp3, jmp_offset), ctx); in emit_ll_sc_atomic()
596 emit(A64_STLXR(isdw, src, reg, tmp3), ctx); in emit_ll_sc_atomic()
599 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx); in emit_ll_sc_atomic()
600 emit(A64_DMB_ISH, ctx); in emit_ll_sc_atomic()
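
emit_ll_sc_atomic() is the fallback for cores without LSE: each operation becomes a load-exclusive / modify / store-exclusive loop, with the CBNZ hits above retrying whenever the store-exclusive fails (the exclusive monitor was lost), and A64_DMB_ISH providing the full barrier for the ordered variants (BPF_FETCH, XCHG, CMPXCHG). This is the same lowering a compiler produces for the C11 atomics when LSE is unavailable; for instance (user-space C, assuming GCC/Clang with -march=armv8-a -mno-outline-atomics):

    #include <stdint.h>

    /* Compiles to the LDXR / ADD / STXR / CBNZ retry loop the JIT emits by hand. */
    uint64_t fetch_add64(uint64_t *p, uint64_t v)
    {
            return __atomic_fetch_add(p, v, __ATOMIC_RELAXED);
    }
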
645 emit(A64_NOP, ctx); in build_plt()
649 emit(A64_LDR64LIT(tmp, 2 * AARCH64_INSN_SIZE), ctx); in build_plt()
650 emit(A64_BR(tmp), ctx); in build_plt()
667 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in build_epilogue()
670 emit(A64_POP(fpb, A64_R(28), A64_SP), ctx); in build_epilogue()
672 emit(A64_POP(fp, A64_R(26), A64_SP), ctx); in build_epilogue()
675 emit(A64_POP(r8, r9, A64_SP), ctx); in build_epilogue()
676 emit(A64_POP(r6, r7, A64_SP), ctx); in build_epilogue()
679 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx); in build_epilogue()
682 emit(A64_MOV(1, A64_R(0), r0), ctx); in build_epilogue()
686 emit(A64_AUTIASP, ctx); in build_epilogue()
688 emit(A64_RET(A64_LR), ctx); in build_epilogue()
791 emit(A64_MOV(is64, dst, src), ctx); in build_insn()
794 emit(A64_SXTB(is64, dst, src), ctx); in build_insn()
797 emit(A64_SXTH(is64, dst, src), ctx); in build_insn()
800 emit(A64_SXTW(is64, dst, src), ctx); in build_insn()
807 emit(A64_ADD(is64, dst, dst, src), ctx); in build_insn()
811 emit(A64_SUB(is64, dst, dst, src), ctx); in build_insn()
815 emit(A64_AND(is64, dst, dst, src), ctx); in build_insn()
819 emit(A64_ORR(is64, dst, dst, src), ctx); in build_insn()
823 emit(A64_EOR(is64, dst, dst, src), ctx); in build_insn()
827 emit(A64_MUL(is64, dst, dst, src), ctx); in build_insn()
832 emit(A64_UDIV(is64, dst, dst, src), ctx); in build_insn()
834 emit(A64_SDIV(is64, dst, dst, src), ctx); in build_insn()
839 emit(A64_UDIV(is64, tmp, dst, src), ctx); in build_insn()
841 emit(A64_SDIV(is64, tmp, dst, src), ctx); in build_insn()
842 emit(A64_MSUB(is64, dst, dst, tmp, src), ctx); in build_insn()
846 emit(A64_LSLV(is64, dst, dst, src), ctx); in build_insn()
850 emit(A64_LSRV(is64, dst, dst, src), ctx); in build_insn()
854 emit(A64_ASRV(is64, dst, dst, src), ctx); in build_insn()
859 emit(A64_NEG(is64, dst, dst), ctx); in build_insn()
874 emit(A64_REV16(is64, dst, dst), ctx); in build_insn()
876 emit(A64_UXTH(is64, dst, dst), ctx); in build_insn()
879 emit(A64_REV32(0, dst, dst), ctx); in build_insn()
883 emit(A64_REV64(dst, dst), ctx); in build_insn()
891 emit(A64_UXTH(is64, dst, dst), ctx); in build_insn()
895 emit(A64_UXTW(is64, dst, dst), ctx); in build_insn()
911 emit(A64_ADD_I(is64, dst, dst, imm), ctx); in build_insn()
913 emit(A64_SUB_I(is64, dst, dst, -imm), ctx); in build_insn()
916 emit(A64_ADD(is64, dst, dst, tmp), ctx); in build_insn()
922 emit(A64_SUB_I(is64, dst, dst, imm), ctx); in build_insn()
924 emit(A64_ADD_I(is64, dst, dst, -imm), ctx); in build_insn()
927 emit(A64_SUB(is64, dst, dst, tmp), ctx); in build_insn()
934 emit(a64_insn, ctx); in build_insn()
937 emit(A64_AND(is64, dst, dst, tmp), ctx); in build_insn()
944 emit(a64_insn, ctx); in build_insn()
947 emit(A64_ORR(is64, dst, dst, tmp), ctx); in build_insn()
954 emit(a64_insn, ctx); in build_insn()
957 emit(A64_EOR(is64, dst, dst, tmp), ctx); in build_insn()
963 emit(A64_MUL(is64, dst, dst, tmp), ctx); in build_insn()
969 emit(A64_UDIV(is64, dst, dst, tmp), ctx); in build_insn()
971 emit(A64_SDIV(is64, dst, dst, tmp), ctx); in build_insn()
977 emit(A64_UDIV(is64, tmp, dst, tmp2), ctx); in build_insn()
979 emit(A64_SDIV(is64, tmp, dst, tmp2), ctx); in build_insn()
980 emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx); in build_insn()
984 emit(A64_LSL(is64, dst, dst, imm), ctx); in build_insn()
988 emit(A64_LSR(is64, dst, dst, imm), ctx); in build_insn()
992 emit(A64_ASR(is64, dst, dst, imm), ctx); in build_insn()
1003 emit(A64_B(jmp_offset), ctx); in build_insn()
1026 emit(A64_CMP(is64, dst, src), ctx); in build_insn()
1065 emit(A64_B_(jmp_cond, jmp_offset), ctx); in build_insn()
1069 emit(A64_TST(is64, dst, src), ctx); in build_insn()
1093 emit(A64_CMP_I(is64, dst, imm), ctx); in build_insn()
1095 emit(A64_CMN_I(is64, dst, -imm), ctx); in build_insn()
1098 emit(A64_CMP(is64, dst, tmp), ctx); in build_insn()
1105 emit(a64_insn, ctx); in build_insn()
1108 emit(A64_TST(is64, dst, tmp), ctx); in build_insn()
1123 emit(A64_MOV(1, r0, A64_R(0)), ctx); in build_insn()
1139 emit(A64_B(jmp_offset), ctx); in build_insn()
1186 emit(A64_LDRSWI(dst, src_adj, off_adj), ctx); in build_insn()
1188 emit(A64_LDR32I(dst, src_adj, off_adj), ctx); in build_insn()
1192 emit(A64_LDRSW(dst, src, tmp), ctx); in build_insn()
1194 emit(A64_LDR32(dst, src, tmp), ctx); in build_insn()
1200 emit(A64_LDRSHI(dst, src_adj, off_adj), ctx); in build_insn()
1202 emit(A64_LDRHI(dst, src_adj, off_adj), ctx); in build_insn()
1206 emit(A64_LDRSH(dst, src, tmp), ctx); in build_insn()
1208 emit(A64_LDRH(dst, src, tmp), ctx); in build_insn()
1214 emit(A64_LDRSBI(dst, src_adj, off_adj), ctx); in build_insn()
1216 emit(A64_LDRBI(dst, src_adj, off_adj), ctx); in build_insn()
1220 emit(A64_LDRSB(dst, src, tmp), ctx); in build_insn()
1222 emit(A64_LDRB(dst, src, tmp), ctx); in build_insn()
1227 emit(A64_LDR64I(dst, src_adj, off_adj), ctx); in build_insn()
1230 emit(A64_LDR64(dst, src, tmp), ctx); in build_insn()
1270 emit(A64_STR32I(tmp, dst_adj, off_adj), ctx); in build_insn()
1273 emit(A64_STR32(tmp, dst, tmp2), ctx); in build_insn()
1278 emit(A64_STRHI(tmp, dst_adj, off_adj), ctx); in build_insn()
1281 emit(A64_STRH(tmp, dst, tmp2), ctx); in build_insn()
1286 emit(A64_STRBI(tmp, dst_adj, off_adj), ctx); in build_insn()
1289 emit(A64_STRB(tmp, dst, tmp2), ctx); in build_insn()
1294 emit(A64_STR64I(tmp, dst_adj, off_adj), ctx); in build_insn()
1297 emit(A64_STR64(tmp, dst, tmp2), ctx); in build_insn()
1318 emit(A64_STR32I(src, dst_adj, off_adj), ctx); in build_insn()
1321 emit(A64_STR32(src, dst, tmp), ctx); in build_insn()
1326 emit(A64_STRHI(src, dst_adj, off_adj), ctx); in build_insn()
1329 emit(A64_STRH(src, dst, tmp), ctx); in build_insn()
1334 emit(A64_STRBI(src, dst_adj, off_adj), ctx); in build_insn()
1337 emit(A64_STRB(src, dst, tmp), ctx); in build_insn()
1342 emit(A64_STR64I(src, dst_adj, off_adj), ctx); in build_insn()
1345 emit(A64_STR64(src, dst, tmp), ctx); in build_insn()
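
A recurring pattern in the build_insn() hits: ALU operations with an immediate first try to encode the constant directly in the instruction, then try the negated form, and only then fall back to loading the constant into a temporary register. A sketch of that three-way choice for BPF ADD with an immediate, shaped like the code around lines 911-916 (the exact structure may differ between versions):

    case BPF_ALU | BPF_ADD | BPF_K:
    case BPF_ALU64 | BPF_ADD | BPF_K:
            if (is_addsub_imm(imm)) {
                    /* fits the 12-bit (optionally shifted) add immediate */
                    emit(A64_ADD_I(is64, dst, dst, imm), ctx);
            } else if (is_addsub_imm(-imm)) {
                    /* negative constant: encode as a subtract instead */
                    emit(A64_SUB_I(is64, dst, dst, -imm), ctx);
            } else {
                    /* otherwise materialize it and use the register form */
                    emit_a64_mov_i(is64, tmp, imm, ctx);
                    emit(A64_ADD(is64, dst, dst, tmp), ctx);
            }
            break;

The AND/OR/XOR immediate cases (lines 934-957) follow the same idea: the pre-built a64_insn is emitted when the constant encodes as a valid bitmask immediate, otherwise the temporary-register fallback is used.
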
1722 emit(A64_STR64I(A64_ZR, A64_SP, run_ctx_off + cookie_off), ctx); in invoke_bpf_prog()
1725 emit(A64_STR64I(A64_R(10), A64_SP, run_ctx_off + cookie_off), in invoke_bpf_prog()
1735 emit(A64_MOV(1, A64_R(0), A64_R(19)), ctx); in invoke_bpf_prog()
1737 emit(A64_ADD_I(1, A64_R(1), A64_SP, run_ctx_off), ctx); in invoke_bpf_prog()
1742 emit(A64_MOV(1, A64_R(20), A64_R(0)), ctx); in invoke_bpf_prog()
1748 emit(A64_NOP, ctx); in invoke_bpf_prog()
1750 emit(A64_ADD_I(1, A64_R(0), A64_SP, args_off), ctx); in invoke_bpf_prog()
1757 emit(A64_STR64I(A64_R(0), A64_SP, retval_off), ctx); in invoke_bpf_prog()
1765 emit(A64_MOV(1, A64_R(0), A64_R(19)), ctx); in invoke_bpf_prog()
1767 emit(A64_MOV(1, A64_R(1), A64_R(20)), ctx); in invoke_bpf_prog()
1769 emit(A64_ADD_I(1, A64_R(2), A64_SP, run_ctx_off), ctx); in invoke_bpf_prog()
1783 emit(A64_STR64I(A64_ZR, A64_SP, retval_off), ctx); in invoke_bpf_mod_ret()
1790 emit(A64_LDR64I(A64_R(10), A64_SP, retval_off), ctx); in invoke_bpf_mod_ret()
1795 emit(A64_NOP, ctx); in invoke_bpf_mod_ret()
1804 emit(A64_STR64I(i, A64_SP, args_off), ctx); in save_args()
1814 emit(A64_LDR64I(i, A64_SP, args_off), ctx); in restore_args()
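
The save_args()/restore_args() hits are single emit() calls inside short loops that spill the traced function's argument registers (x0 upward) into the trampoline frame and reload them before the original function is called; roughly:

    static void save_args(struct jit_ctx *ctx, int args_off, int nregs)
    {
            int i;

            for (i = 0; i < nregs; i++) {
                    emit(A64_STR64I(i, A64_SP, args_off), ctx);
                    args_off += 8;
            }
    }

    static void restore_args(struct jit_ctx *ctx, int args_off, int nregs)
    {
            int i;

            for (i = 0; i < nregs; i++) {
                    emit(A64_LDR64I(i, A64_SP, args_off), ctx);
                    args_off += 8;
            }
    }
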
1926 emit(A64_PUSH(A64_FP, A64_R(9), A64_SP), ctx); in prepare_trampoline()
1927 emit(A64_MOV(1, A64_FP, A64_SP), ctx); in prepare_trampoline()
1931 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx); in prepare_trampoline()
1932 emit(A64_MOV(1, A64_FP, A64_SP), ctx); in prepare_trampoline()
1935 emit(A64_SUB_I(1, A64_SP, A64_SP, stack_size), ctx); in prepare_trampoline()
1940 emit(A64_STR64I(A64_R(10), A64_SP, ip_off), ctx); in prepare_trampoline()
1944 emit(A64_MOVZ(1, A64_R(10), nregs, 0), ctx); in prepare_trampoline()
1945 emit(A64_STR64I(A64_R(10), A64_SP, nregs_off), ctx); in prepare_trampoline()
1951 emit(A64_STR64I(A64_R(19), A64_SP, regs_off), ctx); in prepare_trampoline()
1952 emit(A64_STR64I(A64_R(20), A64_SP, regs_off + 8), ctx); in prepare_trampoline()
1977 emit(A64_LDR64I(A64_R(10), A64_SP, retaddr_off), ctx); in prepare_trampoline()
1978 emit(A64_ADR(A64_LR, AARCH64_INSN_SIZE * 2), ctx); in prepare_trampoline()
1979 emit(A64_RET(A64_R(10)), ctx); in prepare_trampoline()
1981 emit(A64_STR64I(A64_R(0), A64_SP, retval_off), ctx); in prepare_trampoline()
1984 emit(A64_NOP, ctx); in prepare_trampoline()
2007 emit(A64_LDR64I(A64_R(19), A64_SP, regs_off), ctx); in prepare_trampoline()
2008 emit(A64_LDR64I(A64_R(20), A64_SP, regs_off + 8), ctx); in prepare_trampoline()
2011 emit(A64_LDR64I(A64_R(0), A64_SP, retval_off), ctx); in prepare_trampoline()
2014 emit(A64_MOV(1, A64_SP, A64_FP), ctx); in prepare_trampoline()
2017 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx); in prepare_trampoline()
2018 emit(A64_RET(A64_LR), ctx); in prepare_trampoline()
2021 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx); in prepare_trampoline()
2022 emit(A64_POP(A64_FP, A64_R(9), A64_SP), ctx); in prepare_trampoline()
2026 emit(A64_MOV(1, A64_LR, A64_R(9)), ctx); in prepare_trampoline()
2027 emit(A64_RET(A64_R(9)), ctx); in prepare_trampoline()
2030 emit(A64_MOV(1, A64_R(10), A64_LR), ctx); in prepare_trampoline()
2031 emit(A64_MOV(1, A64_LR, A64_R(9)), ctx); in prepare_trampoline()
2032 emit(A64_RET(A64_R(10)), ctx); in prepare_trampoline()
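
Worth calling out from the prepare_trampoline() hits: lines 1977-1979 are the trampoline's call into the original (patched) function. The saved return address is loaded into x10, ADR points LR two instructions ahead so the original function returns straight back into the trampoline, and RET x10 branches to it. With the surrounding context sketched in (assuming the code matches recent kernels):

    if (flags & BPF_TRAMP_F_CALL_ORIG) {
            restore_args(ctx, args_off, nregs);
            /* call the original function */
            emit(A64_LDR64I(A64_R(10), A64_SP, retaddr_off), ctx);  /* x10 = resume address in callee */
            emit(A64_ADR(A64_LR, AARCH64_INSN_SIZE * 2), ctx);      /* lr  = instruction after the RET */
            emit(A64_RET(A64_R(10)), ctx);                          /* "return" into the callee        */
            /* store its return value for the fexit programs (line 1981) */
            emit(A64_STR64I(A64_R(0), A64_SP, retval_off), ctx);
            /* line 1984: nop reserved as a later patch site */
            emit(A64_NOP, ctx);
    }
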