/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * bpf_jit.h: BPF JIT compiler for PPC
 *
 * Copyright 2011 Matt Evans <matt@ozlabs.org>, IBM Corporation
 *	     2016 Naveen N. Rao <naveen.n.rao@linux.vnet.ibm.com>
 */
#ifndef _BPF_JIT_H
#define _BPF_JIT_H

#ifndef __ASSEMBLY__

#include <asm/types.h>
#include <asm/ppc-opcode.h>

#ifdef PPC64_ELF_ABI_v1
#define FUNCTION_DESCR_SIZE	24
#else
#define FUNCTION_DESCR_SIZE	0
#endif

#define PLANT_INSTR(d, idx, instr)					      \
	do { if (d) { (d)[idx] = instr; } idx++; } while (0)
#define EMIT(instr)		PLANT_INSTR(image, ctx->idx, instr)

/* Long jump; (unconditional 'branch') */
#define PPC_JMP(dest)		EMIT(PPC_INST_BRANCH |			      \
				     (((dest) - (ctx->idx * 4)) & 0x03fffffc))
/* "cond" here covers BO:BI fields. */
#define PPC_BCC_SHORT(cond, dest)	EMIT(PPC_INST_BRANCH_COND |	      \
					     (((cond) & 0x3ff) << 16) |	      \
					     (((dest) - (ctx->idx * 4)) &     \
					      0xfffc))
/* Sign-extended 32-bit immediate load */
#define PPC_LI32(d, i)		do {					      \
		if ((int)(uintptr_t)(i) >= -32768 &&			      \
				(int)(uintptr_t)(i) < 32768)		      \
			EMIT(PPC_RAW_LI(d, i));				      \
		else {							      \
			EMIT(PPC_RAW_LIS(d, IMM_H(i)));			      \
			if (IMM_L(i))					      \
				EMIT(PPC_RAW_ORI(d, d, IMM_L(i)));	      \
		} } while(0)

#define PPC_LI64(d, i)		do {					      \
		if ((long)(i) >= -2147483648 &&				      \
				(long)(i) < 2147483648)			      \
			PPC_LI32(d, i);					      \
		else {							      \
			if (!((uintptr_t)(i) & 0xffff800000000000ULL))	      \
				EMIT(PPC_RAW_LI(d, ((uintptr_t)(i) >> 32) &   \
						0xffff));		      \
			else {						      \
				EMIT(PPC_RAW_LIS(d, ((uintptr_t)(i) >> 48))); \
				if ((uintptr_t)(i) & 0x0000ffff00000000ULL)   \
					EMIT(PPC_RAW_ORI(d, d,		      \
					  ((uintptr_t)(i) >> 32) & 0xffff));  \
			}						      \
			EMIT(PPC_RAW_SLDI(d, d, 32));			      \
			if ((uintptr_t)(i) & 0x00000000ffff0000ULL)	      \
				EMIT(PPC_RAW_ORIS(d, d,			      \
					((uintptr_t)(i) >> 16) & 0xffff));    \
			if ((uintptr_t)(i) & 0x000000000000ffffULL)	      \
				EMIT(PPC_RAW_ORI(d, d, (uintptr_t)(i) &       \
							0xffff));	      \
		} } while (0)

#ifdef CONFIG_PPC64
#define PPC_FUNC_ADDR(d,i) do { PPC_LI64(d, i); } while(0)
#else
#define PPC_FUNC_ADDR(d,i) do { PPC_LI32(d, i); } while(0)
#endif

static inline bool is_nearbranch(int offset)
{
	return (offset < 32768) && (offset >= -32768);
}

/*
 * The fly in the ointment of code size changing from pass to pass is
 * avoided by padding the short branch case with a NOP. If code size differs
 * with different branch reaches we will have the issue of code moving from
 * one pass to the next and will need a few passes to converge on a stable
 * state.
 */
#define PPC_BCC(cond, dest)	do {					      \
		if (is_nearbranch((dest) - (ctx->idx * 4))) {		      \
			PPC_BCC_SHORT(cond, dest);			      \
			EMIT(PPC_RAW_NOP());				      \
		} else {						      \
			/* Flip the 'T or F' bit to invert comparison */      \
			PPC_BCC_SHORT(cond ^ COND_CMP_TRUE, (ctx->idx+2)*4);  \
			PPC_JMP(dest);					      \
		} } while(0)

/* To create a branch condition, select a bit of cr0... */
#define CR0_LT		0
#define CR0_GT		1
#define CR0_EQ		2
/* ...and modify BO[3] */
#define COND_CMP_TRUE	0x100
#define COND_CMP_FALSE	0x000
/* Together, they make all required comparisons: */
#define COND_GT		(CR0_GT | COND_CMP_TRUE)
#define COND_GE		(CR0_LT | COND_CMP_FALSE)
#define COND_EQ		(CR0_EQ | COND_CMP_TRUE)
#define COND_NE		(CR0_EQ | COND_CMP_FALSE)
#define COND_LT		(CR0_LT | COND_CMP_TRUE)
#define COND_LE		(CR0_GT | COND_CMP_FALSE)

#endif

#endif
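
/*
 * Illustrative sketch (not part of the original header): a stand-alone,
 * user-space mirror of the PPC_LI64 decision tree above.  It prints the
 * mnemonic sequence the macro would emit for a given 64-bit immediate,
 * assuming the same li/lis/ori/sldi/oris decomposition.  The guard macro
 * LI64_DEMO_STANDALONE and the function li64_demo() are hypothetical
 * names used only for this example; build the guarded code as its own
 * translation unit with that macro defined if you want to run it.
 */
#ifdef LI64_DEMO_STANDALONE
#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>

static void li64_demo(uint64_t i)
{
	printf("PPC_LI64(r3, 0x%016" PRIx64 "):\n", i);

	if ((int64_t)i >= -2147483648LL && (int64_t)i < 2147483648LL) {
		/* Value fits in sign-extended 32 bits: PPC_LI32 path. */
		if ((int32_t)i >= -32768 && (int32_t)i < 32768) {
			printf("\tli   r3, %d\n", (int32_t)i);
		} else {
			printf("\tlis  r3, 0x%x\n", (unsigned int)((i >> 16) & 0xffff));
			if (i & 0xffff)
				printf("\tori  r3, r3, 0x%x\n", (unsigned int)(i & 0xffff));
		}
		return;
	}

	/* Materialize the upper 32 bits first... */
	if (!(i & 0xffff800000000000ULL)) {
		printf("\tli   r3, 0x%x\n", (unsigned int)((i >> 32) & 0xffff));
	} else {
		printf("\tlis  r3, 0x%x\n", (unsigned int)(i >> 48));
		if (i & 0x0000ffff00000000ULL)
			printf("\tori  r3, r3, 0x%x\n", (unsigned int)((i >> 32) & 0xffff));
	}
	/* ...shift them into place, then OR in the lower halfwords. */
	printf("\tsldi r3, r3, 32\n");
	if (i & 0x00000000ffff0000ULL)
		printf("\toris r3, r3, 0x%x\n", (unsigned int)((i >> 16) & 0xffff));
	if (i & 0x000000000000ffffULL)
		printf("\tori  r3, r3, 0x%x\n", (unsigned int)(i & 0xffff));
}

int main(void)
{
	li64_demo(0x1234);			/* single li */
	li64_demo(0x12345678);			/* lis + ori */
	li64_demo(0x123456789abcdef0ULL);	/* full 5-instruction sequence */
	return 0;
}
#endif /* LI64_DEMO_STANDALONE */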