/* bpf_jit.h: BPF JIT compiler for PPC64
 *
 * Copyright 2011 Matt Evans <matt@ozlabs.org>, IBM Corporation
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; version 2
 * of the License.
 */
#ifndef _BPF_JIT_H
#define _BPF_JIT_H

#define BPF_PPC_STACK_LOCALS	32
#define BPF_PPC_STACK_BASIC	(48+64)
#define BPF_PPC_STACK_SAVE	(18*8)
#define BPF_PPC_STACKFRAME	(BPF_PPC_STACK_BASIC+BPF_PPC_STACK_LOCALS+ \
				 BPF_PPC_STACK_SAVE)
#define BPF_PPC_SLOWPATH_FRAME	(48+64)
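/*
 * Illustrative arithmetic: BPF_PPC_STACKFRAME works out to
 * (48+64) + 32 + (18*8) = 288 bytes -- the basic frame (fixed ABI
 * header plus parameter save area), the BPF scratch locals, and save
 * space for the 18 non-volatile GPRs r14-r31 used for m[] and the
 * skb cache (see the register map below).
 */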

/*
 * Generated code register usage:
 *
 * As normal PPC C ABI (e.g. r1=sp, r2=TOC), with:
 *
 * skb		r3	(Entry parameter)
 * A register	r4
 * X register	r5
 * addr param	r6
 * r7-r10	scratch
 * skb->data	r14
 * skb headlen	r15	(skb->len - skb->data_len)
 * m[0]		r16
 * m[...]	...
 * m[15]	r31
 */
#define r_skb		3
#define r_ret		3
#define r_A		4
#define r_X		5
#define r_addr		6
#define r_scratch1	7
#define r_D		14
#define r_HL		15
#define r_M		16

#ifndef __ASSEMBLY__

/*
 * Assembly helpers from arch/powerpc/net/bpf_jit.S:
 */
extern u8 sk_load_word[], sk_load_half[], sk_load_byte[], sk_load_byte_msh[];

#define FUNCTION_DESCR_SIZE	24
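/*
 * Note (for orientation): 24 bytes is the size of a ppc64 ELFv1
 * function descriptor -- three doublewords (entry address, TOC
 * pointer, environment pointer) -- which the generated image reserves
 * ahead of the actual code.
 */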

/*
 * 16-bit immediate helper macros: HA() is for use with sign-extending instrs
 * (e.g. LD, ADDI).  If the bottom 16 bits are negative, add one to the top
 * half to cancel the sign extension (i.e. 0xffff + 1 = 0x(1)0000).
 */
#define IMM_H(i)		((uintptr_t)(i)>>16)
#define IMM_HA(i)		(((uintptr_t)(i)>>16) +			      \
				 (((uintptr_t)(i) & 0x8000) >> 15))
#define IMM_L(i)		((uintptr_t)(i) & 0xffff)
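/*
 * Worked example (illustrative): for i = 0x12348765, IMM_L = 0x8765,
 * which a sign-extending instruction such as addi treats as
 * 0xffff8765.  IMM_HA = 0x1234 + 1 = 0x1235, and
 * 0x12350000 + 0xffff8765 = 0x12348765 as intended.  IMM_H = 0x1234
 * is for logical (non-sign-extending) instructions such as ori/oris.
 */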

#define PLANT_INSTR(d, idx, instr)					      \
	do { if (d) { (d)[idx] = instr; } idx++; } while (0)
#define EMIT(instr)		PLANT_INSTR(image, ctx->idx, instr)
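/*
 * The null check in PLANT_INSTR enables two-pass code generation (a
 * sketch of the intended use, assuming the caller's variables are
 * named as in bpf_jit_comp.c): on a sizing pass, image == NULL, so
 * EMIT() only advances ctx->idx to count instructions; on the final
 * pass a real buffer is supplied and the same calls write the
 * instruction words.
 */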

#define PPC_NOP()		EMIT(PPC_INST_NOP)
#define PPC_BLR()		EMIT(PPC_INST_BLR)
#define PPC_BLRL()		EMIT(PPC_INST_BLRL)
#define PPC_MTLR(r)		EMIT(PPC_INST_MTLR | __PPC_RT(r))
#define PPC_ADDI(d, a, i)	EMIT(PPC_INST_ADDI | __PPC_RT(d) |	      \
				     __PPC_RA(a) | IMM_L(i))
#define PPC_MR(d, a)		PPC_OR(d, a, a)
#define PPC_LI(r, i)		PPC_ADDI(r, 0, i)
#define PPC_ADDIS(d, a, i)	EMIT(PPC_INST_ADDIS |			      \
				     __PPC_RS(d) | __PPC_RA(a) | IMM_L(i))
#define PPC_LIS(r, i)		PPC_ADDIS(r, 0, i)
#define PPC_STD(r, base, i)	EMIT(PPC_INST_STD | __PPC_RS(r) |	      \
				     __PPC_RA(base) | ((i) & 0xfffc))

#define PPC_LD(r, base, i)	EMIT(PPC_INST_LD | __PPC_RT(r) |	      \
				     __PPC_RA(base) | IMM_L(i))
#define PPC_LWZ(r, base, i)	EMIT(PPC_INST_LWZ | __PPC_RT(r) |	      \
				     __PPC_RA(base) | IMM_L(i))
#define PPC_LHZ(r, base, i)	EMIT(PPC_INST_LHZ | __PPC_RT(r) |	      \
				     __PPC_RA(base) | IMM_L(i))
/* Convenience helpers for the above with 'far' offsets: */
#define PPC_LD_OFFS(r, base, i) do { if ((i) < 32768) PPC_LD(r, base, i);     \
		else {	PPC_ADDIS(r, base, IMM_HA(i));			      \
			PPC_LD(r, r, IMM_L(i)); } } while (0)

#define PPC_LWZ_OFFS(r, base, i) do { if ((i) < 32768) PPC_LWZ(r, base, i);   \
		else {	PPC_ADDIS(r, base, IMM_HA(i));			      \
			PPC_LWZ(r, r, IMM_L(i)); } } while (0)

#define PPC_LHZ_OFFS(r, base, i) do { if ((i) < 32768) PPC_LHZ(r, base, i);   \
		else {	PPC_ADDIS(r, base, IMM_HA(i));			      \
			PPC_LHZ(r, r, IMM_L(i)); } } while (0)
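/*
 * Illustrative expansion: PPC_LWZ_OFFS(r_A, r_skb, 0x12345) emits
 * "addis r4, r3, 1" then "lwz r4, 0x2345(r4)", since the offset
 * exceeds the signed 16-bit displacement field; a near offset such as
 * 0x40 emits a single lwz.
 */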

#define PPC_CMPWI(a, i)		EMIT(PPC_INST_CMPWI | __PPC_RA(a) | IMM_L(i))
#define PPC_CMPDI(a, i)		EMIT(PPC_INST_CMPDI | __PPC_RA(a) | IMM_L(i))
#define PPC_CMPLWI(a, i)	EMIT(PPC_INST_CMPLWI | __PPC_RA(a) | IMM_L(i))
#define PPC_CMPLW(a, b)		EMIT(PPC_INST_CMPLW | __PPC_RA(a) | __PPC_RB(b))

#define PPC_SUB(d, a, b)	EMIT(PPC_INST_SUB | __PPC_RT(d) |	      \
				     __PPC_RB(a) | __PPC_RA(b))
#define PPC_ADD(d, a, b)	EMIT(PPC_INST_ADD | __PPC_RT(d) |	      \
				     __PPC_RA(a) | __PPC_RB(b))
#define PPC_MUL(d, a, b)	EMIT(PPC_INST_MULLW | __PPC_RT(d) |	      \
				     __PPC_RA(a) | __PPC_RB(b))
#define PPC_MULHWU(d, a, b)	EMIT(PPC_INST_MULHWU | __PPC_RT(d) |	      \
				     __PPC_RA(a) | __PPC_RB(b))
#define PPC_MULI(d, a, i)	EMIT(PPC_INST_MULLI | __PPC_RT(d) |	      \
				     __PPC_RA(a) | IMM_L(i))
#define PPC_DIVWU(d, a, b)	EMIT(PPC_INST_DIVWU | __PPC_RT(d) |	      \
				     __PPC_RA(a) | __PPC_RB(b))
#define PPC_AND(d, a, b)	EMIT(PPC_INST_AND | __PPC_RA(d) |	      \
				     __PPC_RS(a) | __PPC_RB(b))
#define PPC_ANDI(d, a, i)	EMIT(PPC_INST_ANDI | __PPC_RA(d) |	      \
				     __PPC_RS(a) | IMM_L(i))
#define PPC_AND_DOT(d, a, b)	EMIT(PPC_INST_ANDDOT | __PPC_RA(d) |	      \
				     __PPC_RS(a) | __PPC_RB(b))
#define PPC_OR(d, a, b)		EMIT(PPC_INST_OR | __PPC_RA(d) |	      \
				     __PPC_RS(a) | __PPC_RB(b))
#define PPC_ORI(d, a, i)	EMIT(PPC_INST_ORI | __PPC_RA(d) |	      \
				     __PPC_RS(a) | IMM_L(i))
#define PPC_ORIS(d, a, i)	EMIT(PPC_INST_ORIS | __PPC_RA(d) |	      \
				     __PPC_RS(a) | IMM_L(i))
#define PPC_SLW(d, a, s)	EMIT(PPC_INST_SLW | __PPC_RA(d) |	      \
				     __PPC_RS(a) | __PPC_RB(s))
#define PPC_SRW(d, a, s)	EMIT(PPC_INST_SRW | __PPC_RA(d) |	      \
				     __PPC_RS(a) | __PPC_RB(s))
/* slwi = rlwinm Rx, Ry, n, 0, 31-n */
#define PPC_SLWI(d, a, i)	EMIT(PPC_INST_RLWINM | __PPC_RA(d) |	      \
				     __PPC_RS(a) | __PPC_SH(i) |	      \
				     __PPC_MB(0) | __PPC_ME(31-(i)))
/* srwi = rlwinm Rx, Ry, 32-n, n, 31 */
#define PPC_SRWI(d, a, i)	EMIT(PPC_INST_RLWINM | __PPC_RA(d) |	      \
				     __PPC_RS(a) | __PPC_SH(32-(i)) |	      \
				     __PPC_MB(i) | __PPC_ME(31))
/* sldi = rldicr Rx, Ry, n, 63-n */
#define PPC_SLDI(d, a, i)	EMIT(PPC_INST_RLDICR | __PPC_RA(d) |	      \
				     __PPC_RS(a) | __PPC_SH(i) |	      \
				     __PPC_MB(63-(i)) | (((i) & 0x20) >> 4))
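/*
 * Worked instance of the rlwinm equivalences above (illustrative):
 * PPC_SLWI(r_A, r_A, 3) is "rlwinm r4, r4, 3, 0, 28" -- rotate left
 * by 3 and keep bits 0-28, clearing the 3 bits rotated around, which
 * is exactly a logical shift left by 3.
 */
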
#define PPC_NEG(d, a)		EMIT(PPC_INST_NEG | __PPC_RT(d) | __PPC_RA(a))

/* Long jump; (unconditional 'branch') */
#define PPC_JMP(dest)		EMIT(PPC_INST_BRANCH |			      \
				     (((dest) - (ctx->idx * 4)) & 0x03fffffc))
/* "cond" here covers BO:BI fields. */
#define PPC_BCC_SHORT(cond, dest)	EMIT(PPC_INST_BRANCH_COND |	      \
					     (((cond) & 0x3ff) << 16) |	      \
					     (((dest) - (ctx->idx * 4)) &     \
					      0xfffc))
#define PPC_LI32(d, i)		do { PPC_LI(d, IMM_L(i));		      \
		if ((u32)(uintptr_t)(i) >= 32768) {			      \
			PPC_ADDIS(d, d, IMM_HA(i));			      \
		} } while (0)
#define PPC_LI64(d, i)		do {					      \
		if (!((uintptr_t)(i) & 0xffffffff00000000ULL))		      \
			PPC_LI32(d, i);					      \
		else {							      \
			PPC_LIS(d, ((uintptr_t)(i) >> 48));		      \
			if ((uintptr_t)(i) & 0x0000ffff00000000ULL)	      \
				PPC_ORI(d, d,				      \
					((uintptr_t)(i) >> 32) & 0xffff);     \
			PPC_SLDI(d, d, 32);				      \
			if ((uintptr_t)(i) & 0x00000000ffff0000ULL)	      \
				PPC_ORIS(d, d,				      \
					 ((uintptr_t)(i) >> 16) & 0xffff);    \
			if ((uintptr_t)(i) & 0x000000000000ffffULL)	      \
				PPC_ORI(d, d, (uintptr_t)(i) & 0xffff);	      \
		} } while (0)
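/*
 * Worked expansion (illustrative): PPC_LI64(r_A, 0x1234567890abcdefULL)
 * emits "lis r4, 0x1234; ori r4, r4, 0x5678; sldi r4, r4, 32;
 * oris r4, r4, 0x90ab; ori r4, r4, 0xcdef" -- the constant is built
 * halfword by halfword, top down, skipping any halfword that is
 * already zero.
 */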

static inline bool is_nearbranch(int offset)
{
	return (offset < 32768) && (offset >= -32768);
}

/*
 * The fly in the ointment of code size changing from pass to pass is
 * avoided by padding the short branch case with a NOP.  If code size differs
 * with different branch reaches we will have the issue of code moving from
 * one pass to the next and will need a few passes to converge on a stable
 * state.
 */
#define PPC_BCC(cond, dest)	do {					      \
		if (is_nearbranch((dest) - (ctx->idx * 4))) {		      \
			PPC_BCC_SHORT(cond, dest);			      \
			PPC_NOP();					      \
		} else {						      \
			/* Flip the 'T or F' bit to invert comparison */      \
			PPC_BCC_SHORT(cond ^ COND_CMP_TRUE, (ctx->idx+2)*4);  \
			PPC_JMP(dest);					      \
		} } while (0)
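/*
 * Both arms are two instructions long, which is what keeps the image
 * size stable across passes.  Illustrative shapes (hypothetical
 * targets): a near "beq dest" becomes "beq dest; nop", while a far one
 * becomes "bne +8; b dest" -- the inverted short branch hops over the
 * unconditional long jump.
 */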

/* To create a branch condition, select a bit of cr0... */
#define CR0_LT		0
#define CR0_GT		1
#define CR0_EQ		2
/* ...and modify BO[3] */
#define COND_CMP_TRUE	0x100
#define COND_CMP_FALSE	0x000
/* Together, they make all required comparisons: */
#define COND_GT		(CR0_GT | COND_CMP_TRUE)
#define COND_GE		(CR0_LT | COND_CMP_FALSE)
#define COND_EQ		(CR0_EQ | COND_CMP_TRUE)
#define COND_NE		(CR0_EQ | COND_CMP_FALSE)
#define COND_LT		(CR0_LT | COND_CMP_TRUE)
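/*
 * Illustrative reading: COND_GE is "branch if CR0[LT] is false" --
 * after a compare, a >= b is exactly "not less than" -- so
 * PPC_BCC(COND_GE, dest) behaves like a "bge dest".
 */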

#define SEEN_DATAREF 0x10000 /* might call external helpers */
#define SEEN_XREG    0x20000 /* X reg is used */
#define SEEN_MEM     0x40000 /* SEEN_MEM+(1<<n) = use mem[n] for temporary
			      * storage */
#define SEEN_MEM_MSK 0x0ffff
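/*
 * Intended use (a sketch, assuming the two-pass flow described above):
 * the first pass ORs these bits into ctx->seen as it scans the filter,
 * e.g. "ctx->seen |= SEEN_MEM | (1 << k)" for a scratch-memory store,
 * so the prologue/epilogue can set up only the registers (r_X, m[n],
 * the skb cache) that the program actually touches.
 */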

struct codegen_context {
	unsigned int seen;
	unsigned int idx;
	int pc_ret0; /* bpf index of first RET #0 instruction (if any) */
};

#endif /* !__ASSEMBLY__ */

#endif /* _BPF_JIT_H */