1 { 2 "atomic compare-and-exchange smoketest - 64bit", 3 .insns = { 4 /* val = 3; */ 5 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 3), 6 /* old = atomic_cmpxchg(&val, 2, 4); */ 7 BPF_MOV64_IMM(BPF_REG_1, 4), 8 BPF_MOV64_IMM(BPF_REG_0, 2), 9 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, BPF_REG_10, BPF_REG_1, -8), 10 /* if (old != 3) exit(2); */ 11 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 3, 2), 12 BPF_MOV64_IMM(BPF_REG_0, 2), 13 BPF_EXIT_INSN(), 14 /* if (val != 3) exit(3); */ 15 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -8), 16 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 3, 2), 17 BPF_MOV64_IMM(BPF_REG_0, 3), 18 BPF_EXIT_INSN(), 19 /* old = atomic_cmpxchg(&val, 3, 4); */ 20 BPF_MOV64_IMM(BPF_REG_1, 4), 21 BPF_MOV64_IMM(BPF_REG_0, 3), 22 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, BPF_REG_10, BPF_REG_1, -8), 23 /* if (old != 3) exit(4); */ 24 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 3, 2), 25 BPF_MOV64_IMM(BPF_REG_0, 4), 26 BPF_EXIT_INSN(), 27 /* if (val != 4) exit(5); */ 28 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -8), 29 BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 4, 2), 30 BPF_MOV64_IMM(BPF_REG_0, 5), 31 BPF_EXIT_INSN(), 32 /* exit(0); */ 33 BPF_MOV64_IMM(BPF_REG_0, 0), 34 BPF_EXIT_INSN(), 35 }, 36 .result = ACCEPT, 37 }, 38 { 39 "atomic compare-and-exchange smoketest - 32bit", 40 .insns = { 41 /* val = 3; */ 42 BPF_ST_MEM(BPF_W, BPF_REG_10, -4, 3), 43 /* old = atomic_cmpxchg(&val, 2, 4); */ 44 BPF_MOV32_IMM(BPF_REG_1, 4), 45 BPF_MOV32_IMM(BPF_REG_0, 2), 46 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, BPF_REG_10, BPF_REG_1, -4), 47 /* if (old != 3) exit(2); */ 48 BPF_JMP32_IMM(BPF_JEQ, BPF_REG_0, 3, 2), 49 BPF_MOV32_IMM(BPF_REG_0, 2), 50 BPF_EXIT_INSN(), 51 /* if (val != 3) exit(3); */ 52 BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -4), 53 BPF_JMP32_IMM(BPF_JEQ, BPF_REG_0, 3, 2), 54 BPF_MOV32_IMM(BPF_REG_0, 3), 55 BPF_EXIT_INSN(), 56 /* old = atomic_cmpxchg(&val, 3, 4); */ 57 BPF_MOV32_IMM(BPF_REG_1, 4), 58 BPF_MOV32_IMM(BPF_REG_0, 3), 59 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, BPF_REG_10, BPF_REG_1, -4), 60 /* if (old != 3) exit(4); */ 61 BPF_JMP32_IMM(BPF_JEQ, BPF_REG_0, 3, 2), 62 BPF_MOV32_IMM(BPF_REG_0, 4), 63 BPF_EXIT_INSN(), 64 /* if (val != 4) exit(5); */ 65 BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -4), 66 BPF_JMP32_IMM(BPF_JEQ, BPF_REG_0, 4, 2), 67 BPF_MOV32_IMM(BPF_REG_0, 5), 68 BPF_EXIT_INSN(), 69 /* exit(0); */ 70 BPF_MOV32_IMM(BPF_REG_0, 0), 71 BPF_EXIT_INSN(), 72 }, 73 .result = ACCEPT, 74 }, 75 { 76 "Can't use cmpxchg on uninit src reg", 77 .insns = { 78 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 3), 79 BPF_MOV64_IMM(BPF_REG_0, 3), 80 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, BPF_REG_10, BPF_REG_2, -8), 81 BPF_EXIT_INSN(), 82 }, 83 .result = REJECT, 84 .errstr = "!read_ok", 85 }, 86 { 87 "Can't use cmpxchg on uninit memory", 88 .insns = { 89 BPF_MOV64_IMM(BPF_REG_0, 3), 90 BPF_MOV64_IMM(BPF_REG_2, 4), 91 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, BPF_REG_10, BPF_REG_2, -8), 92 BPF_EXIT_INSN(), 93 }, 94 .result = REJECT, 95 .errstr = "invalid read from stack", 96 }, 97 { 98 "BPF_W cmpxchg should zero top 32 bits", 99 .insns = { 100 /* r0 = U64_MAX; */ 101 BPF_MOV64_IMM(BPF_REG_0, 0), 102 BPF_ALU64_IMM(BPF_SUB, BPF_REG_0, 1), 103 /* u64 val = r0; */ 104 BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_0, -8), 105 /* r0 = (u32)atomic_cmpxchg((u32 *)&val, r0, 1); */ 106 BPF_MOV32_IMM(BPF_REG_1, 1), 107 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, BPF_REG_10, BPF_REG_1, -8), 108 /* r1 = 0x00000000FFFFFFFFull; */ 109 BPF_MOV64_IMM(BPF_REG_1, 1), 110 BPF_ALU64_IMM(BPF_LSH, BPF_REG_1, 32), 111 BPF_ALU64_IMM(BPF_SUB, BPF_REG_1, 1), 112 /* if (r0 != r1) exit(1); */ 113 BPF_JMP_REG(BPF_JEQ, BPF_REG_0, 
BPF_REG_1, 2), 114 BPF_MOV32_IMM(BPF_REG_0, 1), 115 BPF_EXIT_INSN(), 116 /* exit(0); */ 117 BPF_MOV32_IMM(BPF_REG_0, 0), 118 BPF_EXIT_INSN(), 119 }, 120 .result = ACCEPT, 121 }, 122
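/*
 * For reference, a rough model (a sketch, not code taken from the kernel
 * sources) of the semantics the tests above exercise: a BPF_ATOMIC |
 * BPF_CMPXCHG instruction compares the value at dst_reg + off with R0 and,
 * if they match, stores src_reg there.  Either way the old value is loaded
 * back into R0, zero-extended for BPF_W operands, which is what the
 * "BPF_W cmpxchg should zero top 32 bits" test checks:
 *
 *	old = *(dst_reg + off);
 *	if (old == r0)
 *		*(dst_reg + off) = src_reg;
 *	r0 = old;	// zero-extended when the operand size is BPF_W
 */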