/*
 * ARM bit-operation helper macros.
 *
 * Register convention shared by every macro below:
 *   r0 = bit number
 *   r1 = base address of the word array holding the bitmap
 *
 * The opening "ands ip, r1, #3 / strneb ip" pair is an alignment
 * assertion: if r1 is not word-aligned it stores to a near-NULL
 * address (0..3), faulting immediately instead of silently operating
 * on a misaligned word.
 *
 * ARMv6 and later use the exclusive-access instructions (ldrex/strex)
 * and so are safe on SMP; earlier architectures fall back to masking
 * interrupts around a plain load/modify/store (UP-only safety).
 */
#if __LINUX_ARM_ARCH__ >= 6
/*
 * bitop - implement a set/clear/change_bit operation.
 * @instr: instruction that combines the loaded word with the bit
 *         mask in r3 (e.g. orr, bic, eor)
 *
 * Retries the ldrex/strex sequence until the exclusive store
 * succeeds. Clobbers r0, r2, r3, ip.
 */
	.macro	bitop, instr
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ r3 = single-bit mask
1:	ldrex	r2, [r1]		@ exclusive load of target word
	\instr	r2, r2, r3
	strex	r0, r2, [r1]		@ r0 = 0 if the store succeeded
	cmp	r0, #0
	bne	1b			@ lost exclusivity - retry
	bx	lr
	.endm

/*
 * testop - implement a test_and_xxx_bit operation (ARMv6+ version).
 * @instr: instruction that combines the loaded word with the mask
 * @store: not used by this variant; present only to keep the macro
 *         interface identical to the pre-v6 version below
 *
 * Returns the old value of the bit in r0 (0 or 1).  smp_dmb before
 * and after the exclusive sequence gives the full-barrier ordering
 * the kernel's atomic test_and_* operations require on SMP.
 */
	.macro	testop, instr, store
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]		@ ip = 0 if the store succeeded
	cmp	ip, #0
	bne	1b			@ lost exclusivity - retry
	smp_dmb
	cmp	r0, #0
	movne	r0, #1			@ normalise old bit value to 0/1
2:	bx	lr
	.endm
#else
/*
 * bitop - pre-ARMv6 set/clear/change_bit: no exclusive accesses are
 * available, so mask IRQs around a plain read-modify-write instead.
 * @instr: instruction that combines the loaded word with the mask
 *
 * Clobbers r2, r3, ip.
 */
	.macro	bitop, instr
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31		@ r2 = bit offset within word
	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
	mov	r3, #1
	mov	r3, r3, lsl r2		@ r3 = single-bit mask
	save_and_disable_irqs ip	@ make the RMW atomic w.r.t. IRQs
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	mov	pc, lr
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, instr, store
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31		@ r3 = bit offset within word
	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
	save_and_disable_irqs ip	@ make the RMW atomic w.r.t. IRQs
	ldr	r2, [r1, r0, lsl #2]!	@ writeback: r1 now -> target word
	mov	r0, #1
	tst	r2, r0, lsl r3		@ Z set iff old bit was clear
	\instr	r2, r2, r0, lsl r3	@ flags preserved (no s-suffix op)
	\store	r2, [r1]		@ may be conditional (see note)
	moveq	r0, #0			@ r0 = old bit value (0 or 1)
	restore_irqs ip
	mov	pc, lr
	.endm
#endif