#if __LINUX_ARM_ARCH__ >= 6
	.macro	bitop, instr		@ r0 = bit number, r1 = base address
	mov	r2, #1
	and	r3, r0, #7		@ Get bit offset
	add	r1, r1, r0, lsr #3	@ Get byte offset
	mov	r3, r2, lsl r3		@ create mask
1:	ldrexb	r2, [r1]		@ load the byte exclusively
	\instr	r2, r2, r3		@ apply the operation to the bit
	strexb	r0, r2, [r1]		@ attempt to store it back
	cmp	r0, #0			@ retry if the exclusive store failed
	bne	1b
	mov	pc, lr
	.endm

	.macro	testop, instr, store	@ r0 = bit number, r1 = base address
	and	r3, r0, #7		@ Get bit offset
	mov	r2, #1
	add	r1, r1, r0, lsr #3	@ Get byte offset
	mov	r3, r2, lsl r3		@ create mask
1:	ldrexb	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strexb	ip, r2, [r1]		@ \store is not used on ARMv6
	cmp	ip, #0			@ retry if the exclusive store failed
	bne	1b
	cmp	r0, #0			@ normalise the old bit value...
	movne	r0, #1			@ ...to 0 or 1 for the caller
2:	mov	pc, lr
	.endm
#else
	.macro	bitop, instr		@ r0 = bit number, r1 = base address
	and	r2, r0, #7		@ Get bit offset
	mov	r3, #1
	mov	r3, r3, lsl r2		@ create mask
	save_and_disable_irqs ip, r2	@ protect the read-modify-write
	ldrb	r2, [r1, r0, lsr #3]
	\instr	r2, r2, r3
	strb	r2, [r1, r0, lsr #3]
	restore_irqs ip
	mov	pc, lr
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.  (See the illustrative
 * instantiation sketched after this macro.)
 */
	.macro	testop, instr, store	@ r0 = bit number, r1 = base address
	add	r1, r1, r0, lsr #3	@ Get byte offset
	and	r3, r0, #7		@ Get bit offset
	mov	r0, #1
	save_and_disable_irqs ip, r2	@ protect the read-modify-write
	ldrb	r2, [r1]
	tst	r2, r0, lsl r3		@ test the old value of the bit
	\instr	r2, r2, r0, lsl r3	@ apply the operation
	\store	r2, [r1]		@ store back (may be conditional)
	restore_irqs ip
	moveq	r0, #0			@ return the old bit value (0 or 1)
	mov	pc, lr
.endm
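
/*
 * Illustrative only (not part of the original file): these macros are
 * instantiated by small entry points in separate source files.  The
 * entry-point names below are assumptions for the sake of the example
 * (ENTRY() comes from <linux/linkage.h>); on entry r0 is the bit number
 * and r1 the base address.
 *
 *	ENTRY(_set_bit_le)
 *		bitop	orr		@ unconditionally set the bit
 *
 *	ENTRY(_test_and_set_bit_le)
 *		testop	orreq, streqb	@ set the bit and write the byte back
 *					@ only if it was previously clear, so
 *					@ a no-op call never dirties the cache
 */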
#endif