#include <asm/assembler.h>
#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
5 | .macro bitop, name, instr |
6 | ENTRY( \name ) | |
7 | UNWIND( .fnstart ) | |
a16ede35 RK |
8 | ands ip, r1, #3 |
9 | strneb r1, [ip] @ assert word-aligned | |
54ea06f6 | 10 | mov r2, #1 |
6323f0cc RK |
11 | and r3, r0, #31 @ Get bit offset |
12 | mov r0, r0, lsr #5 | |
13 | add r1, r1, r0, lsl #2 @ Get word offset | |
b7ec6994 | 14 | #if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP) |
d779c07d WD |
15 | .arch_extension mp |
16 | ALT_SMP(W(pldw) [r1]) | |
17 | ALT_UP(W(nop)) | |
18 | #endif | |
54ea06f6 | 19 | mov r3, r2, lsl r3 |
6323f0cc | 20 | 1: ldrex r2, [r1] |
54ea06f6 | 21 | \instr r2, r2, r3 |
6323f0cc | 22 | strex r0, r2, [r1] |
e7ec0293 | 23 | cmp r0, #0 |
54ea06f6 | 24 | bne 1b |
3ba6e69a | 25 | bx lr |
c36ef4b1 WD |
26 | UNWIND( .fnend ) |
27 | ENDPROC(\name ) | |
54ea06f6 RK |
28 | .endm |
30 | .macro testop, name, instr, store |
31 | ENTRY( \name ) | |
32 | UNWIND( .fnstart ) | |
a16ede35 RK |
33 | ands ip, r1, #3 |
34 | strneb r1, [ip] @ assert word-aligned | |
54ea06f6 | 35 | mov r2, #1 |
6323f0cc RK |
36 | and r3, r0, #31 @ Get bit offset |
37 | mov r0, r0, lsr #5 | |
38 | add r1, r1, r0, lsl #2 @ Get word offset | |
54ea06f6 | 39 | mov r3, r2, lsl r3 @ create mask |
bac4e960 | 40 | smp_dmb |
c32ffce0 WD |
41 | #if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP) |
42 | .arch_extension mp | |
43 | ALT_SMP(W(pldw) [r1]) | |
44 | ALT_UP(W(nop)) | |
45 | #endif | |
6323f0cc | 46 | 1: ldrex r2, [r1] |
54ea06f6 | 47 | ands r0, r2, r3 @ save old value of bit |
6323f0cc RK |
48 | \instr r2, r2, r3 @ toggle bit |
49 | strex ip, r2, [r1] | |
614d73ed | 50 | cmp ip, #0 |
54ea06f6 | 51 | bne 1b |
bac4e960 | 52 | smp_dmb |
54ea06f6 RK |
53 | cmp r0, #0 |
54 | movne r0, #1 | |
3ba6e69a | 55 | 2: bx lr |
c36ef4b1 WD |
56 | UNWIND( .fnend ) |
57 | ENDPROC(\name ) | |
54ea06f6 RK |
58 | .endm |
#else
60 | .macro bitop, name, instr |
61 | ENTRY( \name ) | |
62 | UNWIND( .fnstart ) | |
a16ede35 RK |
63 | ands ip, r1, #3 |
64 | strneb r1, [ip] @ assert word-aligned | |
6323f0cc RK |
65 | and r2, r0, #31 |
66 | mov r0, r0, lsr #5 | |
7a55fd0b RK |
67 | mov r3, #1 |
68 | mov r3, r3, lsl r2 | |
59d1ff3b | 69 | save_and_disable_irqs ip |
6323f0cc | 70 | ldr r2, [r1, r0, lsl #2] |
7a55fd0b | 71 | \instr r2, r2, r3 |
6323f0cc | 72 | str r2, [r1, r0, lsl #2] |
7a55fd0b | 73 | restore_irqs ip |
6ebbf2ce | 74 | ret lr |
c36ef4b1 WD |
75 | UNWIND( .fnend ) |
76 | ENDPROC(\name ) | |
7a55fd0b RK |
77 | .endm |
78 | ||
79 | /** | |
80 | * testop - implement a test_and_xxx_bit operation. | |
81 | * @instr: operational instruction | |
82 | * @store: store instruction | |
83 | * | |
84 | * Note: we can trivially conditionalise the store instruction | |
6cbdc8c5 | 85 | * to avoid dirtying the data cache. |
7a55fd0b | 86 | */ |
c36ef4b1 WD |
87 | .macro testop, name, instr, store |
88 | ENTRY( \name ) | |
89 | UNWIND( .fnstart ) | |
a16ede35 RK |
90 | ands ip, r1, #3 |
91 | strneb r1, [ip] @ assert word-aligned | |
6323f0cc RK |
92 | and r3, r0, #31 |
93 | mov r0, r0, lsr #5 | |
59d1ff3b | 94 | save_and_disable_irqs ip |
6323f0cc RK |
95 | ldr r2, [r1, r0, lsl #2]! |
96 | mov r0, #1 | |
7a55fd0b RK |
97 | tst r2, r0, lsl r3 |
98 | \instr r2, r2, r0, lsl r3 | |
99 | \store r2, [r1] | |
7a55fd0b | 100 | moveq r0, #0 |
0d928b0b | 101 | restore_irqs ip |
6ebbf2ce | 102 | ret lr |
c36ef4b1 WD |
103 | UNWIND( .fnend ) |
104 | ENDPROC(\name ) | |
7a55fd0b | 105 | .endm |
#endif