/* SPDX-License-Identifier: GPL-2.0 */
#include <asm/assembler.h>
#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
/*
 * bitop - emit an atomic bit operation \name (set/clear/change).
 *
 * Calling convention (matches the C bitops prototypes):
 *   r0 = bit number, r1 = pointer to unsigned long bitmap base
 * Returns nothing.  Clobbers r0, r2, r3, ip, flags.
 *
 * \instr is the ALU op (orr/bic/eor) applied to the word under a
 * ldrex/strex retry loop, giving an atomic read-modify-write.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	@ If the pointer is not word-aligned, store to the tiny address
	@ in ip (= r1 & 3, i.e. 1..3) to deliberately fault and flag
	@ the misuse early ("assert word-aligned").
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5		@ r0 = word index (bit / 32)
	add	r1, r1, r0, lsl #2	@ Get word offset
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	@ On v7 SMP, preload the line for writing so the exclusive
	@ store below is less likely to fail; patched to a nop on UP.
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3		@ r3 = single-bit mask
1:	ldrex	r2, [r1]		@ exclusive load of the word
	\instr	r2, r2, r3		@ apply set/clear/change
	strex	r0, r2, [r1]		@ try to commit; r0 = 0 on success
	cmp	r0, #0
	bne	1b			@ lost the exclusive — retry
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/*
 * testop - emit an atomic test_and_xxx_bit operation \name.
 *
 * Calling convention: r0 = bit number, r1 = pointer to bitmap base.
 * Returns the OLD value of the bit in r0 (0 or 1).
 * Clobbers r2, r3, ip, flags.
 *
 * \instr is the ALU op applied under the ldrex/strex loop.
 * \store is unused in this ARMv6+ variant; the parameter exists
 * only for interface parity with the pre-v6 macro below.
 * smp_dmb before and after the loop gives the full-barrier
 * semantics required of successful test_and_* operations.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	@ Fault on a misaligned pointer (store to address 1..3).
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5		@ r0 = word index
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb				@ barrier before the atomic op
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b			@ exclusive store failed — retry
	smp_dmb				@ barrier after the atomic op
	cmp	r0, #0
	movne	r0, #1			@ normalise old bit to 0/1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#else
61 | .macro bitop, name, instr |
62 | ENTRY( \name ) | |
63 | UNWIND( .fnstart ) | |
a16ede35 RK |
64 | ands ip, r1, #3 |
65 | strneb r1, [ip] @ assert word-aligned | |
6323f0cc RK |
66 | and r2, r0, #31 |
67 | mov r0, r0, lsr #5 | |
7a55fd0b RK |
68 | mov r3, #1 |
69 | mov r3, r3, lsl r2 | |
59d1ff3b | 70 | save_and_disable_irqs ip |
6323f0cc | 71 | ldr r2, [r1, r0, lsl #2] |
7a55fd0b | 72 | \instr r2, r2, r3 |
6323f0cc | 73 | str r2, [r1, r0, lsl #2] |
7a55fd0b | 74 | restore_irqs ip |
6ebbf2ce | 75 | ret lr |
c36ef4b1 WD |
76 | UNWIND( .fnend ) |
77 | ENDPROC(\name ) | |
7a55fd0b RK |
78 | .endm |
79 | ||
/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 *
 * Pre-ARMv6 fallback: IRQs are disabled around the read-modify-write
 * instead of using exclusives.  r0 = bit number, r1 = bitmap base;
 * returns the old bit value (0 or 1) in r0.  Clobbers r2, r3, ip,
 * flags; r1 is advanced to the addressed word by the writeback load.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	@ Fault on a misaligned pointer (store to address 1..3).
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31		@ r3 = bit offset within word
	mov	r0, r0, lsr #5		@ r0 = word index
	save_and_disable_irqs ip	@ critical section begin
	ldr	r2, [r1, r0, lsl #2]!	@ load word; r1 += index*4 (writeback)
	mov	r0, #1			@ provisional return value: bit was set
	tst	r2, r0, lsl r3		@ test old bit (sets Z)
	\instr	r2, r2, r0, lsl r3	@ apply set/clear/change
	\store	r2, [r1]		@ caller-chosen store (may be conditional)
	moveq	r0, #0			@ old bit was clear — return 0
	restore_irqs ip			@ critical section end
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#endif