/* SPDX-License-Identifier: GPL-2.0 */
#include <asm/assembler.h>
#include <asm/unwind.h>

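/*
 * All macros below use the same calling convention as the C bitops they
 * implement: r0 is the bit number, r1 points to the unsigned long bitmap.
 * The "ands ip, r1, #3" / "strbne r1, [ip]" prologue deliberately stores
 * to a low (unmapped) address when r1 is not word-aligned, so a misaligned
 * pointer faults immediately instead of silently corrupting memory.
 */
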
#if __LINUX_ARM_ARCH__ >= 6
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
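	/*
	 * On ARMv7 SMP kernels, hint to the cache that this line is about
	 * to be written: pldw (preload for write) comes from the
	 * multiprocessing extensions, hence .arch_extension mp.  The
	 * ALT_SMP/ALT_UP pair lets SMP_ON_UP patching replace the hint
	 * with a nop on uniprocessor hardware.  __testop below uses the
	 * same hint.
	 */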
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]
	cmp	r0, #0
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
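
/*
 * Illustrative expansion only (the real instantiations live in the
 * callers, e.g. setbit.S):
 *
 *	bitop	_set_bit, orr
 *
 * expands to an ldrex/strex retry loop that atomically performs
 * *p |= BIT(nr).  The plain (non test_and_) bitops are unordered, so no
 * barrier is emitted here.
 */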

	.macro	__testop, name, instr, store, barrier
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	\barrier
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b
	\barrier
	cmp	r0, #0
	movne	r0, #1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

	.macro	testop, name, instr, store
	__testop \name, \instr, \store, smp_dmb
	.endm

	.macro	sync_testop, name, instr, store
	__testop \name, \instr, \store, __smp_dmb
	.endm
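
/*
 * testop passes smp_dmb as the barrier, which asm/assembler.h reduces to
 * nothing on !CONFIG_SMP builds; sync_testop passes __smp_dmb, which is
 * always a real barrier.  The sync_* variants are presumably meant for
 * bitmaps shared with agents outside the kernel's SMP domain (e.g. a
 * hypervisor), where ordering is needed even on a uniprocessor kernel.
 */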
#else
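/*
 * Pre-ARMv6 cores have no ldrex/strex, so the fallbacks below make the
 * read-modify-write atomic by disabling interrupts around it.  That is
 * sufficient because these cores are uniprocessor only.
 */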
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31
	mov	r0, r0, lsr #5
	mov	r3, #1
	mov	r3, r3, lsl r2
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @name: name of the entry point to generate
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31
	mov	r0, r0, lsr #5
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!
	mov	r0, #1
	tst	r2, r0, lsl r3
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	moveq	r0, #0
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
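
/*
 * Illustrative expansion only (see e.g. testsetbit.S): passing a
 * conditional pair such as
 *
 *	testop	_test_and_set_bit, orreq, streq
 *
 * is what the "conditionalise the store" note above refers to: the word
 * is only modified and written back when the bit still needs changing,
 * so a redundant test_and_set_bit never dirties the cache line.
 */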
#endif