1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef __ASM_METAG_GLOBAL_LOCK_H
3 #define __ASM_METAG_GLOBAL_LOCK_H
5 #include <asm/metag_mem.h>
/**
 * __global_lock1() - Acquire global voluntary lock (LOCK1).
 * @flags: Variable to store flags into.
 *
 * Acquires the Meta global voluntary lock (LOCK1), also taking care to disable
 * all triggers so we cannot be interrupted, and to enforce a compiler barrier
 * so that the compiler cannot reorder memory accesses across the lock.
 *
 * No other hardware thread will be able to acquire the voluntary or exclusive
 * locks until the voluntary lock is released with @__global_unlock1, but they
 * may continue to execute as long as they aren't trying to acquire either of
 * the locks.
 */
#define __global_lock1(flags) do {					\
	unsigned int __trval;						\
	/* Zero TXMASKI (mask all triggers), saving the old mask, then	\
	 * take the voluntary lock. "memory" enforces the barrier.	\
	 */								\
	asm volatile("MOV	%0,#0\n\t"				\
		     "SWAP	%0,TXMASKI\n\t"				\
		     "LOCK1"						\
		     : "=r" (__trval)					\
		     :							\
		     : "memory");					\
	(flags) = __trval;						\
} while (0)
/**
 * __global_unlock1() - Release global voluntary lock (LOCK1).
 * @flags: Variable to restore flags from.
 *
 * Releases the Meta global voluntary lock (LOCK1) acquired with
 * @__global_lock1, also taking care to re-enable triggers, and to enforce a
 * compiler barrier so that the compiler cannot reorder memory accesses across
 * the unlock.
 *
 * This immediately allows another hardware thread to acquire the voluntary or
 * exclusive locks.
 */
#define __global_unlock1(flags) do {					\
	unsigned int __trval = (flags);					\
	/* Drop all locks, then restore the saved trigger mask into	\
	 * TXMASKI. "memory" enforces the compiler barrier.		\
	 */								\
	asm volatile("LOCK0\n\t"					\
		     "MOV	TXMASKI,%0"				\
		     :							\
		     : "r" (__trval)					\
		     : "memory");					\
} while (0)
/**
 * __global_lock2() - Acquire global exclusive lock (LOCK2).
 * @flags: Variable to store flags into.
 *
 * Acquires the Meta global voluntary lock and global exclusive lock (LOCK2),
 * also taking care to disable all triggers so we cannot be interrupted, to take
 * the atomic lock (system event) and to enforce a compiler barrier so that the
 * compiler cannot reorder memory accesses across the lock.
 *
 * No other hardware thread will be able to execute code until the locks are
 * released with @__global_unlock2.
 */
#define __global_lock2(flags) do {					\
	unsigned int __trval;						\
	/* Renamed from __aloc_hi for consistency with		\
	 * __global_unlock2's __alock_hi.				\
	 */								\
	unsigned int __alock_hi = LINSYSEVENT_WR_ATOMIC_LOCK & 0xFFFF0000;\
	/* Mask all triggers (saving the old TXMASKI), take both locks,	\
	 * then write the atomic-lock system event at +0x40 from the	\
	 * event base.							\
	 * NOTE(review): operand constraints reconstructed from the	\
	 * truncated original - verify against the Meta toolchain's	\
	 * constraint letters.						\
	 */								\
	asm volatile("MOV	%0,#0\n\t"				\
		     "SWAP	%0,TXMASKI\n\t"				\
		     "LOCK2\n\t"					\
		     "SETD	[%1+#0x40],D1RtP"			\
		     : "=&r" (__trval)					\
		     : "u" (__alock_hi)					\
		     : "memory");					\
	(flags) = __trval;						\
} while (0)
/**
 * __global_unlock2() - Release global exclusive lock (LOCK2).
 * @flags: Variable to restore flags from.
 *
 * Releases the Meta global exclusive lock (LOCK2) and global voluntary lock
 * acquired with @__global_lock2, also taking care to release the atomic lock
 * (system event), re-enable triggers, and to enforce a compiler barrier so that
 * the compiler cannot reorder memory accesses across the unlock.
 *
 * This immediately allows other hardware threads to continue executing and one
 * of them to acquire locks.
 */
#define __global_unlock2(flags) do {					\
	unsigned int __trval = (flags);					\
	unsigned int __alock_hi = LINSYSEVENT_WR_ATOMIC_LOCK & 0xFFFF0000;\
	/* Release the atomic-lock system event at +0x00 from the event	\
	 * base, drop both locks, then restore the saved trigger mask	\
	 * into TXMASKI.						\
	 * NOTE(review): operand constraints reconstructed from the	\
	 * truncated original - verify against the Meta toolchain's	\
	 * constraint letters.						\
	 */								\
	asm volatile("SETD	[%1+#0x00],D1RtP\n\t"			\
		     "LOCK0\n\t"					\
		     "MOV	TXMASKI,%0"				\
		     :							\
		     : "r" (__trval),					\
		       "u" (__alock_hi)					\
		     : "memory");					\
} while (0)
101 #endif /* __ASM_METAG_GLOBAL_LOCK_H */