/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_METAG_GLOBAL_LOCK_H
#define __ASM_METAG_GLOBAL_LOCK_H

#include <asm/metag_mem.h>

/**
 * __global_lock1() - Acquire global voluntary lock (LOCK1).
 * @flags:      Variable to store flags into.
 *
 * Acquires the Meta global voluntary lock (LOCK1), also taking care to disable
 * all triggers so we cannot be interrupted, and to enforce a compiler barrier
 * so that the compiler cannot reorder memory accesses across the lock.
 *
 * No other hardware thread will be able to acquire the voluntary or exclusive
 * locks until the voluntary lock is released with __global_unlock1(), but
 * other threads may continue to execute as long as they aren't trying to
 * acquire either of the locks.
 */
#define __global_lock1(flags) do {                                      \
        unsigned int __trval;                                           \
        asm volatile("MOV       %0,#0\n\t"                              \
                     "SWAP      %0,TXMASKI\n\t"                         \
                     "LOCK1"                                            \
                     : "=r" (__trval)                                   \
                     :                                                  \
                     : "memory");                                       \
        (flags) = __trval;                                              \
} while (0)

/**
 * __global_unlock1() - Release global voluntary lock (LOCK1).
 * @flags:      Variable to restore flags from.
 *
 * Releases the Meta global voluntary lock (LOCK1) acquired with
 * __global_lock1(), also taking care to re-enable triggers, and to enforce a
 * compiler barrier so that the compiler cannot reorder memory accesses across
 * the unlock.
 *
 * This immediately allows another hardware thread to acquire the voluntary or
 * exclusive locks.
 */
#define __global_unlock1(flags) do {                                    \
        unsigned int __trval = (flags);                                 \
        asm volatile("LOCK0\n\t"                                        \
                     "MOV       TXMASKI,%0"                             \
                     :                                                  \
                     : "r" (__trval)                                    \
                     : "memory");                                       \
} while (0)
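
/*
 * Example usage (a minimal sketch; "example_counter" and "example_inc"
 * are hypothetical names, not part of this header): the pair above is
 * used in the usual irqsave style, with the previous TXMASKI trigger
 * mask saved in "flags" by the lock and restored by the unlock.
 *
 *      static int example_counter;
 *
 *      static void example_inc(void)
 *      {
 *              unsigned int flags;
 *
 *              __global_lock1(flags);
 *              example_counter++;
 *              __global_unlock1(flags);
 *      }
 */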

/**
 * __global_lock2() - Acquire global exclusive lock (LOCK2).
 * @flags:      Variable to store flags into.
 *
 * Acquires the Meta global voluntary lock and global exclusive lock (LOCK2),
 * also taking care to disable all triggers so we cannot be interrupted, to
 * take the atomic lock (system event), and to enforce a compiler barrier so
 * that the compiler cannot reorder memory accesses across the lock.
 *
 * No other hardware thread will be able to execute code until the locks are
 * released with __global_unlock2().
 */
#define __global_lock2(flags) do {                                      \
        unsigned int __trval;                                           \
        unsigned int __alock_hi = LINSYSEVENT_WR_ATOMIC_LOCK & 0xFFFF0000; \
        asm volatile("MOV       %0,#0\n\t"                              \
                     "SWAP      %0,TXMASKI\n\t"                         \
                     "LOCK2\n\t"                                        \
                     "SETD      [%1+#0x40],D1RtP"                       \
                     : "=&r" (__trval)                                  \
                     : "u" (__alock_hi)                                 \
                     : "memory");                                       \
        (flags) = __trval;                                              \
} while (0)

/**
 * __global_unlock2() - Release global exclusive lock (LOCK2).
 * @flags:      Variable to restore flags from.
 *
 * Releases the Meta global exclusive lock (LOCK2) and global voluntary lock
 * acquired with __global_lock2(), also taking care to release the atomic lock
 * (system event), re-enable triggers, and to enforce a compiler barrier so that
 * the compiler cannot reorder memory accesses across the unlock.
 *
 * This immediately allows other hardware threads to continue executing, and
 * allows one of them to acquire the locks.
 */
#define __global_unlock2(flags) do {                                    \
        unsigned int __trval = (flags);                                 \
        unsigned int __alock_hi = LINSYSEVENT_WR_ATOMIC_LOCK & 0xFFFF0000; \
        asm volatile("SETD      [%1+#0x00],D1RtP\n\t"                   \
                     "LOCK0\n\t"                                        \
                     "MOV       TXMASKI,%0"                             \
                     :                                                  \
                     : "r" (__trval),                                   \
                       "u" (__alock_hi)                                 \
                     : "memory");                                       \
} while (0)
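
/*
 * Example usage (a minimal sketch; "example_pair" and "example_set_pair"
 * are hypothetical names, not part of this header): the exclusive pair
 * above stalls every other hardware thread, so the stores below appear
 * atomic to the whole system. Note that __global_lock2() writes the
 * atomic lock system event at offset #0x40 from the 64KiB-aligned base
 * of LINSYSEVENT_WR_ATOMIC_LOCK, while __global_unlock2() writes the
 * unlock event at offset #0x00.
 *
 *      static int example_pair[2];
 *
 *      static void example_set_pair(int a, int b)
 *      {
 *              unsigned int flags;
 *
 *              __global_lock2(flags);
 *              example_pair[0] = a;
 *              example_pair[1] = b;
 *              __global_unlock2(flags);
 *      }
 */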

#endif /* __ASM_METAG_GLOBAL_LOCK_H */