arch/x86/lib/atomic64_cx8_32.S

/*
 * atomic64_t for 586+
 *
 * Copyright © 2010 Luca Barbieri
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/linkage.h>
#include <asm/alternative-asm.h>

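/*
 * read64 turns cmpxchg8b into an atomic 64-bit load: cmpxchg8b compares
 * %edx:%eax with the 8-byte memory operand and, on a mismatch, fetches
 * the memory value into %edx:%eax; on a match it stores %ecx:%ebx.
 * Copying %ebx -> %eax and %ecx -> %edx first makes the compare value
 * equal to the store value, so even a "successful" exchange rewrites the
 * same bytes.  Either way %edx:%eax ends up holding a consistent
 * snapshot of the 64-bit counter.
 */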
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm

ENTRY(atomic64_read_cx8)
	read64 %ecx
	ret
ENDPROC(atomic64_read_cx8)

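/*
 * atomic64_set: %esi holds the atomic64_t pointer and %ecx:%ebx the new
 * value (cmpxchg8b always stores %ecx:%ebx on success).  There is no
 * initial load: if the first cmpxchg8b misses, it fetches the current
 * value into %edx:%eax and the retry then succeeds.
 */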
ENTRY(atomic64_set_cx8)
1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	ret
ENDPROC(atomic64_set_cx8)

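/*
 * atomic64_xchg: same register usage as atomic64_set (%esi = pointer,
 * %ecx:%ebx = new value); the locked cmpxchg8b loop additionally leaves
 * the previous value in %edx:%eax once it succeeds.
 */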
ENTRY(atomic64_xchg_cx8)
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	ret
ENDPROC(atomic64_xchg_cx8)

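/*
 * addsub_return expands below to atomic64_add_return_cx8 and
 * atomic64_sub_return_cx8.  The operand arrives in %edx:%eax and the
 * atomic64_t pointer in %ecx; they are parked in %edi:%esi and %ebp
 * because the cmpxchg8b loop needs %edx:%eax for the old value and
 * %ecx:%ebx for the new one.  The resulting value is returned in
 * %edx:%eax.
 */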
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi

	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp

	read64 %ecx
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
ENDPROC(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb

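/*
 * incdec_return is the same loop specialised for +1/-1: only the
 * atomic64_t pointer (in %esi) is passed in, the loop builds the new
 * value in %ecx:%ebx, and %ebx is saved and restored around it.  The
 * incremented/decremented value is returned in %edx:%eax.
 */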
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

10:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	ret
ENDPROC(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb

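/*
 * atomic64_dec_if_positive: decrement the value at (%esi) unless the
 * result would be negative.  If the subtraction sets the sign bit the
 * store is skipped (js 2f), but the decremented value is still returned
 * in %edx:%eax so the caller can test it.
 */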
ENTRY(atomic64_dec_if_positive_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx
	sbb $0, %ecx
	js 2f
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

2:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	ret
ENDPROC(atomic64_dec_if_positive_cx8)

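/*
 * atomic64_add_unless: add %edx:%eax to (%esi) unless the current value
 * equals the 64-bit comparand passed in %edi:%ecx.  The comparand is
 * spilled to the stack (low half at 0(%esp), high half at 4(%esp)) so
 * that %ecx and %edi are free for the cmpxchg8b loop.  %eax is set to 1
 * if the addition was performed, 0 if the value matched the comparand.
 */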
ENTRY(atomic64_add_unless_cx8)
	pushl %ebp
	pushl %ebx
/* these just push these two parameters on the stack */
	pushl %edi
	pushl %ecx

	movl %eax, %ebp
	movl %edx, %edi

	read64 %esi
1:
	cmpl %eax, 0(%esp)
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %ebp, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	addl $8, %esp
	popl %ebx
	popl %ebp
	ret
4:
	cmpl %edx, 4(%esp)
	jne 2b
	xorl %eax, %eax
	jmp 3b
ENDPROC(atomic64_add_unless_cx8)

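/*
 * atomic64_inc_not_zero: increment the value at (%esi) unless it is
 * zero.  %eax is set to 1 when the increment happens; on the zero path
 * the value itself was 0, so %eax already holds the 0 return value.
 */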
ENTRY(atomic64_inc_not_zero_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ecx
	orl %edx, %ecx
	jz 3f
	movl %eax, %ebx
	xorl %ecx, %ecx
	addl $1, %ebx
	adcl %edx, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	popl %ebx
	ret
ENDPROC(atomic64_inc_not_zero_cx8)