crash: move crashkernel parsing and vmcore related code under CONFIG_CRASH_CORE
[linux-2.6-block.git] / include / linux / atomic.h
index e71835bf60a977a37277d44c6357a02e70c3a41d..c56be74101305f74524bc816d9c8fdb67ccb895d 100644 (file)
 #endif
 #endif /* atomic_cmpxchg_relaxed */
 
+/*
+ * Generic fallback for atomic_try_cmpxchg() and its ordering variants,
+ * built on top of atomic_cmpxchg{,_relaxed,_acquire,_release}().
+ *
+ * atomic_try_cmpxchg(ptr, &old, new) attempts to change *ptr from old
+ * to new; it evaluates to true on success.  On failure, the value
+ * observed in *ptr is written back through the old-value pointer so the
+ * caller's retry loop does not need an extra load.
+ *
+ * The write-back is guarded by unlikely(__r != __o) so the common
+ * success path performs no store through __po.
+ *
+ * _p and _n are expanded into the cmpxchg call exactly once; _po is
+ * evaluated once at runtime (typeof() does not evaluate its operand).
+ */
+#ifndef atomic_try_cmpxchg
+
+#define __atomic_try_cmpxchg(type, _p, _po, _n)                                \
+({                                                                     \
+       typeof(_po) __po = (_po);                                       \
+       typeof(*(_po)) __r, __o = *__po;                                \
+       __r = atomic_cmpxchg##type((_p), __o, (_n));                    \
+       if (unlikely(__r != __o))                                       \
+               *__po = __r;                                            \
+       likely(__r == __o);                                             \
+})
+
+#define atomic_try_cmpxchg(_p, _po, _n)                __atomic_try_cmpxchg(, _p, _po, _n)
+#define atomic_try_cmpxchg_relaxed(_p, _po, _n)        __atomic_try_cmpxchg(_relaxed, _p, _po, _n)
+#define atomic_try_cmpxchg_acquire(_p, _po, _n)        __atomic_try_cmpxchg(_acquire, _p, _po, _n)
+#define atomic_try_cmpxchg_release(_p, _po, _n)        __atomic_try_cmpxchg(_release, _p, _po, _n)
+
+/*
+ * The architecture provides atomic_try_cmpxchg(); default every ordering
+ * variant to that fully-ordered implementation (a stronger ordering is
+ * always a valid substitute for a weaker one).
+ */
+#else /* atomic_try_cmpxchg */
+#define atomic_try_cmpxchg_relaxed     atomic_try_cmpxchg
+#define atomic_try_cmpxchg_acquire     atomic_try_cmpxchg
+#define atomic_try_cmpxchg_release     atomic_try_cmpxchg
+#endif /* atomic_try_cmpxchg */
+
 /* cmpxchg_relaxed */
 #ifndef cmpxchg_relaxed
 #define  cmpxchg_relaxed               cmpxchg
@@ -996,6 +1019,29 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_cmpxchg_relaxed */
 
+/*
+ * 64-bit counterpart of the atomic_try_cmpxchg() fallbacks above, built
+ * on atomic64_cmpxchg{,_relaxed,_acquire,_release}().
+ *
+ * atomic64_try_cmpxchg(ptr, &old, new) attempts to change *ptr from old
+ * to new; it evaluates to true on success.  On failure, the value
+ * observed in *ptr is written back through the old-value pointer so the
+ * caller's retry loop does not need an extra load.
+ *
+ * The write-back is guarded by unlikely(__r != __o) so the common
+ * success path performs no store through __po.
+ */
+#ifndef atomic64_try_cmpxchg
+
+#define __atomic64_try_cmpxchg(type, _p, _po, _n)                      \
+({                                                                     \
+       typeof(_po) __po = (_po);                                       \
+       typeof(*(_po)) __r, __o = *__po;                                \
+       __r = atomic64_cmpxchg##type((_p), __o, (_n));                  \
+       if (unlikely(__r != __o))                                       \
+               *__po = __r;                                            \
+       likely(__r == __o);                                             \
+})
+
+#define atomic64_try_cmpxchg(_p, _po, _n)              __atomic64_try_cmpxchg(, _p, _po, _n)
+#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)      __atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
+#define atomic64_try_cmpxchg_acquire(_p, _po, _n)      __atomic64_try_cmpxchg(_acquire, _p, _po, _n)
+#define atomic64_try_cmpxchg_release(_p, _po, _n)      __atomic64_try_cmpxchg(_release, _p, _po, _n)
+
+/*
+ * The architecture provides atomic64_try_cmpxchg(); default every
+ * ordering variant to that fully-ordered implementation.
+ */
+#else /* atomic64_try_cmpxchg */
+#define atomic64_try_cmpxchg_relaxed   atomic64_try_cmpxchg
+#define atomic64_try_cmpxchg_acquire   atomic64_try_cmpxchg
+#define atomic64_try_cmpxchg_release   atomic64_try_cmpxchg
+#endif /* atomic64_try_cmpxchg */
+
 #ifndef atomic64_andnot
 static inline void atomic64_andnot(long long i, atomic64_t *v)
 {