locking/atomic: Add generic try_cmpxchg64 support
author: Uros Bizjak <ubizjak@gmail.com>
Sun, 15 May 2022 18:42:03 +0000 (20:42 +0200)
committer: Peter Zijlstra <peterz@infradead.org>
Tue, 17 May 2022 22:08:27 +0000 (00:08 +0200)
Add generic support for try_cmpxchg64{,_acquire,_release,_relaxed}
and their fallbacks involving cmpxchg64.

Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lkml.kernel.org/r/20220515184205.103089-2-ubizjak@gmail.com
include/linux/atomic/atomic-arch-fallback.h
include/linux/atomic/atomic-instrumented.h
scripts/atomic/gen-atomic-fallback.sh
scripts/atomic/gen-atomic-instrumented.sh

index 6db58d1808665879b04a021f27ae005a78306cc6..77bc5522e61c68a040fc8707458188671867a58c 100644 (file)
 
 #endif /* arch_try_cmpxchg_relaxed */
 
+#ifndef arch_try_cmpxchg64_relaxed
+#ifdef arch_try_cmpxchg64
+#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
+#define arch_try_cmpxchg64_release arch_try_cmpxchg64
+#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
+#endif /* arch_try_cmpxchg64 */
+
+#ifndef arch_try_cmpxchg64
+#define arch_try_cmpxchg64(_ptr, _oldp, _new) \
+({ \
+       typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+       ___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
+       if (unlikely(___r != ___o)) \
+               *___op = ___r; \
+       likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg64 */
+
+#ifndef arch_try_cmpxchg64_acquire
+#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
+({ \
+       typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+       ___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
+       if (unlikely(___r != ___o)) \
+               *___op = ___r; \
+       likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg64_acquire */
+
+#ifndef arch_try_cmpxchg64_release
+#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
+({ \
+       typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+       ___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
+       if (unlikely(___r != ___o)) \
+               *___op = ___r; \
+       likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg64_release */
+
+#ifndef arch_try_cmpxchg64_relaxed
+#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
+({ \
+       typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+       ___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
+       if (unlikely(___r != ___o)) \
+               *___op = ___r; \
+       likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg64_relaxed */
+
+#else /* arch_try_cmpxchg64_relaxed */
+
+#ifndef arch_try_cmpxchg64_acquire
+#define arch_try_cmpxchg64_acquire(...) \
+       __atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
+#endif
+
+#ifndef arch_try_cmpxchg64_release
+#define arch_try_cmpxchg64_release(...) \
+       __atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
+#endif
+
+#ifndef arch_try_cmpxchg64
+#define arch_try_cmpxchg64(...) \
+       __atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
+#endif
+
+#endif /* arch_try_cmpxchg64_relaxed */
+
 #ifndef arch_atomic_read_acquire
 static __always_inline int
 arch_atomic_read_acquire(const atomic_t *v)
@@ -2386,4 +2456,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
 #endif
 
 #endif /* _LINUX_ATOMIC_FALLBACK_H */
-// 8e2cc06bc0d2c0967d2f8424762bd48555ee40ae
+// b5e87bdd5ede61470c29f7a7e4de781af3770f09
index 5d69b143c28e65faf61e6834d0758e2d262f9ed2..7a139ec030b0c32bcdfc11df93ffb741c48d0282 100644 (file)
@@ -2006,6 +2006,44 @@ atomic_long_dec_if_positive(atomic_long_t *v)
        arch_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
 })
 
+#define try_cmpxchg64(ptr, oldp, ...) \
+({ \
+       typeof(ptr) __ai_ptr = (ptr); \
+       typeof(oldp) __ai_oldp = (oldp); \
+       kcsan_mb(); \
+       instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+       instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+       arch_try_cmpxchg64(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+
+#define try_cmpxchg64_acquire(ptr, oldp, ...) \
+({ \
+       typeof(ptr) __ai_ptr = (ptr); \
+       typeof(oldp) __ai_oldp = (oldp); \
+       instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+       instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+       arch_try_cmpxchg64_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+
+#define try_cmpxchg64_release(ptr, oldp, ...) \
+({ \
+       typeof(ptr) __ai_ptr = (ptr); \
+       typeof(oldp) __ai_oldp = (oldp); \
+       kcsan_release(); \
+       instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+       instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+       arch_try_cmpxchg64_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+
+#define try_cmpxchg64_relaxed(ptr, oldp, ...) \
+({ \
+       typeof(ptr) __ai_ptr = (ptr); \
+       typeof(oldp) __ai_oldp = (oldp); \
+       instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+       instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+       arch_try_cmpxchg64_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+
 #define cmpxchg_local(ptr, ...) \
 ({ \
        typeof(ptr) __ai_ptr = (ptr); \
@@ -2045,4 +2083,4 @@ atomic_long_dec_if_positive(atomic_long_t *v)
 })
 
 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
-// 87c974b93032afd42143613434d1a7788fa598f9
+// 764f741eb77a7ad565dc8d99ce2837d5542e8aee
index 8e2da71f1d5fa7548b0ba96af14571193d2ae052..3a07695e3c8963eea64aef4632c30a25458ab333 100755 (executable)
@@ -164,41 +164,44 @@ gen_xchg_fallbacks()
 
 gen_try_cmpxchg_fallback()
 {
+       local cmpxchg="$1"; shift;
        local order="$1"; shift;
 
 cat <<EOF
-#ifndef arch_try_cmpxchg${order}
-#define arch_try_cmpxchg${order}(_ptr, _oldp, _new) \\
+#ifndef arch_try_${cmpxchg}${order}
+#define arch_try_${cmpxchg}${order}(_ptr, _oldp, _new) \\
 ({ \\
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \\
-       ___r = arch_cmpxchg${order}((_ptr), ___o, (_new)); \\
+       ___r = arch_${cmpxchg}${order}((_ptr), ___o, (_new)); \\
        if (unlikely(___r != ___o)) \\
                *___op = ___r; \\
        likely(___r == ___o); \\
 })
-#endif /* arch_try_cmpxchg${order} */
+#endif /* arch_try_${cmpxchg}${order} */
 
 EOF
 }
 
 gen_try_cmpxchg_fallbacks()
 {
-       printf "#ifndef arch_try_cmpxchg_relaxed\n"
-       printf "#ifdef arch_try_cmpxchg\n"
+       local cmpxchg="$1"; shift;
 
-       gen_basic_fallbacks "arch_try_cmpxchg"
+       printf "#ifndef arch_try_${cmpxchg}_relaxed\n"
+       printf "#ifdef arch_try_${cmpxchg}\n"
 
-       printf "#endif /* arch_try_cmpxchg */\n\n"
+       gen_basic_fallbacks "arch_try_${cmpxchg}"
+
+       printf "#endif /* arch_try_${cmpxchg} */\n\n"
 
        for order in "" "_acquire" "_release" "_relaxed"; do
-               gen_try_cmpxchg_fallback "${order}"
+               gen_try_cmpxchg_fallback "${cmpxchg}" "${order}"
        done
 
-       printf "#else /* arch_try_cmpxchg_relaxed */\n"
+       printf "#else /* arch_try_${cmpxchg}_relaxed */\n"
 
-       gen_order_fallbacks "arch_try_cmpxchg"
+       gen_order_fallbacks "arch_try_${cmpxchg}"
 
-       printf "#endif /* arch_try_cmpxchg_relaxed */\n\n"
+       printf "#endif /* arch_try_${cmpxchg}_relaxed */\n\n"
 }
 
 cat << EOF
@@ -218,7 +221,9 @@ for xchg in "arch_xchg" "arch_cmpxchg" "arch_cmpxchg64"; do
        gen_xchg_fallbacks "${xchg}"
 done
 
-gen_try_cmpxchg_fallbacks
+for cmpxchg in "cmpxchg" "cmpxchg64"; do
+       gen_try_cmpxchg_fallbacks "${cmpxchg}"
+done
 
 grep '^[a-z]' "$1" | while read name meta args; do
        gen_proto "${meta}" "${name}" "atomic" "int" ${args}
index 68f902731d0180d1ac185a62a496315970e782fb..77c06526a574495f30e970c96a73e8d5d07d4c62 100755 (executable)
@@ -166,7 +166,7 @@ grep '^[a-z]' "$1" | while read name meta args; do
 done
 
 
-for xchg in "xchg" "cmpxchg" "cmpxchg64" "try_cmpxchg"; do
+for xchg in "xchg" "cmpxchg" "cmpxchg64" "try_cmpxchg" "try_cmpxchg64"; do
        for order in "" "_acquire" "_release" "_relaxed"; do
                gen_xchg "${xchg}" "${order}" ""
                printf "\n"