asm-generic/atomic: Add try_cmpxchg() fallbacks
Only x86 provides try_cmpxchg() outside of the atomic_t interfaces; provide generic fallbacks to create this interface from the widely available cmpxchg() function.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Masami Hiramatsu <mhiramat@kernel.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Acked-by: Will Deacon <will@kernel.org>
Link: https://lore.kernel.org/r/159870621515.1229682.15506193091065001742.stgit@devnote2
commit 29f006fdef (parent d741bf41d7)
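For context on why this interface is worth generating everywhere: unlike cmpxchg(), which returns the value it observed, try_cmpxchg() returns a boolean and, on failure, writes the observed value back through the old pointer, so a retry loop needs no explicit reload. A minimal userspace sketch of that usage pattern, built on the GCC/Clang __atomic builtins rather than the kernel API (the helper and function names here are illustrative, not from the patch):

#include <stdbool.h>
#include <stdio.h>

/* Userspace stand-in with the same contract as the kernel's try_cmpxchg():
 * returns true on success; on failure, updates *old to the value observed. */
static bool try_cmpxchg_int(int *ptr, int *old, int new)
{
        return __atomic_compare_exchange_n(ptr, old, new, false,
                                           __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

static int counter;

/* Classic lock-free update loop: no explicit reload on failure, because
 * try_cmpxchg_int() already refreshed 'val' for us. */
static void add_clamped(int *ptr, int delta, int max)
{
        int val = __atomic_load_n(ptr, __ATOMIC_RELAXED);

        do {
                if (val + delta > max)
                        return;
        } while (!try_cmpxchg_int(ptr, &val, val + delta));
}

int main(void)
{
        add_clamped(&counter, 5, 10);
        add_clamped(&counter, 7, 10);   /* refused: would exceed max */
        printf("counter = %d\n", counter);      /* prints 5 */
        return 0;
}

The equivalent cmpxchg()-style loop has to capture the return value, compare it against the expectation, and reassign by hand at every call site; the fallbacks added below generate that wrapping once, per ordering variant.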
@@ -199,7 +199,7 @@ static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
 static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
 {
-	return try_cmpxchg(&v->counter, old, new);
+	return arch_try_cmpxchg(&v->counter, old, new);
 }
 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg

@@ -187,7 +187,7 @@ static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
 static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
 {
-	return try_cmpxchg(&v->counter, old, new);
+	return arch_try_cmpxchg(&v->counter, old, new);
 }
 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

@@ -221,7 +221,7 @@ extern void __add_wrong_size(void)
 #define __try_cmpxchg(ptr, pold, new, size) \
 	__raw_try_cmpxchg((ptr), (pold), (new), (size), LOCK_PREFIX)

-#define try_cmpxchg(ptr, pold, new) \
+#define arch_try_cmpxchg(ptr, pold, new) \
 	__try_cmpxchg((ptr), (pold), (new), sizeof(*(ptr)))

 /*
@@ -1749,6 +1749,50 @@ atomic64_dec_if_positive(atomic64_t *v)
 })
 #endif

+#if !defined(arch_try_cmpxchg_relaxed) || defined(arch_try_cmpxchg)
+#define try_cmpxchg(ptr, oldp, ...) \
+({ \
+	typeof(ptr) __ai_ptr = (ptr); \
+	typeof(oldp) __ai_oldp = (oldp); \
+	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+	arch_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+#endif
+
+#if defined(arch_try_cmpxchg_acquire)
+#define try_cmpxchg_acquire(ptr, oldp, ...) \
+({ \
+	typeof(ptr) __ai_ptr = (ptr); \
+	typeof(oldp) __ai_oldp = (oldp); \
+	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+	arch_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+#endif
+
+#if defined(arch_try_cmpxchg_release)
+#define try_cmpxchg_release(ptr, oldp, ...) \
+({ \
+	typeof(ptr) __ai_ptr = (ptr); \
+	typeof(oldp) __ai_oldp = (oldp); \
+	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+	arch_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+#endif
+
+#if defined(arch_try_cmpxchg_relaxed)
+#define try_cmpxchg_relaxed(ptr, oldp, ...) \
+({ \
+	typeof(ptr) __ai_ptr = (ptr); \
+	typeof(oldp) __ai_oldp = (oldp); \
+	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
+	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
+	arch_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
+})
+#endif
+
 #define cmpxchg_local(ptr, ...) \
 ({ \
 	typeof(ptr) __ai_ptr = (ptr); \

@@ -1786,4 +1830,4 @@ atomic64_dec_if_positive(atomic64_t *v)
 })

 #endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
-// 89bf97f3a7509b740845e51ddf31055b48a81f40
+// ff0fe7f81ee97f01f13bb78b0e3ce800bc56d9dd
@@ -9,9 +9,9 @@
 #include <linux/compiler.h>

 #ifndef arch_xchg_relaxed
-#define arch_xchg_relaxed arch_xchg
 #define arch_xchg_acquire arch_xchg
 #define arch_xchg_release arch_xchg
+#define arch_xchg_relaxed arch_xchg
 #else /* arch_xchg_relaxed */

 #ifndef arch_xchg_acquire

@@ -32,9 +32,9 @@
 #endif /* arch_xchg_relaxed */

 #ifndef arch_cmpxchg_relaxed
-#define arch_cmpxchg_relaxed arch_cmpxchg
 #define arch_cmpxchg_acquire arch_cmpxchg
 #define arch_cmpxchg_release arch_cmpxchg
+#define arch_cmpxchg_relaxed arch_cmpxchg
 #else /* arch_cmpxchg_relaxed */

 #ifndef arch_cmpxchg_acquire

@@ -55,9 +55,9 @@
 #endif /* arch_cmpxchg_relaxed */

 #ifndef arch_cmpxchg64_relaxed
-#define arch_cmpxchg64_relaxed arch_cmpxchg64
 #define arch_cmpxchg64_acquire arch_cmpxchg64
 #define arch_cmpxchg64_release arch_cmpxchg64
+#define arch_cmpxchg64_relaxed arch_cmpxchg64
 #else /* arch_cmpxchg64_relaxed */

 #ifndef arch_cmpxchg64_acquire

@@ -77,6 +77,76 @@

 #endif /* arch_cmpxchg64_relaxed */

+#ifndef arch_try_cmpxchg_relaxed
+#ifdef arch_try_cmpxchg
+#define arch_try_cmpxchg_acquire arch_try_cmpxchg
+#define arch_try_cmpxchg_release arch_try_cmpxchg
+#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
+#endif /* arch_try_cmpxchg */
+
+#ifndef arch_try_cmpxchg
+#define arch_try_cmpxchg(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg */
+
+#ifndef arch_try_cmpxchg_acquire
+#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg_acquire */
+
+#ifndef arch_try_cmpxchg_release
+#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg_release */
+
+#ifndef arch_try_cmpxchg_relaxed
+#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* arch_try_cmpxchg_relaxed */
+
+#else /* arch_try_cmpxchg_relaxed */
+
+#ifndef arch_try_cmpxchg_acquire
+#define arch_try_cmpxchg_acquire(...) \
+	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef arch_try_cmpxchg_release
+#define arch_try_cmpxchg_release(...) \
+	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef arch_try_cmpxchg
+#define arch_try_cmpxchg(...) \
+	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
+#endif
+
+#endif /* arch_try_cmpxchg_relaxed */
+
 #ifndef arch_atomic_read_acquire
 static __always_inline int
 arch_atomic_read_acquire(const atomic_t *v)

@@ -2288,4 +2358,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
 #endif

 #endif /* _LINUX_ATOMIC_FALLBACK_H */
-// 90cd26cfd69d2250303d654955a0cc12620fb91b
+// cca554917d7ea73d5e3e7397dd70c484cad9b2c4
@@ -9,9 +9,9 @@
 #include <linux/compiler.h>

 #ifndef xchg_relaxed
-#define xchg_relaxed xchg
 #define xchg_acquire xchg
 #define xchg_release xchg
+#define xchg_relaxed xchg
 #else /* xchg_relaxed */

 #ifndef xchg_acquire

@@ -32,9 +32,9 @@
 #endif /* xchg_relaxed */

 #ifndef cmpxchg_relaxed
-#define cmpxchg_relaxed cmpxchg
 #define cmpxchg_acquire cmpxchg
 #define cmpxchg_release cmpxchg
+#define cmpxchg_relaxed cmpxchg
 #else /* cmpxchg_relaxed */

 #ifndef cmpxchg_acquire

@@ -55,9 +55,9 @@
 #endif /* cmpxchg_relaxed */

 #ifndef cmpxchg64_relaxed
-#define cmpxchg64_relaxed cmpxchg64
 #define cmpxchg64_acquire cmpxchg64
 #define cmpxchg64_release cmpxchg64
+#define cmpxchg64_relaxed cmpxchg64
 #else /* cmpxchg64_relaxed */

 #ifndef cmpxchg64_acquire

@@ -77,6 +77,76 @@

 #endif /* cmpxchg64_relaxed */

+#ifndef try_cmpxchg_relaxed
+#ifdef try_cmpxchg
+#define try_cmpxchg_acquire try_cmpxchg
+#define try_cmpxchg_release try_cmpxchg
+#define try_cmpxchg_relaxed try_cmpxchg
+#endif /* try_cmpxchg */
+
+#ifndef try_cmpxchg
+#define try_cmpxchg(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = cmpxchg((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* try_cmpxchg */
+
+#ifndef try_cmpxchg_acquire
+#define try_cmpxchg_acquire(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = cmpxchg_acquire((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* try_cmpxchg_acquire */
+
+#ifndef try_cmpxchg_release
+#define try_cmpxchg_release(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = cmpxchg_release((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* try_cmpxchg_release */
+
+#ifndef try_cmpxchg_relaxed
+#define try_cmpxchg_relaxed(_ptr, _oldp, _new) \
+({ \
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
+	___r = cmpxchg_relaxed((_ptr), ___o, (_new)); \
+	if (unlikely(___r != ___o)) \
+		*___op = ___r; \
+	likely(___r == ___o); \
+})
+#endif /* try_cmpxchg_relaxed */
+
+#else /* try_cmpxchg_relaxed */
+
+#ifndef try_cmpxchg_acquire
+#define try_cmpxchg_acquire(...) \
+	__atomic_op_acquire(try_cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef try_cmpxchg_release
+#define try_cmpxchg_release(...) \
+	__atomic_op_release(try_cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef try_cmpxchg
+#define try_cmpxchg(...) \
+	__atomic_op_fence(try_cmpxchg, __VA_ARGS__)
+#endif
+
+#endif /* try_cmpxchg_relaxed */
+
 #define arch_atomic_read atomic_read
 #define arch_atomic_read_acquire atomic_read_acquire

@@ -2522,4 +2592,4 @@ atomic64_dec_if_positive(atomic64_t *v)
 #endif

 #endif /* _LINUX_ATOMIC_FALLBACK_H */
-// 9d95b56f98d82a2a26c7b79ccdd0c47572d50a6f
+// d78e6c293c661c15188f0ec05bce45188c8d5892
@@ -144,15 +144,11 @@ gen_proto_order_variants()
 	printf "#endif /* ${basename}_relaxed */\n\n"
 }

-gen_xchg_fallbacks()
+gen_order_fallbacks()
 {
 	local xchg="$1"; shift
+
 	cat <<EOF
-#ifndef ${xchg}_relaxed
-#define ${xchg}_relaxed ${xchg}
-#define ${xchg}_acquire ${xchg}
-#define ${xchg}_release ${xchg}
-#else /* ${xchg}_relaxed */

 #ifndef ${xchg}_acquire
 #define ${xchg}_acquire(...) \\

@@ -169,11 +165,62 @@ cat <<EOF
 	__atomic_op_fence(${xchg}, __VA_ARGS__)
 #endif

 #endif /* ${xchg}_relaxed */
 EOF
 }

+gen_xchg_fallbacks()
+{
+	local xchg="$1"; shift
+	printf "#ifndef ${xchg}_relaxed\n"
+
+	gen_basic_fallbacks ${xchg}
+
+	printf "#else /* ${xchg}_relaxed */\n"
+
+	gen_order_fallbacks ${xchg}
+
+	printf "#endif /* ${xchg}_relaxed */\n\n"
+}
+
+gen_try_cmpxchg_fallback()
+{
+	local order="$1"; shift;
+
+	cat <<EOF
+#ifndef ${ARCH}try_cmpxchg${order}
+#define ${ARCH}try_cmpxchg${order}(_ptr, _oldp, _new) \\
+({ \\
+	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \\
+	___r = ${ARCH}cmpxchg${order}((_ptr), ___o, (_new)); \\
+	if (unlikely(___r != ___o)) \\
+		*___op = ___r; \\
+	likely(___r == ___o); \\
+})
+#endif /* ${ARCH}try_cmpxchg${order} */
+
+EOF
+}
+
+gen_try_cmpxchg_fallbacks()
+{
+	printf "#ifndef ${ARCH}try_cmpxchg_relaxed\n"
+	printf "#ifdef ${ARCH}try_cmpxchg\n"
+
+	gen_basic_fallbacks "${ARCH}try_cmpxchg"
+
+	printf "#endif /* ${ARCH}try_cmpxchg */\n\n"
+
+	for order in "" "_acquire" "_release" "_relaxed"; do
+		gen_try_cmpxchg_fallback "${order}"
+	done
+
+	printf "#else /* ${ARCH}try_cmpxchg_relaxed */\n"
+
+	gen_order_fallbacks "${ARCH}try_cmpxchg"
+
+	printf "#endif /* ${ARCH}try_cmpxchg_relaxed */\n\n"
+}
+
 cat << EOF
 // SPDX-License-Identifier: GPL-2.0

@@ -191,6 +238,8 @@ for xchg in "${ARCH}xchg" "${ARCH}cmpxchg" "${ARCH}cmpxchg64"; do
 	gen_xchg_fallbacks "${xchg}"
 done

+gen_try_cmpxchg_fallbacks
+
 grep '^[a-z]' "$1" | while read name meta args; do
 	gen_proto "${meta}" "${name}" "${ARCH}" "atomic" "int" ${args}
 done
@@ -103,6 +103,21 @@ gen_xchg()
 	local xchg="$1"; shift
 	local mult="$1"; shift

+	if [ "${xchg%${xchg#try_cmpxchg}}" = "try_cmpxchg" ] ; then
+
+cat <<EOF
+#define ${xchg}(ptr, oldp, ...) \\
+({ \\
+	typeof(ptr) __ai_ptr = (ptr); \\
+	typeof(oldp) __ai_oldp = (oldp); \\
+	instrument_atomic_write(__ai_ptr, ${mult}sizeof(*__ai_ptr)); \\
+	instrument_atomic_write(__ai_oldp, ${mult}sizeof(*__ai_oldp)); \\
+	arch_${xchg}(__ai_ptr, __ai_oldp, __VA_ARGS__); \\
+})
+EOF
+
+	else
+
 cat <<EOF
 #define ${xchg}(ptr, ...) \\
 ({ \\

@@ -111,6 +126,8 @@ cat <<EOF
 	arch_${xchg}(__ai_ptr, __VA_ARGS__); \\
 })
 EOF
+
+	fi
 }

 gen_optional_xchg()

@@ -160,7 +177,7 @@ grep '^[a-z]' "$1" | while read name meta args; do
 	gen_proto "${meta}" "${name}" "atomic64" "s64" ${args}
 done

-for xchg in "xchg" "cmpxchg" "cmpxchg64"; do
+for xchg in "xchg" "cmpxchg" "cmpxchg64" "try_cmpxchg"; do
 	for order in "" "_acquire" "_release" "_relaxed"; do
 		gen_optional_xchg "${xchg}" "${order}"
 	done
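As a sanity check on what the generated fallback provides, the same construction can be written as plain userspace C: a cmpxchg()-style primitive that only returns the value it observed, wrapped so the caller gets a boolean and, on failure, a refreshed expectation. The helper names below are invented for this sketch; only the logic mirrors the arch_try_cmpxchg() fallback above.

#include <assert.h>
#include <stdbool.h>

/* cmpxchg()-style primitive: returns the value seen at *ptr; the store
 * happens only if that value equalled 'old'. (Userspace stand-in.) */
static int my_cmpxchg(int *ptr, int old, int new)
{
        (void)__atomic_compare_exchange_n(ptr, &old, new, false,
                                          __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
        return old;     /* on failure, 'old' was updated to the observed value */
}

/* The fallback construction: compare-and-exchange, and on failure write
 * the observed value back through the caller's expectation pointer. */
static bool my_try_cmpxchg(int *ptr, int *oldp, int new)
{
        int o = *oldp;
        int r = my_cmpxchg(ptr, o, new);

        if (r != o)
                *oldp = r;
        return r == o;
}

int main(void)
{
        int v = 1, expected = 1;

        assert(my_try_cmpxchg(&v, &expected, 2));       /* succeeds, v == 2 */

        expected = 1;                                   /* stale expectation */
        assert(!my_try_cmpxchg(&v, &expected, 3));      /* fails ...         */
        assert(expected == 2);                          /* ... and refreshes */
        assert(v == 2);
        return 0;
}

With that contract available everywhere, the instrumented try_cmpxchg() wrappers can forward to arch_try_cmpxchg() whether the architecture supplies a native version (as x86 does) or only cmpxchg().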