From 14d72d4b6f0e88b5f683c1a5b7a876a55055852d Mon Sep 17 00:00:00 2001
From: Mark Rutland
Date: Mon, 5 Jun 2023 08:00:59 +0100
Subject: locking/atomic: remove fallback comments

Currently a subset of the fallback templates has kerneldoc comments,
resulting in a haphazard set of generated kerneldoc comments, as only
some operations have fallback templates to begin with.

We'd like to generate more consistent kerneldoc comments, and to do so
we'll need to restructure the way the fallback code is generated.

To minimize churn and to make it easier to restructure the fallback code,
this patch removes the existing kerneldoc comments from the fallback
templates. We can add new kerneldoc comments in subsequent patches.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland
Signed-off-by: Peter Zijlstra (Intel)
Reviewed-by: Kees Cook
Link: https://lore.kernel.org/r/20230605070124.3741859-3-mark.rutland@arm.com
---
 scripts/atomic/fallbacks/inc_and_test | 8 --------
 1 file changed, 8 deletions(-)

(limited to 'scripts/atomic/fallbacks/inc_and_test')

diff --git a/scripts/atomic/fallbacks/inc_and_test b/scripts/atomic/fallbacks/inc_and_test
index 0cf23fe1efb8..c726a6d0634d 100755
--- a/scripts/atomic/fallbacks/inc_and_test
+++ b/scripts/atomic/fallbacks/inc_and_test
@@ -1,12 +1,4 @@
 cat <

Date: Mon, 5 Jun 2023 08:01:17 +0100
Subject: locking/atomic: scripts: restructure fallback ifdeffery

Currently the various ordering variants of an atomic operation are
defined in groups of full/acquire/release/relaxed ordering variants with
some shared ifdeffery and several potential definitions of each ordering
variant in different branches of the shared ifdeffery.

As an ordering variant can have several potential definitions down
different branches of the shared ifdeffery, it can be painful for a human
to find a relevant definition, and we don't have a good location to place
anything common to all definitions of an ordering variant (e.g. kerneldoc).

Historically the grouping of full/acquire/release/relaxed ordering
variants was necessary as we filled in the missing atomics in the same
namespace as the architecture used. It would be easy to accidentally
define one ordering fallback in terms of another ordering fallback with
redundant barriers, and avoiding that would otherwise require a lot of
baroque ifdeffery.

With recent changes we no longer need to fill in the missing atomics in
the arch_atomic*_() namespace, and only need to fill in the
raw_atomic*_() namespace. Due to this, there's no risk of a namespace
collision, and we can define each raw_atomic*_ ordering variant with its
own ifdeffery checking for the arch_atomic*_ ordering variants.

Restructure the fallbacks in this way, with each ordering variant having
its own ifdeffery of the form:

| #if defined(arch_atomic_fetch_andnot_acquire)
| #define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
| #elif defined(arch_atomic_fetch_andnot_relaxed)
| static __always_inline int
| raw_atomic_fetch_andnot_acquire(int i, atomic_t *v)
| {
| 	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
| 	__atomic_acquire_fence();
| 	return ret;
| }
| #elif defined(arch_atomic_fetch_andnot)
| #define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
| #else
| static __always_inline int
| raw_atomic_fetch_andnot_acquire(int i, atomic_t *v)
| {
| 	return raw_atomic_fetch_and_acquire(~i, v);
| }
| #endif

Note that where there's no relevant arch_atomic*_() ordering variant,
we'll define the operation in terms of a distinct raw_atomic*_(), as this
itself might have been filled in with a fallback.

As we now generate the raw_atomic*_() implementations directly, we no
longer need the trivial wrappers, so they are removed.

This makes the ifdeffery easier to follow, and will allow for further
improvements in subsequent patches.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland
Signed-off-by: Peter Zijlstra (Intel)
Reviewed-by: Kees Cook
Link: https://lore.kernel.org/r/20230605070124.3741859-21-mark.rutland@arm.com
---
 include/linux/atomic.h                       |    1 -
 include/linux/atomic/atomic-arch-fallback.h  | 3178 ++++++++++++++------------
 include/linux/atomic/atomic-raw.h            | 1135 ---------
 scripts/atomic/fallbacks/acquire             |    2 +-
 scripts/atomic/fallbacks/add_negative        |    4 +-
 scripts/atomic/fallbacks/add_unless          |    4 +-
 scripts/atomic/fallbacks/andnot              |    4 +-
 scripts/atomic/fallbacks/cmpxchg             |    4 +-
 scripts/atomic/fallbacks/dec                 |    4 +-
 scripts/atomic/fallbacks/dec_and_test        |    4 +-
 scripts/atomic/fallbacks/dec_if_positive     |    6 +-
 scripts/atomic/fallbacks/dec_unless_positive |    6 +-
 scripts/atomic/fallbacks/fence               |    2 +-
 scripts/atomic/fallbacks/fetch_add_unless    |    6 +-
 scripts/atomic/fallbacks/inc                 |    4 +-
 scripts/atomic/fallbacks/inc_and_test        |    4 +-
 scripts/atomic/fallbacks/inc_not_zero        |    4 +-
 scripts/atomic/fallbacks/inc_unless_negative |    6 +-
 scripts/atomic/fallbacks/read_acquire        |    4 +-
 scripts/atomic/fallbacks/release             |    2 +-
 scripts/atomic/fallbacks/set_release         |    4 +-
 scripts/atomic/fallbacks/sub_and_test        |    4 +-
 scripts/atomic/fallbacks/try_cmpxchg         |    4 +-
 scripts/atomic/fallbacks/xchg                |    4 +-
 scripts/atomic/gen-atomic-fallback.sh        |  236 +-
 scripts/atomic/gen-atomic-raw.sh             |   80 -
 scripts/atomic/gen-atomics.sh                |    1 -
 27 files changed, 1866 insertions(+), 2851 deletions(-)
 delete mode 100644 include/linux/atomic/atomic-raw.h
 delete mode 100644 scripts/atomic/gen-atomic-raw.sh

(limited to 'scripts/atomic/fallbacks/inc_and_test')

diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 296cfae0389f..8dd57c3a99e9 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -78,7 +78,6 @@
 })
 #include
-#include
 #include
 #include

diff --git a/include/linux/atomic/atomic-arch-fallback.h b/include/linux/atomic/atomic-arch-fallback.h
index 1a2d81dbc2e4..99bc1a871dc1 100644
--- a/include/linux/atomic/atomic-arch-fallback.h
+++ b/include/linux/atomic/atomic-arch-fallback.h
@@ -8,2749 +8,2911 @@
 #include
-#ifndef arch_xchg_relaxed
-#define arch_xchg_acquire arch_xchg
-#define arch_xchg_release arch_xchg
-#define arch_xchg_relaxed arch_xchg
-#else /* arch_xchg_relaxed */
-
-#ifndef arch_xchg_acquire
-#define arch_xchg_acquire(...)
\ - __atomic_op_acquire(arch_xchg, __VA_ARGS__) +#if defined(arch_xchg) +#define raw_xchg arch_xchg +#elif defined(arch_xchg_relaxed) +#define raw_xchg(...) \ + __atomic_op_fence(arch_xchg, __VA_ARGS__) +#else +extern void raw_xchg_not_implemented(void); +#define raw_xchg(...) raw_xchg_not_implemented() #endif -#ifndef arch_xchg_release -#define arch_xchg_release(...) \ - __atomic_op_release(arch_xchg, __VA_ARGS__) +#if defined(arch_xchg_acquire) +#define raw_xchg_acquire arch_xchg_acquire +#elif defined(arch_xchg_relaxed) +#define raw_xchg_acquire(...) \ + __atomic_op_acquire(arch_xchg, __VA_ARGS__) +#elif defined(arch_xchg) +#define raw_xchg_acquire arch_xchg +#else +extern void raw_xchg_acquire_not_implemented(void); +#define raw_xchg_acquire(...) raw_xchg_acquire_not_implemented() #endif -#ifndef arch_xchg -#define arch_xchg(...) \ - __atomic_op_fence(arch_xchg, __VA_ARGS__) +#if defined(arch_xchg_release) +#define raw_xchg_release arch_xchg_release +#elif defined(arch_xchg_relaxed) +#define raw_xchg_release(...) \ + __atomic_op_release(arch_xchg, __VA_ARGS__) +#elif defined(arch_xchg) +#define raw_xchg_release arch_xchg +#else +extern void raw_xchg_release_not_implemented(void); +#define raw_xchg_release(...) raw_xchg_release_not_implemented() +#endif + +#if defined(arch_xchg_relaxed) +#define raw_xchg_relaxed arch_xchg_relaxed +#elif defined(arch_xchg) +#define raw_xchg_relaxed arch_xchg +#else +extern void raw_xchg_relaxed_not_implemented(void); +#define raw_xchg_relaxed(...) raw_xchg_relaxed_not_implemented() +#endif + +#if defined(arch_cmpxchg) +#define raw_cmpxchg arch_cmpxchg +#elif defined(arch_cmpxchg_relaxed) +#define raw_cmpxchg(...) \ + __atomic_op_fence(arch_cmpxchg, __VA_ARGS__) +#else +extern void raw_cmpxchg_not_implemented(void); +#define raw_cmpxchg(...) raw_cmpxchg_not_implemented() #endif -#endif /* arch_xchg_relaxed */ - -#ifndef arch_cmpxchg_relaxed -#define arch_cmpxchg_acquire arch_cmpxchg -#define arch_cmpxchg_release arch_cmpxchg -#define arch_cmpxchg_relaxed arch_cmpxchg -#else /* arch_cmpxchg_relaxed */ - -#ifndef arch_cmpxchg_acquire -#define arch_cmpxchg_acquire(...) \ +#if defined(arch_cmpxchg_acquire) +#define raw_cmpxchg_acquire arch_cmpxchg_acquire +#elif defined(arch_cmpxchg_relaxed) +#define raw_cmpxchg_acquire(...) \ __atomic_op_acquire(arch_cmpxchg, __VA_ARGS__) +#elif defined(arch_cmpxchg) +#define raw_cmpxchg_acquire arch_cmpxchg +#else +extern void raw_cmpxchg_acquire_not_implemented(void); +#define raw_cmpxchg_acquire(...) raw_cmpxchg_acquire_not_implemented() #endif -#ifndef arch_cmpxchg_release -#define arch_cmpxchg_release(...) \ +#if defined(arch_cmpxchg_release) +#define raw_cmpxchg_release arch_cmpxchg_release +#elif defined(arch_cmpxchg_relaxed) +#define raw_cmpxchg_release(...) \ __atomic_op_release(arch_cmpxchg, __VA_ARGS__) +#elif defined(arch_cmpxchg) +#define raw_cmpxchg_release arch_cmpxchg +#else +extern void raw_cmpxchg_release_not_implemented(void); +#define raw_cmpxchg_release(...) raw_cmpxchg_release_not_implemented() +#endif + +#if defined(arch_cmpxchg_relaxed) +#define raw_cmpxchg_relaxed arch_cmpxchg_relaxed +#elif defined(arch_cmpxchg) +#define raw_cmpxchg_relaxed arch_cmpxchg +#else +extern void raw_cmpxchg_relaxed_not_implemented(void); +#define raw_cmpxchg_relaxed(...) raw_cmpxchg_relaxed_not_implemented() +#endif + +#if defined(arch_cmpxchg64) +#define raw_cmpxchg64 arch_cmpxchg64 +#elif defined(arch_cmpxchg64_relaxed) +#define raw_cmpxchg64(...) 
\ + __atomic_op_fence(arch_cmpxchg64, __VA_ARGS__) +#else +extern void raw_cmpxchg64_not_implemented(void); +#define raw_cmpxchg64(...) raw_cmpxchg64_not_implemented() #endif -#ifndef arch_cmpxchg -#define arch_cmpxchg(...) \ - __atomic_op_fence(arch_cmpxchg, __VA_ARGS__) -#endif - -#endif /* arch_cmpxchg_relaxed */ - -#ifndef arch_cmpxchg64_relaxed -#define arch_cmpxchg64_acquire arch_cmpxchg64 -#define arch_cmpxchg64_release arch_cmpxchg64 -#define arch_cmpxchg64_relaxed arch_cmpxchg64 -#else /* arch_cmpxchg64_relaxed */ - -#ifndef arch_cmpxchg64_acquire -#define arch_cmpxchg64_acquire(...) \ +#if defined(arch_cmpxchg64_acquire) +#define raw_cmpxchg64_acquire arch_cmpxchg64_acquire +#elif defined(arch_cmpxchg64_relaxed) +#define raw_cmpxchg64_acquire(...) \ __atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__) +#elif defined(arch_cmpxchg64) +#define raw_cmpxchg64_acquire arch_cmpxchg64 +#else +extern void raw_cmpxchg64_acquire_not_implemented(void); +#define raw_cmpxchg64_acquire(...) raw_cmpxchg64_acquire_not_implemented() #endif -#ifndef arch_cmpxchg64_release -#define arch_cmpxchg64_release(...) \ +#if defined(arch_cmpxchg64_release) +#define raw_cmpxchg64_release arch_cmpxchg64_release +#elif defined(arch_cmpxchg64_relaxed) +#define raw_cmpxchg64_release(...) \ __atomic_op_release(arch_cmpxchg64, __VA_ARGS__) +#elif defined(arch_cmpxchg64) +#define raw_cmpxchg64_release arch_cmpxchg64 +#else +extern void raw_cmpxchg64_release_not_implemented(void); +#define raw_cmpxchg64_release(...) raw_cmpxchg64_release_not_implemented() +#endif + +#if defined(arch_cmpxchg64_relaxed) +#define raw_cmpxchg64_relaxed arch_cmpxchg64_relaxed +#elif defined(arch_cmpxchg64) +#define raw_cmpxchg64_relaxed arch_cmpxchg64 +#else +extern void raw_cmpxchg64_relaxed_not_implemented(void); +#define raw_cmpxchg64_relaxed(...) raw_cmpxchg64_relaxed_not_implemented() +#endif + +#if defined(arch_cmpxchg128) +#define raw_cmpxchg128 arch_cmpxchg128 +#elif defined(arch_cmpxchg128_relaxed) +#define raw_cmpxchg128(...) \ + __atomic_op_fence(arch_cmpxchg128, __VA_ARGS__) +#else +extern void raw_cmpxchg128_not_implemented(void); +#define raw_cmpxchg128(...) raw_cmpxchg128_not_implemented() #endif -#ifndef arch_cmpxchg64 -#define arch_cmpxchg64(...) \ - __atomic_op_fence(arch_cmpxchg64, __VA_ARGS__) -#endif - -#endif /* arch_cmpxchg64_relaxed */ - -#ifndef arch_cmpxchg128_relaxed -#define arch_cmpxchg128_acquire arch_cmpxchg128 -#define arch_cmpxchg128_release arch_cmpxchg128 -#define arch_cmpxchg128_relaxed arch_cmpxchg128 -#else /* arch_cmpxchg128_relaxed */ - -#ifndef arch_cmpxchg128_acquire -#define arch_cmpxchg128_acquire(...) \ +#if defined(arch_cmpxchg128_acquire) +#define raw_cmpxchg128_acquire arch_cmpxchg128_acquire +#elif defined(arch_cmpxchg128_relaxed) +#define raw_cmpxchg128_acquire(...) \ __atomic_op_acquire(arch_cmpxchg128, __VA_ARGS__) +#elif defined(arch_cmpxchg128) +#define raw_cmpxchg128_acquire arch_cmpxchg128 +#else +extern void raw_cmpxchg128_acquire_not_implemented(void); +#define raw_cmpxchg128_acquire(...) raw_cmpxchg128_acquire_not_implemented() #endif -#ifndef arch_cmpxchg128_release -#define arch_cmpxchg128_release(...) \ +#if defined(arch_cmpxchg128_release) +#define raw_cmpxchg128_release arch_cmpxchg128_release +#elif defined(arch_cmpxchg128_relaxed) +#define raw_cmpxchg128_release(...) \ __atomic_op_release(arch_cmpxchg128, __VA_ARGS__) -#endif - -#ifndef arch_cmpxchg128 -#define arch_cmpxchg128(...) 
\ - __atomic_op_fence(arch_cmpxchg128, __VA_ARGS__) -#endif - -#endif /* arch_cmpxchg128_relaxed */ - -#ifndef arch_try_cmpxchg_relaxed -#ifdef arch_try_cmpxchg -#define arch_try_cmpxchg_acquire arch_try_cmpxchg -#define arch_try_cmpxchg_release arch_try_cmpxchg -#define arch_try_cmpxchg_relaxed arch_try_cmpxchg -#endif /* arch_try_cmpxchg */ - -#ifndef arch_try_cmpxchg -#define arch_try_cmpxchg(_ptr, _oldp, _new) \ +#elif defined(arch_cmpxchg128) +#define raw_cmpxchg128_release arch_cmpxchg128 +#else +extern void raw_cmpxchg128_release_not_implemented(void); +#define raw_cmpxchg128_release(...) raw_cmpxchg128_release_not_implemented() +#endif + +#if defined(arch_cmpxchg128_relaxed) +#define raw_cmpxchg128_relaxed arch_cmpxchg128_relaxed +#elif defined(arch_cmpxchg128) +#define raw_cmpxchg128_relaxed arch_cmpxchg128 +#else +extern void raw_cmpxchg128_relaxed_not_implemented(void); +#define raw_cmpxchg128_relaxed(...) raw_cmpxchg128_relaxed_not_implemented() +#endif + +#if defined(arch_try_cmpxchg) +#define raw_try_cmpxchg arch_try_cmpxchg +#elif defined(arch_try_cmpxchg_relaxed) +#define raw_try_cmpxchg(...) \ + __atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__) +#else +#define raw_try_cmpxchg(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg */ +#endif -#ifndef arch_try_cmpxchg_acquire -#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg_acquire) +#define raw_try_cmpxchg_acquire arch_try_cmpxchg_acquire +#elif defined(arch_try_cmpxchg_relaxed) +#define raw_try_cmpxchg_acquire(...) \ + __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__) +#elif defined(arch_try_cmpxchg) +#define raw_try_cmpxchg_acquire arch_try_cmpxchg +#else +#define raw_try_cmpxchg_acquire(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg_acquire((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg_acquire */ +#endif -#ifndef arch_try_cmpxchg_release -#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg_release) +#define raw_try_cmpxchg_release arch_try_cmpxchg_release +#elif defined(arch_try_cmpxchg_relaxed) +#define raw_try_cmpxchg_release(...) 
\ + __atomic_op_release(arch_try_cmpxchg, __VA_ARGS__) +#elif defined(arch_try_cmpxchg) +#define raw_try_cmpxchg_release arch_try_cmpxchg +#else +#define raw_try_cmpxchg_release(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg_release((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg_release */ +#endif -#ifndef arch_try_cmpxchg_relaxed -#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg_relaxed) +#define raw_try_cmpxchg_relaxed arch_try_cmpxchg_relaxed +#elif defined(arch_try_cmpxchg) +#define raw_try_cmpxchg_relaxed arch_try_cmpxchg +#else +#define raw_try_cmpxchg_relaxed(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg_relaxed((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg_relaxed */ - -#else /* arch_try_cmpxchg_relaxed */ - -#ifndef arch_try_cmpxchg_acquire -#define arch_try_cmpxchg_acquire(...) \ - __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__) -#endif - -#ifndef arch_try_cmpxchg_release -#define arch_try_cmpxchg_release(...) \ - __atomic_op_release(arch_try_cmpxchg, __VA_ARGS__) #endif -#ifndef arch_try_cmpxchg -#define arch_try_cmpxchg(...) \ - __atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__) -#endif - -#endif /* arch_try_cmpxchg_relaxed */ - -#ifndef arch_try_cmpxchg64_relaxed -#ifdef arch_try_cmpxchg64 -#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64 -#define arch_try_cmpxchg64_release arch_try_cmpxchg64 -#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64 -#endif /* arch_try_cmpxchg64 */ - -#ifndef arch_try_cmpxchg64 -#define arch_try_cmpxchg64(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg64) +#define raw_try_cmpxchg64 arch_try_cmpxchg64 +#elif defined(arch_try_cmpxchg64_relaxed) +#define raw_try_cmpxchg64(...) \ + __atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__) +#else +#define raw_try_cmpxchg64(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg64((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg64((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg64 */ +#endif -#ifndef arch_try_cmpxchg64_acquire -#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg64_acquire) +#define raw_try_cmpxchg64_acquire arch_try_cmpxchg64_acquire +#elif defined(arch_try_cmpxchg64_relaxed) +#define raw_try_cmpxchg64_acquire(...) \ + __atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__) +#elif defined(arch_try_cmpxchg64) +#define raw_try_cmpxchg64_acquire arch_try_cmpxchg64 +#else +#define raw_try_cmpxchg64_acquire(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg64_acquire((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg64_acquire */ +#endif -#ifndef arch_try_cmpxchg64_release -#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg64_release) +#define raw_try_cmpxchg64_release arch_try_cmpxchg64_release +#elif defined(arch_try_cmpxchg64_relaxed) +#define raw_try_cmpxchg64_release(...) 
\ + __atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__) +#elif defined(arch_try_cmpxchg64) +#define raw_try_cmpxchg64_release arch_try_cmpxchg64 +#else +#define raw_try_cmpxchg64_release(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg64_release((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg64_release */ +#endif -#ifndef arch_try_cmpxchg64_relaxed -#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg64_relaxed) +#define raw_try_cmpxchg64_relaxed arch_try_cmpxchg64_relaxed +#elif defined(arch_try_cmpxchg64) +#define raw_try_cmpxchg64_relaxed arch_try_cmpxchg64 +#else +#define raw_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg64_relaxed((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg64_relaxed */ - -#else /* arch_try_cmpxchg64_relaxed */ - -#ifndef arch_try_cmpxchg64_acquire -#define arch_try_cmpxchg64_acquire(...) \ - __atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__) -#endif - -#ifndef arch_try_cmpxchg64_release -#define arch_try_cmpxchg64_release(...) \ - __atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__) -#endif - -#ifndef arch_try_cmpxchg64 -#define arch_try_cmpxchg64(...) \ - __atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__) #endif -#endif /* arch_try_cmpxchg64_relaxed */ - -#ifndef arch_try_cmpxchg128_relaxed -#ifdef arch_try_cmpxchg128 -#define arch_try_cmpxchg128_acquire arch_try_cmpxchg128 -#define arch_try_cmpxchg128_release arch_try_cmpxchg128 -#define arch_try_cmpxchg128_relaxed arch_try_cmpxchg128 -#endif /* arch_try_cmpxchg128 */ - -#ifndef arch_try_cmpxchg128 -#define arch_try_cmpxchg128(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg128) +#define raw_try_cmpxchg128 arch_try_cmpxchg128 +#elif defined(arch_try_cmpxchg128_relaxed) +#define raw_try_cmpxchg128(...) \ + __atomic_op_fence(arch_try_cmpxchg128, __VA_ARGS__) +#else +#define raw_try_cmpxchg128(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg128((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg128((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg128 */ +#endif -#ifndef arch_try_cmpxchg128_acquire -#define arch_try_cmpxchg128_acquire(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg128_acquire) +#define raw_try_cmpxchg128_acquire arch_try_cmpxchg128_acquire +#elif defined(arch_try_cmpxchg128_relaxed) +#define raw_try_cmpxchg128_acquire(...) 
\ + __atomic_op_acquire(arch_try_cmpxchg128, __VA_ARGS__) +#elif defined(arch_try_cmpxchg128) +#define raw_try_cmpxchg128_acquire arch_try_cmpxchg128 +#else +#define raw_try_cmpxchg128_acquire(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg128_acquire((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg128_acquire((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg128_acquire */ +#endif -#ifndef arch_try_cmpxchg128_release -#define arch_try_cmpxchg128_release(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg128_release) +#define raw_try_cmpxchg128_release arch_try_cmpxchg128_release +#elif defined(arch_try_cmpxchg128_relaxed) +#define raw_try_cmpxchg128_release(...) \ + __atomic_op_release(arch_try_cmpxchg128, __VA_ARGS__) +#elif defined(arch_try_cmpxchg128) +#define raw_try_cmpxchg128_release arch_try_cmpxchg128 +#else +#define raw_try_cmpxchg128_release(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg128_release((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg128_release((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg128_release */ +#endif -#ifndef arch_try_cmpxchg128_relaxed -#define arch_try_cmpxchg128_relaxed(_ptr, _oldp, _new) \ +#if defined(arch_try_cmpxchg128_relaxed) +#define raw_try_cmpxchg128_relaxed arch_try_cmpxchg128_relaxed +#elif defined(arch_try_cmpxchg128) +#define raw_try_cmpxchg128_relaxed arch_try_cmpxchg128 +#else +#define raw_try_cmpxchg128_relaxed(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg128_relaxed((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg128_relaxed((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg128_relaxed */ - -#else /* arch_try_cmpxchg128_relaxed */ - -#ifndef arch_try_cmpxchg128_acquire -#define arch_try_cmpxchg128_acquire(...) \ - __atomic_op_acquire(arch_try_cmpxchg128, __VA_ARGS__) #endif -#ifndef arch_try_cmpxchg128_release -#define arch_try_cmpxchg128_release(...) \ - __atomic_op_release(arch_try_cmpxchg128, __VA_ARGS__) -#endif +#define raw_cmpxchg_local arch_cmpxchg_local -#ifndef arch_try_cmpxchg128 -#define arch_try_cmpxchg128(...) 
\ - __atomic_op_fence(arch_try_cmpxchg128, __VA_ARGS__) +#ifdef arch_try_cmpxchg_local +#define raw_try_cmpxchg_local arch_try_cmpxchg_local +#else +#define raw_try_cmpxchg_local(_ptr, _oldp, _new) \ +({ \ + typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ + ___r = raw_cmpxchg_local((_ptr), ___o, (_new)); \ + if (unlikely(___r != ___o)) \ + *___op = ___r; \ + likely(___r == ___o); \ +}) #endif -#endif /* arch_try_cmpxchg128_relaxed */ +#define raw_cmpxchg64_local arch_cmpxchg64_local -#ifndef arch_try_cmpxchg_local -#define arch_try_cmpxchg_local(_ptr, _oldp, _new) \ +#ifdef arch_try_cmpxchg64_local +#define raw_try_cmpxchg64_local arch_try_cmpxchg64_local +#else +#define raw_try_cmpxchg64_local(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg_local((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg64_local((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg_local */ +#endif + +#define raw_cmpxchg128_local arch_cmpxchg128_local -#ifndef arch_try_cmpxchg64_local -#define arch_try_cmpxchg64_local(_ptr, _oldp, _new) \ +#ifdef arch_try_cmpxchg128_local +#define raw_try_cmpxchg128_local arch_try_cmpxchg128_local +#else +#define raw_try_cmpxchg128_local(_ptr, _oldp, _new) \ ({ \ typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \ - ___r = arch_cmpxchg64_local((_ptr), ___o, (_new)); \ + ___r = raw_cmpxchg128_local((_ptr), ___o, (_new)); \ if (unlikely(___r != ___o)) \ *___op = ___r; \ likely(___r == ___o); \ }) -#endif /* arch_try_cmpxchg64_local */ +#endif + +#define raw_sync_cmpxchg arch_sync_cmpxchg -#ifndef arch_atomic_read_acquire +#define raw_atomic_read arch_atomic_read + +#if defined(arch_atomic_read_acquire) +#define raw_atomic_read_acquire arch_atomic_read_acquire +#elif defined(arch_atomic_read) +#define raw_atomic_read_acquire arch_atomic_read +#else static __always_inline int -arch_atomic_read_acquire(const atomic_t *v) +raw_atomic_read_acquire(const atomic_t *v) { int ret; if (__native_word(atomic_t)) { ret = smp_load_acquire(&(v)->counter); } else { - ret = arch_atomic_read(v); + ret = raw_atomic_read(v); __atomic_acquire_fence(); } return ret; } -#define arch_atomic_read_acquire arch_atomic_read_acquire #endif -#ifndef arch_atomic_set_release +#define raw_atomic_set arch_atomic_set + +#if defined(arch_atomic_set_release) +#define raw_atomic_set_release arch_atomic_set_release +#elif defined(arch_atomic_set) +#define raw_atomic_set_release arch_atomic_set +#else static __always_inline void -arch_atomic_set_release(atomic_t *v, int i) +raw_atomic_set_release(atomic_t *v, int i) { if (__native_word(atomic_t)) { smp_store_release(&(v)->counter, i); } else { __atomic_release_fence(); - arch_atomic_set(v, i); + raw_atomic_set(v, i); } } -#define arch_atomic_set_release arch_atomic_set_release #endif -#ifndef arch_atomic_add_return_relaxed -#define arch_atomic_add_return_acquire arch_atomic_add_return -#define arch_atomic_add_return_release arch_atomic_add_return -#define arch_atomic_add_return_relaxed arch_atomic_add_return -#else /* arch_atomic_add_return_relaxed */ +#define raw_atomic_add arch_atomic_add + +#if defined(arch_atomic_add_return) +#define raw_atomic_add_return arch_atomic_add_return +#elif defined(arch_atomic_add_return_relaxed) +static __always_inline int +raw_atomic_add_return(int i, atomic_t *v) +{ + int ret; + __atomic_pre_full_fence(); + ret = arch_atomic_add_return_relaxed(i, v); + __atomic_post_full_fence(); + return ret; +} 
+#else +#error "Unable to define raw_atomic_add_return" +#endif -#ifndef arch_atomic_add_return_acquire +#if defined(arch_atomic_add_return_acquire) +#define raw_atomic_add_return_acquire arch_atomic_add_return_acquire +#elif defined(arch_atomic_add_return_relaxed) static __always_inline int -arch_atomic_add_return_acquire(int i, atomic_t *v) +raw_atomic_add_return_acquire(int i, atomic_t *v) { int ret = arch_atomic_add_return_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire +#elif defined(arch_atomic_add_return) +#define raw_atomic_add_return_acquire arch_atomic_add_return +#else +#error "Unable to define raw_atomic_add_return_acquire" #endif -#ifndef arch_atomic_add_return_release +#if defined(arch_atomic_add_return_release) +#define raw_atomic_add_return_release arch_atomic_add_return_release +#elif defined(arch_atomic_add_return_relaxed) static __always_inline int -arch_atomic_add_return_release(int i, atomic_t *v) +raw_atomic_add_return_release(int i, atomic_t *v) { __atomic_release_fence(); return arch_atomic_add_return_relaxed(i, v); } -#define arch_atomic_add_return_release arch_atomic_add_return_release +#elif defined(arch_atomic_add_return) +#define raw_atomic_add_return_release arch_atomic_add_return +#else +#error "Unable to define raw_atomic_add_return_release" #endif -#ifndef arch_atomic_add_return +#if defined(arch_atomic_add_return_relaxed) +#define raw_atomic_add_return_relaxed arch_atomic_add_return_relaxed +#elif defined(arch_atomic_add_return) +#define raw_atomic_add_return_relaxed arch_atomic_add_return +#else +#error "Unable to define raw_atomic_add_return_relaxed" +#endif + +#if defined(arch_atomic_fetch_add) +#define raw_atomic_fetch_add arch_atomic_fetch_add +#elif defined(arch_atomic_fetch_add_relaxed) static __always_inline int -arch_atomic_add_return(int i, atomic_t *v) +raw_atomic_fetch_add(int i, atomic_t *v) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_add_return_relaxed(i, v); + ret = arch_atomic_fetch_add_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic_add_return arch_atomic_add_return +#else +#error "Unable to define raw_atomic_fetch_add" #endif -#endif /* arch_atomic_add_return_relaxed */ - -#ifndef arch_atomic_fetch_add_relaxed -#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add -#define arch_atomic_fetch_add_release arch_atomic_fetch_add -#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add -#else /* arch_atomic_fetch_add_relaxed */ - -#ifndef arch_atomic_fetch_add_acquire +#if defined(arch_atomic_fetch_add_acquire) +#define raw_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire +#elif defined(arch_atomic_fetch_add_relaxed) static __always_inline int -arch_atomic_fetch_add_acquire(int i, atomic_t *v) +raw_atomic_fetch_add_acquire(int i, atomic_t *v) { int ret = arch_atomic_fetch_add_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire +#elif defined(arch_atomic_fetch_add) +#define raw_atomic_fetch_add_acquire arch_atomic_fetch_add +#else +#error "Unable to define raw_atomic_fetch_add_acquire" #endif -#ifndef arch_atomic_fetch_add_release +#if defined(arch_atomic_fetch_add_release) +#define raw_atomic_fetch_add_release arch_atomic_fetch_add_release +#elif defined(arch_atomic_fetch_add_relaxed) static __always_inline int -arch_atomic_fetch_add_release(int i, atomic_t *v) +raw_atomic_fetch_add_release(int i, atomic_t *v) { __atomic_release_fence(); 
return arch_atomic_fetch_add_relaxed(i, v); } -#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release +#elif defined(arch_atomic_fetch_add) +#define raw_atomic_fetch_add_release arch_atomic_fetch_add +#else +#error "Unable to define raw_atomic_fetch_add_release" +#endif + +#if defined(arch_atomic_fetch_add_relaxed) +#define raw_atomic_fetch_add_relaxed arch_atomic_fetch_add_relaxed +#elif defined(arch_atomic_fetch_add) +#define raw_atomic_fetch_add_relaxed arch_atomic_fetch_add +#else +#error "Unable to define raw_atomic_fetch_add_relaxed" #endif -#ifndef arch_atomic_fetch_add +#define raw_atomic_sub arch_atomic_sub + +#if defined(arch_atomic_sub_return) +#define raw_atomic_sub_return arch_atomic_sub_return +#elif defined(arch_atomic_sub_return_relaxed) static __always_inline int -arch_atomic_fetch_add(int i, atomic_t *v) +raw_atomic_sub_return(int i, atomic_t *v) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_fetch_add_relaxed(i, v); + ret = arch_atomic_sub_return_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic_fetch_add arch_atomic_fetch_add +#else +#error "Unable to define raw_atomic_sub_return" #endif -#endif /* arch_atomic_fetch_add_relaxed */ - -#ifndef arch_atomic_sub_return_relaxed -#define arch_atomic_sub_return_acquire arch_atomic_sub_return -#define arch_atomic_sub_return_release arch_atomic_sub_return -#define arch_atomic_sub_return_relaxed arch_atomic_sub_return -#else /* arch_atomic_sub_return_relaxed */ - -#ifndef arch_atomic_sub_return_acquire +#if defined(arch_atomic_sub_return_acquire) +#define raw_atomic_sub_return_acquire arch_atomic_sub_return_acquire +#elif defined(arch_atomic_sub_return_relaxed) static __always_inline int -arch_atomic_sub_return_acquire(int i, atomic_t *v) +raw_atomic_sub_return_acquire(int i, atomic_t *v) { int ret = arch_atomic_sub_return_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire +#elif defined(arch_atomic_sub_return) +#define raw_atomic_sub_return_acquire arch_atomic_sub_return +#else +#error "Unable to define raw_atomic_sub_return_acquire" #endif -#ifndef arch_atomic_sub_return_release +#if defined(arch_atomic_sub_return_release) +#define raw_atomic_sub_return_release arch_atomic_sub_return_release +#elif defined(arch_atomic_sub_return_relaxed) static __always_inline int -arch_atomic_sub_return_release(int i, atomic_t *v) +raw_atomic_sub_return_release(int i, atomic_t *v) { __atomic_release_fence(); return arch_atomic_sub_return_relaxed(i, v); } -#define arch_atomic_sub_return_release arch_atomic_sub_return_release +#elif defined(arch_atomic_sub_return) +#define raw_atomic_sub_return_release arch_atomic_sub_return +#else +#error "Unable to define raw_atomic_sub_return_release" +#endif + +#if defined(arch_atomic_sub_return_relaxed) +#define raw_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed +#elif defined(arch_atomic_sub_return) +#define raw_atomic_sub_return_relaxed arch_atomic_sub_return +#else +#error "Unable to define raw_atomic_sub_return_relaxed" #endif -#ifndef arch_atomic_sub_return +#if defined(arch_atomic_fetch_sub) +#define raw_atomic_fetch_sub arch_atomic_fetch_sub +#elif defined(arch_atomic_fetch_sub_relaxed) static __always_inline int -arch_atomic_sub_return(int i, atomic_t *v) +raw_atomic_fetch_sub(int i, atomic_t *v) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_sub_return_relaxed(i, v); + ret = arch_atomic_fetch_sub_relaxed(i, v); __atomic_post_full_fence(); return ret; } 
-#define arch_atomic_sub_return arch_atomic_sub_return +#else +#error "Unable to define raw_atomic_fetch_sub" #endif -#endif /* arch_atomic_sub_return_relaxed */ - -#ifndef arch_atomic_fetch_sub_relaxed -#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub -#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub -#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub -#else /* arch_atomic_fetch_sub_relaxed */ - -#ifndef arch_atomic_fetch_sub_acquire +#if defined(arch_atomic_fetch_sub_acquire) +#define raw_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire +#elif defined(arch_atomic_fetch_sub_relaxed) static __always_inline int -arch_atomic_fetch_sub_acquire(int i, atomic_t *v) +raw_atomic_fetch_sub_acquire(int i, atomic_t *v) { int ret = arch_atomic_fetch_sub_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire +#elif defined(arch_atomic_fetch_sub) +#define raw_atomic_fetch_sub_acquire arch_atomic_fetch_sub +#else +#error "Unable to define raw_atomic_fetch_sub_acquire" #endif -#ifndef arch_atomic_fetch_sub_release +#if defined(arch_atomic_fetch_sub_release) +#define raw_atomic_fetch_sub_release arch_atomic_fetch_sub_release +#elif defined(arch_atomic_fetch_sub_relaxed) static __always_inline int -arch_atomic_fetch_sub_release(int i, atomic_t *v) +raw_atomic_fetch_sub_release(int i, atomic_t *v) { __atomic_release_fence(); return arch_atomic_fetch_sub_relaxed(i, v); } -#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release +#elif defined(arch_atomic_fetch_sub) +#define raw_atomic_fetch_sub_release arch_atomic_fetch_sub +#else +#error "Unable to define raw_atomic_fetch_sub_release" #endif -#ifndef arch_atomic_fetch_sub -static __always_inline int -arch_atomic_fetch_sub(int i, atomic_t *v) -{ - int ret; - __atomic_pre_full_fence(); - ret = arch_atomic_fetch_sub_relaxed(i, v); - __atomic_post_full_fence(); - return ret; -} -#define arch_atomic_fetch_sub arch_atomic_fetch_sub +#if defined(arch_atomic_fetch_sub_relaxed) +#define raw_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed +#elif defined(arch_atomic_fetch_sub) +#define raw_atomic_fetch_sub_relaxed arch_atomic_fetch_sub +#else +#error "Unable to define raw_atomic_fetch_sub_relaxed" #endif -#endif /* arch_atomic_fetch_sub_relaxed */ - -#ifndef arch_atomic_inc +#if defined(arch_atomic_inc) +#define raw_atomic_inc arch_atomic_inc +#else static __always_inline void -arch_atomic_inc(atomic_t *v) +raw_atomic_inc(atomic_t *v) { - arch_atomic_add(1, v); + raw_atomic_add(1, v); } -#define arch_atomic_inc arch_atomic_inc #endif -#ifndef arch_atomic_inc_return_relaxed -#ifdef arch_atomic_inc_return -#define arch_atomic_inc_return_acquire arch_atomic_inc_return -#define arch_atomic_inc_return_release arch_atomic_inc_return -#define arch_atomic_inc_return_relaxed arch_atomic_inc_return -#endif /* arch_atomic_inc_return */ - -#ifndef arch_atomic_inc_return +#if defined(arch_atomic_inc_return) +#define raw_atomic_inc_return arch_atomic_inc_return +#elif defined(arch_atomic_inc_return_relaxed) +static __always_inline int +raw_atomic_inc_return(atomic_t *v) +{ + int ret; + __atomic_pre_full_fence(); + ret = arch_atomic_inc_return_relaxed(v); + __atomic_post_full_fence(); + return ret; +} +#else static __always_inline int -arch_atomic_inc_return(atomic_t *v) +raw_atomic_inc_return(atomic_t *v) { - return arch_atomic_add_return(1, v); + return raw_atomic_add_return(1, v); } -#define arch_atomic_inc_return arch_atomic_inc_return #endif -#ifndef 
arch_atomic_inc_return_acquire +#if defined(arch_atomic_inc_return_acquire) +#define raw_atomic_inc_return_acquire arch_atomic_inc_return_acquire +#elif defined(arch_atomic_inc_return_relaxed) static __always_inline int -arch_atomic_inc_return_acquire(atomic_t *v) +raw_atomic_inc_return_acquire(atomic_t *v) { - return arch_atomic_add_return_acquire(1, v); + int ret = arch_atomic_inc_return_relaxed(v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire -#endif - -#ifndef arch_atomic_inc_return_release +#elif defined(arch_atomic_inc_return) +#define raw_atomic_inc_return_acquire arch_atomic_inc_return +#else static __always_inline int -arch_atomic_inc_return_release(atomic_t *v) +raw_atomic_inc_return_acquire(atomic_t *v) { - return arch_atomic_add_return_release(1, v); + return raw_atomic_add_return_acquire(1, v); } -#define arch_atomic_inc_return_release arch_atomic_inc_return_release #endif -#ifndef arch_atomic_inc_return_relaxed +#if defined(arch_atomic_inc_return_release) +#define raw_atomic_inc_return_release arch_atomic_inc_return_release +#elif defined(arch_atomic_inc_return_relaxed) static __always_inline int -arch_atomic_inc_return_relaxed(atomic_t *v) +raw_atomic_inc_return_release(atomic_t *v) { - return arch_atomic_add_return_relaxed(1, v); + __atomic_release_fence(); + return arch_atomic_inc_return_relaxed(v); } -#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed -#endif - -#else /* arch_atomic_inc_return_relaxed */ - -#ifndef arch_atomic_inc_return_acquire +#elif defined(arch_atomic_inc_return) +#define raw_atomic_inc_return_release arch_atomic_inc_return +#else static __always_inline int -arch_atomic_inc_return_acquire(atomic_t *v) +raw_atomic_inc_return_release(atomic_t *v) { - int ret = arch_atomic_inc_return_relaxed(v); - __atomic_acquire_fence(); - return ret; + return raw_atomic_add_return_release(1, v); } -#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire #endif -#ifndef arch_atomic_inc_return_release +#if defined(arch_atomic_inc_return_relaxed) +#define raw_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed +#elif defined(arch_atomic_inc_return) +#define raw_atomic_inc_return_relaxed arch_atomic_inc_return +#else static __always_inline int -arch_atomic_inc_return_release(atomic_t *v) +raw_atomic_inc_return_relaxed(atomic_t *v) { - __atomic_release_fence(); - return arch_atomic_inc_return_relaxed(v); + return raw_atomic_add_return_relaxed(1, v); } -#define arch_atomic_inc_return_release arch_atomic_inc_return_release #endif -#ifndef arch_atomic_inc_return +#if defined(arch_atomic_fetch_inc) +#define raw_atomic_fetch_inc arch_atomic_fetch_inc +#elif defined(arch_atomic_fetch_inc_relaxed) static __always_inline int -arch_atomic_inc_return(atomic_t *v) +raw_atomic_fetch_inc(atomic_t *v) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_inc_return_relaxed(v); + ret = arch_atomic_fetch_inc_relaxed(v); __atomic_post_full_fence(); return ret; } -#define arch_atomic_inc_return arch_atomic_inc_return -#endif - -#endif /* arch_atomic_inc_return_relaxed */ - -#ifndef arch_atomic_fetch_inc_relaxed -#ifdef arch_atomic_fetch_inc -#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc -#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc -#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc -#endif /* arch_atomic_fetch_inc */ - -#ifndef arch_atomic_fetch_inc +#else static __always_inline int -arch_atomic_fetch_inc(atomic_t *v) 
+raw_atomic_fetch_inc(atomic_t *v) { - return arch_atomic_fetch_add(1, v); + return raw_atomic_fetch_add(1, v); } -#define arch_atomic_fetch_inc arch_atomic_fetch_inc #endif -#ifndef arch_atomic_fetch_inc_acquire +#if defined(arch_atomic_fetch_inc_acquire) +#define raw_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire +#elif defined(arch_atomic_fetch_inc_relaxed) static __always_inline int -arch_atomic_fetch_inc_acquire(atomic_t *v) +raw_atomic_fetch_inc_acquire(atomic_t *v) { - return arch_atomic_fetch_add_acquire(1, v); + int ret = arch_atomic_fetch_inc_relaxed(v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire -#endif - -#ifndef arch_atomic_fetch_inc_release +#elif defined(arch_atomic_fetch_inc) +#define raw_atomic_fetch_inc_acquire arch_atomic_fetch_inc +#else static __always_inline int -arch_atomic_fetch_inc_release(atomic_t *v) +raw_atomic_fetch_inc_acquire(atomic_t *v) { - return arch_atomic_fetch_add_release(1, v); + return raw_atomic_fetch_add_acquire(1, v); } -#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release #endif -#ifndef arch_atomic_fetch_inc_relaxed +#if defined(arch_atomic_fetch_inc_release) +#define raw_atomic_fetch_inc_release arch_atomic_fetch_inc_release +#elif defined(arch_atomic_fetch_inc_relaxed) +static __always_inline int +raw_atomic_fetch_inc_release(atomic_t *v) +{ + __atomic_release_fence(); + return arch_atomic_fetch_inc_relaxed(v); +} +#elif defined(arch_atomic_fetch_inc) +#define raw_atomic_fetch_inc_release arch_atomic_fetch_inc +#else static __always_inline int -arch_atomic_fetch_inc_relaxed(atomic_t *v) +raw_atomic_fetch_inc_release(atomic_t *v) { - return arch_atomic_fetch_add_relaxed(1, v); + return raw_atomic_fetch_add_release(1, v); } -#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed #endif -#else /* arch_atomic_fetch_inc_relaxed */ - -#ifndef arch_atomic_fetch_inc_acquire +#if defined(arch_atomic_fetch_inc_relaxed) +#define raw_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed +#elif defined(arch_atomic_fetch_inc) +#define raw_atomic_fetch_inc_relaxed arch_atomic_fetch_inc +#else static __always_inline int -arch_atomic_fetch_inc_acquire(atomic_t *v) +raw_atomic_fetch_inc_relaxed(atomic_t *v) { - int ret = arch_atomic_fetch_inc_relaxed(v); - __atomic_acquire_fence(); - return ret; + return raw_atomic_fetch_add_relaxed(1, v); } -#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire #endif -#ifndef arch_atomic_fetch_inc_release -static __always_inline int -arch_atomic_fetch_inc_release(atomic_t *v) +#if defined(arch_atomic_dec) +#define raw_atomic_dec arch_atomic_dec +#else +static __always_inline void +raw_atomic_dec(atomic_t *v) { - __atomic_release_fence(); - return arch_atomic_fetch_inc_relaxed(v); + raw_atomic_sub(1, v); } -#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release #endif -#ifndef arch_atomic_fetch_inc +#if defined(arch_atomic_dec_return) +#define raw_atomic_dec_return arch_atomic_dec_return +#elif defined(arch_atomic_dec_return_relaxed) static __always_inline int -arch_atomic_fetch_inc(atomic_t *v) +raw_atomic_dec_return(atomic_t *v) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_fetch_inc_relaxed(v); + ret = arch_atomic_dec_return_relaxed(v); __atomic_post_full_fence(); return ret; } -#define arch_atomic_fetch_inc arch_atomic_fetch_inc -#endif - -#endif /* arch_atomic_fetch_inc_relaxed */ - -#ifndef arch_atomic_dec -static __always_inline void -arch_atomic_dec(atomic_t *v) -{ - 
arch_atomic_sub(1, v); -} -#define arch_atomic_dec arch_atomic_dec -#endif - -#ifndef arch_atomic_dec_return_relaxed -#ifdef arch_atomic_dec_return -#define arch_atomic_dec_return_acquire arch_atomic_dec_return -#define arch_atomic_dec_return_release arch_atomic_dec_return -#define arch_atomic_dec_return_relaxed arch_atomic_dec_return -#endif /* arch_atomic_dec_return */ - -#ifndef arch_atomic_dec_return +#else static __always_inline int -arch_atomic_dec_return(atomic_t *v) +raw_atomic_dec_return(atomic_t *v) { - return arch_atomic_sub_return(1, v); + return raw_atomic_sub_return(1, v); } -#define arch_atomic_dec_return arch_atomic_dec_return #endif -#ifndef arch_atomic_dec_return_acquire +#if defined(arch_atomic_dec_return_acquire) +#define raw_atomic_dec_return_acquire arch_atomic_dec_return_acquire +#elif defined(arch_atomic_dec_return_relaxed) static __always_inline int -arch_atomic_dec_return_acquire(atomic_t *v) +raw_atomic_dec_return_acquire(atomic_t *v) { - return arch_atomic_sub_return_acquire(1, v); + int ret = arch_atomic_dec_return_relaxed(v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire -#endif - -#ifndef arch_atomic_dec_return_release +#elif defined(arch_atomic_dec_return) +#define raw_atomic_dec_return_acquire arch_atomic_dec_return +#else static __always_inline int -arch_atomic_dec_return_release(atomic_t *v) +raw_atomic_dec_return_acquire(atomic_t *v) { - return arch_atomic_sub_return_release(1, v); + return raw_atomic_sub_return_acquire(1, v); } -#define arch_atomic_dec_return_release arch_atomic_dec_return_release #endif -#ifndef arch_atomic_dec_return_relaxed +#if defined(arch_atomic_dec_return_release) +#define raw_atomic_dec_return_release arch_atomic_dec_return_release +#elif defined(arch_atomic_dec_return_relaxed) static __always_inline int -arch_atomic_dec_return_relaxed(atomic_t *v) +raw_atomic_dec_return_release(atomic_t *v) { - return arch_atomic_sub_return_relaxed(1, v); + __atomic_release_fence(); + return arch_atomic_dec_return_relaxed(v); } -#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed -#endif - -#else /* arch_atomic_dec_return_relaxed */ - -#ifndef arch_atomic_dec_return_acquire +#elif defined(arch_atomic_dec_return) +#define raw_atomic_dec_return_release arch_atomic_dec_return +#else static __always_inline int -arch_atomic_dec_return_acquire(atomic_t *v) +raw_atomic_dec_return_release(atomic_t *v) { - int ret = arch_atomic_dec_return_relaxed(v); - __atomic_acquire_fence(); - return ret; + return raw_atomic_sub_return_release(1, v); } -#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire #endif -#ifndef arch_atomic_dec_return_release +#if defined(arch_atomic_dec_return_relaxed) +#define raw_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed +#elif defined(arch_atomic_dec_return) +#define raw_atomic_dec_return_relaxed arch_atomic_dec_return +#else static __always_inline int -arch_atomic_dec_return_release(atomic_t *v) +raw_atomic_dec_return_relaxed(atomic_t *v) { - __atomic_release_fence(); - return arch_atomic_dec_return_relaxed(v); + return raw_atomic_sub_return_relaxed(1, v); } -#define arch_atomic_dec_return_release arch_atomic_dec_return_release #endif -#ifndef arch_atomic_dec_return +#if defined(arch_atomic_fetch_dec) +#define raw_atomic_fetch_dec arch_atomic_fetch_dec +#elif defined(arch_atomic_fetch_dec_relaxed) static __always_inline int -arch_atomic_dec_return(atomic_t *v) +raw_atomic_fetch_dec(atomic_t *v) { int ret; 
__atomic_pre_full_fence(); - ret = arch_atomic_dec_return_relaxed(v); + ret = arch_atomic_fetch_dec_relaxed(v); __atomic_post_full_fence(); return ret; } -#define arch_atomic_dec_return arch_atomic_dec_return -#endif - -#endif /* arch_atomic_dec_return_relaxed */ - -#ifndef arch_atomic_fetch_dec_relaxed -#ifdef arch_atomic_fetch_dec -#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec -#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec -#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec -#endif /* arch_atomic_fetch_dec */ - -#ifndef arch_atomic_fetch_dec +#else static __always_inline int -arch_atomic_fetch_dec(atomic_t *v) +raw_atomic_fetch_dec(atomic_t *v) { - return arch_atomic_fetch_sub(1, v); + return raw_atomic_fetch_sub(1, v); } -#define arch_atomic_fetch_dec arch_atomic_fetch_dec #endif -#ifndef arch_atomic_fetch_dec_acquire +#if defined(arch_atomic_fetch_dec_acquire) +#define raw_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire +#elif defined(arch_atomic_fetch_dec_relaxed) static __always_inline int -arch_atomic_fetch_dec_acquire(atomic_t *v) +raw_atomic_fetch_dec_acquire(atomic_t *v) { - return arch_atomic_fetch_sub_acquire(1, v); + int ret = arch_atomic_fetch_dec_relaxed(v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire -#endif - -#ifndef arch_atomic_fetch_dec_release +#elif defined(arch_atomic_fetch_dec) +#define raw_atomic_fetch_dec_acquire arch_atomic_fetch_dec +#else static __always_inline int -arch_atomic_fetch_dec_release(atomic_t *v) +raw_atomic_fetch_dec_acquire(atomic_t *v) { - return arch_atomic_fetch_sub_release(1, v); + return raw_atomic_fetch_sub_acquire(1, v); } -#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release #endif -#ifndef arch_atomic_fetch_dec_relaxed +#if defined(arch_atomic_fetch_dec_release) +#define raw_atomic_fetch_dec_release arch_atomic_fetch_dec_release +#elif defined(arch_atomic_fetch_dec_relaxed) static __always_inline int -arch_atomic_fetch_dec_relaxed(atomic_t *v) +raw_atomic_fetch_dec_release(atomic_t *v) { - return arch_atomic_fetch_sub_relaxed(1, v); + __atomic_release_fence(); + return arch_atomic_fetch_dec_relaxed(v); } -#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed -#endif - -#else /* arch_atomic_fetch_dec_relaxed */ - -#ifndef arch_atomic_fetch_dec_acquire +#elif defined(arch_atomic_fetch_dec) +#define raw_atomic_fetch_dec_release arch_atomic_fetch_dec +#else static __always_inline int -arch_atomic_fetch_dec_acquire(atomic_t *v) +raw_atomic_fetch_dec_release(atomic_t *v) { - int ret = arch_atomic_fetch_dec_relaxed(v); - __atomic_acquire_fence(); - return ret; + return raw_atomic_fetch_sub_release(1, v); } -#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire #endif -#ifndef arch_atomic_fetch_dec_release +#if defined(arch_atomic_fetch_dec_relaxed) +#define raw_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed +#elif defined(arch_atomic_fetch_dec) +#define raw_atomic_fetch_dec_relaxed arch_atomic_fetch_dec +#else static __always_inline int -arch_atomic_fetch_dec_release(atomic_t *v) +raw_atomic_fetch_dec_relaxed(atomic_t *v) { - __atomic_release_fence(); - return arch_atomic_fetch_dec_relaxed(v); + return raw_atomic_fetch_sub_relaxed(1, v); } -#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release #endif -#ifndef arch_atomic_fetch_dec +#define raw_atomic_and arch_atomic_and + +#if defined(arch_atomic_fetch_and) +#define raw_atomic_fetch_and arch_atomic_fetch_and +#elif 
defined(arch_atomic_fetch_and_relaxed) static __always_inline int -arch_atomic_fetch_dec(atomic_t *v) +raw_atomic_fetch_and(int i, atomic_t *v) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_fetch_dec_relaxed(v); + ret = arch_atomic_fetch_and_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic_fetch_dec arch_atomic_fetch_dec +#else +#error "Unable to define raw_atomic_fetch_and" #endif -#endif /* arch_atomic_fetch_dec_relaxed */ - -#ifndef arch_atomic_fetch_and_relaxed -#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and -#define arch_atomic_fetch_and_release arch_atomic_fetch_and -#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and -#else /* arch_atomic_fetch_and_relaxed */ - -#ifndef arch_atomic_fetch_and_acquire +#if defined(arch_atomic_fetch_and_acquire) +#define raw_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire +#elif defined(arch_atomic_fetch_and_relaxed) static __always_inline int -arch_atomic_fetch_and_acquire(int i, atomic_t *v) +raw_atomic_fetch_and_acquire(int i, atomic_t *v) { int ret = arch_atomic_fetch_and_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire +#elif defined(arch_atomic_fetch_and) +#define raw_atomic_fetch_and_acquire arch_atomic_fetch_and +#else +#error "Unable to define raw_atomic_fetch_and_acquire" #endif -#ifndef arch_atomic_fetch_and_release +#if defined(arch_atomic_fetch_and_release) +#define raw_atomic_fetch_and_release arch_atomic_fetch_and_release +#elif defined(arch_atomic_fetch_and_relaxed) static __always_inline int -arch_atomic_fetch_and_release(int i, atomic_t *v) +raw_atomic_fetch_and_release(int i, atomic_t *v) { __atomic_release_fence(); return arch_atomic_fetch_and_relaxed(i, v); } -#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release +#elif defined(arch_atomic_fetch_and) +#define raw_atomic_fetch_and_release arch_atomic_fetch_and +#else +#error "Unable to define raw_atomic_fetch_and_release" #endif -#ifndef arch_atomic_fetch_and +#if defined(arch_atomic_fetch_and_relaxed) +#define raw_atomic_fetch_and_relaxed arch_atomic_fetch_and_relaxed +#elif defined(arch_atomic_fetch_and) +#define raw_atomic_fetch_and_relaxed arch_atomic_fetch_and +#else +#error "Unable to define raw_atomic_fetch_and_relaxed" +#endif + +#if defined(arch_atomic_andnot) +#define raw_atomic_andnot arch_atomic_andnot +#else +static __always_inline void +raw_atomic_andnot(int i, atomic_t *v) +{ + raw_atomic_and(~i, v); +} +#endif + +#if defined(arch_atomic_fetch_andnot) +#define raw_atomic_fetch_andnot arch_atomic_fetch_andnot +#elif defined(arch_atomic_fetch_andnot_relaxed) static __always_inline int -arch_atomic_fetch_and(int i, atomic_t *v) +raw_atomic_fetch_andnot(int i, atomic_t *v) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_fetch_and_relaxed(i, v); + ret = arch_atomic_fetch_andnot_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic_fetch_and arch_atomic_fetch_and -#endif - -#endif /* arch_atomic_fetch_and_relaxed */ - -#ifndef arch_atomic_andnot -static __always_inline void -arch_atomic_andnot(int i, atomic_t *v) +#else +static __always_inline int +raw_atomic_fetch_andnot(int i, atomic_t *v) { - arch_atomic_and(~i, v); + return raw_atomic_fetch_and(~i, v); } -#define arch_atomic_andnot arch_atomic_andnot #endif -#ifndef arch_atomic_fetch_andnot_relaxed -#ifdef arch_atomic_fetch_andnot -#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot -#define 
arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot -#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot -#endif /* arch_atomic_fetch_andnot */ - -#ifndef arch_atomic_fetch_andnot +#if defined(arch_atomic_fetch_andnot_acquire) +#define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire +#elif defined(arch_atomic_fetch_andnot_relaxed) +static __always_inline int +raw_atomic_fetch_andnot_acquire(int i, atomic_t *v) +{ + int ret = arch_atomic_fetch_andnot_relaxed(i, v); + __atomic_acquire_fence(); + return ret; +} +#elif defined(arch_atomic_fetch_andnot) +#define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot +#else static __always_inline int -arch_atomic_fetch_andnot(int i, atomic_t *v) +raw_atomic_fetch_andnot_acquire(int i, atomic_t *v) { - return arch_atomic_fetch_and(~i, v); + return raw_atomic_fetch_and_acquire(~i, v); } -#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot #endif -#ifndef arch_atomic_fetch_andnot_acquire +#if defined(arch_atomic_fetch_andnot_release) +#define raw_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release +#elif defined(arch_atomic_fetch_andnot_relaxed) static __always_inline int -arch_atomic_fetch_andnot_acquire(int i, atomic_t *v) +raw_atomic_fetch_andnot_release(int i, atomic_t *v) { - return arch_atomic_fetch_and_acquire(~i, v); + __atomic_release_fence(); + return arch_atomic_fetch_andnot_relaxed(i, v); } -#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire -#endif - -#ifndef arch_atomic_fetch_andnot_release +#elif defined(arch_atomic_fetch_andnot) +#define raw_atomic_fetch_andnot_release arch_atomic_fetch_andnot +#else static __always_inline int -arch_atomic_fetch_andnot_release(int i, atomic_t *v) +raw_atomic_fetch_andnot_release(int i, atomic_t *v) { - return arch_atomic_fetch_and_release(~i, v); + return raw_atomic_fetch_and_release(~i, v); } -#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release #endif -#ifndef arch_atomic_fetch_andnot_relaxed +#if defined(arch_atomic_fetch_andnot_relaxed) +#define raw_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed +#elif defined(arch_atomic_fetch_andnot) +#define raw_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot +#else static __always_inline int -arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v) +raw_atomic_fetch_andnot_relaxed(int i, atomic_t *v) { - return arch_atomic_fetch_and_relaxed(~i, v); + return raw_atomic_fetch_and_relaxed(~i, v); } -#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed #endif -#else /* arch_atomic_fetch_andnot_relaxed */ +#define raw_atomic_or arch_atomic_or -#ifndef arch_atomic_fetch_andnot_acquire +#if defined(arch_atomic_fetch_or) +#define raw_atomic_fetch_or arch_atomic_fetch_or +#elif defined(arch_atomic_fetch_or_relaxed) static __always_inline int -arch_atomic_fetch_andnot_acquire(int i, atomic_t *v) +raw_atomic_fetch_or(int i, atomic_t *v) { - int ret = arch_atomic_fetch_andnot_relaxed(i, v); - __atomic_acquire_fence(); + int ret; + __atomic_pre_full_fence(); + ret = arch_atomic_fetch_or_relaxed(i, v); + __atomic_post_full_fence(); return ret; } -#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire +#else +#error "Unable to define raw_atomic_fetch_or" #endif -#ifndef arch_atomic_fetch_andnot_release +#if defined(arch_atomic_fetch_or_acquire) +#define raw_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire +#elif defined(arch_atomic_fetch_or_relaxed) static __always_inline int -arch_atomic_fetch_andnot_release(int i, 
atomic_t *v) -{ - __atomic_release_fence(); - return arch_atomic_fetch_andnot_relaxed(i, v); -} -#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release -#endif - -#ifndef arch_atomic_fetch_andnot -static __always_inline int -arch_atomic_fetch_andnot(int i, atomic_t *v) -{ - int ret; - __atomic_pre_full_fence(); - ret = arch_atomic_fetch_andnot_relaxed(i, v); - __atomic_post_full_fence(); - return ret; -} -#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot -#endif - -#endif /* arch_atomic_fetch_andnot_relaxed */ - -#ifndef arch_atomic_fetch_or_relaxed -#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or -#define arch_atomic_fetch_or_release arch_atomic_fetch_or -#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or -#else /* arch_atomic_fetch_or_relaxed */ - -#ifndef arch_atomic_fetch_or_acquire -static __always_inline int -arch_atomic_fetch_or_acquire(int i, atomic_t *v) +raw_atomic_fetch_or_acquire(int i, atomic_t *v) { int ret = arch_atomic_fetch_or_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire +#elif defined(arch_atomic_fetch_or) +#define raw_atomic_fetch_or_acquire arch_atomic_fetch_or +#else +#error "Unable to define raw_atomic_fetch_or_acquire" #endif -#ifndef arch_atomic_fetch_or_release +#if defined(arch_atomic_fetch_or_release) +#define raw_atomic_fetch_or_release arch_atomic_fetch_or_release +#elif defined(arch_atomic_fetch_or_relaxed) static __always_inline int -arch_atomic_fetch_or_release(int i, atomic_t *v) +raw_atomic_fetch_or_release(int i, atomic_t *v) { __atomic_release_fence(); return arch_atomic_fetch_or_relaxed(i, v); } -#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release +#elif defined(arch_atomic_fetch_or) +#define raw_atomic_fetch_or_release arch_atomic_fetch_or +#else +#error "Unable to define raw_atomic_fetch_or_release" #endif -#ifndef arch_atomic_fetch_or +#if defined(arch_atomic_fetch_or_relaxed) +#define raw_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed +#elif defined(arch_atomic_fetch_or) +#define raw_atomic_fetch_or_relaxed arch_atomic_fetch_or +#else +#error "Unable to define raw_atomic_fetch_or_relaxed" +#endif + +#define raw_atomic_xor arch_atomic_xor + +#if defined(arch_atomic_fetch_xor) +#define raw_atomic_fetch_xor arch_atomic_fetch_xor +#elif defined(arch_atomic_fetch_xor_relaxed) static __always_inline int -arch_atomic_fetch_or(int i, atomic_t *v) +raw_atomic_fetch_xor(int i, atomic_t *v) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_fetch_or_relaxed(i, v); + ret = arch_atomic_fetch_xor_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic_fetch_or arch_atomic_fetch_or +#else +#error "Unable to define raw_atomic_fetch_xor" #endif -#endif /* arch_atomic_fetch_or_relaxed */ - -#ifndef arch_atomic_fetch_xor_relaxed -#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor -#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor -#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor -#else /* arch_atomic_fetch_xor_relaxed */ - -#ifndef arch_atomic_fetch_xor_acquire +#if defined(arch_atomic_fetch_xor_acquire) +#define raw_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire +#elif defined(arch_atomic_fetch_xor_relaxed) static __always_inline int -arch_atomic_fetch_xor_acquire(int i, atomic_t *v) +raw_atomic_fetch_xor_acquire(int i, atomic_t *v) { int ret = arch_atomic_fetch_xor_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic_fetch_xor_acquire 
arch_atomic_fetch_xor_acquire +#elif defined(arch_atomic_fetch_xor) +#define raw_atomic_fetch_xor_acquire arch_atomic_fetch_xor +#else +#error "Unable to define raw_atomic_fetch_xor_acquire" #endif -#ifndef arch_atomic_fetch_xor_release +#if defined(arch_atomic_fetch_xor_release) +#define raw_atomic_fetch_xor_release arch_atomic_fetch_xor_release +#elif defined(arch_atomic_fetch_xor_relaxed) static __always_inline int -arch_atomic_fetch_xor_release(int i, atomic_t *v) +raw_atomic_fetch_xor_release(int i, atomic_t *v) { __atomic_release_fence(); return arch_atomic_fetch_xor_relaxed(i, v); } -#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release +#elif defined(arch_atomic_fetch_xor) +#define raw_atomic_fetch_xor_release arch_atomic_fetch_xor +#else +#error "Unable to define raw_atomic_fetch_xor_release" #endif -#ifndef arch_atomic_fetch_xor +#if defined(arch_atomic_fetch_xor_relaxed) +#define raw_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed +#elif defined(arch_atomic_fetch_xor) +#define raw_atomic_fetch_xor_relaxed arch_atomic_fetch_xor +#else +#error "Unable to define raw_atomic_fetch_xor_relaxed" +#endif + +#if defined(arch_atomic_xchg) +#define raw_atomic_xchg arch_atomic_xchg +#elif defined(arch_atomic_xchg_relaxed) static __always_inline int -arch_atomic_fetch_xor(int i, atomic_t *v) +raw_atomic_xchg(atomic_t *v, int i) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_fetch_xor_relaxed(i, v); + ret = arch_atomic_xchg_relaxed(v, i); __atomic_post_full_fence(); return ret; } -#define arch_atomic_fetch_xor arch_atomic_fetch_xor -#endif - -#endif /* arch_atomic_fetch_xor_relaxed */ - -#ifndef arch_atomic_xchg_relaxed -#ifdef arch_atomic_xchg -#define arch_atomic_xchg_acquire arch_atomic_xchg -#define arch_atomic_xchg_release arch_atomic_xchg -#define arch_atomic_xchg_relaxed arch_atomic_xchg -#endif /* arch_atomic_xchg */ - -#ifndef arch_atomic_xchg +#else static __always_inline int -arch_atomic_xchg(atomic_t *v, int new) +raw_atomic_xchg(atomic_t *v, int new) { - return arch_xchg(&v->counter, new); + return raw_xchg(&v->counter, new); } -#define arch_atomic_xchg arch_atomic_xchg #endif -#ifndef arch_atomic_xchg_acquire +#if defined(arch_atomic_xchg_acquire) +#define raw_atomic_xchg_acquire arch_atomic_xchg_acquire +#elif defined(arch_atomic_xchg_relaxed) static __always_inline int -arch_atomic_xchg_acquire(atomic_t *v, int new) +raw_atomic_xchg_acquire(atomic_t *v, int i) { - return arch_xchg_acquire(&v->counter, new); + int ret = arch_atomic_xchg_relaxed(v, i); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire -#endif - -#ifndef arch_atomic_xchg_release +#elif defined(arch_atomic_xchg) +#define raw_atomic_xchg_acquire arch_atomic_xchg +#else static __always_inline int -arch_atomic_xchg_release(atomic_t *v, int new) +raw_atomic_xchg_acquire(atomic_t *v, int new) { - return arch_xchg_release(&v->counter, new); + return raw_xchg_acquire(&v->counter, new); } -#define arch_atomic_xchg_release arch_atomic_xchg_release #endif -#ifndef arch_atomic_xchg_relaxed +#if defined(arch_atomic_xchg_release) +#define raw_atomic_xchg_release arch_atomic_xchg_release +#elif defined(arch_atomic_xchg_relaxed) static __always_inline int -arch_atomic_xchg_relaxed(atomic_t *v, int new) +raw_atomic_xchg_release(atomic_t *v, int i) { - return arch_xchg_relaxed(&v->counter, new); + __atomic_release_fence(); + return arch_atomic_xchg_relaxed(v, i); } -#define arch_atomic_xchg_relaxed arch_atomic_xchg_relaxed -#endif - -#else /* 
arch_atomic_xchg_relaxed */ - -#ifndef arch_atomic_xchg_acquire +#elif defined(arch_atomic_xchg) +#define raw_atomic_xchg_release arch_atomic_xchg +#else static __always_inline int -arch_atomic_xchg_acquire(atomic_t *v, int i) +raw_atomic_xchg_release(atomic_t *v, int new) { - int ret = arch_atomic_xchg_relaxed(v, i); - __atomic_acquire_fence(); - return ret; + return raw_xchg_release(&v->counter, new); } -#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire #endif -#ifndef arch_atomic_xchg_release +#if defined(arch_atomic_xchg_relaxed) +#define raw_atomic_xchg_relaxed arch_atomic_xchg_relaxed +#elif defined(arch_atomic_xchg) +#define raw_atomic_xchg_relaxed arch_atomic_xchg +#else static __always_inline int -arch_atomic_xchg_release(atomic_t *v, int i) +raw_atomic_xchg_relaxed(atomic_t *v, int new) { - __atomic_release_fence(); - return arch_atomic_xchg_relaxed(v, i); + return raw_xchg_relaxed(&v->counter, new); } -#define arch_atomic_xchg_release arch_atomic_xchg_release #endif -#ifndef arch_atomic_xchg +#if defined(arch_atomic_cmpxchg) +#define raw_atomic_cmpxchg arch_atomic_cmpxchg +#elif defined(arch_atomic_cmpxchg_relaxed) static __always_inline int -arch_atomic_xchg(atomic_t *v, int i) +raw_atomic_cmpxchg(atomic_t *v, int old, int new) { int ret; __atomic_pre_full_fence(); - ret = arch_atomic_xchg_relaxed(v, i); + ret = arch_atomic_cmpxchg_relaxed(v, old, new); __atomic_post_full_fence(); return ret; } -#define arch_atomic_xchg arch_atomic_xchg -#endif - -#endif /* arch_atomic_xchg_relaxed */ - -#ifndef arch_atomic_cmpxchg_relaxed -#ifdef arch_atomic_cmpxchg -#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg -#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg -#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg -#endif /* arch_atomic_cmpxchg */ - -#ifndef arch_atomic_cmpxchg +#else static __always_inline int -arch_atomic_cmpxchg(atomic_t *v, int old, int new) +raw_atomic_cmpxchg(atomic_t *v, int old, int new) { - return arch_cmpxchg(&v->counter, old, new); + return raw_cmpxchg(&v->counter, old, new); } -#define arch_atomic_cmpxchg arch_atomic_cmpxchg #endif -#ifndef arch_atomic_cmpxchg_acquire +#if defined(arch_atomic_cmpxchg_acquire) +#define raw_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire +#elif defined(arch_atomic_cmpxchg_relaxed) static __always_inline int -arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new) +raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new) { - return arch_cmpxchg_acquire(&v->counter, old, new); + int ret = arch_atomic_cmpxchg_relaxed(v, old, new); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire -#endif - -#ifndef arch_atomic_cmpxchg_release +#elif defined(arch_atomic_cmpxchg) +#define raw_atomic_cmpxchg_acquire arch_atomic_cmpxchg +#else static __always_inline int -arch_atomic_cmpxchg_release(atomic_t *v, int old, int new) +raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new) { - return arch_cmpxchg_release(&v->counter, old, new); + return raw_cmpxchg_acquire(&v->counter, old, new); } -#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release #endif -#ifndef arch_atomic_cmpxchg_relaxed +#if defined(arch_atomic_cmpxchg_release) +#define raw_atomic_cmpxchg_release arch_atomic_cmpxchg_release +#elif defined(arch_atomic_cmpxchg_relaxed) static __always_inline int -arch_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new) +raw_atomic_cmpxchg_release(atomic_t *v, int old, int new) { - return arch_cmpxchg_relaxed(&v->counter, old, new); + 
__atomic_release_fence(); + return arch_atomic_cmpxchg_relaxed(v, old, new); } -#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed -#endif - -#else /* arch_atomic_cmpxchg_relaxed */ - -#ifndef arch_atomic_cmpxchg_acquire +#elif defined(arch_atomic_cmpxchg) +#define raw_atomic_cmpxchg_release arch_atomic_cmpxchg +#else static __always_inline int -arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new) +raw_atomic_cmpxchg_release(atomic_t *v, int old, int new) { - int ret = arch_atomic_cmpxchg_relaxed(v, old, new); - __atomic_acquire_fence(); - return ret; + return raw_cmpxchg_release(&v->counter, old, new); } -#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire #endif -#ifndef arch_atomic_cmpxchg_release +#if defined(arch_atomic_cmpxchg_relaxed) +#define raw_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed +#elif defined(arch_atomic_cmpxchg) +#define raw_atomic_cmpxchg_relaxed arch_atomic_cmpxchg +#else static __always_inline int -arch_atomic_cmpxchg_release(atomic_t *v, int old, int new) +raw_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new) { - __atomic_release_fence(); - return arch_atomic_cmpxchg_relaxed(v, old, new); + return raw_cmpxchg_relaxed(&v->counter, old, new); } -#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release #endif -#ifndef arch_atomic_cmpxchg -static __always_inline int -arch_atomic_cmpxchg(atomic_t *v, int old, int new) +#if defined(arch_atomic_try_cmpxchg) +#define raw_atomic_try_cmpxchg arch_atomic_try_cmpxchg +#elif defined(arch_atomic_try_cmpxchg_relaxed) +static __always_inline bool +raw_atomic_try_cmpxchg(atomic_t *v, int *old, int new) { - int ret; + bool ret; __atomic_pre_full_fence(); - ret = arch_atomic_cmpxchg_relaxed(v, old, new); + ret = arch_atomic_try_cmpxchg_relaxed(v, old, new); __atomic_post_full_fence(); return ret; } -#define arch_atomic_cmpxchg arch_atomic_cmpxchg -#endif - -#endif /* arch_atomic_cmpxchg_relaxed */ - -#ifndef arch_atomic_try_cmpxchg_relaxed -#ifdef arch_atomic_try_cmpxchg -#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg -#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg -#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg -#endif /* arch_atomic_try_cmpxchg */ - -#ifndef arch_atomic_try_cmpxchg +#else static __always_inline bool -arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new) +raw_atomic_try_cmpxchg(atomic_t *v, int *old, int new) { int r, o = *old; - r = arch_atomic_cmpxchg(v, o, new); + r = raw_atomic_cmpxchg(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); } -#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg #endif -#ifndef arch_atomic_try_cmpxchg_acquire +#if defined(arch_atomic_try_cmpxchg_acquire) +#define raw_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire +#elif defined(arch_atomic_try_cmpxchg_relaxed) +static __always_inline bool +raw_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) +{ + bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new); + __atomic_acquire_fence(); + return ret; +} +#elif defined(arch_atomic_try_cmpxchg) +#define raw_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg +#else static __always_inline bool -arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) +raw_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) { int r, o = *old; - r = arch_atomic_cmpxchg_acquire(v, o, new); + r = raw_atomic_cmpxchg_acquire(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); } -#define arch_atomic_try_cmpxchg_acquire 
arch_atomic_try_cmpxchg_acquire #endif -#ifndef arch_atomic_try_cmpxchg_release +#if defined(arch_atomic_try_cmpxchg_release) +#define raw_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release +#elif defined(arch_atomic_try_cmpxchg_relaxed) static __always_inline bool -arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) +raw_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) +{ + __atomic_release_fence(); + return arch_atomic_try_cmpxchg_relaxed(v, old, new); +} +#elif defined(arch_atomic_try_cmpxchg) +#define raw_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg +#else +static __always_inline bool +raw_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) { int r, o = *old; - r = arch_atomic_cmpxchg_release(v, o, new); + r = raw_atomic_cmpxchg_release(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); } -#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release #endif -#ifndef arch_atomic_try_cmpxchg_relaxed +#if defined(arch_atomic_try_cmpxchg_relaxed) +#define raw_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed +#elif defined(arch_atomic_try_cmpxchg) +#define raw_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg +#else static __always_inline bool -arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new) +raw_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new) { int r, o = *old; - r = arch_atomic_cmpxchg_relaxed(v, o, new); + r = raw_atomic_cmpxchg_relaxed(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); } -#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed -#endif - -#else /* arch_atomic_try_cmpxchg_relaxed */ - -#ifndef arch_atomic_try_cmpxchg_acquire -static __always_inline bool -arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) -{ - bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new); - __atomic_acquire_fence(); - return ret; -} -#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire -#endif - -#ifndef arch_atomic_try_cmpxchg_release -static __always_inline bool -arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) -{ - __atomic_release_fence(); - return arch_atomic_try_cmpxchg_relaxed(v, old, new); -} -#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release -#endif - -#ifndef arch_atomic_try_cmpxchg -static __always_inline bool -arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new) -{ - bool ret; - __atomic_pre_full_fence(); - ret = arch_atomic_try_cmpxchg_relaxed(v, old, new); - __atomic_post_full_fence(); - return ret; -} -#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg #endif -#endif /* arch_atomic_try_cmpxchg_relaxed */ - -#ifndef arch_atomic_sub_and_test +#if defined(arch_atomic_sub_and_test) +#define raw_atomic_sub_and_test arch_atomic_sub_and_test +#else static __always_inline bool -arch_atomic_sub_and_test(int i, atomic_t *v) +raw_atomic_sub_and_test(int i, atomic_t *v) { - return arch_atomic_sub_return(i, v) == 0; + return raw_atomic_sub_return(i, v) == 0; } -#define arch_atomic_sub_and_test arch_atomic_sub_and_test #endif -#ifndef arch_atomic_dec_and_test +#if defined(arch_atomic_dec_and_test) +#define raw_atomic_dec_and_test arch_atomic_dec_and_test +#else static __always_inline bool -arch_atomic_dec_and_test(atomic_t *v) +raw_atomic_dec_and_test(atomic_t *v) { - return arch_atomic_dec_return(v) == 0; + return raw_atomic_dec_return(v) == 0; } -#define arch_atomic_dec_and_test arch_atomic_dec_and_test #endif -#ifndef arch_atomic_inc_and_test +#if 
defined(arch_atomic_inc_and_test) +#define raw_atomic_inc_and_test arch_atomic_inc_and_test +#else static __always_inline bool -arch_atomic_inc_and_test(atomic_t *v) +raw_atomic_inc_and_test(atomic_t *v) { - return arch_atomic_inc_return(v) == 0; + return raw_atomic_inc_return(v) == 0; } -#define arch_atomic_inc_and_test arch_atomic_inc_and_test #endif -#ifndef arch_atomic_add_negative_relaxed -#ifdef arch_atomic_add_negative -#define arch_atomic_add_negative_acquire arch_atomic_add_negative -#define arch_atomic_add_negative_release arch_atomic_add_negative -#define arch_atomic_add_negative_relaxed arch_atomic_add_negative -#endif /* arch_atomic_add_negative */ - -#ifndef arch_atomic_add_negative +#if defined(arch_atomic_add_negative) +#define raw_atomic_add_negative arch_atomic_add_negative +#elif defined(arch_atomic_add_negative_relaxed) static __always_inline bool -arch_atomic_add_negative(int i, atomic_t *v) +raw_atomic_add_negative(int i, atomic_t *v) { - return arch_atomic_add_return(i, v) < 0; + bool ret; + __atomic_pre_full_fence(); + ret = arch_atomic_add_negative_relaxed(i, v); + __atomic_post_full_fence(); + return ret; } -#define arch_atomic_add_negative arch_atomic_add_negative -#endif - -#ifndef arch_atomic_add_negative_acquire +#else static __always_inline bool -arch_atomic_add_negative_acquire(int i, atomic_t *v) +raw_atomic_add_negative(int i, atomic_t *v) { - return arch_atomic_add_return_acquire(i, v) < 0; + return raw_atomic_add_return(i, v) < 0; } -#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire #endif -#ifndef arch_atomic_add_negative_release +#if defined(arch_atomic_add_negative_acquire) +#define raw_atomic_add_negative_acquire arch_atomic_add_negative_acquire +#elif defined(arch_atomic_add_negative_relaxed) static __always_inline bool -arch_atomic_add_negative_release(int i, atomic_t *v) +raw_atomic_add_negative_acquire(int i, atomic_t *v) { - return arch_atomic_add_return_release(i, v) < 0; + bool ret = arch_atomic_add_negative_relaxed(i, v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic_add_negative_release arch_atomic_add_negative_release -#endif - -#ifndef arch_atomic_add_negative_relaxed +#elif defined(arch_atomic_add_negative) +#define raw_atomic_add_negative_acquire arch_atomic_add_negative +#else static __always_inline bool -arch_atomic_add_negative_relaxed(int i, atomic_t *v) +raw_atomic_add_negative_acquire(int i, atomic_t *v) { - return arch_atomic_add_return_relaxed(i, v) < 0; + return raw_atomic_add_return_acquire(i, v) < 0; } -#define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed #endif -#else /* arch_atomic_add_negative_relaxed */ - -#ifndef arch_atomic_add_negative_acquire +#if defined(arch_atomic_add_negative_release) +#define raw_atomic_add_negative_release arch_atomic_add_negative_release +#elif defined(arch_atomic_add_negative_relaxed) static __always_inline bool -arch_atomic_add_negative_acquire(int i, atomic_t *v) +raw_atomic_add_negative_release(int i, atomic_t *v) { - bool ret = arch_atomic_add_negative_relaxed(i, v); - __atomic_acquire_fence(); - return ret; + __atomic_release_fence(); + return arch_atomic_add_negative_relaxed(i, v); } -#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire -#endif - -#ifndef arch_atomic_add_negative_release +#elif defined(arch_atomic_add_negative) +#define raw_atomic_add_negative_release arch_atomic_add_negative +#else static __always_inline bool -arch_atomic_add_negative_release(int i, atomic_t *v) 
+raw_atomic_add_negative_release(int i, atomic_t *v) { - __atomic_release_fence(); - return arch_atomic_add_negative_relaxed(i, v); + return raw_atomic_add_return_release(i, v) < 0; } -#define arch_atomic_add_negative_release arch_atomic_add_negative_release #endif -#ifndef arch_atomic_add_negative +#if defined(arch_atomic_add_negative_relaxed) +#define raw_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed +#elif defined(arch_atomic_add_negative) +#define raw_atomic_add_negative_relaxed arch_atomic_add_negative +#else static __always_inline bool -arch_atomic_add_negative(int i, atomic_t *v) +raw_atomic_add_negative_relaxed(int i, atomic_t *v) { - bool ret; - __atomic_pre_full_fence(); - ret = arch_atomic_add_negative_relaxed(i, v); - __atomic_post_full_fence(); - return ret; + return raw_atomic_add_return_relaxed(i, v) < 0; } -#define arch_atomic_add_negative arch_atomic_add_negative #endif -#endif /* arch_atomic_add_negative_relaxed */ - -#ifndef arch_atomic_fetch_add_unless +#if defined(arch_atomic_fetch_add_unless) +#define raw_atomic_fetch_add_unless arch_atomic_fetch_add_unless +#else static __always_inline int -arch_atomic_fetch_add_unless(atomic_t *v, int a, int u) +raw_atomic_fetch_add_unless(atomic_t *v, int a, int u) { - int c = arch_atomic_read(v); + int c = raw_atomic_read(v); do { if (unlikely(c == u)) break; - } while (!arch_atomic_try_cmpxchg(v, &c, c + a)); + } while (!raw_atomic_try_cmpxchg(v, &c, c + a)); return c; } -#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless #endif -#ifndef arch_atomic_add_unless +#if defined(arch_atomic_add_unless) +#define raw_atomic_add_unless arch_atomic_add_unless +#else static __always_inline bool -arch_atomic_add_unless(atomic_t *v, int a, int u) +raw_atomic_add_unless(atomic_t *v, int a, int u) { - return arch_atomic_fetch_add_unless(v, a, u) != u; + return raw_atomic_fetch_add_unless(v, a, u) != u; } -#define arch_atomic_add_unless arch_atomic_add_unless #endif -#ifndef arch_atomic_inc_not_zero +#if defined(arch_atomic_inc_not_zero) +#define raw_atomic_inc_not_zero arch_atomic_inc_not_zero +#else static __always_inline bool -arch_atomic_inc_not_zero(atomic_t *v) +raw_atomic_inc_not_zero(atomic_t *v) { - return arch_atomic_add_unless(v, 1, 0); + return raw_atomic_add_unless(v, 1, 0); } -#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero #endif -#ifndef arch_atomic_inc_unless_negative +#if defined(arch_atomic_inc_unless_negative) +#define raw_atomic_inc_unless_negative arch_atomic_inc_unless_negative +#else static __always_inline bool -arch_atomic_inc_unless_negative(atomic_t *v) +raw_atomic_inc_unless_negative(atomic_t *v) { - int c = arch_atomic_read(v); + int c = raw_atomic_read(v); do { if (unlikely(c < 0)) return false; - } while (!arch_atomic_try_cmpxchg(v, &c, c + 1)); + } while (!raw_atomic_try_cmpxchg(v, &c, c + 1)); return true; } -#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative #endif -#ifndef arch_atomic_dec_unless_positive +#if defined(arch_atomic_dec_unless_positive) +#define raw_atomic_dec_unless_positive arch_atomic_dec_unless_positive +#else static __always_inline bool -arch_atomic_dec_unless_positive(atomic_t *v) +raw_atomic_dec_unless_positive(atomic_t *v) { - int c = arch_atomic_read(v); + int c = raw_atomic_read(v); do { if (unlikely(c > 0)) return false; - } while (!arch_atomic_try_cmpxchg(v, &c, c - 1)); + } while (!raw_atomic_try_cmpxchg(v, &c, c - 1)); return true; } -#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive #endif -#ifndef 
arch_atomic_dec_if_positive +#if defined(arch_atomic_dec_if_positive) +#define raw_atomic_dec_if_positive arch_atomic_dec_if_positive +#else static __always_inline int -arch_atomic_dec_if_positive(atomic_t *v) +raw_atomic_dec_if_positive(atomic_t *v) { - int dec, c = arch_atomic_read(v); + int dec, c = raw_atomic_read(v); do { dec = c - 1; if (unlikely(dec < 0)) break; - } while (!arch_atomic_try_cmpxchg(v, &c, dec)); + } while (!raw_atomic_try_cmpxchg(v, &c, dec)); return dec; } -#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive #endif #ifdef CONFIG_GENERIC_ATOMIC64 #include #endif -#ifndef arch_atomic64_read_acquire +#define raw_atomic64_read arch_atomic64_read + +#if defined(arch_atomic64_read_acquire) +#define raw_atomic64_read_acquire arch_atomic64_read_acquire +#elif defined(arch_atomic64_read) +#define raw_atomic64_read_acquire arch_atomic64_read +#else static __always_inline s64 -arch_atomic64_read_acquire(const atomic64_t *v) +raw_atomic64_read_acquire(const atomic64_t *v) { s64 ret; if (__native_word(atomic64_t)) { ret = smp_load_acquire(&(v)->counter); } else { - ret = arch_atomic64_read(v); + ret = raw_atomic64_read(v); __atomic_acquire_fence(); } return ret; } -#define arch_atomic64_read_acquire arch_atomic64_read_acquire #endif -#ifndef arch_atomic64_set_release +#define raw_atomic64_set arch_atomic64_set + +#if defined(arch_atomic64_set_release) +#define raw_atomic64_set_release arch_atomic64_set_release +#elif defined(arch_atomic64_set) +#define raw_atomic64_set_release arch_atomic64_set +#else static __always_inline void -arch_atomic64_set_release(atomic64_t *v, s64 i) +raw_atomic64_set_release(atomic64_t *v, s64 i) { if (__native_word(atomic64_t)) { smp_store_release(&(v)->counter, i); } else { __atomic_release_fence(); - arch_atomic64_set(v, i); + raw_atomic64_set(v, i); } } -#define arch_atomic64_set_release arch_atomic64_set_release #endif -#ifndef arch_atomic64_add_return_relaxed -#define arch_atomic64_add_return_acquire arch_atomic64_add_return -#define arch_atomic64_add_return_release arch_atomic64_add_return -#define arch_atomic64_add_return_relaxed arch_atomic64_add_return -#else /* arch_atomic64_add_return_relaxed */ +#define raw_atomic64_add arch_atomic64_add + +#if defined(arch_atomic64_add_return) +#define raw_atomic64_add_return arch_atomic64_add_return +#elif defined(arch_atomic64_add_return_relaxed) +static __always_inline s64 +raw_atomic64_add_return(s64 i, atomic64_t *v) +{ + s64 ret; + __atomic_pre_full_fence(); + ret = arch_atomic64_add_return_relaxed(i, v); + __atomic_post_full_fence(); + return ret; +} +#else +#error "Unable to define raw_atomic64_add_return" +#endif -#ifndef arch_atomic64_add_return_acquire +#if defined(arch_atomic64_add_return_acquire) +#define raw_atomic64_add_return_acquire arch_atomic64_add_return_acquire +#elif defined(arch_atomic64_add_return_relaxed) static __always_inline s64 -arch_atomic64_add_return_acquire(s64 i, atomic64_t *v) +raw_atomic64_add_return_acquire(s64 i, atomic64_t *v) { s64 ret = arch_atomic64_add_return_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire +#elif defined(arch_atomic64_add_return) +#define raw_atomic64_add_return_acquire arch_atomic64_add_return +#else +#error "Unable to define raw_atomic64_add_return_acquire" #endif -#ifndef arch_atomic64_add_return_release +#if defined(arch_atomic64_add_return_release) +#define raw_atomic64_add_return_release arch_atomic64_add_return_release +#elif 
defined(arch_atomic64_add_return_relaxed) static __always_inline s64 -arch_atomic64_add_return_release(s64 i, atomic64_t *v) +raw_atomic64_add_return_release(s64 i, atomic64_t *v) { __atomic_release_fence(); return arch_atomic64_add_return_relaxed(i, v); } -#define arch_atomic64_add_return_release arch_atomic64_add_return_release +#elif defined(arch_atomic64_add_return) +#define raw_atomic64_add_return_release arch_atomic64_add_return +#else +#error "Unable to define raw_atomic64_add_return_release" +#endif + +#if defined(arch_atomic64_add_return_relaxed) +#define raw_atomic64_add_return_relaxed arch_atomic64_add_return_relaxed +#elif defined(arch_atomic64_add_return) +#define raw_atomic64_add_return_relaxed arch_atomic64_add_return +#else +#error "Unable to define raw_atomic64_add_return_relaxed" #endif -#ifndef arch_atomic64_add_return +#if defined(arch_atomic64_fetch_add) +#define raw_atomic64_fetch_add arch_atomic64_fetch_add +#elif defined(arch_atomic64_fetch_add_relaxed) static __always_inline s64 -arch_atomic64_add_return(s64 i, atomic64_t *v) +raw_atomic64_fetch_add(s64 i, atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_add_return_relaxed(i, v); + ret = arch_atomic64_fetch_add_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_add_return arch_atomic64_add_return +#else +#error "Unable to define raw_atomic64_fetch_add" #endif -#endif /* arch_atomic64_add_return_relaxed */ - -#ifndef arch_atomic64_fetch_add_relaxed -#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add -#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add -#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add -#else /* arch_atomic64_fetch_add_relaxed */ - -#ifndef arch_atomic64_fetch_add_acquire +#if defined(arch_atomic64_fetch_add_acquire) +#define raw_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire +#elif defined(arch_atomic64_fetch_add_relaxed) static __always_inline s64 -arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) +raw_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) { s64 ret = arch_atomic64_fetch_add_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire +#elif defined(arch_atomic64_fetch_add) +#define raw_atomic64_fetch_add_acquire arch_atomic64_fetch_add +#else +#error "Unable to define raw_atomic64_fetch_add_acquire" #endif -#ifndef arch_atomic64_fetch_add_release +#if defined(arch_atomic64_fetch_add_release) +#define raw_atomic64_fetch_add_release arch_atomic64_fetch_add_release +#elif defined(arch_atomic64_fetch_add_relaxed) static __always_inline s64 -arch_atomic64_fetch_add_release(s64 i, atomic64_t *v) +raw_atomic64_fetch_add_release(s64 i, atomic64_t *v) { __atomic_release_fence(); return arch_atomic64_fetch_add_relaxed(i, v); } -#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release +#elif defined(arch_atomic64_fetch_add) +#define raw_atomic64_fetch_add_release arch_atomic64_fetch_add +#else +#error "Unable to define raw_atomic64_fetch_add_release" +#endif + +#if defined(arch_atomic64_fetch_add_relaxed) +#define raw_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed +#elif defined(arch_atomic64_fetch_add) +#define raw_atomic64_fetch_add_relaxed arch_atomic64_fetch_add +#else +#error "Unable to define raw_atomic64_fetch_add_relaxed" #endif -#ifndef arch_atomic64_fetch_add +#define raw_atomic64_sub arch_atomic64_sub + +#if defined(arch_atomic64_sub_return) +#define raw_atomic64_sub_return 
arch_atomic64_sub_return +#elif defined(arch_atomic64_sub_return_relaxed) static __always_inline s64 -arch_atomic64_fetch_add(s64 i, atomic64_t *v) +raw_atomic64_sub_return(s64 i, atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_fetch_add_relaxed(i, v); + ret = arch_atomic64_sub_return_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_fetch_add arch_atomic64_fetch_add +#else +#error "Unable to define raw_atomic64_sub_return" #endif -#endif /* arch_atomic64_fetch_add_relaxed */ - -#ifndef arch_atomic64_sub_return_relaxed -#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return -#define arch_atomic64_sub_return_release arch_atomic64_sub_return -#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return -#else /* arch_atomic64_sub_return_relaxed */ - -#ifndef arch_atomic64_sub_return_acquire +#if defined(arch_atomic64_sub_return_acquire) +#define raw_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire +#elif defined(arch_atomic64_sub_return_relaxed) static __always_inline s64 -arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v) +raw_atomic64_sub_return_acquire(s64 i, atomic64_t *v) { s64 ret = arch_atomic64_sub_return_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire +#elif defined(arch_atomic64_sub_return) +#define raw_atomic64_sub_return_acquire arch_atomic64_sub_return +#else +#error "Unable to define raw_atomic64_sub_return_acquire" #endif -#ifndef arch_atomic64_sub_return_release +#if defined(arch_atomic64_sub_return_release) +#define raw_atomic64_sub_return_release arch_atomic64_sub_return_release +#elif defined(arch_atomic64_sub_return_relaxed) static __always_inline s64 -arch_atomic64_sub_return_release(s64 i, atomic64_t *v) +raw_atomic64_sub_return_release(s64 i, atomic64_t *v) { __atomic_release_fence(); return arch_atomic64_sub_return_relaxed(i, v); } -#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release +#elif defined(arch_atomic64_sub_return) +#define raw_atomic64_sub_return_release arch_atomic64_sub_return +#else +#error "Unable to define raw_atomic64_sub_return_release" +#endif + +#if defined(arch_atomic64_sub_return_relaxed) +#define raw_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed +#elif defined(arch_atomic64_sub_return) +#define raw_atomic64_sub_return_relaxed arch_atomic64_sub_return +#else +#error "Unable to define raw_atomic64_sub_return_relaxed" #endif -#ifndef arch_atomic64_sub_return +#if defined(arch_atomic64_fetch_sub) +#define raw_atomic64_fetch_sub arch_atomic64_fetch_sub +#elif defined(arch_atomic64_fetch_sub_relaxed) static __always_inline s64 -arch_atomic64_sub_return(s64 i, atomic64_t *v) +raw_atomic64_fetch_sub(s64 i, atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_sub_return_relaxed(i, v); + ret = arch_atomic64_fetch_sub_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_sub_return arch_atomic64_sub_return +#else +#error "Unable to define raw_atomic64_fetch_sub" #endif -#endif /* arch_atomic64_sub_return_relaxed */ - -#ifndef arch_atomic64_fetch_sub_relaxed -#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub -#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub -#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub -#else /* arch_atomic64_fetch_sub_relaxed */ - -#ifndef arch_atomic64_fetch_sub_acquire +#if defined(arch_atomic64_fetch_sub_acquire) +#define 
raw_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire +#elif defined(arch_atomic64_fetch_sub_relaxed) static __always_inline s64 -arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) +raw_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) { s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire +#elif defined(arch_atomic64_fetch_sub) +#define raw_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub +#else +#error "Unable to define raw_atomic64_fetch_sub_acquire" #endif -#ifndef arch_atomic64_fetch_sub_release +#if defined(arch_atomic64_fetch_sub_release) +#define raw_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release +#elif defined(arch_atomic64_fetch_sub_relaxed) static __always_inline s64 -arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v) +raw_atomic64_fetch_sub_release(s64 i, atomic64_t *v) { __atomic_release_fence(); return arch_atomic64_fetch_sub_relaxed(i, v); } -#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release +#elif defined(arch_atomic64_fetch_sub) +#define raw_atomic64_fetch_sub_release arch_atomic64_fetch_sub +#else +#error "Unable to define raw_atomic64_fetch_sub_release" #endif -#ifndef arch_atomic64_fetch_sub -static __always_inline s64 -arch_atomic64_fetch_sub(s64 i, atomic64_t *v) -{ - s64 ret; - __atomic_pre_full_fence(); - ret = arch_atomic64_fetch_sub_relaxed(i, v); - __atomic_post_full_fence(); - return ret; -} -#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub +#if defined(arch_atomic64_fetch_sub_relaxed) +#define raw_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed +#elif defined(arch_atomic64_fetch_sub) +#define raw_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub +#else +#error "Unable to define raw_atomic64_fetch_sub_relaxed" #endif -#endif /* arch_atomic64_fetch_sub_relaxed */ - -#ifndef arch_atomic64_inc +#if defined(arch_atomic64_inc) +#define raw_atomic64_inc arch_atomic64_inc +#else static __always_inline void -arch_atomic64_inc(atomic64_t *v) +raw_atomic64_inc(atomic64_t *v) { - arch_atomic64_add(1, v); + raw_atomic64_add(1, v); } -#define arch_atomic64_inc arch_atomic64_inc #endif -#ifndef arch_atomic64_inc_return_relaxed -#ifdef arch_atomic64_inc_return -#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return -#define arch_atomic64_inc_return_release arch_atomic64_inc_return -#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return -#endif /* arch_atomic64_inc_return */ - -#ifndef arch_atomic64_inc_return +#if defined(arch_atomic64_inc_return) +#define raw_atomic64_inc_return arch_atomic64_inc_return +#elif defined(arch_atomic64_inc_return_relaxed) static __always_inline s64 -arch_atomic64_inc_return(atomic64_t *v) +raw_atomic64_inc_return(atomic64_t *v) { - return arch_atomic64_add_return(1, v); + s64 ret; + __atomic_pre_full_fence(); + ret = arch_atomic64_inc_return_relaxed(v); + __atomic_post_full_fence(); + return ret; } -#define arch_atomic64_inc_return arch_atomic64_inc_return -#endif - -#ifndef arch_atomic64_inc_return_acquire +#else static __always_inline s64 -arch_atomic64_inc_return_acquire(atomic64_t *v) +raw_atomic64_inc_return(atomic64_t *v) { - return arch_atomic64_add_return_acquire(1, v); + return raw_atomic64_add_return(1, v); } -#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire #endif -#ifndef arch_atomic64_inc_return_release +#if defined(arch_atomic64_inc_return_acquire) +#define raw_atomic64_inc_return_acquire 
arch_atomic64_inc_return_acquire +#elif defined(arch_atomic64_inc_return_relaxed) static __always_inline s64 -arch_atomic64_inc_return_release(atomic64_t *v) +raw_atomic64_inc_return_acquire(atomic64_t *v) { - return arch_atomic64_add_return_release(1, v); + s64 ret = arch_atomic64_inc_return_relaxed(v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release -#endif - -#ifndef arch_atomic64_inc_return_relaxed +#elif defined(arch_atomic64_inc_return) +#define raw_atomic64_inc_return_acquire arch_atomic64_inc_return +#else static __always_inline s64 -arch_atomic64_inc_return_relaxed(atomic64_t *v) +raw_atomic64_inc_return_acquire(atomic64_t *v) { - return arch_atomic64_add_return_relaxed(1, v); + return raw_atomic64_add_return_acquire(1, v); } -#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed #endif -#else /* arch_atomic64_inc_return_relaxed */ - -#ifndef arch_atomic64_inc_return_acquire +#if defined(arch_atomic64_inc_return_release) +#define raw_atomic64_inc_return_release arch_atomic64_inc_return_release +#elif defined(arch_atomic64_inc_return_relaxed) static __always_inline s64 -arch_atomic64_inc_return_acquire(atomic64_t *v) +raw_atomic64_inc_return_release(atomic64_t *v) { - s64 ret = arch_atomic64_inc_return_relaxed(v); - __atomic_acquire_fence(); - return ret; + __atomic_release_fence(); + return arch_atomic64_inc_return_relaxed(v); +} +#elif defined(arch_atomic64_inc_return) +#define raw_atomic64_inc_return_release arch_atomic64_inc_return +#else +static __always_inline s64 +raw_atomic64_inc_return_release(atomic64_t *v) +{ + return raw_atomic64_add_return_release(1, v); } -#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire #endif -#ifndef arch_atomic64_inc_return_release +#if defined(arch_atomic64_inc_return_relaxed) +#define raw_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed +#elif defined(arch_atomic64_inc_return) +#define raw_atomic64_inc_return_relaxed arch_atomic64_inc_return +#else static __always_inline s64 -arch_atomic64_inc_return_release(atomic64_t *v) +raw_atomic64_inc_return_relaxed(atomic64_t *v) { - __atomic_release_fence(); - return arch_atomic64_inc_return_relaxed(v); + return raw_atomic64_add_return_relaxed(1, v); } -#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release #endif -#ifndef arch_atomic64_inc_return +#if defined(arch_atomic64_fetch_inc) +#define raw_atomic64_fetch_inc arch_atomic64_fetch_inc +#elif defined(arch_atomic64_fetch_inc_relaxed) static __always_inline s64 -arch_atomic64_inc_return(atomic64_t *v) +raw_atomic64_fetch_inc(atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_inc_return_relaxed(v); + ret = arch_atomic64_fetch_inc_relaxed(v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_inc_return arch_atomic64_inc_return -#endif - -#endif /* arch_atomic64_inc_return_relaxed */ - -#ifndef arch_atomic64_fetch_inc_relaxed -#ifdef arch_atomic64_fetch_inc -#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc -#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc -#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc -#endif /* arch_atomic64_fetch_inc */ - -#ifndef arch_atomic64_fetch_inc +#else static __always_inline s64 -arch_atomic64_fetch_inc(atomic64_t *v) +raw_atomic64_fetch_inc(atomic64_t *v) { - return arch_atomic64_fetch_add(1, v); + return raw_atomic64_fetch_add(1, v); } -#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc 
#endif -#ifndef arch_atomic64_fetch_inc_acquire +#if defined(arch_atomic64_fetch_inc_acquire) +#define raw_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire +#elif defined(arch_atomic64_fetch_inc_relaxed) +static __always_inline s64 +raw_atomic64_fetch_inc_acquire(atomic64_t *v) +{ + s64 ret = arch_atomic64_fetch_inc_relaxed(v); + __atomic_acquire_fence(); + return ret; +} +#elif defined(arch_atomic64_fetch_inc) +#define raw_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc +#else static __always_inline s64 -arch_atomic64_fetch_inc_acquire(atomic64_t *v) +raw_atomic64_fetch_inc_acquire(atomic64_t *v) { - return arch_atomic64_fetch_add_acquire(1, v); + return raw_atomic64_fetch_add_acquire(1, v); } -#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire #endif -#ifndef arch_atomic64_fetch_inc_release +#if defined(arch_atomic64_fetch_inc_release) +#define raw_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release +#elif defined(arch_atomic64_fetch_inc_relaxed) static __always_inline s64 -arch_atomic64_fetch_inc_release(atomic64_t *v) +raw_atomic64_fetch_inc_release(atomic64_t *v) { - return arch_atomic64_fetch_add_release(1, v); + __atomic_release_fence(); + return arch_atomic64_fetch_inc_relaxed(v); } -#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release -#endif - -#ifndef arch_atomic64_fetch_inc_relaxed +#elif defined(arch_atomic64_fetch_inc) +#define raw_atomic64_fetch_inc_release arch_atomic64_fetch_inc +#else static __always_inline s64 -arch_atomic64_fetch_inc_relaxed(atomic64_t *v) +raw_atomic64_fetch_inc_release(atomic64_t *v) { - return arch_atomic64_fetch_add_relaxed(1, v); + return raw_atomic64_fetch_add_release(1, v); } -#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed #endif -#else /* arch_atomic64_fetch_inc_relaxed */ - -#ifndef arch_atomic64_fetch_inc_acquire +#if defined(arch_atomic64_fetch_inc_relaxed) +#define raw_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed +#elif defined(arch_atomic64_fetch_inc) +#define raw_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc +#else static __always_inline s64 -arch_atomic64_fetch_inc_acquire(atomic64_t *v) +raw_atomic64_fetch_inc_relaxed(atomic64_t *v) { - s64 ret = arch_atomic64_fetch_inc_relaxed(v); - __atomic_acquire_fence(); - return ret; + return raw_atomic64_fetch_add_relaxed(1, v); } -#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire #endif -#ifndef arch_atomic64_fetch_inc_release -static __always_inline s64 -arch_atomic64_fetch_inc_release(atomic64_t *v) +#if defined(arch_atomic64_dec) +#define raw_atomic64_dec arch_atomic64_dec +#else +static __always_inline void +raw_atomic64_dec(atomic64_t *v) { - __atomic_release_fence(); - return arch_atomic64_fetch_inc_relaxed(v); + raw_atomic64_sub(1, v); } -#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release #endif -#ifndef arch_atomic64_fetch_inc +#if defined(arch_atomic64_dec_return) +#define raw_atomic64_dec_return arch_atomic64_dec_return +#elif defined(arch_atomic64_dec_return_relaxed) static __always_inline s64 -arch_atomic64_fetch_inc(atomic64_t *v) +raw_atomic64_dec_return(atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_fetch_inc_relaxed(v); + ret = arch_atomic64_dec_return_relaxed(v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc -#endif - -#endif /* arch_atomic64_fetch_inc_relaxed */ - -#ifndef arch_atomic64_dec -static __always_inline void -arch_atomic64_dec(atomic64_t *v) -{ 
- arch_atomic64_sub(1, v); -} -#define arch_atomic64_dec arch_atomic64_dec -#endif - -#ifndef arch_atomic64_dec_return_relaxed -#ifdef arch_atomic64_dec_return -#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return -#define arch_atomic64_dec_return_release arch_atomic64_dec_return -#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return -#endif /* arch_atomic64_dec_return */ - -#ifndef arch_atomic64_dec_return +#else static __always_inline s64 -arch_atomic64_dec_return(atomic64_t *v) +raw_atomic64_dec_return(atomic64_t *v) { - return arch_atomic64_sub_return(1, v); + return raw_atomic64_sub_return(1, v); } -#define arch_atomic64_dec_return arch_atomic64_dec_return #endif -#ifndef arch_atomic64_dec_return_acquire +#if defined(arch_atomic64_dec_return_acquire) +#define raw_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire +#elif defined(arch_atomic64_dec_return_relaxed) static __always_inline s64 -arch_atomic64_dec_return_acquire(atomic64_t *v) +raw_atomic64_dec_return_acquire(atomic64_t *v) { - return arch_atomic64_sub_return_acquire(1, v); + s64 ret = arch_atomic64_dec_return_relaxed(v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire -#endif - -#ifndef arch_atomic64_dec_return_release +#elif defined(arch_atomic64_dec_return) +#define raw_atomic64_dec_return_acquire arch_atomic64_dec_return +#else static __always_inline s64 -arch_atomic64_dec_return_release(atomic64_t *v) +raw_atomic64_dec_return_acquire(atomic64_t *v) { - return arch_atomic64_sub_return_release(1, v); + return raw_atomic64_sub_return_acquire(1, v); } -#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release #endif -#ifndef arch_atomic64_dec_return_relaxed +#if defined(arch_atomic64_dec_return_release) +#define raw_atomic64_dec_return_release arch_atomic64_dec_return_release +#elif defined(arch_atomic64_dec_return_relaxed) static __always_inline s64 -arch_atomic64_dec_return_relaxed(atomic64_t *v) +raw_atomic64_dec_return_release(atomic64_t *v) { - return arch_atomic64_sub_return_relaxed(1, v); + __atomic_release_fence(); + return arch_atomic64_dec_return_relaxed(v); } -#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed -#endif - -#else /* arch_atomic64_dec_return_relaxed */ - -#ifndef arch_atomic64_dec_return_acquire +#elif defined(arch_atomic64_dec_return) +#define raw_atomic64_dec_return_release arch_atomic64_dec_return +#else static __always_inline s64 -arch_atomic64_dec_return_acquire(atomic64_t *v) +raw_atomic64_dec_return_release(atomic64_t *v) { - s64 ret = arch_atomic64_dec_return_relaxed(v); - __atomic_acquire_fence(); - return ret; + return raw_atomic64_sub_return_release(1, v); } -#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire #endif -#ifndef arch_atomic64_dec_return_release +#if defined(arch_atomic64_dec_return_relaxed) +#define raw_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed +#elif defined(arch_atomic64_dec_return) +#define raw_atomic64_dec_return_relaxed arch_atomic64_dec_return +#else static __always_inline s64 -arch_atomic64_dec_return_release(atomic64_t *v) +raw_atomic64_dec_return_relaxed(atomic64_t *v) { - __atomic_release_fence(); - return arch_atomic64_dec_return_relaxed(v); + return raw_atomic64_sub_return_relaxed(1, v); } -#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release #endif -#ifndef arch_atomic64_dec_return +#if defined(arch_atomic64_fetch_dec) +#define raw_atomic64_fetch_dec 
arch_atomic64_fetch_dec +#elif defined(arch_atomic64_fetch_dec_relaxed) static __always_inline s64 -arch_atomic64_dec_return(atomic64_t *v) +raw_atomic64_fetch_dec(atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_dec_return_relaxed(v); + ret = arch_atomic64_fetch_dec_relaxed(v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_dec_return arch_atomic64_dec_return -#endif - -#endif /* arch_atomic64_dec_return_relaxed */ - -#ifndef arch_atomic64_fetch_dec_relaxed -#ifdef arch_atomic64_fetch_dec -#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec -#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec -#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec -#endif /* arch_atomic64_fetch_dec */ - -#ifndef arch_atomic64_fetch_dec +#else static __always_inline s64 -arch_atomic64_fetch_dec(atomic64_t *v) +raw_atomic64_fetch_dec(atomic64_t *v) { - return arch_atomic64_fetch_sub(1, v); + return raw_atomic64_fetch_sub(1, v); } -#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec #endif -#ifndef arch_atomic64_fetch_dec_acquire +#if defined(arch_atomic64_fetch_dec_acquire) +#define raw_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire +#elif defined(arch_atomic64_fetch_dec_relaxed) static __always_inline s64 -arch_atomic64_fetch_dec_acquire(atomic64_t *v) +raw_atomic64_fetch_dec_acquire(atomic64_t *v) { - return arch_atomic64_fetch_sub_acquire(1, v); + s64 ret = arch_atomic64_fetch_dec_relaxed(v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire -#endif - -#ifndef arch_atomic64_fetch_dec_release +#elif defined(arch_atomic64_fetch_dec) +#define raw_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec +#else static __always_inline s64 -arch_atomic64_fetch_dec_release(atomic64_t *v) +raw_atomic64_fetch_dec_acquire(atomic64_t *v) { - return arch_atomic64_fetch_sub_release(1, v); + return raw_atomic64_fetch_sub_acquire(1, v); } -#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release #endif -#ifndef arch_atomic64_fetch_dec_relaxed +#if defined(arch_atomic64_fetch_dec_release) +#define raw_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release +#elif defined(arch_atomic64_fetch_dec_relaxed) static __always_inline s64 -arch_atomic64_fetch_dec_relaxed(atomic64_t *v) +raw_atomic64_fetch_dec_release(atomic64_t *v) { - return arch_atomic64_fetch_sub_relaxed(1, v); + __atomic_release_fence(); + return arch_atomic64_fetch_dec_relaxed(v); } -#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed -#endif - -#else /* arch_atomic64_fetch_dec_relaxed */ - -#ifndef arch_atomic64_fetch_dec_acquire +#elif defined(arch_atomic64_fetch_dec) +#define raw_atomic64_fetch_dec_release arch_atomic64_fetch_dec +#else static __always_inline s64 -arch_atomic64_fetch_dec_acquire(atomic64_t *v) +raw_atomic64_fetch_dec_release(atomic64_t *v) { - s64 ret = arch_atomic64_fetch_dec_relaxed(v); - __atomic_acquire_fence(); - return ret; + return raw_atomic64_fetch_sub_release(1, v); } -#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire #endif -#ifndef arch_atomic64_fetch_dec_release +#if defined(arch_atomic64_fetch_dec_relaxed) +#define raw_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed +#elif defined(arch_atomic64_fetch_dec) +#define raw_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec +#else static __always_inline s64 -arch_atomic64_fetch_dec_release(atomic64_t *v) +raw_atomic64_fetch_dec_relaxed(atomic64_t *v) { - 
__atomic_release_fence(); - return arch_atomic64_fetch_dec_relaxed(v); + return raw_atomic64_fetch_sub_relaxed(1, v); } -#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release #endif -#ifndef arch_atomic64_fetch_dec +#define raw_atomic64_and arch_atomic64_and + +#if defined(arch_atomic64_fetch_and) +#define raw_atomic64_fetch_and arch_atomic64_fetch_and +#elif defined(arch_atomic64_fetch_and_relaxed) static __always_inline s64 -arch_atomic64_fetch_dec(atomic64_t *v) +raw_atomic64_fetch_and(s64 i, atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_fetch_dec_relaxed(v); + ret = arch_atomic64_fetch_and_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec +#else +#error "Unable to define raw_atomic64_fetch_and" #endif -#endif /* arch_atomic64_fetch_dec_relaxed */ - -#ifndef arch_atomic64_fetch_and_relaxed -#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and -#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and -#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and -#else /* arch_atomic64_fetch_and_relaxed */ - -#ifndef arch_atomic64_fetch_and_acquire +#if defined(arch_atomic64_fetch_and_acquire) +#define raw_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire +#elif defined(arch_atomic64_fetch_and_relaxed) static __always_inline s64 -arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) +raw_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) { s64 ret = arch_atomic64_fetch_and_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire +#elif defined(arch_atomic64_fetch_and) +#define raw_atomic64_fetch_and_acquire arch_atomic64_fetch_and +#else +#error "Unable to define raw_atomic64_fetch_and_acquire" #endif -#ifndef arch_atomic64_fetch_and_release +#if defined(arch_atomic64_fetch_and_release) +#define raw_atomic64_fetch_and_release arch_atomic64_fetch_and_release +#elif defined(arch_atomic64_fetch_and_relaxed) static __always_inline s64 -arch_atomic64_fetch_and_release(s64 i, atomic64_t *v) +raw_atomic64_fetch_and_release(s64 i, atomic64_t *v) { __atomic_release_fence(); return arch_atomic64_fetch_and_relaxed(i, v); } -#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release +#elif defined(arch_atomic64_fetch_and) +#define raw_atomic64_fetch_and_release arch_atomic64_fetch_and +#else +#error "Unable to define raw_atomic64_fetch_and_release" #endif -#ifndef arch_atomic64_fetch_and -static __always_inline s64 -arch_atomic64_fetch_and(s64 i, atomic64_t *v) -{ - s64 ret; - __atomic_pre_full_fence(); - ret = arch_atomic64_fetch_and_relaxed(i, v); - __atomic_post_full_fence(); - return ret; -} -#define arch_atomic64_fetch_and arch_atomic64_fetch_and +#if defined(arch_atomic64_fetch_and_relaxed) +#define raw_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed +#elif defined(arch_atomic64_fetch_and) +#define raw_atomic64_fetch_and_relaxed arch_atomic64_fetch_and +#else +#error "Unable to define raw_atomic64_fetch_and_relaxed" #endif -#endif /* arch_atomic64_fetch_and_relaxed */ - -#ifndef arch_atomic64_andnot +#if defined(arch_atomic64_andnot) +#define raw_atomic64_andnot arch_atomic64_andnot +#else static __always_inline void -arch_atomic64_andnot(s64 i, atomic64_t *v) +raw_atomic64_andnot(s64 i, atomic64_t *v) { - arch_atomic64_and(~i, v); + raw_atomic64_and(~i, v); } -#define arch_atomic64_andnot arch_atomic64_andnot #endif -#ifndef arch_atomic64_fetch_andnot_relaxed 
-#ifdef arch_atomic64_fetch_andnot -#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot -#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot -#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot -#endif /* arch_atomic64_fetch_andnot */ - -#ifndef arch_atomic64_fetch_andnot +#if defined(arch_atomic64_fetch_andnot) +#define raw_atomic64_fetch_andnot arch_atomic64_fetch_andnot +#elif defined(arch_atomic64_fetch_andnot_relaxed) static __always_inline s64 -arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) +raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) { - return arch_atomic64_fetch_and(~i, v); + s64 ret; + __atomic_pre_full_fence(); + ret = arch_atomic64_fetch_andnot_relaxed(i, v); + __atomic_post_full_fence(); + return ret; } -#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot -#endif - -#ifndef arch_atomic64_fetch_andnot_acquire +#else static __always_inline s64 -arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) +raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) { - return arch_atomic64_fetch_and_acquire(~i, v); + return raw_atomic64_fetch_and(~i, v); } -#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire #endif -#ifndef arch_atomic64_fetch_andnot_release +#if defined(arch_atomic64_fetch_andnot_acquire) +#define raw_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire +#elif defined(arch_atomic64_fetch_andnot_relaxed) static __always_inline s64 -arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) +raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) { - return arch_atomic64_fetch_and_release(~i, v); + s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release -#endif - -#ifndef arch_atomic64_fetch_andnot_relaxed +#elif defined(arch_atomic64_fetch_andnot) +#define raw_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot +#else static __always_inline s64 -arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) +raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) { - return arch_atomic64_fetch_and_relaxed(~i, v); + return raw_atomic64_fetch_and_acquire(~i, v); } -#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed #endif -#else /* arch_atomic64_fetch_andnot_relaxed */ - -#ifndef arch_atomic64_fetch_andnot_acquire +#if defined(arch_atomic64_fetch_andnot_release) +#define raw_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release +#elif defined(arch_atomic64_fetch_andnot_relaxed) static __always_inline s64 -arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) +raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) { - s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); - __atomic_acquire_fence(); - return ret; + __atomic_release_fence(); + return arch_atomic64_fetch_andnot_relaxed(i, v); +} +#elif defined(arch_atomic64_fetch_andnot) +#define raw_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot +#else +static __always_inline s64 +raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) +{ + return raw_atomic64_fetch_and_release(~i, v); } -#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire #endif -#ifndef arch_atomic64_fetch_andnot_release +#if defined(arch_atomic64_fetch_andnot_relaxed) +#define raw_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed +#elif defined(arch_atomic64_fetch_andnot) +#define raw_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot +#else static 
__always_inline s64 -arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) +raw_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) { - __atomic_release_fence(); - return arch_atomic64_fetch_andnot_relaxed(i, v); + return raw_atomic64_fetch_and_relaxed(~i, v); } -#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release #endif -#ifndef arch_atomic64_fetch_andnot +#define raw_atomic64_or arch_atomic64_or + +#if defined(arch_atomic64_fetch_or) +#define raw_atomic64_fetch_or arch_atomic64_fetch_or +#elif defined(arch_atomic64_fetch_or_relaxed) static __always_inline s64 -arch_atomic64_fetch_andnot(s64 i, atomic64_t *v) +raw_atomic64_fetch_or(s64 i, atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_fetch_andnot_relaxed(i, v); + ret = arch_atomic64_fetch_or_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot +#else +#error "Unable to define raw_atomic64_fetch_or" #endif -#endif /* arch_atomic64_fetch_andnot_relaxed */ - -#ifndef arch_atomic64_fetch_or_relaxed -#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or -#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or -#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or -#else /* arch_atomic64_fetch_or_relaxed */ - -#ifndef arch_atomic64_fetch_or_acquire +#if defined(arch_atomic64_fetch_or_acquire) +#define raw_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire +#elif defined(arch_atomic64_fetch_or_relaxed) static __always_inline s64 -arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) +raw_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) { s64 ret = arch_atomic64_fetch_or_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire +#elif defined(arch_atomic64_fetch_or) +#define raw_atomic64_fetch_or_acquire arch_atomic64_fetch_or +#else +#error "Unable to define raw_atomic64_fetch_or_acquire" #endif -#ifndef arch_atomic64_fetch_or_release +#if defined(arch_atomic64_fetch_or_release) +#define raw_atomic64_fetch_or_release arch_atomic64_fetch_or_release +#elif defined(arch_atomic64_fetch_or_relaxed) static __always_inline s64 -arch_atomic64_fetch_or_release(s64 i, atomic64_t *v) +raw_atomic64_fetch_or_release(s64 i, atomic64_t *v) { __atomic_release_fence(); return arch_atomic64_fetch_or_relaxed(i, v); } -#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release +#elif defined(arch_atomic64_fetch_or) +#define raw_atomic64_fetch_or_release arch_atomic64_fetch_or +#else +#error "Unable to define raw_atomic64_fetch_or_release" +#endif + +#if defined(arch_atomic64_fetch_or_relaxed) +#define raw_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed +#elif defined(arch_atomic64_fetch_or) +#define raw_atomic64_fetch_or_relaxed arch_atomic64_fetch_or +#else +#error "Unable to define raw_atomic64_fetch_or_relaxed" #endif -#ifndef arch_atomic64_fetch_or +#define raw_atomic64_xor arch_atomic64_xor + +#if defined(arch_atomic64_fetch_xor) +#define raw_atomic64_fetch_xor arch_atomic64_fetch_xor +#elif defined(arch_atomic64_fetch_xor_relaxed) static __always_inline s64 -arch_atomic64_fetch_or(s64 i, atomic64_t *v) +raw_atomic64_fetch_xor(s64 i, atomic64_t *v) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_fetch_or_relaxed(i, v); + ret = arch_atomic64_fetch_xor_relaxed(i, v); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_fetch_or arch_atomic64_fetch_or +#else +#error "Unable to define 
raw_atomic64_fetch_xor" #endif -#endif /* arch_atomic64_fetch_or_relaxed */ - -#ifndef arch_atomic64_fetch_xor_relaxed -#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor -#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor -#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor -#else /* arch_atomic64_fetch_xor_relaxed */ - -#ifndef arch_atomic64_fetch_xor_acquire +#if defined(arch_atomic64_fetch_xor_acquire) +#define raw_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire +#elif defined(arch_atomic64_fetch_xor_relaxed) static __always_inline s64 -arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) +raw_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) { s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); __atomic_acquire_fence(); return ret; } -#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire +#elif defined(arch_atomic64_fetch_xor) +#define raw_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor +#else +#error "Unable to define raw_atomic64_fetch_xor_acquire" #endif -#ifndef arch_atomic64_fetch_xor_release +#if defined(arch_atomic64_fetch_xor_release) +#define raw_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release +#elif defined(arch_atomic64_fetch_xor_relaxed) static __always_inline s64 -arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v) +raw_atomic64_fetch_xor_release(s64 i, atomic64_t *v) { __atomic_release_fence(); return arch_atomic64_fetch_xor_relaxed(i, v); } -#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release +#elif defined(arch_atomic64_fetch_xor) +#define raw_atomic64_fetch_xor_release arch_atomic64_fetch_xor +#else +#error "Unable to define raw_atomic64_fetch_xor_release" +#endif + +#if defined(arch_atomic64_fetch_xor_relaxed) +#define raw_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed +#elif defined(arch_atomic64_fetch_xor) +#define raw_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor +#else +#error "Unable to define raw_atomic64_fetch_xor_relaxed" #endif -#ifndef arch_atomic64_fetch_xor +#if defined(arch_atomic64_xchg) +#define raw_atomic64_xchg arch_atomic64_xchg +#elif defined(arch_atomic64_xchg_relaxed) static __always_inline s64 -arch_atomic64_fetch_xor(s64 i, atomic64_t *v) +raw_atomic64_xchg(atomic64_t *v, s64 i) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_fetch_xor_relaxed(i, v); + ret = arch_atomic64_xchg_relaxed(v, i); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor -#endif - -#endif /* arch_atomic64_fetch_xor_relaxed */ - -#ifndef arch_atomic64_xchg_relaxed -#ifdef arch_atomic64_xchg -#define arch_atomic64_xchg_acquire arch_atomic64_xchg -#define arch_atomic64_xchg_release arch_atomic64_xchg -#define arch_atomic64_xchg_relaxed arch_atomic64_xchg -#endif /* arch_atomic64_xchg */ - -#ifndef arch_atomic64_xchg +#else static __always_inline s64 -arch_atomic64_xchg(atomic64_t *v, s64 new) +raw_atomic64_xchg(atomic64_t *v, s64 new) { - return arch_xchg(&v->counter, new); + return raw_xchg(&v->counter, new); } -#define arch_atomic64_xchg arch_atomic64_xchg #endif -#ifndef arch_atomic64_xchg_acquire +#if defined(arch_atomic64_xchg_acquire) +#define raw_atomic64_xchg_acquire arch_atomic64_xchg_acquire +#elif defined(arch_atomic64_xchg_relaxed) static __always_inline s64 -arch_atomic64_xchg_acquire(atomic64_t *v, s64 new) +raw_atomic64_xchg_acquire(atomic64_t *v, s64 i) { - return arch_xchg_acquire(&v->counter, new); + s64 ret = arch_atomic64_xchg_relaxed(v, i); + __atomic_acquire_fence(); + return ret; 
} -#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire -#endif - -#ifndef arch_atomic64_xchg_release +#elif defined(arch_atomic64_xchg) +#define raw_atomic64_xchg_acquire arch_atomic64_xchg +#else static __always_inline s64 -arch_atomic64_xchg_release(atomic64_t *v, s64 new) +raw_atomic64_xchg_acquire(atomic64_t *v, s64 new) { - return arch_xchg_release(&v->counter, new); + return raw_xchg_acquire(&v->counter, new); } -#define arch_atomic64_xchg_release arch_atomic64_xchg_release #endif -#ifndef arch_atomic64_xchg_relaxed +#if defined(arch_atomic64_xchg_release) +#define raw_atomic64_xchg_release arch_atomic64_xchg_release +#elif defined(arch_atomic64_xchg_relaxed) static __always_inline s64 -arch_atomic64_xchg_relaxed(atomic64_t *v, s64 new) +raw_atomic64_xchg_release(atomic64_t *v, s64 i) { - return arch_xchg_relaxed(&v->counter, new); + __atomic_release_fence(); + return arch_atomic64_xchg_relaxed(v, i); } -#define arch_atomic64_xchg_relaxed arch_atomic64_xchg_relaxed -#endif - -#else /* arch_atomic64_xchg_relaxed */ - -#ifndef arch_atomic64_xchg_acquire +#elif defined(arch_atomic64_xchg) +#define raw_atomic64_xchg_release arch_atomic64_xchg +#else static __always_inline s64 -arch_atomic64_xchg_acquire(atomic64_t *v, s64 i) +raw_atomic64_xchg_release(atomic64_t *v, s64 new) { - s64 ret = arch_atomic64_xchg_relaxed(v, i); - __atomic_acquire_fence(); - return ret; + return raw_xchg_release(&v->counter, new); } -#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire #endif -#ifndef arch_atomic64_xchg_release +#if defined(arch_atomic64_xchg_relaxed) +#define raw_atomic64_xchg_relaxed arch_atomic64_xchg_relaxed +#elif defined(arch_atomic64_xchg) +#define raw_atomic64_xchg_relaxed arch_atomic64_xchg +#else static __always_inline s64 -arch_atomic64_xchg_release(atomic64_t *v, s64 i) +raw_atomic64_xchg_relaxed(atomic64_t *v, s64 new) { - __atomic_release_fence(); - return arch_atomic64_xchg_relaxed(v, i); + return raw_xchg_relaxed(&v->counter, new); } -#define arch_atomic64_xchg_release arch_atomic64_xchg_release #endif -#ifndef arch_atomic64_xchg +#if defined(arch_atomic64_cmpxchg) +#define raw_atomic64_cmpxchg arch_atomic64_cmpxchg +#elif defined(arch_atomic64_cmpxchg_relaxed) static __always_inline s64 -arch_atomic64_xchg(atomic64_t *v, s64 i) +raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) { s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_xchg_relaxed(v, i); + ret = arch_atomic64_cmpxchg_relaxed(v, old, new); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_xchg arch_atomic64_xchg -#endif - -#endif /* arch_atomic64_xchg_relaxed */ - -#ifndef arch_atomic64_cmpxchg_relaxed -#ifdef arch_atomic64_cmpxchg -#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg -#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg -#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg -#endif /* arch_atomic64_cmpxchg */ - -#ifndef arch_atomic64_cmpxchg +#else static __always_inline s64 -arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) +raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) { - return arch_cmpxchg(&v->counter, old, new); + return raw_cmpxchg(&v->counter, old, new); } -#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg #endif -#ifndef arch_atomic64_cmpxchg_acquire +#if defined(arch_atomic64_cmpxchg_acquire) +#define raw_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire +#elif defined(arch_atomic64_cmpxchg_relaxed) static __always_inline s64 -arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) 
+raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) { - return arch_cmpxchg_acquire(&v->counter, old, new); + s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire -#endif - -#ifndef arch_atomic64_cmpxchg_release +#elif defined(arch_atomic64_cmpxchg) +#define raw_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg +#else static __always_inline s64 -arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) +raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) { - return arch_cmpxchg_release(&v->counter, old, new); + return raw_cmpxchg_acquire(&v->counter, old, new); } -#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release #endif -#ifndef arch_atomic64_cmpxchg_relaxed +#if defined(arch_atomic64_cmpxchg_release) +#define raw_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release +#elif defined(arch_atomic64_cmpxchg_relaxed) static __always_inline s64 -arch_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) +raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) { - return arch_cmpxchg_relaxed(&v->counter, old, new); + __atomic_release_fence(); + return arch_atomic64_cmpxchg_relaxed(v, old, new); } -#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg_relaxed -#endif - -#else /* arch_atomic64_cmpxchg_relaxed */ - -#ifndef arch_atomic64_cmpxchg_acquire +#elif defined(arch_atomic64_cmpxchg) +#define raw_atomic64_cmpxchg_release arch_atomic64_cmpxchg +#else static __always_inline s64 -arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) +raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) { - s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); - __atomic_acquire_fence(); - return ret; + return raw_cmpxchg_release(&v->counter, old, new); } -#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire #endif -#ifndef arch_atomic64_cmpxchg_release +#if defined(arch_atomic64_cmpxchg_relaxed) +#define raw_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg_relaxed +#elif defined(arch_atomic64_cmpxchg) +#define raw_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg +#else static __always_inline s64 -arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) +raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) { - __atomic_release_fence(); - return arch_atomic64_cmpxchg_relaxed(v, old, new); + return raw_cmpxchg_relaxed(&v->counter, old, new); } -#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release #endif -#ifndef arch_atomic64_cmpxchg -static __always_inline s64 -arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) +#if defined(arch_atomic64_try_cmpxchg) +#define raw_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg +#elif defined(arch_atomic64_try_cmpxchg_relaxed) +static __always_inline bool +raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) { - s64 ret; + bool ret; __atomic_pre_full_fence(); - ret = arch_atomic64_cmpxchg_relaxed(v, old, new); + ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); __atomic_post_full_fence(); return ret; } -#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg -#endif - -#endif /* arch_atomic64_cmpxchg_relaxed */ - -#ifndef arch_atomic64_try_cmpxchg_relaxed -#ifdef arch_atomic64_try_cmpxchg -#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg -#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg -#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg -#endif /* arch_atomic64_try_cmpxchg */ - -#ifndef 
arch_atomic64_try_cmpxchg +#else static __always_inline bool -arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) +raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) { s64 r, o = *old; - r = arch_atomic64_cmpxchg(v, o, new); + r = raw_atomic64_cmpxchg(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); } -#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg #endif -#ifndef arch_atomic64_try_cmpxchg_acquire +#if defined(arch_atomic64_try_cmpxchg_acquire) +#define raw_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire +#elif defined(arch_atomic64_try_cmpxchg_relaxed) +static __always_inline bool +raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) +{ + bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); + __atomic_acquire_fence(); + return ret; +} +#elif defined(arch_atomic64_try_cmpxchg) +#define raw_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg +#else static __always_inline bool -arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) +raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) { s64 r, o = *old; - r = arch_atomic64_cmpxchg_acquire(v, o, new); + r = raw_atomic64_cmpxchg_acquire(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); } -#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire #endif -#ifndef arch_atomic64_try_cmpxchg_release +#if defined(arch_atomic64_try_cmpxchg_release) +#define raw_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release +#elif defined(arch_atomic64_try_cmpxchg_relaxed) +static __always_inline bool +raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) +{ + __atomic_release_fence(); + return arch_atomic64_try_cmpxchg_relaxed(v, old, new); +} +#elif defined(arch_atomic64_try_cmpxchg) +#define raw_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg +#else static __always_inline bool -arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) +raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) { s64 r, o = *old; - r = arch_atomic64_cmpxchg_release(v, o, new); + r = raw_atomic64_cmpxchg_release(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); } -#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release #endif -#ifndef arch_atomic64_try_cmpxchg_relaxed +#if defined(arch_atomic64_try_cmpxchg_relaxed) +#define raw_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed +#elif defined(arch_atomic64_try_cmpxchg) +#define raw_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg +#else static __always_inline bool -arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) +raw_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) { s64 r, o = *old; - r = arch_atomic64_cmpxchg_relaxed(v, o, new); + r = raw_atomic64_cmpxchg_relaxed(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); } -#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed #endif -#else /* arch_atomic64_try_cmpxchg_relaxed */ - -#ifndef arch_atomic64_try_cmpxchg_acquire -static __always_inline bool -arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) -{ - bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); - __atomic_acquire_fence(); - return ret; -} -#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire -#endif - -#ifndef arch_atomic64_try_cmpxchg_release -static __always_inline bool -arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) -{ - 
__atomic_release_fence(); - return arch_atomic64_try_cmpxchg_relaxed(v, old, new); -} -#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release -#endif - -#ifndef arch_atomic64_try_cmpxchg -static __always_inline bool -arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) -{ - bool ret; - __atomic_pre_full_fence(); - ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); - __atomic_post_full_fence(); - return ret; -} -#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg -#endif - -#endif /* arch_atomic64_try_cmpxchg_relaxed */ - -#ifndef arch_atomic64_sub_and_test +#if defined(arch_atomic64_sub_and_test) +#define raw_atomic64_sub_and_test arch_atomic64_sub_and_test +#else static __always_inline bool -arch_atomic64_sub_and_test(s64 i, atomic64_t *v) +raw_atomic64_sub_and_test(s64 i, atomic64_t *v) { - return arch_atomic64_sub_return(i, v) == 0; + return raw_atomic64_sub_return(i, v) == 0; } -#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test #endif -#ifndef arch_atomic64_dec_and_test +#if defined(arch_atomic64_dec_and_test) +#define raw_atomic64_dec_and_test arch_atomic64_dec_and_test +#else static __always_inline bool -arch_atomic64_dec_and_test(atomic64_t *v) +raw_atomic64_dec_and_test(atomic64_t *v) { - return arch_atomic64_dec_return(v) == 0; + return raw_atomic64_dec_return(v) == 0; } -#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test #endif -#ifndef arch_atomic64_inc_and_test +#if defined(arch_atomic64_inc_and_test) +#define raw_atomic64_inc_and_test arch_atomic64_inc_and_test +#else static __always_inline bool -arch_atomic64_inc_and_test(atomic64_t *v) +raw_atomic64_inc_and_test(atomic64_t *v) { - return arch_atomic64_inc_return(v) == 0; + return raw_atomic64_inc_return(v) == 0; } -#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test #endif -#ifndef arch_atomic64_add_negative_relaxed -#ifdef arch_atomic64_add_negative -#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative -#define arch_atomic64_add_negative_release arch_atomic64_add_negative -#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative -#endif /* arch_atomic64_add_negative */ - -#ifndef arch_atomic64_add_negative +#if defined(arch_atomic64_add_negative) +#define raw_atomic64_add_negative arch_atomic64_add_negative +#elif defined(arch_atomic64_add_negative_relaxed) static __always_inline bool -arch_atomic64_add_negative(s64 i, atomic64_t *v) +raw_atomic64_add_negative(s64 i, atomic64_t *v) { - return arch_atomic64_add_return(i, v) < 0; + bool ret; + __atomic_pre_full_fence(); + ret = arch_atomic64_add_negative_relaxed(i, v); + __atomic_post_full_fence(); + return ret; } -#define arch_atomic64_add_negative arch_atomic64_add_negative -#endif - -#ifndef arch_atomic64_add_negative_acquire +#else static __always_inline bool -arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v) +raw_atomic64_add_negative(s64 i, atomic64_t *v) { - return arch_atomic64_add_return_acquire(i, v) < 0; + return raw_atomic64_add_return(i, v) < 0; } -#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire #endif -#ifndef arch_atomic64_add_negative_release +#if defined(arch_atomic64_add_negative_acquire) +#define raw_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire +#elif defined(arch_atomic64_add_negative_relaxed) static __always_inline bool -arch_atomic64_add_negative_release(s64 i, atomic64_t *v) +raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) { - return arch_atomic64_add_return_release(i, v) < 0; + bool ret = 
arch_atomic64_add_negative_relaxed(i, v); + __atomic_acquire_fence(); + return ret; } -#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release -#endif - -#ifndef arch_atomic64_add_negative_relaxed +#elif defined(arch_atomic64_add_negative) +#define raw_atomic64_add_negative_acquire arch_atomic64_add_negative +#else static __always_inline bool -arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) +raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) { - return arch_atomic64_add_return_relaxed(i, v) < 0; + return raw_atomic64_add_return_acquire(i, v) < 0; } -#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed #endif -#else /* arch_atomic64_add_negative_relaxed */ - -#ifndef arch_atomic64_add_negative_acquire +#if defined(arch_atomic64_add_negative_release) +#define raw_atomic64_add_negative_release arch_atomic64_add_negative_release +#elif defined(arch_atomic64_add_negative_relaxed) static __always_inline bool -arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v) +raw_atomic64_add_negative_release(s64 i, atomic64_t *v) { - bool ret = arch_atomic64_add_negative_relaxed(i, v); - __atomic_acquire_fence(); - return ret; + __atomic_release_fence(); + return arch_atomic64_add_negative_relaxed(i, v); } -#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire -#endif - -#ifndef arch_atomic64_add_negative_release +#elif defined(arch_atomic64_add_negative) +#define raw_atomic64_add_negative_release arch_atomic64_add_negative +#else static __always_inline bool -arch_atomic64_add_negative_release(s64 i, atomic64_t *v) +raw_atomic64_add_negative_release(s64 i, atomic64_t *v) { - __atomic_release_fence(); - return arch_atomic64_add_negative_relaxed(i, v); + return raw_atomic64_add_return_release(i, v) < 0; } -#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release #endif -#ifndef arch_atomic64_add_negative +#if defined(arch_atomic64_add_negative_relaxed) +#define raw_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed +#elif defined(arch_atomic64_add_negative) +#define raw_atomic64_add_negative_relaxed arch_atomic64_add_negative +#else static __always_inline bool -arch_atomic64_add_negative(s64 i, atomic64_t *v) +raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) { - bool ret; - __atomic_pre_full_fence(); - ret = arch_atomic64_add_negative_relaxed(i, v); - __atomic_post_full_fence(); - return ret; + return raw_atomic64_add_return_relaxed(i, v) < 0; } -#define arch_atomic64_add_negative arch_atomic64_add_negative #endif -#endif /* arch_atomic64_add_negative_relaxed */ - -#ifndef arch_atomic64_fetch_add_unless +#if defined(arch_atomic64_fetch_add_unless) +#define raw_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless +#else static __always_inline s64 -arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) +raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) { - s64 c = arch_atomic64_read(v); + s64 c = raw_atomic64_read(v); do { if (unlikely(c == u)) break; - } while (!arch_atomic64_try_cmpxchg(v, &c, c + a)); + } while (!raw_atomic64_try_cmpxchg(v, &c, c + a)); return c; } -#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless #endif -#ifndef arch_atomic64_add_unless +#if defined(arch_atomic64_add_unless) +#define raw_atomic64_add_unless arch_atomic64_add_unless +#else static __always_inline bool -arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) +raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) { - return arch_atomic64_fetch_add_unless(v, a, u) 
!= u; + return raw_atomic64_fetch_add_unless(v, a, u) != u; } -#define arch_atomic64_add_unless arch_atomic64_add_unless #endif -#ifndef arch_atomic64_inc_not_zero +#if defined(arch_atomic64_inc_not_zero) +#define raw_atomic64_inc_not_zero arch_atomic64_inc_not_zero +#else static __always_inline bool -arch_atomic64_inc_not_zero(atomic64_t *v) +raw_atomic64_inc_not_zero(atomic64_t *v) { - return arch_atomic64_add_unless(v, 1, 0); + return raw_atomic64_add_unless(v, 1, 0); } -#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero #endif -#ifndef arch_atomic64_inc_unless_negative +#if defined(arch_atomic64_inc_unless_negative) +#define raw_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative +#else static __always_inline bool -arch_atomic64_inc_unless_negative(atomic64_t *v) +raw_atomic64_inc_unless_negative(atomic64_t *v) { - s64 c = arch_atomic64_read(v); + s64 c = raw_atomic64_read(v); do { if (unlikely(c < 0)) return false; - } while (!arch_atomic64_try_cmpxchg(v, &c, c + 1)); + } while (!raw_atomic64_try_cmpxchg(v, &c, c + 1)); return true; } -#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative #endif -#ifndef arch_atomic64_dec_unless_positive +#if defined(arch_atomic64_dec_unless_positive) +#define raw_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive +#else static __always_inline bool -arch_atomic64_dec_unless_positive(atomic64_t *v) +raw_atomic64_dec_unless_positive(atomic64_t *v) { - s64 c = arch_atomic64_read(v); + s64 c = raw_atomic64_read(v); do { if (unlikely(c > 0)) return false; - } while (!arch_atomic64_try_cmpxchg(v, &c, c - 1)); + } while (!raw_atomic64_try_cmpxchg(v, &c, c - 1)); return true; } -#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive #endif -#ifndef arch_atomic64_dec_if_positive +#if defined(arch_atomic64_dec_if_positive) +#define raw_atomic64_dec_if_positive arch_atomic64_dec_if_positive +#else static __always_inline s64 -arch_atomic64_dec_if_positive(atomic64_t *v) +raw_atomic64_dec_if_positive(atomic64_t *v) { - s64 dec, c = arch_atomic64_read(v); + s64 dec, c = raw_atomic64_read(v); do { dec = c - 1; if (unlikely(dec < 0)) break; - } while (!arch_atomic64_try_cmpxchg(v, &c, dec)); + } while (!raw_atomic64_try_cmpxchg(v, &c, dec)); return dec; } -#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive #endif #endif /* _LINUX_ATOMIC_FALLBACK_H */ -// e1cee558cc61cae887890db30fcdf93baca9f498 +// c2048fccede6fac923252290e2b303949d5dec83 diff --git a/include/linux/atomic/atomic-raw.h b/include/linux/atomic/atomic-raw.h deleted file mode 100644 index 8b2fc04cf8c5..000000000000 --- a/include/linux/atomic/atomic-raw.h +++ /dev/null @@ -1,1135 +0,0 @@ -// SPDX-License-Identifier: GPL-2.0 - -// Generated by scripts/atomic/gen-atomic-raw.sh -// DO NOT MODIFY THIS FILE DIRECTLY - -#ifndef _LINUX_ATOMIC_RAW_H -#define _LINUX_ATOMIC_RAW_H - -static __always_inline int -raw_atomic_read(const atomic_t *v) -{ - return arch_atomic_read(v); -} - -static __always_inline int -raw_atomic_read_acquire(const atomic_t *v) -{ - return arch_atomic_read_acquire(v); -} - -static __always_inline void -raw_atomic_set(atomic_t *v, int i) -{ - arch_atomic_set(v, i); -} - -static __always_inline void -raw_atomic_set_release(atomic_t *v, int i) -{ - arch_atomic_set_release(v, i); -} - -static __always_inline void -raw_atomic_add(int i, atomic_t *v) -{ - arch_atomic_add(i, v); -} - -static __always_inline int -raw_atomic_add_return(int i, atomic_t *v) -{ - return arch_atomic_add_return(i, v); -} - 
-static __always_inline int -raw_atomic_add_return_acquire(int i, atomic_t *v) -{ - return arch_atomic_add_return_acquire(i, v); -} - -static __always_inline int -raw_atomic_add_return_release(int i, atomic_t *v) -{ - return arch_atomic_add_return_release(i, v); -} - -static __always_inline int -raw_atomic_add_return_relaxed(int i, atomic_t *v) -{ - return arch_atomic_add_return_relaxed(i, v); -} - -static __always_inline int -raw_atomic_fetch_add(int i, atomic_t *v) -{ - return arch_atomic_fetch_add(i, v); -} - -static __always_inline int -raw_atomic_fetch_add_acquire(int i, atomic_t *v) -{ - return arch_atomic_fetch_add_acquire(i, v); -} - -static __always_inline int -raw_atomic_fetch_add_release(int i, atomic_t *v) -{ - return arch_atomic_fetch_add_release(i, v); -} - -static __always_inline int -raw_atomic_fetch_add_relaxed(int i, atomic_t *v) -{ - return arch_atomic_fetch_add_relaxed(i, v); -} - -static __always_inline void -raw_atomic_sub(int i, atomic_t *v) -{ - arch_atomic_sub(i, v); -} - -static __always_inline int -raw_atomic_sub_return(int i, atomic_t *v) -{ - return arch_atomic_sub_return(i, v); -} - -static __always_inline int -raw_atomic_sub_return_acquire(int i, atomic_t *v) -{ - return arch_atomic_sub_return_acquire(i, v); -} - -static __always_inline int -raw_atomic_sub_return_release(int i, atomic_t *v) -{ - return arch_atomic_sub_return_release(i, v); -} - -static __always_inline int -raw_atomic_sub_return_relaxed(int i, atomic_t *v) -{ - return arch_atomic_sub_return_relaxed(i, v); -} - -static __always_inline int -raw_atomic_fetch_sub(int i, atomic_t *v) -{ - return arch_atomic_fetch_sub(i, v); -} - -static __always_inline int -raw_atomic_fetch_sub_acquire(int i, atomic_t *v) -{ - return arch_atomic_fetch_sub_acquire(i, v); -} - -static __always_inline int -raw_atomic_fetch_sub_release(int i, atomic_t *v) -{ - return arch_atomic_fetch_sub_release(i, v); -} - -static __always_inline int -raw_atomic_fetch_sub_relaxed(int i, atomic_t *v) -{ - return arch_atomic_fetch_sub_relaxed(i, v); -} - -static __always_inline void -raw_atomic_inc(atomic_t *v) -{ - arch_atomic_inc(v); -} - -static __always_inline int -raw_atomic_inc_return(atomic_t *v) -{ - return arch_atomic_inc_return(v); -} - -static __always_inline int -raw_atomic_inc_return_acquire(atomic_t *v) -{ - return arch_atomic_inc_return_acquire(v); -} - -static __always_inline int -raw_atomic_inc_return_release(atomic_t *v) -{ - return arch_atomic_inc_return_release(v); -} - -static __always_inline int -raw_atomic_inc_return_relaxed(atomic_t *v) -{ - return arch_atomic_inc_return_relaxed(v); -} - -static __always_inline int -raw_atomic_fetch_inc(atomic_t *v) -{ - return arch_atomic_fetch_inc(v); -} - -static __always_inline int -raw_atomic_fetch_inc_acquire(atomic_t *v) -{ - return arch_atomic_fetch_inc_acquire(v); -} - -static __always_inline int -raw_atomic_fetch_inc_release(atomic_t *v) -{ - return arch_atomic_fetch_inc_release(v); -} - -static __always_inline int -raw_atomic_fetch_inc_relaxed(atomic_t *v) -{ - return arch_atomic_fetch_inc_relaxed(v); -} - -static __always_inline void -raw_atomic_dec(atomic_t *v) -{ - arch_atomic_dec(v); -} - -static __always_inline int -raw_atomic_dec_return(atomic_t *v) -{ - return arch_atomic_dec_return(v); -} - -static __always_inline int -raw_atomic_dec_return_acquire(atomic_t *v) -{ - return arch_atomic_dec_return_acquire(v); -} - -static __always_inline int -raw_atomic_dec_return_release(atomic_t *v) -{ - return arch_atomic_dec_return_release(v); -} - -static __always_inline 
int -raw_atomic_dec_return_relaxed(atomic_t *v) -{ - return arch_atomic_dec_return_relaxed(v); -} - -static __always_inline int -raw_atomic_fetch_dec(atomic_t *v) -{ - return arch_atomic_fetch_dec(v); -} - -static __always_inline int -raw_atomic_fetch_dec_acquire(atomic_t *v) -{ - return arch_atomic_fetch_dec_acquire(v); -} - -static __always_inline int -raw_atomic_fetch_dec_release(atomic_t *v) -{ - return arch_atomic_fetch_dec_release(v); -} - -static __always_inline int -raw_atomic_fetch_dec_relaxed(atomic_t *v) -{ - return arch_atomic_fetch_dec_relaxed(v); -} - -static __always_inline void -raw_atomic_and(int i, atomic_t *v) -{ - arch_atomic_and(i, v); -} - -static __always_inline int -raw_atomic_fetch_and(int i, atomic_t *v) -{ - return arch_atomic_fetch_and(i, v); -} - -static __always_inline int -raw_atomic_fetch_and_acquire(int i, atomic_t *v) -{ - return arch_atomic_fetch_and_acquire(i, v); -} - -static __always_inline int -raw_atomic_fetch_and_release(int i, atomic_t *v) -{ - return arch_atomic_fetch_and_release(i, v); -} - -static __always_inline int -raw_atomic_fetch_and_relaxed(int i, atomic_t *v) -{ - return arch_atomic_fetch_and_relaxed(i, v); -} - -static __always_inline void -raw_atomic_andnot(int i, atomic_t *v) -{ - arch_atomic_andnot(i, v); -} - -static __always_inline int -raw_atomic_fetch_andnot(int i, atomic_t *v) -{ - return arch_atomic_fetch_andnot(i, v); -} - -static __always_inline int -raw_atomic_fetch_andnot_acquire(int i, atomic_t *v) -{ - return arch_atomic_fetch_andnot_acquire(i, v); -} - -static __always_inline int -raw_atomic_fetch_andnot_release(int i, atomic_t *v) -{ - return arch_atomic_fetch_andnot_release(i, v); -} - -static __always_inline int -raw_atomic_fetch_andnot_relaxed(int i, atomic_t *v) -{ - return arch_atomic_fetch_andnot_relaxed(i, v); -} - -static __always_inline void -raw_atomic_or(int i, atomic_t *v) -{ - arch_atomic_or(i, v); -} - -static __always_inline int -raw_atomic_fetch_or(int i, atomic_t *v) -{ - return arch_atomic_fetch_or(i, v); -} - -static __always_inline int -raw_atomic_fetch_or_acquire(int i, atomic_t *v) -{ - return arch_atomic_fetch_or_acquire(i, v); -} - -static __always_inline int -raw_atomic_fetch_or_release(int i, atomic_t *v) -{ - return arch_atomic_fetch_or_release(i, v); -} - -static __always_inline int -raw_atomic_fetch_or_relaxed(int i, atomic_t *v) -{ - return arch_atomic_fetch_or_relaxed(i, v); -} - -static __always_inline void -raw_atomic_xor(int i, atomic_t *v) -{ - arch_atomic_xor(i, v); -} - -static __always_inline int -raw_atomic_fetch_xor(int i, atomic_t *v) -{ - return arch_atomic_fetch_xor(i, v); -} - -static __always_inline int -raw_atomic_fetch_xor_acquire(int i, atomic_t *v) -{ - return arch_atomic_fetch_xor_acquire(i, v); -} - -static __always_inline int -raw_atomic_fetch_xor_release(int i, atomic_t *v) -{ - return arch_atomic_fetch_xor_release(i, v); -} - -static __always_inline int -raw_atomic_fetch_xor_relaxed(int i, atomic_t *v) -{ - return arch_atomic_fetch_xor_relaxed(i, v); -} - -static __always_inline int -raw_atomic_xchg(atomic_t *v, int i) -{ - return arch_atomic_xchg(v, i); -} - -static __always_inline int -raw_atomic_xchg_acquire(atomic_t *v, int i) -{ - return arch_atomic_xchg_acquire(v, i); -} - -static __always_inline int -raw_atomic_xchg_release(atomic_t *v, int i) -{ - return arch_atomic_xchg_release(v, i); -} - -static __always_inline int -raw_atomic_xchg_relaxed(atomic_t *v, int i) -{ - return arch_atomic_xchg_relaxed(v, i); -} - -static __always_inline int 
-raw_atomic_cmpxchg(atomic_t *v, int old, int new) -{ - return arch_atomic_cmpxchg(v, old, new); -} - -static __always_inline int -raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new) -{ - return arch_atomic_cmpxchg_acquire(v, old, new); -} - -static __always_inline int -raw_atomic_cmpxchg_release(atomic_t *v, int old, int new) -{ - return arch_atomic_cmpxchg_release(v, old, new); -} - -static __always_inline int -raw_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new) -{ - return arch_atomic_cmpxchg_relaxed(v, old, new); -} - -static __always_inline bool -raw_atomic_try_cmpxchg(atomic_t *v, int *old, int new) -{ - return arch_atomic_try_cmpxchg(v, old, new); -} - -static __always_inline bool -raw_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) -{ - return arch_atomic_try_cmpxchg_acquire(v, old, new); -} - -static __always_inline bool -raw_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) -{ - return arch_atomic_try_cmpxchg_release(v, old, new); -} - -static __always_inline bool -raw_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new) -{ - return arch_atomic_try_cmpxchg_relaxed(v, old, new); -} - -static __always_inline bool -raw_atomic_sub_and_test(int i, atomic_t *v) -{ - return arch_atomic_sub_and_test(i, v); -} - -static __always_inline bool -raw_atomic_dec_and_test(atomic_t *v) -{ - return arch_atomic_dec_and_test(v); -} - -static __always_inline bool -raw_atomic_inc_and_test(atomic_t *v) -{ - return arch_atomic_inc_and_test(v); -} - -static __always_inline bool -raw_atomic_add_negative(int i, atomic_t *v) -{ - return arch_atomic_add_negative(i, v); -} - -static __always_inline bool -raw_atomic_add_negative_acquire(int i, atomic_t *v) -{ - return arch_atomic_add_negative_acquire(i, v); -} - -static __always_inline bool -raw_atomic_add_negative_release(int i, atomic_t *v) -{ - return arch_atomic_add_negative_release(i, v); -} - -static __always_inline bool -raw_atomic_add_negative_relaxed(int i, atomic_t *v) -{ - return arch_atomic_add_negative_relaxed(i, v); -} - -static __always_inline int -raw_atomic_fetch_add_unless(atomic_t *v, int a, int u) -{ - return arch_atomic_fetch_add_unless(v, a, u); -} - -static __always_inline bool -raw_atomic_add_unless(atomic_t *v, int a, int u) -{ - return arch_atomic_add_unless(v, a, u); -} - -static __always_inline bool -raw_atomic_inc_not_zero(atomic_t *v) -{ - return arch_atomic_inc_not_zero(v); -} - -static __always_inline bool -raw_atomic_inc_unless_negative(atomic_t *v) -{ - return arch_atomic_inc_unless_negative(v); -} - -static __always_inline bool -raw_atomic_dec_unless_positive(atomic_t *v) -{ - return arch_atomic_dec_unless_positive(v); -} - -static __always_inline int -raw_atomic_dec_if_positive(atomic_t *v) -{ - return arch_atomic_dec_if_positive(v); -} - -static __always_inline s64 -raw_atomic64_read(const atomic64_t *v) -{ - return arch_atomic64_read(v); -} - -static __always_inline s64 -raw_atomic64_read_acquire(const atomic64_t *v) -{ - return arch_atomic64_read_acquire(v); -} - -static __always_inline void -raw_atomic64_set(atomic64_t *v, s64 i) -{ - arch_atomic64_set(v, i); -} - -static __always_inline void -raw_atomic64_set_release(atomic64_t *v, s64 i) -{ - arch_atomic64_set_release(v, i); -} - -static __always_inline void -raw_atomic64_add(s64 i, atomic64_t *v) -{ - arch_atomic64_add(i, v); -} - -static __always_inline s64 -raw_atomic64_add_return(s64 i, atomic64_t *v) -{ - return arch_atomic64_add_return(i, v); -} - -static __always_inline s64 -raw_atomic64_add_return_acquire(s64 i, atomic64_t 
*v) -{ - return arch_atomic64_add_return_acquire(i, v); -} - -static __always_inline s64 -raw_atomic64_add_return_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_add_return_release(i, v); -} - -static __always_inline s64 -raw_atomic64_add_return_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_add_return_relaxed(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_add(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_add(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_add_acquire(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_add_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_add_release(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_add_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_add_relaxed(i, v); -} - -static __always_inline void -raw_atomic64_sub(s64 i, atomic64_t *v) -{ - arch_atomic64_sub(i, v); -} - -static __always_inline s64 -raw_atomic64_sub_return(s64 i, atomic64_t *v) -{ - return arch_atomic64_sub_return(i, v); -} - -static __always_inline s64 -raw_atomic64_sub_return_acquire(s64 i, atomic64_t *v) -{ - return arch_atomic64_sub_return_acquire(i, v); -} - -static __always_inline s64 -raw_atomic64_sub_return_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_sub_return_release(i, v); -} - -static __always_inline s64 -raw_atomic64_sub_return_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_sub_return_relaxed(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_sub(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_sub(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_sub_acquire(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_sub_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_sub_release(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_sub_relaxed(i, v); -} - -static __always_inline void -raw_atomic64_inc(atomic64_t *v) -{ - arch_atomic64_inc(v); -} - -static __always_inline s64 -raw_atomic64_inc_return(atomic64_t *v) -{ - return arch_atomic64_inc_return(v); -} - -static __always_inline s64 -raw_atomic64_inc_return_acquire(atomic64_t *v) -{ - return arch_atomic64_inc_return_acquire(v); -} - -static __always_inline s64 -raw_atomic64_inc_return_release(atomic64_t *v) -{ - return arch_atomic64_inc_return_release(v); -} - -static __always_inline s64 -raw_atomic64_inc_return_relaxed(atomic64_t *v) -{ - return arch_atomic64_inc_return_relaxed(v); -} - -static __always_inline s64 -raw_atomic64_fetch_inc(atomic64_t *v) -{ - return arch_atomic64_fetch_inc(v); -} - -static __always_inline s64 -raw_atomic64_fetch_inc_acquire(atomic64_t *v) -{ - return arch_atomic64_fetch_inc_acquire(v); -} - -static __always_inline s64 -raw_atomic64_fetch_inc_release(atomic64_t *v) -{ - return arch_atomic64_fetch_inc_release(v); -} - -static __always_inline s64 -raw_atomic64_fetch_inc_relaxed(atomic64_t *v) -{ - return arch_atomic64_fetch_inc_relaxed(v); -} - -static __always_inline void -raw_atomic64_dec(atomic64_t *v) -{ - arch_atomic64_dec(v); -} - -static __always_inline s64 -raw_atomic64_dec_return(atomic64_t *v) -{ - return arch_atomic64_dec_return(v); -} - -static __always_inline s64 -raw_atomic64_dec_return_acquire(atomic64_t *v) -{ - return arch_atomic64_dec_return_acquire(v); -} - -static __always_inline s64 
-raw_atomic64_dec_return_release(atomic64_t *v) -{ - return arch_atomic64_dec_return_release(v); -} - -static __always_inline s64 -raw_atomic64_dec_return_relaxed(atomic64_t *v) -{ - return arch_atomic64_dec_return_relaxed(v); -} - -static __always_inline s64 -raw_atomic64_fetch_dec(atomic64_t *v) -{ - return arch_atomic64_fetch_dec(v); -} - -static __always_inline s64 -raw_atomic64_fetch_dec_acquire(atomic64_t *v) -{ - return arch_atomic64_fetch_dec_acquire(v); -} - -static __always_inline s64 -raw_atomic64_fetch_dec_release(atomic64_t *v) -{ - return arch_atomic64_fetch_dec_release(v); -} - -static __always_inline s64 -raw_atomic64_fetch_dec_relaxed(atomic64_t *v) -{ - return arch_atomic64_fetch_dec_relaxed(v); -} - -static __always_inline void -raw_atomic64_and(s64 i, atomic64_t *v) -{ - arch_atomic64_and(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_and(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_and(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_and_acquire(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_and_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_and_release(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_and_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_and_relaxed(i, v); -} - -static __always_inline void -raw_atomic64_andnot(s64 i, atomic64_t *v) -{ - arch_atomic64_andnot(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_andnot(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_andnot_acquire(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_andnot_release(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_andnot_relaxed(i, v); -} - -static __always_inline void -raw_atomic64_or(s64 i, atomic64_t *v) -{ - arch_atomic64_or(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_or(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_or(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_or_acquire(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_or_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_or_release(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_or_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_or_relaxed(i, v); -} - -static __always_inline void -raw_atomic64_xor(s64 i, atomic64_t *v) -{ - arch_atomic64_xor(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_xor(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_xor(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_xor_acquire(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_xor_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_xor_release(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_fetch_xor_relaxed(i, v); -} - -static __always_inline s64 -raw_atomic64_xchg(atomic64_t *v, s64 i) -{ - return arch_atomic64_xchg(v, i); -} - -static __always_inline s64 -raw_atomic64_xchg_acquire(atomic64_t *v, s64 i) -{ - return arch_atomic64_xchg_acquire(v, i); -} - 
-static __always_inline s64 -raw_atomic64_xchg_release(atomic64_t *v, s64 i) -{ - return arch_atomic64_xchg_release(v, i); -} - -static __always_inline s64 -raw_atomic64_xchg_relaxed(atomic64_t *v, s64 i) -{ - return arch_atomic64_xchg_relaxed(v, i); -} - -static __always_inline s64 -raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) -{ - return arch_atomic64_cmpxchg(v, old, new); -} - -static __always_inline s64 -raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) -{ - return arch_atomic64_cmpxchg_acquire(v, old, new); -} - -static __always_inline s64 -raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) -{ - return arch_atomic64_cmpxchg_release(v, old, new); -} - -static __always_inline s64 -raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) -{ - return arch_atomic64_cmpxchg_relaxed(v, old, new); -} - -static __always_inline bool -raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) -{ - return arch_atomic64_try_cmpxchg(v, old, new); -} - -static __always_inline bool -raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) -{ - return arch_atomic64_try_cmpxchg_acquire(v, old, new); -} - -static __always_inline bool -raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) -{ - return arch_atomic64_try_cmpxchg_release(v, old, new); -} - -static __always_inline bool -raw_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) -{ - return arch_atomic64_try_cmpxchg_relaxed(v, old, new); -} - -static __always_inline bool -raw_atomic64_sub_and_test(s64 i, atomic64_t *v) -{ - return arch_atomic64_sub_and_test(i, v); -} - -static __always_inline bool -raw_atomic64_dec_and_test(atomic64_t *v) -{ - return arch_atomic64_dec_and_test(v); -} - -static __always_inline bool -raw_atomic64_inc_and_test(atomic64_t *v) -{ - return arch_atomic64_inc_and_test(v); -} - -static __always_inline bool -raw_atomic64_add_negative(s64 i, atomic64_t *v) -{ - return arch_atomic64_add_negative(i, v); -} - -static __always_inline bool -raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) -{ - return arch_atomic64_add_negative_acquire(i, v); -} - -static __always_inline bool -raw_atomic64_add_negative_release(s64 i, atomic64_t *v) -{ - return arch_atomic64_add_negative_release(i, v); -} - -static __always_inline bool -raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) -{ - return arch_atomic64_add_negative_relaxed(i, v); -} - -static __always_inline s64 -raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) -{ - return arch_atomic64_fetch_add_unless(v, a, u); -} - -static __always_inline bool -raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) -{ - return arch_atomic64_add_unless(v, a, u); -} - -static __always_inline bool -raw_atomic64_inc_not_zero(atomic64_t *v) -{ - return arch_atomic64_inc_not_zero(v); -} - -static __always_inline bool -raw_atomic64_inc_unless_negative(atomic64_t *v) -{ - return arch_atomic64_inc_unless_negative(v); -} - -static __always_inline bool -raw_atomic64_dec_unless_positive(atomic64_t *v) -{ - return arch_atomic64_dec_unless_positive(v); -} - -static __always_inline s64 -raw_atomic64_dec_if_positive(atomic64_t *v) -{ - return arch_atomic64_dec_if_positive(v); -} - -#define raw_xchg(...) \ - arch_xchg(__VA_ARGS__) - -#define raw_xchg_acquire(...) \ - arch_xchg_acquire(__VA_ARGS__) - -#define raw_xchg_release(...) \ - arch_xchg_release(__VA_ARGS__) - -#define raw_xchg_relaxed(...) \ - arch_xchg_relaxed(__VA_ARGS__) - -#define raw_cmpxchg(...) \ - arch_cmpxchg(__VA_ARGS__) - -#define raw_cmpxchg_acquire(...) 
\ - arch_cmpxchg_acquire(__VA_ARGS__) - -#define raw_cmpxchg_release(...) \ - arch_cmpxchg_release(__VA_ARGS__) - -#define raw_cmpxchg_relaxed(...) \ - arch_cmpxchg_relaxed(__VA_ARGS__) - -#define raw_cmpxchg64(...) \ - arch_cmpxchg64(__VA_ARGS__) - -#define raw_cmpxchg64_acquire(...) \ - arch_cmpxchg64_acquire(__VA_ARGS__) - -#define raw_cmpxchg64_release(...) \ - arch_cmpxchg64_release(__VA_ARGS__) - -#define raw_cmpxchg64_relaxed(...) \ - arch_cmpxchg64_relaxed(__VA_ARGS__) - -#define raw_cmpxchg128(...) \ - arch_cmpxchg128(__VA_ARGS__) - -#define raw_cmpxchg128_acquire(...) \ - arch_cmpxchg128_acquire(__VA_ARGS__) - -#define raw_cmpxchg128_release(...) \ - arch_cmpxchg128_release(__VA_ARGS__) - -#define raw_cmpxchg128_relaxed(...) \ - arch_cmpxchg128_relaxed(__VA_ARGS__) - -#define raw_try_cmpxchg(...) \ - arch_try_cmpxchg(__VA_ARGS__) - -#define raw_try_cmpxchg_acquire(...) \ - arch_try_cmpxchg_acquire(__VA_ARGS__) - -#define raw_try_cmpxchg_release(...) \ - arch_try_cmpxchg_release(__VA_ARGS__) - -#define raw_try_cmpxchg_relaxed(...) \ - arch_try_cmpxchg_relaxed(__VA_ARGS__) - -#define raw_try_cmpxchg64(...) \ - arch_try_cmpxchg64(__VA_ARGS__) - -#define raw_try_cmpxchg64_acquire(...) \ - arch_try_cmpxchg64_acquire(__VA_ARGS__) - -#define raw_try_cmpxchg64_release(...) \ - arch_try_cmpxchg64_release(__VA_ARGS__) - -#define raw_try_cmpxchg64_relaxed(...) \ - arch_try_cmpxchg64_relaxed(__VA_ARGS__) - -#define raw_try_cmpxchg128(...) \ - arch_try_cmpxchg128(__VA_ARGS__) - -#define raw_try_cmpxchg128_acquire(...) \ - arch_try_cmpxchg128_acquire(__VA_ARGS__) - -#define raw_try_cmpxchg128_release(...) \ - arch_try_cmpxchg128_release(__VA_ARGS__) - -#define raw_try_cmpxchg128_relaxed(...) \ - arch_try_cmpxchg128_relaxed(__VA_ARGS__) - -#define raw_cmpxchg_local(...) \ - arch_cmpxchg_local(__VA_ARGS__) - -#define raw_cmpxchg64_local(...) \ - arch_cmpxchg64_local(__VA_ARGS__) - -#define raw_cmpxchg128_local(...) \ - arch_cmpxchg128_local(__VA_ARGS__) - -#define raw_sync_cmpxchg(...) \ - arch_sync_cmpxchg(__VA_ARGS__) - -#define raw_try_cmpxchg_local(...) \ - arch_try_cmpxchg_local(__VA_ARGS__) - -#define raw_try_cmpxchg64_local(...) \ - arch_try_cmpxchg64_local(__VA_ARGS__) - -#define raw_try_cmpxchg128_local(...) 
\ - arch_try_cmpxchg128_local(__VA_ARGS__) - -#endif /* _LINUX_ATOMIC_RAW_H */ -// b23ed4424e85200e200ded094522e1d743b3a5b1 diff --git a/scripts/atomic/fallbacks/acquire b/scripts/atomic/fallbacks/acquire index ef764085c79a..b0f732a5c46e 100755 --- a/scripts/atomic/fallbacks/acquire +++ b/scripts/atomic/fallbacks/acquire @@ -1,6 +1,6 @@ cat <counter, old, new); + return raw_cmpxchg${order}(&v->counter, old, new); } EOF diff --git a/scripts/atomic/fallbacks/dec b/scripts/atomic/fallbacks/dec index 8c144c818e9e..a660ac65994b 100755 --- a/scripts/atomic/fallbacks/dec +++ b/scripts/atomic/fallbacks/dec @@ -1,7 +1,7 @@ cat < 0)) return false; - } while (!arch_${atomic}_try_cmpxchg(v, &c, c - 1)); + } while (!raw_${atomic}_try_cmpxchg(v, &c, c - 1)); return true; } diff --git a/scripts/atomic/fallbacks/fence b/scripts/atomic/fallbacks/fence index 07757d8e338e..067eea553f5e 100755 --- a/scripts/atomic/fallbacks/fence +++ b/scripts/atomic/fallbacks/fence @@ -1,6 +1,6 @@ cat <counter); } else { - ret = arch_${atomic}_read(v); + ret = raw_${atomic}_read(v); __atomic_acquire_fence(); } diff --git a/scripts/atomic/fallbacks/release b/scripts/atomic/fallbacks/release index b46feb56d69c..cbbff708129b 100755 --- a/scripts/atomic/fallbacks/release +++ b/scripts/atomic/fallbacks/release @@ -1,6 +1,6 @@ cat <counter, i); } else { __atomic_release_fence(); - arch_${atomic}_set(v, i); + raw_${atomic}_set(v, i); } } EOF diff --git a/scripts/atomic/fallbacks/sub_and_test b/scripts/atomic/fallbacks/sub_and_test index da8a049c9b02..8975a496d495 100755 --- a/scripts/atomic/fallbacks/sub_and_test +++ b/scripts/atomic/fallbacks/sub_and_test @@ -1,7 +1,7 @@ cat <counter, new); + return raw_xchg${order}(&v->counter, new); } EOF diff --git a/scripts/atomic/gen-atomic-fallback.sh b/scripts/atomic/gen-atomic-fallback.sh index 337330865fa2..86aca4f9f315 100755 --- a/scripts/atomic/gen-atomic-fallback.sh +++ b/scripts/atomic/gen-atomic-fallback.sh @@ -17,19 +17,12 @@ gen_template_fallback() local atomic="$1"; shift local int="$1"; shift - local atomicname="arch_${atomic}_${pfx}${name}${sfx}${order}" - local ret="$(gen_ret_type "${meta}" "${int}")" local retstmt="$(gen_ret_stmt "${meta}")" local params="$(gen_params "${int}" "${atomic}" "$@")" local args="$(gen_args "$@")" - if [ ! -z "${template}" ]; then - printf "#ifndef ${atomicname}\n" - . ${template} - printf "#define ${atomicname} ${atomicname}\n" - printf "#endif\n\n" - fi + . ${template} } #gen_order_fallback(meta, pfx, name, sfx, order, atomic, int, args...) @@ -59,69 +52,92 @@ gen_proto_fallback() gen_template_fallback "${tmpl}" "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "$@" } -#gen_basic_fallbacks(basename) -gen_basic_fallbacks() -{ - local basename="$1"; shift -cat << EOF -#define ${basename}_acquire ${basename} -#define ${basename}_release ${basename} -#define ${basename}_relaxed ${basename} -EOF -} - -#gen_proto_order_variants(meta, pfx, name, sfx, atomic, int, args...) -gen_proto_order_variants() +#gen_proto_order_variant(meta, pfx, name, sfx, order, atomic, int, args...) 
+gen_proto_order_variant() { local meta="$1"; shift local pfx="$1"; shift local name="$1"; shift local sfx="$1"; shift + local order="$1"; shift local atomic="$1" - local basename="arch_${atomic}_${pfx}${name}${sfx}" - - local template="$(find_fallback_template "${pfx}" "${name}" "${sfx}" "")" + local atomicname="${atomic}_${pfx}${name}${sfx}${order}" + local basename="${atomic}_${pfx}${name}${sfx}" - # If we don't have relaxed atomics, then we don't bother with ordering fallbacks - # read_acquire and set_release need to be templated, though - if ! meta_has_relaxed "${meta}"; then - gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "" "$@" + local template="$(find_fallback_template "${pfx}" "${name}" "${sfx}" "${order}")" - if meta_has_acquire "${meta}"; then - gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_acquire" "$@" - fi + # Where there is no possible fallback, this order variant is mandatory + # and must be provided by arch code. Add a comment to the header to + # make this obvious. + # + # Ideally we'd error on a missing definition, but arch code might + # define this order variant as a C function without a preprocessor + # symbol. + if [ -z ${template} ] && [ -z "${order}" ] && ! meta_has_relaxed "${meta}"; then + printf "#define raw_${atomicname} arch_${atomicname}\n\n" + return + fi - if meta_has_release "${meta}"; then - gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_release" "$@" - fi + printf "#if defined(arch_${atomicname})\n" + printf "#define raw_${atomicname} arch_${atomicname}\n" - return + # Allow FULL/ACQUIRE/RELEASE ops to be defined in terms of RELAXED ops + if [ "${order}" != "_relaxed" ] && meta_has_relaxed "${meta}"; then + printf "#elif defined(arch_${basename}_relaxed)\n" + gen_order_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "$@" fi - printf "#ifndef ${basename}_relaxed\n" + # Allow ACQUIRE/RELEASE/RELAXED ops to be defined in terms of FULL ops + if [ ! -z "${order}" ]; then + printf "#elif defined(arch_${basename})\n" + printf "#define raw_${atomicname} arch_${basename}\n" + fi + printf "#else\n" if [ ! -z "${template}" ]; then - printf "#ifdef ${basename}\n" + gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "$@" + else + printf "#error \"Unable to define raw_${atomicname}\"\n" fi - gen_basic_fallbacks "${basename}" + printf "#endif\n\n" +} - if [ ! -z "${template}" ]; then - printf "#endif /* ${basename} */\n\n" - gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "" "$@" - gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_acquire" "$@" - gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_release" "$@" - gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_relaxed" "$@" + +#gen_proto_order_variants(meta, pfx, name, sfx, atomic, int, args...) 
+gen_proto_order_variants() +{ + local meta="$1"; shift + local pfx="$1"; shift + local name="$1"; shift + local sfx="$1"; shift + local atomic="$1" + + gen_proto_order_variant "${meta}" "${pfx}" "${name}" "${sfx}" "" "$@" + + if meta_has_acquire "${meta}"; then + gen_proto_order_variant "${meta}" "${pfx}" "${name}" "${sfx}" "_acquire" "$@" fi - printf "#else /* ${basename}_relaxed */\n\n" + if meta_has_release "${meta}"; then + gen_proto_order_variant "${meta}" "${pfx}" "${name}" "${sfx}" "_release" "$@" fi - gen_order_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_acquire" "$@" - gen_order_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_release" "$@" - gen_order_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "" "$@" + if meta_has_relaxed "${meta}"; then + gen_proto_order_variant "${meta}" "${pfx}" "${name}" "${sfx}" "_relaxed" "$@" + fi +} - printf "#endif /* ${basename}_relaxed */\n\n" +#gen_basic_fallbacks(basename) +gen_basic_fallbacks() +{ + local basename="$1"; shift +cat << EOF +#define raw_${basename}_acquire arch_${basename} +#define raw_${basename}_release arch_${basename} +#define raw_${basename}_relaxed arch_${basename} +EOF } gen_order_fallbacks() @@ -130,36 +146,65 @@ gen_order_fallbacks() cat < ${LINUXDIR}/include/${header} -- cgit v1.2.3 From 1d78814d41701c216e28fcf2656526146dec4a1a Mon Sep 17 00:00:00 2001 From: Mark Rutland Date: Mon, 5 Jun 2023 08:01:20 +0100 Subject: locking/atomic: scripts: simplify raw_atomic*() definitions Currently each ordering variant has several potential definitions, with a mixture of preprocessor and C definitions, including several copies of its C prototype, e.g. | #if defined(arch_atomic_fetch_andnot_acquire) | #define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire | #elif defined(arch_atomic_fetch_andnot_relaxed) | static __always_inline int | raw_atomic_fetch_andnot_acquire(int i, atomic_t *v) | { | int ret = arch_atomic_fetch_andnot_relaxed(i, v); | __atomic_acquire_fence(); | return ret; | } | #elif defined(arch_atomic_fetch_andnot) | #define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot | #else | static __always_inline int | raw_atomic_fetch_andnot_acquire(int i, atomic_t *v) | { | return raw_atomic_fetch_and_acquire(~i, v); | } | #endif Make this a bit simpler by defining the C prototype once, and writing the various potential definitions as plain C code guarded by ifdeffery. For example, the above becomes: | static __always_inline int | raw_atomic_fetch_andnot_acquire(int i, atomic_t *v) | { | #if defined(arch_atomic_fetch_andnot_acquire) | return arch_atomic_fetch_andnot_acquire(i, v); | #elif defined(arch_atomic_fetch_andnot_relaxed) | int ret = arch_atomic_fetch_andnot_relaxed(i, v); | __atomic_acquire_fence(); | return ret; | #elif defined(arch_atomic_fetch_andnot) | return arch_atomic_fetch_andnot(i, v); | #else | return raw_atomic_fetch_and_acquire(~i, v); | #endif | } Which is far easier to read. As we now always have a single copy of the C prototype wrapping all the potential definitions, we now have an obvious single location for kerneldoc comments. At the same time, the fallbacks for raw_atomic*_xchg() are made to use 'new' rather than 'i' as the name of the new value. This is what the existing fallback template used, and is more consistent with the raw_atomic{_try,}cmpxchg() fallbacks. There should be no functional change as a result of this patch.
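To illustrate the kerneldoc point above: with a single prototype, a documentation block added by a later patch has exactly one place to live, directly above that prototype. The sketch below is illustrative only (its wording is an assumption, not something introduced by this patch), and the function body is elided since it appears in full above:

| /**
|  * raw_atomic_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
|  * @i: int value to complement and AND into @v (illustrative wording)
|  * @v: pointer to atomic_t
|  *
|  * Atomically updates @v to (@v & ~@i) with acquire ordering.
|  * Safe to use in noinstr code; elsewhere, prefer atomic_fetch_andnot_acquire().
|  *
|  * Return: the original value of @v.
|  */
| static __always_inline int
| raw_atomic_fetch_andnot_acquire(int i, atomic_t *v)
| {
| 	...
| }

Keeping the comment next to the prototype rather than next to any one branch of the ifdeffery means the documented ordering semantics hold regardless of which potential definition is selected at build time.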
Signed-off-by: Mark Rutland Signed-off-by: Peter Zijlstra (Intel) Reviewed-by: Kees Cook Link: https://lore.kernel.org/r/20230605070124.3741859-24-mark.rutland@arm.com --- include/linux/atomic/atomic-arch-fallback.h | 1790 ++++++++++++-------------- include/linux/atomic/atomic-instrumented.h | 50 +- include/linux/atomic/atomic-long.h | 26 +- scripts/atomic/atomics.tbl | 2 +- scripts/atomic/fallbacks/acquire | 4 - scripts/atomic/fallbacks/add_negative | 4 - scripts/atomic/fallbacks/add_unless | 4 - scripts/atomic/fallbacks/andnot | 4 - scripts/atomic/fallbacks/cmpxchg | 4 - scripts/atomic/fallbacks/dec | 4 - scripts/atomic/fallbacks/dec_and_test | 4 - scripts/atomic/fallbacks/dec_if_positive | 4 - scripts/atomic/fallbacks/dec_unless_positive | 4 - scripts/atomic/fallbacks/fence | 4 - scripts/atomic/fallbacks/fetch_add_unless | 4 - scripts/atomic/fallbacks/inc | 4 - scripts/atomic/fallbacks/inc_and_test | 4 - scripts/atomic/fallbacks/inc_not_zero | 4 - scripts/atomic/fallbacks/inc_unless_negative | 4 - scripts/atomic/fallbacks/read_acquire | 4 - scripts/atomic/fallbacks/release | 4 - scripts/atomic/fallbacks/set_release | 4 - scripts/atomic/fallbacks/sub_and_test | 4 - scripts/atomic/fallbacks/try_cmpxchg | 4 - scripts/atomic/fallbacks/xchg | 4 - scripts/atomic/gen-atomic-fallback.sh | 26 +- 26 files changed, 901 insertions(+), 1077 deletions(-) (limited to 'scripts/atomic/fallbacks/inc_and_test') diff --git a/include/linux/atomic/atomic-arch-fallback.h b/include/linux/atomic/atomic-arch-fallback.h index 99bc1a871dc1..470c2890ab8d 100644 --- a/include/linux/atomic/atomic-arch-fallback.h +++ b/include/linux/atomic/atomic-arch-fallback.h @@ -428,16 +428,20 @@ extern void raw_cmpxchg128_relaxed_not_implemented(void); #define raw_sync_cmpxchg arch_sync_cmpxchg -#define raw_atomic_read arch_atomic_read +static __always_inline int +raw_atomic_read(const atomic_t *v) +{ + return arch_atomic_read(v); +} -#if defined(arch_atomic_read_acquire) -#define raw_atomic_read_acquire arch_atomic_read_acquire -#elif defined(arch_atomic_read) -#define raw_atomic_read_acquire arch_atomic_read -#else static __always_inline int raw_atomic_read_acquire(const atomic_t *v) { +#if defined(arch_atomic_read_acquire) + return arch_atomic_read_acquire(v); +#elif defined(arch_atomic_read) + return arch_atomic_read(v); +#else int ret; if (__native_word(atomic_t)) { @@ -448,1144 +452,1088 @@ raw_atomic_read_acquire(const atomic_t *v) } return ret; -} #endif +} -#define raw_atomic_set arch_atomic_set +static __always_inline void +raw_atomic_set(atomic_t *v, int i) +{ + arch_atomic_set(v, i); +} -#if defined(arch_atomic_set_release) -#define raw_atomic_set_release arch_atomic_set_release -#elif defined(arch_atomic_set) -#define raw_atomic_set_release arch_atomic_set -#else static __always_inline void raw_atomic_set_release(atomic_t *v, int i) { +#if defined(arch_atomic_set_release) + arch_atomic_set_release(v, i); +#elif defined(arch_atomic_set) + arch_atomic_set(v, i); +#else if (__native_word(atomic_t)) { smp_store_release(&(v)->counter, i); } else { __atomic_release_fence(); raw_atomic_set(v, i); } -} #endif +} -#define raw_atomic_add arch_atomic_add +static __always_inline void +raw_atomic_add(int i, atomic_t *v) +{ + arch_atomic_add(i, v); +} -#if defined(arch_atomic_add_return) -#define raw_atomic_add_return arch_atomic_add_return -#elif defined(arch_atomic_add_return_relaxed) static __always_inline int raw_atomic_add_return(int i, atomic_t *v) { +#if defined(arch_atomic_add_return) + return arch_atomic_add_return(i, v); 
+#elif defined(arch_atomic_add_return_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_add_return_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic_add_return" #endif +} -#if defined(arch_atomic_add_return_acquire) -#define raw_atomic_add_return_acquire arch_atomic_add_return_acquire -#elif defined(arch_atomic_add_return_relaxed) static __always_inline int raw_atomic_add_return_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_add_return_acquire) + return arch_atomic_add_return_acquire(i, v); +#elif defined(arch_atomic_add_return_relaxed) int ret = arch_atomic_add_return_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_add_return) -#define raw_atomic_add_return_acquire arch_atomic_add_return + return arch_atomic_add_return(i, v); #else #error "Unable to define raw_atomic_add_return_acquire" #endif +} -#if defined(arch_atomic_add_return_release) -#define raw_atomic_add_return_release arch_atomic_add_return_release -#elif defined(arch_atomic_add_return_relaxed) static __always_inline int raw_atomic_add_return_release(int i, atomic_t *v) { +#if defined(arch_atomic_add_return_release) + return arch_atomic_add_return_release(i, v); +#elif defined(arch_atomic_add_return_relaxed) __atomic_release_fence(); return arch_atomic_add_return_relaxed(i, v); -} #elif defined(arch_atomic_add_return) -#define raw_atomic_add_return_release arch_atomic_add_return + return arch_atomic_add_return(i, v); #else #error "Unable to define raw_atomic_add_return_release" #endif +} +static __always_inline int +raw_atomic_add_return_relaxed(int i, atomic_t *v) +{ #if defined(arch_atomic_add_return_relaxed) -#define raw_atomic_add_return_relaxed arch_atomic_add_return_relaxed + return arch_atomic_add_return_relaxed(i, v); #elif defined(arch_atomic_add_return) -#define raw_atomic_add_return_relaxed arch_atomic_add_return + return arch_atomic_add_return(i, v); #else #error "Unable to define raw_atomic_add_return_relaxed" #endif +} -#if defined(arch_atomic_fetch_add) -#define raw_atomic_fetch_add arch_atomic_fetch_add -#elif defined(arch_atomic_fetch_add_relaxed) static __always_inline int raw_atomic_fetch_add(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_add) + return arch_atomic_fetch_add(i, v); +#elif defined(arch_atomic_fetch_add_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_fetch_add_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic_fetch_add" #endif +} -#if defined(arch_atomic_fetch_add_acquire) -#define raw_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire -#elif defined(arch_atomic_fetch_add_relaxed) static __always_inline int raw_atomic_fetch_add_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_add_acquire) + return arch_atomic_fetch_add_acquire(i, v); +#elif defined(arch_atomic_fetch_add_relaxed) int ret = arch_atomic_fetch_add_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_fetch_add) -#define raw_atomic_fetch_add_acquire arch_atomic_fetch_add + return arch_atomic_fetch_add(i, v); #else #error "Unable to define raw_atomic_fetch_add_acquire" #endif +} -#if defined(arch_atomic_fetch_add_release) -#define raw_atomic_fetch_add_release arch_atomic_fetch_add_release -#elif defined(arch_atomic_fetch_add_relaxed) static __always_inline int raw_atomic_fetch_add_release(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_add_release) + return arch_atomic_fetch_add_release(i, v); +#elif 
defined(arch_atomic_fetch_add_relaxed) __atomic_release_fence(); return arch_atomic_fetch_add_relaxed(i, v); -} #elif defined(arch_atomic_fetch_add) -#define raw_atomic_fetch_add_release arch_atomic_fetch_add + return arch_atomic_fetch_add(i, v); #else #error "Unable to define raw_atomic_fetch_add_release" #endif +} +static __always_inline int +raw_atomic_fetch_add_relaxed(int i, atomic_t *v) +{ #if defined(arch_atomic_fetch_add_relaxed) -#define raw_atomic_fetch_add_relaxed arch_atomic_fetch_add_relaxed + return arch_atomic_fetch_add_relaxed(i, v); #elif defined(arch_atomic_fetch_add) -#define raw_atomic_fetch_add_relaxed arch_atomic_fetch_add + return arch_atomic_fetch_add(i, v); #else #error "Unable to define raw_atomic_fetch_add_relaxed" #endif +} -#define raw_atomic_sub arch_atomic_sub +static __always_inline void +raw_atomic_sub(int i, atomic_t *v) +{ + arch_atomic_sub(i, v); +} -#if defined(arch_atomic_sub_return) -#define raw_atomic_sub_return arch_atomic_sub_return -#elif defined(arch_atomic_sub_return_relaxed) static __always_inline int raw_atomic_sub_return(int i, atomic_t *v) { +#if defined(arch_atomic_sub_return) + return arch_atomic_sub_return(i, v); +#elif defined(arch_atomic_sub_return_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_sub_return_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic_sub_return" #endif +} -#if defined(arch_atomic_sub_return_acquire) -#define raw_atomic_sub_return_acquire arch_atomic_sub_return_acquire -#elif defined(arch_atomic_sub_return_relaxed) static __always_inline int raw_atomic_sub_return_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_sub_return_acquire) + return arch_atomic_sub_return_acquire(i, v); +#elif defined(arch_atomic_sub_return_relaxed) int ret = arch_atomic_sub_return_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_sub_return) -#define raw_atomic_sub_return_acquire arch_atomic_sub_return + return arch_atomic_sub_return(i, v); #else #error "Unable to define raw_atomic_sub_return_acquire" #endif +} -#if defined(arch_atomic_sub_return_release) -#define raw_atomic_sub_return_release arch_atomic_sub_return_release -#elif defined(arch_atomic_sub_return_relaxed) static __always_inline int raw_atomic_sub_return_release(int i, atomic_t *v) { +#if defined(arch_atomic_sub_return_release) + return arch_atomic_sub_return_release(i, v); +#elif defined(arch_atomic_sub_return_relaxed) __atomic_release_fence(); return arch_atomic_sub_return_relaxed(i, v); -} #elif defined(arch_atomic_sub_return) -#define raw_atomic_sub_return_release arch_atomic_sub_return + return arch_atomic_sub_return(i, v); #else #error "Unable to define raw_atomic_sub_return_release" #endif +} +static __always_inline int +raw_atomic_sub_return_relaxed(int i, atomic_t *v) +{ #if defined(arch_atomic_sub_return_relaxed) -#define raw_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed + return arch_atomic_sub_return_relaxed(i, v); #elif defined(arch_atomic_sub_return) -#define raw_atomic_sub_return_relaxed arch_atomic_sub_return + return arch_atomic_sub_return(i, v); #else #error "Unable to define raw_atomic_sub_return_relaxed" #endif +} -#if defined(arch_atomic_fetch_sub) -#define raw_atomic_fetch_sub arch_atomic_fetch_sub -#elif defined(arch_atomic_fetch_sub_relaxed) static __always_inline int raw_atomic_fetch_sub(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_sub) + return arch_atomic_fetch_sub(i, v); +#elif defined(arch_atomic_fetch_sub_relaxed) int ret; 
__atomic_pre_full_fence(); ret = arch_atomic_fetch_sub_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic_fetch_sub" #endif +} -#if defined(arch_atomic_fetch_sub_acquire) -#define raw_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire -#elif defined(arch_atomic_fetch_sub_relaxed) static __always_inline int raw_atomic_fetch_sub_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_sub_acquire) + return arch_atomic_fetch_sub_acquire(i, v); +#elif defined(arch_atomic_fetch_sub_relaxed) int ret = arch_atomic_fetch_sub_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_fetch_sub) -#define raw_atomic_fetch_sub_acquire arch_atomic_fetch_sub + return arch_atomic_fetch_sub(i, v); #else #error "Unable to define raw_atomic_fetch_sub_acquire" #endif +} -#if defined(arch_atomic_fetch_sub_release) -#define raw_atomic_fetch_sub_release arch_atomic_fetch_sub_release -#elif defined(arch_atomic_fetch_sub_relaxed) static __always_inline int raw_atomic_fetch_sub_release(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_sub_release) + return arch_atomic_fetch_sub_release(i, v); +#elif defined(arch_atomic_fetch_sub_relaxed) __atomic_release_fence(); return arch_atomic_fetch_sub_relaxed(i, v); -} #elif defined(arch_atomic_fetch_sub) -#define raw_atomic_fetch_sub_release arch_atomic_fetch_sub + return arch_atomic_fetch_sub(i, v); #else #error "Unable to define raw_atomic_fetch_sub_release" #endif +} +static __always_inline int +raw_atomic_fetch_sub_relaxed(int i, atomic_t *v) +{ #if defined(arch_atomic_fetch_sub_relaxed) -#define raw_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed + return arch_atomic_fetch_sub_relaxed(i, v); #elif defined(arch_atomic_fetch_sub) -#define raw_atomic_fetch_sub_relaxed arch_atomic_fetch_sub + return arch_atomic_fetch_sub(i, v); #else #error "Unable to define raw_atomic_fetch_sub_relaxed" #endif +} -#if defined(arch_atomic_inc) -#define raw_atomic_inc arch_atomic_inc -#else static __always_inline void raw_atomic_inc(atomic_t *v) { +#if defined(arch_atomic_inc) + arch_atomic_inc(v); +#else raw_atomic_add(1, v); -} #endif +} -#if defined(arch_atomic_inc_return) -#define raw_atomic_inc_return arch_atomic_inc_return -#elif defined(arch_atomic_inc_return_relaxed) static __always_inline int raw_atomic_inc_return(atomic_t *v) { +#if defined(arch_atomic_inc_return) + return arch_atomic_inc_return(v); +#elif defined(arch_atomic_inc_return_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_inc_return_relaxed(v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline int -raw_atomic_inc_return(atomic_t *v) -{ return raw_atomic_add_return(1, v); -} #endif +} -#if defined(arch_atomic_inc_return_acquire) -#define raw_atomic_inc_return_acquire arch_atomic_inc_return_acquire -#elif defined(arch_atomic_inc_return_relaxed) static __always_inline int raw_atomic_inc_return_acquire(atomic_t *v) { +#if defined(arch_atomic_inc_return_acquire) + return arch_atomic_inc_return_acquire(v); +#elif defined(arch_atomic_inc_return_relaxed) int ret = arch_atomic_inc_return_relaxed(v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_inc_return) -#define raw_atomic_inc_return_acquire arch_atomic_inc_return + return arch_atomic_inc_return(v); #else -static __always_inline int -raw_atomic_inc_return_acquire(atomic_t *v) -{ return raw_atomic_add_return_acquire(1, v); -} #endif +} -#if defined(arch_atomic_inc_return_release) -#define raw_atomic_inc_return_release 
arch_atomic_inc_return_release -#elif defined(arch_atomic_inc_return_relaxed) static __always_inline int raw_atomic_inc_return_release(atomic_t *v) { +#if defined(arch_atomic_inc_return_release) + return arch_atomic_inc_return_release(v); +#elif defined(arch_atomic_inc_return_relaxed) __atomic_release_fence(); return arch_atomic_inc_return_relaxed(v); -} #elif defined(arch_atomic_inc_return) -#define raw_atomic_inc_return_release arch_atomic_inc_return + return arch_atomic_inc_return(v); #else -static __always_inline int -raw_atomic_inc_return_release(atomic_t *v) -{ return raw_atomic_add_return_release(1, v); -} #endif +} -#if defined(arch_atomic_inc_return_relaxed) -#define raw_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed -#elif defined(arch_atomic_inc_return) -#define raw_atomic_inc_return_relaxed arch_atomic_inc_return -#else static __always_inline int raw_atomic_inc_return_relaxed(atomic_t *v) { +#if defined(arch_atomic_inc_return_relaxed) + return arch_atomic_inc_return_relaxed(v); +#elif defined(arch_atomic_inc_return) + return arch_atomic_inc_return(v); +#else return raw_atomic_add_return_relaxed(1, v); -} #endif +} -#if defined(arch_atomic_fetch_inc) -#define raw_atomic_fetch_inc arch_atomic_fetch_inc -#elif defined(arch_atomic_fetch_inc_relaxed) static __always_inline int raw_atomic_fetch_inc(atomic_t *v) { +#if defined(arch_atomic_fetch_inc) + return arch_atomic_fetch_inc(v); +#elif defined(arch_atomic_fetch_inc_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_fetch_inc_relaxed(v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline int -raw_atomic_fetch_inc(atomic_t *v) -{ return raw_atomic_fetch_add(1, v); -} #endif +} -#if defined(arch_atomic_fetch_inc_acquire) -#define raw_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire -#elif defined(arch_atomic_fetch_inc_relaxed) static __always_inline int raw_atomic_fetch_inc_acquire(atomic_t *v) { +#if defined(arch_atomic_fetch_inc_acquire) + return arch_atomic_fetch_inc_acquire(v); +#elif defined(arch_atomic_fetch_inc_relaxed) int ret = arch_atomic_fetch_inc_relaxed(v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_fetch_inc) -#define raw_atomic_fetch_inc_acquire arch_atomic_fetch_inc + return arch_atomic_fetch_inc(v); #else -static __always_inline int -raw_atomic_fetch_inc_acquire(atomic_t *v) -{ return raw_atomic_fetch_add_acquire(1, v); -} #endif +} -#if defined(arch_atomic_fetch_inc_release) -#define raw_atomic_fetch_inc_release arch_atomic_fetch_inc_release -#elif defined(arch_atomic_fetch_inc_relaxed) static __always_inline int raw_atomic_fetch_inc_release(atomic_t *v) { +#if defined(arch_atomic_fetch_inc_release) + return arch_atomic_fetch_inc_release(v); +#elif defined(arch_atomic_fetch_inc_relaxed) __atomic_release_fence(); return arch_atomic_fetch_inc_relaxed(v); -} #elif defined(arch_atomic_fetch_inc) -#define raw_atomic_fetch_inc_release arch_atomic_fetch_inc + return arch_atomic_fetch_inc(v); #else -static __always_inline int -raw_atomic_fetch_inc_release(atomic_t *v) -{ return raw_atomic_fetch_add_release(1, v); -} #endif +} -#if defined(arch_atomic_fetch_inc_relaxed) -#define raw_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed -#elif defined(arch_atomic_fetch_inc) -#define raw_atomic_fetch_inc_relaxed arch_atomic_fetch_inc -#else static __always_inline int raw_atomic_fetch_inc_relaxed(atomic_t *v) { +#if defined(arch_atomic_fetch_inc_relaxed) + return arch_atomic_fetch_inc_relaxed(v); +#elif defined(arch_atomic_fetch_inc) + return 
arch_atomic_fetch_inc(v); +#else return raw_atomic_fetch_add_relaxed(1, v); -} #endif +} -#if defined(arch_atomic_dec) -#define raw_atomic_dec arch_atomic_dec -#else static __always_inline void raw_atomic_dec(atomic_t *v) { +#if defined(arch_atomic_dec) + arch_atomic_dec(v); +#else raw_atomic_sub(1, v); -} #endif +} -#if defined(arch_atomic_dec_return) -#define raw_atomic_dec_return arch_atomic_dec_return -#elif defined(arch_atomic_dec_return_relaxed) static __always_inline int raw_atomic_dec_return(atomic_t *v) { +#if defined(arch_atomic_dec_return) + return arch_atomic_dec_return(v); +#elif defined(arch_atomic_dec_return_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_dec_return_relaxed(v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline int -raw_atomic_dec_return(atomic_t *v) -{ return raw_atomic_sub_return(1, v); -} #endif +} -#if defined(arch_atomic_dec_return_acquire) -#define raw_atomic_dec_return_acquire arch_atomic_dec_return_acquire -#elif defined(arch_atomic_dec_return_relaxed) static __always_inline int raw_atomic_dec_return_acquire(atomic_t *v) { +#if defined(arch_atomic_dec_return_acquire) + return arch_atomic_dec_return_acquire(v); +#elif defined(arch_atomic_dec_return_relaxed) int ret = arch_atomic_dec_return_relaxed(v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_dec_return) -#define raw_atomic_dec_return_acquire arch_atomic_dec_return + return arch_atomic_dec_return(v); #else -static __always_inline int -raw_atomic_dec_return_acquire(atomic_t *v) -{ return raw_atomic_sub_return_acquire(1, v); -} #endif +} -#if defined(arch_atomic_dec_return_release) -#define raw_atomic_dec_return_release arch_atomic_dec_return_release -#elif defined(arch_atomic_dec_return_relaxed) static __always_inline int raw_atomic_dec_return_release(atomic_t *v) { +#if defined(arch_atomic_dec_return_release) + return arch_atomic_dec_return_release(v); +#elif defined(arch_atomic_dec_return_relaxed) __atomic_release_fence(); return arch_atomic_dec_return_relaxed(v); -} #elif defined(arch_atomic_dec_return) -#define raw_atomic_dec_return_release arch_atomic_dec_return + return arch_atomic_dec_return(v); #else -static __always_inline int -raw_atomic_dec_return_release(atomic_t *v) -{ return raw_atomic_sub_return_release(1, v); -} #endif +} -#if defined(arch_atomic_dec_return_relaxed) -#define raw_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed -#elif defined(arch_atomic_dec_return) -#define raw_atomic_dec_return_relaxed arch_atomic_dec_return -#else static __always_inline int raw_atomic_dec_return_relaxed(atomic_t *v) { +#if defined(arch_atomic_dec_return_relaxed) + return arch_atomic_dec_return_relaxed(v); +#elif defined(arch_atomic_dec_return) + return arch_atomic_dec_return(v); +#else return raw_atomic_sub_return_relaxed(1, v); -} #endif +} -#if defined(arch_atomic_fetch_dec) -#define raw_atomic_fetch_dec arch_atomic_fetch_dec -#elif defined(arch_atomic_fetch_dec_relaxed) static __always_inline int raw_atomic_fetch_dec(atomic_t *v) { +#if defined(arch_atomic_fetch_dec) + return arch_atomic_fetch_dec(v); +#elif defined(arch_atomic_fetch_dec_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_fetch_dec_relaxed(v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline int -raw_atomic_fetch_dec(atomic_t *v) -{ return raw_atomic_fetch_sub(1, v); -} #endif +} -#if defined(arch_atomic_fetch_dec_acquire) -#define raw_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire -#elif 
defined(arch_atomic_fetch_dec_relaxed) static __always_inline int raw_atomic_fetch_dec_acquire(atomic_t *v) { +#if defined(arch_atomic_fetch_dec_acquire) + return arch_atomic_fetch_dec_acquire(v); +#elif defined(arch_atomic_fetch_dec_relaxed) int ret = arch_atomic_fetch_dec_relaxed(v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_fetch_dec) -#define raw_atomic_fetch_dec_acquire arch_atomic_fetch_dec + return arch_atomic_fetch_dec(v); #else -static __always_inline int -raw_atomic_fetch_dec_acquire(atomic_t *v) -{ return raw_atomic_fetch_sub_acquire(1, v); -} #endif +} -#if defined(arch_atomic_fetch_dec_release) -#define raw_atomic_fetch_dec_release arch_atomic_fetch_dec_release -#elif defined(arch_atomic_fetch_dec_relaxed) static __always_inline int raw_atomic_fetch_dec_release(atomic_t *v) { +#if defined(arch_atomic_fetch_dec_release) + return arch_atomic_fetch_dec_release(v); +#elif defined(arch_atomic_fetch_dec_relaxed) __atomic_release_fence(); return arch_atomic_fetch_dec_relaxed(v); -} #elif defined(arch_atomic_fetch_dec) -#define raw_atomic_fetch_dec_release arch_atomic_fetch_dec + return arch_atomic_fetch_dec(v); #else -static __always_inline int -raw_atomic_fetch_dec_release(atomic_t *v) -{ return raw_atomic_fetch_sub_release(1, v); -} #endif +} -#if defined(arch_atomic_fetch_dec_relaxed) -#define raw_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed -#elif defined(arch_atomic_fetch_dec) -#define raw_atomic_fetch_dec_relaxed arch_atomic_fetch_dec -#else static __always_inline int raw_atomic_fetch_dec_relaxed(atomic_t *v) { +#if defined(arch_atomic_fetch_dec_relaxed) + return arch_atomic_fetch_dec_relaxed(v); +#elif defined(arch_atomic_fetch_dec) + return arch_atomic_fetch_dec(v); +#else return raw_atomic_fetch_sub_relaxed(1, v); -} #endif +} -#define raw_atomic_and arch_atomic_and +static __always_inline void +raw_atomic_and(int i, atomic_t *v) +{ + arch_atomic_and(i, v); +} -#if defined(arch_atomic_fetch_and) -#define raw_atomic_fetch_and arch_atomic_fetch_and -#elif defined(arch_atomic_fetch_and_relaxed) static __always_inline int raw_atomic_fetch_and(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_and) + return arch_atomic_fetch_and(i, v); +#elif defined(arch_atomic_fetch_and_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_fetch_and_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic_fetch_and" #endif +} -#if defined(arch_atomic_fetch_and_acquire) -#define raw_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire -#elif defined(arch_atomic_fetch_and_relaxed) static __always_inline int raw_atomic_fetch_and_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_and_acquire) + return arch_atomic_fetch_and_acquire(i, v); +#elif defined(arch_atomic_fetch_and_relaxed) int ret = arch_atomic_fetch_and_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_fetch_and) -#define raw_atomic_fetch_and_acquire arch_atomic_fetch_and + return arch_atomic_fetch_and(i, v); #else #error "Unable to define raw_atomic_fetch_and_acquire" #endif +} -#if defined(arch_atomic_fetch_and_release) -#define raw_atomic_fetch_and_release arch_atomic_fetch_and_release -#elif defined(arch_atomic_fetch_and_relaxed) static __always_inline int raw_atomic_fetch_and_release(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_and_release) + return arch_atomic_fetch_and_release(i, v); +#elif defined(arch_atomic_fetch_and_relaxed) __atomic_release_fence(); return 
arch_atomic_fetch_and_relaxed(i, v); -} #elif defined(arch_atomic_fetch_and) -#define raw_atomic_fetch_and_release arch_atomic_fetch_and + return arch_atomic_fetch_and(i, v); #else #error "Unable to define raw_atomic_fetch_and_release" #endif +} +static __always_inline int +raw_atomic_fetch_and_relaxed(int i, atomic_t *v) +{ #if defined(arch_atomic_fetch_and_relaxed) -#define raw_atomic_fetch_and_relaxed arch_atomic_fetch_and_relaxed + return arch_atomic_fetch_and_relaxed(i, v); #elif defined(arch_atomic_fetch_and) -#define raw_atomic_fetch_and_relaxed arch_atomic_fetch_and + return arch_atomic_fetch_and(i, v); #else #error "Unable to define raw_atomic_fetch_and_relaxed" #endif +} -#if defined(arch_atomic_andnot) -#define raw_atomic_andnot arch_atomic_andnot -#else static __always_inline void raw_atomic_andnot(int i, atomic_t *v) { +#if defined(arch_atomic_andnot) + arch_atomic_andnot(i, v); +#else raw_atomic_and(~i, v); -} #endif +} -#if defined(arch_atomic_fetch_andnot) -#define raw_atomic_fetch_andnot arch_atomic_fetch_andnot -#elif defined(arch_atomic_fetch_andnot_relaxed) static __always_inline int raw_atomic_fetch_andnot(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_andnot) + return arch_atomic_fetch_andnot(i, v); +#elif defined(arch_atomic_fetch_andnot_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_fetch_andnot_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline int -raw_atomic_fetch_andnot(int i, atomic_t *v) -{ return raw_atomic_fetch_and(~i, v); -} #endif +} -#if defined(arch_atomic_fetch_andnot_acquire) -#define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire -#elif defined(arch_atomic_fetch_andnot_relaxed) static __always_inline int raw_atomic_fetch_andnot_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_andnot_acquire) + return arch_atomic_fetch_andnot_acquire(i, v); +#elif defined(arch_atomic_fetch_andnot_relaxed) int ret = arch_atomic_fetch_andnot_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_fetch_andnot) -#define raw_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot + return arch_atomic_fetch_andnot(i, v); #else -static __always_inline int -raw_atomic_fetch_andnot_acquire(int i, atomic_t *v) -{ return raw_atomic_fetch_and_acquire(~i, v); -} #endif +} -#if defined(arch_atomic_fetch_andnot_release) -#define raw_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release -#elif defined(arch_atomic_fetch_andnot_relaxed) static __always_inline int raw_atomic_fetch_andnot_release(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_andnot_release) + return arch_atomic_fetch_andnot_release(i, v); +#elif defined(arch_atomic_fetch_andnot_relaxed) __atomic_release_fence(); return arch_atomic_fetch_andnot_relaxed(i, v); -} #elif defined(arch_atomic_fetch_andnot) -#define raw_atomic_fetch_andnot_release arch_atomic_fetch_andnot + return arch_atomic_fetch_andnot(i, v); #else -static __always_inline int -raw_atomic_fetch_andnot_release(int i, atomic_t *v) -{ return raw_atomic_fetch_and_release(~i, v); -} #endif +} -#if defined(arch_atomic_fetch_andnot_relaxed) -#define raw_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed -#elif defined(arch_atomic_fetch_andnot) -#define raw_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot -#else static __always_inline int raw_atomic_fetch_andnot_relaxed(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_andnot_relaxed) + return arch_atomic_fetch_andnot_relaxed(i, v); +#elif defined(arch_atomic_fetch_andnot) + 
return arch_atomic_fetch_andnot(i, v); +#else return raw_atomic_fetch_and_relaxed(~i, v); -} #endif +} -#define raw_atomic_or arch_atomic_or +static __always_inline void +raw_atomic_or(int i, atomic_t *v) +{ + arch_atomic_or(i, v); +} -#if defined(arch_atomic_fetch_or) -#define raw_atomic_fetch_or arch_atomic_fetch_or -#elif defined(arch_atomic_fetch_or_relaxed) static __always_inline int raw_atomic_fetch_or(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_or) + return arch_atomic_fetch_or(i, v); +#elif defined(arch_atomic_fetch_or_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_fetch_or_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic_fetch_or" #endif +} -#if defined(arch_atomic_fetch_or_acquire) -#define raw_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire -#elif defined(arch_atomic_fetch_or_relaxed) static __always_inline int raw_atomic_fetch_or_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_or_acquire) + return arch_atomic_fetch_or_acquire(i, v); +#elif defined(arch_atomic_fetch_or_relaxed) int ret = arch_atomic_fetch_or_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_fetch_or) -#define raw_atomic_fetch_or_acquire arch_atomic_fetch_or + return arch_atomic_fetch_or(i, v); #else #error "Unable to define raw_atomic_fetch_or_acquire" #endif +} -#if defined(arch_atomic_fetch_or_release) -#define raw_atomic_fetch_or_release arch_atomic_fetch_or_release -#elif defined(arch_atomic_fetch_or_relaxed) static __always_inline int raw_atomic_fetch_or_release(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_or_release) + return arch_atomic_fetch_or_release(i, v); +#elif defined(arch_atomic_fetch_or_relaxed) __atomic_release_fence(); return arch_atomic_fetch_or_relaxed(i, v); -} #elif defined(arch_atomic_fetch_or) -#define raw_atomic_fetch_or_release arch_atomic_fetch_or + return arch_atomic_fetch_or(i, v); #else #error "Unable to define raw_atomic_fetch_or_release" #endif +} +static __always_inline int +raw_atomic_fetch_or_relaxed(int i, atomic_t *v) +{ #if defined(arch_atomic_fetch_or_relaxed) -#define raw_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed + return arch_atomic_fetch_or_relaxed(i, v); #elif defined(arch_atomic_fetch_or) -#define raw_atomic_fetch_or_relaxed arch_atomic_fetch_or + return arch_atomic_fetch_or(i, v); #else #error "Unable to define raw_atomic_fetch_or_relaxed" #endif +} -#define raw_atomic_xor arch_atomic_xor +static __always_inline void +raw_atomic_xor(int i, atomic_t *v) +{ + arch_atomic_xor(i, v); +} -#if defined(arch_atomic_fetch_xor) -#define raw_atomic_fetch_xor arch_atomic_fetch_xor -#elif defined(arch_atomic_fetch_xor_relaxed) static __always_inline int raw_atomic_fetch_xor(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_xor) + return arch_atomic_fetch_xor(i, v); +#elif defined(arch_atomic_fetch_xor_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_fetch_xor_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic_fetch_xor" #endif +} -#if defined(arch_atomic_fetch_xor_acquire) -#define raw_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire -#elif defined(arch_atomic_fetch_xor_relaxed) static __always_inline int raw_atomic_fetch_xor_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_xor_acquire) + return arch_atomic_fetch_xor_acquire(i, v); +#elif defined(arch_atomic_fetch_xor_relaxed) int ret = arch_atomic_fetch_xor_relaxed(i, v); __atomic_acquire_fence(); return 
ret; -} #elif defined(arch_atomic_fetch_xor) -#define raw_atomic_fetch_xor_acquire arch_atomic_fetch_xor + return arch_atomic_fetch_xor(i, v); #else #error "Unable to define raw_atomic_fetch_xor_acquire" #endif +} -#if defined(arch_atomic_fetch_xor_release) -#define raw_atomic_fetch_xor_release arch_atomic_fetch_xor_release -#elif defined(arch_atomic_fetch_xor_relaxed) static __always_inline int raw_atomic_fetch_xor_release(int i, atomic_t *v) { +#if defined(arch_atomic_fetch_xor_release) + return arch_atomic_fetch_xor_release(i, v); +#elif defined(arch_atomic_fetch_xor_relaxed) __atomic_release_fence(); return arch_atomic_fetch_xor_relaxed(i, v); -} #elif defined(arch_atomic_fetch_xor) -#define raw_atomic_fetch_xor_release arch_atomic_fetch_xor + return arch_atomic_fetch_xor(i, v); #else #error "Unable to define raw_atomic_fetch_xor_release" #endif +} +static __always_inline int +raw_atomic_fetch_xor_relaxed(int i, atomic_t *v) +{ #if defined(arch_atomic_fetch_xor_relaxed) -#define raw_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed + return arch_atomic_fetch_xor_relaxed(i, v); #elif defined(arch_atomic_fetch_xor) -#define raw_atomic_fetch_xor_relaxed arch_atomic_fetch_xor + return arch_atomic_fetch_xor(i, v); #else #error "Unable to define raw_atomic_fetch_xor_relaxed" #endif +} -#if defined(arch_atomic_xchg) -#define raw_atomic_xchg arch_atomic_xchg -#elif defined(arch_atomic_xchg_relaxed) static __always_inline int -raw_atomic_xchg(atomic_t *v, int i) +raw_atomic_xchg(atomic_t *v, int new) { +#if defined(arch_atomic_xchg) + return arch_atomic_xchg(v, new); +#elif defined(arch_atomic_xchg_relaxed) int ret; __atomic_pre_full_fence(); - ret = arch_atomic_xchg_relaxed(v, i); + ret = arch_atomic_xchg_relaxed(v, new); __atomic_post_full_fence(); return ret; -} #else -static __always_inline int -raw_atomic_xchg(atomic_t *v, int new) -{ return raw_xchg(&v->counter, new); -} #endif +} -#if defined(arch_atomic_xchg_acquire) -#define raw_atomic_xchg_acquire arch_atomic_xchg_acquire -#elif defined(arch_atomic_xchg_relaxed) static __always_inline int -raw_atomic_xchg_acquire(atomic_t *v, int i) +raw_atomic_xchg_acquire(atomic_t *v, int new) { - int ret = arch_atomic_xchg_relaxed(v, i); +#if defined(arch_atomic_xchg_acquire) + return arch_atomic_xchg_acquire(v, new); +#elif defined(arch_atomic_xchg_relaxed) + int ret = arch_atomic_xchg_relaxed(v, new); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_xchg) -#define raw_atomic_xchg_acquire arch_atomic_xchg + return arch_atomic_xchg(v, new); #else -static __always_inline int -raw_atomic_xchg_acquire(atomic_t *v, int new) -{ return raw_xchg_acquire(&v->counter, new); -} #endif +} -#if defined(arch_atomic_xchg_release) -#define raw_atomic_xchg_release arch_atomic_xchg_release -#elif defined(arch_atomic_xchg_relaxed) static __always_inline int -raw_atomic_xchg_release(atomic_t *v, int i) +raw_atomic_xchg_release(atomic_t *v, int new) { +#if defined(arch_atomic_xchg_release) + return arch_atomic_xchg_release(v, new); +#elif defined(arch_atomic_xchg_relaxed) __atomic_release_fence(); - return arch_atomic_xchg_relaxed(v, i); -} + return arch_atomic_xchg_relaxed(v, new); #elif defined(arch_atomic_xchg) -#define raw_atomic_xchg_release arch_atomic_xchg + return arch_atomic_xchg(v, new); #else -static __always_inline int -raw_atomic_xchg_release(atomic_t *v, int new) -{ return raw_xchg_release(&v->counter, new); -} #endif +} -#if defined(arch_atomic_xchg_relaxed) -#define raw_atomic_xchg_relaxed arch_atomic_xchg_relaxed -#elif 
defined(arch_atomic_xchg) -#define raw_atomic_xchg_relaxed arch_atomic_xchg -#else static __always_inline int raw_atomic_xchg_relaxed(atomic_t *v, int new) { +#if defined(arch_atomic_xchg_relaxed) + return arch_atomic_xchg_relaxed(v, new); +#elif defined(arch_atomic_xchg) + return arch_atomic_xchg(v, new); +#else return raw_xchg_relaxed(&v->counter, new); -} #endif +} -#if defined(arch_atomic_cmpxchg) -#define raw_atomic_cmpxchg arch_atomic_cmpxchg -#elif defined(arch_atomic_cmpxchg_relaxed) static __always_inline int raw_atomic_cmpxchg(atomic_t *v, int old, int new) { +#if defined(arch_atomic_cmpxchg) + return arch_atomic_cmpxchg(v, old, new); +#elif defined(arch_atomic_cmpxchg_relaxed) int ret; __atomic_pre_full_fence(); ret = arch_atomic_cmpxchg_relaxed(v, old, new); __atomic_post_full_fence(); return ret; -} #else -static __always_inline int -raw_atomic_cmpxchg(atomic_t *v, int old, int new) -{ return raw_cmpxchg(&v->counter, old, new); -} #endif +} -#if defined(arch_atomic_cmpxchg_acquire) -#define raw_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire -#elif defined(arch_atomic_cmpxchg_relaxed) static __always_inline int raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new) { +#if defined(arch_atomic_cmpxchg_acquire) + return arch_atomic_cmpxchg_acquire(v, old, new); +#elif defined(arch_atomic_cmpxchg_relaxed) int ret = arch_atomic_cmpxchg_relaxed(v, old, new); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_cmpxchg) -#define raw_atomic_cmpxchg_acquire arch_atomic_cmpxchg + return arch_atomic_cmpxchg(v, old, new); #else -static __always_inline int -raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new) -{ return raw_cmpxchg_acquire(&v->counter, old, new); -} #endif +} -#if defined(arch_atomic_cmpxchg_release) -#define raw_atomic_cmpxchg_release arch_atomic_cmpxchg_release -#elif defined(arch_atomic_cmpxchg_relaxed) static __always_inline int raw_atomic_cmpxchg_release(atomic_t *v, int old, int new) { +#if defined(arch_atomic_cmpxchg_release) + return arch_atomic_cmpxchg_release(v, old, new); +#elif defined(arch_atomic_cmpxchg_relaxed) __atomic_release_fence(); return arch_atomic_cmpxchg_relaxed(v, old, new); -} #elif defined(arch_atomic_cmpxchg) -#define raw_atomic_cmpxchg_release arch_atomic_cmpxchg + return arch_atomic_cmpxchg(v, old, new); #else -static __always_inline int -raw_atomic_cmpxchg_release(atomic_t *v, int old, int new) -{ return raw_cmpxchg_release(&v->counter, old, new); -} #endif +} -#if defined(arch_atomic_cmpxchg_relaxed) -#define raw_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed -#elif defined(arch_atomic_cmpxchg) -#define raw_atomic_cmpxchg_relaxed arch_atomic_cmpxchg -#else static __always_inline int raw_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new) { +#if defined(arch_atomic_cmpxchg_relaxed) + return arch_atomic_cmpxchg_relaxed(v, old, new); +#elif defined(arch_atomic_cmpxchg) + return arch_atomic_cmpxchg(v, old, new); +#else return raw_cmpxchg_relaxed(&v->counter, old, new); -} #endif +} -#if defined(arch_atomic_try_cmpxchg) -#define raw_atomic_try_cmpxchg arch_atomic_try_cmpxchg -#elif defined(arch_atomic_try_cmpxchg_relaxed) static __always_inline bool raw_atomic_try_cmpxchg(atomic_t *v, int *old, int new) { +#if defined(arch_atomic_try_cmpxchg) + return arch_atomic_try_cmpxchg(v, old, new); +#elif defined(arch_atomic_try_cmpxchg_relaxed) bool ret; __atomic_pre_full_fence(); ret = arch_atomic_try_cmpxchg_relaxed(v, old, new); __atomic_post_full_fence(); return ret; -} #else -static __always_inline bool 
-raw_atomic_try_cmpxchg(atomic_t *v, int *old, int new) -{ int r, o = *old; r = raw_atomic_cmpxchg(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); -} #endif +} -#if defined(arch_atomic_try_cmpxchg_acquire) -#define raw_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire -#elif defined(arch_atomic_try_cmpxchg_relaxed) static __always_inline bool raw_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) { +#if defined(arch_atomic_try_cmpxchg_acquire) + return arch_atomic_try_cmpxchg_acquire(v, old, new); +#elif defined(arch_atomic_try_cmpxchg_relaxed) bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_try_cmpxchg) -#define raw_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg + return arch_atomic_try_cmpxchg(v, old, new); #else -static __always_inline bool -raw_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new) -{ int r, o = *old; r = raw_atomic_cmpxchg_acquire(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); -} #endif +} -#if defined(arch_atomic_try_cmpxchg_release) -#define raw_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release -#elif defined(arch_atomic_try_cmpxchg_relaxed) static __always_inline bool raw_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) { +#if defined(arch_atomic_try_cmpxchg_release) + return arch_atomic_try_cmpxchg_release(v, old, new); +#elif defined(arch_atomic_try_cmpxchg_relaxed) __atomic_release_fence(); return arch_atomic_try_cmpxchg_relaxed(v, old, new); -} #elif defined(arch_atomic_try_cmpxchg) -#define raw_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg + return arch_atomic_try_cmpxchg(v, old, new); #else -static __always_inline bool -raw_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new) -{ int r, o = *old; r = raw_atomic_cmpxchg_release(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); -} #endif +} -#if defined(arch_atomic_try_cmpxchg_relaxed) -#define raw_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed -#elif defined(arch_atomic_try_cmpxchg) -#define raw_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg -#else static __always_inline bool raw_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new) { +#if defined(arch_atomic_try_cmpxchg_relaxed) + return arch_atomic_try_cmpxchg_relaxed(v, old, new); +#elif defined(arch_atomic_try_cmpxchg) + return arch_atomic_try_cmpxchg(v, old, new); +#else int r, o = *old; r = raw_atomic_cmpxchg_relaxed(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); -} #endif +} -#if defined(arch_atomic_sub_and_test) -#define raw_atomic_sub_and_test arch_atomic_sub_and_test -#else static __always_inline bool raw_atomic_sub_and_test(int i, atomic_t *v) { +#if defined(arch_atomic_sub_and_test) + return arch_atomic_sub_and_test(i, v); +#else return raw_atomic_sub_return(i, v) == 0; -} #endif +} -#if defined(arch_atomic_dec_and_test) -#define raw_atomic_dec_and_test arch_atomic_dec_and_test -#else static __always_inline bool raw_atomic_dec_and_test(atomic_t *v) { +#if defined(arch_atomic_dec_and_test) + return arch_atomic_dec_and_test(v); +#else return raw_atomic_dec_return(v) == 0; -} #endif +} -#if defined(arch_atomic_inc_and_test) -#define raw_atomic_inc_and_test arch_atomic_inc_and_test -#else static __always_inline bool raw_atomic_inc_and_test(atomic_t *v) { +#if defined(arch_atomic_inc_and_test) + return arch_atomic_inc_and_test(v); +#else return raw_atomic_inc_return(v) == 0; -} #endif +} -#if defined(arch_atomic_add_negative) 
-#define raw_atomic_add_negative arch_atomic_add_negative -#elif defined(arch_atomic_add_negative_relaxed) static __always_inline bool raw_atomic_add_negative(int i, atomic_t *v) { +#if defined(arch_atomic_add_negative) + return arch_atomic_add_negative(i, v); +#elif defined(arch_atomic_add_negative_relaxed) bool ret; __atomic_pre_full_fence(); ret = arch_atomic_add_negative_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline bool -raw_atomic_add_negative(int i, atomic_t *v) -{ return raw_atomic_add_return(i, v) < 0; -} #endif +} -#if defined(arch_atomic_add_negative_acquire) -#define raw_atomic_add_negative_acquire arch_atomic_add_negative_acquire -#elif defined(arch_atomic_add_negative_relaxed) static __always_inline bool raw_atomic_add_negative_acquire(int i, atomic_t *v) { +#if defined(arch_atomic_add_negative_acquire) + return arch_atomic_add_negative_acquire(i, v); +#elif defined(arch_atomic_add_negative_relaxed) bool ret = arch_atomic_add_negative_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic_add_negative) -#define raw_atomic_add_negative_acquire arch_atomic_add_negative + return arch_atomic_add_negative(i, v); #else -static __always_inline bool -raw_atomic_add_negative_acquire(int i, atomic_t *v) -{ return raw_atomic_add_return_acquire(i, v) < 0; -} #endif +} -#if defined(arch_atomic_add_negative_release) -#define raw_atomic_add_negative_release arch_atomic_add_negative_release -#elif defined(arch_atomic_add_negative_relaxed) static __always_inline bool raw_atomic_add_negative_release(int i, atomic_t *v) { +#if defined(arch_atomic_add_negative_release) + return arch_atomic_add_negative_release(i, v); +#elif defined(arch_atomic_add_negative_relaxed) __atomic_release_fence(); return arch_atomic_add_negative_relaxed(i, v); -} #elif defined(arch_atomic_add_negative) -#define raw_atomic_add_negative_release arch_atomic_add_negative + return arch_atomic_add_negative(i, v); #else -static __always_inline bool -raw_atomic_add_negative_release(int i, atomic_t *v) -{ return raw_atomic_add_return_release(i, v) < 0; -} #endif +} -#if defined(arch_atomic_add_negative_relaxed) -#define raw_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed -#elif defined(arch_atomic_add_negative) -#define raw_atomic_add_negative_relaxed arch_atomic_add_negative -#else static __always_inline bool raw_atomic_add_negative_relaxed(int i, atomic_t *v) { +#if defined(arch_atomic_add_negative_relaxed) + return arch_atomic_add_negative_relaxed(i, v); +#elif defined(arch_atomic_add_negative) + return arch_atomic_add_negative(i, v); +#else return raw_atomic_add_return_relaxed(i, v) < 0; -} #endif +} -#if defined(arch_atomic_fetch_add_unless) -#define raw_atomic_fetch_add_unless arch_atomic_fetch_add_unless -#else static __always_inline int raw_atomic_fetch_add_unless(atomic_t *v, int a, int u) { +#if defined(arch_atomic_fetch_add_unless) + return arch_atomic_fetch_add_unless(v, a, u); +#else int c = raw_atomic_read(v); do { @@ -1594,35 +1542,35 @@ raw_atomic_fetch_add_unless(atomic_t *v, int a, int u) } while (!raw_atomic_try_cmpxchg(v, &c, c + a)); return c; -} #endif +} -#if defined(arch_atomic_add_unless) -#define raw_atomic_add_unless arch_atomic_add_unless -#else static __always_inline bool raw_atomic_add_unless(atomic_t *v, int a, int u) { +#if defined(arch_atomic_add_unless) + return arch_atomic_add_unless(v, a, u); +#else return raw_atomic_fetch_add_unless(v, a, u) != u; -} #endif +} -#if defined(arch_atomic_inc_not_zero) -#define 
raw_atomic_inc_not_zero arch_atomic_inc_not_zero -#else static __always_inline bool raw_atomic_inc_not_zero(atomic_t *v) { +#if defined(arch_atomic_inc_not_zero) + return arch_atomic_inc_not_zero(v); +#else return raw_atomic_add_unless(v, 1, 0); -} #endif +} -#if defined(arch_atomic_inc_unless_negative) -#define raw_atomic_inc_unless_negative arch_atomic_inc_unless_negative -#else static __always_inline bool raw_atomic_inc_unless_negative(atomic_t *v) { +#if defined(arch_atomic_inc_unless_negative) + return arch_atomic_inc_unless_negative(v); +#else int c = raw_atomic_read(v); do { @@ -1631,15 +1579,15 @@ raw_atomic_inc_unless_negative(atomic_t *v) } while (!raw_atomic_try_cmpxchg(v, &c, c + 1)); return true; -} #endif +} -#if defined(arch_atomic_dec_unless_positive) -#define raw_atomic_dec_unless_positive arch_atomic_dec_unless_positive -#else static __always_inline bool raw_atomic_dec_unless_positive(atomic_t *v) { +#if defined(arch_atomic_dec_unless_positive) + return arch_atomic_dec_unless_positive(v); +#else int c = raw_atomic_read(v); do { @@ -1648,15 +1596,15 @@ raw_atomic_dec_unless_positive(atomic_t *v) } while (!raw_atomic_try_cmpxchg(v, &c, c - 1)); return true; -} #endif +} -#if defined(arch_atomic_dec_if_positive) -#define raw_atomic_dec_if_positive arch_atomic_dec_if_positive -#else static __always_inline int raw_atomic_dec_if_positive(atomic_t *v) { +#if defined(arch_atomic_dec_if_positive) + return arch_atomic_dec_if_positive(v); +#else int dec, c = raw_atomic_read(v); do { @@ -1666,23 +1614,27 @@ raw_atomic_dec_if_positive(atomic_t *v) } while (!raw_atomic_try_cmpxchg(v, &c, dec)); return dec; -} #endif +} #ifdef CONFIG_GENERIC_ATOMIC64 #include #endif -#define raw_atomic64_read arch_atomic64_read +static __always_inline s64 +raw_atomic64_read(const atomic64_t *v) +{ + return arch_atomic64_read(v); +} -#if defined(arch_atomic64_read_acquire) -#define raw_atomic64_read_acquire arch_atomic64_read_acquire -#elif defined(arch_atomic64_read) -#define raw_atomic64_read_acquire arch_atomic64_read -#else static __always_inline s64 raw_atomic64_read_acquire(const atomic64_t *v) { +#if defined(arch_atomic64_read_acquire) + return arch_atomic64_read_acquire(v); +#elif defined(arch_atomic64_read) + return arch_atomic64_read(v); +#else s64 ret; if (__native_word(atomic64_t)) { @@ -1693,1144 +1645,1088 @@ raw_atomic64_read_acquire(const atomic64_t *v) } return ret; -} #endif +} -#define raw_atomic64_set arch_atomic64_set +static __always_inline void +raw_atomic64_set(atomic64_t *v, s64 i) +{ + arch_atomic64_set(v, i); +} -#if defined(arch_atomic64_set_release) -#define raw_atomic64_set_release arch_atomic64_set_release -#elif defined(arch_atomic64_set) -#define raw_atomic64_set_release arch_atomic64_set -#else static __always_inline void raw_atomic64_set_release(atomic64_t *v, s64 i) { +#if defined(arch_atomic64_set_release) + arch_atomic64_set_release(v, i); +#elif defined(arch_atomic64_set) + arch_atomic64_set(v, i); +#else if (__native_word(atomic64_t)) { smp_store_release(&(v)->counter, i); } else { __atomic_release_fence(); raw_atomic64_set(v, i); } -} #endif +} -#define raw_atomic64_add arch_atomic64_add +static __always_inline void +raw_atomic64_add(s64 i, atomic64_t *v) +{ + arch_atomic64_add(i, v); +} -#if defined(arch_atomic64_add_return) -#define raw_atomic64_add_return arch_atomic64_add_return -#elif defined(arch_atomic64_add_return_relaxed) static __always_inline s64 raw_atomic64_add_return(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_add_return) + return 
arch_atomic64_add_return(i, v); +#elif defined(arch_atomic64_add_return_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_add_return_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic64_add_return" #endif +} -#if defined(arch_atomic64_add_return_acquire) -#define raw_atomic64_add_return_acquire arch_atomic64_add_return_acquire -#elif defined(arch_atomic64_add_return_relaxed) static __always_inline s64 raw_atomic64_add_return_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_add_return_acquire) + return arch_atomic64_add_return_acquire(i, v); +#elif defined(arch_atomic64_add_return_relaxed) s64 ret = arch_atomic64_add_return_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_add_return) -#define raw_atomic64_add_return_acquire arch_atomic64_add_return + return arch_atomic64_add_return(i, v); #else #error "Unable to define raw_atomic64_add_return_acquire" #endif +} -#if defined(arch_atomic64_add_return_release) -#define raw_atomic64_add_return_release arch_atomic64_add_return_release -#elif defined(arch_atomic64_add_return_relaxed) static __always_inline s64 raw_atomic64_add_return_release(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_add_return_release) + return arch_atomic64_add_return_release(i, v); +#elif defined(arch_atomic64_add_return_relaxed) __atomic_release_fence(); return arch_atomic64_add_return_relaxed(i, v); -} #elif defined(arch_atomic64_add_return) -#define raw_atomic64_add_return_release arch_atomic64_add_return + return arch_atomic64_add_return(i, v); #else #error "Unable to define raw_atomic64_add_return_release" #endif +} +static __always_inline s64 +raw_atomic64_add_return_relaxed(s64 i, atomic64_t *v) +{ #if defined(arch_atomic64_add_return_relaxed) -#define raw_atomic64_add_return_relaxed arch_atomic64_add_return_relaxed + return arch_atomic64_add_return_relaxed(i, v); #elif defined(arch_atomic64_add_return) -#define raw_atomic64_add_return_relaxed arch_atomic64_add_return + return arch_atomic64_add_return(i, v); #else #error "Unable to define raw_atomic64_add_return_relaxed" #endif +} -#if defined(arch_atomic64_fetch_add) -#define raw_atomic64_fetch_add arch_atomic64_fetch_add -#elif defined(arch_atomic64_fetch_add_relaxed) static __always_inline s64 raw_atomic64_fetch_add(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_add) + return arch_atomic64_fetch_add(i, v); +#elif defined(arch_atomic64_fetch_add_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_fetch_add_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic64_fetch_add" #endif +} -#if defined(arch_atomic64_fetch_add_acquire) -#define raw_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire -#elif defined(arch_atomic64_fetch_add_relaxed) static __always_inline s64 raw_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_add_acquire) + return arch_atomic64_fetch_add_acquire(i, v); +#elif defined(arch_atomic64_fetch_add_relaxed) s64 ret = arch_atomic64_fetch_add_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_fetch_add) -#define raw_atomic64_fetch_add_acquire arch_atomic64_fetch_add + return arch_atomic64_fetch_add(i, v); #else #error "Unable to define raw_atomic64_fetch_add_acquire" #endif +} -#if defined(arch_atomic64_fetch_add_release) -#define raw_atomic64_fetch_add_release arch_atomic64_fetch_add_release -#elif defined(arch_atomic64_fetch_add_relaxed) static 
__always_inline s64 raw_atomic64_fetch_add_release(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_add_release) + return arch_atomic64_fetch_add_release(i, v); +#elif defined(arch_atomic64_fetch_add_relaxed) __atomic_release_fence(); return arch_atomic64_fetch_add_relaxed(i, v); -} #elif defined(arch_atomic64_fetch_add) -#define raw_atomic64_fetch_add_release arch_atomic64_fetch_add + return arch_atomic64_fetch_add(i, v); #else #error "Unable to define raw_atomic64_fetch_add_release" #endif +} +static __always_inline s64 +raw_atomic64_fetch_add_relaxed(s64 i, atomic64_t *v) +{ #if defined(arch_atomic64_fetch_add_relaxed) -#define raw_atomic64_fetch_add_relaxed arch_atomic64_fetch_add_relaxed + return arch_atomic64_fetch_add_relaxed(i, v); #elif defined(arch_atomic64_fetch_add) -#define raw_atomic64_fetch_add_relaxed arch_atomic64_fetch_add + return arch_atomic64_fetch_add(i, v); #else #error "Unable to define raw_atomic64_fetch_add_relaxed" #endif +} -#define raw_atomic64_sub arch_atomic64_sub +static __always_inline void +raw_atomic64_sub(s64 i, atomic64_t *v) +{ + arch_atomic64_sub(i, v); +} -#if defined(arch_atomic64_sub_return) -#define raw_atomic64_sub_return arch_atomic64_sub_return -#elif defined(arch_atomic64_sub_return_relaxed) static __always_inline s64 raw_atomic64_sub_return(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_sub_return) + return arch_atomic64_sub_return(i, v); +#elif defined(arch_atomic64_sub_return_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_sub_return_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic64_sub_return" #endif +} -#if defined(arch_atomic64_sub_return_acquire) -#define raw_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire -#elif defined(arch_atomic64_sub_return_relaxed) static __always_inline s64 raw_atomic64_sub_return_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_sub_return_acquire) + return arch_atomic64_sub_return_acquire(i, v); +#elif defined(arch_atomic64_sub_return_relaxed) s64 ret = arch_atomic64_sub_return_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_sub_return) -#define raw_atomic64_sub_return_acquire arch_atomic64_sub_return + return arch_atomic64_sub_return(i, v); #else #error "Unable to define raw_atomic64_sub_return_acquire" #endif +} -#if defined(arch_atomic64_sub_return_release) -#define raw_atomic64_sub_return_release arch_atomic64_sub_return_release -#elif defined(arch_atomic64_sub_return_relaxed) static __always_inline s64 raw_atomic64_sub_return_release(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_sub_return_release) + return arch_atomic64_sub_return_release(i, v); +#elif defined(arch_atomic64_sub_return_relaxed) __atomic_release_fence(); return arch_atomic64_sub_return_relaxed(i, v); -} #elif defined(arch_atomic64_sub_return) -#define raw_atomic64_sub_return_release arch_atomic64_sub_return + return arch_atomic64_sub_return(i, v); #else #error "Unable to define raw_atomic64_sub_return_release" #endif +} +static __always_inline s64 +raw_atomic64_sub_return_relaxed(s64 i, atomic64_t *v) +{ #if defined(arch_atomic64_sub_return_relaxed) -#define raw_atomic64_sub_return_relaxed arch_atomic64_sub_return_relaxed + return arch_atomic64_sub_return_relaxed(i, v); #elif defined(arch_atomic64_sub_return) -#define raw_atomic64_sub_return_relaxed arch_atomic64_sub_return + return arch_atomic64_sub_return(i, v); #else #error "Unable to define raw_atomic64_sub_return_relaxed" #endif +} -#if 
defined(arch_atomic64_fetch_sub) -#define raw_atomic64_fetch_sub arch_atomic64_fetch_sub -#elif defined(arch_atomic64_fetch_sub_relaxed) static __always_inline s64 raw_atomic64_fetch_sub(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_sub) + return arch_atomic64_fetch_sub(i, v); +#elif defined(arch_atomic64_fetch_sub_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_fetch_sub_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic64_fetch_sub" #endif +} -#if defined(arch_atomic64_fetch_sub_acquire) -#define raw_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire -#elif defined(arch_atomic64_fetch_sub_relaxed) static __always_inline s64 raw_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_sub_acquire) + return arch_atomic64_fetch_sub_acquire(i, v); +#elif defined(arch_atomic64_fetch_sub_relaxed) s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_fetch_sub) -#define raw_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub + return arch_atomic64_fetch_sub(i, v); #else #error "Unable to define raw_atomic64_fetch_sub_acquire" #endif +} -#if defined(arch_atomic64_fetch_sub_release) -#define raw_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release -#elif defined(arch_atomic64_fetch_sub_relaxed) static __always_inline s64 raw_atomic64_fetch_sub_release(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_sub_release) + return arch_atomic64_fetch_sub_release(i, v); +#elif defined(arch_atomic64_fetch_sub_relaxed) __atomic_release_fence(); return arch_atomic64_fetch_sub_relaxed(i, v); -} #elif defined(arch_atomic64_fetch_sub) -#define raw_atomic64_fetch_sub_release arch_atomic64_fetch_sub + return arch_atomic64_fetch_sub(i, v); #else #error "Unable to define raw_atomic64_fetch_sub_release" #endif +} +static __always_inline s64 +raw_atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v) +{ #if defined(arch_atomic64_fetch_sub_relaxed) -#define raw_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub_relaxed + return arch_atomic64_fetch_sub_relaxed(i, v); #elif defined(arch_atomic64_fetch_sub) -#define raw_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub + return arch_atomic64_fetch_sub(i, v); #else #error "Unable to define raw_atomic64_fetch_sub_relaxed" #endif +} -#if defined(arch_atomic64_inc) -#define raw_atomic64_inc arch_atomic64_inc -#else static __always_inline void raw_atomic64_inc(atomic64_t *v) { +#if defined(arch_atomic64_inc) + arch_atomic64_inc(v); +#else raw_atomic64_add(1, v); -} #endif +} -#if defined(arch_atomic64_inc_return) -#define raw_atomic64_inc_return arch_atomic64_inc_return -#elif defined(arch_atomic64_inc_return_relaxed) static __always_inline s64 raw_atomic64_inc_return(atomic64_t *v) { +#if defined(arch_atomic64_inc_return) + return arch_atomic64_inc_return(v); +#elif defined(arch_atomic64_inc_return_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_inc_return_relaxed(v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline s64 -raw_atomic64_inc_return(atomic64_t *v) -{ return raw_atomic64_add_return(1, v); -} #endif +} -#if defined(arch_atomic64_inc_return_acquire) -#define raw_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire -#elif defined(arch_atomic64_inc_return_relaxed) static __always_inline s64 raw_atomic64_inc_return_acquire(atomic64_t *v) { +#if defined(arch_atomic64_inc_return_acquire) + return arch_atomic64_inc_return_acquire(v); +#elif 
defined(arch_atomic64_inc_return_relaxed) s64 ret = arch_atomic64_inc_return_relaxed(v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_inc_return) -#define raw_atomic64_inc_return_acquire arch_atomic64_inc_return + return arch_atomic64_inc_return(v); #else -static __always_inline s64 -raw_atomic64_inc_return_acquire(atomic64_t *v) -{ return raw_atomic64_add_return_acquire(1, v); -} #endif +} -#if defined(arch_atomic64_inc_return_release) -#define raw_atomic64_inc_return_release arch_atomic64_inc_return_release -#elif defined(arch_atomic64_inc_return_relaxed) static __always_inline s64 raw_atomic64_inc_return_release(atomic64_t *v) { +#if defined(arch_atomic64_inc_return_release) + return arch_atomic64_inc_return_release(v); +#elif defined(arch_atomic64_inc_return_relaxed) __atomic_release_fence(); return arch_atomic64_inc_return_relaxed(v); -} #elif defined(arch_atomic64_inc_return) -#define raw_atomic64_inc_return_release arch_atomic64_inc_return + return arch_atomic64_inc_return(v); #else -static __always_inline s64 -raw_atomic64_inc_return_release(atomic64_t *v) -{ return raw_atomic64_add_return_release(1, v); -} #endif +} -#if defined(arch_atomic64_inc_return_relaxed) -#define raw_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed -#elif defined(arch_atomic64_inc_return) -#define raw_atomic64_inc_return_relaxed arch_atomic64_inc_return -#else static __always_inline s64 raw_atomic64_inc_return_relaxed(atomic64_t *v) { +#if defined(arch_atomic64_inc_return_relaxed) + return arch_atomic64_inc_return_relaxed(v); +#elif defined(arch_atomic64_inc_return) + return arch_atomic64_inc_return(v); +#else return raw_atomic64_add_return_relaxed(1, v); -} #endif +} -#if defined(arch_atomic64_fetch_inc) -#define raw_atomic64_fetch_inc arch_atomic64_fetch_inc -#elif defined(arch_atomic64_fetch_inc_relaxed) static __always_inline s64 raw_atomic64_fetch_inc(atomic64_t *v) { +#if defined(arch_atomic64_fetch_inc) + return arch_atomic64_fetch_inc(v); +#elif defined(arch_atomic64_fetch_inc_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_fetch_inc_relaxed(v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline s64 -raw_atomic64_fetch_inc(atomic64_t *v) -{ return raw_atomic64_fetch_add(1, v); -} #endif +} -#if defined(arch_atomic64_fetch_inc_acquire) -#define raw_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire -#elif defined(arch_atomic64_fetch_inc_relaxed) static __always_inline s64 raw_atomic64_fetch_inc_acquire(atomic64_t *v) { +#if defined(arch_atomic64_fetch_inc_acquire) + return arch_atomic64_fetch_inc_acquire(v); +#elif defined(arch_atomic64_fetch_inc_relaxed) s64 ret = arch_atomic64_fetch_inc_relaxed(v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_fetch_inc) -#define raw_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc + return arch_atomic64_fetch_inc(v); #else -static __always_inline s64 -raw_atomic64_fetch_inc_acquire(atomic64_t *v) -{ return raw_atomic64_fetch_add_acquire(1, v); -} #endif +} -#if defined(arch_atomic64_fetch_inc_release) -#define raw_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release -#elif defined(arch_atomic64_fetch_inc_relaxed) static __always_inline s64 raw_atomic64_fetch_inc_release(atomic64_t *v) { +#if defined(arch_atomic64_fetch_inc_release) + return arch_atomic64_fetch_inc_release(v); +#elif defined(arch_atomic64_fetch_inc_relaxed) __atomic_release_fence(); return arch_atomic64_fetch_inc_relaxed(v); -} #elif defined(arch_atomic64_fetch_inc) -#define 
raw_atomic64_fetch_inc_release arch_atomic64_fetch_inc + return arch_atomic64_fetch_inc(v); #else -static __always_inline s64 -raw_atomic64_fetch_inc_release(atomic64_t *v) -{ return raw_atomic64_fetch_add_release(1, v); -} #endif +} -#if defined(arch_atomic64_fetch_inc_relaxed) -#define raw_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed -#elif defined(arch_atomic64_fetch_inc) -#define raw_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc -#else static __always_inline s64 raw_atomic64_fetch_inc_relaxed(atomic64_t *v) { +#if defined(arch_atomic64_fetch_inc_relaxed) + return arch_atomic64_fetch_inc_relaxed(v); +#elif defined(arch_atomic64_fetch_inc) + return arch_atomic64_fetch_inc(v); +#else return raw_atomic64_fetch_add_relaxed(1, v); -} #endif +} -#if defined(arch_atomic64_dec) -#define raw_atomic64_dec arch_atomic64_dec -#else static __always_inline void raw_atomic64_dec(atomic64_t *v) { +#if defined(arch_atomic64_dec) + arch_atomic64_dec(v); +#else raw_atomic64_sub(1, v); -} #endif +} -#if defined(arch_atomic64_dec_return) -#define raw_atomic64_dec_return arch_atomic64_dec_return -#elif defined(arch_atomic64_dec_return_relaxed) static __always_inline s64 raw_atomic64_dec_return(atomic64_t *v) { +#if defined(arch_atomic64_dec_return) + return arch_atomic64_dec_return(v); +#elif defined(arch_atomic64_dec_return_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_dec_return_relaxed(v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline s64 -raw_atomic64_dec_return(atomic64_t *v) -{ return raw_atomic64_sub_return(1, v); -} #endif +} -#if defined(arch_atomic64_dec_return_acquire) -#define raw_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire -#elif defined(arch_atomic64_dec_return_relaxed) static __always_inline s64 raw_atomic64_dec_return_acquire(atomic64_t *v) { +#if defined(arch_atomic64_dec_return_acquire) + return arch_atomic64_dec_return_acquire(v); +#elif defined(arch_atomic64_dec_return_relaxed) s64 ret = arch_atomic64_dec_return_relaxed(v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_dec_return) -#define raw_atomic64_dec_return_acquire arch_atomic64_dec_return + return arch_atomic64_dec_return(v); #else -static __always_inline s64 -raw_atomic64_dec_return_acquire(atomic64_t *v) -{ return raw_atomic64_sub_return_acquire(1, v); -} #endif +} -#if defined(arch_atomic64_dec_return_release) -#define raw_atomic64_dec_return_release arch_atomic64_dec_return_release -#elif defined(arch_atomic64_dec_return_relaxed) static __always_inline s64 raw_atomic64_dec_return_release(atomic64_t *v) { +#if defined(arch_atomic64_dec_return_release) + return arch_atomic64_dec_return_release(v); +#elif defined(arch_atomic64_dec_return_relaxed) __atomic_release_fence(); return arch_atomic64_dec_return_relaxed(v); -} #elif defined(arch_atomic64_dec_return) -#define raw_atomic64_dec_return_release arch_atomic64_dec_return + return arch_atomic64_dec_return(v); #else -static __always_inline s64 -raw_atomic64_dec_return_release(atomic64_t *v) -{ return raw_atomic64_sub_return_release(1, v); -} #endif +} -#if defined(arch_atomic64_dec_return_relaxed) -#define raw_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed -#elif defined(arch_atomic64_dec_return) -#define raw_atomic64_dec_return_relaxed arch_atomic64_dec_return -#else static __always_inline s64 raw_atomic64_dec_return_relaxed(atomic64_t *v) { +#if defined(arch_atomic64_dec_return_relaxed) + return arch_atomic64_dec_return_relaxed(v); +#elif 
defined(arch_atomic64_dec_return) + return arch_atomic64_dec_return(v); +#else return raw_atomic64_sub_return_relaxed(1, v); -} #endif +} -#if defined(arch_atomic64_fetch_dec) -#define raw_atomic64_fetch_dec arch_atomic64_fetch_dec -#elif defined(arch_atomic64_fetch_dec_relaxed) static __always_inline s64 raw_atomic64_fetch_dec(atomic64_t *v) { +#if defined(arch_atomic64_fetch_dec) + return arch_atomic64_fetch_dec(v); +#elif defined(arch_atomic64_fetch_dec_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_fetch_dec_relaxed(v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline s64 -raw_atomic64_fetch_dec(atomic64_t *v) -{ return raw_atomic64_fetch_sub(1, v); -} #endif +} -#if defined(arch_atomic64_fetch_dec_acquire) -#define raw_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire -#elif defined(arch_atomic64_fetch_dec_relaxed) static __always_inline s64 raw_atomic64_fetch_dec_acquire(atomic64_t *v) { +#if defined(arch_atomic64_fetch_dec_acquire) + return arch_atomic64_fetch_dec_acquire(v); +#elif defined(arch_atomic64_fetch_dec_relaxed) s64 ret = arch_atomic64_fetch_dec_relaxed(v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_fetch_dec) -#define raw_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec + return arch_atomic64_fetch_dec(v); #else -static __always_inline s64 -raw_atomic64_fetch_dec_acquire(atomic64_t *v) -{ return raw_atomic64_fetch_sub_acquire(1, v); -} #endif +} -#if defined(arch_atomic64_fetch_dec_release) -#define raw_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release -#elif defined(arch_atomic64_fetch_dec_relaxed) static __always_inline s64 raw_atomic64_fetch_dec_release(atomic64_t *v) { +#if defined(arch_atomic64_fetch_dec_release) + return arch_atomic64_fetch_dec_release(v); +#elif defined(arch_atomic64_fetch_dec_relaxed) __atomic_release_fence(); return arch_atomic64_fetch_dec_relaxed(v); -} #elif defined(arch_atomic64_fetch_dec) -#define raw_atomic64_fetch_dec_release arch_atomic64_fetch_dec + return arch_atomic64_fetch_dec(v); #else -static __always_inline s64 -raw_atomic64_fetch_dec_release(atomic64_t *v) -{ return raw_atomic64_fetch_sub_release(1, v); -} #endif +} -#if defined(arch_atomic64_fetch_dec_relaxed) -#define raw_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed -#elif defined(arch_atomic64_fetch_dec) -#define raw_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec -#else static __always_inline s64 raw_atomic64_fetch_dec_relaxed(atomic64_t *v) { +#if defined(arch_atomic64_fetch_dec_relaxed) + return arch_atomic64_fetch_dec_relaxed(v); +#elif defined(arch_atomic64_fetch_dec) + return arch_atomic64_fetch_dec(v); +#else return raw_atomic64_fetch_sub_relaxed(1, v); -} #endif +} -#define raw_atomic64_and arch_atomic64_and +static __always_inline void +raw_atomic64_and(s64 i, atomic64_t *v) +{ + arch_atomic64_and(i, v); +} -#if defined(arch_atomic64_fetch_and) -#define raw_atomic64_fetch_and arch_atomic64_fetch_and -#elif defined(arch_atomic64_fetch_and_relaxed) static __always_inline s64 raw_atomic64_fetch_and(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_and) + return arch_atomic64_fetch_and(i, v); +#elif defined(arch_atomic64_fetch_and_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_fetch_and_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic64_fetch_and" #endif +} -#if defined(arch_atomic64_fetch_and_acquire) -#define raw_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire -#elif 
defined(arch_atomic64_fetch_and_relaxed) static __always_inline s64 raw_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_and_acquire) + return arch_atomic64_fetch_and_acquire(i, v); +#elif defined(arch_atomic64_fetch_and_relaxed) s64 ret = arch_atomic64_fetch_and_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_fetch_and) -#define raw_atomic64_fetch_and_acquire arch_atomic64_fetch_and + return arch_atomic64_fetch_and(i, v); #else #error "Unable to define raw_atomic64_fetch_and_acquire" #endif +} -#if defined(arch_atomic64_fetch_and_release) -#define raw_atomic64_fetch_and_release arch_atomic64_fetch_and_release -#elif defined(arch_atomic64_fetch_and_relaxed) static __always_inline s64 raw_atomic64_fetch_and_release(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_and_release) + return arch_atomic64_fetch_and_release(i, v); +#elif defined(arch_atomic64_fetch_and_relaxed) __atomic_release_fence(); return arch_atomic64_fetch_and_relaxed(i, v); -} #elif defined(arch_atomic64_fetch_and) -#define raw_atomic64_fetch_and_release arch_atomic64_fetch_and + return arch_atomic64_fetch_and(i, v); #else #error "Unable to define raw_atomic64_fetch_and_release" #endif +} +static __always_inline s64 +raw_atomic64_fetch_and_relaxed(s64 i, atomic64_t *v) +{ #if defined(arch_atomic64_fetch_and_relaxed) -#define raw_atomic64_fetch_and_relaxed arch_atomic64_fetch_and_relaxed + return arch_atomic64_fetch_and_relaxed(i, v); #elif defined(arch_atomic64_fetch_and) -#define raw_atomic64_fetch_and_relaxed arch_atomic64_fetch_and + return arch_atomic64_fetch_and(i, v); #else #error "Unable to define raw_atomic64_fetch_and_relaxed" #endif +} -#if defined(arch_atomic64_andnot) -#define raw_atomic64_andnot arch_atomic64_andnot -#else static __always_inline void raw_atomic64_andnot(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_andnot) + arch_atomic64_andnot(i, v); +#else raw_atomic64_and(~i, v); -} #endif +} -#if defined(arch_atomic64_fetch_andnot) -#define raw_atomic64_fetch_andnot arch_atomic64_fetch_andnot -#elif defined(arch_atomic64_fetch_andnot_relaxed) static __always_inline s64 raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_andnot) + return arch_atomic64_fetch_andnot(i, v); +#elif defined(arch_atomic64_fetch_andnot_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_fetch_andnot_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline s64 -raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) -{ return raw_atomic64_fetch_and(~i, v); -} #endif +} -#if defined(arch_atomic64_fetch_andnot_acquire) -#define raw_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire -#elif defined(arch_atomic64_fetch_andnot_relaxed) static __always_inline s64 raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_andnot_acquire) + return arch_atomic64_fetch_andnot_acquire(i, v); +#elif defined(arch_atomic64_fetch_andnot_relaxed) s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_fetch_andnot) -#define raw_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot + return arch_atomic64_fetch_andnot(i, v); #else -static __always_inline s64 -raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) -{ return raw_atomic64_fetch_and_acquire(~i, v); -} #endif +} -#if defined(arch_atomic64_fetch_andnot_release) -#define raw_atomic64_fetch_andnot_release 
arch_atomic64_fetch_andnot_release -#elif defined(arch_atomic64_fetch_andnot_relaxed) static __always_inline s64 raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_andnot_release) + return arch_atomic64_fetch_andnot_release(i, v); +#elif defined(arch_atomic64_fetch_andnot_relaxed) __atomic_release_fence(); return arch_atomic64_fetch_andnot_relaxed(i, v); -} #elif defined(arch_atomic64_fetch_andnot) -#define raw_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot + return arch_atomic64_fetch_andnot(i, v); #else -static __always_inline s64 -raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) -{ return raw_atomic64_fetch_and_release(~i, v); -} #endif +} -#if defined(arch_atomic64_fetch_andnot_relaxed) -#define raw_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed -#elif defined(arch_atomic64_fetch_andnot) -#define raw_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot -#else static __always_inline s64 raw_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_andnot_relaxed) + return arch_atomic64_fetch_andnot_relaxed(i, v); +#elif defined(arch_atomic64_fetch_andnot) + return arch_atomic64_fetch_andnot(i, v); +#else return raw_atomic64_fetch_and_relaxed(~i, v); -} #endif +} -#define raw_atomic64_or arch_atomic64_or +static __always_inline void +raw_atomic64_or(s64 i, atomic64_t *v) +{ + arch_atomic64_or(i, v); +} -#if defined(arch_atomic64_fetch_or) -#define raw_atomic64_fetch_or arch_atomic64_fetch_or -#elif defined(arch_atomic64_fetch_or_relaxed) static __always_inline s64 raw_atomic64_fetch_or(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_or) + return arch_atomic64_fetch_or(i, v); +#elif defined(arch_atomic64_fetch_or_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_fetch_or_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic64_fetch_or" #endif +} -#if defined(arch_atomic64_fetch_or_acquire) -#define raw_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire -#elif defined(arch_atomic64_fetch_or_relaxed) static __always_inline s64 raw_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_or_acquire) + return arch_atomic64_fetch_or_acquire(i, v); +#elif defined(arch_atomic64_fetch_or_relaxed) s64 ret = arch_atomic64_fetch_or_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_fetch_or) -#define raw_atomic64_fetch_or_acquire arch_atomic64_fetch_or + return arch_atomic64_fetch_or(i, v); #else #error "Unable to define raw_atomic64_fetch_or_acquire" #endif +} -#if defined(arch_atomic64_fetch_or_release) -#define raw_atomic64_fetch_or_release arch_atomic64_fetch_or_release -#elif defined(arch_atomic64_fetch_or_relaxed) static __always_inline s64 raw_atomic64_fetch_or_release(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_or_release) + return arch_atomic64_fetch_or_release(i, v); +#elif defined(arch_atomic64_fetch_or_relaxed) __atomic_release_fence(); return arch_atomic64_fetch_or_relaxed(i, v); -} #elif defined(arch_atomic64_fetch_or) -#define raw_atomic64_fetch_or_release arch_atomic64_fetch_or + return arch_atomic64_fetch_or(i, v); #else #error "Unable to define raw_atomic64_fetch_or_release" #endif +} +static __always_inline s64 +raw_atomic64_fetch_or_relaxed(s64 i, atomic64_t *v) +{ #if defined(arch_atomic64_fetch_or_relaxed) -#define raw_atomic64_fetch_or_relaxed arch_atomic64_fetch_or_relaxed + return arch_atomic64_fetch_or_relaxed(i, v); #elif 
defined(arch_atomic64_fetch_or) -#define raw_atomic64_fetch_or_relaxed arch_atomic64_fetch_or + return arch_atomic64_fetch_or(i, v); #else #error "Unable to define raw_atomic64_fetch_or_relaxed" #endif +} -#define raw_atomic64_xor arch_atomic64_xor +static __always_inline void +raw_atomic64_xor(s64 i, atomic64_t *v) +{ + arch_atomic64_xor(i, v); +} -#if defined(arch_atomic64_fetch_xor) -#define raw_atomic64_fetch_xor arch_atomic64_fetch_xor -#elif defined(arch_atomic64_fetch_xor_relaxed) static __always_inline s64 raw_atomic64_fetch_xor(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_xor) + return arch_atomic64_fetch_xor(i, v); +#elif defined(arch_atomic64_fetch_xor_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_fetch_xor_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else #error "Unable to define raw_atomic64_fetch_xor" #endif +} -#if defined(arch_atomic64_fetch_xor_acquire) -#define raw_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire -#elif defined(arch_atomic64_fetch_xor_relaxed) static __always_inline s64 raw_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_xor_acquire) + return arch_atomic64_fetch_xor_acquire(i, v); +#elif defined(arch_atomic64_fetch_xor_relaxed) s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_fetch_xor) -#define raw_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor + return arch_atomic64_fetch_xor(i, v); #else #error "Unable to define raw_atomic64_fetch_xor_acquire" #endif +} -#if defined(arch_atomic64_fetch_xor_release) -#define raw_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release -#elif defined(arch_atomic64_fetch_xor_relaxed) static __always_inline s64 raw_atomic64_fetch_xor_release(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_fetch_xor_release) + return arch_atomic64_fetch_xor_release(i, v); +#elif defined(arch_atomic64_fetch_xor_relaxed) __atomic_release_fence(); return arch_atomic64_fetch_xor_relaxed(i, v); -} #elif defined(arch_atomic64_fetch_xor) -#define raw_atomic64_fetch_xor_release arch_atomic64_fetch_xor + return arch_atomic64_fetch_xor(i, v); #else #error "Unable to define raw_atomic64_fetch_xor_release" #endif +} +static __always_inline s64 +raw_atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v) +{ #if defined(arch_atomic64_fetch_xor_relaxed) -#define raw_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor_relaxed + return arch_atomic64_fetch_xor_relaxed(i, v); #elif defined(arch_atomic64_fetch_xor) -#define raw_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor + return arch_atomic64_fetch_xor(i, v); #else #error "Unable to define raw_atomic64_fetch_xor_relaxed" #endif +} -#if defined(arch_atomic64_xchg) -#define raw_atomic64_xchg arch_atomic64_xchg -#elif defined(arch_atomic64_xchg_relaxed) static __always_inline s64 -raw_atomic64_xchg(atomic64_t *v, s64 i) +raw_atomic64_xchg(atomic64_t *v, s64 new) { +#if defined(arch_atomic64_xchg) + return arch_atomic64_xchg(v, new); +#elif defined(arch_atomic64_xchg_relaxed) s64 ret; __atomic_pre_full_fence(); - ret = arch_atomic64_xchg_relaxed(v, i); + ret = arch_atomic64_xchg_relaxed(v, new); __atomic_post_full_fence(); return ret; -} #else -static __always_inline s64 -raw_atomic64_xchg(atomic64_t *v, s64 new) -{ return raw_xchg(&v->counter, new); -} #endif +} -#if defined(arch_atomic64_xchg_acquire) -#define raw_atomic64_xchg_acquire arch_atomic64_xchg_acquire -#elif defined(arch_atomic64_xchg_relaxed) static __always_inline s64 
-raw_atomic64_xchg_acquire(atomic64_t *v, s64 i) +raw_atomic64_xchg_acquire(atomic64_t *v, s64 new) { - s64 ret = arch_atomic64_xchg_relaxed(v, i); +#if defined(arch_atomic64_xchg_acquire) + return arch_atomic64_xchg_acquire(v, new); +#elif defined(arch_atomic64_xchg_relaxed) + s64 ret = arch_atomic64_xchg_relaxed(v, new); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_xchg) -#define raw_atomic64_xchg_acquire arch_atomic64_xchg + return arch_atomic64_xchg(v, new); #else -static __always_inline s64 -raw_atomic64_xchg_acquire(atomic64_t *v, s64 new) -{ return raw_xchg_acquire(&v->counter, new); -} #endif +} -#if defined(arch_atomic64_xchg_release) -#define raw_atomic64_xchg_release arch_atomic64_xchg_release -#elif defined(arch_atomic64_xchg_relaxed) static __always_inline s64 -raw_atomic64_xchg_release(atomic64_t *v, s64 i) +raw_atomic64_xchg_release(atomic64_t *v, s64 new) { +#if defined(arch_atomic64_xchg_release) + return arch_atomic64_xchg_release(v, new); +#elif defined(arch_atomic64_xchg_relaxed) __atomic_release_fence(); - return arch_atomic64_xchg_relaxed(v, i); -} + return arch_atomic64_xchg_relaxed(v, new); #elif defined(arch_atomic64_xchg) -#define raw_atomic64_xchg_release arch_atomic64_xchg + return arch_atomic64_xchg(v, new); #else -static __always_inline s64 -raw_atomic64_xchg_release(atomic64_t *v, s64 new) -{ return raw_xchg_release(&v->counter, new); -} #endif +} -#if defined(arch_atomic64_xchg_relaxed) -#define raw_atomic64_xchg_relaxed arch_atomic64_xchg_relaxed -#elif defined(arch_atomic64_xchg) -#define raw_atomic64_xchg_relaxed arch_atomic64_xchg -#else static __always_inline s64 raw_atomic64_xchg_relaxed(atomic64_t *v, s64 new) { +#if defined(arch_atomic64_xchg_relaxed) + return arch_atomic64_xchg_relaxed(v, new); +#elif defined(arch_atomic64_xchg) + return arch_atomic64_xchg(v, new); +#else return raw_xchg_relaxed(&v->counter, new); -} #endif +} -#if defined(arch_atomic64_cmpxchg) -#define raw_atomic64_cmpxchg arch_atomic64_cmpxchg -#elif defined(arch_atomic64_cmpxchg_relaxed) static __always_inline s64 raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) { +#if defined(arch_atomic64_cmpxchg) + return arch_atomic64_cmpxchg(v, old, new); +#elif defined(arch_atomic64_cmpxchg_relaxed) s64 ret; __atomic_pre_full_fence(); ret = arch_atomic64_cmpxchg_relaxed(v, old, new); __atomic_post_full_fence(); return ret; -} #else -static __always_inline s64 -raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) -{ return raw_cmpxchg(&v->counter, old, new); -} #endif +} -#if defined(arch_atomic64_cmpxchg_acquire) -#define raw_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire -#elif defined(arch_atomic64_cmpxchg_relaxed) static __always_inline s64 raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) { +#if defined(arch_atomic64_cmpxchg_acquire) + return arch_atomic64_cmpxchg_acquire(v, old, new); +#elif defined(arch_atomic64_cmpxchg_relaxed) s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_cmpxchg) -#define raw_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg + return arch_atomic64_cmpxchg(v, old, new); #else -static __always_inline s64 -raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) -{ return raw_cmpxchg_acquire(&v->counter, old, new); -} #endif +} -#if defined(arch_atomic64_cmpxchg_release) -#define raw_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release -#elif defined(arch_atomic64_cmpxchg_relaxed) static __always_inline s64 
raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) { +#if defined(arch_atomic64_cmpxchg_release) + return arch_atomic64_cmpxchg_release(v, old, new); +#elif defined(arch_atomic64_cmpxchg_relaxed) __atomic_release_fence(); return arch_atomic64_cmpxchg_relaxed(v, old, new); -} #elif defined(arch_atomic64_cmpxchg) -#define raw_atomic64_cmpxchg_release arch_atomic64_cmpxchg + return arch_atomic64_cmpxchg(v, old, new); #else -static __always_inline s64 -raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) -{ return raw_cmpxchg_release(&v->counter, old, new); -} #endif +} -#if defined(arch_atomic64_cmpxchg_relaxed) -#define raw_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg_relaxed -#elif defined(arch_atomic64_cmpxchg) -#define raw_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg -#else static __always_inline s64 raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) { +#if defined(arch_atomic64_cmpxchg_relaxed) + return arch_atomic64_cmpxchg_relaxed(v, old, new); +#elif defined(arch_atomic64_cmpxchg) + return arch_atomic64_cmpxchg(v, old, new); +#else return raw_cmpxchg_relaxed(&v->counter, old, new); -} #endif +} -#if defined(arch_atomic64_try_cmpxchg) -#define raw_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg -#elif defined(arch_atomic64_try_cmpxchg_relaxed) static __always_inline bool raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) { +#if defined(arch_atomic64_try_cmpxchg) + return arch_atomic64_try_cmpxchg(v, old, new); +#elif defined(arch_atomic64_try_cmpxchg_relaxed) bool ret; __atomic_pre_full_fence(); ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); __atomic_post_full_fence(); return ret; -} #else -static __always_inline bool -raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) -{ s64 r, o = *old; r = raw_atomic64_cmpxchg(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); -} #endif +} -#if defined(arch_atomic64_try_cmpxchg_acquire) -#define raw_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire -#elif defined(arch_atomic64_try_cmpxchg_relaxed) static __always_inline bool raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) { +#if defined(arch_atomic64_try_cmpxchg_acquire) + return arch_atomic64_try_cmpxchg_acquire(v, old, new); +#elif defined(arch_atomic64_try_cmpxchg_relaxed) bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_try_cmpxchg) -#define raw_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg + return arch_atomic64_try_cmpxchg(v, old, new); #else -static __always_inline bool -raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) -{ s64 r, o = *old; r = raw_atomic64_cmpxchg_acquire(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); -} #endif +} -#if defined(arch_atomic64_try_cmpxchg_release) -#define raw_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release -#elif defined(arch_atomic64_try_cmpxchg_relaxed) static __always_inline bool raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) { +#if defined(arch_atomic64_try_cmpxchg_release) + return arch_atomic64_try_cmpxchg_release(v, old, new); +#elif defined(arch_atomic64_try_cmpxchg_relaxed) __atomic_release_fence(); return arch_atomic64_try_cmpxchg_relaxed(v, old, new); -} #elif defined(arch_atomic64_try_cmpxchg) -#define raw_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg + return arch_atomic64_try_cmpxchg(v, old, new); #else -static __always_inline bool -raw_atomic64_try_cmpxchg_release(atomic64_t 
*v, s64 *old, s64 new) -{ s64 r, o = *old; r = raw_atomic64_cmpxchg_release(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); -} #endif +} -#if defined(arch_atomic64_try_cmpxchg_relaxed) -#define raw_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed -#elif defined(arch_atomic64_try_cmpxchg) -#define raw_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg -#else static __always_inline bool raw_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) { +#if defined(arch_atomic64_try_cmpxchg_relaxed) + return arch_atomic64_try_cmpxchg_relaxed(v, old, new); +#elif defined(arch_atomic64_try_cmpxchg) + return arch_atomic64_try_cmpxchg(v, old, new); +#else s64 r, o = *old; r = raw_atomic64_cmpxchg_relaxed(v, o, new); if (unlikely(r != o)) *old = r; return likely(r == o); -} #endif +} -#if defined(arch_atomic64_sub_and_test) -#define raw_atomic64_sub_and_test arch_atomic64_sub_and_test -#else static __always_inline bool raw_atomic64_sub_and_test(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_sub_and_test) + return arch_atomic64_sub_and_test(i, v); +#else return raw_atomic64_sub_return(i, v) == 0; -} #endif +} -#if defined(arch_atomic64_dec_and_test) -#define raw_atomic64_dec_and_test arch_atomic64_dec_and_test -#else static __always_inline bool raw_atomic64_dec_and_test(atomic64_t *v) { +#if defined(arch_atomic64_dec_and_test) + return arch_atomic64_dec_and_test(v); +#else return raw_atomic64_dec_return(v) == 0; -} #endif +} -#if defined(arch_atomic64_inc_and_test) -#define raw_atomic64_inc_and_test arch_atomic64_inc_and_test -#else static __always_inline bool raw_atomic64_inc_and_test(atomic64_t *v) { +#if defined(arch_atomic64_inc_and_test) + return arch_atomic64_inc_and_test(v); +#else return raw_atomic64_inc_return(v) == 0; -} #endif +} -#if defined(arch_atomic64_add_negative) -#define raw_atomic64_add_negative arch_atomic64_add_negative -#elif defined(arch_atomic64_add_negative_relaxed) static __always_inline bool raw_atomic64_add_negative(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_add_negative) + return arch_atomic64_add_negative(i, v); +#elif defined(arch_atomic64_add_negative_relaxed) bool ret; __atomic_pre_full_fence(); ret = arch_atomic64_add_negative_relaxed(i, v); __atomic_post_full_fence(); return ret; -} #else -static __always_inline bool -raw_atomic64_add_negative(s64 i, atomic64_t *v) -{ return raw_atomic64_add_return(i, v) < 0; -} #endif +} -#if defined(arch_atomic64_add_negative_acquire) -#define raw_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire -#elif defined(arch_atomic64_add_negative_relaxed) static __always_inline bool raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_add_negative_acquire) + return arch_atomic64_add_negative_acquire(i, v); +#elif defined(arch_atomic64_add_negative_relaxed) bool ret = arch_atomic64_add_negative_relaxed(i, v); __atomic_acquire_fence(); return ret; -} #elif defined(arch_atomic64_add_negative) -#define raw_atomic64_add_negative_acquire arch_atomic64_add_negative + return arch_atomic64_add_negative(i, v); #else -static __always_inline bool -raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) -{ return raw_atomic64_add_return_acquire(i, v) < 0; -} #endif +} -#if defined(arch_atomic64_add_negative_release) -#define raw_atomic64_add_negative_release arch_atomic64_add_negative_release -#elif defined(arch_atomic64_add_negative_relaxed) static __always_inline bool raw_atomic64_add_negative_release(s64 i, atomic64_t *v) { +#if 
defined(arch_atomic64_add_negative_release) + return arch_atomic64_add_negative_release(i, v); +#elif defined(arch_atomic64_add_negative_relaxed) __atomic_release_fence(); return arch_atomic64_add_negative_relaxed(i, v); -} #elif defined(arch_atomic64_add_negative) -#define raw_atomic64_add_negative_release arch_atomic64_add_negative + return arch_atomic64_add_negative(i, v); #else -static __always_inline bool -raw_atomic64_add_negative_release(s64 i, atomic64_t *v) -{ return raw_atomic64_add_return_release(i, v) < 0; -} #endif +} -#if defined(arch_atomic64_add_negative_relaxed) -#define raw_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed -#elif defined(arch_atomic64_add_negative) -#define raw_atomic64_add_negative_relaxed arch_atomic64_add_negative -#else static __always_inline bool raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) { +#if defined(arch_atomic64_add_negative_relaxed) + return arch_atomic64_add_negative_relaxed(i, v); +#elif defined(arch_atomic64_add_negative) + return arch_atomic64_add_negative(i, v); +#else return raw_atomic64_add_return_relaxed(i, v) < 0; -} #endif +} -#if defined(arch_atomic64_fetch_add_unless) -#define raw_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless -#else static __always_inline s64 raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) { +#if defined(arch_atomic64_fetch_add_unless) + return arch_atomic64_fetch_add_unless(v, a, u); +#else s64 c = raw_atomic64_read(v); do { @@ -2839,35 +2735,35 @@ raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) } while (!raw_atomic64_try_cmpxchg(v, &c, c + a)); return c; -} #endif +} -#if defined(arch_atomic64_add_unless) -#define raw_atomic64_add_unless arch_atomic64_add_unless -#else static __always_inline bool raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) { +#if defined(arch_atomic64_add_unless) + return arch_atomic64_add_unless(v, a, u); +#else return raw_atomic64_fetch_add_unless(v, a, u) != u; -} #endif +} -#if defined(arch_atomic64_inc_not_zero) -#define raw_atomic64_inc_not_zero arch_atomic64_inc_not_zero -#else static __always_inline bool raw_atomic64_inc_not_zero(atomic64_t *v) { +#if defined(arch_atomic64_inc_not_zero) + return arch_atomic64_inc_not_zero(v); +#else return raw_atomic64_add_unless(v, 1, 0); -} #endif +} -#if defined(arch_atomic64_inc_unless_negative) -#define raw_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative -#else static __always_inline bool raw_atomic64_inc_unless_negative(atomic64_t *v) { +#if defined(arch_atomic64_inc_unless_negative) + return arch_atomic64_inc_unless_negative(v); +#else s64 c = raw_atomic64_read(v); do { @@ -2876,15 +2772,15 @@ raw_atomic64_inc_unless_negative(atomic64_t *v) } while (!raw_atomic64_try_cmpxchg(v, &c, c + 1)); return true; -} #endif +} -#if defined(arch_atomic64_dec_unless_positive) -#define raw_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive -#else static __always_inline bool raw_atomic64_dec_unless_positive(atomic64_t *v) { +#if defined(arch_atomic64_dec_unless_positive) + return arch_atomic64_dec_unless_positive(v); +#else s64 c = raw_atomic64_read(v); do { @@ -2893,15 +2789,15 @@ raw_atomic64_dec_unless_positive(atomic64_t *v) } while (!raw_atomic64_try_cmpxchg(v, &c, c - 1)); return true; -} #endif +} -#if defined(arch_atomic64_dec_if_positive) -#define raw_atomic64_dec_if_positive arch_atomic64_dec_if_positive -#else static __always_inline s64 raw_atomic64_dec_if_positive(atomic64_t *v) { +#if defined(arch_atomic64_dec_if_positive) + return 
arch_atomic64_dec_if_positive(v);
+#else
 	s64 dec, c = raw_atomic64_read(v);
 
 	do {
@@ -2911,8 +2807,8 @@ raw_atomic64_dec_if_positive(atomic64_t *v)
 	} while (!raw_atomic64_try_cmpxchg(v, &c, dec));
 
 	return dec;
-}
 #endif
+}
 
 #endif /* _LINUX_ATOMIC_FALLBACK_H */
-// c2048fccede6fac923252290e2b303949d5dec83
+// 205e090382132f1fc85e48b46e722865f9c81309
diff --git a/include/linux/atomic/atomic-instrumented.h b/include/linux/atomic/atomic-instrumented.h
index 90ee2f55af77..5491c89dc03a 100644
--- a/include/linux/atomic/atomic-instrumented.h
+++ b/include/linux/atomic/atomic-instrumented.h
@@ -462,33 +462,33 @@ atomic_fetch_xor_relaxed(int i, atomic_t *v)
 }
 
 static __always_inline int
-atomic_xchg(atomic_t *v, int i)
+atomic_xchg(atomic_t *v, int new)
 {
 	kcsan_mb();
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic_xchg(v, i);
+	return raw_atomic_xchg(v, new);
 }
 
 static __always_inline int
-atomic_xchg_acquire(atomic_t *v, int i)
+atomic_xchg_acquire(atomic_t *v, int new)
 {
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic_xchg_acquire(v, i);
+	return raw_atomic_xchg_acquire(v, new);
 }
 
 static __always_inline int
-atomic_xchg_release(atomic_t *v, int i)
+atomic_xchg_release(atomic_t *v, int new)
 {
 	kcsan_release();
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic_xchg_release(v, i);
+	return raw_atomic_xchg_release(v, new);
 }
 
 static __always_inline int
-atomic_xchg_relaxed(atomic_t *v, int i)
+atomic_xchg_relaxed(atomic_t *v, int new)
 {
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic_xchg_relaxed(v, i);
+	return raw_atomic_xchg_relaxed(v, new);
 }
 
 static __always_inline int
@@ -1103,33 +1103,33 @@ atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
 }
 
 static __always_inline s64
-atomic64_xchg(atomic64_t *v, s64 i)
+atomic64_xchg(atomic64_t *v, s64 new)
 {
 	kcsan_mb();
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic64_xchg(v, i);
+	return raw_atomic64_xchg(v, new);
 }
 
 static __always_inline s64
-atomic64_xchg_acquire(atomic64_t *v, s64 i)
+atomic64_xchg_acquire(atomic64_t *v, s64 new)
 {
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic64_xchg_acquire(v, i);
+	return raw_atomic64_xchg_acquire(v, new);
 }
 
 static __always_inline s64
-atomic64_xchg_release(atomic64_t *v, s64 i)
+atomic64_xchg_release(atomic64_t *v, s64 new)
 {
 	kcsan_release();
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic64_xchg_release(v, i);
+	return raw_atomic64_xchg_release(v, new);
 }
 
 static __always_inline s64
-atomic64_xchg_relaxed(atomic64_t *v, s64 i)
+atomic64_xchg_relaxed(atomic64_t *v, s64 new)
 {
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic64_xchg_relaxed(v, i);
+	return raw_atomic64_xchg_relaxed(v, new);
 }
 
 static __always_inline s64
@@ -1744,33 +1744,33 @@ atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
 }
 
 static __always_inline long
-atomic_long_xchg(atomic_long_t *v, long i)
+atomic_long_xchg(atomic_long_t *v, long new)
 {
 	kcsan_mb();
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic_long_xchg(v, i);
+	return raw_atomic_long_xchg(v, new);
 }
 
 static __always_inline long
-atomic_long_xchg_acquire(atomic_long_t *v, long i)
+atomic_long_xchg_acquire(atomic_long_t *v, long new)
 {
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic_long_xchg_acquire(v, i);
+	return raw_atomic_long_xchg_acquire(v, new);
 }
 
 static __always_inline long
-atomic_long_xchg_release(atomic_long_t *v, long i)
+atomic_long_xchg_release(atomic_long_t *v, long new)
 {
 	kcsan_release();
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic_long_xchg_release(v, i);
+	return raw_atomic_long_xchg_release(v, new);
 }
 
 static __always_inline long
-atomic_long_xchg_relaxed(atomic_long_t *v, long i)
+atomic_long_xchg_relaxed(atomic_long_t *v, long new)
 {
 	instrument_atomic_read_write(v, sizeof(*v));
-	return raw_atomic_long_xchg_relaxed(v, i);
+	return raw_atomic_long_xchg_relaxed(v, new);
 }
 
 static __always_inline long
@@ -2231,4 +2231,4 @@ atomic_long_dec_if_positive(atomic_long_t *v)
 
 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
 
-// f6502977180430e61c1a7c4e5e665f04f501fb8d
+// a4c3d2b229f907654cc53cb5d40e80f7fed1ec9c
diff --git a/include/linux/atomic/atomic-long.h b/include/linux/atomic/atomic-long.h
index 63e0b4078ebd..f564f71ff8af 100644
--- a/include/linux/atomic/atomic-long.h
+++ b/include/linux/atomic/atomic-long.h
@@ -622,42 +622,42 @@ raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
 }
 
 static __always_inline long
-raw_atomic_long_xchg(atomic_long_t *v, long i)
+raw_atomic_long_xchg(atomic_long_t *v, long new)
 {
 #ifdef CONFIG_64BIT
-	return raw_atomic64_xchg(v, i);
+	return raw_atomic64_xchg(v, new);
 #else
-	return raw_atomic_xchg(v, i);
+	return raw_atomic_xchg(v, new);
 #endif
 }
 
 static __always_inline long
-raw_atomic_long_xchg_acquire(atomic_long_t *v, long i)
+raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
 {
 #ifdef CONFIG_64BIT
-	return raw_atomic64_xchg_acquire(v, i);
+	return raw_atomic64_xchg_acquire(v, new);
 #else
-	return raw_atomic_xchg_acquire(v, i);
+	return raw_atomic_xchg_acquire(v, new);
 #endif
 }
 
 static __always_inline long
-raw_atomic_long_xchg_release(atomic_long_t *v, long i)
+raw_atomic_long_xchg_release(atomic_long_t *v, long new)
 {
 #ifdef CONFIG_64BIT
-	return raw_atomic64_xchg_release(v, i);
+	return raw_atomic64_xchg_release(v, new);
 #else
-	return raw_atomic_xchg_release(v, i);
+	return raw_atomic_xchg_release(v, new);
 #endif
 }
 
 static __always_inline long
-raw_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
+raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
 {
 #ifdef CONFIG_64BIT
-	return raw_atomic64_xchg_relaxed(v, i);
+	return raw_atomic64_xchg_relaxed(v, new);
 #else
-	return raw_atomic_xchg_relaxed(v, i);
+	return raw_atomic_xchg_relaxed(v, new);
 #endif
 }
 
@@ -872,4 +872,4 @@ raw_atomic_long_dec_if_positive(atomic_long_t *v)
 }
 
 #endif /* _LINUX_ATOMIC_LONG_H */
-// ad09f849db0db5b30c82e497eeb9056a394c5f22
+// e785d25cc3f220b7d473d36aac9da85dd7eb13a8
diff --git a/scripts/atomic/atomics.tbl b/scripts/atomic/atomics.tbl
index 85ca8d9b5c27..903946cbf1b3 100644
--- a/scripts/atomic/atomics.tbl
+++ b/scripts/atomic/atomics.tbl
@@ -27,7 +27,7 @@ and			vF	i	v
 andnot			vF	i	v
 or			vF	i	v
 xor			vF	i	v
-xchg			I	v	i
+xchg			I	v	i:new
 cmpxchg			I	v	i:old	i:new
 try_cmpxchg		B	v	p:old	i:new
 sub_and_test		b	i	v
diff --git a/scripts/atomic/fallbacks/acquire b/scripts/atomic/fallbacks/acquire
index b0f732a5c46e..4da0cab3604e 100755
--- a/scripts/atomic/fallbacks/acquire
+++ b/scripts/atomic/fallbacks/acquire
@@ -1,9 +1,5 @@
 cat <counter, old, new); -} EOF
diff --git a/scripts/atomic/fallbacks/dec b/scripts/atomic/fallbacks/dec
index a660ac65994b..60d286d40300 100755
--- a/scripts/atomic/fallbacks/dec
+++ b/scripts/atomic/fallbacks/dec
@@ -1,7 +1,3 @@
 cat <counter, i); } else { __atomic_release_fence(); raw_${atomic}_set(v, i); } -} EOF
diff --git a/scripts/atomic/fallbacks/sub_and_test b/scripts/atomic/fallbacks/sub_and_test
index 8975a496d495..d1f746fe0ca4 100755
--- a/scripts/atomic/fallbacks/sub_and_test
+++ b/scripts/atomic/fallbacks/sub_and_test
@@ -1,7 +1,3 @@
 cat <counter, new); -} EOF
diff --git a/scripts/atomic/gen-atomic-fallback.sh b/scripts/atomic/gen-atomic-fallback.sh
index 86aca4f9f315..2b470d31e353 100755
--- a/scripts/atomic/gen-atomic-fallback.sh
+++ b/scripts/atomic/gen-atomic-fallback.sh
@@ -60,13 +60,23 @@ gen_proto_order_variant()
 	local name="$1"; shift
 	local sfx="$1"; shift
 	local order="$1"; shift
-	local atomic="$1"
+	local atomic="$1"; shift
+	local int="$1"; shift
 
 	local atomicname="${atomic}_${pfx}${name}${sfx}${order}"
 	local basename="${atomic}_${pfx}${name}${sfx}"
 
 	local template="$(find_fallback_template "${pfx}" "${name}" "${sfx}" "${order}")"
 
+	local ret="$(gen_ret_type "${meta}" "${int}")"
+	local retstmt="$(gen_ret_stmt "${meta}")"
+	local params="$(gen_params "${int}" "${atomic}" "$@")"
+	local args="$(gen_args "$@")"
+
+	printf "static __always_inline ${ret}\n"
+	printf "raw_${atomicname}(${params})\n"
+	printf "{\n"
+
 	# Where there is no possible fallback, this order variant is mandatory
 	# and must be provided by arch code. Add a comment to the header to
 	# make this obvious.
@@ -75,33 +85,35 @@ gen_proto_order_variant()
 	# define this order variant as a C function without a preprocessor
 	# symbol.
 	if [ -z ${template} ] && [ -z "${order}" ] && ! meta_has_relaxed "${meta}"; then
-		printf "#define raw_${atomicname} arch_${atomicname}\n\n"
+		printf "\t${retstmt}arch_${atomicname}(${args});\n"
+		printf "}\n\n"
 		return
 	fi
 
 	printf "#if defined(arch_${atomicname})\n"
-	printf "#define raw_${atomicname} arch_${atomicname}\n"
+	printf "\t${retstmt}arch_${atomicname}(${args});\n"
 
 	# Allow FULL/ACQUIRE/RELEASE ops to be defined in terms of RELAXED ops
 	if [ "${order}" != "_relaxed" ] && meta_has_relaxed "${meta}"; then
 		printf "#elif defined(arch_${basename}_relaxed)\n"
-		gen_order_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "$@"
+		gen_order_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "${atomic}" "${int}" "$@"
 	fi
 
 	# Allow ACQUIRE/RELEASE/RELAXED ops to be defined in terms of FULL ops
 	if [ ! -z "${order}" ]; then
 		printf "#elif defined(arch_${basename})\n"
-		printf "#define raw_${atomicname} arch_${basename}\n"
+		printf "\t${retstmt}arch_${basename}(${args});\n"
 	fi
 
 	printf "#else\n"
 	if [ ! -z "${template}" ]; then
-		gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "$@"
+		gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "${order}" "${atomic}" "${int}" "$@"
 	else
 		printf "#error \"Unable to define raw_${atomicname}\"\n"
 	fi
 
-	printf "#endif\n\n"
+	printf "#endif\n"
+	printf "}\n\n"
 }
-- cgit v1.2.3
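
For readers skimming the diff, this is the shape of definition the updated gen-atomic-fallback.sh now emits for each ordering variant, copied from the raw_atomic64_add_return_acquire() lines added to atomic-arch-fallback.h above; the inline comments are added here for illustration only and do not appear in the generated header:

| static __always_inline s64
| raw_atomic64_add_return_acquire(s64 i, atomic64_t *v)
| {
| #if defined(arch_atomic64_add_return_acquire)
| 	/* the architecture provides the acquire form directly */
| 	return arch_atomic64_add_return_acquire(i, v);
| #elif defined(arch_atomic64_add_return_relaxed)
| 	/* build acquire ordering from the relaxed form plus a fence */
| 	s64 ret = arch_atomic64_add_return_relaxed(i, v);
| 	__atomic_acquire_fence();
| 	return ret;
| #elif defined(arch_atomic64_add_return)
| 	/* fall back to the fully-ordered form */
| 	return arch_atomic64_add_return(i, v);
| #else
| #error "Unable to define raw_atomic64_add_return_acquire"
| #endif
| }

The preprocessor branches are tried in order of decreasing precision: an arch-provided acquire op, then the relaxed op plus __atomic_acquire_fence(), then the fully-ordered op, and finally a hard error where no arch implementation exists at all.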