[tip:locking/core] locking/atomics: Combine the atomic_andnot() and atomic64_andnot() API definitions
From: tip-bot for Ingo Molnar
Date: Sun May 06 2018 - 08:15:36 EST
Commit-ID: 7b9b2e57c7edaeac5404f39c5974ff227540d41e
Gitweb: https://git.kernel.org/tip/7b9b2e57c7edaeac5404f39c5974ff227540d41e
Author: Ingo Molnar <mingo@xxxxxxxxxx>
AuthorDate: Sat, 5 May 2018 10:54:45 +0200
Committer: Ingo Molnar <mingo@xxxxxxxxxx>
CommitDate: Sat, 5 May 2018 15:22:45 +0200
locking/atomics: Combine the atomic_andnot() and atomic64_andnot() API definitions
The atomic_andnot() and atomic64_andnot() APIs are defined in 4 separate groups
spread out in the atomic.h header:
#ifdef atomic_andnot
...
#endif /* atomic_andnot */
...
#ifndef atomic_andnot
...
#endif
...
#ifdef atomic64_andnot
...
#endif /* atomic64_andnot */
...
#ifndef atomic64_andnot
...
#endif
Combine and unify them into two groups:
#ifdef atomic_andnot
#else
#endif
...
#ifdef atomic64_andnot
#else
#endif
So that one API group is defined in a single place within the header.
Cc: Andrew Morton <akpm@xxxxxxxxxxxxxxxxxxxx>
Cc: Linus Torvalds <torvalds@xxxxxxxxxxxxxxxxxxxx>
Cc: Mark Rutland <mark.rutland@xxxxxxx>
Cc: Paul E. McKenney <paulmck@xxxxxxxxxx>
Cc: Peter Zijlstra <peterz@xxxxxxxxxxxxx>
Cc: Thomas Gleixner <tglx@xxxxxxxxxxxxx>
Cc: Will Deacon <will.deacon@xxxxxxx>
Cc: aryabinin@xxxxxxxxxxxxx
Cc: boqun.feng@xxxxxxxxx
Cc: catalin.marinas@xxxxxxx
Cc: dvyukov@xxxxxxxxxx
Cc: linux-arm-kernel@xxxxxxxxxxxxxxxxxxx
Link: http://lkml.kernel.org/r/20180505085445.cmdnqh6xpnpfoqzb@xxxxxxxxx
Signed-off-by: Ingo Molnar <mingo@xxxxxxxxxx>
---
include/linux/atomic.h | 72 +++++++++++++++++++++++++-------------------------
1 file changed, 36 insertions(+), 36 deletions(-)
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 352ecc72d7f5..1176cf7c6f03 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -205,22 +205,6 @@
# endif
#endif
-#ifdef atomic_andnot
-
-#ifndef atomic_fetch_andnot_relaxed
-# define atomic_fetch_andnot_relaxed atomic_fetch_andnot
-# define atomic_fetch_andnot_acquire atomic_fetch_andnot
-# define atomic_fetch_andnot_release atomic_fetch_andnot
-#else
-# ifndef atomic_fetch_andnot
-# define atomic_fetch_andnot(...) __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
-# define atomic_fetch_andnot_acquire(...) __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
-# define atomic_fetch_andnot_release(...) __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
-# endif
-#endif
-
-#endif /* atomic_andnot */
-
#ifndef atomic_fetch_xor_relaxed
# define atomic_fetch_xor_relaxed atomic_fetch_xor
# define atomic_fetch_xor_acquire atomic_fetch_xor
@@ -338,7 +322,22 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
# define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
#endif
-#ifndef atomic_andnot
+#ifdef atomic_andnot
+
+#ifndef atomic_fetch_andnot_relaxed
+# define atomic_fetch_andnot_relaxed atomic_fetch_andnot
+# define atomic_fetch_andnot_acquire atomic_fetch_andnot
+# define atomic_fetch_andnot_release atomic_fetch_andnot
+#else
+# ifndef atomic_fetch_andnot
+# define atomic_fetch_andnot(...) __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
+# define atomic_fetch_andnot_acquire(...) __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
+# define atomic_fetch_andnot_release(...) __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
+# endif
+#endif
+
+#else /* !atomic_andnot: */
+
static inline void atomic_andnot(int i, atomic_t *v)
{
atomic_and(~i, v);
@@ -363,7 +362,8 @@ static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
return atomic_fetch_and_release(~i, v);
}
-#endif
+
+#endif /* !atomic_andnot */
/**
* atomic_inc_not_zero_hint - increment if not null
@@ -600,22 +600,6 @@ static inline int atomic_dec_if_positive(atomic_t *v)
# endif
#endif
-#ifdef atomic64_andnot
-
-#ifndef atomic64_fetch_andnot_relaxed
-# define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
-# define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
-# define atomic64_fetch_andnot_release atomic64_fetch_andnot
-#else
-# ifndef atomic64_fetch_andnot
-# define atomic64_fetch_andnot(...) __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
-# define atomic64_fetch_andnot_acquire(...) __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
-# define atomic64_fetch_andnot_release(...) __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
-# endif
-#endif
-
-#endif /* atomic64_andnot */
-
#ifndef atomic64_fetch_xor_relaxed
# define atomic64_fetch_xor_relaxed atomic64_fetch_xor
# define atomic64_fetch_xor_acquire atomic64_fetch_xor
@@ -672,7 +656,22 @@ static inline int atomic_dec_if_positive(atomic_t *v)
# define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#endif
-#ifndef atomic64_andnot
+#ifdef atomic64_andnot
+
+#ifndef atomic64_fetch_andnot_relaxed
+# define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
+# define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
+# define atomic64_fetch_andnot_release atomic64_fetch_andnot
+#else
+# ifndef atomic64_fetch_andnot
+# define atomic64_fetch_andnot(...) __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
+# define atomic64_fetch_andnot_acquire(...) __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
+# define atomic64_fetch_andnot_release(...) __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
+# endif
+#endif
+
+#else /* !atomic64_andnot: */
+
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
atomic64_and(~i, v);
@@ -697,7 +696,8 @@ static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v
{
return atomic64_fetch_and_release(~i, v);
}
-#endif
+
+#endif /* !atomic64_andnot */
#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))