Re: [PATCH 01/10] locking/atomic: Add missing cast to try_cmpxchg() fallbacks

From: Mark Rutland
Date: Fri Mar 24 2023 - 10:13:23 EST


On Sun, Mar 05, 2023 at 09:56:19PM +0100, Uros Bizjak wrote:
> Cast _oldp to the type of _ptr to avoid incompatible-pointer-types warning.

Can you give an example of where we are passing an incompatible pointer?

That sounds indicative of a bug in the caller, but maybe I'm missing some
indirection that makes the cast necessary here.
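
To make sure I understand the problem: the sort of case I can imagine is one
where the old-value pointer is a distinct (if same-sized) type from the
cmpxchg target. A minimal userspace sketch of the existing fallback
(hypothetical names and types, with __sync_val_compare_and_swap() standing in
for arch_cmpxchg(), chosen purely to reproduce the
-Wincompatible-pointer-types warning) would be:

  /* Userspace sketch of the current arch_try_cmpxchg() fallback, for illustration only. */
  #include <stdio.h>

  typedef unsigned long long u64;

  #define try_cmpxchg_sketch(_ptr, _oldp, _new)                         \
  ({                                                                    \
          typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r;        \
          ___r = __sync_val_compare_and_swap((_ptr), ___o, (_new));     \
          if (___r != ___o)                                             \
                  *___op = ___r;                                        \
          ___r == ___o;                                                 \
  })

  int main(void)
  {
          u64 counter = 0;
          /*
           * Hypothetical caller: on LP64, 'unsigned long *' and 'u64 *'
           * (i.e. 'unsigned long long *') are distinct pointer types even
           * though the objects are the same size, so the assignment to
           * ___op above warns with -Wincompatible-pointer-types.
           */
          unsigned long old = 0;

          if (try_cmpxchg_sketch(&counter, &old, 1ULL))
                  printf("swapped, old was %lu\n", old);

          return 0;
  }

If that's the sort of case you're hitting, it would help to point at the
specific caller, since fixing the caller's type might be preferable to
casting the warning away in the fallback.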

> Fixes: 29f006fdefe6 ("asm-generic/atomic: Add try_cmpxchg() fallbacks")

I'm not sure that this needs a Fixes tag. Does anything go wrong today, or only
later in this series?

Thanks,
Mark.

> Cc: Will Deacon <will@xxxxxxxxxx>
> Cc: Peter Zijlstra <peterz@xxxxxxxxxxxxx>
> Cc: Boqun Feng <boqun.feng@xxxxxxxxx>
> Cc: Mark Rutland <mark.rutland@xxxxxxx>
> Signed-off-by: Uros Bizjak <ubizjak@xxxxxxxxx>
> ---
> include/linux/atomic/atomic-arch-fallback.h | 18 +++++++++---------
> scripts/atomic/gen-atomic-fallback.sh | 2 +-
> 2 files changed, 10 insertions(+), 10 deletions(-)
>
> diff --git a/include/linux/atomic/atomic-arch-fallback.h b/include/linux/atomic/atomic-arch-fallback.h
> index 77bc5522e61c..19debd501ee7 100644
> --- a/include/linux/atomic/atomic-arch-fallback.h
> +++ b/include/linux/atomic/atomic-arch-fallback.h
> @@ -87,7 +87,7 @@
> #ifndef arch_try_cmpxchg
> #define arch_try_cmpxchg(_ptr, _oldp, _new) \
> ({ \
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \
> ___r = arch_cmpxchg((_ptr), ___o, (_new)); \
> if (unlikely(___r != ___o)) \
> *___op = ___r; \
> @@ -98,7 +98,7 @@
> #ifndef arch_try_cmpxchg_acquire
> #define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
> ({ \
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \
> ___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
> if (unlikely(___r != ___o)) \
> *___op = ___r; \
> @@ -109,7 +109,7 @@
> #ifndef arch_try_cmpxchg_release
> #define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
> ({ \
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \
> ___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
> if (unlikely(___r != ___o)) \
> *___op = ___r; \
> @@ -120,7 +120,7 @@
> #ifndef arch_try_cmpxchg_relaxed
> #define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
> ({ \
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \
> ___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
> if (unlikely(___r != ___o)) \
> *___op = ___r; \
> @@ -157,7 +157,7 @@
> #ifndef arch_try_cmpxchg64
> #define arch_try_cmpxchg64(_ptr, _oldp, _new) \
> ({ \
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \
> ___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
> if (unlikely(___r != ___o)) \
> *___op = ___r; \
> @@ -168,7 +168,7 @@
> #ifndef arch_try_cmpxchg64_acquire
> #define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
> ({ \
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \
> ___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
> if (unlikely(___r != ___o)) \
> *___op = ___r; \
> @@ -179,7 +179,7 @@
> #ifndef arch_try_cmpxchg64_release
> #define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
> ({ \
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \
> ___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
> if (unlikely(___r != ___o)) \
> *___op = ___r; \
> @@ -190,7 +190,7 @@
> #ifndef arch_try_cmpxchg64_relaxed
> #define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
> ({ \
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \
> ___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
> if (unlikely(___r != ___o)) \
> *___op = ___r; \
> @@ -2456,4 +2456,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
> #endif
>
> #endif /* _LINUX_ATOMIC_FALLBACK_H */
> -// b5e87bdd5ede61470c29f7a7e4de781af3770f09
> +// 1b4d4c82ae653389cd1538d5b07170267d9b3837
> diff --git a/scripts/atomic/gen-atomic-fallback.sh b/scripts/atomic/gen-atomic-fallback.sh
> index 3a07695e3c89..39f447161108 100755
> --- a/scripts/atomic/gen-atomic-fallback.sh
> +++ b/scripts/atomic/gen-atomic-fallback.sh
> @@ -171,7 +171,7 @@ cat <<EOF
> #ifndef arch_try_${cmpxchg}${order}
> #define arch_try_${cmpxchg}${order}(_ptr, _oldp, _new) \\
> ({ \\
> - typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \\
> + typeof(*(_ptr)) *___op = (typeof(_ptr))(_oldp), ___o = *___op, ___r; \\
> ___r = arch_${cmpxchg}${order}((_ptr), ___o, (_new)); \\
> if (unlikely(___r != ___o)) \\
> *___op = ___r; \\
> --
> 2.39.2
>