[PATCH 28/37] MIPS: bitops: Use smp_mb__before_atomic in test_* ops
From: Paul Burton
Date: Mon Sep 30 2019 - 19:10:02 EST
Use smp_mb__before_atomic() rather than smp_mb__before_llsc() in
test_and_set_bit(), test_and_clear_bit() & test_and_change_bit(). The
_atomic() versions make semantic sense in these cases, and will allow a
later patch to omit redundant barriers for Loongson3 systems that
already include a barrier within __test_bit_op().
Signed-off-by: Paul Burton <paul.burton@xxxxxxxx>
---
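[Editor's note, not part of the commit; illustrative context only.] The point of
preferring the _atomic() form is that it is an arch-overridable hook: on systems
where the LL/SC sequence itself already provides the required ordering (as the
commit message notes for Loongson3's __test_bit_op()), the hook can be reduced to
a compiler barrier, while a full barrier remains the default elsewhere. A minimal,
hypothetical sketch of that idea follows; the CONFIG_EXAMPLE_LLSC_IMPLIES_MB
symbol is made up and merely stands in for whatever mechanism the later Loongson3
patch actually uses:

    /* Hypothetical sketch only; not the real asm/barrier.h change. */
    #ifdef CONFIG_EXAMPLE_LLSC_IMPLIES_MB
    /*
     * The LL/SC loop itself already emits a completion barrier, so only a
     * compiler barrier is needed before the atomic operation.
     */
    # define smp_mb__before_atomic()	barrier()
    #else
    /* Otherwise emit a full memory barrier before the atomic operation. */
    # define smp_mb__before_atomic()	smp_mb()
    #endif
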
arch/mips/include/asm/bitops.h | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 9e967d6622c8..e6d97238a321 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -209,7 +209,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	smp_mb__before_llsc();
+	smp_mb__before_atomic();
 	return test_and_set_bit_lock(nr, addr);
 }
@@ -228,7 +228,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 	int bit = nr % BITS_PER_LONG;
 	unsigned long res, orig;

-	smp_mb__before_llsc();
+	smp_mb__before_atomic();

	if (!kernel_uses_llsc) {
		res = __mips_test_and_clear_bit(nr, addr);
@@ -265,7 +265,7 @@ static inline int test_and_change_bit(unsigned long nr,
 	int bit = nr % BITS_PER_LONG;
 	unsigned long res, orig;

-	smp_mb__before_llsc();
+	smp_mb__before_atomic();

	if (!kernel_uses_llsc) {
		res = __mips_test_and_change_bit(nr, addr);
--
2.23.0