[PATCHv2 2/2] x86: Match bitops prototypes

From: Joakim Tjernlund
Date: Fri Mar 17 2017 - 13:16:35 EST


Building the VirtualBox modules causes type-mismatch complaints because
the code mixes void * and unsigned long * bitmap pointers.
Adjust the bitops function prototypes in include/asm-generic/bitops/le.h
to match the x86 ones in arch/x86/include/asm/bitops.h,
that is, replace void * with unsigned long *.

Cc: <stable@xxxxxxxxxxxxxxx> # v4.9+
Signed-off-by: Joakim Tjernlund <joakim.tjernlund@xxxxxxxxxxxx>
---
v2 - Improve wording
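
For reference only (not part of the patch, and the names below are made
up rather than taken from VirtualBox): a minimal sketch of the kind of
mismatch this addresses. With the old void * prototypes, assigning one
of the *_le helpers to a function pointer declared against
unsigned long * draws an incompatible-pointer-type warning from gcc.

	/* Old (pre-patch) prototype from asm-generic/bitops/le.h: */
	extern unsigned long find_first_zero_bit_le(const void *addr,
						    unsigned long size);

	/*
	 * Caller-side type written against unsigned long *, as the x86
	 * bitops and most bitmap users are:
	 */
	typedef unsigned long (*find_zero_fn)(const unsigned long *addr,
					      unsigned long size);

	/* gcc warns: initialization from incompatible pointer type */
	static find_zero_fn my_find_zero = find_first_zero_bit_le;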

include/asm-generic/bitops/le.h | 42 ++++++++++++++++++++++-------------------
1 file changed, 23 insertions(+), 19 deletions(-)

diff --git a/include/asm-generic/bitops/le.h b/include/asm-generic/bitops/le.h
index 6173154..92a3afa 100644
--- a/include/asm-generic/bitops/le.h
+++ b/include/asm-generic/bitops/le.h
@@ -8,20 +8,22 @@

#define BITOP_LE_SWIZZLE 0

-static inline unsigned long find_next_zero_bit_le(const void *addr,
- unsigned long size, unsigned long offset)
+static inline unsigned long find_next_zero_bit_le(const unsigned long *addr,
+ unsigned long size,
+ unsigned long offset)
{
return find_next_zero_bit(addr, size, offset);
}

-static inline unsigned long find_next_bit_le(const void *addr,
- unsigned long size, unsigned long offset)
+static inline unsigned long find_next_bit_le(const unsigned long *addr,
+ unsigned long size,
+ unsigned long offset)
{
return find_next_bit(addr, size, offset);
}

-static inline unsigned long find_first_zero_bit_le(const void *addr,
- unsigned long size)
+static inline unsigned long find_first_zero_bit_le(const unsigned long *addr,
+ unsigned long size)
{
return find_first_zero_bit(addr, size);
}
@@ -31,13 +33,15 @@ static inline unsigned long find_first_zero_bit_le(const void *addr,
#define BITOP_LE_SWIZZLE ((BITS_PER_LONG-1) & ~0x7)

#ifndef find_next_zero_bit_le
-extern unsigned long find_next_zero_bit_le(const void *addr,
- unsigned long size, unsigned long offset);
+extern unsigned long find_next_zero_bit_le(const unsigned long *addr,
+ unsigned long size,
+ unsigned long offset);
#endif

#ifndef find_next_bit_le
-extern unsigned long find_next_bit_le(const void *addr,
- unsigned long size, unsigned long offset);
+extern unsigned long find_next_bit_le(const unsigned long *addr,
+ unsigned long size,
+ unsigned long offset);
#endif

#ifndef find_first_zero_bit_le
@@ -49,47 +53,47 @@ extern unsigned long find_next_bit_le(const void *addr,
#error "Please fix <asm/byteorder.h>"
#endif

-static inline int test_bit_le(int nr, const void *addr)
+static inline int test_bit_le(int nr, const unsigned long *addr)
{
return test_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline void set_bit_le(int nr, void *addr)
+static inline void set_bit_le(int nr, unsigned long *addr)
{
set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline void clear_bit_le(int nr, void *addr)
+static inline void clear_bit_le(int nr, unsigned long *addr)
{
clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline void __set_bit_le(int nr, void *addr)
+static inline void __set_bit_le(int nr, unsigned long *addr)
{
__set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline void __clear_bit_le(int nr, void *addr)
+static inline void __clear_bit_le(int nr, unsigned long *addr)
{
__clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline int test_and_set_bit_le(int nr, void *addr)
+static inline int test_and_set_bit_le(int nr, unsigned long *addr)
{
return test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline int test_and_clear_bit_le(int nr, void *addr)
+static inline int test_and_clear_bit_le(int nr, unsigned long *addr)
{
return test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline int __test_and_set_bit_le(int nr, void *addr)
+static inline int __test_and_set_bit_le(int nr, unsigned long *addr)
{
return __test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}

-static inline int __test_and_clear_bit_le(int nr, void *addr)
+static inline int __test_and_clear_bit_le(int nr, unsigned long *addr)
{
return __test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
}
--
2.10.2