[PATCH v2 02/12] x86/asm: Introduce inline memcpy and memset

From: Alexander Shishkin
Date: Tue May 30 2023 - 07:44:23 EST


From: Peter Zijlstra <peterz@xxxxxxxxxxxxx>

Provide inline memcpy() and memset() functions that can be used instead of
the GCC builtins whenever necessary.

Code posted by Peter Zijlstra <peterz@xxxxxxxxxxxxx>.
Link: https://lore.kernel.org/lkml/Y759AJ%2F0N9fqwDED@xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx/
[Missing Signed-off-by from PeterZ]
Signed-off-by: Sohil Mehta <sohil.mehta@xxxxxxxxx>
---
arch/x86/include/asm/string_32.h | 21 +++++++++++++++++++++
arch/x86/include/asm/string_64.h | 21 +++++++++++++++++++++
2 files changed, 42 insertions(+)

diff --git a/arch/x86/include/asm/string_32.h b/arch/x86/include/asm/string_32.h
index 32c0d981a82a..8896270e5eda 100644
--- a/arch/x86/include/asm/string_32.h
+++ b/arch/x86/include/asm/string_32.h
@@ -151,6 +151,16 @@ extern void *memcpy(void *, const void *, size_t);

#endif /* !CONFIG_FORTIFY_SOURCE */

+static __always_inline void *__inline_memcpy(void *to, const void *from, size_t len)
+{
+ void *ret = to;
+
+ asm volatile("rep movsb"
+ : "+D" (to), "+S" (from), "+c" (len)
+ : : "memory");
+ return ret;
+}
+
#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t n);

@@ -195,6 +205,17 @@ extern void *memset(void *, int, size_t);
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */

+static __always_inline void *__inline_memset(void *s, int v, size_t n)
+{
+ void *ret = s;
+
+ asm volatile("rep stosb"
+ : "+D" (s), "+c" (n)
+ : "a" ((uint8_t)v)
+ : "memory");
+ return ret;
+}
+
#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
diff --git a/arch/x86/include/asm/string_64.h b/arch/x86/include/asm/string_64.h
index 857d364b9888..ea51e2d73265 100644
--- a/arch/x86/include/asm/string_64.h
+++ b/arch/x86/include/asm/string_64.h
@@ -18,10 +18,31 @@
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

+static __always_inline void *__inline_memcpy(void *to, const void *from, size_t len)
+{
+ void *ret = to;
+
+ asm volatile("rep movsb"
+ : "+D" (to), "+S" (from), "+c" (len)
+ : : "memory");
+ return ret;
+}
+
#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

+static __always_inline void *__inline_memset(void *s, int v, size_t n)
+{
+ void *ret = s;
+
+ asm volatile("rep stosb"
+ : "+D" (s), "+c" (n)
+ : "a" ((uint8_t)v)
+ : "memory");
+ return ret;
+}
+
/*
* KMSAN needs to instrument as much code as possible. Use C versions of
* memsetXX() from lib/string.c under KMSAN.
--
2.39.2