/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__
#include <linux/jump_label.h>

/* Written 2002 by Andi Kleen */

/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

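/*
 * Under KMSAN, instrumented string calls are rewritten to the
 * __msan_memcpy()/__msan_memset()/__msan_memmove() helpers, so pull in
 * their declarations from <linux/kmsan_string.h>.
 */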
#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
#include <linux/kmsan_string.h>
#endif

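/*
 * The __HAVE_ARCH_* defines below tell the generic string code that
 * this architecture supplies its own implementations, so the fallback
 * versions in lib/string.c are not built.
 */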
#define __HAVE_ARCH_MEMCPY 1
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

/*
 * KMSAN needs to instrument as much code as possible. Use C versions of
 * memsetXX() from lib/string.c under KMSAN.
 */
#if !defined(CONFIG_KMSAN)
#define __HAVE_ARCH_MEMSET16
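/*
 * memset16/32/64 are open-coded with "rep stosw/stosl/stosq": %ax holds
 * the fill pattern, %rdi the destination and %rcx the count.  The dummy
 * "=&c"/"=&D" outputs (tied to the "0"/"1" matching inputs) tell the
 * compiler that RCX and RDI are modified by the instruction.
 */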
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET64
static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
{
	long d0, d1;
	asm volatile("rep\n\t"
		     "stosq"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
#endif /* !CONFIG_KMSAN */

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#ifdef CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE
#define __HAVE_ARCH_MEMCPY_FLUSHCACHE 1
void __memcpy_flushcache(void *dst, const void *src, size_t cnt);
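/*
 * memcpy_flushcache() copies data while keeping it out of the CPU
 * cache.  For small compile-time-constant sizes the copy is open-coded
 * with MOVNTI non-temporal stores, which bypass the cache hierarchy;
 * everything else falls back to the out-of-line __memcpy_flushcache().
 */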
static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
{
	if (__builtin_constant_p(cnt)) {
		switch (cnt) {
		case 4:
			asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src));
			return;
		case 8:
			asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
			return;
		case 16:
			asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
			asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8)));
			return;
		}
	}
	__memcpy_flushcache(dst, src, cnt);
}
#endif /* CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE */

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */