s390/kasan: replace some memory functions

Follow the common kasan approach:

    "KASan replaces memory functions with manually instrumented
    variants.  Original functions declared as weak symbols so strong
    definitions in mm/kasan/kasan.c could replace them. Original
    functions have aliases with '__' prefix in name, so we could call
    non-instrumented variant if needed."
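
For illustration, the instrumented strong definition on the KASAN side
then looks roughly like the sketch below (not verbatim kernel code; the
shadow check is done by KASAN's internal helper before the copy is
delegated to the non-instrumented __memcpy):

    void *memcpy(void *dest, const void *src, size_t len)
    {
            check_memory_region((unsigned long)src, len, false, _RET_IP_);
            check_memory_region((unsigned long)dest, len, true, _RET_IP_);
            return __memcpy(dest, src, len);
    }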

Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
diff --git a/arch/s390/include/asm/string.h b/arch/s390/include/asm/string.h
index 50f26fc..116cc15 100644
--- a/arch/s390/include/asm/string.h
+++ b/arch/s390/include/asm/string.h
@@ -53,6 +53,27 @@ char *strstr(const char *s1, const char *s2);
 #undef __HAVE_ARCH_STRSEP
 #undef __HAVE_ARCH_STRSPN
 
+#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
+
+extern void *__memcpy(void *dest, const void *src, size_t n);
+extern void *__memset(void *s, int c, size_t n);
+extern void *__memmove(void *dest, const void *src, size_t n);
+
+/*
+ * For files that are not instrumented (e.g. mm/slub.c) we
+ * should use the non-instrumented versions of the mem* functions.
+ */
+
+#define memcpy(dst, src, len) __memcpy(dst, src, len)
+#define memmove(dst, src, len) __memmove(dst, src, len)
+#define memset(s, c, n) __memset(s, c, n)
+
+#ifndef __NO_FORTIFY
+#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
+#endif
+
+#endif /* defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) */
+
 void *__memset16(uint16_t *s, uint16_t v, size_t count);
 void *__memset32(uint32_t *s, uint32_t v, size_t count);
 void *__memset64(uint64_t *s, uint64_t v, size_t count);
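
As a usage sketch: an object excluded from instrumentation with the
usual KASAN_SANITIZE_<object>.o := n Makefile knob is compiled without
__SANITIZE_ADDRESS__, so its mem* calls go through the macros above to
the plain variants. The file and function below are hypothetical and
only illustrate the effect:

    /* built with KASAN_SANITIZE_early.o := n, hence no
     * __SANITIZE_ADDRESS__ for this translation unit
     */
    #include <linux/string.h>

    static void copy_lowcore(void *dst, const void *src, size_t len)
    {
            /* expands to __memcpy(): no shadow access, safe to run
             * before the KASAN shadow memory is set up
             */
            memcpy(dst, src, len);
    }
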
diff --git a/arch/s390/lib/mem.S b/arch/s390/lib/mem.S
index 40c4d59..53008da 100644
--- a/arch/s390/lib/mem.S
+++ b/arch/s390/lib/mem.S
@@ -14,7 +14,8 @@
 /*
  * void *memmove(void *dest, const void *src, size_t n)
  */
-ENTRY(memmove)
+WEAK(memmove)
+ENTRY(__memmove)
 	ltgr	%r4,%r4
 	lgr	%r1,%r2
 	jz	.Lmemmove_exit
@@ -47,6 +48,7 @@
 	BR_EX	%r14
 .Lmemmove_mvc:
 	mvc	0(1,%r1),0(%r3)
+ENDPROC(__memmove)
 EXPORT_SYMBOL(memmove)
 
 /*
@@ -64,7 +66,8 @@
  *	return __builtin_memset(s, c, n);
  * }
  */
-ENTRY(memset)
+WEAK(memset)
+ENTRY(__memset)
 	ltgr	%r4,%r4
 	jz	.Lmemset_exit
 	ltgr	%r3,%r3
@@ -108,6 +111,7 @@
 	xc	0(1,%r1),0(%r1)
 .Lmemset_mvc:
 	mvc	1(1,%r1),0(%r1)
+ENDPROC(__memset)
 EXPORT_SYMBOL(memset)
 
 /*
@@ -115,7 +119,8 @@
  *
  * void *memcpy(void *dest, const void *src, size_t n)
  */
-ENTRY(memcpy)
+WEAK(memcpy)
+ENTRY(__memcpy)
 	ltgr	%r4,%r4
 	jz	.Lmemcpy_exit
 	aghi	%r4,-1
@@ -136,6 +141,7 @@
 	j	.Lmemcpy_remainder
 .Lmemcpy_mvc:
 	mvc	0(1,%r1),0(%r3)
+ENDPROC(__memcpy)
 EXPORT_SYMBOL(memcpy)
 
 /*
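
The WEAK()/ENTRY() pairing used above gives each routine a strong
'__'-prefixed entry point plus a weak exported name at (roughly) the
same address. In C terms this is approximately equivalent to the sketch
below (with a trivial stand-in body instead of the mvc-based code in
mem.S):

    #include <stddef.h>

    /* the real implementation lives under the '__' name ... */
    void *__memmove(void *dest, const void *src, size_t n)
    {
            char *d = dest;
            const char *s = src;

            if (d < s)
                    while (n--)
                            *d++ = *s++;
            else
                    while (n--)
                            d[n] = s[n];
            return dest;
    }

    /* ... and the exported name is only a weak alias, so a strong
     * memmove() elsewhere (e.g. the instrumented one in
     * mm/kasan/kasan.c) wins at link time while __memmove stays
     * callable for non-instrumented users
     */
    void *memmove(void *dest, const void *src, size_t n)
            __attribute__((weak, alias("__memmove")));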