// SPDX-License-Identifier: GPL-2.0
/*
 * This provides an optimized implementation of memcpy, and a simplified
 * implementation of memset and memmove. These are used here because the
 * standard kernel runtime versions are not yet available and we don't
 * trust the gcc built-in implementations as they may do unexpected things
 * (e.g. FPU ops) in the minimal decompression stub execution environment.
 */
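/* error.h provides warn(), used by the overlap check in memcpy() below. */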
#include "error.h"

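/*
 * Pull in the generic boot string helpers (memcmp(), strcmp(), strlen()
 * and friends) shared with the rest of the boot code.
 */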
#include "../string.c"

#ifdef CONFIG_X86_32
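/*
 * 32-bit copy: move n / 4 dwords with "rep ; movsl", then the remaining
 * n % 4 bytes with "rep ; movsb".  The "0"/"1"/"2" input constraints tie
 * the count, destination and source to ECX, EDI and ESI, and the dummy
 * outputs tell the compiler those registers are clobbered.
 */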
static void *____memcpy(void *dest, const void *src, size_t n)
{
	int d0, d1, d2;
	asm volatile(
		"rep ; movsl\n\t"
		"movl %4,%%ecx\n\t"
		"rep ; movsb\n\t"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		: "0" (n >> 2), "g" (n & 3), "1" (dest), "2" (src)
		: "memory");

	return dest;
}
#else
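/*
 * 64-bit copy: same idea, but move n / 8 qwords with "rep ; movsq" and
 * the remaining n % 8 bytes with "rep ; movsb".
 */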
static void *____memcpy(void *dest, const void *src, size_t n)
{
	long d0, d1, d2;
	asm volatile(
		"rep ; movsq\n\t"
		"movq %4,%%rcx\n\t"
		"rep ; movsb\n\t"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		: "0" (n >> 3), "g" (n & 7), "1" (dest), "2" (src)
		: "memory");

	return dest;
}
#endif

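/* Simple byte-at-a-time memset; speed is not critical in the stub. */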
void *memset(void *s, int c, size_t n)
{
	int i;
	char *ss = s;

	for (i = 0; i < n; i++)
		ss[i] = c;
	return s;
}

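/*
 * If the destination starts below the source, or at least n bytes above
 * it, a forward copy cannot clobber unread source bytes, so hand off to
 * ____memcpy().  Otherwise the regions overlap destructively and we copy
 * backwards, one byte at a time.
 */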
void *memmove(void *dest, const void *src, size_t n)
{
	unsigned char *d = dest;
	const unsigned char *s = src;

	if (d <= s || d - s >= n)
		return ____memcpy(dest, src, n);

	while (n-- > 0)
		d[n] = s[n];

	return dest;
}

/* Detect and warn about potential overlaps, but handle them with memmove. */
void *memcpy(void *dest, const void *src, size_t n)
{
	if (dest > src && dest - src < n) {
		warn("Avoiding potentially unsafe overlapping memcpy()!");
		return memmove(dest, src, n);
	}
	return ____memcpy(dest, src, n);
}
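
/*
 * Illustrative example (not part of the build): shifting a buffer up by
 * one byte overlaps, so memcpy() warns and falls back to the backwards
 * copy in memmove():
 *
 *	char buf[8] = "abcdefg";
 *	memcpy(buf + 1, buf, 7);
 */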

/*
 * With KASAN enabled, files built without instrumentation may have their
 * mem*() calls redirected to the uninstrumented __mem*() names; alias
 * those names to the implementations above.
 */
#ifdef CONFIG_KASAN
extern void *__memset(void *s, int c, size_t n) __alias(memset);
extern void *__memmove(void *dest, const void *src, size_t n) __alias(memmove);
extern void *__memcpy(void *dest, const void *src, size_t n) __alias(memcpy);
#endif