author     Rich Felker <dalias@aerifal.cx>  2012-09-10 18:16:11 -0400
committer  Rich Felker <dalias@aerifal.cx>  2012-09-10 18:16:11 -0400
commit     1701e4f3d46b14c4c4be8a46e64f8eaf15a5c061 (patch)
tree       ff00b3c7db479c467be152984b9ec4ea4d3e4cec /src/string/memmove.c
parent     3b5e69052a867e9d99cf4c655d775bd06e3437f1 (diff)
download   musl-1701e4f3d46b14c4c4be8a46e64f8eaf15a5c061.tar.gz
reenable word-at-a-time copying in memmove
before restrict was added, memmove called memcpy for forward copies and used a byte-at-a-time loop for reverse copies. this was changed to avoid invoking UB now that memcpy has an undefined copying order, making memmove considerably slower. performance is still rather bad, so I'll be adding asm soon.
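As background for the commit message (illustration only, not part of the patch): when source and destination overlap, memcpy's copy order is unspecified, so a forward shift routed through memcpy may read bytes it has already overwritten; memmove is the interface defined for that case. A minimal sketch, assuming a small local buffer:

/* illustration only: overlapping forward shift, well-defined via memmove;
 * doing the same with memcpy would be UB since its copy order is unspecified */
#include <stdio.h>
#include <string.h>

int main(void)
{
	char buf[] = "abcdef";
	memmove(buf + 1, buf, 5); /* buf becomes "aabcde" */
	puts(buf);
	return 0;
}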
Diffstat (limited to 'src/string/memmove.c')
-rw-r--r--  src/string/memmove.c  31
1 file changed, 27 insertions, 4 deletions
diff --git a/src/string/memmove.c b/src/string/memmove.c
index 9153a644..27f670e1 100644
--- a/src/string/memmove.c
+++ b/src/string/memmove.c
@@ -1,13 +1,36 @@
 #include <string.h>
+#include <stdint.h>
+
+#define WT size_t
+#define WS (sizeof(WT))
 
 void *memmove(void *dest, const void *src, size_t n)
 {
 	char *d = dest;
 	const char *s = src;
+
 	if (d==s) return d;
-	if ((size_t)(d-s) < n)
-		while (n--) d[n] = s[n];
-	else
-		while (n--) *d++ = *s++;
+	if (s+n <= d || d+n <= s) return memcpy(d, s, n);
+
+	if (d<s) {
+		if ((uintptr_t)s % WS == (uintptr_t)d % WS) {
+			while ((uintptr_t)d % WS) {
+				if (!n--) return dest;
+				*d++ = *s++;
+			}
+			for (; n>=WS; n-=WS, d+=WS, s+=WS) *(WT *)d = *(WT *)s;
+		}
+		for (; n; n--) *d++ = *s++;
+	} else {
+		if ((uintptr_t)s % WS == (uintptr_t)d % WS) {
+			while ((uintptr_t)(d+n) % WS) {
+				if (!n--) return dest;
+				d[n] = s[n];
+			}
+			while (n>=WS) n-=WS, *(WT *)(d+n) = *(WT *)(s+n);
+		}
+		while (n) n--, d[n] = s[n];
+	}
+
 	return dest;
 }
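A rough sanity check for the new paths (an assumed test harness, not shipped with the commit): the 8-byte offset keeps src and dest equally aligned modulo sizeof(size_t) on typical 32- and 64-bit targets, so both the forward and reverse word-at-a-time loops are exercised.

/* assumed test harness, not part of musl */
#include <assert.h>
#include <string.h>

int main(void)
{
	char a[64], b[64];
	for (int i = 0; i < 64; i++) a[i] = b[i] = (char)i;

	/* d < s: forward copy through the aligned word loop */
	memmove(a, a + 8, 40);
	for (int i = 0; i < 40; i++) assert(a[i] == (char)(i + 8));

	/* d > s: reverse copy through the aligned word loop */
	memmove(b + 8, b, 40);
	for (int i = 0; i < 40; i++) assert(b[i + 8] == (char)i);

	return 0;
}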