path: root/src/memmove.cpp
Diffstat (limited to 'src/memmove.cpp')
-rw-r--r--    src/memmove.cpp    78
1 file changed, 78 insertions(+), 0 deletions(-)
diff --git a/src/memmove.cpp b/src/memmove.cpp
new file mode 100644
index 0000000..06e37df
--- /dev/null
+++ b/src/memmove.cpp
@@ -0,0 +1,78 @@
+/* Taken from newlib... */
+
+#include <cstring>
+#include <cstddef>
+#include <climits>
+
+/* Nonzero if either X or Y is not aligned on a "long" boundary. */
+#define UNALIGNED(X, Y) \
+ (((long)X & (sizeof (long) - 1)) | ((long)Y & (sizeof (long) - 1)))
+
+/* How many bytes are copied each iteration of the 4X unrolled loop. */
+#define BIGBLOCKSIZE (sizeof (long) << 2)
+
+/* How many bytes are copied each iteration of the word copy loop. */
+#define LITTLEBLOCKSIZE (sizeof (long))
+
+/* Threshold for punting to the byte copier. */
+#define TOO_SMALL(LEN) ((LEN) < BIGBLOCKSIZE)
+
+extern "C"
+void *memmove (void *dst_void, const void *src_void, size_t length)
+{
+ char *dst = reinterpret_cast<char *>(dst_void);
+ const char *src = reinterpret_cast<const char *>(src_void);
+ long *aligned_dst;
+ const long *aligned_src;
+
+ if (src < dst && dst < src + length)
+ {
+ /* Destructive overlap...have to copy backwards */
+ src += length;
+ dst += length;
+ while (length--)
+ {
+ *--dst = *--src;
+ }
+ }
+ else
+ {
+ /* Use optimizing algorithm for a non-destructive copy to closely
+ match memcpy. If the size is small or either SRC or DST is unaligned,
+ then punt into the byte copy loop. This should be rare. */
+ if (!TOO_SMALL(length) && !UNALIGNED (src, dst))
+ {
+ aligned_dst = (long*)dst;
+ aligned_src = (long*)src;
+
+ /* Copy 4X long words at a time if possible. */
+ while (length >= BIGBLOCKSIZE)
+ {
+ *aligned_dst++ = *aligned_src++;
+ *aligned_dst++ = *aligned_src++;
+ *aligned_dst++ = *aligned_src++;
+ *aligned_dst++ = *aligned_src++;
+ length -= BIGBLOCKSIZE;
+ }
+
+ /* Copy one long word at a time if possible. */
+ while (length >= LITTLEBLOCKSIZE)
+ {
+ *aligned_dst++ = *aligned_src++;
+ length -= LITTLEBLOCKSIZE;
+ }
+
+ /* Pick up any residual with a byte copier. */
+ dst = (char*)aligned_dst;
+ src = (char*)aligned_src;
+ }
+
+ while (length--)
+ {
+ *dst++ = *src++;
+ }
+ }
+
+ return dst_void;
+}
+
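
For reference (not part of the commit), a minimal standalone sketch of how the two branches above behave: the non-overlapping branch when the ranges do not destructively overlap, and the backward byte copy when src < dst < src + length. The buffers and expected output are illustrative only.

#include <cstdio>
#include <cstring>

int main()
{
    /* src > dst: no destructive overlap, so the non-overlapping branch runs
       (the byte loop here, since 5 bytes is below BIGBLOCKSIZE). */
    char a[] = "abcdef";
    std::memmove(a, a + 2, 5);   /* copies "cdef" and its terminator */
    std::puts(a);                /* prints "cdef" */

    /* src < dst < src + length: destructive overlap, copied backwards. */
    char b[] = "abcdef";
    std::memmove(b + 2, b, 4);   /* copies "abcd" over "cdef" */
    std::puts(b);                /* prints "ababcd" */

    return 0;
}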