Diffstat (limited to 'firmware/asm/memmove.c')
-rw-r--r--  firmware/asm/memmove.c | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/firmware/asm/memmove.c b/firmware/asm/memmove.c
index 5f423964bb..5c2adf20a7 100644
--- a/firmware/asm/memmove.c
+++ b/firmware/asm/memmove.c
@@ -40,7 +40,7 @@ QUICKREF
 #include <string.h>
 
 /* Nonzero if either X or Y is not aligned on a "long" boundary. */
-#define UNALIGNED(X, Y) \
+#define ROCKBOX_UNALIGNED(X, Y) \
   (((long)X & (sizeof (long) - 1)) | ((long)Y & (sizeof (long) - 1)))
 
 /* How many bytes are copied each iteration of the 4X unrolled loop. */
@@ -109,7 +109,7 @@ _DEFUN (memmove, (dst_void, src_void, length),
   /* Use optimizing algorithm for a non-destructive copy to closely
      match memcpy. If the size is small or either SRC or DST is unaligned,
      then punt into the byte copy loop. This should be rare. */
-  if (!TOO_SMALL(len) && !UNALIGNED (src, dst))
+  if (!TOO_SMALL(len) && !ROCKBOX_UNALIGNED (src, dst))
     {
       aligned_dst = (long*)dst;
       aligned_src = (long*)src;
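For context, the change is only a rename of the alignment-check macro (UNALIGNED ->
ROCKBOX_UNALIGNED, presumably to avoid clashing with a macro of the same name elsewhere);
the test itself and the word-wise fast path it guards are untouched. The sketch below is
not the code from firmware/asm/memmove.c (which also handles overlapping regions and
unrolls the copy 4x); it is a minimal, simplified illustration of how such a macro gates
a "long"-at-a-time copy, with TOO_SMALL assumed to mean "shorter than one long".

    /* Minimal sketch, NOT the Rockbox implementation: shows how an
       alignment-check macro like ROCKBOX_UNALIGNED selects between a
       word-wise fast path and a plain byte copy.  Overlap handling and
       the 4x unrolled loop of the real memmove are omitted. */
    #include <stddef.h>

    #define ROCKBOX_UNALIGNED(X, Y) \
      (((long)X & (sizeof (long) - 1)) | ((long)Y & (sizeof (long) - 1)))

    /* Assumed helper for this sketch: copies this small are not worth
       the fast-path setup. */
    #define TOO_SMALL(LEN) ((LEN) < sizeof (long))

    void *sketch_copy_forward(void *dst_void, const void *src_void, size_t len)
    {
      char *dst = dst_void;
      const char *src = src_void;

      /* Fast path: both pointers share "long" alignment and the copy is
         big enough to amortise the setup, so move whole longs at a time. */
      if (!TOO_SMALL(len) && !ROCKBOX_UNALIGNED (src, dst))
        {
          long *aligned_dst = (long *)dst;
          const long *aligned_src = (const long *)src;

          while (len >= sizeof (long))
            {
              *aligned_dst++ = *aligned_src++;
              len -= sizeof (long);
            }

          dst = (char *)aligned_dst;
          src = (const char *)aligned_src;
        }

      /* Byte loop picks up the tail, or the whole buffer when the fast
         path was skipped. */
      while (len--)
        *dst++ = *src++;

      return dst_void;
    }

Because the macro ORs the low bits of both pointers, it is nonzero as soon as either
operand is misaligned, which is exactly the condition under which the code punts to the
byte loop.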