summaryrefslogtreecommitdiff
path: root/firmware/asm/memmove.c
diff options
context:
space:
mode:
authorMatthias Mohr <Rockbox@Mohrenclan.de>2017-01-15 13:29:40 +0100
committerAmaury Pouly <amaury.pouly@gmail.com>2017-01-15 21:32:49 +0100
commitd984725cbf38d0a9e71c866ae61c48ad488373b4 (patch)
treefc064f5fa0baf667bbcd09d56bc350fbb2b5a0d9 /firmware/asm/memmove.c
parent955be5b34af2e6cf374162ea8b4d4451b1644952 (diff)
downloadrockbox-d984725cbf38d0a9e71c866ae61c48ad488373b4.tar.gz
rockbox-d984725cbf38d0a9e71c866ae61c48ad488373b4.zip
Renamed defines UNALIGNED to ROCKBOX_UNALIGNED - UNALIGNED is already
defined in mingw environments. Renamed defines of UNALIGNED to ROCKBOX_UNALIGNED so that they don't conflict with definitions in mingw32 cross-compiling environments (defined in _mingw.h). Change-Id: I369848c0f507e6bf5ff9ab4a60663bbbda6edc52
Diffstat (limited to 'firmware/asm/memmove.c')
-rw-r--r--  firmware/asm/memmove.c | 4 ++--
1 file changed, 2 insertions, 2 deletions
diff --git a/firmware/asm/memmove.c b/firmware/asm/memmove.c
index 5f423964bb..5c2adf20a7 100644
--- a/firmware/asm/memmove.c
+++ b/firmware/asm/memmove.c
@@ -40,7 +40,7 @@ QUICKREF
 #include <string.h>

 /* Nonzero if either X or Y is not aligned on a "long" boundary. */
-#define UNALIGNED(X, Y) \
+#define ROCKBOX_UNALIGNED(X, Y) \
   (((long)X & (sizeof (long) - 1)) | ((long)Y & (sizeof (long) - 1)))

 /* How many bytes are copied each iteration of the 4X unrolled loop. */
@@ -109,7 +109,7 @@ _DEFUN (memmove, (dst_void, src_void, length),
   /* Use optimizing algorithm for a non-destructive copy to closely
      match memcpy. If the size is small or either SRC or DST is unaligned,
      then punt into the byte copy loop. This should be rare. */
-  if (!TOO_SMALL(len) && !UNALIGNED (src, dst))
+  if (!TOO_SMALL(len) && !ROCKBOX_UNALIGNED (src, dst))
     {
       aligned_dst = (long*)dst;
       aligned_src = (long*)src;