--- src/fastmemcpy.cpp.old	2004-05-12 02:10:17.000000000 +0900
+++ src/fastmemcpy.cpp	2004-11-20 01:08:53.775730544 +0900
@@ -229,8 +229,8 @@
 	MOVNTQ" %%mm6, 48(%1)\n"
 	MOVNTQ" %%mm7, 56(%1)\n"
 	:: "r" (from), "r" (to) : "memory");
-	((const unsigned char *)from)+=64;
-	((unsigned char *)to)+=64;
+	from = (unsigned char *)from + 64;
+	to = (unsigned char *)to + 64;
 	}
 #ifdef HAVE_MMX2
 	/* since movntq is weakly-ordered, a "sfence"
@@ -316,8 +316,8 @@
 	"movq %%mm6, 48(%1)\n"
 	"movq %%mm7, 56(%1)\n"
 	:: "r" (from), "r" (to) : "memory");
-	((const unsigned char *)from)+=64;
-	((unsigned char *)to)+=64;
+	from = (unsigned char *)from + 64;
+	to = (unsigned char *)to + 64;
 	}
 	__asm__ __volatile__ ("emms":::"memory");
 }
@@ -403,8 +403,8 @@
 	"movntq %%mm6, 48(%1)\n"
 	"movntq %%mm7, 56(%1)\n"
 	:: "r" (from), "r" (to) : "memory");
-	((const unsigned char *)from)+=64;
-	((unsigned char *)to)+=64;
+	from = (unsigned char *)from + 64;
+	to = (unsigned char *)to + 64;
 	}
 	/* since movntq is weakly-ordered, a "sfence"
 	 * is needed to become ordered again. */
@@ -461,8 +461,8 @@
 	"movntps %%xmm2, 32(%1)\n"
 	"movntps %%xmm3, 48(%1)\n"
 	:: "r" (from), "r" (to) : "memory");
-	((const unsigned char *)from)+=64;
-	((unsigned char *)to)+=64;
+	from = (unsigned char *)from + 64;
+	to = (unsigned char *)to + 64;
 	}
 	else
 	/*
@@ -483,8 +483,8 @@
 	"movntps %%xmm2, 32(%1)\n"
 	"movntps %%xmm3, 48(%1)\n"
 	:: "r" (from), "r" (to) : "memory");
-	((const unsigned char *)from)+=64;
-	((unsigned char *)to)+=64;
+	from = (unsigned char *)from + 64;
+	to = (unsigned char *)to + 64;
 	}
 	/* since movntq is weakly-ordered, a "sfence"
 	 * is needed to become ordered again. */
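
Every hunk above applies the same fix: the old code treated a cast expression as an lvalue ("((unsigned char *)to) += 64;"), a GCC extension that was deprecated and later removed, so it no longer compiles; the replacement reassigns the pointer through ordinary ISO C/C++ arithmetic. Below is a minimal, hypothetical sketch of that pattern in isolation; copy64 and the plain memcpy standing in for the inline-assembly block are illustrations, not part of the patch, and the patched file's real inner loop uses MMX/SSE non-temporal stores instead.

#include <cstdio>
#include <cstring>

/* Copies len bytes in 64-byte chunks, advancing both pointers the way
 * the patched loop does.  memcpy stands in for the asm block. */
static void *copy64(void *to, const void *from, size_t len)
{
    void *retval = to;
    while (len >= 64) {
        memcpy(to, from, 64);
        /* old form:  ((const unsigned char *)from) += 64;
         * rejected by modern compilers: a cast is not an lvalue. */
        from = (unsigned char *)from + 64;  /* as in the patch; a
                                             * const-preserving cast
                                             * would also work */
        to   = (unsigned char *)to + 64;
        len -= 64;
    }
    if (len)
        memcpy(to, from, len);
    return retval;
}

int main()
{
    char src[200], dst[200];
    for (int i = 0; i < 200; ++i)
        src[i] = (char)i;
    copy64(dst, src, sizeof src);
    printf("%s\n", memcmp(src, dst, sizeof src) == 0 ? "ok" : "mismatch");
    return 0;
}

Assigning the result back into the void pointer is safe in both directions: converting unsigned char * to const void * implicitly re-adds the qualifier, and the compiler emits the same pointer-increment code as the old extension did.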