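Replace the GCC "cast as lvalue" extension in fastmemcpy.cpp with standard C/C++ pointer
arithmetic. The old code advanced its pointers through a cast expression, e.g.
((unsigned char *)to)+=64; treating the result of a cast as an lvalue is non-standard
and was removed in GCC 4.0, so each hunk below rewrites the increment as a plain
cast-add-assign. A minimal sketch of the portable idiom, assuming the usual
MPlayer-style signature fast_memcpy(void *to, const void *from, size_t len) (the
signature is not shown in this diff); advance64 is a hypothetical helper used only
for illustration, not part of the patch:

    /* sketch: portable 64-byte pointer advance, valid in both C and C++ */
    static void advance64(const void **from, void **to)
    {
        /* non-standard (old code):  ((const unsigned char *)*from) += 64;  */
        /* portable replacement: cast, add, assign back through the pointer */
        *from = (const unsigned char *)*from + 64;
        *to   = (unsigned char *)*to + 64;
    }
    /* usage: advance64(&from, &to); after each 64-byte block is copied */

Note that the patch itself writes (unsigned char *)from, which also drops the const
qualifier; a C-style cast is allowed to do that, though (const unsigned char *)from
would preserve it.
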
--- src/fastmemcpy.cpp.old	2004-05-12 02:10:17.000000000 +0900
+++ src/fastmemcpy.cpp	2004-11-20 01:08:53.775730544 +0900
@@ -229,8 +229,8 @@
 		MOVNTQ" %%mm6, 48(%1)\n"
 		MOVNTQ" %%mm7, 56(%1)\n"
 		:: "r" (from), "r" (to) : "memory");
-		((const unsigned char *)from)+=64;
-		((unsigned char *)to)+=64;
+		from = (unsigned char *)from + 64;
+		to = (unsigned char *)to + 64;
 	}
 #ifdef HAVE_MMX2
                 /* since movntq is weakly-ordered, a "sfence"
@@ -316,8 +316,8 @@
                                     "movq %%mm6, 48(%1)\n"
                                     "movq %%mm7, 56(%1)\n"
                                     :: "r" (from), "r" (to) : "memory");
-               ((const unsigned char *)from)+=64;
-               ((unsigned char *)to)+=64;
+               from = (unsigned char *)from + 64;
+               to = (unsigned char *)to + 64;
           }
           __asm__ __volatile__ ("emms":::"memory");
      }
@@ -403,8 +403,8 @@
                                     "movntq %%mm6, 48(%1)\n"
                                     "movntq %%mm7, 56(%1)\n"
                                     :: "r" (from), "r" (to) : "memory");
-               ((const unsigned char *)from)+=64;
-               ((unsigned char *)to)+=64;
+               from = (unsigned char *)from + 64;
+               to = (unsigned char *)to + 64;
           }
           /* since movntq is weakly-ordered, a "sfence"
           * is needed to become ordered again. */
@@ -461,8 +461,8 @@
                                          "movntps %%xmm2, 32(%1)\n"
                                          "movntps %%xmm3, 48(%1)\n"
                                          :: "r" (from), "r" (to) : "memory");
-                    ((const unsigned char *)from)+=64;
-                    ((unsigned char *)to)+=64;
+                    from = (unsigned char *)from + 64;
+                    to = (unsigned char *)to + 64;
                }
           else
                /*
@@ -483,8 +483,8 @@
                                          "movntps %%xmm2, 32(%1)\n"
                                          "movntps %%xmm3, 48(%1)\n"
                                          :: "r" (from), "r" (to) : "memory");
-                    ((const unsigned char *)from)+=64;
-                    ((unsigned char *)to)+=64;
+                    from = (unsigned char *)from + 64;
+                    to = (unsigned char *)to + 64;
                }
           /* since movntq is weakly-ordered, a "sfence"
            * is needed to become ordered again. */