1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
|
/*
 * void* memmove(void *p1, void *p2, long n)
 *
 * Plan 9 amd64 assembler. Copies n bytes from p2 (source) to p1
 * (destination), correctly handling overlapping regions, and returns p1.
 *
 * Register roles:
 *	DI = destination pointer (p1)
 *	SI = source pointer (p2)
 *	BX = byte count n
 *	CX = REP counter (longs, then leftover bytes)
 *	DX = scratch (alignment test, overlap test)
 *	AX = return value (p1), set once up front and never touched again
 */
TEXT memmove(SB), $0
MOVQ RARG, DI /* DI = p1 (first arg arrives in the arg register) */
MOVQ DI, AX /* return value is always p1 */
MOVQ p2+8(FP), SI /* SI = p2 */
MOVL n+16(FP), BX /* BX = n (32-bit count) */
CMPL BX, $0
JGT _ok
JEQ _return /* nothing to do if n == 0 */
MOVL $0, SI /* fault if n < 0: null SI makes the copy below trap deliberately */
/*
 * check and set for backwards:
 * (p2 < p1) && ((p2+n) > p1)
 * i.e. source starts below dest but overlaps it, so a forward
 * copy would clobber unread source bytes -- copy backwards instead.
 */
_ok:
CMPQ SI, DI
JGT _forward /* p2 > p1: forward copy is always safe */
JEQ _return /* nothing to do if p2 == p1 */
MOVQ SI, DX
ADDQ BX, DX /* DX = p2 + n (one past end of source) */
CMPQ DX, DI
JGT _back /* source end extends past dest start: overlap, go backwards */
/*
 * copy whole longs if aligned
 * (forward path; also reached when p2 < p1 with no overlap)
 */
_forward:
CLD /* string ops ascend */
MOVQ SI, DX
ORQ DI, DX
ANDL $3, DX /* both pointers 4-byte aligned only if OR of low bits is 0 */
JNE c3f /* misaligned: fall back to a pure byte copy */
MOVQ BX, CX
SHRQ $2, CX /* CX = number of whole longs */
ANDL $3, BX /* BX = leftover bytes (0..3); also sets ZF for the JEQ below */
REP; MOVSL /* REP MOVS does not modify flags, so ZF survives it */
/*
 * copy the rest, by bytes
 */
JEQ _return /* flags set by above ANDL */
c3f:
MOVL BX, CX
REP; MOVSB
RET
/*
 * whole thing backwards has
 * adjusted addresses
 */
_back:
ADDQ BX, DI /* point both pointers one past the end... */
ADDQ BX, SI
STD /* ...then string ops descend */
SUBQ $4, DI /* back up to the last whole long of each region */
SUBQ $4, SI
/*
 * copy whole longs, if aligned
 * NOTE(review): this path returns with DF still set (no CLD before
 * _return) -- confirm the Plan 9 ABI/callers tolerate DF=1 on return.
 */
MOVQ DI, DX
ORQ SI, DX
ANDL $3, DX /* aligned only if OR of low bits of both pointers is 0 */
JNE c3b
MOVL BX, CX
SHRQ $2, CX /* CX = number of whole longs */
ANDL $3, BX /* BX = leftover bytes (0..3); ZF survives the REP below */
REP; MOVSL
/*
 * copy the rest, by bytes
 */
JEQ _return /* flags set by above ANDL */
c3b:
ADDQ $3, DI /* pointers sit at the last long; move to the last byte */
ADDQ $3, SI
MOVL BX, CX
REP; MOVSB
_return:
RET
|