1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
|
/* memset/bzero -- set memory area to CH/0
Highly optimized version for ix86, x>=5.
Copyright (C) 1996, 1997, 2000 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Torbjorn Granlund, <tege@matematik.su.se>
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library; if not, write to the Free
Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA. */
#include <sysdep.h>
#include "asm-syntax.h"
#include "bp-sym.h"
#include "bp-asm.h"
/* BEWARE: `#ifdef memset' means that memset is redefined as `bzero' */
#define BZERO_P (defined memset)
#define PARMS LINKAGE+4 /* space for 1 saved reg */
#define RTN PARMS
#define DEST RTN+RTN_SIZE
#if BZERO_P
# define LEN DEST+PTR_SIZE
#else
# define CHR DEST+PTR_SIZE
# define LEN CHR+4
#endif
.text
ENTRY (BP_SYM (memset))
ENTER
pushl %edi
movl DEST(%esp), %edi
movl LEN(%esp), %edx
CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %edx)
#if BZERO_P
xorl %eax, %eax /* we fill with 0 */
#else
movb CHR(%esp), %al
movb %al, %ah
movl %eax, %ecx
shll $16, %eax
movw %cx, %ax
#endif
cld
/* If less than 36 bytes to write, skip tricky code (it wouldn't work). */
cmpl $36, %edx
movl %edx, %ecx /* needed when branch is taken! */
jl L(2)
/* First write 0-3 bytes to make the pointer 32-bit aligned. */
movl %edi, %ecx /* Copy ptr to ecx... */
negl %ecx /* ...and negate that and... */
andl $3, %ecx /* ...mask to get byte count. */
subl %ecx, %edx /* adjust global byte count */
rep
stosb
subl $32, %edx /* offset count for unrolled loop */
movl (%edi), %ecx /* Fetch destination cache line */
.align 2, 0x90 /* supply 0x90 for broken assemblers */
L(1): movl 28(%edi), %ecx /* allocate cache line for destination */
subl $32, %edx /* decr loop count */
movl %eax, 0(%edi) /* store words pairwise */
movl %eax, 4(%edi)
movl %eax, 8(%edi)
movl %eax, 12(%edi)
movl %eax, 16(%edi)
movl %eax, 20(%edi)
movl %eax, 24(%edi)
movl %eax, 28(%edi)
leal 32(%edi), %edi /* update destination pointer */
jge L(1)
leal 32(%edx), %ecx /* reset offset count */
/* Write last 0-7 full 32-bit words (up to 8 words if loop was skipped). */
L(2): shrl $2, %ecx /* convert byte count to longword count */
rep
stosl
/* Finally write the last 0-3 bytes. */
movl %edx, %ecx
andl $3, %ecx
rep
stosb
#if !BZERO_P
/* Load result (only if used as memset). */
movl DEST(%esp), %eax /* start address of destination is result */
RETURN_BOUNDED_POINTER (DEST(%esp))
#endif
popl %edi
LEAVE
#if BZERO_P
ret
#else
RET_PTR
#endif
END (BP_SYM (memset))
|