Diffstat (limited to 'sysdeps/mips/mips64/memset.S')
-rw-r--r--  sysdeps/mips/mips64/memset.S  |  91 ++++++++++++++++++++++++++++++
1 file changed, 91 insertions(+), 0 deletions(-)
diff --git a/sysdeps/mips/mips64/memset.S b/sysdeps/mips/mips64/memset.S
new file mode 100644
index 0000000000..6a3b154bad
--- /dev/null
+++ b/sysdeps/mips/mips64/memset.S
@@ -0,0 +1,91 @@
+/* Copyright (C) 2002, 2003 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+ Contributed by Hartvig Ekner <hartvige@mips.com>, 2002.
+ Ported to mips3 n32/n64 by Alexandre Oliva <aoliva@redhat.com>
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with the GNU C Library; if not, write to the Free
+ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+ 02111-1307 USA. */
+
+#include <sysdep.h>
+#include <endian.h>
+#include <sys/asm.h>
+
+
+/* void *memset(void *s, int c, size_t n);
+
+ This could probably be optimized further. */
+
+#if __BYTE_ORDER == __BIG_ENDIAN
+# define SDHI sdl /* high part is left in big-endian */
+#else
+# define SDHI sdr /* high part is right in little-endian */
+#endif
+
+ENTRY (memset)
+ .set noreorder
+
+ slti t5, a2, 8 # Fewer than 8 bytes?
+ bne t5, zero, L(last8)
+ move v0, a0 # Set up the return value before a0 changes
+
+ beq a1, zero, L(ueven) # If zero pattern, no need to extend
+ andi a1, 0xff # Avoid problems with bogus arguments
+ dsll t4, a1, 8
+ or a1, t4
+ dsll t4, a1, 16
+ or a1, t4 # a1 is now pattern in full word
+ dsll t4, a1, 32
+ or a1, t4 # a1 is now pattern in double word
+
+L(ueven):
+ PTR_SUBU t4, zero, a0 # Unaligned address?
+ andi t4, 0x7
+ beq t4, zero, L(chkw)
+ PTR_SUBU a2, t4
+ SDHI a1, 0(a0) # Yes, handle first unaligned part
+ PTR_ADDU a0, t4 # Now both a0 and a2 are updated
+
+L(chkw):
+ andi t4, a2, 0xf # Enough left for one loop iteration?
+ beq t4, a2, L(chkl)
+ PTR_SUBU a3, a2, t4
+ PTR_ADDU a3, a0 # a3 is last loop address +1
+ move a2, t4 # a2 is now # of bytes left after loop
+L(loopw):
+ PTR_ADDIU a0, 16 # Handle 2 doublewords per iteration
+ sd a1, -16(a0)
+ bne a0, a3, L(loopw)
+ sd a1, -8(a0)
+
+L(chkl):
+ andi t4, a2, 0x8 # Check if there is at least a double
+ beq t4, zero, L(last8) # word remaining after the loop
+ PTR_SUBU a2, t4
+ sd a1, 0(a0) # Yes...
+ PTR_ADDIU a0, 8
+
+L(last8):
+ blez a2, L(exit) # Byte-fill the last 0-7 bytes (if any)
+ PTR_ADDU a3, a2, a0 # a3 is last address +1
+L(lst8l):
+ PTR_ADDIU a0, 1
+ bne a0, a3, L(lst8l)
+ sb a1, -1(a0)
+L(exit):
+ j ra # Bye, bye
+ nop
+
+ .set reorder
+END (memset)
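
For readers who are not fluent in MIPS assembly, the control flow above corresponds roughly to the following C sketch. This is an illustration written for this page, not part of the commit; the function name memset_sketch and all local variable names are invented, and the byte loops stand in for stores the assembly does more cleverly.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

void *
memset_sketch (void *s, int c, size_t n)
{
  unsigned char *p = s;

  if (n >= 8)                          /* mirrors the slti/bne guard */
    {
      /* Replicate the fill byte through a 64-bit doubleword, as the
         three dsll/or pairs do (the assembly skips this when c == 0).  */
      uint64_t pat = (unsigned char) c;
      pat |= pat << 8;
      pat |= pat << 16;
      pat |= pat << 32;

      /* Head: bytes up to the next 8-byte boundary.  The assembly
         covers all of these with a single unaligned SDHI (sdl/sdr)
         store; portable C has to fill them one at a time.  */
      size_t head = (0 - (uintptr_t) p) & 7;
      n -= head;
      for (; head > 0; head--)
        *p++ = (unsigned char) c;

      /* L(loopw): 16 bytes, i.e. two doubleword stores, per pass.  */
      while (n >= 16)
        {
          memcpy (p, &pat, 8);         /* the pair of sd instructions */
          memcpy (p + 8, &pat, 8);
          p += 16;
          n -= 16;
        }

      /* L(chkl): one trailing doubleword, if any.  */
      if (n & 8)
        {
          memcpy (p, &pat, 8);
          p += 8;
          n -= 8;
        }
    }

  /* L(last8)/L(lst8l): the final 0-7 bytes, one at a time.  */
  for (; n > 0; n--)
    *p++ = (unsigned char) c;

  return s;
}

The part the C version cannot express is the head store: SDHI expands to sdl on big-endian targets and sdr on little-endian ones, each of which writes exactly the bytes between an unaligned address and the next doubleword boundary in one instruction. That trick, plus doing the doubleword pattern setup only when n >= 8 makes it worthwhile, is most of what the assembly buys over a naive byte loop.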