author    Ulrich Drepper <drepper@redhat.com>   1999-10-11 22:31:36 +0000
committer Ulrich Drepper <drepper@redhat.com>   1999-10-11 22:31:36 +0000
commit    217eb19b6f28429aa02792764bfd7b9f51743be2 (patch)
tree      8889fa7ed4cc9b561e2fc8372b604cbd55eb2402 /sysdeps/powerpc/strcmp.S
parent    df08cc56eb0a050bd1d7cf569d78d4f9d2a20964 (diff)
Update.
1999-10-11  Ulrich Drepper  <drepper@cygnus.com>

        * sysdeps/powerpc/Makefile [math] (libm-support): Remove t_sqrt.
        * sysdeps/powerpc/e_sqrt.c: Moved to...
        * sysdeps/powerpc/fpu/e_sqrt.c: ...here.
        * sysdeps/powerpc/e_sqrtf.c: Moved to...
        * sysdeps/powerpc/fpu/e_sqrtf.c: ...here.
        * sysdeps/powerpc/submul_1.S: Adjust asm syntax.
        * sysdeps/powerpc/sub_n.S: Likewise.
        * sysdeps/powerpc/strlen.S: Likewise.
        * sysdeps/powerpc/strcpy.S: Likewise.
        * sysdeps/powerpc/strcmp.S: Likewise.
        * sysdeps/powerpc/strchr.S: Likewise.
        * sysdeps/powerpc/stpcpy.S: Likewise.
        * sysdeps/powerpc/setjmp.S: Likewise.
        * sysdeps/powerpc/rshift.S: Likewise.
        * sysdeps/powerpc/ppc-mcount.S: Likewise.
        * sysdeps/powerpc/mul_1.S: Likewise.
        * sysdeps/powerpc/memset.S: Likewise.
        * sysdeps/powerpc/lshift.S: Likewise.
        * sysdeps/powerpc/dl-start.S: Likewise.
        * sysdeps/powerpc/bzero.S: Likewise.
        * sysdeps/powerpc/bsd-setjmp.S: Likewise.
        * sysdeps/powerpc/bsd-_setjmp.S: Likewise.
        * sysdeps/powerpc/addmul_1.S: Likewise.
        * sysdeps/powerpc/add_n.S: Likewise.
        * sysdeps/powerpc/__longjmp.S: Likewise.
        * sysdeps/powerpc/elf/start.S: Likewise.

1999-10-11  Cristian Gafton  <gafton@redhat.com>

        * sysdeps/unix/sysv/linux/alpha/bits/sigaction.h: Declare
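The "asm syntax" adjustment named throughout this log is the change visible in every hunk of the diff below: register operands lose their % prefix, presumably so the files assemble with tools that expect bare symbolic register names. A representative before/after pair, taken verbatim from the hunks:

    -	lwz %r5,0(%r3)
    +	lwz r5,0(r3)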
Diffstat (limited to 'sysdeps/powerpc/strcmp.S')
-rw-r--r--  sysdeps/powerpc/strcmp.S  94
1 file changed, 47 insertions, 47 deletions
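In the aligned loop of the patched code, r7 and r8 are built up to 0xfefefeff and 0x7f7f7f7f: the two constants of the classic word-at-a-time null-byte test, which checks a 32-bit word for a zero byte without touching the bytes individually. A minimal C sketch of that test (the helper name is ours, not glibc's):

    #include <stdint.h>

    /* Nonzero iff some byte of WORD is zero.  Mirrors the asm sequence
       add r0,r7,r5 / nor r9,r8,r5 / and. r0,r0,r9, where r7 holds
       0xfefefeff (i.e. -0x01010101) and r8 holds 0x7f7f7f7f.  */
    static uint32_t
    has_zero_byte (uint32_t word)
    {
      /* Subtracting 0x01010101 sets the high bit of any byte that was
         zero (or that borrowed past 0x80); masking with
         ~(word | 0x7f7f7f7f), i.e. ~word & 0x80808080, keeps only bytes
         whose high bit was clear in the original, so bits remain set
         exactly at the zero bytes.  */
      return (word + 0xfefefeffU) & ~(word | 0x7f7f7f7fU);
    }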
diff --git a/sysdeps/powerpc/strcmp.S b/sysdeps/powerpc/strcmp.S
index de8872963a..a4afead1b6 100644
--- a/sysdeps/powerpc/strcmp.S
+++ b/sysdeps/powerpc/strcmp.S
@@ -1,5 +1,5 @@
/* Optimized strcmp implementation for PowerPC.
- Copyright (C) 1997 Free Software Foundation, Inc.
+ Copyright (C) 1997, 1999 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
@@ -38,78 +38,78 @@ EALIGN(strcmp,4,0)
r10: low 2 bits of p2-p1
r11: mask to orc with r5/r6 */
- or %r0,%r4,%r3
- clrlwi. %r0,%r0,30
- lis %r7,0xfeff
+ or r0,r4,r3
+ clrlwi. r0,r0,30
+ lis r7,0xfeff
bne L(unaligned)
- lwz %r5,0(%r3)
- lwz %r6,0(%r4)
- lis %r8,0x7f7f
- addi %r7,%r7,-0x101
- addi %r8,%r8,0x7f7f
+ lwz r5,0(r3)
+ lwz r6,0(r4)
+ lis r8,0x7f7f
+ addi r7,r7,-0x101
+ addi r8,r8,0x7f7f
b 1f
-0: lwzu %r5,4(%r3)
- bne %cr1,L(different)
- lwzu %r6,4(%r4)
-1: add %r0,%r7,%r5
- nor %r9,%r8,%r5
- and. %r0,%r0,%r9
- cmpw %cr1,%r5,%r6
+0: lwzu r5,4(r3)
+ bne cr1,L(different)
+ lwzu r6,4(r4)
+1: add r0,r7,r5
+ nor r9,r8,r5
+ and. r0,r0,r9
+ cmpw cr1,r5,r6
beq+ 0b
L(endstring):
/* OK. We've hit the end of the string. We need to be careful that
we don't compare two strings as different because of gunk beyond
the end of the strings... */
- and %r0,%r8,%r5
- beq %cr1,L(equal)
- add %r0,%r0,%r8
- xor. %r10,%r5,%r6
- andc %r9,%r9,%r0
+ and r0,r8,r5
+ beq cr1,L(equal)
+ add r0,r0,r8
+ xor. r10,r5,r6
+ andc r9,r9,r0
blt- L(highbit)
- cntlzw %r10,%r10
- cntlzw %r9,%r9
- addi %r9,%r9,7
- cmpw %cr1,%r9,%r10
- sub %r3,%r5,%r6
- bgelr+ %cr1
+ cntlzw r10,r10
+ cntlzw r9,r9
+ addi r9,r9,7
+ cmpw cr1,r9,r10
+ sub r3,r5,r6
+ bgelr+ cr1
L(equal):
- li %r3,0
+ li r3,0
blr
L(different):
- lwz %r5,-4(%r3)
- xor. %r10,%r5,%r6
- sub %r3,%r5,%r6
+ lwz r5,-4(r3)
+ xor. r10,r5,r6
+ sub r3,r5,r6
bgelr+
L(highbit):
- ori %r3,%r6,1
+ ori r3,r6,1
blr
/* Oh well. In this case, we just do a byte-by-byte comparison. */
.align 4
L(unaligned):
- lbz %r5,0(%r3)
- lbz %r6,0(%r4)
+ lbz r5,0(r3)
+ lbz r6,0(r4)
b 1f
-0: lbzu %r5,1(%r3)
+0: lbzu r5,1(r3)
bne- 4f
- lbzu %r6,1(%r4)
-1: cmpwi %cr1,%r5,0
- beq- %cr1,3f
- cmpw %r5,%r6
+ lbzu r6,1(r4)
+1: cmpwi cr1,r5,0
+ beq- cr1,3f
+ cmpw r5,r6
bne- 3f
- lbzu %r5,1(%r3)
- lbzu %r6,1(%r4)
- cmpwi %cr1,%r5,0
- cmpw %r5,%r6
- bne+ %cr1,0b
-3: sub %r3,%r5,%r6
+ lbzu r5,1(r3)
+ lbzu r6,1(r4)
+ cmpwi cr1,r5,0
+ cmpw r5,r6
+ bne+ cr1,0b
+3: sub r3,r5,r6
blr
-4: lbz %r5,-1(%r3)
- sub %r3,%r5,%r6
+4: lbz r5,-1(r3)
+ sub r3,r5,r6
blr
END(strcmp)
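Once the aligned loop finds a word (r5) containing the terminating NUL while cr1 records r5 != r6, L(endstring) must decide whether the strings genuinely differ or merely disagree in the gunk beyond the NUL, as the comment in the hunk warns. It does this by comparing the position of the first zero byte against the position of the first differing bit, via cntlzw. A C sketch of that decision, assuming big-endian words as on classic 32-bit PowerPC (the helper name is ours; the asm's L(highbit) branch, taken when the sign bit itself differs, is a special case not reproduced here):

    #include <stdint.h>

    /* Nonzero iff W1 and W2 differ at or before the first zero byte of
       W1.  Precondition: W1 contains a zero byte and W1 != W2, so both
       __builtin_clz arguments (standing in for cntlzw) are nonzero.  */
    static int
    differs_before_nul (uint32_t w1, uint32_t w2)
    {
      /* High bit set exactly at the zero bytes of w1: the asm's
         andc r9,r9,r0 with r9 = ~(w1 | 0x7f7f7f7f) and
         r0 = (w1 & 0x7f7f7f7f) + 0x7f7f7f7f.  */
      uint32_t zeros = ~(w1 | 0x7f7f7f7fU)
                       & ~((w1 & 0x7f7f7f7fU) + 0x7f7f7f7fU);
      uint32_t diff = w1 ^ w2;
      /* clz (zeros) + 7 is the position of the low bit of the first
         zero byte; any difference at or above it happened before the
         string ended and is therefore real.  */
      return __builtin_clz (zeros) + 7 >= __builtin_clz (diff);
    }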
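The L(unaligned) path is, as its comment says, a plain byte-by-byte comparison. In C it corresponds to the textbook loop below (illustrative only; like the asm's final sub r3,r5,r6, the result is the difference of the first mismatching bytes taken as unsigned):

    /* Byte-at-a-time strcmp matching the L(unaligned) loop: stop at the
       first mismatch or at the NUL terminating the first string.  */
    static int
    strcmp_bytewise (const char *p1, const char *p2)
    {
      unsigned char c1, c2;
      do
        {
          c1 = (unsigned char) *p1++;
          c2 = (unsigned char) *p2++;
        }
      while (c1 != 0 && c1 == c2);
      return c1 - c2;
    }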