Diffstat (limited to 'sysdeps')
-rw-r--r--  sysdeps/powerpc/powerpc64/power8/strlen.S  | 11
-rw-r--r--  sysdeps/powerpc/powerpc64/power8/strnlen.S | 10
2 files changed, 1 insertion(+), 20 deletions(-)
diff --git a/sysdeps/powerpc/powerpc64/power8/strlen.S b/sysdeps/powerpc/powerpc64/power8/strlen.S
index 8fdb6f5cc1..5691d1d93a 100644
--- a/sysdeps/powerpc/powerpc64/power8/strlen.S
+++ b/sysdeps/powerpc/powerpc64/power8/strlen.S
@@ -144,17 +144,6 @@ L(align64):
or r5,r10,r11
cmpdi cr7,r5,0
addi r9,r9,16
- bne cr7,L(dword_zero)
-
- andi. r10,r9,63
- beq cr0,L(preloop)
- ld r6,8(r4)
- ldu r5,16(r4)
- cmpb r10,r6,r0
- cmpb r11,r5,r0
- or r5,r10,r11
- cmpdi cr7,r5,0
- addi r9,r9,16
/* At this point, we are necessarily 64-byte aligned. If no zeroes were
found, jump to the vectorized loop. */
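
A minimal C sketch of the overall strategy the comment above describes, for illustration only: it is not the glibc implementation, and the function name and the word-at-a-time zero test are assumptions of this sketch. It advances until the pointer is 64-byte aligned while checking for the terminator (the assembly does this 16 bytes at a time with cmpb/vcmpequb), then switches to a loop that examines one 64-byte block per iteration.

    #include <stddef.h>
    #include <stdint.h>

    size_t
    strlen_sketch (const char *s)   /* hypothetical name, not a glibc symbol */
    {
      const char *p = s;

      /* Advance byte by byte until 64-byte aligned, checking for the
         terminating null on the way.  */
      while (((uintptr_t) p & 63) != 0)
        {
          if (*p == '\0')
            return (size_t) (p - s);
          p++;
        }

      /* At this point we are necessarily 64-byte aligned: inspect one
         64-byte block per iteration.  The real code uses vector compares;
         a plain 8-byte word test stands in for them here.  Reading whole
         aligned words may look past the terminator, but stays inside the
         current 64-byte block.  */
      for (;;)
        {
          const uint64_t *w = (const uint64_t *) p;
          for (int i = 0; i < 8; i++)        /* 8 x 8 bytes = 64 bytes  */
            {
              uint64_t v = w[i];
              /* Classic "has a zero byte" bit trick on one 8-byte word.  */
              if ((v - 0x0101010101010101ULL) & ~v & 0x8080808080808080ULL)
                {
                  const char *q = (const char *) &w[i];
                  while (*q != '\0')
                    q++;
                  return (size_t) (q - s);
                }
            }
          p += 64;
        }
    }

The deleted lines above were a third copy of the 16-byte alignment step; the remaining copies already guarantee the 64-byte alignment that the vectorized loop relies on, which is why the block could be dropped without changing behaviour.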
diff --git a/sysdeps/powerpc/powerpc64/power8/strnlen.S b/sysdeps/powerpc/powerpc64/power8/strnlen.S
index 07608ffa26..6d669d4a54 100644
--- a/sysdeps/powerpc/powerpc64/power8/strnlen.S
+++ b/sysdeps/powerpc/powerpc64/power8/strnlen.S
@@ -141,15 +141,7 @@ ENTRY_TOCLESS (__strnlen)
addi r4,r4,-16 /* Decrement maxlen in 16 bytes. */
bne cr6,L(found_aligning64B) /* If found null bytes. */
- /* Unroll 3x above code block until aligned or find null bytes. */
- andi. r7,r5,63
- beq cr0,L(preloop_64B)
- lvx v1,r5,r6
- vcmpequb. v1,v1,v0
- addi r5,r5,16
- addi r4,r4,-16
- bne cr6,L(found_aligning64B)
-
+ /* Unroll 2x above code block until aligned or find null bytes. */
andi. r7,r5,63
beq cr0,L(preloop_64B)
lvx v1,r5,r6