-rw-r--r--  ChangeLog                       10
-rw-r--r--  string/test-strcmp.c            28
-rw-r--r--  sysdeps/sparc/sparc64/strcmp.S  31

3 files changed, 69 insertions(+), 0 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index 55724f6fef..581d243345 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,13 @@
+2014-05-01  David S. Miller  <davem@davemloft.net>
+
+ [BZ #16885]
+ * sysdeps/sparc/sparc64/strcmp.S: Fix end comparison handling when
+ multiple zero bytes exist at the end of a string.
+ Reported by Aurelien Jarno <aurelien@aurel32.net>.
+
+ * string/test-strcmp.c (check): Add explicit test for situations where
+ there are multiple zero bytes after the first.
+
2014-05-01  Andreas Schwab  <schwab@linux-m68k.org>

[BZ #16890]
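For context, the failure mode the ChangeLog entry describes can be sketched as
follows. This is a hypothetical reconstruction inferred from the test added
below, not a reproducer taken from the bug report: one string is a strict
prefix of the other, and several zero bytes follow the terminator inside the
same 8-byte word that the sparc64 routine loads at once. A conforming strcmp
must stop at the first NUL and return a negative value here.

#include <stdio.h>
#include <string.h>

int
main (void)
{
  /* Array initialization zero-fills the tails, so each terminator is
     followed by further zero bytes within the same aligned word.  */
  char s1[16] = "ab";
  char s2[16] = "abc";

  /* Expected: negative ("ab" is a strict prefix of "abc").  A strcmp
     influenced by bytes past the first NUL could get this wrong.  */
  printf ("strcmp = %d\n", strcmp (s1, s2));
  return 0;
}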
diff --git a/string/test-strcmp.c b/string/test-strcmp.c
index b395dc7fbe..fcd059f30a 100644
--- a/string/test-strcmp.c
+++ b/string/test-strcmp.c
@@ -329,6 +329,34 @@ check (void)
FOR_EACH_IMPL (impl, 0)
check_result (impl, s1 + i1, s2 + i2, exp_result);
}
+
+ /* Test cases where there are multiple zero bytes after the first. */
+
+ for (size_t i = 0; i < 16 + 1; i++)
+ {
+ s1[i] = 0x00;
+ s2[i] = 0x00;
+ }
+
+ for (size_t i = 0; i < 16; i++)
+ {
+ int exp_result;
+
+ for (int val = 0x01; val < 0x100; val++)
+ {
+ for (size_t j = 0; j < i; j++)
+ {
+ s1[j] = val;
+ s2[j] = val;
+ }
+
+ s2[i] = val;
+
+ exp_result = SIMPLE_STRCMP (s1, s2);
+ FOR_EACH_IMPL (impl, 0)
+ check_result (impl, s1, s2, exp_result);
+ }
+ }
}
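The pattern above can also be exercised outside the glibc test harness.
SIMPLE_STRCMP and FOR_EACH_IMPL belong to glibc's string-test skeleton (they
supply a byte-wise reference result and iterate over every strcmp
implementation under test); in the standalone sketch below, assumed stand-ins
are a hand-written reference and the system strcmp, compared by sign, since
strcmp only guarantees the sign of its result.

#include <stdio.h>
#include <string.h>

/* Byte-wise reference, playing the role of SIMPLE_STRCMP.  */
static int
ref_strcmp (const char *a, const char *b)
{
  while (*a != '\0' && *a == *b)
    {
      a++;
      b++;
    }
  return (unsigned char) *a - (unsigned char) *b;
}

static int
sign (int x)
{
  return (x > 0) - (x < 0);
}

int
main (void)
{
  char s1[17] = { 0 };
  char s2[17] = { 0 };
  int fails = 0;

  for (size_t i = 0; i < 16; i++)
    for (int val = 0x01; val < 0x100; val++)
      {
	/* Identical prefix of length i...  */
	for (size_t j = 0; j < i; j++)
	  {
	    s1[j] = val;
	    s2[j] = val;
	  }
	/* ...then s1 terminates while s2 has one more byte, followed by
	   the zero bytes left over from initialization.  */
	s2[i] = val;

	if (sign (strcmp (s1, s2)) != sign (ref_strcmp (s1, s2)))
	  {
	    printf ("mismatch: i=%zu val=0x%02x\n", i, (unsigned int) val);
	    fails++;
	  }
      }

  printf ("%d mismatches\n", fails);
  return fails != 0;
}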
diff --git a/sysdeps/sparc/sparc64/strcmp.S b/sysdeps/sparc/sparc64/strcmp.S
index 8925396ec6..f32b0fc779 100644
--- a/sysdeps/sparc/sparc64/strcmp.S
+++ b/sysdeps/sparc/sparc64/strcmp.S
@@ -121,6 +121,37 @@ ENTRY(strcmp)
movleu %xcc, -1, %o0
srlx rTMP1, 7, rTMP1
+ /* In order not to be influenced by bytes after the zero byte, we
+ * have to retain only the highest bit in the mask for the comparison
+ * with rSTRXOR to work properly.
+ */
+ mov 0, rTMP2
+ andcc rTMP1, 0x0100, %g0
+
+ movne %xcc, 8, rTMP2
+ sllx rTMP1, 63 - 16, %o1
+
+ movrlz %o1, 16, rTMP2
+ sllx rTMP1, 63 - 24, %o1
+
+ movrlz %o1, 24, rTMP2
+ sllx rTMP1, 63 - 32, %o1
+
+ movrlz %o1, 32, rTMP2
+ sllx rTMP1, 63 - 40, %o1
+
+ movrlz %o1, 40, rTMP2
+ sllx rTMP1, 63 - 48, %o1
+
+ movrlz %o1, 48, rTMP2
+ sllx rTMP1, 63 - 56, %o1
+
+ movrlz %o1, 56, rTMP2
+
+ srlx rTMP1, rTMP2, rTMP1
+
+ sllx rTMP1, rTMP2, rTMP1
+
cmp rTMP1, rSTRXOR
retl
movgu %xcc, 0, %o0
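The movrlz chain above is easier to follow in portable C. The sketch below is
an assumed reconstruction of the intent, not glibc code: after the earlier
srlx by 7, rTMP1 is taken to hold one indicator bit per byte (bit 8*k set when
byte k of the loaded word is zero), and because SPARC is big-endian the first
string byte occupies the most significant byte, so the first zero byte
corresponds to the highest set indicator bit. The chain records that bit's
position in rTMP2, and the srlx/sllx pair clears everything below it, so
additional zero bytes after the terminator can no longer perturb the cmp
against rSTRXOR.

#include <stdint.h>
#include <stdio.h>

/* mask has bit 8*k set when byte k of the loaded word is zero; the
   first zero byte of the string is the highest such bit.  */
static uint64_t
keep_first_zero_byte_bit (uint64_t mask)
{
  unsigned int shift = 0;

  /* The andcc/movne/movrlz chain: record the highest set indicator
     position among 8, 16, ..., 56 (0 remains if only bit 0 is set).  */
  for (unsigned int pos = 8; pos <= 56; pos += 8)
    if (mask & ((uint64_t) 1 << pos))
      shift = pos;

  /* The srlx/sllx pair: drop every bit below that position, leaving
     only the indicator of the first zero byte.  */
  return (mask >> shift) << shift;
}

int
main (void)
{
  /* Zero bytes at byte 1 (bit 48) and byte 5 (bit 16): only bit 48,
     the terminator encountered first, survives.  */
  printf ("%#llx\n",
	  (unsigned long long) keep_first_zero_byte_bit ((1ULL << 48)
							 | (1ULL << 16)));
  return 0;
}

With only the first zero byte's bit retained, the final cmp/movgu correctly
reports the strings equal whenever the first difference lies beyond the
terminator, which is exactly the multiple-trailing-NUL case from BZ #16885.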