author     H.J. Lu <hjl.tools@gmail.com>	2020-06-11 12:41:18 -0700
committer  H.J. Lu <hjl.tools@gmail.com>	2020-06-17 05:44:02 -0700
commit     a35a59036ebae3efcdf5e8167610e0656fca9770 (patch)
tree       cc68ab38ace7dfb2efc80990dc2903e3812f5b4b /sysdeps/x86_64/multiarch
parent     b7c9bb183b799b10c09ec32e98d1843546ea4324 (diff)
x86_64: Use %xmmN with vpxor to clear a vector register
Since "vpxor %xmmN, %xmmN, %xmmN" clears the whole vector register, use
%xmmN, instead of %ymmN, with vpxor to clear a vector register.
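For reference, this is the zeroing idiom in question (a minimal sketch, not part of the patch itself): under the VEX encoding rules, an instruction that writes an XMM register also zeroes bits 255:128 of the containing YMM register, so both forms below leave all 256 bits of %ymm7 equal to zero.

	vpxor	%ymm7, %ymm7, %ymm7	/* 256-bit form: zeroes %ymm7 */
	vpxor	%xmm7, %xmm7, %xmm7	/* 128-bit form: also zeroes all 256 bits of %ymm7 */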
Diffstat (limited to 'sysdeps/x86_64/multiarch')
-rw-r--r--  sysdeps/x86_64/multiarch/strcmp-avx2.S  | 4
-rw-r--r--  sysdeps/x86_64/multiarch/strrchr-avx2.S | 2
2 files changed, 3 insertions, 3 deletions
diff --git a/sysdeps/x86_64/multiarch/strcmp-avx2.S b/sysdeps/x86_64/multiarch/strcmp-avx2.S
index 48d03a9f46..5f88a68262 100644
--- a/sysdeps/x86_64/multiarch/strcmp-avx2.S
+++ b/sysdeps/x86_64/multiarch/strcmp-avx2.S
@@ -91,8 +91,8 @@ ENTRY (STRCMP)
 # endif
 	movl	%edi, %eax
 	xorl	%edx, %edx
-	/* Make %ymm7 all zeros in this function. */
-	vpxor	%ymm7, %ymm7, %ymm7
+	/* Make %xmm7 (%ymm7) all zeros in this function. */
+	vpxor	%xmm7, %xmm7, %xmm7
 	orl	%esi, %eax
 	andl	$(PAGE_SIZE - 1), %eax
 	cmpl	$(PAGE_SIZE - (VEC_SIZE * 4)), %eax
diff --git a/sysdeps/x86_64/multiarch/strrchr-avx2.S b/sysdeps/x86_64/multiarch/strrchr-avx2.S
index 23077b4c45..146bdd51d0 100644
--- a/sysdeps/x86_64/multiarch/strrchr-avx2.S
+++ b/sysdeps/x86_64/multiarch/strrchr-avx2.S
@@ -44,7 +44,7 @@ ENTRY (STRRCHR)
 	movl	%edi, %ecx
 	/* Broadcast CHAR to YMM4. */
 	VPBROADCAST	%xmm4, %ymm4
-	vpxor	%ymm0, %ymm0, %ymm0
+	vpxor	%xmm0, %xmm0, %xmm0
 	/* Check if we may cross page boundary with one vector load. */
 	andl	$(2 * VEC_SIZE - 1), %ecx
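A hypothetical standalone check of the same property (not part of glibc; assumes Linux x86-64, an AVX-capable CPU, and a GNU toolchain): it exits with status 0 only if the 128-bit vpxor really left all 256 bits of %ymm0 zero.

	/* Build with, e.g.: gcc -nostdlib vpxor-test.S -o vpxor-test */
	.text
	.globl	_start
_start:
	vpcmpeqd %ymm0, %ymm0, %ymm0	/* set all 256 bits of %ymm0 to ones */
	vpxor	%xmm0, %xmm0, %xmm0	/* 128-bit zeroing idiom under test */
	vptest	%ymm0, %ymm0		/* ZF=1 iff %ymm0 is all zero */
	setnz	%dil			/* exit status: 0 on success, 1 otherwise */
	movzbl	%dil, %edi
	movl	$60, %eax		/* SYS_exit */
	syscall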