author     H.J. Lu <hjl.tools@gmail.com>    2019-02-01 12:21:41 -0800
committer  H.J. Lu <hjl.tools@gmail.com>    2019-02-02 06:23:58 -0800
commit     cfea023b20a40e959300dfbe9be28e5d235b59a3
tree       30ec553026dbba99e5fde96265e61385bd3ba80b
parent     6f9ce82c1ec1390aa5b21f8b30bde718c2ba98fb
x86-64 memset/wmemset: Properly handle the length parameter [BZ #24097]
On x32, the size_t parameter may be passed in the lower 32 bits of a
64-bit register with non-zero upper 32 bits.  The string/memory
functions written in assembly can only use the lower 32 bits of a
64-bit register as the length, or must clear the upper 32 bits before
using the full 64-bit register as the length.

This patch fixes memset/wmemset for x32.  Tested on x86-64 and x32.  On
x86-64, libc.so is the same with and without the fix.
[BZ #24097]
CVE-2019-6488
* sysdeps/x86_64/multiarch/memset-avx512-no-vzeroupper.S: Use
RDX_LP for length. Clear the upper 32 bits of RDX register.
* sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S: Likewise.
* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memset.
* sysdeps/x86_64/x32/tst-size_t-memset.c: New file.
(cherry picked from commit 82d0b4a4d76db554eb6757acb790fcea30b19965)
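
The fix relies on a basic x86-64 rule: any write to a 32-bit register
zero-extends into the containing 64-bit register, so a bare
"mov %edx, %edx" is enough to discard whatever the caller left in the
upper half of RDX.  The following standalone sketch (not part of the
patch; it assumes GCC/Clang extended inline asm on an x86-64 or x32
target) demonstrates that behavior:

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  /* Pretend RDX arrived with the real length (0x10) in its low 32 bits
     and garbage above it.  */
  uint64_t reg = 0xdeadbeef00000010ULL;

  /* "%k0" names the 32-bit alias of the operand's register; writing it
     clears bits 63:32, which is what "mov %edx, %edx" does in the patch.  */
  asm ("mov %k0, %k0" : "+r" (reg));

  printf ("0x%llx\n", (unsigned long long) reg);  /* prints 0x10 */
  return 0;
}

A correct x32 memset performs the same truncation internally before it
ever uses the length.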
-rw-r--r--  ChangeLog                                                | 10
-rw-r--r--  sysdeps/x86_64/multiarch/memset-avx512-no-vzeroupper.S  |  6
-rw-r--r--  sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S    | 26
-rw-r--r--  sysdeps/x86_64/x32/Makefile                             |  2
-rw-r--r--  sysdeps/x86_64/x32/tst-size_t-memset.c                  | 73
5 files changed, 106 insertions, 11 deletions
diff --git a/ChangeLog b/ChangeLog
--- a/ChangeLog
+++ b/ChangeLog
@@ -2,6 +2,16 @@
 
 	[BZ #24097]
 	CVE-2019-6488
+	* sysdeps/x86_64/multiarch/memset-avx512-no-vzeroupper.S: Use
+	RDX_LP for length.  Clear the upper 32 bits of RDX register.
+	* sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S: Likewise.
+	* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memset.
+	* sysdeps/x86_64/x32/tst-size_t-memset.c: New file.
+
+2019-02-01  H.J. Lu  <hongjiu.lu@intel.com>
+
+	[BZ #24097]
+	CVE-2019-6488
 	* sysdeps/x86_64/memrchr.S: Use RDX_LP for length.
 	* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memrchr.
 	* sysdeps/x86_64/x32/tst-size_t-memrchr.c: New file.
diff --git a/sysdeps/x86_64/multiarch/memset-avx512-no-vzeroupper.S b/sysdeps/x86_64/multiarch/memset-avx512-no-vzeroupper.S
index 9687df0d3e..a7cb9d631b 100644
--- a/sysdeps/x86_64/multiarch/memset-avx512-no-vzeroupper.S
+++ b/sysdeps/x86_64/multiarch/memset-avx512-no-vzeroupper.S
@@ -29,12 +29,16 @@
 	.section .text.avx512,"ax",@progbits
 #if defined PIC
 ENTRY (MEMSET_CHK)
-	cmpq	%rdx, %rcx
+	cmp	%RDX_LP, %RCX_LP
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
 END (MEMSET_CHK)
 #endif
 
 ENTRY (MEMSET)
+# ifdef __ILP32__
+	/* Clear the upper 32 bits.  */
+	mov	%edx, %edx
+# endif
 	vpxor	%xmm0, %xmm0, %xmm0
 	vmovd	%esi, %xmm1
 	lea	(%rdi, %rdx), %rsi
diff --git a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
index acf448c9a6..5e375f288b 100644
--- a/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
@@ -71,8 +71,8 @@
 	.section SECTION(.text),"ax",@progbits
 #if VEC_SIZE == 16 && IS_IN (libc)
 ENTRY (__bzero)
-	movq	%rdi, %rax /* Set return value.  */
-	movq	%rsi, %rdx /* Set n.  */
+	mov	%RDI_LP, %RAX_LP /* Set return value.  */
+	mov	%RSI_LP, %RDX_LP /* Set n.  */
 	pxor	%xmm0, %xmm0
 	jmp	L(entry_from_bzero)
 END (__bzero)
@@ -81,7 +81,7 @@ weak_alias (__bzero, bzero)
 
 #if defined SHARED && IS_IN (libc)
 ENTRY_CHK (MEMSET_CHK_SYMBOL (__memset_chk, unaligned))
-	cmpq	%rdx, %rcx
+	cmp	%RDX_LP, %RCX_LP
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
 END_CHK (MEMSET_CHK_SYMBOL (__memset_chk, unaligned))
 #endif
@@ -89,6 +89,10 @@ END_CHK (MEMSET_CHK_SYMBOL (__memset_chk, unaligned))
 ENTRY (MEMSET_SYMBOL (__memset, unaligned))
 L(memset_entry):
 	VDUP_TO_VEC0_AND_SET_RETURN (%esi, %rdi)
+# ifdef __ILP32__
+	/* Clear the upper 32 bits.  */
+	mov	%edx, %edx
+# endif
 L(entry_from_bzero):
 	cmpq	$VEC_SIZE, %rdx
 	jb	L(less_vec)
@@ -112,11 +116,11 @@ ENTRY (MEMSET_SYMBOL (__memset, erms))
 L(stosb):
 	/* Issue vzeroupper before rep stosb.  */
 	VZEROUPPER
-	movq	%rdx, %rcx
+	mov	%RDX_LP, %RCX_LP
 	movzbl	%sil, %eax
-	movq	%rdi, %rdx
+	mov	%RDI_LP, %RDX_LP
 	rep stosb
-	movq	%rdx, %rax
+	mov	%RDX_LP, %RAX_LP
 	ret
 # if VEC_SIZE == 16
 END (__memset_erms)
@@ -126,16 +130,20 @@ END (MEMSET_SYMBOL (__memset, erms))
 
 # if defined SHARED && IS_IN (libc)
 ENTRY_CHK (MEMSET_CHK_SYMBOL (__memset_chk, unaligned_erms))
-	cmpq	%rdx, %rcx
+	cmp	%RDX_LP, %RCX_LP
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
 END_CHK (MEMSET_CHK_SYMBOL (__memset_chk, unaligned_erms))
 # endif
 
 ENTRY (MEMSET_SYMBOL (__memset, unaligned_erms))
 	VDUP_TO_VEC0_AND_SET_RETURN (%esi, %rdi)
-	cmpq	$VEC_SIZE, %rdx
+# ifdef __ILP32__
+	/* Clear the upper 32 bits.  */
+	mov	%edx, %edx
+# endif
+	cmp	$VEC_SIZE, %RDX_LP
 	jb	L(less_vec)
-	cmpq	$(VEC_SIZE * 2), %rdx
+	cmp	$(VEC_SIZE * 2), %RDX_LP
 	ja	L(stosb_more_2x_vec)
 	/* From VEC and to 2 * VEC.  No branch when size == VEC_SIZE.  */
 	VMOVU	%VEC(0), -VEC_SIZE(%rdi,%rdx)
diff --git a/sysdeps/x86_64/x32/Makefile b/sysdeps/x86_64/x32/Makefile
index e02bf2bfbb..abe6ca0670 100644
--- a/sysdeps/x86_64/x32/Makefile
+++ b/sysdeps/x86_64/x32/Makefile
@@ -7,7 +7,7 @@ endif
 
 ifeq ($(subdir),string)
 tests += tst-size_t-memchr tst-size_t-memcmp tst-size_t-memcpy \
-	 tst-size_t-memrchr
+	 tst-size_t-memrchr tst-size_t-memset
 endif
 
 ifeq ($(subdir),wcsmbs)
diff --git a/sysdeps/x86_64/x32/tst-size_t-memset.c b/sysdeps/x86_64/x32/tst-size_t-memset.c
new file mode 100644
index 0000000000..95f671f186
--- /dev/null
+++ b/sysdeps/x86_64/x32/tst-size_t-memset.c
@@ -0,0 +1,73 @@
+/* Test memset with size_t in the lower 32 bits of 64-bit register.
+   Copyright (C) 2019 Free Software Foundation, Inc.
+   This file is part of the GNU C Library.
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with the GNU C Library; if not, see
+   <http://www.gnu.org/licenses/>.  */
+
+#ifdef WIDE
+# define TEST_NAME "wmemset"
+#else
+# define TEST_NAME "memset"
+#endif /* WIDE */
+
+#include "test-size_t.h"
+
+#ifdef WIDE
+# include <wchar.h>
+# define MEMSET wmemset
+# define CHAR wchar_t
+#else
+# define MEMSET memset
+# define CHAR char
+#endif /* WIDE */
+
+IMPL (MEMSET, 1)
+
+typedef CHAR *(*proto_t) (CHAR *, int, size_t);
+
+static void *
+__attribute__ ((noinline, noclone))
+do_memset (parameter_t a, parameter_t b)
+{
+  return CALL (&b, a.p, (uintptr_t) b.p, a.len);
+}
+
+static int
+test_main (void)
+{
+  test_init ();
+
+  CHAR ch = 0x23;
+  parameter_t src = { { page_size / sizeof (CHAR) }, buf2 };
+  parameter_t c = { { 0 }, (void *) (uintptr_t) ch };
+
+  int ret = 0;
+  FOR_EACH_IMPL (impl, 0)
+    {
+      c.fn = impl->fn;
+      CHAR *p = (CHAR *) do_memset (src, c);
+      size_t i;
+      for (i = 0; i < src.len; i++)
+	if (p[i] != ch)
+	  {
+	    error (0, 0, "Wrong result in function %s", impl->name);
+	    ret = 1;
+	  }
+    }
+
+  return ret ? EXIT_FAILURE : EXIT_SUCCESS;
+}
+
+#include <test-skeleton.c>
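
For context on what the new tst-size_t-memset.c exercises, here is a
standalone sketch of the same idea.  It is hypothetical and simplified:
the real test goes through glibc's test-size_t.h harness, and the
scenario is only meaningful when the program is built for the x32 ABI
(e.g. with gcc -mx32).  Calling memset through a deliberately
mismatched function-pointer type forces the caller to load a full
64-bit value, garbage upper half included, into the length register:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Deliberately mismatched prototype: the length is a full 64-bit word
   even though size_t is 32 bits on x32, so the caller fills all of RDX.  */
typedef void *(*memset_u64_fn) (void *, int, uint64_t);

int
main (void)
{
  char buf[32];

  /* Real length in the low 32 bits, garbage in the high 32 bits.  */
  uint64_t dirty_len = (0xabcdef01ULL << 32) | sizeof (buf);

  memset_u64_fn fn = (memset_u64_fn) memset;
  fn (buf, 0x23, dirty_len);

  for (size_t i = 0; i < sizeof (buf); i++)
    if (buf[i] != 0x23)
      {
	puts ("FAIL");
	return 1;
      }

  puts ("PASS");
  return 0;
}

A memset that honors only the low 32 bits of the length prints PASS; one
that consumes the full 64-bit register attempts an enormous store and
crashes, which is how the bug shows up on x32.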