Diffstat (limited to 'sysdeps/x86_64/multiarch')
-rw-r--r--  sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S     | 16
-rw-r--r--  sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S  |  2
-rw-r--r--  sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S    | 16
-rw-r--r--  sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S     | 18
-rw-r--r--  sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S   |  2
-rw-r--r--  sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S     | 18
6 files changed, 40 insertions(+), 32 deletions(-)
diff --git a/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
index 3a72c7eafd..44711c37ca 100644
--- a/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
@@ -1,9 +1,11 @@
-#define VEC_SIZE 32
-#define VEC(i) ymm##i
-#define VMOVU vmovdqu
-#define VMOVA vmovdqa
+#if IS_IN (libc)
+# define VEC_SIZE 32
+# define VEC(i) ymm##i
+# define VMOVU vmovdqu
+# define VMOVA vmovdqa
 
-#define SECTION(p) p##.avx
-#define MEMMOVE_SYMBOL(p,s) p##_avx_##s
+# define SECTION(p) p##.avx
+# define MEMMOVE_SYMBOL(p,s) p##_avx_##s
 
-#include "memmove-vec-unaligned-erms.S"
+# include "memmove-vec-unaligned-erms.S"
+#endif
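
Each of these small wrappers only chooses the vector width, the load/store instructions, and the section and symbol names, then pulls in the shared memmove-vec-unaligned-erms.S template; with the new #if IS_IN (libc) guard the whole file assembles to nothing when built for anything other than libc proper (for instance ld.so), presumably because these multiarch variants are only referenced from within libc. A minimal C sketch of the same guard-and-parameterize-then-include idea follows; it is illustrative only, with the template body inlined so it compiles stand-alone, and IN_LIBC, VEC_BYTES and COPY_SYMBOL are made-up stand-ins for IS_IN (libc), VEC_SIZE and MEMMOVE_SYMBOL.

/* Illustrative sketch, not glibc source.  */
#include <stddef.h>
#include <string.h>

#define IN_LIBC 1                      /* stand-in for IS_IN (libc) */

#if IN_LIBC                            /* analogue of: #if IS_IN (libc) */
# define VEC_BYTES 32                  /* analogue of: # define VEC_SIZE 32 */
# define COPY_SYMBOL(s) copy_avx_##s   /* analogue of MEMMOVE_SYMBOL(p,s) */

/* Stands in for '# include "memmove-vec-unaligned-erms.S"': the shared
   body picks up whatever parameters were defined above it.  */
void COPY_SYMBOL (unaligned) (char *dst, const char *src, size_t n)
{
  memcpy (dst, src, n);   /* the real template copies VEC_BYTES at a time */
}
#endif  /* with IN_LIBC set to 0, this file would emit no code at all */
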
diff --git a/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
index 38358fa37c..c2c52937bf 100644
--- a/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
@@ -1,4 +1,4 @@
-#ifdef HAVE_AVX512_ASM_SUPPORT
+#if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc)
# define VEC_SIZE 64
# define VEC(i) zmm##i
# define VMOVU vmovdqu64
diff --git a/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
index 52b9ae08fc..85214fe725 100644
--- a/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
@@ -1,9 +1,11 @@
-#define VEC_SIZE 16
-#define VEC(i) xmm##i
-#define VMOVU movdqu
-#define VMOVA movdqa
+#if IS_IN (libc)
+# define VEC_SIZE 16
+# define VEC(i) xmm##i
+# define VMOVU movdqu
+# define VMOVA movdqa
 
-#define SECTION(p) p
-#define MEMMOVE_SYMBOL(p,s) p##_sse2_##s
+# define SECTION(p) p
+# define MEMMOVE_SYMBOL(p,s) p##_sse2_##s
 
-#include "memmove-vec-unaligned-erms.S"
+# include "memmove-vec-unaligned-erms.S"
+#endif
diff --git a/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
index e0dc56512e..79975e0825 100644
--- a/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
@@ -1,14 +1,16 @@
-#define VEC_SIZE 32
-#define VEC(i) ymm##i
-#define VMOVU vmovdqu
-#define VMOVA vmovdqa
+#if IS_IN (libc)
+# define VEC_SIZE 32
+# define VEC(i) ymm##i
+# define VMOVU vmovdqu
+# define VMOVA vmovdqa
 
-#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
+# define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
vmovd d, %xmm0; \
movq r, %rax; \
vpbroadcastb %xmm0, %ymm0
 
-#define SECTION(p) p##.avx
-#define MEMSET_SYMBOL(p,s) p##_avx2_##s
+# define SECTION(p) p##.avx
+# define MEMSET_SYMBOL(p,s) p##_avx2_##s
 
-#include "memset-vec-unaligned-erms.S"
+# include "memset-vec-unaligned-erms.S"
+#endif
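
In the hunk above, VDUP_TO_VEC0_AND_SET_RETURN does two jobs before the shared template runs: movq r, %rax stashes the destination pointer as memset's return value, and vmovd plus vpbroadcastb copies the fill byte from a general register into all 32 byte lanes of %ymm0. A rough C intrinsics sketch of that broadcast-and-store idea is below; it ignores the alignment handling and rep stosb paths of the real memset-vec-unaligned-erms.S template, memset_avx2_sketch is a made-up name, and it needs AVX2 (e.g. -mavx2).

/* Illustrative sketch, not glibc source.  */
#include <immintrin.h>
#include <stddef.h>

void *memset_avx2_sketch (void *dst, int c, size_t n)
{
  __m256i fill = _mm256_set1_epi8 ((char) c);   /* roughly vmovd + vpbroadcastb */
  unsigned char *p = dst;
  size_t i = 0;
  for (; i + 32 <= n; i += 32)                  /* 32 == VEC_SIZE */
    _mm256_storeu_si256 ((__m256i *) (p + i), fill);   /* VMOVU, i.e. vmovdqu */
  for (; i < n; i++)                            /* byte tail, kept simple */
    p[i] = (unsigned char) c;
  return dst;                                   /* movq r, %rax: return dst */
}
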
diff --git a/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
index 72f4095831..f1b3cb23d3 100644
--- a/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
@@ -1,4 +1,4 @@
-#ifdef HAVE_AVX512_ASM_SUPPORT
+#if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc)
# define VEC_SIZE 64
# define VEC(i) zmm##i
# define VMOVU vmovdqu64
diff --git a/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
index 437a858dab..2deba42c16 100644
--- a/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
@@ -1,16 +1,18 @@
-#define VEC_SIZE 16
-#define VEC(i) xmm##i
-#define VMOVU movdqu
-#define VMOVA movdqa
+#if IS_IN (libc)
+# define VEC_SIZE 16
+# define VEC(i) xmm##i
+# define VMOVU movdqu
+# define VMOVA movdqa
 
-#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
+# define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
movd d, %xmm0; \
movq r, %rax; \
punpcklbw %xmm0, %xmm0; \
punpcklwd %xmm0, %xmm0; \
pshufd $0, %xmm0, %xmm0
 
-#define SECTION(p) p
-#define MEMSET_SYMBOL(p,s) p##_sse2_##s
+# define SECTION(p) p
+# define MEMSET_SYMBOL(p,s) p##_sse2_##s
 
-#include "memset-vec-unaligned-erms.S"
+# include "memset-vec-unaligned-erms.S"
+#endif
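
SSE2 has no single byte-broadcast instruction, so the sequence above widens the fill byte in steps: punpcklbw %xmm0, %xmm0 duplicates the low byte into a 16-bit word, punpcklwd %xmm0, %xmm0 duplicates that into a 32-bit dword, and pshufd $0, %xmm0, %xmm0 copies that dword into all four lanes, leaving the byte in every position of %xmm0. The same three steps written with SSE2 intrinsics, as an illustration only (broadcast_byte_sse2 is a made-up name; _mm_set1_epi8 would achieve the same result in one call):

/* Illustrative sketch, not glibc source.  */
#include <emmintrin.h>

__m128i broadcast_byte_sse2 (int c)
{
  __m128i v = _mm_cvtsi32_si128 (c & 0xff);   /* movd d, %xmm0 */
  v = _mm_unpacklo_epi8 (v, v);               /* punpcklbw %xmm0, %xmm0 */
  v = _mm_unpacklo_epi16 (v, v);              /* punpcklwd %xmm0, %xmm0 */
  return _mm_shuffle_epi32 (v, 0);            /* pshufd $0, %xmm0, %xmm0 */
}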