about | summary | refs | log | tree | commit | diff
path: root/sysdeps/x86
diff options
context:
space:
mode:
Diffstat (limited to 'sysdeps/x86')
-rw-r--r--  sysdeps/x86/cpu-features.c  |  80
-rw-r--r--  sysdeps/x86/cpu-features.h  | 222
2 files changed, 155 insertions(+), 147 deletions(-)
diff --git a/sysdeps/x86/cpu-features.c b/sysdeps/x86/cpu-features.c
index 218ff2bd86..1787716220 100644
--- a/sysdeps/x86/cpu-features.c
+++ b/sysdeps/x86/cpu-features.c
@@ -75,13 +75,14 @@ init_cpu_features (struct cpu_features *cpu_features)
case 0x1c:
case 0x26:
/* BSF is slow on Atom. */
- cpu_features->feature[index_Slow_BSF] |= bit_Slow_BSF;
+ cpu_features->feature[index_arch_Slow_BSF]
+ |= bit_arch_Slow_BSF;
break;
case 0x57:
/* Knights Landing. Enable Silvermont optimizations. */
- cpu_features->feature[index_Prefer_No_VZEROUPPER]
- |= bit_Prefer_No_VZEROUPPER;
+ cpu_features->feature[index_arch_Prefer_No_VZEROUPPER]
+ |= bit_arch_Prefer_No_VZEROUPPER;
case 0x37:
case 0x4a:
@@ -90,22 +91,22 @@ init_cpu_features (struct cpu_features *cpu_features)
case 0x5d:
/* Unaligned load versions are faster than SSSE3
on Silvermont. */
-#if index_Fast_Unaligned_Load != index_Prefer_PMINUB_for_stringop
-# error index_Fast_Unaligned_Load != index_Prefer_PMINUB_for_stringop
+#if index_arch_Fast_Unaligned_Load != index_arch_Prefer_PMINUB_for_stringop
+# error index_arch_Fast_Unaligned_Load != index_arch_Prefer_PMINUB_for_stringop
#endif
-#if index_Fast_Unaligned_Load != index_Slow_SSE4_2
-# error index_Fast_Unaligned_Load != index_Slow_SSE4_2
+#if index_arch_Fast_Unaligned_Load != index_arch_Slow_SSE4_2
+# error index_arch_Fast_Unaligned_Load != index_arch_Slow_SSE4_2
#endif
- cpu_features->feature[index_Fast_Unaligned_Load]
- |= (bit_Fast_Unaligned_Load
- | bit_Prefer_PMINUB_for_stringop
- | bit_Slow_SSE4_2);
+ cpu_features->feature[index_arch_Fast_Unaligned_Load]
+ |= (bit_arch_Fast_Unaligned_Load
+ | bit_arch_Prefer_PMINUB_for_stringop
+ | bit_arch_Slow_SSE4_2);
break;
default:
/* Unknown family 0x06 processors. Assuming this is one
of Core i3/i5/i7 processors if AVX is available. */
- if ((ecx & bit_AVX) == 0)
+ if ((ecx & bit_cpu_AVX) == 0)
break;
case 0x1a:
@@ -117,20 +118,20 @@ init_cpu_features (struct cpu_features *cpu_features)
case 0x2f:
/* Rep string instructions, copy backward, unaligned loads
and pminub are fast on Intel Core i3, i5 and i7. */
-#if index_Fast_Rep_String != index_Fast_Copy_Backward
-# error index_Fast_Rep_String != index_Fast_Copy_Backward
+#if index_arch_Fast_Rep_String != index_arch_Fast_Copy_Backward
+# error index_arch_Fast_Rep_String != index_arch_Fast_Copy_Backward
#endif
-#if index_Fast_Rep_String != index_Fast_Unaligned_Load
-# error index_Fast_Rep_String != index_Fast_Unaligned_Load
+#if index_arch_Fast_Rep_String != index_arch_Fast_Unaligned_Load
+# error index_arch_Fast_Rep_String != index_arch_Fast_Unaligned_Load
#endif
-#if index_Fast_Rep_String != index_Prefer_PMINUB_for_stringop
-# error index_Fast_Rep_String != index_Prefer_PMINUB_for_stringop
+#if index_arch_Fast_Rep_String != index_arch_Prefer_PMINUB_for_stringop
+# error index_arch_Fast_Rep_String != index_arch_Prefer_PMINUB_for_stringop
#endif
- cpu_features->feature[index_Fast_Rep_String]
- |= (bit_Fast_Rep_String
- | bit_Fast_Copy_Backward
- | bit_Fast_Unaligned_Load
- | bit_Prefer_PMINUB_for_stringop);
+ cpu_features->feature[index_arch_Fast_Rep_String]
+ |= (bit_arch_Fast_Rep_String
+ | bit_arch_Fast_Copy_Backward
+ | bit_arch_Fast_Unaligned_Load
+ | bit_arch_Prefer_PMINUB_for_stringop);
break;
}
}
@@ -159,8 +160,8 @@ init_cpu_features (struct cpu_features *cpu_features)
{
/* "Excavator" */
if (model >= 0x60 && model <= 0x7f)
- cpu_features->feature[index_Fast_Unaligned_Load]
- |= bit_Fast_Unaligned_Load;
+ cpu_features->feature[index_arch_Fast_Unaligned_Load]
+ |= bit_arch_Fast_Unaligned_Load;
}
}
else
@@ -168,11 +169,11 @@ init_cpu_features (struct cpu_features *cpu_features)
/* Support i586 if CX8 is available. */
if (HAS_CPU_FEATURE (CX8))
- cpu_features->feature[index_I586] |= bit_I586;
+ cpu_features->feature[index_arch_I586] |= bit_arch_I586;
/* Support i686 if CMOV is available. */
if (HAS_CPU_FEATURE (CMOV))
- cpu_features->feature[index_I686] |= bit_I686;
+ cpu_features->feature[index_arch_I686] |= bit_arch_I686;
if (cpu_features->max_cpuid >= 7)
__cpuid_count (7, 0,
@@ -193,15 +194,16 @@ init_cpu_features (struct cpu_features *cpu_features)
{
/* Determine if AVX is usable. */
if (HAS_CPU_FEATURE (AVX))
- cpu_features->feature[index_AVX_Usable] |= bit_AVX_Usable;
-#if index_AVX2_Usable != index_AVX_Fast_Unaligned_Load
-# error index_AVX2_Usable != index_AVX_Fast_Unaligned_Load
+ cpu_features->feature[index_arch_AVX_Usable]
+ |= bit_arch_AVX_Usable;
+#if index_arch_AVX2_Usable != index_arch_AVX_Fast_Unaligned_Load
+# error index_arch_AVX2_Usable != index_arch_AVX_Fast_Unaligned_Load
#endif
/* Determine if AVX2 is usable. Unaligned load with 256-bit
AVX registers are faster on processors with AVX2. */
if (HAS_CPU_FEATURE (AVX2))
- cpu_features->feature[index_AVX2_Usable]
- |= bit_AVX2_Usable | bit_AVX_Fast_Unaligned_Load;
+ cpu_features->feature[index_arch_AVX2_Usable]
+ |= bit_arch_AVX2_Usable | bit_arch_AVX_Fast_Unaligned_Load;
/* Check if OPMASK state, upper 256-bit of ZMM0-ZMM15 and
ZMM16-ZMM31 state are enabled. */
if ((xcrlow & (bit_Opmask_state | bit_ZMM0_15_state
@@ -211,20 +213,22 @@ init_cpu_features (struct cpu_features *cpu_features)
/* Determine if AVX512F is usable. */
if (HAS_CPU_FEATURE (AVX512F))
{
- cpu_features->feature[index_AVX512F_Usable]
- |= bit_AVX512F_Usable;
+ cpu_features->feature[index_arch_AVX512F_Usable]
+ |= bit_arch_AVX512F_Usable;
/* Determine if AVX512DQ is usable. */
if (HAS_CPU_FEATURE (AVX512DQ))
- cpu_features->feature[index_AVX512DQ_Usable]
- |= bit_AVX512DQ_Usable;
+ cpu_features->feature[index_arch_AVX512DQ_Usable]
+ |= bit_arch_AVX512DQ_Usable;
}
}
/* Determine if FMA is usable. */
if (HAS_CPU_FEATURE (FMA))
- cpu_features->feature[index_FMA_Usable] |= bit_FMA_Usable;
+ cpu_features->feature[index_arch_FMA_Usable]
+ |= bit_arch_FMA_Usable;
/* Determine if FMA4 is usable. */
if (HAS_CPU_FEATURE (FMA4))
- cpu_features->feature[index_FMA4_Usable] |= bit_FMA4_Usable;
+ cpu_features->feature[index_arch_FMA4_Usable]
+ |= bit_arch_FMA4_Usable;
}
}
diff --git a/sysdeps/x86/cpu-features.h b/sysdeps/x86/cpu-features.h
index e354920d5d..0624a92b2f 100644
--- a/sysdeps/x86/cpu-features.h
+++ b/sysdeps/x86/cpu-features.h
@@ -18,48 +18,48 @@
#ifndef cpu_features_h
#define cpu_features_h
-#define bit_Fast_Rep_String (1 << 0)
-#define bit_Fast_Copy_Backward (1 << 1)
-#define bit_Slow_BSF (1 << 2)
-#define bit_Fast_Unaligned_Load (1 << 4)
-#define bit_Prefer_PMINUB_for_stringop (1 << 5)
-#define bit_AVX_Usable (1 << 6)
-#define bit_FMA_Usable (1 << 7)
-#define bit_FMA4_Usable (1 << 8)
-#define bit_Slow_SSE4_2 (1 << 9)
-#define bit_AVX2_Usable (1 << 10)
-#define bit_AVX_Fast_Unaligned_Load (1 << 11)
-#define bit_AVX512F_Usable (1 << 12)
-#define bit_AVX512DQ_Usable (1 << 13)
-#define bit_I586 (1 << 14)
-#define bit_I686 (1 << 15)
-#define bit_Prefer_MAP_32BIT_EXEC (1 << 16)
-#define bit_Prefer_No_VZEROUPPER (1 << 17)
+#define bit_arch_Fast_Rep_String (1 << 0)
+#define bit_arch_Fast_Copy_Backward (1 << 1)
+#define bit_arch_Slow_BSF (1 << 2)
+#define bit_arch_Fast_Unaligned_Load (1 << 4)
+#define bit_arch_Prefer_PMINUB_for_stringop (1 << 5)
+#define bit_arch_AVX_Usable (1 << 6)
+#define bit_arch_FMA_Usable (1 << 7)
+#define bit_arch_FMA4_Usable (1 << 8)
+#define bit_arch_Slow_SSE4_2 (1 << 9)
+#define bit_arch_AVX2_Usable (1 << 10)
+#define bit_arch_AVX_Fast_Unaligned_Load (1 << 11)
+#define bit_arch_AVX512F_Usable (1 << 12)
+#define bit_arch_AVX512DQ_Usable (1 << 13)
+#define bit_arch_I586 (1 << 14)
+#define bit_arch_I686 (1 << 15)
+#define bit_arch_Prefer_MAP_32BIT_EXEC (1 << 16)
+#define bit_arch_Prefer_No_VZEROUPPER (1 << 17)
/* CPUID Feature flags. */
/* COMMON_CPUID_INDEX_1. */
-#define bit_CX8 (1 << 8)
-#define bit_CMOV (1 << 15)
-#define bit_SSE2 (1 << 26)
-#define bit_SSSE3 (1 << 9)
-#define bit_SSE4_1 (1 << 19)
-#define bit_SSE4_2 (1 << 20)
-#define bit_OSXSAVE (1 << 27)
-#define bit_AVX (1 << 28)
-#define bit_POPCOUNT (1 << 23)
-#define bit_FMA (1 << 12)
-#define bit_FMA4 (1 << 16)
+#define bit_cpu_CX8 (1 << 8)
+#define bit_cpu_CMOV (1 << 15)
+#define bit_cpu_SSE2 (1 << 26)
+#define bit_cpu_SSSE3 (1 << 9)
+#define bit_cpu_SSE4_1 (1 << 19)
+#define bit_cpu_SSE4_2 (1 << 20)
+#define bit_cpu_OSXSAVE (1 << 27)
+#define bit_cpu_AVX (1 << 28)
+#define bit_cpu_POPCOUNT (1 << 23)
+#define bit_cpu_FMA (1 << 12)
+#define bit_cpu_FMA4 (1 << 16)
/* COMMON_CPUID_INDEX_7. */
-#define bit_RTM (1 << 11)
-#define bit_AVX2 (1 << 5)
-#define bit_AVX512F (1 << 16)
-#define bit_AVX512DQ (1 << 17)
+#define bit_cpu_RTM (1 << 11)
+#define bit_cpu_AVX2 (1 << 5)
+#define bit_cpu_AVX512F (1 << 16)
+#define bit_cpu_AVX512DQ (1 << 17)
/* XCR0 Feature flags. */
-#define bit_XMM_state (1 << 1)
-#define bit_YMM_state (2 << 1)
+#define bit_XMM_state (1 << 1)
+#define bit_YMM_state (2 << 1)
#define bit_Opmask_state (1 << 5)
#define bit_ZMM0_15_state (1 << 6)
#define bit_ZMM16_31_state (1 << 7)
@@ -75,32 +75,32 @@
# include <ifunc-defines.h>
# include <rtld-global-offsets.h>
-# define index_CX8 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_EDX_OFFSET
-# define index_CMOV COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_EDX_OFFSET
-# define index_SSE2 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_EDX_OFFSET
-# define index_SSSE3 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
-# define index_SSE4_1 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
-# define index_SSE4_2 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
-# define index_AVX COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
-# define index_AVX2 COMMON_CPUID_INDEX_7*CPUID_SIZE+CPUID_EBX_OFFSET
+# define index_cpu_CX8 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_EDX_OFFSET
+# define index_cpu_CMOV COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_EDX_OFFSET
+# define index_cpu_SSE2 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_EDX_OFFSET
+# define index_cpu_SSSE3 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
+# define index_cpu_SSE4_1 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
+# define index_cpu_SSE4_2 COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
+# define index_cpu_AVX COMMON_CPUID_INDEX_1*CPUID_SIZE+CPUID_ECX_OFFSET
+# define index_cpu_AVX2 COMMON_CPUID_INDEX_7*CPUID_SIZE+CPUID_EBX_OFFSET
-# define index_Fast_Rep_String FEATURE_INDEX_1*FEATURE_SIZE
-# define index_Fast_Copy_Backward FEATURE_INDEX_1*FEATURE_SIZE
-# define index_Slow_BSF FEATURE_INDEX_1*FEATURE_SIZE
-# define index_Fast_Unaligned_Load FEATURE_INDEX_1*FEATURE_SIZE
-# define index_Prefer_PMINUB_for_stringop FEATURE_INDEX_1*FEATURE_SIZE
-# define index_AVX_Usable FEATURE_INDEX_1*FEATURE_SIZE
-# define index_FMA_Usable FEATURE_INDEX_1*FEATURE_SIZE
-# define index_FMA4_Usable FEATURE_INDEX_1*FEATURE_SIZE
-# define index_Slow_SSE4_2 FEATURE_INDEX_1*FEATURE_SIZE
-# define index_AVX2_Usable FEATURE_INDEX_1*FEATURE_SIZE
-# define index_AVX_Fast_Unaligned_Load FEATURE_INDEX_1*FEATURE_SIZE
-# define index_AVX512F_Usable FEATURE_INDEX_1*FEATURE_SIZE
-# define index_AVX512DQ_Usable FEATURE_INDEX_1*FEATURE_SIZE
-# define index_I586 FEATURE_INDEX_1*FEATURE_SIZE
-# define index_I686 FEATURE_INDEX_1*FEATURE_SIZE
-# define index_Prefer_MAP_32BIT_EXEC FEATURE_INDEX_1*FEATURE_SIZE
-# define index_Prefer_No_VZEROUPPER FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Fast_Rep_String FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Fast_Copy_Backward FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Slow_BSF FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Fast_Unaligned_Load FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Prefer_PMINUB_for_stringop FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_AVX_Usable FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_FMA_Usable FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_FMA4_Usable FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Slow_SSE4_2 FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_AVX2_Usable FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_AVX_Fast_Unaligned_Load FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_AVX512F_Usable FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_AVX512DQ_Usable FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_I586 FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_I686 FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Prefer_MAP_32BIT_EXEC FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Prefer_No_VZEROUPPER FEATURE_INDEX_1*FEATURE_SIZE
# if defined (_LIBC) && !IS_IN (nonlib)
@@ -108,19 +108,21 @@
# ifdef SHARED
# if IS_IN (rtld)
# define LOAD_RTLD_GLOBAL_RO_RDX
-# define HAS_FEATURE(offset, name) \
- testl $(bit_##name), _rtld_local_ro+offset+(index_##name)(%rip)
+# define HAS_FEATURE(offset, field, name) \
+ testl $(bit_##field##_##name), \
+ _rtld_local_ro+offset+(index_##field##_##name)(%rip)
# else
# define LOAD_RTLD_GLOBAL_RO_RDX \
mov _rtld_global_ro@GOTPCREL(%rip), %RDX_LP
-# define HAS_FEATURE(offset, name) \
- testl $(bit_##name), \
- RTLD_GLOBAL_RO_DL_X86_CPU_FEATURES_OFFSET+offset+(index_##name)(%rdx)
+# define HAS_FEATURE(offset, field, name) \
+ testl $(bit_##field##_##name), \
+ RTLD_GLOBAL_RO_DL_X86_CPU_FEATURES_OFFSET+offset+(index_##field##_##name)(%rdx)
# endif
# else /* SHARED */
# define LOAD_RTLD_GLOBAL_RO_RDX
-# define HAS_FEATURE(offset, name) \
- testl $(bit_##name), _dl_x86_cpu_features+offset+(index_##name)(%rip)
+# define HAS_FEATURE(offset, field, name) \
+ testl $(bit_##field##_##name), \
+ _dl_x86_cpu_features+offset+(index_##field##_##name)(%rip)
# endif /* !SHARED */
# else /* __x86_64__ */
# ifdef SHARED
@@ -129,22 +131,24 @@
# if IS_IN (rtld)
# define LOAD_GOT_AND_RTLD_GLOBAL_RO \
LOAD_PIC_REG(dx)
-# define HAS_FEATURE(offset, name) \
- testl $(bit_##name), offset+(index_##name)+_rtld_local_ro@GOTOFF(%edx)
+# define HAS_FEATURE(offset, field, name) \
+ testl $(bit_##field##_##name), \
+ offset+(index_##field##_##name)+_rtld_local_ro@GOTOFF(%edx)
# else
# define LOAD_GOT_AND_RTLD_GLOBAL_RO \
LOAD_PIC_REG(dx); \
mov _rtld_global_ro@GOT(%edx), %ecx
-# define HAS_FEATURE(offset, name) \
- testl $(bit_##name), \
- RTLD_GLOBAL_RO_DL_X86_CPU_FEATURES_OFFSET+offset+(index_##name)(%ecx)
+# define HAS_FEATURE(offset, field, name) \
+ testl $(bit_##field##_##name), \
+ RTLD_GLOBAL_RO_DL_X86_CPU_FEATURES_OFFSET+offset+(index_##field##_##name)(%ecx)
# endif
# else /* SHARED */
# define LOAD_FUNC_GOT_EAX(func) \
leal func, %eax
# define LOAD_GOT_AND_RTLD_GLOBAL_RO
-# define HAS_FEATURE(offset, name) \
- testl $(bit_##name), _dl_x86_cpu_features+offset+(index_##name)
+# define HAS_FEATURE(offset, field, name) \
+ testl $(bit_##field##_##name), \
+ _dl_x86_cpu_features+offset+(index_##field##_##name)
# endif /* !SHARED */
# endif /* !__x86_64__ */
# else /* _LIBC && !nonlib */
@@ -152,8 +156,8 @@
# endif /* !_LIBC || nonlib */
/* HAS_* evaluates to true if we may use the feature at runtime. */
-# define HAS_CPU_FEATURE(name) HAS_FEATURE (CPUID_OFFSET, name)
-# define HAS_ARCH_FEATURE(name) HAS_FEATURE (FEATURE_OFFSET, name)
+# define HAS_CPU_FEATURE(name) HAS_FEATURE (CPUID_OFFSET, cpu, name)
+# define HAS_ARCH_FEATURE(name) HAS_FEATURE (FEATURE_OFFSET, arch, name)
#else /* __ASSEMBLER__ */
@@ -202,25 +206,25 @@ extern const struct cpu_features *__get_cpu_features (void)
/* HAS_* evaluates to true if we may use the feature at runtime. */
# define HAS_CPU_FEATURE(name) \
- ((__get_cpu_features ()->cpuid[index_##name].reg_##name & (bit_##name)) != 0)
+ ((__get_cpu_features ()->cpuid[index_cpu_##name].reg_##name & (bit_cpu_##name)) != 0)
# define HAS_ARCH_FEATURE(name) \
- ((__get_cpu_features ()->feature[index_##name] & (bit_##name)) != 0)
+ ((__get_cpu_features ()->feature[index_arch_##name] & (bit_arch_##name)) != 0)
-# define index_CX8 COMMON_CPUID_INDEX_1
-# define index_CMOV COMMON_CPUID_INDEX_1
-# define index_SSE2 COMMON_CPUID_INDEX_1
-# define index_SSSE3 COMMON_CPUID_INDEX_1
-# define index_SSE4_1 COMMON_CPUID_INDEX_1
-# define index_SSE4_2 COMMON_CPUID_INDEX_1
-# define index_AVX COMMON_CPUID_INDEX_1
-# define index_AVX2 COMMON_CPUID_INDEX_7
-# define index_AVX512F COMMON_CPUID_INDEX_7
-# define index_AVX512DQ COMMON_CPUID_INDEX_7
-# define index_RTM COMMON_CPUID_INDEX_7
-# define index_FMA COMMON_CPUID_INDEX_1
-# define index_FMA4 COMMON_CPUID_INDEX_80000001
-# define index_POPCOUNT COMMON_CPUID_INDEX_1
-# define index_OSXSAVE COMMON_CPUID_INDEX_1
+# define index_cpu_CX8 COMMON_CPUID_INDEX_1
+# define index_cpu_CMOV COMMON_CPUID_INDEX_1
+# define index_cpu_SSE2 COMMON_CPUID_INDEX_1
+# define index_cpu_SSSE3 COMMON_CPUID_INDEX_1
+# define index_cpu_SSE4_1 COMMON_CPUID_INDEX_1
+# define index_cpu_SSE4_2 COMMON_CPUID_INDEX_1
+# define index_cpu_AVX COMMON_CPUID_INDEX_1
+# define index_cpu_AVX2 COMMON_CPUID_INDEX_7
+# define index_cpu_AVX512F COMMON_CPUID_INDEX_7
+# define index_cpu_AVX512DQ COMMON_CPUID_INDEX_7
+# define index_cpu_RTM COMMON_CPUID_INDEX_7
+# define index_cpu_FMA COMMON_CPUID_INDEX_1
+# define index_cpu_FMA4 COMMON_CPUID_INDEX_80000001
+# define index_cpu_POPCOUNT COMMON_CPUID_INDEX_1
+# define index_cpu_OSXSAVE COMMON_CPUID_INDEX_1
# define reg_CX8 edx
# define reg_CMOV edx
@@ -238,23 +242,23 @@ extern const struct cpu_features *__get_cpu_features (void)
# define reg_POPCOUNT ecx
# define reg_OSXSAVE ecx
-# define index_Fast_Rep_String FEATURE_INDEX_1
-# define index_Fast_Copy_Backward FEATURE_INDEX_1
-# define index_Slow_BSF FEATURE_INDEX_1
-# define index_Fast_Unaligned_Load FEATURE_INDEX_1
-# define index_Prefer_PMINUB_for_stringop FEATURE_INDEX_1
-# define index_AVX_Usable FEATURE_INDEX_1
-# define index_FMA_Usable FEATURE_INDEX_1
-# define index_FMA4_Usable FEATURE_INDEX_1
-# define index_Slow_SSE4_2 FEATURE_INDEX_1
-# define index_AVX2_Usable FEATURE_INDEX_1
-# define index_AVX_Fast_Unaligned_Load FEATURE_INDEX_1
-# define index_AVX512F_Usable FEATURE_INDEX_1
-# define index_AVX512DQ_Usable FEATURE_INDEX_1
-# define index_I586 FEATURE_INDEX_1
-# define index_I686 FEATURE_INDEX_1
-# define index_Prefer_MAP_32BIT_EXEC FEATURE_INDEX_1
-# define index_Prefer_No_VZEROUPPER FEATURE_INDEX_1
+# define index_arch_Fast_Rep_String FEATURE_INDEX_1
+# define index_arch_Fast_Copy_Backward FEATURE_INDEX_1
+# define index_arch_Slow_BSF FEATURE_INDEX_1
+# define index_arch_Fast_Unaligned_Load FEATURE_INDEX_1
+# define index_arch_Prefer_PMINUB_for_stringop FEATURE_INDEX_1
+# define index_arch_AVX_Usable FEATURE_INDEX_1
+# define index_arch_FMA_Usable FEATURE_INDEX_1
+# define index_arch_FMA4_Usable FEATURE_INDEX_1
+# define index_arch_Slow_SSE4_2 FEATURE_INDEX_1
+# define index_arch_AVX2_Usable FEATURE_INDEX_1
+# define index_arch_AVX_Fast_Unaligned_Load FEATURE_INDEX_1
+# define index_arch_AVX512F_Usable FEATURE_INDEX_1
+# define index_arch_AVX512DQ_Usable FEATURE_INDEX_1
+# define index_arch_I586 FEATURE_INDEX_1
+# define index_arch_I686 FEATURE_INDEX_1
+# define index_arch_Prefer_MAP_32BIT_EXEC FEATURE_INDEX_1
+# define index_arch_Prefer_No_VZEROUPPER FEATURE_INDEX_1
#endif /* !__ASSEMBLER__ */