author     Ulrich Drepper <drepper@redhat.com>  2006-01-04 00:20:45 +0000
committer  Ulrich Drepper <drepper@redhat.com>  2006-01-04 00:20:45 +0000
commit     b01fe5f792564a4f6606f33f7827b649bdfbd1f4 (patch)
tree       968a10dc426c94e9ad8cbf522d19bd1999761641 /sysdeps/sparc/sparc32/bits
parent     6d3aff23e2c64b64fe68fe88360185e17b3e936a (diff)
* sysdeps/unix/sysv/linux/m68k/fchownat.c: Include string.h.
* sysdeps/sparc/sparc32/dl-machine.h (LD_SO_PRELOAD): Remove unused define.
* sysdeps/sparc/sparc32/bits/atomic.h: Include stdint.h, add
  {,u}atomic*_t typedefs.
  (__sparc32_atomic_do_lock): Add __volatile and memory clobber.
  (__sparc32_atomic_do_unlock): Add memory barrier.
  (__sparc32_atomic_do_lock24, __sparc32_atomic_do_unlock24): Define.
  [!SHARED] (__v9_compare_and_exchange_val_32_acq): Define.
  (__v7_compare_and_exchange_val_acq, __v7_compare_and_exchange_bool_acq,
  __v7_exchange_acq, __v7_exchange_and_add, __v7_exchange_24_rel,
  __v7_compare_and_exchange_val_24_acq, __atomic_is_v9, atomic_exchange_acq,
  atomic_compare_and_exchange_val_24_acq, atomic_exchange_24_rel): Define.
  [SHARED] (atomic_exchange_and_add): Define.
  [!SHARED] (__ATOMIC_HWCAP_SPARC_V9): Define.
  [!SHARED] (_dl_hwcap): New weak decl.
* sysdeps/sparc/sparc32/sparcv9/bits/atomic.h (atomic_exchange_24_rel,
  atomic_compare_and_exchange_val_24_acq): Define.
* sysdeps/sparc/sparc64/bits/atomic.h (atomic_exchange_24_rel,
  atomic_compare_and_exchange_val_24_acq): Define.
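The heart of the patch is the static-link (!SHARED) case: libc.a and libpthread.a cannot know at build time whether they will run on a pre-v9 or a v9 CPU, so the new macros test the weak _dl_hwcap symbol at run time and pick between the v9 cas instruction and the old ldstub-based fallback. Below is a minimal, portable sketch of that dispatch pattern; the hwcap constant, the weak symbol name and both backends are illustrative stand-ins, not the glibc code itself.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the HWCAP_SPARC_V9 bit (16 in the patch).  */
#define EXAMPLE_HWCAP_V9 16

/* Weak reference: if no dynamic loader (and hence no hwcap word) is
   linked in, the address resolves to 0 and the v9 path is never taken,
   the same trick as the patch's weak _dl_hwcap declaration.  */
extern uint64_t example_hwcap __attribute__ ((weak));

/* v9-style backend: on real hardware this would be the cas instruction;
   a compiler builtin stands in for it here.  */
static uint32_t
cas_v9 (volatile uint32_t *mem, uint32_t newval, uint32_t oldval)
{
  return __sync_val_compare_and_swap (mem, oldval, newval);
}

/* Pre-v9 backend: the real code serializes through one of 64 global
   ldstub locks; the locking is omitted from this sketch.  */
static uint32_t
cas_v7 (volatile uint32_t *mem, uint32_t newval, uint32_t oldval)
{
  uint32_t ret = *mem;
  if (ret == oldval)
    *mem = newval;
  return ret;
}

/* Run-time dispatch, mirroring the patch's __atomic_is_v9 test.  */
static uint32_t
compare_and_exchange_val_acq (volatile uint32_t *mem,
                              uint32_t newval, uint32_t oldval)
{
  int is_v9 = &example_hwcap != 0
              && (example_hwcap & EXAMPLE_HWCAP_V9) != 0;
  return is_v9 ? cas_v9 (mem, newval, oldval)
               : cas_v7 (mem, newval, oldval);
}

int
main (void)
{
  volatile uint32_t word = 1;
  printf ("old=%u, new=%u\n",
          compare_and_exchange_val_acq (&word, 2, 1), (unsigned int) word);
  return 0;
}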
Diffstat (limited to 'sysdeps/sparc/sparc32/bits')
-rw-r--r--  sysdeps/sparc/sparc32/bits/atomic.h  |  260
1 files changed, 251 insertions, 9 deletions
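For pre-v9 CPUs the file keeps its old strategy: there is no compare-and-swap instruction, only ldstub (test-and-set), so every atomic operation is serialized through one of 64 global byte locks chosen by hashing the operand's address. The following rough, portable sketch of that scheme uses __atomic_test_and_set as a stand-in for ldstub; all of the names are invented for illustration.

#include <stdint.h>
#include <stdio.h>

/* 64 byte-sized spinlocks shared by every "atomic" object, as in
   __sparc32_atomic_locks[64].  */
static volatile unsigned char lock_table[64];

/* The same hash as the header: mix two shifts of the address, keep 6 bits.  */
static unsigned int
lock_index (const void *addr)
{
  return (((uintptr_t) addr >> 2) ^ ((uintptr_t) addr >> 12)) & 63;
}

static void
do_lock (const void *addr)
{
  volatile unsigned char *lock = &lock_table[lock_index (addr)];
  /* Spin on test-and-set; ldstub likewise returns the old byte value.  */
  while (__atomic_test_and_set ((void *) lock, __ATOMIC_ACQUIRE))
    ;
}

static void
do_unlock (const void *addr)
{
  __atomic_clear ((void *) &lock_table[lock_index (addr)], __ATOMIC_RELEASE);
}

/* Compare-and-exchange emulated under the per-address lock, mirroring
   __v7_compare_and_exchange_val_acq.  */
static uint32_t
locked_cas (volatile uint32_t *mem, uint32_t newval, uint32_t oldval)
{
  do_lock ((const void *) mem);
  uint32_t ret = *mem;
  if (ret == oldval)
    *mem = newval;
  do_unlock ((const void *) mem);
  return ret;
}

int
main (void)
{
  volatile uint32_t x = 1;
  printf ("old=%u, new=%u\n", locked_cas (&x, 2, 1), (unsigned int) x);
  return 0;
}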
diff --git a/sysdeps/sparc/sparc32/bits/atomic.h b/sysdeps/sparc/sparc32/bits/atomic.h
index 736372a3d4..707a4b0a52 100644
--- a/sysdeps/sparc/sparc32/bits/atomic.h
+++ b/sysdeps/sparc/sparc32/bits/atomic.h
@@ -1,5 +1,5 @@
/* Atomic operations. sparc32 version.
- Copyright (C) 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2006 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Jakub Jelinek <jakub@redhat.com>, 2003.
@@ -21,6 +21,34 @@
#ifndef _BITS_ATOMIC_H
#define _BITS_ATOMIC_H 1
+#include <stdint.h>
+
+typedef int8_t atomic8_t;
+typedef uint8_t uatomic8_t;
+typedef int_fast8_t atomic_fast8_t;
+typedef uint_fast8_t uatomic_fast8_t;
+
+typedef int16_t atomic16_t;
+typedef uint16_t uatomic16_t;
+typedef int_fast16_t atomic_fast16_t;
+typedef uint_fast16_t uatomic_fast16_t;
+
+typedef int32_t atomic32_t;
+typedef uint32_t uatomic32_t;
+typedef int_fast32_t atomic_fast32_t;
+typedef uint_fast32_t uatomic_fast32_t;
+
+typedef int64_t atomic64_t;
+typedef uint64_t uatomic64_t;
+typedef int_fast64_t atomic_fast64_t;
+typedef uint_fast64_t uatomic_fast64_t;
+
+typedef intptr_t atomicptr_t;
+typedef uintptr_t uatomicptr_t;
+typedef intmax_t atomic_max_t;
+typedef uintmax_t uatomic_max_t;
+
+
/* We have no compare and swap, just test and set.
The following implementation contends on 64 global locks
per library and assumes no variable will be accessed using atomic.h
@@ -41,22 +69,65 @@ volatile unsigned char __sparc32_atomic_locks[64]
unsigned int __idx = (((long) addr >> 2) ^ ((long) addr >> 12)) \
& 63; \
do \
- __asm ("ldstub %1, %0" \
- : "=r" (__old_lock), \
- "=m" (__sparc32_atomic_locks[__idx]) \
- : "m" (__sparc32_atomic_locks[__idx])); \
+ __asm __volatile ("ldstub %1, %0" \
+ : "=r" (__old_lock), \
+ "=m" (__sparc32_atomic_locks[__idx]) \
+ : "m" (__sparc32_atomic_locks[__idx]) \
+ : "memory"); \
while (__old_lock); \
} \
while (0)
#define __sparc32_atomic_do_unlock(addr) \
do \
- __sparc32_atomic_locks[(((long) addr >> 2) \
- ^ ((long) addr >> 12)) & 63] = 0; \
+ { \
+ __sparc32_atomic_locks[(((long) addr >> 2) \
+ ^ ((long) addr >> 12)) & 63] = 0; \
+ __asm __volatile ("" ::: "memory"); \
+ } \
+ while (0)
+
+#define __sparc32_atomic_do_lock24(addr) \
+ do \
+ { \
+ unsigned int __old_lock; \
+ do \
+ __asm __volatile ("ldstub %1, %0" \
+ : "=r" (__old_lock), "=m" (*(addr)) \
+ : "m" (*(addr)) \
+ : "memory"); \
+ while (__old_lock); \
+ } \
while (0)
+#define __sparc32_atomic_do_unlock24(addr) \
+ do \
+ { \
+ *(char *) (addr) = 0; \
+ __asm __volatile ("" ::: "memory"); \
+ } \
+ while (0)
+
+
+#ifndef SHARED
+# define __v9_compare_and_exchange_val_32_acq(mem, newval, oldval) \
+({ \
+ register __typeof (*(mem)) __acev_tmp __asm ("%g6"); \
+ register __typeof (mem) __acev_mem __asm ("%g1") = (mem); \
+ register __typeof (*(mem)) __acev_oldval __asm ("%g5"); \
+ __acev_tmp = (newval); \
+ __acev_oldval = (oldval); \
+ /* .word 0xcde05005 is cas [%g1], %g5, %g6. Can't use cas here though, \
+ because as will then mark the object file as V8+ arch. */ \
+ __asm __volatile (".word 0xcde05005" \
+ : "+r" (__acev_tmp), "=m" (*__acev_mem) \
+ : "r" (__acev_oldval), "m" (*__acev_mem), \
+ "r" (__acev_mem)); \
+ __acev_tmp; })
+#endif
+
/* The only basic operation needed is compare and exchange. */
-#define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
+#define __v7_compare_and_exchange_val_acq(mem, newval, oldval) \
({ __typeof (mem) __acev_memp = (mem); \
__typeof (*mem) __acev_ret; \
__typeof (*mem) __acev_newval = (newval); \
@@ -68,7 +139,7 @@ volatile unsigned char __sparc32_atomic_locks[64]
__sparc32_atomic_do_unlock (__acev_memp); \
__acev_ret; })
-#define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
+#define __v7_compare_and_exchange_bool_acq(mem, newval, oldval) \
({ __typeof (mem) __aceb_memp = (mem); \
int __aceb_ret; \
__typeof (*mem) __aceb_newval = (newval); \
@@ -82,4 +153,175 @@ volatile unsigned char __sparc32_atomic_locks[64]
__sparc32_atomic_do_unlock (__aceb_memp); \
__aceb_ret; })
+#define __v7_exchange_acq(mem, newval) \
+ ({ __typeof (mem) __acev_memp = (mem); \
+ __typeof (*mem) __acev_ret; \
+ __typeof (*mem) __acev_newval = (newval); \
+ \
+ __sparc32_atomic_do_lock (__acev_memp); \
+ __acev_ret = *__acev_memp; \
+ *__acev_memp = __acev_newval; \
+ __sparc32_atomic_do_unlock (__acev_memp); \
+ __acev_ret; })
+
+#define __v7_exchange_and_add(mem, value) \
+ ({ __typeof (mem) __acev_memp = (mem); \
+ __typeof (*mem) __acev_ret; \
+ \
+ __sparc32_atomic_do_lock (__acev_memp); \
+ __acev_ret = *__acev_memp; \
+ *__acev_memp = __acev_ret + (value); \
+ __sparc32_atomic_do_unlock (__acev_memp); \
+ __acev_ret; })
+
+/* Special versions, which guarantee that top 8 bits of all values
+ are cleared and use those bits as the ldstub lock. */
+#define __v7_compare_and_exchange_val_24_acq(mem, newval, oldval) \
+ ({ __typeof (mem) __acev_memp = (mem); \
+ __typeof (*mem) __acev_ret; \
+ __typeof (*mem) __acev_newval = (newval); \
+ \
+ __sparc32_atomic_do_lock24 (__acev_memp); \
+ __acev_ret = *__acev_memp & 0xffffff; \
+ if (__acev_ret == (oldval)) \
+ *__acev_memp = __acev_newval; \
+ else \
+ __sparc32_atomic_do_unlock24 (__acev_memp); \
+ __asm __volatile ("" ::: "memory"); \
+ __acev_ret; })
+
+#define __v7_exchange_24_rel(mem, newval) \
+ ({ __typeof (mem) __acev_memp = (mem); \
+ __typeof (*mem) __acev_ret; \
+ __typeof (*mem) __acev_newval = (newval); \
+ \
+ __sparc32_atomic_do_lock24 (__acev_memp); \
+ __acev_ret = *__acev_memp & 0xffffff; \
+ *__acev_memp = __acev_newval; \
+ __asm __volatile ("" ::: "memory"); \
+ __acev_ret; })
+
+#ifdef SHARED
+
+/* When dynamically linked, we assume pre-v9 libraries are only ever
+ used on pre-v9 CPU. */
+# define __atomic_is_v9 0
+
+# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
+ __v7_compare_and_exchange_val_acq (mem, newval, oldval)
+
+# define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
+ __v7_compare_and_exchange_bool_acq (mem, newval, oldval)
+
+# define atomic_exchange_acq(mem, newval) \
+ __v7_exchange_acq (mem, newval)
+
+# define atomic_exchange_and_add(mem, value) \
+ __v7_exchange_and_add (mem, value)
+
+# define atomic_compare_and_exchange_val_24_acq(mem, newval, oldval) \
+ ({ \
+ if (sizeof (*mem) != 4) \
+ abort (); \
+ __v7_compare_and_exchange_val_24_acq (mem, newval, oldval); })
+
+# define atomic_exchange_24_rel(mem, newval) \
+ ({ \
+ if (sizeof (*mem) != 4) \
+ abort (); \
+ __v7_exchange_24_rel (mem, newval); })
+
+#else
+
+/* In libc.a/libpthread.a etc. we don't know if we'll be run on
+ pre-v9 or v9 CPU. To be interoperable with dynamically linked
+ apps on v9 CPUs e.g. with process shared primitives, use cas insn
+ on v9 CPUs and ldstub on pre-v9. */
+
+/* Avoid <ldsodefs.h> include here. */
+extern uint64_t _dl_hwcap __attribute__((weak));
+# define __ATOMIC_HWCAP_SPARC_V9 16
+# define __atomic_is_v9 \
+ (__builtin_expect (&_dl_hwcap != 0, 1) \
+ && __builtin_expect (_dl_hwcap & __ATOMIC_HWCAP_SPARC_V9, \
+ __ATOMIC_HWCAP_SPARC_V9))
+
+# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
+ ({ \
+ __typeof (*mem) __acev_wret; \
+ if (sizeof (*mem) != 4) \
+ abort (); \
+ if (__atomic_is_v9) \
+ __acev_wret \
+ = __v9_compare_and_exchange_val_32_acq (mem, newval, oldval);\
+ else \
+ __acev_wret \
+ = __v7_compare_and_exchange_val_acq (mem, newval, oldval); \
+ __acev_wret; })
+
+# define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
+ ({ \
+ int __acev_wret; \
+ if (sizeof (*mem) != 4) \
+ abort (); \
+ if (__atomic_is_v9) \
+ { \
+ __typeof (oldval) __acev_woldval = (oldval); \
+ __acev_wret \
+ = __v9_compare_and_exchange_val_32_acq (mem, newval, \
+ __acev_woldval) \
+ != __acev_woldval; \
+ } \
+ else \
+ __acev_wret \
+ = __v7_compare_and_exchange_bool_acq (mem, newval, oldval); \
+ __acev_wret; })
+
+# define atomic_exchange_rel(mem, newval) \
+ ({ \
+ __typeof (*mem) __acev_wret; \
+ if (sizeof (*mem) != 4) \
+ abort (); \
+ if (__atomic_is_v9) \
+ { \
+ __typeof (mem) __acev_wmemp = (mem); \
+ __typeof (*(mem)) __acev_wval = (newval); \
+ do \
+ __acev_wret = *__acev_wmemp; \
+ while (__builtin_expect \
+ (__v9_compare_and_exchange_val_32_acq (__acev_wmemp,\
+ __acev_wval, \
+ __acev_wret) \
+ != __acev_wret, 0)); \
+ } \
+ else \
+ __acev_wret = __v7_exchange_acq (mem, newval); \
+ __acev_wret; })
+
+# define atomic_compare_and_exchange_val_24_acq(mem, newval, oldval) \
+ ({ \
+ __typeof (*mem) __acev_wret; \
+ if (sizeof (*mem) != 4) \
+ abort (); \
+ if (__atomic_is_v9) \
+ __acev_wret \
+ = __v9_compare_and_exchange_val_32_acq (mem, newval, oldval);\
+ else \
+ __acev_wret \
+ = __v7_compare_and_exchange_val_24_acq (mem, newval, oldval);\
+ __acev_wret; })
+
+# define atomic_exchange_24_rel(mem, newval) \
+ ({ \
+ __typeof (*mem) __acev_w24ret; \
+ if (sizeof (*mem) != 4) \
+ abort (); \
+ if (__atomic_is_v9) \
+ __acev_w24ret = atomic_exchange_rel (mem, newval); \
+ else \
+ __acev_w24ret = __v7_exchange_24_rel (mem, newval); \
+ __acev_w24ret; })
+
+#endif
+
#endif /* bits/atomic.h */
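
The new 24-bit operations added by the patch exist because pre-v9 has nowhere else to hide a lock: for 32-bit objects whose users guarantee the top 8 bits are always zero, the value's own high byte serves as the ldstub lock, so no global lock table is needed and the store of the new value doubles as the unlock. A small portable sketch of that layout follows; __atomic_test_and_set again stands in for ldstub and every name here is invented for illustration.

#include <stdint.h>
#include <stdio.h>

/* Address of the most significant byte of a 32-bit word.  On sparc
   (big endian) this is the word's own address, which is why ldstub on
   the word itself works as the lock there.  */
static volatile unsigned char *
top_byte (volatile uint32_t *mem)
{
  volatile unsigned char *p = (volatile unsigned char *) mem;
  union { uint32_t w; unsigned char b[4]; } probe = { .w = 1 };
  return probe.b[0] == 1 ? p + 3 : p;   /* little endian : big endian */
}

/* Lock by test-and-setting the value's own high byte; ldstub would set
   the byte to 0xff and hand back its previous contents.  */
static void
lock24 (volatile uint32_t *mem)
{
  while (__atomic_test_and_set ((void *) top_byte (mem), __ATOMIC_ACQUIRE))
    ;
}

static void
unlock24 (volatile uint32_t *mem)
{
  __atomic_clear ((void *) top_byte (mem), __ATOMIC_RELEASE);
}

/* Mirror of __v7_compare_and_exchange_val_24_acq: only the low 24 bits
   are compared and returned; storing newval (whose top byte is zero)
   releases the lock as a side effect, exactly as in the patch.  */
static uint32_t
cas24 (volatile uint32_t *mem, uint32_t newval, uint32_t oldval)
{
  lock24 (mem);
  uint32_t ret = *mem & 0xffffff;
  if (ret == oldval)
    *mem = newval;
  else
    unlock24 (mem);
  return ret;
}

int
main (void)
{
  volatile uint32_t v = 5;
  printf ("old=%u, new=%u\n", cas24 (&v, 6, 5), (unsigned int) (v & 0xffffff));
  return 0;
}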