author     Roland McGrath <roland@gnu.org>    2003-03-26 04:02:03 +0000
committer  Roland McGrath <roland@gnu.org>    2003-03-26 04:02:03 +0000
commit     859e708f0e44d50deff617c7fd939f4fba295afb (patch)
tree       c02276928e6c984391f089af6b54f81220165d0e /sysdeps/x86_64
parent     dd410d4d4585883d02aa497f4227551c5c4cf811 (diff)
* csu/tst-atomic.c (do_test): Add some new
atomic_compare_and_exchange_val_acq, atomic_add_zero,
atomic_compare_and_exchange_bool_acq and atomic_add_negative tests.
* include/atomic.h (atomic_add_negative, atomic_add_zero): Prefix local
variable so that it doesn't clash with the one in
atomic_exchange_and_add.
* sysdeps/ia64/bits/atomic.h (atomic_exchange): Fix for long/void *
pointers.
(atomic_exchange_and_add): Implement using __sync_fetch_and_add_?i.
* sysdeps/powerpc/bits/atomic.h (atomic_exchange_and_add): Force value
into register.
* sysdeps/s390/bits/atomic.h (__arch_compare_and_exchange_val_64_acq):
Cast newval to long.
* sysdeps/x86_64/bits/atomic.h
(__arch_compare_and_exchange_val_64_acq): Cast newval and oldval to
long.
(atomic_exchange): Cast newvalue to long if sizeof == 8.
(atomic_exchange_and_add): Cast value to long if sizeof == 8.
(atomic_add, atomic_add_negative, atomic_add_zero): Likewise.
(atomic_bit_set): Shift 1L up in all cases to shut up warnings.
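The entry above summarizes the x86_64 changes as widening the quad-word asm
operands to long and shifting 1L instead of 1 in atomic_bit_set. A minimal
standalone sketch (hypothetical demonstration, not glibc code) of why both
matter on x86-64, assuming the usual LP64 ABI with 32-bit int and 64-bit long:

#include <stdio.h>

int
main (void)
{
  /* 1 << bit is an int shift; for bit >= 32 the shift count exceeds the
     width of int, which is undefined and draws a gcc warning.  1L << bit
     yields the intended 64-bit mask, hence the 1L in atomic_bit_set.  */
  unsigned long mask = 1L << 40;
  printf ("1L << 40 = %#lx\n", mask);

  /* The quad-word instructions (cmpxchgq, xchgq, xaddq, addq, orq) take a
     full 64-bit register or immediate, so the patch casts the macro
     argument to long, making the operand genuinely 64 bits rather than a
     narrower value printed with a 64-bit register name.  */
  unsigned int narrow = 0x12345678u;
  unsigned long widened = (long) narrow;
  printf ("widened value = %#lx\n", widened);

  return 0;
}

Compiled with gcc on x86-64, the first printf prints 0x10000000000; writing
1 << 40 instead makes gcc warn that the shift count is at least the width of
the type.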
Diffstat (limited to 'sysdeps/x86_64')
-rw-r--r--   sysdeps/x86_64/bits/atomic.h   21
1 file changed, 11 insertions(+), 10 deletions(-)
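Before the diff, a hedged sketch of the semantics of the 64-bit
compare-and-exchange macro patched in the first hunk, written with GCC's
__sync builtin rather than inline asm (the function name here is
illustrative, not glibc's):

#include <stdint.h>

/* Illustrative only: like the cmpxchgq-based macro below, this returns the
   value that was in *mem before the operation; *mem is replaced by newval
   only if it equaled oldval.  */
static inline int64_t
cmpxchg64_val_acq (int64_t *mem, int64_t newval, int64_t oldval)
{
  return __sync_val_compare_and_swap (mem, oldval, newval);
}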
diff --git a/sysdeps/x86_64/bits/atomic.h b/sysdeps/x86_64/bits/atomic.h
index 1d41e7761c..0582103f2a 100644
--- a/sysdeps/x86_64/bits/atomic.h
+++ b/sysdeps/x86_64/bits/atomic.h
@@ -80,7 +80,8 @@ typedef uintmax_t uatomic_max_t;
({ __typeof (*mem) ret; \
__asm __volatile (LOCK "cmpxchgq %q2, %1" \
: "=a" (ret), "=m" (*mem) \
- : "r" (newval), "m" (*mem), "0" (oldval)); \
+ : "r" ((long) (newval)), "m" (*mem), \
+ "0" ((long) (oldval))); \
ret; })
@@ -102,7 +103,7 @@ typedef uintmax_t uatomic_max_t;
else \
__asm __volatile ("xchgq %q0, %1" \
: "=r" (result), "=m" (*mem) \
- : "0" (newvalue), "m" (*mem)); \
+ : "0" ((long) (newvalue)), "m" (*mem)); \
result; })
@@ -123,7 +124,7 @@ typedef uintmax_t uatomic_max_t;
else \
__asm __volatile (LOCK "xaddq %q0, %1" \
: "=r" (result), "=m" (*mem) \
- : "0" (value), "m" (*mem)); \
+ : "0" ((long) (value)), "m" (*mem)); \
result; })
@@ -147,7 +148,7 @@ typedef uintmax_t uatomic_max_t;
else \
__asm __volatile (LOCK "addq %q1, %0" \
: "=m" (*mem) \
- : "ir" (value), "m" (*mem)); \
+ : "ir" ((long) (value)), "m" (*mem)); \
})
@@ -168,7 +169,7 @@ typedef uintmax_t uatomic_max_t;
else \
__asm __volatile (LOCK "addq %q2, %0; sets %1" \
: "=m" (*mem), "=qm" (__result) \
- : "ir" (value), "m" (*mem)); \
+ : "ir" ((long) (value)), "m" (*mem)); \
__result; })
@@ -189,7 +190,7 @@ typedef uintmax_t uatomic_max_t;
else \
__asm __volatile (LOCK "addq %q2, %0; setz %1" \
: "=m" (*mem), "=qm" (__result) \
- : "ir" (value), "m" (*mem)); \
+ : "ir" ((long) (value)), "m" (*mem)); \
__result; })
@@ -279,19 +280,19 @@ typedef uintmax_t uatomic_max_t;
(void) ({ if (sizeof (*mem) == 1) \
__asm __volatile (LOCK "orb %b2, %0" \
: "=m" (*mem) \
- : "m" (*mem), "ir" (1 << (bit))); \
+ : "m" (*mem), "ir" (1L << (bit))); \
else if (sizeof (*mem) == 2) \
__asm __volatile (LOCK "orw %w2, %0" \
: "=m" (*mem) \
- : "m" (*mem), "ir" (1 << (bit))); \
+ : "m" (*mem), "ir" (1L << (bit))); \
else if (sizeof (*mem) == 4) \
__asm __volatile (LOCK "orl %2, %0" \
: "=m" (*mem) \
- : "m" (*mem), "ir" (1 << (bit))); \
+ : "m" (*mem), "ir" (1L << (bit))); \
else if (__builtin_constant_p (bit) && (bit) < 32) \
__asm __volatile (LOCK "orq %2, %0" \
: "=m" (*mem) \
- : "m" (*mem), "i" (1 << (bit))); \
+ : "m" (*mem), "i" (1L << (bit))); \
else \
__asm __volatile (LOCK "orq %q2, %0" \
: "=m" (*mem) \