author     H.J. Lu <hjl.tools@gmail.com>    2012-05-15 10:23:22 -0700
committer  H.J. Lu <hjl.tools@gmail.com>    2012-05-15 10:23:22 -0700
commit     592f90e6ec52961ec44300f489d90b6231e85566 (patch)
tree       5790236ccca42e13331ddf8acb8f6c12f5134224 /nptl
parent     0e8860ad21c60450da7e08c36975a88667c4d4be (diff)
Use LP_OP(cmp), R*_LP, LP_SIZE and ASM_ADDR
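
The macros named in the subject are glibc's pointer-size abstraction for x86-64, used so that the same assembly source assembles for both the LP64 x86_64 ABI and the ILP32 x32 ABI. As a rough sketch of how they behave (an illustration only -- the actual definitions live in sysdeps/x86_64/sysdep.h and the x32 sysdep header and may be split or spelled differently, and the __ILP32__ test here is just the usual compiler predefine for x32):

	/* Sketch, not the verbatim glibc headers: parameterize everything
	   that depends on the size of a long/pointer.  */
	#ifdef __ILP32__                    /* x32: longs/pointers are 4 bytes.  */
	# define LP_SIZE      4
	# define LP_OP(insn)  insn##l       /* LP_OP(cmp) -> cmpl, etc.  */
	# define ASM_ADDR     .long
	# define RAX_LP       eax
	# define RSI_LP       esi
	# define R8_LP        r8d
	#else                               /* LP64: longs/pointers are 8 bytes.  */
	# define LP_SIZE      8
	# define LP_OP(insn)  insn##q       /* LP_OP(cmp) -> cmpq, etc.  */
	# define ASM_ADDR     .quad
	# define RAX_LP       rax
	# define RSI_LP       rsi
	# define R8_LP        r8
	#endif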
Diffstat (limited to 'nptl')
-rw-r--r--  nptl/ChangeLog                                            |  8
-rw-r--r--  nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S  | 40
2 files changed, 28 insertions, 20 deletions
diff --git a/nptl/ChangeLog b/nptl/ChangeLog
index 5580286064..dab7c27073 100644
--- a/nptl/ChangeLog
+++ b/nptl/ChangeLog
@@ -1,5 +1,13 @@
2012-05-15 H.J. Lu <hongjiu.lu@intel.com>
+ * sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S: Use
+ LP_OP(cmp), RSI_LP and R8_LP on dep_mutex pointer. Load
+ __vdso_clock_gettime pointer into RAX_LP.
+ (__gcc_personality_v0): Replace 8-byte data alignment with
+ LP_SIZE alignment and .quad with ASM_ADDR.
+
+2012-05-15 H.J. Lu <hongjiu.lu@intel.com>
+
* sysdeps/unix/sysv/linux/x86_64/pthread_cond_timedwait.S: Use
LP_OP(cmp), RSI_LP and R8_LP on dep_mutex pointer. Load
__vdso_clock_gettime pointer into RAX_LP.
diff --git a/nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S b/nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S
index fa0455f355..6c1031ee05 100644
--- a/nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S
+++ b/nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S
@@ -65,14 +65,14 @@ __pthread_cond_wait:
+--------------------------+
*/
- cmpq $-1, dep_mutex(%rdi)
+ LP_OP(cmp) $-1, dep_mutex(%rdi)
/* Prepare structure passed to cancellation handler. */
movq %rdi, 8(%rsp)
movq %rsi, 16(%rsp)
je 15f
- movq %rsi, dep_mutex(%rdi)
+ mov %RSI_LP, dep_mutex(%rdi)
/* Get internal lock. */
15: movl $1, %esi
@@ -120,12 +120,12 @@ __pthread_cond_wait:
movl %eax, (%rsp)
xorq %r10, %r10
- cmpq $-1, dep_mutex(%rdi)
+ LP_OP(cmp) $-1, dep_mutex(%rdi)
leaq cond_futex(%rdi), %rdi
movl $FUTEX_WAIT, %esi
je 60f
- movq dep_mutex-cond_futex(%rdi), %r8
+ mov dep_mutex-cond_futex(%rdi), %R8_LP
/* Requeue to a non-robust PI mutex if the PI bit is set and
the robust bit is not set. */
movl MUTEX_KIND(%r8), %eax
@@ -206,7 +206,7 @@ __pthread_cond_wait:
jne 17f
addq $cond_nwaiters, %rdi
- cmpq $-1, dep_mutex-cond_nwaiters(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_nwaiters(%rdi)
movl $1, %edx
#ifdef __ASSUME_PRIVATE_FUTEX
movl $FUTEX_WAKE, %eax
@@ -255,7 +255,7 @@ __pthread_cond_wait:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -267,7 +267,7 @@ __pthread_cond_wait:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -283,7 +283,7 @@ __pthread_cond_wait:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -298,7 +298,7 @@ __pthread_cond_wait:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -319,7 +319,7 @@ __pthread_cond_wait:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -348,7 +348,7 @@ __pthread_cond_wait:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -374,7 +374,7 @@ __pthread_cond_wait:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -386,7 +386,7 @@ __pthread_cond_wait:
93:
/* Set the rest of SYS_futex args for FUTEX_WAIT_REQUEUE_PI. */
xorq %r10, %r10
- movq dep_mutex(%rdi), %r8
+ mov dep_mutex(%rdi), %R8_LP
leaq cond_futex(%rdi), %rdi
jmp 90b
.LcleanupEND2:
@@ -434,7 +434,7 @@ __condvar_cleanup1:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -467,7 +467,7 @@ __condvar_cleanup1:
andl $~((1 << nwaiters_shift) - 1), %eax
jne 4f
- cmpq $-1, dep_mutex(%rdi)
+ LP_OP(cmp) $-1, dep_mutex(%rdi)
leaq cond_nwaiters(%rdi), %rdi
movl $1, %edx
#ifdef __ASSUME_PRIVATE_FUTEX
@@ -495,7 +495,7 @@ __condvar_cleanup1:
#if cond_lock != 0
addq $cond_lock, %rdi
#endif
- cmpq $-1, dep_mutex-cond_lock(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi)
movl $LLL_PRIVATE, %eax
movl $LLL_SHARED, %esi
cmovne %eax, %esi
@@ -506,7 +506,7 @@ __condvar_cleanup1:
2: testl %ecx, %ecx
jnz 5f
addq $cond_futex, %rdi
- cmpq $-1, dep_mutex-cond_futex(%rdi)
+ LP_OP(cmp) $-1, dep_mutex-cond_futex(%rdi)
movl $0x7fffffff, %edx
#ifdef __ASSUME_PRIVATE_FUTEX
movl $FUTEX_WAKE, %eax
@@ -559,9 +559,9 @@ __condvar_cleanup1:
.hidden DW.ref.__gcc_personality_v0
.weak DW.ref.__gcc_personality_v0
.section .gnu.linkonce.d.DW.ref.__gcc_personality_v0,"aw",@progbits
- .align 8
+ .align LP_SIZE
.type DW.ref.__gcc_personality_v0, @object
- .size DW.ref.__gcc_personality_v0, 8
+ .size DW.ref.__gcc_personality_v0, LP_SIZE
DW.ref.__gcc_personality_v0:
- .quad __gcc_personality_v0
+ ASM_ADDR __gcc_personality_v0
#endif
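
For reference, a few of the rewritten lines would expand as follows under each ABI, assuming the macro sketch given above; the point of the change is that dep_mutex holds a mutex pointer, so the compare/store width and the personality-routine address slot must follow the ABI's pointer size instead of being hard-coded to 8 bytes:

	/* LP64 x86_64 expansion (8-byte pointers):  */
	cmpq	$-1, dep_mutex(%rdi)
	mov	%rsi, dep_mutex(%rdi)	/* 64-bit store, width implied by %rsi  */
	.align	8
	.quad	__gcc_personality_v0

	/* x32 (ILP32) expansion (4-byte pointers):  */
	cmpl	$-1, dep_mutex(%rdi)
	mov	%esi, dep_mutex(%rdi)	/* 32-bit store, width implied by %esi  */
	.align	4
	.long	__gcc_personality_v0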