diff options
Diffstat (limited to 'sysdeps/x86_64')
-rw-r--r-- | sysdeps/x86_64/__longjmp.S | 14 |
-rw-r--r-- | sysdeps/x86_64/setjmp.S | 15 |
2 files changed, 22 insertions, 7 deletions
diff --git a/sysdeps/x86_64/__longjmp.S b/sysdeps/x86_64/__longjmp.S index 22beb888a1..c0c15dc810 100644 --- a/sysdeps/x86_64/__longjmp.S +++ b/sysdeps/x86_64/__longjmp.S @@ -1,4 +1,4 @@ -/* Copyright (C) 2001,2004,2005,2006,2009 Free Software Foundation, Inc. +/* Copyright (C) 2001-2012 Free Software Foundation, Inc. This file is part of the GNU C Library. The GNU C Library is free software; you can redistribute it and/or @@ -26,12 +26,20 @@ ENTRY(__longjmp) /* Restore registers. */ mov (JB_RSP*8)(%rdi),%R8_LP - movq (JB_RBP*8)(%rdi),%r9 + mov (JB_RBP*8)(%rdi),%R9_LP mov (JB_PC*8)(%rdi),%RDX_LP #ifdef PTR_DEMANGLE PTR_DEMANGLE (%R8_LP) - PTR_DEMANGLE (%r9) + PTR_DEMANGLE (%R9_LP) PTR_DEMANGLE (%RDX_LP) +# ifdef __ILP32__ + /* We ignored the high bits of the %rbp value because only the low + bits are mangled. But we cannot presume that %rbp is being used + as a pointer and truncate it, so recover the high bits. */ + movl (JB_RBP*8 + 4)(%rdi), %eax + shlq $32, %rax + orq %rax, %r9 +# endif #endif /* We add unwind information for the target here. */ cfi_def_cfa(%rdi, 0) diff --git a/sysdeps/x86_64/setjmp.S b/sysdeps/x86_64/setjmp.S index 87c095238c..5639292da4 100644 --- a/sysdeps/x86_64/setjmp.S +++ b/sysdeps/x86_64/setjmp.S @@ -1,5 +1,5 @@ /* setjmp for x86-64. - Copyright (C) 2001, 2003, 2005, 2006 Free Software Foundation, Inc. + Copyright (C) 2001-2012 Free Software Foundation, Inc. This file is part of the GNU C Library. The GNU C Library is free software; you can redistribute it and/or @@ -24,9 +24,16 @@ ENTRY (__sigsetjmp) /* Save registers. */ movq %rbx, (JB_RBX*8)(%rdi) #ifdef PTR_MANGLE +# ifdef __ILP32__ + /* Save the high bits of %rbp first, since PTR_MANGLE will + only handle the low bits but we cannot presume %rbp is + being used as a pointer and truncate it. Here we write all + of %rbp, but the low bits will be overwritten below. */ + movq %rbp, (JB_RBP*8)(%rdi) +# endif - movq %rbp, %rax - PTR_MANGLE (%rax) - movq %rax, (JB_RBP*8)(%rdi) + mov %RBP_LP, %RAX_LP + PTR_MANGLE (%RAX_LP) + mov %RAX_LP, (JB_RBP*8)(%rdi) #else movq %rbp, (JB_RBP*8)(%rdi) #endif