Diffstat (limited to 'sysdeps/powerpc')
-rw-r--r--   sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S   | 6
-rw-r--r--   sysdeps/powerpc/powerpc64/__longjmp-common.S       | 7
2 files changed, 6 insertions, 7 deletions
diff --git a/sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S b/sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S
index 7c6e27c209..f9f010fcac 100644
--- a/sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S
+++ b/sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S
@@ -1,5 +1,5 @@
 /* longjmp for PowerPC.
-   Copyright (C) 1995-99, 2000, 2003-2005, 2006 Free Software Foundation, Inc.
+   Copyright (C) 1995-99, 2000, 2003-2006, 2009 Free Software Foundation, Inc.
    This file is part of the GNU C Library.

    The GNU C Library is free software; you can redistribute it and/or
@@ -64,7 +64,7 @@ ENTRY (BP_SYM (__longjmp))
 	andi.	r6,r5,0xf
 	lwz	r0,((JB_VRSAVE)*4)(3)
 	mtspr	VRSAVE,r0
-	beq+	aligned_restore_vmx
+	beq+	L(aligned_restore_vmx)
 	addi	r6,r5,16
 	lvsl	v0,0,r5
 	lvx	v1,0,r5
@@ -88,7 +88,7 @@ ENTRY (BP_SYM (__longjmp))
 	lvx	v1,0,r5
 	vperm	v31,v31,v1,v0
 	b	L(no_vmx)
-aligned_restore_vmx:
+L(aligned_restore_vmx):
 	addi	r6,r5,16
 	lvx	v20,0,r5
 	addi	r5,r5,32
diff --git a/sysdeps/powerpc/powerpc64/__longjmp-common.S b/sysdeps/powerpc/powerpc64/__longjmp-common.S
index 700a2a543c..19b2849c01 100644
--- a/sysdeps/powerpc/powerpc64/__longjmp-common.S
+++ b/sysdeps/powerpc/powerpc64/__longjmp-common.S
@@ -1,6 +1,5 @@
 /* longjmp for PowerPC64.
-   Copyright (C) 1995, 1996,1997,1999,2000,2001,2002,2003,2004,2005,2006
-	Free Software Foundation, Inc.
+   Copyright (C) 1995, 1996,1997,1999-2006,2009 Free Software Foundation, Inc.
    This file is part of the GNU C Library.

    The GNU C Library is free software; you can redistribute it and/or
@@ -58,7 +57,7 @@ ENTRY (BP_SYM (__longjmp))
 	andi.	r6,r5,0xf
 	lwz	r0,((JB_VRSAVE)*8)(3)
 	mtspr	VRSAVE,r0
-	beq+	aligned_restore_vmx
+	beq+	L(aligned_restore_vmx)
 	addi	r6,r5,16
 	lvsl	v0,0,r5
 	lvx	v1,0,r5
@@ -82,7 +81,7 @@ ENTRY (BP_SYM (__longjmp))
 	lvx	v1,0,r5
 	vperm	v31,v31,v1,v0
 	b	L(no_vmx)
-aligned_restore_vmx:
+L(aligned_restore_vmx):
 	addi	r6,r5,16
 	lvx	v20,0,r5
 	addi	r5,r5,32
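
Aside from the copyright-year updates, the substantive change in both files is relabeling aligned_restore_vmx through glibc's L() macro so the branch target becomes an assembler-local label. The fragment below is a minimal sketch of the effect, assuming the usual ELF definition of L() from sysdep.h (the exact definition can vary by architecture and configuration):

	/* Illustrative assumption: on ELF targets glibc's sysdep.h maps local
	   labels to .L-prefixed names, which the assembler keeps out of the
	   object file's symbol table.  */
	#define L(name) .L##name

	/* Before: a label visible as a symbol in the object file.  */
	aligned_restore_vmx:

	/* After: expands to .Laligned_restore_vmx, a purely local label.  */
	L(aligned_restore_vmx):

Keeping such internal branch targets local matches the existing L(no_vmx) label in the same routines and avoids stray symbols that tools such as profilers or backtracers could mistake for function boundaries inside __longjmp.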