diff options
author | Alan Modra <amodra@gmail.com> | 2013-08-17 18:31:45 +0930 |
---|---|---|
committer | Alan Modra <amodra@gmail.com> | 2013-10-04 10:35:43 +0930 |
commit | 7b88401f3b25325b1381798a0eccb3efe7751fec (patch) | |
tree | 862973bae84d0411a612406f7801529f1e3caf45 /sysdeps/powerpc/powerpc32/power6/fpu | |
parent | 9c008155b7d5d1bd81d909497850a2ece28aec50 (diff) | |
download | glibc-7b88401f3b25325b1381798a0eccb3efe7751fec.tar glibc-7b88401f3b25325b1381798a0eccb3efe7751fec.tar.gz glibc-7b88401f3b25325b1381798a0eccb3efe7751fec.tar.bz2 glibc-7b88401f3b25325b1381798a0eccb3efe7751fec.zip |
PowerPC floating point little-endian [12 of 15]
http://sourceware.org/ml/libc-alpha/2013-08/msg00087.html
Fixes for little-endian in 32-bit assembly.
* sysdeps/powerpc/sysdep.h (LOWORD, HIWORD, HISHORT): Define.
* sysdeps/powerpc/powerpc32/fpu/s_copysign.S: Load little-endian
words of double from correct stack offsets.
* sysdeps/powerpc/powerpc32/fpu/s_copysignl.S: Likewise.
* sysdeps/powerpc/powerpc32/fpu/s_lrint.S: Likewise.
* sysdeps/powerpc/powerpc32/fpu/s_lround.S: Likewise.
* sysdeps/powerpc/powerpc32/power4/fpu/s_llrint.S: Likewise.
* sysdeps/powerpc/powerpc32/power4/fpu/s_llrintf.S: Likewise.
* sysdeps/powerpc/powerpc32/power5+/fpu/s_llround.S: Likewise.
* sysdeps/powerpc/powerpc32/power5+/fpu/s_lround.S: Likewise.
* sysdeps/powerpc/powerpc32/power5/fpu/s_isnan.S: Likewise.
* sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S: Likewise.
* sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S: Likewise.
* sysdeps/powerpc/powerpc32/power6/fpu/s_llrintf.S: Likewise.
* sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S: Likewise.
* sysdeps/powerpc/powerpc32/power7/fpu/s_finite.S: Likewise.
* sysdeps/powerpc/powerpc32/power7/fpu/s_isinf.S: Likewise.
* sysdeps/powerpc/powerpc32/power7/fpu/s_isnan.S: Likewise.
* sysdeps/powerpc/powerpc64/power7/fpu/s_finite.S: Use HISHORT.
* sysdeps/powerpc/powerpc64/power7/fpu/s_isinf.S: Likewise.
Diffstat (limited to 'sysdeps/powerpc/powerpc32/power6/fpu')
4 files changed, 8 insertions, 8 deletions
diff --git a/sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S b/sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S index 2c095db1d4..3ea18589c8 100644 --- a/sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S +++ b/sysdeps/powerpc/powerpc32/power6/fpu/s_isnan.S @@ -27,8 +27,8 @@ EALIGN (__isnan, 4, 0) ori r1,r1,0 stfd fp1,24(r1) /* copy FPR to GPR */ ori r1,r1,0 - lwz r4,24(r1) - lwz r5,28(r1) + lwz r4,24+HIWORD(r1) + lwz r5,24+LOWORD(r1) lis r0,0x7ff0 /* const long r0 0x7ff00000 00000000 */ clrlwi r4,r4,1 /* x = fabs(x) */ cmpw cr7,r4,r0 /* if (fabs(x) =< inf) */ diff --git a/sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S b/sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S index 3344b312e2..c0660cf6ec 100644 --- a/sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S +++ b/sysdeps/powerpc/powerpc32/power6/fpu/s_llrint.S @@ -29,8 +29,8 @@ ENTRY (__llrint) /* Insure the following load is in a different dispatch group by inserting "group ending nop". */ ori r1,r1,0 - lwz r3,8(r1) - lwz r4,12(r1) + lwz r3,8+HIWORD(r1) + lwz r4,8+LOWORD(r1) addi r1,r1,16 blr END (__llrint) diff --git a/sysdeps/powerpc/powerpc32/power6/fpu/s_llrintf.S b/sysdeps/powerpc/powerpc32/power6/fpu/s_llrintf.S index 7f64f8d12b..ce298905c1 100644 --- a/sysdeps/powerpc/powerpc32/power6/fpu/s_llrintf.S +++ b/sysdeps/powerpc/powerpc32/power6/fpu/s_llrintf.S @@ -28,8 +28,8 @@ ENTRY (__llrintf) /* Insure the following load is in a different dispatch group by inserting "group ending nop". */ ori r1,r1,0 - lwz r3,8(r1) - lwz r4,12(r1) + lwz r3,8+HIWORD(r1) + lwz r4,8+LOWORD(r1) addi r1,r1,16 blr END (__llrintf) diff --git a/sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S b/sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S index 0ff04cb718..abb0840d18 100644 --- a/sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S +++ b/sysdeps/powerpc/powerpc32/power6/fpu/s_llround.S @@ -39,8 +39,8 @@ ENTRY (__llround) /* Insure the following load is in a different dispatch group by inserting "group ending nop". 
*/ ori r1,r1,0 - lwz r4,12(r1) - lwz r3,8(r1) + lwz r3,8+HIWORD(r1) + lwz r4,8+LOWORD(r1) addi r1,r1,16 blr END (__llround)