/* Machine-dependent pthreads configuration and inline functions.
   powerpc version.
   Copyright (C) 1996, 1997, 1998 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Richard Henderson <rth@tamu.edu>.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Library General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Library General Public License for more details.

   You should have received a copy of the GNU Library General Public
   License along with the GNU C Library; see the file COPYING.LIB.  If
   not, write to the Free Software Foundation, Inc.,
   59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.  */

/* These routines are from Appendix G of the 'PowerPC 601 RISC Microprocessor
   User's Manual', by IBM and Motorola.  */

/* For multiprocessor systems, we want to ensure all memory accesses
   are completed before we reset a lock.  */
#if 0
/* On non-multiprocessor systems, this can simply be a no-op: */
#define sync() /* nothing */
#else
#define sync() __asm__ __volatile__ ("sync")
#endif

/* Spinlock implementation; required.  */
#if BROKEN_PPC_ASM_CR0
static
#else
extern inline
#endif
int
testandset (int *spinlock)
{
  int ret;

  sync();
  __asm__ __volatile__(
		       "0:    lwarx %0,0,%1;"	/* Load old value, set reservation.  */
		       "      cmpwi %0,0;"	/* Lock already held?  */
		       "      bne 1f;"		/* Yes: return the old value.  */
		       "      stwcx. %2,0,%1;"	/* Try to store 1.  */
		       "      bne- 0b;"		/* Reservation lost: retry.  */
		       "1:    "
	: "=&r"(ret)
	: "r"(spinlock), "r"(1)
	: "cr0", "memory");
  sync();

  return ret != 0;
}
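
/* Example (illustrative sketch, not part of this header): a minimal
   spin lock built on testandset.  The names __example_spin_lock and
   __example_spin_unlock are hypothetical.  */
#if 0
static void
__example_spin_lock (int *spinlock)
{
  /* testandset returns 0 exactly when the old value was 0, i.e. when
     this caller acquired the lock.  */
  while (testandset (spinlock))
    ;
}

static void
__example_spin_unlock (int *spinlock)
{
  /* Make sure all accesses in the critical section have completed
     before the lock becomes visible as free.  */
  sync ();
  *spinlock = 0;
}
#endif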


/* Get some notion of the current stack.  Need not be exactly the top
   of the stack, just something somewhere in the current frame.  */
#define CURRENT_STACK_FRAME  stack_pointer
register char * stack_pointer __asm__ ("r1");
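
/* Example (illustrative sketch, not part of this header): code that
   needs a rough "where is my stack" value can read CURRENT_STACK_FRAME
   directly.  The helper below is hypothetical and assumes the stack
   grows downwards, as it does on powerpc.  */
#if 0
static int
__example_on_current_stack (void *addr, char *stack_limit)
{
  /* True if ADDR lies between the current frame and STACK_LIMIT,
     the high end of this thread's stack.  */
  return (char *) addr >= CURRENT_STACK_FRAME
	 && (char *) addr < stack_limit;
}
#endif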

/* Compare-and-swap for semaphores.
   Note that test-and-set(x) is the same as compare-and-swap(x, 0, 1).  */

#define HAS_COMPARE_AND_SWAP
#if BROKEN_PPC_ASM_CR0
static
#else
extern inline
#endif
int
__compare_and_swap (int *p, int oldval, int newval)
{
  int ret;

  sync();
  __asm__ __volatile__(
		       "0:    lwarx %0,0,%1;"	/* Load old value, set reservation.  */
		       "      xor. %0,%3,%0;"	/* Zero iff it equals OLDVAL.  */
		       "      bne 1f;"		/* Mismatch: fail.  */
		       "      stwcx. %2,0,%1;"	/* Try to store NEWVAL.  */
		       "      bne- 0b;"		/* Reservation lost: retry.  */
		       "1:    "
	: "=&r"(ret)
	: "r"(p), "r"(newval), "r"(oldval)
	: "cr0", "memory");
  sync();
  return ret == 0;
}
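
/* Example (illustrative sketch, not part of this header): the note
   above means testandset could be expressed via __compare_and_swap.
   The name __example_testandset is hypothetical.  */
#if 0
static int
__example_testandset (int *spinlock)
{
  /* __compare_and_swap returns 1 on a successful 0 -> 1 transition,
     while testandset returns 0 on acquisition, hence the negation.  */
  return !__compare_and_swap (spinlock, 0, 1);
}
#endif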