Bugzilla Bug 260899: do not use the spinlock-based implementation of
PR_StackPush and PR_StackPop for Solaris x86. The patch is contributed by
Julien Pierre of Sun.  r=wtc.
Modified Files: _solaris.h os_SunOS_x86.s
This commit is contained in:
wtchang%redhat.com 2005-02-24 02:58:45 +00:00
parent a4621e0461
commit f98d28fa08
2 changed files with 0 additions and 92 deletions

View File

@@ -88,20 +88,6 @@
#define _PR_HAVE_ATOMIC_OPS
#endif
#if defined(_PR_GLOBAL_THREADS_ONLY) || defined(_PR_PTHREADS)
/*
* We have assembly language implementation of atomic
* stacks for the 32-bit sparc and x86 architectures only.
*
* Note: We ran into thread starvation problem with the
* 32-bit sparc assembly language implementation of atomic
* stacks, so we do not use it now. (Bugzilla bug 113740)
*/
#if !defined(sparc) && !defined(__x86_64)
#define _PR_HAVE_ATOMIC_CAS
#endif
#endif
#define _PR_POLL_AVAILABLE
#define _PR_USE_POLL
#define _PR_STAT_HAS_ST_ATIM

View File

@@ -153,81 +153,3 @@ _MD_AtomicAdd:
xaddl %eax, (%ecx)
addl %edx, %eax
ret
/
/ PR_StackPush(listp, elementp)
/
/ Atomically push ElementP onto linked list ListP.
/
/ ABI:   32-bit x86, cdecl (Solaris as, AT&T syntax).
/ In:    4(%esp) = listp    (pointer to list head word)
/        8(%esp) = elementp (first word of element is its "next" link)
/ Out:   nothing
/ Clobb: eax, ecx, edx, flags
/
/ Locking scheme: the list-head word doubles as a spinlock.  The
/ sentinel value -1 in the head means "locked"; any other value is
/ the current top-of-stack pointer.  xchgl atomically swaps -1 in
/ and the old head out.  (The lock prefix is implicit for xchg with
/ a memory operand; the explicit prefix here is harmless.)
/
.text
.globl PR_StackPush
.align 4
PR_StackPush:
movl 4(%esp), %ecx
movl $-1,%eax
pulock:
/ Spin (plain read) while the head already holds the lock sentinel
cmpl %eax,(%ecx)
je pulock
/ Attempt to lock it: atomically swap -1 into the head word
lock
xchgl %eax, (%ecx)
/ Did we set the lock, or did another CPU beat us to it?
cmpl $-1, %eax
je pulock
/ We now have the lock; eax = old top of stack.
/ element->next = old top
movl 8(%esp), %edx
movl %eax, (%edx)
/ head = element -- this store also releases the lock
movl %edx, (%ecx)
/ Done
ret
/
/ elementp = PR_StackPop(listp)
/
/ Atomically pop ElementP off linked list ListP
/
/ ABI:   32-bit x86, cdecl (Solaris as, AT&T syntax).
/ In:    4(%esp) = listp (head word = top-of-stack pointer / lock word)
/ Out:   eax = popped element, or 0 (NULL) if the stack was empty
/ Clobb: eax, ecx, edx, flags
/
/ The head word is the spinlock: -1 means locked, as in PR_StackPush.
/
.text
.globl PR_StackPop
.align 4
PR_StackPop:
movl 4(%esp), %ecx
movl $-1, %eax
polock:
/ Spin (plain read) while the head holds the lock sentinel (-1)
cmpl %eax, (%ecx)
je polock
/ Attempt to lock it: atomically swap -1 into the head word
lock
xchgl %eax, (%ecx)
/ Did we set the lock?  After the swap, eax = previous head value.
cmpl $-1, %eax
je polock
/ We set the lock so now update pointers
/ Was it empty?  (old head == NULL)  If so, edx stays 0 and the
/ NULL is written straight back, unlocking an empty list.
movl $0, %edx
cmpl %eax,%edx
je empty
/ Get element "next" pointer (successor becomes the new head)
movl (%eax), %edx
/ Write NULL to the element "next" pointer so it leaves detached
movl $0, (%eax)
empty:
/ Put elements previous "next" value into listp
/ NOTE: This also unlocks the listp
movl %edx, (%ecx)
/ Return previous listp value (already in eax)
ret