- KxAcquireSpinLock: check for an already owned lock only on debug builds, fix the check in the inner loop, call Kii386SpinOnSpinLock in place of the inner loop (on x86 debug builds) rather than inside it, and simplify the code (a user-mode sketch of the resulting acquire pattern follows below)
- Stub-implement Kii386SpinOnSpinLock in C

svn path=/branches/ros-amd64-bringup/; revision=44886
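For context, the acquire pattern the new code switches to (an interlocked test-and-set in the outer loop, with a read-only spin on the lock bit while it is held) can be sketched in plain user-mode C11. The names spin_acquire/spin_release and the use of <stdatomic.h> are illustrative stand-ins, not the ReactOS API:

/* Hypothetical user-mode sketch of the acquire pattern; not ReactOS code.
 * Assumes a C11 compiler and <stdatomic.h>. */
#include <stdatomic.h>
#include <stdint.h>

typedef atomic_uintptr_t SPIN_LOCK_SKETCH;   /* stand-in for KSPIN_LOCK */

static void spin_acquire(SPIN_LOCK_SKETCH *lock)
{
    /* Outer loop: atomically try to set the "locked" bit (bit 0) */
    while (atomic_fetch_or_explicit(lock, 1, memory_order_acquire) & 1)
    {
        /* Already held: spin on plain loads until bit 0 clears, so contended
         * CPUs do not keep issuing locked read-modify-write cycles */
        while (atomic_load_explicit(lock, memory_order_relaxed) & 1)
        {
            /* a pause/yield hint (YieldProcessor) would go here in real code */
        }
    }
}

static void spin_release(SPIN_LOCK_SKETCH *lock)
{
    /* Clearing the word releases the lock and publishes the critical section */
    atomic_store_explicit(lock, 0, memory_order_release);
}

The read-only inner loop is what keeps contended CPUs from hammering the bus; only once the bit is observed clear does the outer loop retry the interlocked operation.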
Timo Kreuzer 2010-01-02 16:22:43 +00:00
parent d786ac57d2
commit 27d2ac3ad6
3 changed files with 39 additions and 54 deletions


@@ -101,6 +101,10 @@ KeGetPreviousMode(VOID)
     } \
 }
 
+VOID
+NTAPI
+Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags);
+
 #ifndef CONFIG_SMP
 //
 // Spinlock Acquire at IRQL >= DISPATCH_LEVEL
@@ -310,44 +314,34 @@ FORCEINLINE
 VOID
 KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
 {
+#ifdef DBG
     /* Make sure that we don't own the lock already */
     if (((KSPIN_LOCK)KeGetCurrentThread() | 1) == *SpinLock)
     {
         /* We do, bugcheck! */
         KeBugCheckEx(SPIN_LOCK_ALREADY_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
     }
+#endif
 
-    /* Start acquire loop */
-    for (;;)
+    /* Try to acquire the lock */
+    while (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
     {
-        /* Try to acquire it */
-        if (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
-        {
-            /* Value changed... wait until it's unlocked */
-            while (*(volatile KSPIN_LOCK *)SpinLock == 1)
-            {
-#if DBG
-                /* On debug builds, we use a much slower but useful routine */
-                //Kii386SpinOnSpinLock(SpinLock, 5);
-                /* FIXME: Do normal yield for now */
-                YieldProcessor();
+#if defined(_M_IX86) && defined(DBG)
+        /* On x86 debug builds, we use a much slower but useful routine */
+        Kii386SpinOnSpinLock(SpinLock, 5);
 #else
-                /* Otherwise, just yield and keep looping */
-                YieldProcessor();
-#endif
-            }
-        }
-        else
+        /* It's locked... spin until it's unlocked */
+        while (*(volatile KSPIN_LOCK *)SpinLock & 1)
         {
-#if DBG
-            /* On debug builds, we OR in the KTHREAD */
-            *SpinLock = (KSPIN_LOCK)KeGetCurrentThread() | 1;
-#endif
-            /* All is well, break out */
-            break;
+            /* Yield and keep looping */
+            YieldProcessor();
         }
+#endif
     }
+
+#ifdef DBG
+    /* On debug builds, we OR in the KTHREAD */
+    *SpinLock = (KSPIN_LOCK)KeGetCurrentThread() | 1;
+#endif
 }
 
 //
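Note on the inner-loop fix: on debug builds the lock word of a held lock is the owning thread pointer with bit 0 set (see the *SpinLock = ... | 1 line above), so it is normally not equal to the literal value 1; testing bit 0 instead of comparing against 1 is what makes the wait loop actually spin on such builds. A standalone illustration of that tagging convention, using uintptr_t as a stand-in for KSPIN_LOCK and hypothetical names, is:

/* Standalone illustration of the DBG owner tag; hypothetical names, not ReactOS code. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    unsigned long long fake_thread;                 /* stands in for a KTHREAD */
    uintptr_t owner = (uintptr_t)&fake_thread;      /* aligned, so bit 0 is free */
    uintptr_t lock_word = owner | 1;                /* value of a held lock on DBG builds */

    assert((lock_word & 1) != 0);   /* the new test: correctly sees "held" */
    assert(lock_word != 1);         /* the old "== 1" test would not have kept spinning */

    printf("held lock word: %#llx\n", (unsigned long long)lock_word);
    return 0;
}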


@@ -2828,31 +2828,3 @@ _KeSynchronizeExecution@12:
     ret 12
 .endfunc
-
-/*++
- * Kii386SpinOnSpinLock
- *
- * FILLMEIN
- *
- * Params:
- *     SpinLock - FILLMEIN
- *
- *     Flags - FILLMEIN
- *
- * Returns:
- *     None.
- *
- * Remarks:
- *     FILLMEIN
- *
- *--*/
-.globl _Kii386SpinOnSpinLock@8
-.func Kii386SpinOnSpinLock@8
-_Kii386SpinOnSpinLock@8:
-
-#ifdef CONFIG_SMP
-    /* FIXME: TODO */
-    int 3
-#endif
-
-    ret 8
-.endfunc


@@ -454,3 +454,22 @@ KeTestSpinLock(IN PKSPIN_LOCK SpinLock)
     /* Spinlock appears to be free */
     return TRUE;
 }
+
+#ifdef _M_IX86
+VOID
+NTAPI
+Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags)
+{
+    // FIXME: Handle flags
+    UNREFERENCED_PARAMETER(Flags);
+
+    /* Spin until it's unlocked */
+    while (*(volatile KSPIN_LOCK *)SpinLock & 1)
+    {
+        // FIXME: Check for timeout
+
+        /* Yield and keep looping */
+        YieldProcessor();
+    }
+}
+#endif
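Purely as a hypothetical sketch of the timeout FIXME above (not the planned ReactOS behavior), a bounded spin could count iterations and report a probable deadlock once a threshold is exceeded; the names, the SPIN_LIMIT value, and the bail-out action are all assumptions:

/* Hypothetical bounded-spin sketch for the "check for timeout" FIXME;
 * plain C stand-ins, not the ReactOS implementation. */
#include <stdint.h>
#include <stdio.h>

#define SPIN_LIMIT 10000000UL   /* assumed threshold, purely illustrative */

static void spin_on_lock_bounded(volatile uintptr_t *lock_word)
{
    unsigned long spins = 0;

    /* Spin while the low "locked" bit is set */
    while (*lock_word & 1)
    {
        if (++spins >= SPIN_LIMIT)
        {
            /* A kernel would more likely bugcheck or break into the debugger
             * here; the sketch just reports and keeps spinning. */
            fprintf(stderr, "possible deadlock: spun %lu times\n", spins);
            spins = 0;
        }
        /* a pause/yield hint would go here in real code */
    }
}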