/*
 * PROJECT: ReactOS Kernel
 * LICENSE: GPL - See COPYING in the top level directory
 * FILE: ntoskrnl/include/internal/spinlock.h
 * PURPOSE: Internal Inlined Functions for spinlocks, shared with HAL
 * PROGRAMMERS: Alex Ionescu (alex.ionescu@reactos.org)
 */

VOID
|
|
|
|
NTAPI
|
|
|
|
Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags);
|
|
|
|
|
|
|
|
#ifndef CONFIG_SMP
|
|
|
|
|
|
|
|
//
|
|
|
|
// Spinlock Acquire at IRQL >= DISPATCH_LEVEL
|
|
|
|
//
|
|
|
|
FORCEINLINE
|
|
|
|
VOID
|
|
|
|
KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
|
|
|
|
{
|
|
|
|
/* On UP builds, spinlocks don't exist at IRQL >= DISPATCH */
|
|
|
|
UNREFERENCED_PARAMETER(SpinLock);
|
2011-06-04 12:33:54 +00:00
|
|
|
|
|
|
|
/* Add an explicit memory barrier to prevent the compiler from reordering
|
|
|
|
memory accesses across the borders of spinlocks */
|
2011-06-04 15:42:32 +00:00
|
|
|
KeMemoryBarrierWithoutFence();
|
2010-01-02 19:41:03 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
//
|
|
|
|
// Spinlock Release at IRQL >= DISPATCH_LEVEL
|
|
|
|
//
|
|
|
|
FORCEINLINE
|
|
|
|
VOID
|
|
|
|
KxReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
|
|
|
|
{
|
|
|
|
/* On UP builds, spinlocks don't exist at IRQL >= DISPATCH */
|
|
|
|
UNREFERENCED_PARAMETER(SpinLock);
|
2011-06-04 12:33:54 +00:00
|
|
|
|
|
|
|
/* Add an explicit memory barrier to prevent the compiler from reordering
|
|
|
|
memory accesses across the borders of spinlocks */
|
2011-06-04 15:42:32 +00:00
|
|
|
KeMemoryBarrierWithoutFence();
|
2010-01-02 19:41:03 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
#else
|
|
|
|
|
|
|
|
//
|
|
|
|
// Spinlock Acquisition at IRQL >= DISPATCH_LEVEL
|
|
|
|
//
|
|
|
|
FORCEINLINE
|
|
|
|
VOID
|
|
|
|
KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
|
|
|
|
{
|
2012-03-08 09:18:28 +00:00
|
|
|
#if DBG
|
2010-01-02 19:41:03 +00:00
|
|
|
/* Make sure that we don't own the lock already */
|
|
|
|
if (((KSPIN_LOCK)KeGetCurrentThread() | 1) == *SpinLock)
|
|
|
|
{
|
|
|
|
/* We do, bugcheck! */
|
|
|
|
KeBugCheckEx(SPIN_LOCK_ALREADY_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
|
|
|
/* Try to acquire the lock */
|
|
|
|
while (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
|
|
|
|
{
|
2012-03-08 09:18:28 +00:00
|
|
|
#if defined(_M_IX86) && DBG
|
2010-01-02 19:41:03 +00:00
|
|
|
/* On x86 debug builds, we use a much slower but useful routine */
|
|
|
|
Kii386SpinOnSpinLock(SpinLock, 5);
|
|
|
|
#else
|
|
|
|
/* It's locked... spin until it's unlocked */
|
|
|
|
while (*(volatile KSPIN_LOCK *)SpinLock & 1)
|
|
|
|
{
|
|
|
|
/* Yield and keep looping */
|
|
|
|
YieldProcessor();
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
}
|
2012-03-08 09:23:24 +00:00
|
|
|
#if DBG
|
2010-01-02 19:41:03 +00:00
|
|
|
/* On debug builds, we OR in the KTHREAD */
|
|
|
|
*SpinLock = (KSPIN_LOCK)KeGetCurrentThread() | 1;
|
|
|
|
#endif
|
|
|
|
}
|
|
|
|
|
|
|
|
//
|
|
|
|
// Spinlock Release at IRQL >= DISPATCH_LEVEL
|
|
|
|
//
|
|
|
|
FORCEINLINE
|
|
|
|
VOID
|
|
|
|
KxReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
|
|
|
|
{
|
|
|
|
#if DBG
|
|
|
|
/* Make sure that the threads match */
|
|
|
|
if (((KSPIN_LOCK)KeGetCurrentThread() | 1) != *SpinLock)
|
|
|
|
{
|
|
|
|
/* They don't, bugcheck */
|
|
|
|
KeBugCheckEx(SPIN_LOCK_NOT_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
/* Clear the lock */
|
|
|
|
InterlockedAnd((PLONG)SpinLock, 0);
|
|
|
|
}
|
|
|
|
|
|
|
|
#endif
|