[NTOS:KE] Test spinlock ownership on both UP & MP builds

There is no reason not to, and this avoids stupidly introducing bugs.
Jérôme Gardou 2021-05-20 10:13:40 +02:00 committed by Jérôme Gardou
parent 835c30232e
commit f30136bc79
5 changed files with 62 additions and 49 deletions
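For context, the check this commit enables everywhere works as follows: on DBG builds, acquiring a spinlock stamps it with the owning KTHREAD, with bit 0 set to mark it as held. Acquiring a lock you already hold bugchecks with SPIN_LOCK_ALREADY_OWNED, and releasing a lock stamped by another thread bugchecks with SPIN_LOCK_NOT_OWNED. A minimal user-mode sketch of that invariant, with a hypothetical current_thread() standing in for KeGetCurrentThread() and assert() standing in for KeBugCheckEx():

#include <assert.h>
#include <stdint.h>

typedef uintptr_t SPIN_LOCK_T;

/* Hypothetical stand-in for KeGetCurrentThread(): any per-thread,
   pointer-aligned value will do for the sketch. */
static _Thread_local _Alignas(2) unsigned char thread_marker;
static uintptr_t current_thread(void) { return (uintptr_t)&thread_marker; }

/* Acquire (DBG): refuse recursive acquisition, then stamp owner | 1. */
static void dbg_acquire(SPIN_LOCK_T *lock)
{
    assert(*lock != (current_thread() | 1)); /* SPIN_LOCK_ALREADY_OWNED */
    /* ...an SMP build would spin on an interlocked test-and-set here... */
    *lock = current_thread() | 1;
}

/* Release (DBG): only the stamping thread may clear the lock. */
static void dbg_release(SPIN_LOCK_T *lock)
{
    assert(*lock == (current_thread() | 1)); /* SPIN_LOCK_NOT_OWNED */
    *lock = 0;
}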


@@ -28,6 +28,14 @@ FASTCALL
 KefAcquireSpinLockAtDpcLevel(
     IN PKSPIN_LOCK SpinLock)
 {
+#if DBG
+    /* To be on par with HAL/NTOSKRNL */
+#ifdef _M_AMD64
+    *SpinLock = (KSPIN_LOCK)KeGetCurrentThread() | 1;
+#else
+    *SpinLock = (KSPIN_LOCK)(((PKIPCR)KeGetPcr())->PrcbData.CurrentThread) | 1;
+#endif
+#endif
 }
 
 VOID

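The UP HAL stub above now stamps the lock exactly as the kernel-side code does, so a later DBG release check sees a matching owner regardless of which component acquired the lock (on AMD64 via KeGetCurrentThread(), otherwise via the current thread recorded in the PCR's PRCB). The `| 1` encoding relies on thread objects being aligned: bit 0 of the pointer is always zero, so it can double as the held flag, the same bit that InterlockedBitTestAndSet((PLONG)SpinLock, 0) targets on SMP builds. A small illustration with hypothetical helper names:

#include <stdint.h>

typedef struct KTHREAD_STUB { uint64_t body[4]; } KTHREAD_STUB; /* pointer-aligned, like the real object */

/* One word carries both pieces of debug state: bit 0 says "held",
   the remaining bits identify the owning thread. */
static uintptr_t stamp(const KTHREAD_STUB *thread)   { return (uintptr_t)thread | 1; }
static const KTHREAD_STUB *owner_of(uintptr_t lock)  { return (const KTHREAD_STUB *)(lock & ~(uintptr_t)1); }
static int is_held(uintptr_t lock)                   { return (int)(lock & 1); }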

@@ -6,50 +6,25 @@
  * PROGRAMMERS:     Alex Ionescu (alex.ionescu@reactos.org)
  */
 
+#if defined(_M_IX86)
 VOID
 NTAPI
 Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags);
-
-#ifndef CONFIG_SMP
-
-//
-// Spinlock Acquire at IRQL >= DISPATCH_LEVEL
-//
-FORCEINLINE
-VOID
-KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
-{
-    /* On UP builds, spinlocks don't exist at IRQL >= DISPATCH */
-    UNREFERENCED_PARAMETER(SpinLock);
-
-    /* Add an explicit memory barrier to prevent the compiler from reordering
-       memory accesses across the borders of spinlocks */
-    KeMemoryBarrierWithoutFence();
-}
-
-//
-// Spinlock Release at IRQL >= DISPATCH_LEVEL
-//
-FORCEINLINE
-VOID
-KxReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
-{
-    /* On UP builds, spinlocks don't exist at IRQL >= DISPATCH */
-    UNREFERENCED_PARAMETER(SpinLock);
-
-    /* Add an explicit memory barrier to prevent the compiler from reordering
-       memory accesses across the borders of spinlocks */
-    KeMemoryBarrierWithoutFence();
-}
-
-#else
-
+#endif
+
 //
 // Spinlock Acquisition at IRQL >= DISPATCH_LEVEL
 //
+_Acquires_nonreentrant_lock_(SpinLock)
 FORCEINLINE
 VOID
-KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
+KxAcquireSpinLock(
+#if defined(CONFIG_SMP) || DBG
+    _Inout_
+#else
+    _Unreferenced_parameter_
+#endif
+    PKSPIN_LOCK SpinLock)
 {
 #if DBG
     /* Make sure that we don't own the lock already */
@@ -60,6 +35,7 @@ KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
     }
 #endif
 
+#ifdef CONFIG_SMP
     /* Try to acquire the lock */
     while (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
     {
@@ -75,6 +51,12 @@ KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
         }
 #endif
     }
+#endif
+
+    /* Add an explicit memory barrier to prevent the compiler from reordering
+       memory accesses across the borders of spinlocks */
+    KeMemoryBarrierWithoutFence();
 
 #if DBG
     /* On debug builds, we OR in the KTHREAD */
     *SpinLock = (KSPIN_LOCK)KeGetCurrentThread() | 1;
@@ -84,9 +66,16 @@ KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
 //
 // Spinlock Release at IRQL >= DISPATCH_LEVEL
 //
+_Releases_nonreentrant_lock_(SpinLock)
 FORCEINLINE
 VOID
-KxReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
+KxReleaseSpinLock(
+#if defined(CONFIG_SMP) || DBG
+    _Inout_
+#else
+    _Unreferenced_parameter_
+#endif
+    PKSPIN_LOCK SpinLock)
 {
 #if DBG
     /* Make sure that the threads match */
@@ -96,12 +85,17 @@ KxReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
         KeBugCheckEx(SPIN_LOCK_NOT_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
     }
 #endif
 
-    /* Clear the lock */
+#if defined(CONFIG_SMP) || DBG
+    /* Clear the lock */
 #ifdef _WIN64
     InterlockedAnd64((PLONG64)SpinLock, 0);
 #else
     InterlockedAnd((PLONG)SpinLock, 0);
 #endif
-}
 #endif
+
+    /* Add an explicit memory barrier to prevent the compiler from reordering
+       memory accesses across the borders of spinlocks */
+    KeMemoryBarrierWithoutFence();
+}

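Note that KeMemoryBarrierWithoutFence() is now reached on every build flavor, even when the UP path compiles the actual locking away. It is a compiler-only barrier: no fence instruction is emitted, the optimizer is merely forbidden from moving memory accesses across it, which keeps protected accesses between the acquire and release points. On MSVC the WDK macro typically expands to _ReadWriteBarrier(); a rough GCC/Clang equivalent, purely as a sketch:

/* Compiler-only barrier in the spirit of KeMemoryBarrierWithoutFence():
   the empty asm with a "memory" clobber pins memory accesses in place
   without emitting any fence instruction. */
#define COMPILER_BARRIER() __asm__ __volatile__("" ::: "memory")

static int guarded_state;

static void up_critical_section(void)
{
    COMPILER_BARRIER();  /* acquire boundary: accesses cannot move above */
    guarded_state++;     /* protected access stays between the barriers */
    COMPILER_BARRIER();  /* release boundary: accesses cannot move below */
}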

@@ -109,10 +109,10 @@ KeAcquireQueuedSpinLockAtDpcLevel(IN PKSPIN_LOCK_QUEUE LockHandle)
                      0,
                      0);
     }
+#endif
 
     /* Do the inlined function */
     KxAcquireSpinLock(LockHandle->Lock);
-#endif
 }
 
 VOID
@@ -130,10 +130,10 @@ KeReleaseQueuedSpinLockFromDpcLevel(IN PKSPIN_LOCK_QUEUE LockHandle)
                      0,
                      0);
     }
+#endif
 
     /* Do the inlined function */
     KxReleaseSpinLock(LockHandle->Lock);
-#endif
 }
 
 #endif
@@ -302,6 +302,15 @@ BOOLEAN
 FASTCALL
 KeTryToAcquireSpinLockAtDpcLevel(IN OUT PKSPIN_LOCK SpinLock)
 {
+#if DBG
+    /* Make sure that we don't own the lock already */
+    if (((KSPIN_LOCK)KeGetCurrentThread() | 1) == *SpinLock)
+    {
+        /* We do, bugcheck! */
+        KeBugCheckEx(SPIN_LOCK_ALREADY_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
+    }
+#endif
+
 #ifdef CONFIG_SMP
     /* Check if it's already acquired */
     if (!(*SpinLock))
@@ -318,11 +327,11 @@ KeTryToAcquireSpinLockAtDpcLevel(IN OUT PKSPIN_LOCK SpinLock)
         /* It was already acquired */
         return FALSE;
     }
+#endif
 
 #if DBG
     /* On debug builds, we OR in the KTHREAD */
     *SpinLock = (ULONG_PTR)KeGetCurrentThread() | 1;
 #endif
-#endif
 
     /* All is well, return TRUE */
@@ -337,10 +346,10 @@ FASTCALL
 KeAcquireInStackQueuedSpinLockAtDpcLevel(IN PKSPIN_LOCK SpinLock,
                                          IN PKLOCK_QUEUE_HANDLE LockHandle)
 {
-#ifdef CONFIG_SMP
     /* Set it up properly */
     LockHandle->LockQueue.Next = NULL;
     LockHandle->LockQueue.Lock = SpinLock;
+#ifdef CONFIG_SMP
 #if 0
     KeAcquireQueuedSpinLockAtDpcLevel(LockHandle->LockQueue.Next);
 #else
@@ -354,11 +363,11 @@ KeAcquireInStackQueuedSpinLockAtDpcLevel(IN PKSPIN_LOCK SpinLock,
                      0,
                      0);
     }
+#endif
+#endif
 
     /* Acquire the lock */
     KxAcquireSpinLock(LockHandle->LockQueue.Lock); // HACK
-#endif
-#endif
 }
 
 /*
@@ -383,11 +392,11 @@ KeReleaseInStackQueuedSpinLockFromDpcLevel(IN PKLOCK_QUEUE_HANDLE LockHandle)
                      0,
                      0);
     }
+#endif
+#endif
 
     /* Release the lock */
     KxReleaseSpinLock(LockHandle->LockQueue.Lock); // HACK
-#endif
-#endif
 }
 
 /*

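With the DBG check in place, KeTryToAcquireSpinLockAtDpcLevel performs the same ownership bookkeeping as the plain acquire path. A hypothetical caller, written against the documented contract (entered at IRQL >= DISPATCH_LEVEL, returns FALSE when the lock is busy):

#include <ntddk.h>

KSPIN_LOCK TryLock; /* set up with KeInitializeSpinLock() in real code */

VOID TryToDoWork(VOID)
{
    if (KeTryToAcquireSpinLockAtDpcLevel(&TryLock))
    {
        /* ... touch the state the lock protects ... */

        /* Pair with the matching DPC-level release */
        KeReleaseSpinLockFromDpcLevel(&TryLock);
    }
    /* else: the lock is busy; note that on a DBG build, re-acquiring it
       on the same thread would now bugcheck with SPIN_LOCK_ALREADY_OWNED
       instead of silently succeeding or deadlocking */
}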

@@ -117,6 +117,7 @@ list(APPEND CRT_SOURCE
     mem/memcmp.c
     mem/memccpy.c
     mem/memicmp.c
+    mem/memset.c
     misc/__crt_MessageBoxA.c
     misc/amsg.c
     misc/assert.c
@@ -399,7 +400,7 @@ if(ARCH STREQUAL "i386")
         math/i386/fmodf_asm.s
         mem/i386/memchr_asm.s
         mem/i386/memmove_asm.s
-        mem/i386/memset_asm.s
+        # mem/i386/memset_asm.s
         misc/i386/readcr4.S
         setjmp/i386/setjmp.s
         string/i386/strcat_asm.s

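Both runtime libraries swap the i386 assembly memset for the portable C source (the libcntpr list below gets the same change). For reference, this is the byte-wise shape such a C replacement takes; the actual mem/memset.c in the tree may well be more elaborate:

#include <stddef.h>

/* Minimal portable memset: write the low byte of c, count times. */
void *memset(void *dest, int c, size_t count)
{
    unsigned char *p = dest;

    while (count--)
        *p++ = (unsigned char)c;

    return dest;
}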

@@ -9,6 +9,7 @@ list(APPEND LIBCNTPR_SOURCE
     mem/memccpy.c
     mem/memcmp.c
     mem/memicmp.c
+    mem/memset.c
     misc/fltused.c
     printf/_snprintf.c
     printf/_snwprintf.c
@@ -186,7 +187,7 @@ if(ARCH STREQUAL "i386")
     list(APPEND LIBCNTPR_ASM_SOURCE
         mem/i386/memchr_asm.s
         mem/i386/memmove_asm.s
-        mem/i386/memset_asm.s
+        # mem/i386/memset_asm.s
         string/i386/strcat_asm.s
         string/i386/strchr_asm.s
         string/i386/strcmp_asm.s