[NTOSKRNL/DDK]

- add definition for KeMemoryBarrierWithoutFence
- add x64 version of KeMemoryBarrier and some related defines
- Use KeMemoryBarrierWithoutFence in spinlocks as suggested by Alex

svn path=/trunk/; revision=52079
This commit is contained in:
Timo Kreuzer 2011-06-04 15:42:32 +00:00
parent b19da39918
commit 3c54c6f84f
5 changed files with 43 additions and 8 deletions

View file

@@ -7718,6 +7718,8 @@ KeMemoryBarrier(VOID)
#endif
}
#define KeMemoryBarrierWithoutFence() _ReadWriteBarrier()
NTHALAPI
KIRQL
NTAPI
@@ -7875,6 +7877,21 @@ typedef XSAVE_FORMAT XMM_SAVE_AREA32, *PXMM_SAVE_AREA32;
#define KeGetDcacheFillSize() 1L
#define YieldProcessor _mm_pause
#define FastFence __faststorefence
#define LoadFence _mm_lfence
#define MemoryFence _mm_mfence
#define StoreFence _mm_sfence
#define LFENCE_ACQUIRE() LoadFence()
/* Full hardware memory barrier for x64: orders both stores and loads
   around this point. Not a compiler-only barrier — emits fence
   instructions (see KeMemoryBarrierWithoutFence for the fence-less
   variant). */
FORCEINLINE
VOID
KeMemoryBarrier(VOID)
{
/* FastFence == __faststorefence: serializing store fence (locked op) */
FastFence();
/* LFENCE_ACQUIRE == _mm_lfence: load fence for acquire ordering.
   NOTE(review): the sibling copy of this function carries a FIXME
   questioning whether lfence is needed after __faststorefence —
   presumably kept for load-ordering paranoia; confirm before removing. */
LFENCE_ACQUIRE();
}
#define KeMemoryBarrierWithoutFence() _ReadWriteBarrier()
FORCEINLINE
KIRQL

View file

@@ -45,6 +45,22 @@ typedef XSAVE_FORMAT XMM_SAVE_AREA32, *PXMM_SAVE_AREA32;
#define KeGetDcacheFillSize() 1L
#define YieldProcessor _mm_pause
#define FastFence __faststorefence
#define LoadFence _mm_lfence
#define MemoryFence _mm_mfence
#define StoreFence _mm_sfence
#define LFENCE_ACQUIRE() LoadFence()
/* Full hardware memory barrier for x64: orders both stores and loads
   around this point. Emits actual fence instructions, unlike
   KeMemoryBarrierWithoutFence, which is a compiler barrier only. */
FORCEINLINE
VOID
KeMemoryBarrier(VOID)
{
// FIXME: Do we really need lfence after the __faststorefence ?
/* FastFence == __faststorefence: serializing store fence (locked op) */
FastFence();
/* LFENCE_ACQUIRE == _mm_lfence: load fence for acquire ordering */
LFENCE_ACQUIRE();
}
#define KeMemoryBarrierWithoutFence() _ReadWriteBarrier()
FORCEINLINE
KIRQL

View file

@@ -59,6 +59,8 @@ KeMemoryBarrier(VOID)
#endif
}
#define KeMemoryBarrierWithoutFence() _ReadWriteBarrier()
NTHALAPI
KIRQL
NTAPI

View file

@@ -24,7 +24,7 @@ KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
/* Add an explicit memory barrier to prevent the compiler from reordering
memory accesses across the borders of spinlocks */
_ReadWriteBarrier();
KeMemoryBarrierWithoutFence();
}
//
@@ -39,7 +39,7 @@ KxReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
/* Add an explicit memory barrier to prevent the compiler from reordering
memory accesses across the borders of spinlocks */
_ReadWriteBarrier();
KeMemoryBarrierWithoutFence();
}
#else

View file

@@ -173,7 +173,7 @@ KeTryToAcquireQueuedSpinLockRaiseToSynch(IN KSPIN_LOCK_QUEUE_NUMBER LockNumber,
/* Add an explicit memory barrier to prevent the compiler from reordering
memory accesses across the borders of spinlocks */
_ReadWriteBarrier();
KeMemoryBarrierWithoutFence();
/* Always return true on UP Machines */
return TRUE;
@@ -196,7 +196,7 @@ KeTryToAcquireQueuedSpinLock(IN KSPIN_LOCK_QUEUE_NUMBER LockNumber,
/* Add an explicit memory barrier to prevent the compiler from reordering
memory accesses across the borders of spinlocks */
_ReadWriteBarrier();
KeMemoryBarrierWithoutFence();
/* Always return true on UP Machines */
return TRUE;