Merge 34739 - 34769, 34796 - 34905, 34916 - 34967, 34970 - 35135, 35167, 35325, 35326, 35443 - 35506, 35510 - 35546, 35587, 35616,

35644 from ros-amd64-bringup branch:

- Update AMD64 intrinsic Interlocked functions
- Add _InterlockedExchange64 to intrin_x86.h
- Fix definitions of IsBadHugeReadPtr, IsBadHugeWritePtr, IsBadReadPtr, IsBadStringPtrA, IsBadStringPtrW, IsBadWritePtr and SetProcessWorkingSetSize
- Define CONTEXT and XMM_SAVE_AREA32, taken from WINE
- Include _M_AMD64 for definitions in ioaccess.h
- Add Interlocked intrinsic definitions for AMD64 platform
- Add KI_USER_SHARED_DATA and KeGetCurrentIrql() prototype to winddk.h
- Fix prototypes for KfAcquireSpinLock, KfReleaseSpinLock, KeAcquireInStackQueuedSpinLock and KeReleaseInStackQueuedSpinLock on amd64
- Fix _rotl declaration, add mysteriously missing _rotr.
winnt.h:
- Fix CONTEXT_AMD64 and friends, add various constants.
- Define RUNTIME_FUNCTION, RtlCaptureContext, RtlRestoreContext, RtlAddFunctionTable, RtlInstallFunctionTableCallback, RtlDeleteFunctionTable.
- Fix definitions for EXCEPTION_RECORD and friends to support amd64.
- Define IMAGE_THUNK_DATA64 and related constants.
- Define IMAGE_TLS_DIRECTORY64 and related constants.
- Remove WINEisms
- Remove multiple declarations and minor fixes.
- Fix KESEG0_BASE for amd64
- Fix definition for NdisCopyLookaheadData on amd64
- Only add function prototypes if NO_INTERLOCKED_INTRINSICS is defined
- Move the inlined InterlockedAnd/Or from rtl to winbase.h and rename them to InterlockedAnd_Inline/InterlockedOr_Inline
- Fix TreeView_EnsureVisible macro.
- Add missing 64 bit intrinsic Interlocked functions
- Fix _InterlockedDecrement64
- Fix InterlockedExchangeAddSizeT
- Fix __writecrx intrinsics
- Fix ExQueryPoolBlockSize prototype
- Make KI_USER_SHARED_DATA and IMAGE_ORDINAL_FLAG64 a ULONGLONG
- Fix definition of IMAGE_OPTIONAL_HEADER64
- Add KPCR structure
- Add __readcrx intrinsics
- Add some definitions to winddk.h
- Add a field for a DbgPrint function pointer to the ROS_LOADER_PARAMETER_BLOCK for early debug prints in ntoskrnl
- Update KPCR and KIPCR
- Add KeGetPcr() and update KeGetCurrentProcessorNumber 
- Fix SECURITY_DESCRIPTOR_RELATIVE and KDPC_DATA
- Implement byteswap intrinsics
- Add macro definitions for KeQuerySystemTime, KeQueryTickCount and KeQueryInterruptTime to ddk
- Add NtCurrentTeb inline function
- Update amd64 prototypes for KeGetCurrentIrql, KfRaiseIrql, KfLowerIrql, KeRaiseIrqlToDpcLevel, KeRaiseIrqlToSynchLevel, KeLowerIrql, KeRaiseIrql
- Implement __readcr8, __writecr8, __lidt and __sidt intrinsics.
- Implement KeGetCurrentIrql as intrinsic.
- Make KeGetCurrentIrql, KeLowerIrql, KfRaiseIrql, KeRaiseIrql, KeRaiseIrqlToDpcLevel and KeRaiseIrqlToSynchLevel intrinsics as in the WDK 2008.
- Fix _interlockedbittest intrinsics
- Fix __readmsr and __writemsr on amd64
- Fix __readgsqword, it was using a long internally. Add volatile keyword to segment addressing intrinsics. Add "memory" to clobber list on all those that do write.
- Merge __readcr and __writecr x86 / x64 definitions, because they are the same. Implement __readdr and __writedr for x64.
- __readcr3() returns an unsigned __int64, fix x86 MmGetPageDirectory accordingly.


svn path=/trunk/; revision=35646
Timo Kreuzer 2008-08-25 21:57:13 +00:00
parent 909de8f972
commit c9b3cf161b
14 changed files with 998 additions and 251 deletions

View file

@ -31,7 +31,7 @@ extern "C" {
#ifndef NO_PORT_MACROS
#if defined(_X86_)
#if defined(_X86_) || defined(_M_AMD64)
#define READ_REGISTER_UCHAR(r) (*(volatile UCHAR *)(r))
#define READ_REGISTER_USHORT(r) (*(volatile USHORT *)(r))
#define READ_REGISTER_ULONG(r) (*(volatile ULONG *)(r))

View file

@ -1589,7 +1589,7 @@ NdisCopyFromPacketToPacket(
* IN ULONG ReceiveFlags);
*/
#ifdef _M_IX86
#if defined(_M_IX86) || defined(_M_AMD64)
#define NdisCopyLookaheadData(Destination, Source, Length, MacOptions) \
RtlCopyMemory(Destination, Source, Length)
#else

View file

@ -2499,7 +2499,7 @@ ExDisableResourceBoostLite (
);
NTKERNELAPI
ULONG
SIZE_T
NTAPI
ExQueryPoolBlockSize (
IN PVOID PoolBlock,

View file

@ -770,27 +770,6 @@ typedef IO_ALLOCATION_ACTION
IN PVOID Context);
typedef struct _EXCEPTION_RECORD32
{
NTSTATUS ExceptionCode;
ULONG ExceptionFlags;
ULONG ExceptionRecord;
ULONG ExceptionAddress;
ULONG NumberParameters;
ULONG ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
} EXCEPTION_RECORD32, *PEXCEPTION_RECORD32;
typedef struct _EXCEPTION_RECORD64
{
NTSTATUS ExceptionCode;
ULONG ExceptionFlags;
ULONG64 ExceptionRecord;
ULONG64 ExceptionAddress;
ULONG NumberParameters;
ULONG __unusedAlignment;
ULONG64 ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
} EXCEPTION_RECORD64, *PEXCEPTION_RECORD64;
typedef EXCEPTION_DISPOSITION
(DDKAPI *PEXCEPTION_ROUTINE)(
IN struct _EXCEPTION_RECORD *ExceptionRecord,
@ -5510,10 +5489,117 @@ KeGetCurrentThread(
#elif defined(__x86_64__)
#define PASSIVE_LEVEL 0
#define LOW_LEVEL 0
#define APC_LEVEL 1
#define DISPATCH_LEVEL 2
#define CLOCK_LEVEL 13
#define IPI_LEVEL 14
#define POWER_LEVEL 14
#define PROFILE_LEVEL 15
#define HIGH_LEVEL 15
#define PAGE_SIZE 0x1000
#define PAGE_SHIFT 12L
#define PTI_SHIFT 12L
#define PDI_SHIFT 21L
#define PPI_SHIFT 30L
#define PXI_SHIFT 39L
#define PTE_PER_PAGE 512
#define PDE_PER_PAGE 512
#define PPE_PER_PAGE 512
#define PXE_PER_PAGE 512
#define PTI_MASK_AMD64 (PTE_PER_PAGE - 1)
#define PDI_MASK_AMD64 (PDE_PER_PAGE - 1)
#define PPI_MASK (PPE_PER_PAGE - 1)
#define PXI_MASK (PXE_PER_PAGE - 1)
#define PXE_BASE 0xFFFFF6FB7DBED000ULL
#define PXE_SELFMAP 0xFFFFF6FB7DBEDF68ULL
#define PPE_BASE 0xFFFFF6FB7DA00000ULL
#define PDE_BASE 0xFFFFF6FB40000000ULL
#define PTE_BASE 0xFFFFF68000000000ULL
#define PXE_TOP 0xFFFFF6FB7DBEDFFFULL
#define PPE_TOP 0xFFFFF6FB7DBFFFFFULL
#define PDE_TOP 0xFFFFF6FB7FFFFFFFULL
#define PTE_TOP 0xFFFFF6FFFFFFFFFFULL
#define MM_LOWEST_USER_ADDRESS (PVOID)0x10000
#define MM_LOWEST_SYSTEM_ADDRESS (PVOID)0xFFFF080000000000ULL
#define KI_USER_SHARED_DATA 0xFFFFF78000000000ULL
#define SharedUserData ((PKUSER_SHARED_DATA const)KI_USER_SHARED_DATA)
#define SharedInterruptTime (&SharedUserData->InterruptTime)
#define SharedSystemTime (&SharedUserData->SystemTime)
#define SharedTickCount (&SharedUserData->TickCount)
#define KeQueryInterruptTime() \
(*(volatile ULONG64*)SharedInterruptTime)
#define KeQuerySystemTime(CurrentCount) \
*(ULONG64*)(CurrentCount) = *(volatile ULONG64*)SharedSystemTime
#define KeQueryTickCount(CurrentCount) \
*(ULONG64*)(CurrentCount) = *(volatile ULONG64*)SharedTickCount
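The three macros above turn the time queries into direct reads of KUSER_SHARED_DATA instead of exported routines. A minimal consumer sketch, assuming a driver built against this header; SampleLogTimes is an illustrative name, not ReactOS code:

    #include <ntddk.h>

    VOID SampleLogTimes(VOID)
    {
        LARGE_INTEGER Ticks;
        ULONG64 InterruptTime;

        KeQueryTickCount(&Ticks);               /* expands to a read of SharedTickCount */
        InterruptTime = KeQueryInterruptTime(); /* expands to a read of SharedInterruptTime */

        DbgPrint("ticks %I64u, interrupt time %I64u\n",
                 (ULONG64)Ticks.QuadPart, InterruptTime);
    }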
typedef struct _KPCR
{
union
{
NT_TIB NtTib;
struct
{
union _KGDTENTRY64 *GdtBase;
struct _KTSS64 *TssBase;
ULONG64 UserRsp;
struct _KPCR *Self;
struct _KPRCB *CurrentPrcb;
PKSPIN_LOCK_QUEUE LockArray;
PVOID Used_Self;
};
};
union _KIDTENTRY64 *IdtBase;
ULONG64 Unused[2];
KIRQL Irql;
UCHAR SecondLevelCacheAssociativity;
UCHAR ObsoleteNumber;
UCHAR Fill0;
ULONG Unused0[3];
USHORT MajorVersion;
USHORT MinorVersion;
ULONG StallScaleFactor;
PVOID Unused1[3];
ULONG KernelReserved[15];
ULONG SecondLevelCacheSize;
ULONG HalReserved[16];
ULONG Unused2;
PVOID KdVersionBlock;
PVOID Unused3;
ULONG PcrAlign1[24];
} KPCR, *PKPCR;
typedef struct _KFLOATING_SAVE {
ULONG Dummy;
} KFLOATING_SAVE, *PKFLOATING_SAVE;
NTKERNELAPI
PRKTHREAD
NTAPI
KeGetCurrentThread(
VOID);
FORCEINLINE
PKPCR
KeGetPcr(VOID)
{
return (PKPCR)__readgsqword(FIELD_OFFSET(KPCR, Self));
}
FORCEINLINE
ULONG
KeGetCurrentProcessorNumber(VOID)
{
return (ULONG)__readgsword(0x184);
}
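KeGetPcr works because the amd64 convention keeps GS pointing at the current processor's KPCR, whose Self member holds the structure's own linear address, so one GS-relative read yields a flat pointer. A hedged usage sketch (SampleDumpCpu is illustrative):

    VOID SampleDumpCpu(VOID)
    {
        PKPCR Pcr = KeGetPcr();
        ULONG Number = KeGetCurrentProcessorNumber();

        DbgPrint("CPU %lu: KPCR at %p, Irql %u\n", Number, Pcr, Pcr->Irql);
    }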
#elif defined(__PowerPC__)
typedef ULONG PFN_NUMBER, *PPFN_NUMBER;
@ -5704,6 +5790,8 @@ typedef struct _PCIBUSDATA
#if !defined(__INTERLOCKED_DECLARED)
#define __INTERLOCKED_DECLARED
#if defined (_X86_)
#if defined(NO_INTERLOCKED_INTRINSICS)
NTKERNELAPI
LONG
FASTCALL
@ -5738,18 +5826,30 @@ InterlockedExchangeAdd(
IN OUT LONG volatile *Addend,
IN LONG Value);
#else // !defined(NO_INTERLOCKED_INTRINSICS)
#define InterlockedExchange _InterlockedExchange
#define InterlockedIncrement _InterlockedIncrement
#define InterlockedDecrement _InterlockedDecrement
#define InterlockedExchangeAdd _InterlockedExchangeAdd
#define InterlockedCompareExchange _InterlockedCompareExchange
#define InterlockedOr _InterlockedOr
#define InterlockedAnd _InterlockedAnd
#define InterlockedXor _InterlockedXor
#endif // !defined(NO_INTERLOCKED_INTRINSICS)
#endif // defined (_X86_)
#if !defined (_WIN64)
/*
* PVOID
* InterlockedExchangePointer(
* IN OUT PVOID VOLATILE *Target,
* IN PVOID Value)
*/
#if defined (_M_AMD64)
#define InterlockedExchangePointer _InterlockedExchangePointer
#else
#define InterlockedExchangePointer(Target, Value) \
((PVOID) InterlockedExchange((PLONG) Target, (LONG) Value))
#endif
/*
* PVOID
@ -5758,17 +5858,54 @@ InterlockedExchangeAdd(
* IN PVOID Exchange,
* IN PVOID Comparand)
*/
#if defined (_M_AMD64)
#define InterlockedCompareExchangePointer _InterlockedCompareExchangePointer
#else
#define InterlockedCompareExchangePointer(Destination, Exchange, Comparand) \
((PVOID) InterlockedCompareExchange((PLONG) Destination, (LONG) Exchange, (LONG) Comparand))
#endif
#define InterlockedExchangeAddSizeT(a, b) InterlockedExchangeAdd((LONG *)a, b)
#define InterlockedIncrementSizeT(a) InterlockedIncrement((LONG *)a)
#define InterlockedDecrementSizeT(a) InterlockedDecrement((LONG *)a)
#endif // !defined (_WIN64)
#if defined (_M_AMD64)
#define InterlockedExchangeAddSizeT(a, b) InterlockedExchangeAdd64((LONGLONG *)a, (LONGLONG)b)
#define InterlockedIncrementSizeT(a) InterlockedIncrement64((LONGLONG *)a)
#define InterlockedDecrementSizeT(a) InterlockedDecrement64((LONGLONG *)a)
#define InterlockedAnd _InterlockedAnd
#define InterlockedOr _InterlockedOr
#define InterlockedXor _InterlockedXor
#define InterlockedIncrement _InterlockedIncrement
#define InterlockedDecrement _InterlockedDecrement
#define InterlockedAdd _InterlockedAdd
#define InterlockedExchange _InterlockedExchange
#define InterlockedExchangeAdd _InterlockedExchangeAdd
#define InterlockedCompareExchange _InterlockedCompareExchange
#define InterlockedAnd64 _InterlockedAnd64
#define InterlockedOr64 _InterlockedOr64
#define InterlockedXor64 _InterlockedXor64
#define InterlockedIncrement64 _InterlockedIncrement64
#define InterlockedDecrement64 _InterlockedDecrement64
#define InterlockedAdd64 _InterlockedAdd64
#define InterlockedExchange64 _InterlockedExchange64
#define InterlockedExchangeAdd64 _InterlockedExchangeAdd64
#define InterlockedCompareExchange64 _InterlockedCompareExchange64
#define InterlockedCompareExchangePointer _InterlockedCompareExchangePointer
#define InterlockedExchangePointer _InterlockedExchangePointer
#define ExInterlockedPopEntrySList(Head, Lock) ExpInterlockedPopEntrySList(Head)
#define ExInterlockedPushEntrySList(Head, Entry, Lock) ExpInterlockedPushEntrySList(Head, Entry)
#define ExInterlockedFlushSList(Head) ExpInterlockedFlushSList(Head)
#if !defined(_WINBASE_)
#define InterlockedPopEntrySList(Head) ExpInterlockedPopEntrySList(Head)
#define InterlockedPushEntrySList(Head, Entry) ExpInterlockedPushEntrySList(Head, Entry)
#define InterlockedFlushSList(Head) ExpInterlockedFlushSList(Head)
#define QueryDepthSList(Head) ExQueryDepthSList(Head)
#endif // !defined(_WINBASE_)
#endif // _M_AMD64
#endif /* !__INTERLOCKED_DECLARED */
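Note that on amd64 the SizeT helpers above now resolve to the 64-bit interlocked primitives, so a SIZE_T counter is updated atomically in full width. A small sketch, assuming a hypothetical driver-global counter:

    static volatile SIZE_T g_BytesInFlight;  /* hypothetical counter */

    VOID SampleAddBytes(SIZE_T Length)
    {
        /* InterlockedExchangeAdd64 underneath on amd64, InterlockedExchangeAdd on x86 */
        InterlockedExchangeAddSizeT(&g_BytesInFlight, Length);
    }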
NTKERNELAPI
@ -5783,6 +5920,20 @@ FASTCALL
KefReleaseSpinLockFromDpcLevel(
IN PKSPIN_LOCK SpinLock);
#if defined(_M_AMD64)
NTKERNELAPI
KIRQL
FASTCALL
KfAcquireSpinLock(
IN PKSPIN_LOCK SpinLock);
NTKERNELAPI
VOID
FASTCALL
KfReleaseSpinLock(
IN PKSPIN_LOCK SpinLock,
IN KIRQL NewIrql);
#else
NTHALAPI
KIRQL
FASTCALL
@ -5795,6 +5946,7 @@ FASTCALL
KfReleaseSpinLock(
IN PKSPIN_LOCK SpinLock,
IN KIRQL NewIrql);
#endif
NTKERNELAPI
BOOLEAN
@ -9288,6 +9440,20 @@ IoWritePartitionTableEx(
/** Kernel routines **/
#if defined (_M_AMD64)
NTKERNELAPI
VOID
FASTCALL
KeAcquireInStackQueuedSpinLock(
IN PKSPIN_LOCK SpinLock,
IN PKLOCK_QUEUE_HANDLE LockHandle);
NTKERNELAPI
VOID
FASTCALL
KeReleaseInStackQueuedSpinLock(
IN PKLOCK_QUEUE_HANDLE LockHandle);
#else
NTHALAPI
VOID
FASTCALL
@ -9295,6 +9461,13 @@ KeAcquireInStackQueuedSpinLock(
IN PKSPIN_LOCK SpinLock,
IN PKLOCK_QUEUE_HANDLE LockHandle);
NTHALAPI
VOID
FASTCALL
KeReleaseInStackQueuedSpinLock(
IN PKLOCK_QUEUE_HANDLE LockHandle);
#endif
NTKERNELAPI
VOID
FASTCALL
@ -9481,12 +9654,6 @@ KePulseEvent(
IN KPRIORITY Increment,
IN BOOLEAN Wait);
NTKERNELAPI
ULONGLONG
NTAPI
KeQueryInterruptTime(
VOID);
NTHALAPI
LARGE_INTEGER
NTAPI
@ -9499,6 +9666,13 @@ NTAPI
KeQueryPriorityThread(
IN PRKTHREAD Thread);
#if !defined(_M_AMD64)
NTKERNELAPI
ULONGLONG
NTAPI
KeQueryInterruptTime(
VOID);
NTKERNELAPI
VOID
NTAPI
@ -9510,6 +9684,7 @@ VOID
NTAPI
KeQueryTickCount(
OUT PLARGE_INTEGER TickCount);
#endif
NTKERNELAPI
ULONG
@ -9552,12 +9727,6 @@ KeRegisterBugCheckCallback(
IN ULONG Length,
IN PUCHAR Component);
NTHALAPI
VOID
FASTCALL
KeReleaseInStackQueuedSpinLock(
IN PKLOCK_QUEUE_HANDLE LockHandle);
NTKERNELAPI
VOID
FASTCALL
@ -9791,6 +9960,50 @@ KeRaiseIrqlToSynchLevel(
#define KeLowerIrql(a) KfLowerIrql(a)
#define KeRaiseIrql(a,b) *(b) = KfRaiseIrql(a)
#elif defined(_M_AMD64)
FORCEINLINE
KIRQL
KeGetCurrentIrql(VOID)
{
return (KIRQL)__readcr8();
}
FORCEINLINE
VOID
KeLowerIrql(IN KIRQL NewIrql)
{
ASSERT(KeGetCurrentIrql() >= NewIrql);
__writecr8(NewIrql);
}
FORCEINLINE
KIRQL
KfRaiseIrql(IN KIRQL NewIrql)
{
KIRQL OldIrql;
OldIrql = __readcr8();
ASSERT(OldIrql <= NewIrql);
__writecr8(NewIrql);
return OldIrql;
}
#define KeRaiseIrql(a,b) *(b) = KfRaiseIrql(a)
FORCEINLINE
KIRQL
KeRaiseIrqlToDpcLevel(VOID)
{
return KfRaiseIrql(DISPATCH_LEVEL);
}
FORCEINLINE
KIRQL
KeRaiseIrqlToSynchLevel(VOID)
{
return KfRaiseIrql(12); // SYNCH_LEVEL = IPI_LEVEL - 2
}
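On amd64 these routines are plain CR8 accesses, since the processor's task-priority register and the IRQL coincide, which is why the WDK makes them intrinsics as well. A sketch of the usual raise/lower pattern around a short critical region (SampleDispatchLevelWork is illustrative):

    VOID SampleDispatchLevelWork(VOID)
    {
        KIRQL OldIrql;

        KeRaiseIrql(DISPATCH_LEVEL, &OldIrql);  /* *(b) = KfRaiseIrql(a): a CR8 write */
        /* ... work that must not be preempted by a DPC on this processor ... */
        KeLowerIrql(OldIrql);                   /* CR8 write back to the saved level */
    }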
#elif defined(__PowerPC__)
NTHALAPI

View file

@ -315,6 +315,11 @@ NtCurrentTeb(VOID)
return ret;
#endif
}
#elif defined (_M_AMD64)
FORCEINLINE struct _TEB * NtCurrentTeb(VOID)
{
return (struct _TEB *)__readgsqword(FIELD_OFFSET(NT_TIB, Self));
}
#endif
#else
struct _TEB * NtCurrentTeb(void);
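The amd64 inline relies on user-mode GS pointing at the TEB, whose NT_TIB.Self member stores its own address, so one GS-relative read recovers a flat TEB pointer. A user-mode sketch of calling it (standard TIB members; printf only for illustration):

    #include <windows.h>
    #include <stdio.h>

    int main(void)
    {
        PNT_TIB Tib = (PNT_TIB)NtCurrentTeb();   /* TEB begins with an NT_TIB */
        printf("TEB at %p, stack base %p, stack limit %p\n",
               (void *)Tib, Tib->StackBase, Tib->StackLimit);
        return 0;
    }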

View file

@ -2812,7 +2812,7 @@ typedef struct tagTVKEYDOWN
(LPARAM)(HTREEITEM)(hitem))
#define TreeView_EnsureVisible(hwnd, hitem) \
(BOOL)SNDMSGA((hwnd), TVM_ENSUREVISIBLE, 0, (LPARAM)(UINT)(hitem))
(BOOL)SNDMSGA((hwnd), TVM_ENSUREVISIBLE, 0, (LPARAM)(HTREEITEM)(hitem))
#define TreeView_SortChildrenCB(hwnd, psort, recurse) \
(BOOL)SNDMSGA((hwnd), TVM_SORTCHILDRENCB, (WPARAM)recurse, \

View file

@ -121,6 +121,15 @@ static __inline__ __attribute__((always_inline)) long _InterlockedExchange(volat
return __sync_lock_test_and_set(Target, Value);
}
#if defined(_M_AMD64)
static __inline__ __attribute__((always_inline)) long long _InterlockedExchange64(volatile long long * const Target, const long long Value)
{
/* NOTE: __sync_lock_test_and_set would be an acquire barrier, so we force a full barrier */
__sync_synchronize();
return __sync_lock_test_and_set(Target, Value);
}
#endif
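As the NOTE says, __sync_lock_test_and_set alone is only an acquire barrier, so the explicit __sync_synchronize upgrades the exchange to the full barrier the MSVC intrinsic guarantees. A sketch of what a caller can then rely on (g_SharedStamp is hypothetical):

    static volatile long long g_SharedStamp;

    long long SamplePublishStamp(long long NewStamp)
    {
        /* All earlier stores are globally visible before the swap,
           and the previous value comes back atomically. */
        return _InterlockedExchange64(&g_SharedStamp, NewStamp);
    }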
static __inline__ __attribute__((always_inline)) void * _InterlockedExchangePointer(void * volatile * const Target, void * const Value)
{
/* NOTE: ditto */
@ -138,6 +147,13 @@ static __inline__ __attribute__((always_inline)) long _InterlockedExchangeAdd(vo
return __sync_fetch_and_add(Addend, Value);
}
#if defined(_M_AMD64)
static __inline__ __attribute__((always_inline)) long long _InterlockedExchangeAdd64(volatile long long * const Addend, const long long Value)
{
return __sync_fetch_and_add(Addend, Value);
}
#endif
static __inline__ __attribute__((always_inline)) char _InterlockedAnd8(volatile char * const value, const char mask)
{
return __sync_fetch_and_and(value, mask);
@ -462,10 +478,22 @@ static __inline__ __attribute__((always_inline)) long _InterlockedIncrement16(vo
return _InterlockedExchangeAdd16(lpAddend, 1) + 1;
}
#if defined(_M_AMD64)
static __inline__ __attribute__((always_inline)) long long _InterlockedDecrement64(volatile long long * const lpAddend)
{
return _InterlockedExchangeAdd64(lpAddend, -1) - 1;
}
static __inline__ __attribute__((always_inline)) long long _InterlockedIncrement64(volatile long long * const lpAddend)
{
return _InterlockedExchangeAdd64(lpAddend, 1) + 1;
}
#endif
static __inline__ __attribute__((always_inline)) unsigned char _interlockedbittestandreset(volatile long * a, const long b)
{
unsigned char retval;
__asm__("lock; btrl %[b], %[a]; setb %b[retval]" : [retval] "=r" (retval), [a] "=m" (a) : [b] "Ir" (b) : "memory");
__asm__("lock; btrl %[b], %[a]; setb %b[retval]" : [retval] "=r" (retval), [a] "=m" (*a) : [b] "Ir" (b) : "memory");
return retval;
}
@ -473,7 +501,7 @@ static __inline__ __attribute__((always_inline)) unsigned char _interlockedbitte
static __inline__ __attribute__((always_inline)) unsigned char _interlockedbittestandreset64(volatile long long * a, const long long b)
{
unsigned char retval;
__asm__("lock; btrq %[b], %[a]; setb %b[retval]" : [retval] "=r" (retval), [a] "=m" (a) : [b] "Ir" (b) : "memory");
__asm__("lock; btrq %[b], %[a]; setb %b[retval]" : [retval] "=r" (retval), [a] "=m" (*a) : [b] "Ir" (b) : "memory");
return retval;
}
#endif
@ -481,7 +509,7 @@ static __inline__ __attribute__((always_inline)) unsigned char _interlockedbitte
static __inline__ __attribute__((always_inline)) unsigned char _interlockedbittestandset(volatile long * a, const long b)
{
unsigned char retval;
__asm__("lock; btsl %[b], %[a]; setc %b[retval]" : [retval] "=r" (retval), [a] "=m" (a) : [b] "Ir" (b) : "memory");
__asm__("lock; btsl %[b], %[a]; setc %b[retval]" : [retval] "=r" (retval), [a] "=m" (*a) : [b] "Ir" (b) : "memory");
return retval;
}
@ -489,7 +517,7 @@ static __inline__ __attribute__((always_inline)) unsigned char _interlockedbitte
static __inline__ __attribute__((always_inline)) unsigned char _interlockedbittestandset64(volatile long long * a, const long long b)
{
unsigned char retval;
__asm__("lock; btsq %[b], %[a]; setc %b[retval]" : [retval] "=r" (retval), [a] "=m" (a) : [b] "Ir" (b) : "memory");
__asm__("lock; btsq %[b], %[a]; setc %b[retval]" : [retval] "=r" (retval), [a] "=m" (*a) : [b] "Ir" (b) : "memory");
return retval;
}
#endif
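The constraint fix from "=m" (a) to "=m" (*a) matters: the old form declared the pointer variable itself as the output, so GCC was free to place the locked btr/bts on the wrong memory. With the corrected operands the intrinsics behave as documented; a sketch of typical use (g_SlotMask is hypothetical):

    static volatile long g_SlotMask;   /* bitmap of busy slots */

    int SampleTryClaimSlot(long Slot)
    {
        /* Previous bit value is returned: 0 means this caller won the slot. */
        return _interlockedbittestandset(&g_SlotMask, Slot) == 0;
    }

    void SampleReleaseSlot(long Slot)
    {
        _interlockedbittestandreset(&g_SlotMask, Slot);
    }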
@ -561,168 +589,168 @@ static __inline__ __attribute__((always_inline)) void __movsd(unsigned long * De
static __inline__ __attribute__((always_inline)) void __writegsbyte(const unsigned long Offset, const unsigned char Data)
{
__asm__("movb %b[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("movb %b[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __writegsword(const unsigned long Offset, const unsigned short Data)
{
__asm__("movw %w[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("movw %w[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __writegsdword(const unsigned long Offset, const unsigned long Data)
{
__asm__("movl %k[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("movl %k[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __writegsqword(const unsigned long Offset, const unsigned __int64 Data)
{
__asm__("movq %q[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("movq %q[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) unsigned char __readgsbyte(const unsigned long Offset)
{
unsigned char value;
__asm__("movb %%gs:%a[Offset], %b[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
__asm__ __volatile__("movb %%gs:%a[Offset], %b[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
return value;
}
static __inline__ __attribute__((always_inline)) unsigned short __readgsword(const unsigned long Offset)
{
unsigned short value;
__asm__("movw %%gs:%a[Offset], %w[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
__asm__ __volatile__("movw %%gs:%a[Offset], %w[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
return value;
}
static __inline__ __attribute__((always_inline)) unsigned long __readgsdword(const unsigned long Offset)
{
unsigned long value;
__asm__("movl %%gs:%a[Offset], %k[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
__asm__ __volatile__("movl %%gs:%a[Offset], %k[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
return value;
}
static __inline__ __attribute__((always_inline)) unsigned __int64 __readgsqword(const unsigned long Offset)
{
unsigned long value;
__asm__("movq %%gs:%a[Offset], %q[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
unsigned __int64 value;
__asm__ __volatile__("movq %%gs:%a[Offset], %q[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
return value;
}
static __inline__ __attribute__((always_inline)) void __incgsbyte(const unsigned long Offset)
{
__asm__("incb %%gs:%a[Offset]" : : [Offset] "ir" (Offset));
__asm__ __volatile__("incb %%gs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}
static __inline__ __attribute__((always_inline)) void __incgsword(const unsigned long Offset)
{
__asm__("incw %%gs:%a[Offset]" : : [Offset] "ir" (Offset));
__asm__ __volatile__("incw %%gs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}
static __inline__ __attribute__((always_inline)) void __incgsdword(const unsigned long Offset)
{
__asm__("incl %%gs:%a[Offset]" : : [Offset] "ir" (Offset));
__asm__ __volatile__("incl %%gs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}
/* NOTE: the bizarre implementation of __addgsxxx mimics the broken Visual C++ behavior */
static __inline__ __attribute__((always_inline)) void __addgsbyte(const unsigned long Offset, const unsigned char Data)
{
if(!__builtin_constant_p(Offset))
__asm__("addb %k[Offset], %%gs:%a[Offset]" : : [Offset] "r" (Offset));
__asm__ __volatile__("addb %k[Offset], %%gs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
else
__asm__("addb %b[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("addb %b[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __addgsword(const unsigned long Offset, const unsigned short Data)
{
if(!__builtin_constant_p(Offset))
__asm__("addw %k[Offset], %%gs:%a[Offset]" : : [Offset] "r" (Offset));
__asm__ __volatile__("addw %k[Offset], %%gs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
else
__asm__("addw %w[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("addw %w[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __addgsdword(const unsigned long Offset, const unsigned int Data)
{
if(!__builtin_constant_p(Offset))
__asm__("addl %k[Offset], %%gs:%a[Offset]" : : [Offset] "r" (Offset));
__asm__ __volatile__("addl %k[Offset], %%gs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
else
__asm__("addl %k[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("addl %k[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
#else
/*** FS segment addressing ***/
static __inline__ __attribute__((always_inline)) void __writefsbyte(const unsigned long Offset, const unsigned char Data)
{
__asm__("movb %b[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("movb %b[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __writefsword(const unsigned long Offset, const unsigned short Data)
{
__asm__("movw %w[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("movw %w[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __writefsdword(const unsigned long Offset, const unsigned long Data)
{
__asm__("movl %k[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("movl %k[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) unsigned char __readfsbyte(const unsigned long Offset)
{
unsigned char value;
__asm__("movb %%fs:%a[Offset], %b[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
__asm__ __volatile__("movb %%fs:%a[Offset], %b[value]" : [value] "=q" (value) : [Offset] "irm" (Offset) : "memory");
return value;
}
static __inline__ __attribute__((always_inline)) unsigned short __readfsword(const unsigned long Offset)
{
unsigned short value;
__asm__("movw %%fs:%a[Offset], %w[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
__asm__ __volatile__("movw %%fs:%a[Offset], %w[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
return value;
}
static __inline__ __attribute__((always_inline)) unsigned long __readfsdword(const unsigned long Offset)
{
unsigned long value;
__asm__("movl %%fs:%a[Offset], %k[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
__asm__ __volatile__("movl %%fs:%a[Offset], %k[value]" : [value] "=q" (value) : [Offset] "irm" (Offset));
return value;
}
static __inline__ __attribute__((always_inline)) void __incfsbyte(const unsigned long Offset)
{
__asm__("incb %%fs:%a[Offset]" : : [Offset] "ir" (Offset));
__asm__ __volatile__("incb %%fs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}
static __inline__ __attribute__((always_inline)) void __incfsword(const unsigned long Offset)
{
__asm__("incw %%fs:%a[Offset]" : : [Offset] "ir" (Offset));
__asm__ __volatile__("incw %%fs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}
static __inline__ __attribute__((always_inline)) void __incfsdword(const unsigned long Offset)
{
__asm__("incl %%fs:%a[Offset]" : : [Offset] "ir" (Offset));
__asm__ __volatile__("incl %%fs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}
/* NOTE: the bizarre implementation of __addfsxxx mimics the broken Visual C++ behavior */
static __inline__ __attribute__((always_inline)) void __addfsbyte(const unsigned long Offset, const unsigned char Data)
{
if(!__builtin_constant_p(Offset))
__asm__("addb %k[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset));
__asm__ __volatile__("addb %k[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
else
__asm__("addb %b[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("addb %b[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __addfsword(const unsigned long Offset, const unsigned short Data)
{
if(!__builtin_constant_p(Offset))
__asm__("addw %k[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset));
__asm__ __volatile__("addw %k[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
else
__asm__("addw %w[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("addw %w[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __addfsdword(const unsigned long Offset, const unsigned int Data)
{
if(!__builtin_constant_p(Offset))
__asm__("addl %k[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset));
__asm__ __volatile__("addl %k[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
else
__asm__("addl %k[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data));
__asm__ __volatile__("addl %k[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
#endif
@ -804,12 +832,19 @@ static __inline__ __attribute__((always_inline)) unsigned short _rotl16(const un
}
#ifndef __MSVCRT__
static __inline__ __attribute__((always_inline)) unsigned long _rotl(const unsigned long value, const unsigned char shift)
static __inline__ __attribute__((always_inline)) unsigned int _rotl(const unsigned int value, const int shift)
{
unsigned long retval;
__asm__("roll %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
return retval;
}
static __inline__ __attribute__((always_inline)) unsigned long _rotr(const unsigned int value, const unsigned char shift)
{
unsigned long retval;
__asm__("rorl %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
return retval;
}
#endif
static __inline__ __attribute__((always_inline)) unsigned char _rotr8(const unsigned char value, const unsigned char shift)
@ -875,6 +910,44 @@ static __inline__ __attribute__((always_inline)) unsigned long long __ull_rshift
return retval;
}
static __inline__ __attribute__((always_inline)) unsigned short _byteswap_ushort(unsigned short value)
{
unsigned short retval;
__asm__("rorw $8, %w[retval]" : [retval] "=rm" (retval) : "[retval]" (value));
return retval;
}
static __inline__ __attribute__((always_inline)) unsigned long _byteswap_ulong(unsigned long value)
{
unsigned long retval;
__asm__("bswapl %[retval]" : [retval] "=rm" (retval) : "[retval]" (value));
return retval;
}
#ifdef _M_AMD64
static __inline__ __attribute__((always_inline)) unsigned __int64 _byteswap_uint64(unsigned __int64 value)
{
unsigned __int64 retval;
__asm__("bswapq %[retval]" : [retval] "=rm" (retval) : "[retval]" (value));
return retval;
}
#else
static __inline__ __attribute__((always_inline)) unsigned __int64 _byteswap_uint64(unsigned __int64 value)
{
union {
__int64 int64part;
struct {
unsigned long lowpart;
unsigned long hipart;
};
} retval;
retval.int64part = value;
__asm__("bswapl %[lowpart]\n"
"bswapl %[hipart]\n"
: [lowpart] "=rm" (retval.hipart), [hipart] "=rm" (retval.lowpart) : "[lowpart]" (retval.lowpart), "[hipart]" (retval.hipart) );
return retval.int64part;
}
#endif
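These mirror MSVC's _byteswap_ushort/_byteswap_ulong/_byteswap_uint64; the 32-bit fallback swaps each half and crosses them over through the union. A quick endianness sketch (function names are illustrative):

    unsigned long SampleHostToNet32(unsigned long Value)
    {
        return _byteswap_ulong(Value);    /* 0x12345678 -> 0x78563412 */
    }

    unsigned __int64 SampleHostToNet64(unsigned __int64 Value)
    {
        return _byteswap_uint64(Value);   /* either implementation above applies */
    }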
/*** 64-bit math ***/
static __inline__ __attribute__((always_inline)) long long __emul(const int a, const int b)
@ -1015,48 +1088,131 @@ static __inline__ __attribute__((always_inline)) void _enable(void)
/*** Protected memory management ***/
static __inline__ __attribute__((always_inline)) unsigned long __readcr0(void)
static __inline__ __attribute__((always_inline)) void __writecr0(const unsigned __int64 Data)
{
unsigned long value;
__asm__("mov %[Data], %%cr0" : : [Data] "q" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __writecr3(const unsigned __int64 Data)
{
__asm__("mov %[Data], %%cr3" : : [Data] "q" (Data) : "memory");
}
static __inline__ __attribute__((always_inline)) void __writecr4(const unsigned __int64 Data)
{
__asm__("mov %[Data], %%cr4" : : [Data] "q" (Data) : "memory");
}
#ifdef _M_AMD64
static __inline__ __attribute__((always_inline)) void __writecr8(const unsigned __int64 Data)
{
__asm__("mov %[Data], %%cr8" : : [Data] "q" (Data) : "memory");
}
#endif
static __inline__ __attribute__((always_inline)) unsigned __int64 __readcr0(void)
{
unsigned __int64 value;
__asm__ __volatile__("mov %%cr0, %[value]" : [value] "=q" (value));
return value;
}
static __inline__ __attribute__((always_inline)) unsigned long __readcr2(void)
static __inline__ __attribute__((always_inline)) unsigned __int64 __readcr2(void)
{
unsigned long value;
unsigned __int64 value;
__asm__ __volatile__("mov %%cr2, %[value]" : [value] "=q" (value));
return value;
}
static __inline__ __attribute__((always_inline)) unsigned long __readcr3(void)
static __inline__ __attribute__((always_inline)) unsigned __int64 __readcr3(void)
{
unsigned long value;
unsigned __int64 value;
__asm__ __volatile__("mov %%cr3, %[value]" : [value] "=q" (value));
return value;
}
static __inline__ __attribute__((always_inline)) unsigned long __readcr4(void)
static __inline__ __attribute__((always_inline)) unsigned __int64 __readcr4(void)
{
unsigned long value;
unsigned __int64 value;
__asm__ __volatile__("mov %%cr4, %[value]" : [value] "=q" (value));
return value;
}
static __inline__ __attribute__((always_inline)) void __writecr0(const unsigned long long Data)
#ifdef _M_AMD64
static __inline__ __attribute__((always_inline)) unsigned __int64 __readcr8(void)
{
__asm__("mov %[Data], %%cr0" : : [Data] "q" ((const unsigned long)(Data & 0xFFFFFFFF)) : "memory");
unsigned __int64 value;
__asm__ __volatile__("movq %%cr8, %q[value]" : [value] "=q" (value));
return value;
}
#endif
#ifdef _M_AMD64
static __inline__ __attribute__((always_inline)) unsigned __int64 __readdr(unsigned int reg)
{
unsigned __int64 value;
switch (reg)
{
case 0:
__asm__ __volatile__("movq %%dr0, %q[value]" : [value] "=q" (value));
break;
case 1:
__asm__ __volatile__("movq %%dr1, %q[value]" : [value] "=q" (value));
break;
case 2:
__asm__ __volatile__("movq %%dr2, %q[value]" : [value] "=q" (value));
break;
case 3:
__asm__ __volatile__("movq %%dr3, %q[value]" : [value] "=q" (value));
break;
case 4:
__asm__ __volatile__("movq %%dr4, %q[value]" : [value] "=q" (value));
break;
case 5:
__asm__ __volatile__("movq %%dr5, %q[value]" : [value] "=q" (value));
break;
case 6:
__asm__ __volatile__("movq %%dr6, %q[value]" : [value] "=q" (value));
break;
case 7:
__asm__ __volatile__("movq %%dr7, %q[value]" : [value] "=q" (value));
break;
}
return value;
}
static __inline__ __attribute__((always_inline)) void __writecr3(const unsigned long long Data)
static __inline__ __attribute__((always_inline)) void __writedr(unsigned reg, unsigned __int64 value)
{
__asm__("mov %[Data], %%cr3" : : [Data] "q" ((const unsigned long)(Data & 0xFFFFFFFF)) : "memory");
}
static __inline__ __attribute__((always_inline)) void __writecr4(const unsigned long long Data)
{
__asm__("mov %[Data], %%cr4" : : [Data] "q" ((const unsigned long)(Data & 0xFFFFFFFF)) : "memory");
switch (reg)
{
case 0:
__asm__("movq %q[value], %%dr0" : : [value] "q" (value) : "memory");
break;
case 1:
__asm__("movq %q[value], %%dr1" : : [value] "q" (value) : "memory");
break;
case 2:
__asm__("movq %q[value], %%dr2" : : [value] "q" (value) : "memory");
break;
case 3:
__asm__("movq %q[value], %%dr3" : : [value] "q" (value) : "memory");
break;
case 4:
__asm__("movq %q[value], %%dr4" : : [value] "q" (value) : "memory");
break;
case 5:
__asm__("movq %q[value], %%dr5" : : [value] "q" (value) : "memory");
break;
case 6:
__asm__("movq %q[value], %%dr6" : : [value] "q" (value) : "memory");
break;
case 7:
__asm__("movq %q[value], %%dr7" : : [value] "q" (value) : "memory");
break;
}
}
#endif
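__readdr and __writedr take the register number at run time, so the switch collapses to the single mov that matches; this is the only way to reach the x64 debug registers from C. A hedged sketch (DR7 layout per the AMD64 manuals; names illustrative):

    unsigned __int64 SampleReadDr7(void)
    {
        return __readdr(7);    /* DR7: hardware-breakpoint enable/condition bits */
    }

    void SampleClearHardwareBreakpoints(void)
    {
        __writedr(7, 0);       /* disable all four hardware breakpoints */
    }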
static __inline__ __attribute__((always_inline)) void __invlpg(void * const Address)
{
@ -1067,14 +1223,24 @@ static __inline__ __attribute__((always_inline)) void __invlpg(void * const Addr
/*** System operations ***/
static __inline__ __attribute__((always_inline)) unsigned long long __readmsr(const int reg)
{
#ifdef _M_AMD64
unsigned long low, high;
__asm__ __volatile__("rdmsr" : "=a" (low), "=d" (high) : "c" (reg));
return ((unsigned long long)high << 32) | low;
#else
unsigned long long retval;
__asm__ __volatile__("rdmsr" : "=A" (retval) : "c" (reg));
return retval;
#endif
}
static __inline__ __attribute__((always_inline)) void __writemsr(const unsigned long Register, const unsigned long long Value)
{
#ifdef _M_AMD64
__asm__ __volatile__("wrmsr" : : "a" (Value), "d" (Value >> 32), "c" (Register));
#else
__asm__ __volatile__("wrmsr" : : "A" (Value), "c" (Register));
#endif
}
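rdmsr/wrmsr always move the 64-bit value through EDX:EAX; on amd64 the "=A" constraint no longer means that register pair, which is why the AMD64 paths recombine and split the halves by hand. A read sketch (kernel mode, ring 0 only; 0x1B is the architectural IA32_APIC_BASE number):

    unsigned long long SampleReadApicBase(void)
    {
        /* __writemsr(0x1B, value) would split the value back into EDX:EAX the same way. */
        return __readmsr(0x1B);
    }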
static __inline__ __attribute__((always_inline)) unsigned long long __readpmc(const int counter)
@ -1097,6 +1263,16 @@ static __inline__ __attribute__((always_inline)) void __wbinvd(void)
__asm__ __volatile__("wbinvd");
}
static __inline__ __attribute__((always_inline)) void __lidt(void *Source)
{
__asm__ __volatile__("lidt %0" : : "m"(*(short*)Source));
}
static __inline__ __attribute__((always_inline)) void __sidt(void *Destination)
{
__asm__ __volatile__("sidt %0" : : "m"(*(short*)Destination) : "memory");
}
#endif /* KJK_INTRIN_X86_H_ */
/* EOF */

View file

@ -1706,16 +1706,52 @@ VOID WINAPI InitializeSRWLock(PSRWLOCK);
#endif
#ifndef __INTERLOCKED_DECLARED
#define __INTERLOCKED_DECLARED
#if defined (_M_AMD64) || defined (_M_IA64)
#define InterlockedAnd _InterlockedAnd
#define InterlockedOr _InterlockedOr
#define InterlockedXor _InterlockedXor
#define InterlockedIncrement _InterlockedIncrement
#define InterlockedIncrementAcquire InterlockedIncrement
#define InterlockedIncrementRelease InterlockedIncrement
#define InterlockedDecrement _InterlockedDecrement
#define InterlockedDecrementAcquire InterlockedDecrement
#define InterlockedDecrementRelease InterlockedDecrement
#define InterlockedExchange _InterlockedExchange
#define InterlockedExchangeAdd _InterlockedExchangeAdd
#define InterlockedCompareExchange _InterlockedCompareExchange
#define InterlockedCompareExchangeAcquire InterlockedCompareExchange
#define InterlockedCompareExchangeRelease InterlockedCompareExchange
#define InterlockedExchangePointer _InterlockedExchangePointer
#define InterlockedCompareExchangePointer _InterlockedCompareExchangePointer
#define InterlockedCompareExchangePointerAcquire _InterlockedCompareExchangePointer
#define InterlockedCompareExchangePointerRelease _InterlockedCompareExchangePointer
#define InterlockedAnd64 _InterlockedAnd64
#define InterlockedOr64 _InterlockedOr64
#define InterlockedXor64 _InterlockedXor64
#define InterlockedIncrement64 _InterlockedIncrement64
#define InterlockedDecrement64 _InterlockedDecrement64
#define InterlockedExchange64 _InterlockedExchange64
#define InterlockedExchangeAdd64 _InterlockedExchangeAdd64
#define InterlockedCompareExchange64 _InterlockedCompareExchange64
#define InterlockedCompareExchangeAcquire64 InterlockedCompareExchange64
#define InterlockedCompareExchangeRelease64 InterlockedCompareExchange64
#else // !(defined (_M_AMD64) || defined (_M_IA64))
LONG WINAPI InterlockedOr(IN OUT LONG volatile *,LONG);
LONG WINAPI InterlockedAnd(IN OUT LONG volatile *,LONG);
LONG WINAPI InterlockedCompareExchange(IN OUT LONG volatile *,LONG,LONG);
LONG WINAPI InterlockedDecrement(IN OUT LONG volatile *);
LONG WINAPI InterlockedExchange(IN OUT LONG volatile *,LONG);
#if defined(_WIN64)
/* PVOID WINAPI InterlockedExchangePointer(PVOID*,PVOID); */
#define InterlockedExchangePointer(t,v) \
(PVOID)InterlockedExchange64((LONGLONG*)(t),(LONGLONG)(v))
(PVOID)_InterlockedExchange64((LONGLONG*)(t),(LONGLONG)(v))
/* PVOID WINAPI InterlockedCompareExchangePointer(PVOID*,PVOID,PVOID); */
#define InterlockedCompareExchangePointer(d,e,c) \
(PVOID)InterlockedCompareExchange64((LONGLONG*)(d),(LONGLONG)(e),(LONGLONG)(c))
(PVOID)_InterlockedCompareExchange64((LONGLONG*)(d),(LONGLONG)(e),(LONGLONG)(c))
#else
/* PVOID WINAPI InterlockedExchangePointer(PVOID*,PVOID); */
#define InterlockedExchangePointer(t,v) \
@ -1733,14 +1769,64 @@ LONG WINAPI InterlockedIncrement(IN OUT LONG volatile *);
PSLIST_ENTRY WINAPI InterlockedPopEntrySList(PSLIST_HEADER);
PSLIST_ENTRY WINAPI InterlockedPushEntrySList(PSLIST_HEADER,PSLIST_ENTRY);
#endif
#endif // !(defined (_M_AMD64) || defined (_M_IA64))
#if !defined(InterlockedAnd)
#define InterlockedAnd InterlockedAnd_Inline
FORCEINLINE
LONG
InterlockedAnd_Inline(IN OUT volatile LONG *Target,
IN LONG Set)
{
LONG i;
LONG j;
j = *Target;
do {
i = j;
j = _InterlockedCompareExchange((PLONG)Target,
i & Set,
i);
} while (i != j);
return j;
}
#endif
#if !defined(InterlockedOr)
#define InterlockedOr InterlockedOr_Inline
FORCEINLINE
LONG
InterlockedOr_Inline(IN OUT volatile LONG *Target,
IN LONG Set)
{
LONG i;
LONG j;
j = *Target;
do {
i = j;
j = _InterlockedCompareExchange((PLONG)Target,
i | Set,
i);
} while (i != j);
return j;
}
#endif
#endif /* __INTERLOCKED_DECLARED */
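Both inlines are the standard compare-exchange loop: read the target, compute the new value, and let _InterlockedCompareExchange retry until no other writer intervened; the value seen before the update is returned. A sketch of flag handling on top of them (the flag and globals are hypothetical):

    #define SAMPLE_FLAG_BUSY 0x1

    static volatile LONG g_Flags;

    BOOL SampleTrySetBusy(void)
    {
        /* The old value tells the caller whether it was first to set the bit. */
        return (InterlockedOr_Inline(&g_Flags, SAMPLE_FLAG_BUSY) & SAMPLE_FLAG_BUSY) == 0;
    }

    void SampleClearBusy(void)
    {
        InterlockedAnd_Inline(&g_Flags, ~SAMPLE_FLAG_BUSY);
    }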
BOOL WINAPI IsBadCodePtr(FARPROC);
BOOL WINAPI IsBadHugeReadPtr(PCVOID,UINT);
BOOL WINAPI IsBadHugeWritePtr(PVOID,UINT);
BOOL WINAPI IsBadReadPtr(PCVOID,UINT);
BOOL WINAPI IsBadStringPtrA(LPCSTR,UINT);
BOOL WINAPI IsBadStringPtrW(LPCWSTR,UINT);
BOOL WINAPI IsBadWritePtr(PVOID,UINT);
BOOL WINAPI IsBadHugeReadPtr(PCVOID,UINT_PTR);
BOOL WINAPI IsBadHugeWritePtr(PVOID,UINT_PTR);
BOOL WINAPI IsBadReadPtr(PCVOID,UINT_PTR);
BOOL WINAPI IsBadStringPtrA(LPCSTR,UINT_PTR);
BOOL WINAPI IsBadStringPtrW(LPCWSTR,UINT_PTR);
BOOL WINAPI IsBadWritePtr(PVOID,UINT_PTR);
BOOL WINAPI IsDebuggerPresent(void);
#if (_WIN32_WINNT >= 0x0501)
BOOL WINAPI IsProcessInJob(HANDLE,HANDLE,PBOOL);
@ -1994,7 +2080,7 @@ BOOL WINAPI SetPrivateObjectSecurity(SECURITY_INFORMATION,PSECURITY_DESCRIPTOR,P
BOOL WINAPI SetProcessAffinityMask(HANDLE,DWORD);
BOOL WINAPI SetProcessPriorityBoost(HANDLE,BOOL);
BOOL WINAPI SetProcessShutdownParameters(DWORD,DWORD);
BOOL WINAPI SetProcessWorkingSetSize(HANDLE,DWORD,DWORD);
BOOL WINAPI SetProcessWorkingSetSize(HANDLE,SIZE_T,SIZE_T);
#if (_WIN32_WINNT >= 0x0600)
VOID WINAPI SetSecurityAccessMask(SECURITY_INFORMATION,LPDWORD);
#endif

View file

@ -123,14 +123,27 @@ extern "C" {
#define pascal __stdcall
#define _pascal __stdcall
#define __pascal __stdcall
#define PASCAL _pascal
#define CDECL _cdecl
#define PASCAL _pascal
#if !defined(__x86_64__) //defined(_STDCALL_SUPPORTED)
#define STDCALL __stdcall
#define WINAPI __stdcall
#define WINAPIV __cdecl
#define APIENTRY __stdcall
#define CALLBACK __stdcall
#define APIPRIVATE __stdcall
#define CALLBACK __stdcall
#define WINAPI __stdcall
#define WINAPIV __cdecl
#define APIENTRY WINAPI
#define APIPRIVATE __stdcall
#define PASCAL __stdcall
#else
#define STDCALL
#define CALLBACK
#define WINAPI
#define WINAPIV
#define APIENTRY WINAPI
#define APIPRIVATE
#define PASCAL pascal
#endif
#define DECLSPEC_IMPORT __declspec(dllimport)
#define DECLSPEC_EXPORT __declspec(dllexport)

View file

@ -92,15 +92,6 @@ extern "C" {
#endif
#endif
/* i386 context definitions */
#ifdef __i386__
#define EXCEPTION_READ_FAULT 0
#define EXCEPTION_WRITE_FAULT 1
#define EXCEPTION_EXECUTE_FAULT 8
#endif /* __i386__ */
#ifndef VOID
#define VOID void
#endif
@ -511,8 +502,6 @@ typedef DWORD FLONG;
#define THREAD_DIRECT_IMPERSONATION 0x200
#endif
#define THREAD_ALL_ACCESS (STANDARD_RIGHTS_REQUIRED|SYNCHRONIZE|0x3FF)
#define EXCEPTION_NONCONTINUABLE 1
#define EXCEPTION_MAXIMUM_PARAMETERS 15
/* FIXME: Oh how I wish, I wish the w32api DDK wouldn't include winnt.h... */
#ifndef __NTDDK_H
#define MUTANT_QUERY_STATE 0x0001
@ -1350,7 +1339,7 @@ typedef enum
#define IMAGE_DLLCHARACTERISTICS_NO_BIND 0x0800
#define IMAGE_DLLCHARACTERISTICS_WDM_DRIVER 0x2000
#define IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE 0x8000
#define IMAGE_FIRST_SECTION(h) ((PIMAGE_SECTION_HEADER) ((DWORD_PTR)h+FIELD_OFFSET(IMAGE_NT_HEADERS,OptionalHeader)+((PIMAGE_NT_HEADERS)(h))->FileHeader.SizeOfOptionalHeader))
#define IMAGE_FIRST_SECTION(h) ((PIMAGE_SECTION_HEADER) ((ULONG_PTR)h+FIELD_OFFSET(IMAGE_NT_HEADERS,OptionalHeader)+((PIMAGE_NT_HEADERS)(h))->FileHeader.SizeOfOptionalHeader))
#define IMAGE_DIRECTORY_ENTRY_EXPORT 0
#define IMAGE_DIRECTORY_ENTRY_IMPORT 1
#define IMAGE_DIRECTORY_ENTRY_RESOURCE 2
@ -2010,6 +1999,38 @@ typedef struct _ACL_SIZE_INFORMATION {
DWORD AclBytesFree;
} ACL_SIZE_INFORMATION;
#ifndef _LDT_ENTRY_DEFINED
#define _LDT_ENTRY_DEFINED
typedef struct _LDT_ENTRY
{
USHORT LimitLow;
USHORT BaseLow;
union
{
struct
{
UCHAR BaseMid;
UCHAR Flags1;
UCHAR Flags2;
UCHAR BaseHi;
} Bytes;
struct
{
ULONG BaseMid:8;
ULONG Type:5;
ULONG Dpl:2;
ULONG Pres:1;
ULONG LimitHi:4;
ULONG Sys:1;
ULONG Reserved_0:1;
ULONG Default_Big:1;
ULONG Granularity:1;
ULONG BaseHi:8;
} Bits;
} HighWord;
} LDT_ENTRY, *PLDT_ENTRY, *LPLDT_ENTRY;
#endif
/* FIXME: add more machines */
#if defined(_X86_) && !defined(__PowerPC__)
#define SIZE_OF_80387_REGISTERS 80
@ -2023,6 +2044,11 @@ typedef struct _ACL_SIZE_INFORMATION {
#define CONTEXT_EXTENDED_REGISTERS (CONTEXT_i386|0x00000020L)
#define CONTEXT_FULL (CONTEXT_CONTROL|CONTEXT_INTEGER|CONTEXT_SEGMENTS)
#define MAXIMUM_SUPPORTED_EXTENSION 512
#define EXCEPTION_READ_FAULT 0
#define EXCEPTION_WRITE_FAULT 1
#define EXCEPTION_EXECUTE_FAULT 8
typedef struct _FLOATING_SAVE_AREA {
DWORD ControlWord;
DWORD StatusWord;
@ -2061,6 +2087,237 @@ typedef struct _CONTEXT {
DWORD SegSs;
BYTE ExtendedRegisters[MAXIMUM_SUPPORTED_EXTENSION];
} CONTEXT;
#elif defined(__x86_64__)
#define CONTEXT_AMD64 0x100000
#if !defined(RC_INVOKED)
#define CONTEXT_CONTROL (CONTEXT_AMD64 | 0x1L)
#define CONTEXT_INTEGER (CONTEXT_AMD64 | 0x2L)
#define CONTEXT_SEGMENTS (CONTEXT_AMD64 | 0x4L)
#define CONTEXT_FLOATING_POINT (CONTEXT_AMD64 | 0x8L)
#define CONTEXT_DEBUG_REGISTERS (CONTEXT_AMD64 | 0x10L)
#define CONTEXT_FULL (CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_FLOATING_POINT)
#define CONTEXT_ALL (CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_SEGMENTS | CONTEXT_FLOATING_POINT | CONTEXT_DEBUG_REGISTERS)
#define CONTEXT_EXCEPTION_ACTIVE 0x8000000
#define CONTEXT_SERVICE_ACTIVE 0x10000000
#define CONTEXT_EXCEPTION_REQUEST 0x40000000
#define CONTEXT_EXCEPTION_REPORTING 0x80000000
#endif
#define INITIAL_MXCSR 0x1f80
#define INITIAL_FPCSR 0x027f
#define EXCEPTION_READ_FAULT 0
#define EXCEPTION_WRITE_FAULT 1
#define EXCEPTION_EXECUTE_FAULT 8
typedef struct DECLSPEC_ALIGN(16) _M128A {
ULONGLONG Low;
LONGLONG High;
} M128A, *PM128A;
typedef struct _XMM_SAVE_AREA32 {
WORD ControlWord;
WORD StatusWord;
BYTE TagWord;
BYTE Reserved1;
WORD ErrorOpcode;
DWORD ErrorOffset;
WORD ErrorSelector;
WORD Reserved2;
DWORD DataOffset;
WORD DataSelector;
WORD Reserved3;
DWORD MxCsr;
DWORD MxCsr_Mask;
M128A FloatRegisters[8];
M128A XmmRegisters[16];
BYTE Reserved4[96];
} XMM_SAVE_AREA32, *PXMM_SAVE_AREA32;
typedef struct DECLSPEC_ALIGN(16) _CONTEXT {
DWORD64 P1Home;
DWORD64 P2Home;
DWORD64 P3Home;
DWORD64 P4Home;
DWORD64 P5Home;
DWORD64 P6Home;
/* Control flags */
DWORD ContextFlags;
DWORD MxCsr;
/* Segment */
WORD SegCs;
WORD SegDs;
WORD SegEs;
WORD SegFs;
WORD SegGs;
WORD SegSs;
DWORD EFlags;
/* Debug */
DWORD64 Dr0;
DWORD64 Dr1;
DWORD64 Dr2;
DWORD64 Dr3;
DWORD64 Dr6;
DWORD64 Dr7;
/* Integer */
DWORD64 Rax;
DWORD64 Rcx;
DWORD64 Rdx;
DWORD64 Rbx;
DWORD64 Rsp;
DWORD64 Rbp;
DWORD64 Rsi;
DWORD64 Rdi;
DWORD64 R8;
DWORD64 R9;
DWORD64 R10;
DWORD64 R11;
DWORD64 R12;
DWORD64 R13;
DWORD64 R14;
DWORD64 R15;
/* Counter */
DWORD64 Rip;
/* Floating point */
union {
XMM_SAVE_AREA32 FltSave;
struct {
M128A Header[2];
M128A Legacy[8];
M128A Xmm0;
M128A Xmm1;
M128A Xmm2;
M128A Xmm3;
M128A Xmm4;
M128A Xmm5;
M128A Xmm6;
M128A Xmm7;
M128A Xmm8;
M128A Xmm9;
M128A Xmm10;
M128A Xmm11;
M128A Xmm12;
M128A Xmm13;
M128A Xmm14;
M128A Xmm15;
} DUMMYSTRUCTNAME;
} DUMMYUNIONNAME;
/* Vector */
M128A VectorRegister[26];
DWORD64 VectorControl;
/* Debug control */
DWORD64 DebugControl;
DWORD64 LastBranchToRip;
DWORD64 LastBranchFromRip;
DWORD64 LastExceptionToRip;
DWORD64 LastExceptionFromRip;
} CONTEXT, *PCONTEXT;
typedef struct _KNONVOLATILE_CONTEXT_POINTERS {
union {
PM128A FloatingContext[16];
struct {
PM128A Xmm0;
PM128A Xmm1;
PM128A Xmm2;
PM128A Xmm3;
PM128A Xmm4;
PM128A Xmm5;
PM128A Xmm6;
PM128A Xmm7;
PM128A Xmm8;
PM128A Xmm9;
PM128A Xmm10;
PM128A Xmm11;
PM128A Xmm12;
PM128A Xmm13;
PM128A Xmm14;
PM128A Xmm15;
};
};
union {
PULONG64 IntegerContext[16];
struct {
PULONG64 Rax;
PULONG64 Rcx;
PULONG64 Rdx;
PULONG64 Rbx;
PULONG64 Rsp;
PULONG64 Rbp;
PULONG64 Rsi;
PULONG64 Rdi;
PULONG64 R8;
PULONG64 R9;
PULONG64 R10;
PULONG64 R11;
PULONG64 R12;
PULONG64 R13;
PULONG64 R14;
PULONG64 R15;
};
};
} KNONVOLATILE_CONTEXT_POINTERS, *PKNONVOLATILE_CONTEXT_POINTERS;
#define UNW_FLAG_NHANDLER 0x0 /* No handler. */
#define UNW_FLAG_EHANDLER 0x1 /* Exception handler should be called */
#define UNW_FLAG_UHANDLER 0x2 /* Termination handler that should be called when unwinding an exception */
#define UNW_FLAG_CHAININFO 0x4 /* FunctionEntry member is the contents of a previous function table entry */
#define RUNTIME_FUNCTION_INDIRECT 0x1
typedef struct _RUNTIME_FUNCTION {
DWORD BeginAddress;
DWORD EndAddress;
DWORD UnwindData;
} RUNTIME_FUNCTION,*PRUNTIME_FUNCTION;
typedef PRUNTIME_FUNCTION (*PGET_RUNTIME_FUNCTION_CALLBACK)(DWORD64 ControlPc,PVOID Context);
typedef DWORD (*POUT_OF_PROCESS_FUNCTION_TABLE_CALLBACK)(HANDLE Process,PVOID TableAddress,PDWORD Entries,PRUNTIME_FUNCTION *Functions);
#define OUT_OF_PROCESS_FUNCTION_TABLE_CALLBACK_EXPORT_NAME "OutOfProcessFunctionTableCallback"
NTSYSAPI
VOID
__cdecl
RtlRestoreContext(PCONTEXT ContextRecord,
struct _EXCEPTION_RECORD *ExceptionRecord);
NTSYSAPI
BOOLEAN
__cdecl
RtlAddFunctionTable(PRUNTIME_FUNCTION FunctionTable,
DWORD EntryCount,
DWORD64 BaseAddress);
NTSYSAPI
BOOLEAN
__cdecl
RtlInstallFunctionTableCallback(DWORD64 TableIdentifier,
DWORD64 BaseAddress,
DWORD Length,
PGET_RUNTIME_FUNCTION_CALLBACK Callback,
PVOID Context,
PCWSTR OutOfProcessCallbackDll);
NTSYSAPI
BOOLEAN
__cdecl
RtlDeleteFunctionTable(PRUNTIME_FUNCTION FunctionTable);
#elif defined(_PPC_)
#define CONTEXT_CONTROL 1L
#define CONTEXT_FLOATING_POINT 2L
@ -2548,18 +2805,44 @@ typedef struct _CONTEXT {
#error "undefined processor type"
#endif
typedef CONTEXT *PCONTEXT,*LPCONTEXT;
typedef struct _EXCEPTION_RECORD {
DWORD ExceptionCode;
DWORD ExceptionFlags;
struct _EXCEPTION_RECORD *ExceptionRecord;
PVOID ExceptionAddress;
DWORD NumberParameters;
ULONG_PTR ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
} EXCEPTION_RECORD,*PEXCEPTION_RECORD,*LPEXCEPTION_RECORD;
typedef struct _EXCEPTION_POINTERS {
PEXCEPTION_RECORD ExceptionRecord;
PCONTEXT ContextRecord;
} EXCEPTION_POINTERS,*PEXCEPTION_POINTERS,*LPEXCEPTION_POINTERS;
#define EXCEPTION_NONCONTINUABLE 1
#define EXCEPTION_MAXIMUM_PARAMETERS 15
typedef struct _EXCEPTION_RECORD {
DWORD ExceptionCode;
DWORD ExceptionFlags;
struct _EXCEPTION_RECORD *ExceptionRecord;
PVOID ExceptionAddress;
DWORD NumberParameters;
ULONG_PTR ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
} EXCEPTION_RECORD, *PEXCEPTION_RECORD, *LPEXCEPTION_RECORD;
typedef EXCEPTION_RECORD *PEXCEPTION_RECORD;
typedef struct _EXCEPTION_RECORD32 {
DWORD ExceptionCode;
DWORD ExceptionFlags;
DWORD ExceptionRecord;
DWORD ExceptionAddress;
DWORD NumberParameters;
DWORD ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
} EXCEPTION_RECORD32,*PEXCEPTION_RECORD32;
typedef struct _EXCEPTION_RECORD64 {
DWORD ExceptionCode;
DWORD ExceptionFlags;
DWORD64 ExceptionRecord;
DWORD64 ExceptionAddress;
DWORD NumberParameters;
DWORD __unusedAlignment;
DWORD64 ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS];
} EXCEPTION_RECORD64,*PEXCEPTION_RECORD64;
typedef struct _EXCEPTION_POINTERS {
PEXCEPTION_RECORD ExceptionRecord;
PCONTEXT ContextRecord;
} EXCEPTION_POINTERS,*PEXCEPTION_POINTERS, *LPEXCEPTION_POINTERS;
#ifdef _M_PPC
#define LARGE_INTEGER_ORDER(x) x HighPart; DWORD LowPart;
@ -2911,6 +3194,12 @@ RtlCaptureStackBackTrace(
OUT PDWORD BackTraceHash OPTIONAL
);
NTSYSAPI
VOID
NTAPI
RtlCaptureContext(
PCONTEXT ContextRecord
);
NTSYSAPI
PVOID
@ -3128,7 +3417,7 @@ typedef struct _IMAGE_OPTIONAL_HEADER64 {
WORD MinorImageVersion;
WORD MajorSubsystemVersion;
WORD MinorSubsystemVersion;
DWORD Reserved1;
DWORD Win32VersionValue;
DWORD SizeOfImage;
DWORD SizeOfHeaders;
DWORD CheckSum;
@ -3365,14 +3654,75 @@ typedef struct _IMAGE_IMPORT_BY_NAME {
WORD Hint;
BYTE Name[1];
} IMAGE_IMPORT_BY_NAME,*PIMAGE_IMPORT_BY_NAME;
typedef struct _IMAGE_THUNK_DATA {
union {
ULONG ForwarderString;
ULONG Function;
DWORD Ordinal;
ULONG AddressOfData;
} u1;
} IMAGE_THUNK_DATA,*PIMAGE_THUNK_DATA;
#include "pshpack8.h"
typedef struct _IMAGE_THUNK_DATA64 {
union {
ULONGLONG ForwarderString;
ULONGLONG Function;
ULONGLONG Ordinal;
ULONGLONG AddressOfData;
} u1;
} IMAGE_THUNK_DATA64;
typedef IMAGE_THUNK_DATA64 *PIMAGE_THUNK_DATA64;
#include "poppack.h"
typedef struct _IMAGE_THUNK_DATA32 {
union {
DWORD ForwarderString;
DWORD Function;
DWORD Ordinal;
DWORD AddressOfData;
} u1;
} IMAGE_THUNK_DATA32;
typedef IMAGE_THUNK_DATA32 *PIMAGE_THUNK_DATA32;
#define IMAGE_ORDINAL_FLAG64 0x8000000000000000ULL
#define IMAGE_ORDINAL_FLAG32 0x80000000
#define IMAGE_ORDINAL64(Ordinal) (Ordinal & 0xffff)
#define IMAGE_ORDINAL32(Ordinal) (Ordinal & 0xffff)
#define IMAGE_SNAP_BY_ORDINAL64(Ordinal) ((Ordinal & IMAGE_ORDINAL_FLAG64)!=0)
#define IMAGE_SNAP_BY_ORDINAL32(Ordinal) ((Ordinal & IMAGE_ORDINAL_FLAG32)!=0)
typedef VOID
(NTAPI *PIMAGE_TLS_CALLBACK)(PVOID DllHandle,DWORD Reason,PVOID Reserved);
typedef struct _IMAGE_TLS_DIRECTORY64 {
ULONGLONG StartAddressOfRawData;
ULONGLONG EndAddressOfRawData;
ULONGLONG AddressOfIndex;
ULONGLONG AddressOfCallBacks;
DWORD SizeOfZeroFill;
DWORD Characteristics;
} IMAGE_TLS_DIRECTORY64;
typedef IMAGE_TLS_DIRECTORY64 *PIMAGE_TLS_DIRECTORY64;
typedef struct _IMAGE_TLS_DIRECTORY32 {
DWORD StartAddressOfRawData;
DWORD EndAddressOfRawData;
DWORD AddressOfIndex;
DWORD AddressOfCallBacks;
DWORD SizeOfZeroFill;
DWORD Characteristics;
} IMAGE_TLS_DIRECTORY32;
typedef IMAGE_TLS_DIRECTORY32 *PIMAGE_TLS_DIRECTORY32;
#ifdef _WIN64
#define IMAGE_ORDINAL_FLAG IMAGE_ORDINAL_FLAG64
#define IMAGE_ORDINAL(Ordinal) IMAGE_ORDINAL64(Ordinal)
typedef IMAGE_THUNK_DATA64 IMAGE_THUNK_DATA;
typedef PIMAGE_THUNK_DATA64 PIMAGE_THUNK_DATA;
#define IMAGE_SNAP_BY_ORDINAL(Ordinal) IMAGE_SNAP_BY_ORDINAL64(Ordinal)
typedef IMAGE_TLS_DIRECTORY64 IMAGE_TLS_DIRECTORY;
typedef PIMAGE_TLS_DIRECTORY64 PIMAGE_TLS_DIRECTORY;
#else
#define IMAGE_ORDINAL_FLAG IMAGE_ORDINAL_FLAG32
#define IMAGE_ORDINAL(Ordinal) IMAGE_ORDINAL32(Ordinal)
typedef IMAGE_THUNK_DATA32 IMAGE_THUNK_DATA;
typedef PIMAGE_THUNK_DATA32 PIMAGE_THUNK_DATA;
#define IMAGE_SNAP_BY_ORDINAL(Ordinal) IMAGE_SNAP_BY_ORDINAL32(Ordinal)
typedef IMAGE_TLS_DIRECTORY32 IMAGE_TLS_DIRECTORY;
typedef PIMAGE_TLS_DIRECTORY32 PIMAGE_TLS_DIRECTORY;
#endif
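With the 64-bit thunk and ordinal definitions in place, import-table code can be written once against the width-neutral IMAGE_THUNK_DATA / IMAGE_ORDINAL names. A hedged sketch of a loader-style walk over one thunk array (counting only; error handling omitted):

    #include <windows.h>

    void SampleCountThunks(PIMAGE_THUNK_DATA Thunk, ULONG *ByOrdinal, ULONG *ByName)
    {
        *ByOrdinal = *ByName = 0;
        for (; Thunk->u1.AddressOfData != 0; Thunk++)
        {
            if (IMAGE_SNAP_BY_ORDINAL(Thunk->u1.Ordinal))
                (*ByOrdinal)++;   /* low word is the ordinal: IMAGE_ORDINAL(Thunk->u1.Ordinal) */
            else
                (*ByName)++;      /* AddressOfData is the RVA of an IMAGE_IMPORT_BY_NAME */
        }
    }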
typedef struct _IMAGE_IMPORT_DESCRIPTOR {
_ANONYMOUS_UNION union {
DWORD Characteristics;
@ -3394,14 +3744,6 @@ typedef struct _IMAGE_BOUND_FORWARDER_REF {
WORD Reserved;
} IMAGE_BOUND_FORWARDER_REF,*PIMAGE_BOUND_FORWARDER_REF;
typedef void(NTAPI *PIMAGE_TLS_CALLBACK)(PVOID,DWORD,PVOID);
typedef struct _IMAGE_TLS_DIRECTORY {
DWORD StartAddressOfRawData;
DWORD EndAddressOfRawData;
PDWORD AddressOfIndex;
PIMAGE_TLS_CALLBACK *AddressOfCallBacks;
DWORD SizeOfZeroFill;
DWORD Characteristics;
} IMAGE_TLS_DIRECTORY,*PIMAGE_TLS_DIRECTORY;
typedef struct _IMAGE_RESOURCE_DIRECTORY {
DWORD Characteristics;
DWORD TimeDateStamp;

View file

@ -45,6 +45,7 @@ typedef struct _ROS_LOADER_PARAMETER_BLOCK
ULONG_PTR PageDirectoryEnd;
ULONG_PTR KernelBase;
ULONG_PTR ArchExtra;
ULONG (*FrLdrDbgPrint)(const char *Format, ...);
} ROS_LOADER_PARAMETER_BLOCK, *PROS_LOADER_PARAMETER_BLOCK;
extern BOOLEAN AcpiTableDetected;
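The new FrLdrDbgPrint member lets the boot loader hand ntoskrnl a printf-style routine that works before the kernel's own debug transport is initialized. A hedged sketch of how very early kernel code could call through it (the function name is illustrative, not the actual ReactOS call site):

    VOID SampleEarlyPrint(PROS_LOADER_PARAMETER_BLOCK RosBlock)
    {
        if (RosBlock->FrLdrDbgPrint != NULL)
            RosBlock->FrLdrDbgPrint("ntoskrnl: early init, kernel base %p\n",
                                    (PVOID)RosBlock->KernelBase);
    }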

View file

@ -328,53 +328,12 @@ extern int spawnvp(int mode, const char *cmdname, const char * const argv[]);
#if defined(__i386__) && defined(__GNUC__) && !defined(WINE_PORT_NO_INTERLOCKED)
extern inline long interlocked_cmpxchg( long *dest, long xchg, long compare )
{
long ret;
__asm__ __volatile__( "lock; cmpxchgl %2,(%1)"
: "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
return ret;
}
#define interlocked_cmpxchg InterlockedCompareExchange
#define interlocked_cmpxchg_ptr InterlockedCompareExchangePtr
#define interlocked_xchg InterlockedExchange
#define interlocked_xchg_ptr InterlockedExchangePtr
#define interlocked_xchg_add InterlockedExchangeAdd
extern inline void *interlocked_cmpxchg_ptr( void **dest, void *xchg, void *compare )
{
void *ret;
__asm__ __volatile__( "lock; cmpxchgl %2,(%1)"
: "=a" (ret) : "r" (dest), "r" (xchg), "0" (compare) : "memory" );
return ret;
}
extern inline long interlocked_xchg( long *dest, long val )
{
long ret;
__asm__ __volatile__( "lock; xchgl %0,(%1)"
: "=r" (ret) : "r" (dest), "0" (val) : "memory" );
return ret;
}
extern inline void *interlocked_xchg_ptr( void **dest, void *val )
{
void *ret;
__asm__ __volatile__( "lock; xchgl %0,(%1)"
: "=r" (ret) : "r" (dest), "0" (val) : "memory" );
return ret;
}
extern inline long interlocked_xchg_add( long *dest, long incr )
{
long ret;
__asm__ __volatile__( "lock; xaddl %0,(%1)"
: "=r" (ret) : "r" (dest), "0" (incr) : "memory" );
return ret;
}
#else /* __i386___ && __GNUC__ */
extern long interlocked_cmpxchg( long *dest, long xchg, long compare );
extern void *interlocked_cmpxchg_ptr( void **dest, void *xchg, void *compare );
extern long interlocked_xchg( long *dest, long val );
extern void *interlocked_xchg_ptr( void **dest, void *val );
extern long interlocked_xchg_add( long *dest, long incr );
#endif /* __i386___ && __GNUC__ */
@ -385,11 +344,6 @@ extern long interlocked_xchg_add( long *dest, long incr );
#define getopt_long __WINE_NOT_PORTABLE(getopt_long)
#define getopt_long_only __WINE_NOT_PORTABLE(getopt_long_only)
#define getpagesize __WINE_NOT_PORTABLE(getpagesize)
#define interlocked_cmpxchg __WINE_NOT_PORTABLE(interlocked_cmpxchg)
#define interlocked_cmpxchg_ptr __WINE_NOT_PORTABLE(interlocked_cmpxchg_ptr)
#define interlocked_xchg __WINE_NOT_PORTABLE(interlocked_xchg)
#define interlocked_xchg_ptr __WINE_NOT_PORTABLE(interlocked_xchg_ptr)
#define interlocked_xchg_add __WINE_NOT_PORTABLE(interlocked_xchg_add)
#define lstat __WINE_NOT_PORTABLE(lstat)
#define memcpy_unaligned __WINE_NOT_PORTABLE(memcpy_unaligned)
#define memmove __WINE_NOT_PORTABLE(memmove)

View file

@ -18,49 +18,6 @@
#define NDEBUG
#include <debug.h>
/* FIXME *********************************************************************/
/* FIXME: Interlocked functions that need to be made into a public header */
FORCEINLINE
LONG
InterlockedAnd(IN OUT volatile LONG *Target,
IN LONG Set)
{
LONG i;
LONG j;
j = *Target;
do {
i = j;
j = _InterlockedCompareExchange((PLONG)Target,
i & Set,
i);
} while (i != j);
return j;
}
FORCEINLINE
LONG
InterlockedOr(IN OUT volatile LONG *Target,
IN LONG Set)
{
LONG i;
LONG j;
j = *Target;
do {
i = j;
j = _InterlockedCompareExchange((PLONG)Target,
i | Set,
i);
} while (i != j);
return j;
}
/* FUNCTIONS *****************************************************************/
#ifdef _WIN64

View file

@ -74,7 +74,7 @@ MiFlushTlb(PULONG Pt, PVOID Address)
PULONG
MmGetPageDirectory(VOID)
{
return (PULONG)__readcr3();
return (PULONG)(ULONG_PTR)__readcr3();
}
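Because __readcr3 now returns an unsigned __int64 even in an x86 build, the value must be narrowed explicitly before it can become a pointer; going through ULONG_PTR keeps the cast correct on both widths. The equivalent pattern, sketched (masking of the low flag bits added for illustration):

    static PVOID SampleCurrentPageDirectory(void)
    {
        unsigned __int64 Cr3 = __readcr3();          /* 64-bit result on x86 and x64 alike */
        return (PVOID)(ULONG_PTR)(Cr3 & ~0xFFFULL);  /* drop PWT/PCD and the other low bits */
    }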
static ULONG