Portability fixes.

svn path=/trunk/; revision=12950
This commit is contained in:
Filip Navara 2005-01-12 10:05:31 +00:00
parent 54b6cbb3a2
commit 6ce344b434
15 changed files with 147 additions and 138 deletions

View file

@ -58,7 +58,7 @@ KiIpiSendRequest(ULONG TargetSet,
ULONG IpiRequest);
VOID
KeIpiGenericCall(VOID STDCALL (*WorkerRoutine)(PVOID),
KeIpiGenericCall(VOID (STDCALL *WorkerRoutine)(PVOID),
PVOID Argument);
/* next file ***************************************************************/

View file

@ -453,9 +453,9 @@ VOID MmBuildMdlFromPages(PMDL Mdl, PULONG Pages);
VOID MiShutdownMemoryManager(VOID);
VOID MmInit1(ULONG FirstKernelPhysAddress,
ULONG LastKernelPhysAddress,
ULONG LastKernelAddress,
VOID MmInit1(ULONG_PTR FirstKernelPhysAddress,
ULONG_PTR LastKernelPhysAddress,
ULONG_PTR LastKernelAddress,
PADDRESS_RANGE BIOSMemoryMap,
ULONG AddressRangeCount,
ULONG MaxMemInMeg);
@ -507,11 +507,11 @@ NTSTATUS MmPageFault(ULONG Cs,
/* mm.c **********************************************************************/
NTSTATUS MmAccessFault(KPROCESSOR_MODE Mode,
ULONG Address,
ULONG_PTR Address,
BOOLEAN FromMdl);
NTSTATUS MmNotPresentFault(KPROCESSOR_MODE Mode,
ULONG Address,
ULONG_PTR Address,
BOOLEAN FromMdl);
/* anonmem.c *****************************************************************/
@ -633,10 +633,10 @@ VOID MmUnlockPage(PFN_TYPE Page);
ULONG MmGetLockCountPage(PFN_TYPE Page);
PVOID MmInitializePageList(PVOID FirstPhysKernelAddress,
PVOID LastPhysKernelAddress,
PVOID MmInitializePageList(ULONG_PTR FirstPhysKernelAddress,
ULONG_PTR LastPhysKernelAddress,
ULONG MemorySizeInPages,
ULONG LastKernelBase,
ULONG_PTR LastKernelBase,
PADDRESS_RANGE BIOSMemoryMap,
ULONG AddressRangeCount);

View file

@ -45,7 +45,7 @@ extern ULONG MmPagedPoolSize;
*/
#define MM_CACHE_LINE_SIZE 32
#define MM_ROUND_UP(x,s) ((PVOID)(((ULONG)(x)+(s)-1) & ~((s)-1)))
#define MM_ROUND_DOWN(x,s) ((PVOID)(((ULONG)(x)) & ~((s)-1)))
#define MM_ROUND_UP(x,s) ((PVOID)(((ULONG_PTR)(x)+(s)-1) & ~((ULONG_PTR)(s)-1)))
#define MM_ROUND_DOWN(x,s) ((PVOID)(((ULONG_PTR)(x)) & ~((ULONG_PTR)(s)-1)))
#endif /* __INTERNAL_POOL_H */

View file

@ -76,9 +76,9 @@ static CHAR KeLoaderModuleStrings[64][256];
static CHAR KeLoaderCommandLine[256];
static ADDRESS_RANGE KeMemoryMap[64];
static ULONG KeMemoryMapRangeCount;
static ULONG FirstKrnlPhysAddr;
static ULONG LastKrnlPhysAddr;
static ULONG LastKernelAddress;
static ULONG_PTR FirstKrnlPhysAddr;
static ULONG_PTR LastKrnlPhysAddr;
static ULONG_PTR LastKernelAddress;
volatile BOOLEAN Initialized = FALSE;
extern ULONG MmCoreDumpType;
extern CHAR KiTimerSystemAuditing;

View file

@ -13,16 +13,16 @@
typedef unsigned long rulong;
#define R_IS_POOL_PTR(pool,ptr) (void*)(ptr) >= pool->UserBase && (char*)(ptr) < ((char*)pool->UserBase+pool->UserSize)
#define R_IS_POOL_PTR(pool,ptr) (void*)(ptr) >= pool->UserBase && (ULONG_PTR)(ptr) < ((ULONG_PTR)pool->UserBase + pool->UserSize)
#define R_ASSERT_PTR(pool,ptr) ASSERT( R_IS_POOL_PTR(pool,ptr) )
#define R_ASSERT_SIZE(pool,sz) ASSERT( sz > (sizeof(R_USED)+2*R_RZ) && sz >= sizeof(R_FREE) && sz < pool->UserSize )
#ifndef R_ROUND_UP
#define R_ROUND_UP(x,s) ((PVOID)(((rulong)(x)+(s)-1) & ~((s)-1)))
#define R_ROUND_UP(x,s) ((PVOID)(((ULONG_PTR)(x)+(s)-1) & ~((ULONG_PTR)(s)-1)))
#endif//R_ROUND_UP
#ifndef R_ROUND_DOWN
#define R_ROUND_DOWN(x,s) ((PVOID)(((rulong)(x)) & ~((s)-1)))
#define R_ROUND_DOWN(x,s) ((PVOID)(((ULONG_PTR)(x)) & ~((ULONG_PTR)(s)-1)))
#endif//R_ROUND_DOWN
#ifndef R_QUEMIN
@ -79,7 +79,7 @@ typedef struct _R_FREE
rulong Status : 2;
rulong Size;
#if R_STACK
rulong LastOwnerStack[R_STACK];
ULONG_PTR LastOwnerStack[R_STACK];
#endif//R_STACK
struct _R_FREE* NextFree;
struct _R_FREE* PrevFree;
@ -95,7 +95,7 @@ typedef struct _R_USED
rulong Status : 2;
rulong Size;
#if R_STACK
rulong LastOwnerStack[R_STACK];
ULONG_PTR LastOwnerStack[R_STACK];
#endif//R_STACK
struct _R_USED* NextUsed;
#if R_RZ
@ -245,7 +245,7 @@ RPoolRemoveFree ( PR_POOL pool, PR_FREE Item )
pool->FirstFree = Item->NextFree;
}
#if defined(DBG) || defined(KDBG)
Item->NextFree = Item->PrevFree = (PR_FREE)0xDEADBEEF;
Item->NextFree = Item->PrevFree = (PR_FREE)(ULONG_PTR)0xDEADBEEF;
#endif//DBG || KDBG
}
@ -281,7 +281,7 @@ RFreeInit ( void* memory )
block->Status = 0;
RFreeFillStack ( block );
#if defined(DBG) || defined(KDBG)
block->PrevFree = block->NextFree = (PR_FREE)0xDEADBEEF;
block->PrevFree = block->NextFree = (PR_FREE)(ULONG_PTR)0xDEADBEEF;
#endif//DBG || KDBG
return block;
}
@ -644,7 +644,7 @@ RiUsedInit ( PR_USED Block, rulong Tag )
// now add the block to the used block list
#if defined(DBG) || defined(KDBG)
Block->NextUsed = (PR_USED)0xDEADBEEF;
Block->NextUsed = (PR_USED)(ULONG_PTR)0xDEADBEEF;
#endif//R_USED_LIST
Block->Tag = Tag;

View file

@ -284,10 +284,10 @@ MiIsPfnRam(PADDRESS_RANGE BIOSMemoryMap,
PVOID INIT_FUNCTION
MmInitializePageList(PVOID FirstPhysKernelAddress,
PVOID LastPhysKernelAddress,
MmInitializePageList(ULONG_PTR FirstPhysKernelAddress,
ULONG_PTR LastPhysKernelAddress,
ULONG MemorySizeInPages,
ULONG LastKernelAddress,
ULONG_PTR LastKernelAddress,
PADDRESS_RANGE BIOSMemoryMap,
ULONG AddressRangeCount)
/*
@ -331,9 +331,9 @@ MmInitializePageList(PVOID FirstPhysKernelAddress,
DPRINT("Reserved %d\n", Reserved);
LastKernelAddress = PAGE_ROUND_UP(LastKernelAddress);
LastKernelAddress = ((ULONG)LastKernelAddress + (Reserved * PAGE_SIZE));
LastPhysKernelAddress = (PVOID)PAGE_ROUND_UP(LastPhysKernelAddress);
LastPhysKernelAddress = (char*)LastPhysKernelAddress + (Reserved * PAGE_SIZE);
LastKernelAddress = ((ULONG_PTR)LastKernelAddress + (Reserved * PAGE_SIZE));
LastPhysKernelAddress = (ULONG_PTR)PAGE_ROUND_UP(LastPhysKernelAddress);
LastPhysKernelAddress = (ULONG_PTR)LastPhysKernelAddress + (Reserved * PAGE_SIZE);
MmStats.NrTotalPages = 0;
MmStats.NrSystemPages = 0;
@ -349,7 +349,7 @@ MmInitializePageList(PVOID FirstPhysKernelAddress,
LastPage = MmPageArraySize;
for (i = 0; i < Reserved; i++)
{
PVOID Address = (char*)(ULONG)MmPageArray + (i * PAGE_SIZE);
PVOID Address = (char*)MmPageArray + (i * PAGE_SIZE);
ULONG j, start, end;
if (!MmIsPagePresent(NULL, Address))
{

View file

@ -32,7 +32,7 @@ VOID
ExUnmapPage(PVOID Addr)
{
KIRQL oldIrql;
ULONG Base = ((char*)Addr - (char*)MM_KERNEL_MAP_BASE) / PAGE_SIZE;
ULONG_PTR Base = ((ULONG_PTR)Addr - (ULONG_PTR)MM_KERNEL_MAP_BASE) / PAGE_SIZE;
DPRINT("ExUnmapPage(Addr %x)\n",Addr);
@ -93,12 +93,12 @@ ExAllocatePageWithPhysPage(PFN_TYPE Page)
{
KIRQL oldlvl;
PVOID Addr;
ULONG Base;
ULONG_PTR Base;
NTSTATUS Status;
KeAcquireSpinLock(&AllocMapLock, &oldlvl);
Base = RtlFindClearBitsAndSet(&AllocMap, 1, AllocMapHint);
if (Base != 0xFFFFFFFF)
if (Base != (ULONG_PTR)-1)
{
AllocMapHint = Base + 1;
KeReleaseSpinLock(&AllocMapLock, oldlvl);
@ -131,7 +131,7 @@ VOID
MiFreeNonPagedPoolRegion(PVOID Addr, ULONG Count, BOOLEAN Free)
{
ULONG i;
ULONG Base = ((char*)Addr - (char*)MM_KERNEL_MAP_BASE) / PAGE_SIZE;
ULONG_PTR Base = ((char*)Addr - (char*)MM_KERNEL_MAP_BASE) / PAGE_SIZE;
KIRQL oldlvl;
for (i = 0; i < Count; i++)
@ -154,12 +154,12 @@ MiAllocNonPagedPoolRegion(ULONG nr_pages)
* FUNCTION: Allocates a region of pages within the nonpaged pool area
*/
{
ULONG Base;
ULONG_PTR Base;
KIRQL oldlvl;
KeAcquireSpinLock(&AllocMapLock, &oldlvl);
Base = RtlFindClearBitsAndSet(&AllocMap, nr_pages, AllocMapHint);
if (Base == 0xFFFFFFFF)
if (Base == (ULONG_PTR)-1)
{
DbgPrint("CRITICAL: Out of non-paged pool space\n");
KEBUGCHECK(0);

View file

@ -188,7 +188,7 @@ MmDumpMemoryAreas(PMADDRESS_SPACE AddressSpace)
Node != NULL;
Node = MmIterateNextNode(Node))
{
DbgPrint("Start %x End %x Attributes %x\n",
DbgPrint("Start %p End %p Attributes %x\n",
Node->StartingAddress, Node->EndingAddress,
Node->Attributes);
}
@ -203,7 +203,7 @@ MmLocateMemoryAreaByAddress(
{
PMEMORY_AREA Node = AddressSpace->MemoryAreaRoot;
DPRINT("MmLocateMemoryAreaByAddress(AddressSpace %x, Address %x)\n",
DPRINT("MmLocateMemoryAreaByAddress(AddressSpace %p, Address %p)\n",
AddressSpace, Address);
if (!(KdDebugState & KD_DEBUG_SCREEN))
@ -217,13 +217,13 @@ MmLocateMemoryAreaByAddress(
Node = Node->RightChild;
else
{
DPRINT("MmLocateMemoryAreaByAddress(%x): %x [%x - %x]\n",
DPRINT("MmLocateMemoryAreaByAddress(%p): %p [%p - %p]\n",
Address, Node, Node->StartingAddress, Node->EndingAddress);
return Node;
}
}
DPRINT("MmLocateMemoryAreaByAddress(%x): 0\n", Address);
DPRINT("MmLocateMemoryAreaByAddress(%p): 0\n", Address);
return NULL;
}
@ -250,24 +250,24 @@ MmLocateMemoryAreaByRegion(
if (Node->StartingAddress >= Address &&
Node->StartingAddress < Extent)
{
DPRINT("MmLocateMemoryAreaByRegion(%x - %x): %x - %x\n",
Address, Address + Length, Node->StartingAddress,
DPRINT("MmLocateMemoryAreaByRegion(%p - %p): %p - %p\n",
Address, (ULONG_PTR)Address + Length, Node->StartingAddress,
Node->EndingAddress);
return Node;
}
if (Node->EndingAddress > Address &&
Node->EndingAddress < Extent)
{
DPRINT("MmLocateMemoryAreaByRegion(%x - %x): %x - %x\n",
Address, Address + Length, Node->StartingAddress,
DPRINT("MmLocateMemoryAreaByRegion(%p - %p): %p - %p\n",
Address, (ULONG_PTR)Address + Length, Node->StartingAddress,
Node->EndingAddress);
return Node;
}
if (Node->StartingAddress <= Address &&
Node->EndingAddress >= Extent)
{
DPRINT("MmLocateMemoryAreaByRegion(%x - %x): %x - %x\n",
Address, Address + Length, Node->StartingAddress,
DPRINT("MmLocateMemoryAreaByRegion(%p - %p): %p - %p\n",
Address, (ULONG_PTR)Address + Length, Node->StartingAddress,
Node->EndingAddress);
return Node;
}
@ -417,9 +417,9 @@ MmInsertMemoryArea(
Node = AddressSpace->MemoryAreaRoot;
do
{
DPRINT("marea->EndingAddress: %x Node->StartingAddress: %x\n",
DPRINT("marea->EndingAddress: %p Node->StartingAddress: %p\n",
marea->EndingAddress, Node->StartingAddress);
DPRINT("marea->StartingAddress: %x Node->EndingAddress: %x\n",
DPRINT("marea->StartingAddress: %p Node->EndingAddress: %p\n",
marea->StartingAddress, Node->EndingAddress);
ASSERT(marea->EndingAddress <= Node->StartingAddress ||
marea->StartingAddress >= Node->EndingAddress);
@ -467,7 +467,7 @@ MmFindGapBottomUp(
MmVerifyMemoryAreas(AddressSpace);
DPRINT("LowestAddress: %x HighestAddress: %x\n",
DPRINT("LowestAddress: %p HighestAddress: %p\n",
AddressSpace->LowestAddress, HighestAddress);
AlignedAddress = MM_ROUND_UP(AddressSpace->LowestAddress, Granularity);
@ -475,9 +475,9 @@ MmFindGapBottomUp(
/* Special case for empty tree. */
if (AddressSpace->MemoryAreaRoot == NULL)
{
if (HighestAddress - AlignedAddress >= Length)
if ((ULONG_PTR)HighestAddress - (ULONG_PTR)AlignedAddress >= Length)
{
DPRINT("MmFindGapBottomUp: %x\n", AlignedAddress);
DPRINT("MmFindGapBottomUp: %p\n", AlignedAddress);
return AlignedAddress;
}
DPRINT("MmFindGapBottomUp: 0\n");
@ -497,9 +497,9 @@ MmFindGapBottomUp(
AlignedAddress = MM_ROUND_UP(PreviousNode->EndingAddress, Granularity);
if (Node->StartingAddress > AlignedAddress &&
Node->StartingAddress - AlignedAddress >= Length)
(ULONG_PTR)Node->StartingAddress - (ULONG_PTR)AlignedAddress >= Length)
{
DPRINT("MmFindGapBottomUp: %x\n", AlignedAddress);
DPRINT("MmFindGapBottomUp: %p\n", AlignedAddress);
return AlignedAddress;
}
@ -508,18 +508,18 @@ MmFindGapBottomUp(
/* Check if there is enough space after the last memory area. */
AlignedAddress = MM_ROUND_UP(PreviousNode->EndingAddress, Granularity);
if (HighestAddress - AlignedAddress >= Length)
if ((ULONG_PTR)HighestAddress - (ULONG_PTR)AlignedAddress >= Length)
{
DPRINT("MmFindGapBottomUp: %x\n", AlignedAddress);
DPRINT("MmFindGapBottomUp: %p\n", AlignedAddress);
return AlignedAddress;
}
/* Check if there is enough space before the first memory area. */
AlignedAddress = MM_ROUND_UP(AddressSpace->LowestAddress, Granularity);
if (FirstNode->StartingAddress > AlignedAddress &&
FirstNode->StartingAddress - AlignedAddress >= Length)
(ULONG_PTR)FirstNode->StartingAddress - (ULONG_PTR)AlignedAddress >= Length)
{
DPRINT("MmFindGapBottomUp: %x\n", AlignedAddress);
DPRINT("MmFindGapBottomUp: %p\n", AlignedAddress);
return AlignedAddress;
}
@ -542,10 +542,10 @@ MmFindGapTopDown(
MmVerifyMemoryAreas(AddressSpace);
DPRINT("LowestAddress: %x HighestAddress: %x\n",
DPRINT("LowestAddress: %p HighestAddress: %p\n",
AddressSpace->LowestAddress, HighestAddress);
AlignedAddress = MM_ROUND_DOWN(HighestAddress - Length + 1, Granularity);
AlignedAddress = MM_ROUND_DOWN((ULONG_PTR)HighestAddress - Length + 1, Granularity);
/* Check for overflow. */
if (AlignedAddress > HighestAddress)
@ -556,7 +556,7 @@ MmFindGapTopDown(
{
if (AlignedAddress >= (PVOID)AddressSpace->LowestAddress)
{
DPRINT("MmFindGapTopDown: %x\n", AlignedAddress);
DPRINT("MmFindGapTopDown: %p\n", AlignedAddress);
return AlignedAddress;
}
DPRINT("MmFindGapTopDown: 0\n");
@ -569,7 +569,7 @@ MmFindGapTopDown(
/* Check if there is enough space after the last memory area. */
if (Node->EndingAddress <= AlignedAddress)
{
DPRINT("MmFindGapTopDown: %x\n", AlignedAddress);
DPRINT("MmFindGapTopDown: %p\n", AlignedAddress);
return AlignedAddress;
}
@ -581,7 +581,7 @@ MmFindGapTopDown(
if (Node == NULL)
break;
AlignedAddress = MM_ROUND_DOWN(PreviousNode->StartingAddress - Length + 1, Granularity);
AlignedAddress = MM_ROUND_DOWN((ULONG_PTR)PreviousNode->StartingAddress - Length + 1, Granularity);
/* Check for overflow. */
if (AlignedAddress > PreviousNode->StartingAddress)
@ -589,14 +589,14 @@ MmFindGapTopDown(
if (Node->EndingAddress <= AlignedAddress)
{
DPRINT("MmFindGapTopDown: %x\n", AlignedAddress);
DPRINT("MmFindGapTopDown: %p\n", AlignedAddress);
return AlignedAddress;
}
PreviousNode = Node;
}
AlignedAddress = MM_ROUND_DOWN(PreviousNode->StartingAddress - Length + 1, Granularity);
AlignedAddress = MM_ROUND_DOWN((ULONG_PTR)PreviousNode->StartingAddress - Length + 1, Granularity);
/* Check for overflow. */
if (AlignedAddress > PreviousNode->StartingAddress)
@ -604,7 +604,7 @@ MmFindGapTopDown(
if (AlignedAddress >= (PVOID)AddressSpace->LowestAddress)
{
DPRINT("MmFindGapTopDown: %x\n", AlignedAddress);
DPRINT("MmFindGapTopDown: %p\n", AlignedAddress);
return AlignedAddress;
}
@ -675,13 +675,15 @@ MmFindGapAtAddress(
if (RightNeighbour)
{
DPRINT("MmFindGapAtAddress: %x [%x]\n", Address, RightNeighbour->StartingAddress - Address);
return RightNeighbour->StartingAddress - Address;
DPRINT("MmFindGapAtAddress: %p [%p]\n", Address,
(ULONG_PTR)RightNeighbour->StartingAddress - (ULONG_PTR)Address);
return (ULONG_PTR)RightNeighbour->StartingAddress - (ULONG_PTR)Address;
}
else
{
DPRINT("MmFindGapAtAddress: %x [%x]\n", Address, HighestAddress - Address);
return HighestAddress - Address;
DPRINT("MmFindGapAtAddress: %p [%p]\n", Address,
(ULONG_PTR)HighestAddress - (ULONG_PTR)Address);
return (ULONG_PTR)HighestAddress - (ULONG_PTR)Address;
}
}
@ -726,7 +728,7 @@ MmFreeMemoryArea(
PVOID FreePageContext)
{
PMEMORY_AREA *ParentReplace;
PVOID Address;
ULONG_PTR Address;
PVOID EndAddress;
PEPROCESS CurrentProcess = PsGetCurrentProcess();
@ -737,7 +739,9 @@ MmFreeMemoryArea(
}
EndAddress = MM_ROUND_UP(MemoryArea->EndingAddress, PAGE_SIZE);
for (Address = MemoryArea->StartingAddress; Address < EndAddress; Address += PAGE_SIZE)
for (Address = (ULONG_PTR)MemoryArea->StartingAddress;
Address < (ULONG_PTR)EndAddress;
Address += PAGE_SIZE)
{
if (MemoryArea->Type == MEMORY_AREA_IO_MAPPING)
{
@ -866,8 +870,8 @@ MmFreeMemoryAreaByPtr(
{
PMEMORY_AREA MemoryArea;
DPRINT("MmFreeMemoryArea(AddressSpace %x, BaseAddress %x, "
"FreePageContext %d)\n", AddressSpace, BaseAddress,
DPRINT("MmFreeMemoryArea(AddressSpace %p, BaseAddress %p, "
"FreePageContext %p)\n", AddressSpace, BaseAddress,
FreePageContext);
MmVerifyMemoryAreas(AddressSpace);
@ -926,9 +930,9 @@ MmCreateMemoryArea(PEPROCESS Process,
ULONG tmpLength;
PMEMORY_AREA MemoryArea;
DPRINT("MmCreateMemoryArea(Type %d, BaseAddress %x, "
"*BaseAddress %x, Length %x, Attributes %x, TopDown: %x, "
"FixedAddress %x, Result %x)\n",
DPRINT("MmCreateMemoryArea(Type %d, BaseAddress %p, "
"*BaseAddress %p, Length %p, Attributes %x, TopDown: %x, "
"FixedAddress %x, Result %p)\n",
Type, BaseAddress, *BaseAddress, Length, Attributes, TopDown,
FixedAddress, Result);
@ -988,7 +992,7 @@ MmCreateMemoryArea(PEPROCESS Process,
RtlZeroMemory(MemoryArea, sizeof(MEMORY_AREA));
MemoryArea->Type = Type;
MemoryArea->StartingAddress = *BaseAddress;
MemoryArea->EndingAddress = *BaseAddress + tmpLength;
MemoryArea->EndingAddress = (PVOID)((ULONG_PTR)*BaseAddress + tmpLength);
MemoryArea->Attributes = Attributes;
MemoryArea->LockCount = 0;
MemoryArea->Process = Process;
@ -999,7 +1003,7 @@ MmCreateMemoryArea(PEPROCESS Process,
*Result = MemoryArea;
DPRINT("MmCreateMemoryArea() succeeded (%x)\n", *BaseAddress);
DPRINT("MmCreateMemoryArea() succeeded (%p)\n", *BaseAddress);
return STATUS_SUCCESS;
}

View file

@ -98,11 +98,11 @@ MmInitializeMdlImplementation(VOID)
PVOID
MmGetMdlPageAddress(PMDL Mdl, PVOID Offset)
{
PULONG MdlPages;
PPFN_NUMBER MdlPages;
MdlPages = (PULONG)(Mdl + 1);
MdlPages = (PPFN_NUMBER)(Mdl + 1);
return((PVOID)MdlPages[((ULONG)Offset) / PAGE_SIZE]);
return((PVOID)MdlPages[((ULONG_PTR)Offset) / PAGE_SIZE]);
}
@ -123,8 +123,8 @@ MmUnlockPages(PMDL Mdl)
*/
{
ULONG i;
PULONG MdlPages;
PFN_TYPE Page;
PPFN_NUMBER MdlPages;
PFN_NUMBER Page;
/*
* MmProbeAndLockPages MUST have been called to lock this mdl!
@ -161,7 +161,7 @@ MmUnlockPages(PMDL Mdl)
}
MdlPages = (PULONG)(Mdl + 1);
MdlPages = (PPFN_NUMBER)(Mdl + 1);
for (i=0; i<(PAGE_ROUND_UP(Mdl->ByteCount+Mdl->ByteOffset)/PAGE_SIZE); i++)
{
Page = MdlPages[i];
@ -422,7 +422,7 @@ VOID STDCALL MmProbeAndLockPages (PMDL Mdl,
if (!MmIsPagePresent(NULL, Address))
{
Status = MmNotPresentFault(Mode, (ULONG)Address, TRUE);
Status = MmNotPresentFault(Mode, (ULONG_PTR)Address, TRUE);
if (!NT_SUCCESS(Status))
{
for (j = 0; j < i; j++)
@ -442,7 +442,7 @@ VOID STDCALL MmProbeAndLockPages (PMDL Mdl,
if ((Operation == IoWriteAccess || Operation == IoModifyAccess) &&
(!(MmGetPageProtect(NULL, (PVOID)Address) & PAGE_READWRITE)))
{
Status = MmAccessFault(Mode, (ULONG)Address, TRUE);
Status = MmAccessFault(Mode, (ULONG_PTR)Address, TRUE);
if (!NT_SUCCESS(Status))
{
for (j = 0; j < i; j++)
@ -541,7 +541,7 @@ MmBuildMdlForNonPagedPool (PMDL Mdl)
* mdl buffer must (at least) be in kernel space, though this doesn't
* necessarily mean that the buffer is within _nonpaged_ kernel space...
*/
ASSERT((ULONG)Mdl->StartVa >= KERNEL_BASE);
ASSERT((ULONG_PTR)Mdl->StartVa >= KERNEL_BASE);
PageCount = PAGE_ROUND_UP(Mdl->ByteOffset + Mdl->ByteCount) / PAGE_SIZE;
MdlPages = (PPFN_TYPE)(Mdl + 1);

View file

@ -55,7 +55,7 @@ MmCopyToCaller(PVOID Dest, const VOID *Src, ULONG NumberOfBytes)
if (ExGetPreviousMode() == UserMode)
{
if ((ULONG)Dest >= KERNEL_BASE)
if ((ULONG_PTR)Dest >= KERNEL_BASE)
{
return(STATUS_ACCESS_VIOLATION);
}
@ -76,7 +76,7 @@ MmCopyFromCaller(PVOID Dest, const VOID *Src, ULONG NumberOfBytes)
if (ExGetPreviousMode() == UserMode)
{
if ((ULONG)Src >= KERNEL_BASE)
if ((ULONG_PTR)Src >= KERNEL_BASE)
{
return(STATUS_ACCESS_VIOLATION);
}
@ -177,7 +177,7 @@ BOOLEAN STDCALL MmIsAddressValid(PVOID VirtualAddress)
MEMORY_AREA* MemoryArea;
PMADDRESS_SPACE AddressSpace;
if ((ULONG)VirtualAddress >= KERNEL_BASE)
if ((ULONG_PTR)VirtualAddress >= KERNEL_BASE)
{
AddressSpace = MmGetKernelAddressSpace();
}
@ -200,7 +200,7 @@ BOOLEAN STDCALL MmIsAddressValid(PVOID VirtualAddress)
}
NTSTATUS MmAccessFault(KPROCESSOR_MODE Mode,
ULONG Address, /* FiN TODO: Should be ULONG_PTR! */
ULONG_PTR Address,
BOOLEAN FromMdl)
{
PMADDRESS_SPACE AddressSpace;
@ -322,7 +322,7 @@ NTSTATUS MmCommitPagedPoolAddress(PVOID Address, BOOLEAN Locked)
}
NTSTATUS MmNotPresentFault(KPROCESSOR_MODE Mode,
ULONG Address, /* FiN TODO: Should be ULONG_PTR! */
ULONG_PTR Address,
BOOLEAN FromMdl)
{
PMADDRESS_SPACE AddressSpace;

View file

@ -72,7 +72,7 @@ VOID MiShutdownMemoryManager(VOID)
{}
VOID INIT_FUNCTION
MmInitVirtualMemory(ULONG LastKernelAddress,
MmInitVirtualMemory(ULONG_PTR LastKernelAddress,
ULONG KernelLength)
/*
* FUNCTION: Initialize the memory areas list
@ -133,7 +133,7 @@ MmInitVirtualMemory(ULONG LastKernelAddress,
BoundaryAddressMultiple);
BaseAddress = (PVOID)KERNEL_BASE;
Length = PAGE_ROUND_UP(((ULONG)&_text_end__)) - KERNEL_BASE;
Length = PAGE_ROUND_UP(((ULONG_PTR)&_text_end__)) - KERNEL_BASE;
ParamLength = ParamLength - Length;
/*
@ -151,10 +151,10 @@ MmInitVirtualMemory(ULONG LastKernelAddress,
FALSE,
BoundaryAddressMultiple);
BaseAddress = (PVOID)PAGE_ROUND_UP(((ULONG)&_text_end__));
BaseAddress = (PVOID)PAGE_ROUND_UP(((ULONG_PTR)&_text_end__));
ASSERT(BaseAddress == (PVOID)&_init_start__);
Length = PAGE_ROUND_UP(((ULONG)&_init_end__)) -
PAGE_ROUND_UP(((ULONG)&_text_end__));
Length = PAGE_ROUND_UP(((ULONG_PTR)&_init_end__)) -
PAGE_ROUND_UP(((ULONG_PTR)&_text_end__));
ParamLength = ParamLength - Length;
MmCreateMemoryArea(NULL,
@ -168,11 +168,11 @@ MmInitVirtualMemory(ULONG LastKernelAddress,
FALSE,
BoundaryAddressMultiple);
Length = PAGE_ROUND_UP(((ULONG)&_bss_end__)) -
PAGE_ROUND_UP(((ULONG)&_init_end__));
Length = PAGE_ROUND_UP(((ULONG_PTR)&_bss_end__)) -
PAGE_ROUND_UP(((ULONG_PTR)&_init_end__));
ParamLength = ParamLength - Length;
DPRINT("Length %x\n",Length);
BaseAddress = (PVOID)PAGE_ROUND_UP(((ULONG)&_init_end__));
BaseAddress = (PVOID)PAGE_ROUND_UP(((ULONG_PTR)&_init_end__));
DPRINT("BaseAddress %x\n",BaseAddress);
/*
@ -190,8 +190,8 @@ MmInitVirtualMemory(ULONG LastKernelAddress,
FALSE,
BoundaryAddressMultiple);
BaseAddress = (PVOID)PAGE_ROUND_UP(((ULONG)&_bss_end__));
Length = LastKernelAddress - (ULONG)BaseAddress;
BaseAddress = (PVOID)PAGE_ROUND_UP(((ULONG_PTR)&_bss_end__));
Length = LastKernelAddress - (ULONG_PTR)BaseAddress;
MmCreateMemoryArea(NULL,
MmGetKernelAddressSpace(),
MEMORY_AREA_SYSTEM,
@ -277,9 +277,9 @@ MmInitVirtualMemory(ULONG LastKernelAddress,
}
VOID INIT_FUNCTION
MmInit1(ULONG FirstKrnlPhysAddr,
ULONG LastKrnlPhysAddr,
ULONG LastKernelAddress,
MmInit1(ULONG_PTR FirstKrnlPhysAddr,
ULONG_PTR LastKrnlPhysAddr,
ULONG_PTR LastKernelAddress,
PADDRESS_RANGE BIOSMemoryMap,
ULONG AddressRangeCount,
ULONG MaxMem)
@ -289,12 +289,13 @@ MmInit1(ULONG FirstKrnlPhysAddr,
{
ULONG i;
ULONG kernel_len;
ULONG_PTR MappingAddress;
#ifndef CONFIG_SMP
extern unsigned int unmap_me, unmap_me2, unmap_me3;
#endif
extern unsigned int pagetable_start, pagetable_end;
DPRINT("MmInit1(FirstKrnlPhysAddr, %x, LastKrnlPhysAddr %x, LastKernelAddress %x)\n",
DPRINT("MmInit1(FirstKrnlPhysAddr, %p, LastKrnlPhysAddr %p, LastKernelAddress %p)\n",
FirstKrnlPhysAddr,
LastKrnlPhysAddr,
LastKernelAddress);
@ -383,8 +384,9 @@ MmInit1(ULONG FirstKrnlPhysAddr,
DbgPrint("Used memory %dKb\n", (MmStats.NrTotalPages * PAGE_SIZE) / 1024);
LastKernelAddress = (ULONG)MmInitializePageList((PVOID)FirstKrnlPhysAddr,
(PVOID)LastKrnlPhysAddr,
LastKernelAddress = (ULONG_PTR)MmInitializePageList(
FirstKrnlPhysAddr,
LastKrnlPhysAddr,
MmStats.NrTotalPages,
PAGE_ROUND_UP(LastKernelAddress),
BIOSMemoryMap,
@ -405,15 +407,18 @@ MmInit1(ULONG FirstKrnlPhysAddr,
DPRINT("Invalidating between %x and %x\n",
LastKernelAddress, KERNEL_BASE + 0x00600000);
for (i=(LastKernelAddress); i<KERNEL_BASE + 0x00600000; i+=PAGE_SIZE)
for (MappingAddress = LastKernelAddress;
MappingAddress < KERNEL_BASE + 0x00600000;
MappingAddress += PAGE_SIZE)
{
MmRawDeleteVirtualMapping((PVOID)(i));
MmRawDeleteVirtualMapping((PVOID)MappingAddress);
}
extern unsigned int pagetable_start, pagetable_end;
for (i = (ULONG_PTR)&pagetable_start; i < (ULONG_PTR)&pagetable_end; i += PAGE_SIZE)
for (MappingAddress = (ULONG_PTR)&pagetable_start;
MappingAddress < (ULONG_PTR)&pagetable_end;
MappingAddress += PAGE_SIZE)
{
MmDeleteVirtualMapping(NULL, (PVOID)i, FALSE, NULL, NULL);
MmDeleteVirtualMapping(NULL, (PVOID)MappingAddress, FALSE, NULL, NULL);
}
DPRINT("Almost done MmInit()\n");

View file

@ -1235,11 +1235,11 @@ grow_block(BLOCK_HDR* blk, PVOID end)
{
NTSTATUS Status;
PFN_TYPE Page[32];
ULONG StartIndex, EndIndex;
ULONG_PTR StartIndex, EndIndex;
ULONG i, j, k;
StartIndex = (ULONG)((char*)(PVOID)PAGE_ROUND_UP((ULONG)((char*)blk + BLOCK_HDR_SIZE)) - (char*)MiNonPagedPoolStart) / PAGE_SIZE;
EndIndex = (ULONG)((char*)PAGE_ROUND_UP(end) - (char*)MiNonPagedPoolStart) / PAGE_SIZE;
StartIndex = (ULONG_PTR)(PAGE_ROUND_UP((ULONG_PTR)blk + BLOCK_HDR_SIZE - (ULONG_PTR)MiNonPagedPoolStart)) / PAGE_SIZE;
EndIndex = ((ULONG_PTR)PAGE_ROUND_UP(end) - (ULONG_PTR)MiNonPagedPoolStart) / PAGE_SIZE;
for (i = StartIndex; i < EndIndex; i++)
@ -1266,7 +1266,7 @@ grow_block(BLOCK_HDR* blk, PVOID end)
}
}
Status = MmCreateVirtualMapping(NULL,
MiNonPagedPoolStart + i * PAGE_SIZE,
(PVOID)((ULONG_PTR)MiNonPagedPoolStart + i * PAGE_SIZE),
PAGE_READWRITE|PAGE_SYSTEM,
Page,
k);
@ -1333,12 +1333,12 @@ static BLOCK_HDR* get_block(unsigned int size, unsigned long alignment)
* from alignment. If not, calculate forward to the next alignment
* and see if we allocate there...
*/
new_size = (ULONG)aligned_addr - (ULONG)addr + size;
if ((ULONG)aligned_addr - (ULONG)addr < BLOCK_HDR_SIZE)
new_size = (ULONG_PTR)aligned_addr - (ULONG_PTR)addr + size;
if ((ULONG_PTR)aligned_addr - (ULONG_PTR)addr < BLOCK_HDR_SIZE)
{
/* not enough room for a free block header, add some more bytes */
aligned_addr = MM_ROUND_UP(block_to_address((BLOCK_HDR*)((char*)current + BLOCK_HDR_SIZE)), alignment);
new_size = (ULONG)aligned_addr - (ULONG)addr + size;
new_size = (ULONG_PTR)aligned_addr - (ULONG_PTR)addr + size;
}
if (current->Size >= new_size &&
(best == NULL || current->Size < best->Size))
@ -1382,9 +1382,9 @@ static BLOCK_HDR* get_block(unsigned int size, unsigned long alignment)
* if size-aligned, break off the preceding bytes into their own block...
*/
previous = current;
previous_size = (ULONG)blk - (ULONG)previous - BLOCK_HDR_SIZE;
previous_size = (ULONG_PTR)blk - (ULONG_PTR)previous - BLOCK_HDR_SIZE;
current = blk;
current_size -= ((ULONG)current - (ULONG)previous);
current_size -= ((ULONG_PTR)current - (ULONG_PTR)previous);
}
}
@ -1393,7 +1393,7 @@ static BLOCK_HDR* get_block(unsigned int size, unsigned long alignment)
if (current_size >= size + BLOCK_HDR_SIZE + MM_POOL_ALIGNMENT)
{
/* create a new free block after our block, if the memory size is >= 4 byte for this block */
next = (BLOCK_HDR*)((ULONG)current + size + BLOCK_HDR_SIZE);
next = (BLOCK_HDR*)((ULONG_PTR)current + size + BLOCK_HDR_SIZE);
next_size = current_size - size - BLOCK_HDR_SIZE;
current_size = size;
end = (char*)next + BLOCK_HDR_SIZE;
@ -1828,9 +1828,9 @@ MiInitializeNonPagedPool(VOID)
/* the second block is the first free block */
blk = (BLOCK_HDR*)((char*)blk + BLOCK_HDR_SIZE + blk->Size);
memset(blk, 0, BLOCK_HDR_SIZE);
memset((char*)blk + BLOCK_HDR_SIZE, 0x0cc, MiNonPagedPoolNrOfPages * PAGE_SIZE - ((ULONG)blk + BLOCK_HDR_SIZE - (ULONG)MiNonPagedPoolStart));
memset((char*)blk + BLOCK_HDR_SIZE, 0x0cc, MiNonPagedPoolNrOfPages * PAGE_SIZE - ((ULONG_PTR)blk + BLOCK_HDR_SIZE - (ULONG_PTR)MiNonPagedPoolStart));
blk->Magic = BLOCK_HDR_FREE_MAGIC;
blk->Size = MiNonPagedPoolLength - ((ULONG)blk + BLOCK_HDR_SIZE - (ULONG)MiNonPagedPoolStart);
blk->Size = MiNonPagedPoolLength - ((ULONG_PTR)blk + BLOCK_HDR_SIZE - (ULONG_PTR)MiNonPagedPoolStart);
blk->previous = (BLOCK_HDR*)MiNonPagedPoolStart;
add_to_free_list(blk);
#endif

View file

@ -532,7 +532,7 @@ MmDumpToPagingFile(ULONG BugCode,
Headers->BugCheckParameters[3] = BugCodeParameter4;
Headers->FaultingStackBase = (PVOID)Thread->Tcb.StackLimit;
Headers->FaultingStackSize =
StackSize = (ULONG_PTR)(Thread->Tcb.StackBase - Thread->Tcb.StackLimit);
StackSize = (ULONG_PTR)Thread->Tcb.StackBase - (ULONG_PTR)Thread->Tcb.StackLimit;
Headers->PhysicalMemorySize = MmStats.NrTotalPages * PAGE_SIZE;
/* Initialize the dump device. */

View file

@ -70,7 +70,7 @@ PMM_PAGEOP
MmCheckForPageOp(PMEMORY_AREA MArea, ULONG Pid, PVOID Address,
PMM_SECTION_SEGMENT Segment, ULONG Offset)
{
ULONG Hash;
ULONG_PTR Hash;
KIRQL oldIrql;
PMM_PAGEOP PageOp;
@ -79,11 +79,11 @@ MmCheckForPageOp(PMEMORY_AREA MArea, ULONG Pid, PVOID Address,
*/
if (MArea->Type == MEMORY_AREA_SECTION_VIEW)
{
Hash = (((ULONG)Segment) | (((ULONG)Offset) / PAGE_SIZE));
Hash = (((ULONG_PTR)Segment) | (((ULONG_PTR)Offset) / PAGE_SIZE));
}
else
{
Hash = (((ULONG)Pid) | (((ULONG)Address) / PAGE_SIZE));
Hash = (((ULONG_PTR)Pid) | (((ULONG_PTR)Address) / PAGE_SIZE));
}
Hash = Hash % PAGEOP_HASH_TABLE_SIZE;
@ -137,7 +137,7 @@ MmGetPageOp(PMEMORY_AREA MArea, ULONG Pid, PVOID Address,
* pid, address pair.
*/
{
ULONG Hash;
ULONG_PTR Hash;
KIRQL oldIrql;
PMM_PAGEOP PageOp;
@ -146,11 +146,11 @@ MmGetPageOp(PMEMORY_AREA MArea, ULONG Pid, PVOID Address,
*/
if (MArea->Type == MEMORY_AREA_SECTION_VIEW)
{
Hash = (((ULONG)Segment) | (((ULONG)Offset) / PAGE_SIZE));
Hash = (((ULONG_PTR)Segment) | (((ULONG_PTR)Offset) / PAGE_SIZE));
}
else
{
Hash = (((ULONG)Pid) | (((ULONG)Address) / PAGE_SIZE));
Hash = (((ULONG_PTR)Pid) | (((ULONG_PTR)Address) / PAGE_SIZE));
}
Hash = Hash % PAGEOP_HASH_TABLE_SIZE;

View file

@ -3265,8 +3265,8 @@ MmCreateImageSection(PSECTION_OBJECT *SectionObject,
return(Status);
}
if (0 != InterlockedCompareExchangeUL(&FileObject->SectionObjectPointer->ImageSectionObject,
ImageSectionObject, 0))
if (NULL != InterlockedCompareExchangePointer(&FileObject->SectionObjectPointer->ImageSectionObject,
ImageSectionObject, NULL))
{
/*
* Another thread has initialized the same image in the background
@ -4089,7 +4089,7 @@ MmAllocateSection (IN ULONG Length, PVOID BaseAddress)
KEBUGCHECK(0);
}
Status = MmCreateVirtualMapping (NULL,
(PVOID)(Result + (i * PAGE_SIZE)),
(PVOID)((ULONG_PTR)Result + (i * PAGE_SIZE)),
PAGE_READWRITE,
&Page,
1);
@ -4201,9 +4201,9 @@ MmMapViewOfSection(IN PVOID SectionObject,
{
if (!(SectionSegments[i].Characteristics & IMAGE_SCN_TYPE_NOLOAD))
{
ULONG MaxExtent;
MaxExtent = (ULONG)((char*)SectionSegments[i].VirtualAddress +
SectionSegments[i].Length);
ULONG_PTR MaxExtent;
MaxExtent = (ULONG_PTR)SectionSegments[i].VirtualAddress +
SectionSegments[i].Length;
ImageSize = max(ImageSize, MaxExtent);
}
}