[VCRUNTIME] Move compiler runtime headers into their own folder

These headers ship with the MS compiler. Some of them are standard, like emmintrin.h; others are MS specific, like crtdefs.h.
This separation will allow using the MS CRT headers. Eventually it can also allow compiling with the compiler's own runtime headers.
Timo Kreuzer 2024-05-11 08:11:36 +03:00
parent 7b2bb7ecc8
commit 84344399b5
39 changed files with 2 additions and 1 deletion

@@ -0,0 +1,420 @@
#ifdef __cplusplus
extern "C" {
#endif
/*** Stack frame juggling ***/
#pragma intrinsic(_ReturnAddress)
#pragma intrinsic(_AddressOfReturnAddress)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__getcallerseflags)
#endif
/*** Memory barriers ***/
#pragma intrinsic(_ReadWriteBarrier)
#pragma intrinsic(_ReadBarrier)
#pragma intrinsic(_WriteBarrier)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(_mm_mfence)
#pragma intrinsic(_mm_lfence)
#pragma intrinsic(_mm_sfence)
#endif
#if defined(_M_AMD64)
#pragma intrinsic(__faststorefence)
#elif defined(_M_ARM)
#pragma intrinsic(__iso_volatile_load16)
#pragma intrinsic(__iso_volatile_load32)
#pragma intrinsic(__iso_volatile_load64)
#pragma intrinsic(__iso_volatile_load8)
#pragma intrinsic(__iso_volatile_store16)
#pragma intrinsic(__iso_volatile_store32)
#pragma intrinsic(__iso_volatile_store64)
#pragma intrinsic(__iso_volatile_store8)
#endif
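/* Usage sketch (illustrative, not part of the original header):
 * _ReadWriteBarrier() is a compiler-only barrier - it keeps the compiler
 * from reordering memory accesses across it but emits no instruction,
 * while _mm_sfence()/_mm_mfence() emit real fence instructions on
 * x86/x64. A hypothetical publish pattern: */
#if 0 /* illustration only */
static int g_payload;
static volatile long g_ready;

static void publish(int value)
{
    g_payload = value;
    _ReadWriteBarrier(); /* the compiler may not reorder past this... */
    _mm_sfence();        /* ...and the store becomes visible before the flag */
    g_ready = 1;
}
#endif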
/*** Atomic operations ***/
#pragma intrinsic(_InterlockedCompareExchange)
#pragma intrinsic(_InterlockedCompareExchange8)
#pragma intrinsic(_InterlockedCompareExchange16)
#pragma intrinsic(_InterlockedCompareExchange64)
#pragma intrinsic(_InterlockedExchange)
#pragma intrinsic(_InterlockedExchange8)
#pragma intrinsic(_InterlockedExchange16)
#pragma intrinsic(_InterlockedExchangeAdd)
#pragma intrinsic(_InterlockedExchangeAdd8)
#pragma intrinsic(_InterlockedExchangeAdd16)
#pragma intrinsic(_InterlockedAnd8)
#pragma intrinsic(_InterlockedAnd16)
#pragma intrinsic(_InterlockedAnd)
#pragma intrinsic(_InterlockedOr8)
#pragma intrinsic(_InterlockedOr16)
#pragma intrinsic(_InterlockedOr)
#pragma intrinsic(_InterlockedXor8)
#pragma intrinsic(_InterlockedXor16)
#pragma intrinsic(_InterlockedXor)
#pragma intrinsic(_InterlockedDecrement)
#pragma intrinsic(_InterlockedIncrement)
#pragma intrinsic(_InterlockedDecrement16)
#pragma intrinsic(_InterlockedIncrement16)
#pragma intrinsic(_interlockedbittestandreset)
#pragma intrinsic(_interlockedbittestandset)
#if defined(_M_IX86)
#pragma intrinsic(_InterlockedAddLargeStatistic)
#elif defined(_M_AMD64)
#pragma intrinsic(_InterlockedExchange64)
#pragma intrinsic(_InterlockedExchangeAdd64)
#pragma intrinsic(_InterlockedCompareExchangePointer)
#pragma intrinsic(_InterlockedExchangePointer)
#pragma intrinsic(_InterlockedCompareExchange128)
#pragma intrinsic(_InterlockedAnd64)
#pragma intrinsic(_InterlockedOr64)
#pragma intrinsic(_InterlockedDecrement64)
#pragma intrinsic(_InterlockedIncrement64)
#pragma intrinsic(_interlockedbittestandreset64)
#pragma intrinsic(_interlockedbittestandset64)
#pragma intrinsic(_InterlockedAnd_np)
#pragma intrinsic(_InterlockedAnd8_np)
#pragma intrinsic(_InterlockedAnd16_np)
#pragma intrinsic(_InterlockedAnd64_np)
#pragma intrinsic(_InterlockedCompareExchange16_np)
#pragma intrinsic(_InterlockedCompareExchange64_np)
#pragma intrinsic(_InterlockedCompareExchange128_np)
#pragma intrinsic(_InterlockedCompareExchangePointer_np)
#pragma intrinsic(_InterlockedCompareExchange_np)
#pragma intrinsic(_InterlockedOr16_np)
#pragma intrinsic(_InterlockedOr8_np)
#pragma intrinsic(_InterlockedOr_np)
#pragma intrinsic(_InterlockedXor16_np)
#pragma intrinsic(_InterlockedXor64_np)
#pragma intrinsic(_InterlockedXor8_np)
#pragma intrinsic(_InterlockedXor_np)
#pragma intrinsic(_InterlockedOr64_np)
#elif defined(_M_ARM)
#endif
#if defined(_M_AMD64) || defined(_M_ARM)
#endif
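/* Usage sketch (illustrative, not part of the original header): a minimal
 * spinlock built on _InterlockedCompareExchange, which atomically stores 1
 * only when the lock word is currently 0 and returns the previous value. */
#if 0 /* illustration only */
static volatile long g_lock;

static void lock_acquire(void)
{
    while (_InterlockedCompareExchange(&g_lock, 1, 0) != 0)
        _mm_pause(); /* spin politely on x86/x64 */
}

static void lock_release(void)
{
    _InterlockedExchange(&g_lock, 0);
}
#endif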
/*** String operations ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__stosb)
#pragma intrinsic(__stosw)
#pragma intrinsic(__stosd)
#pragma intrinsic(__movsb)
#pragma intrinsic(__movsw)
#pragma intrinsic(__movsd)
#endif
#ifdef _M_AMD64
#pragma intrinsic(__stosq)
#pragma intrinsic(__movsq)
#endif
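/* Usage sketch (illustrative, not part of the original header): __stosb and
 * __movsb compile to REP STOSB / REP MOVSB and act as byte-wise
 * memset/memcpy; size_t is assumed to be in scope here. */
#if 0 /* illustration only */
static void zero_then_copy(unsigned char *dst, const unsigned char *src, size_t n)
{
    __stosb(dst, 0, n);   /* like memset(dst, 0, n) */
    __movsb(dst, src, n); /* like memcpy(dst, src, n); buffers must not overlap */
}
#endif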
/*** GS segment addressing ***/
#if defined(_M_AMD64)
#pragma intrinsic(__writegsbyte)
#pragma intrinsic(__writegsword)
#pragma intrinsic(__writegsdword)
#pragma intrinsic(__writegsqword)
#pragma intrinsic(__readgsbyte)
#pragma intrinsic(__readgsword)
#pragma intrinsic(__readgsdword)
#pragma intrinsic(__readgsqword)
#pragma intrinsic(__incgsbyte)
#pragma intrinsic(__incgsword)
#pragma intrinsic(__incgsdword)
#pragma intrinsic(__incgsqword)
#pragma intrinsic(__addgsbyte)
#pragma intrinsic(__addgsword)
#pragma intrinsic(__addgsdword)
#pragma intrinsic(__addgsqword)
#endif
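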
/*** FS segment addressing ***/
#if defined(_M_IX86)
#pragma intrinsic(__writefsbyte)
#pragma intrinsic(__writefsword)
#pragma intrinsic(__writefsdword)
#pragma intrinsic(__readfsbyte)
#pragma intrinsic(__readfsword)
#pragma intrinsic(__readfsdword)
#pragma intrinsic(__incfsbyte)
#pragma intrinsic(__incfsword)
#pragma intrinsic(__incfsdword)
#pragma intrinsic(__addfsbyte)
#pragma intrinsic(__addfsword)
#pragma intrinsic(__addfsdword)
#endif
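/* Usage sketch (illustrative, not part of the original header): the TEB is
 * addressed through FS on x86 and GS on x64; its self pointer sits at
 * offset 0x18 resp. 0x30. */
#if 0 /* illustration only */
static void *current_teb(void)
{
#if defined(_M_IX86)
    return (void *)__readfsdword(0x18);
#else /* _M_AMD64 */
    return (void *)__readgsqword(0x30);
#endif
}
#endif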
/*** Bit manipulation ***/
#pragma intrinsic(_BitScanForward)
#pragma intrinsic(_BitScanReverse)
#ifdef _WIN64
#pragma intrinsic(_BitScanForward64)
#pragma intrinsic(_BitScanReverse64)
#endif
#pragma intrinsic(_bittest)
#pragma intrinsic(_bittestandcomplement)
#pragma intrinsic(_bittestandreset)
#pragma intrinsic(_bittestandset)
#pragma intrinsic(_rotl8)
#pragma intrinsic(_rotl16)
#pragma intrinsic(_rotl)
#pragma intrinsic(_rotl64)
#pragma intrinsic(_lrotl)
#pragma intrinsic(_rotr8)
#pragma intrinsic(_rotr16)
#pragma intrinsic(_rotr)
#pragma intrinsic(_rotr64)
#pragma intrinsic(_lrotr)
#pragma intrinsic(_byteswap_ushort)
#pragma intrinsic(_byteswap_ulong)
#pragma intrinsic(_byteswap_uint64)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__ll_lshift)
#pragma intrinsic(__ll_rshift)
#pragma intrinsic(__ull_rshift)
#pragma intrinsic(__lzcnt)
#pragma intrinsic(__lzcnt16)
#pragma intrinsic(__popcnt)
#pragma intrinsic(__popcnt16)
#endif
#ifdef _M_AMD64
#pragma intrinsic(__shiftleft128)
#pragma intrinsic(__shiftright128)
#pragma intrinsic(_bittest64)
#pragma intrinsic(_bittestandcomplement64)
#pragma intrinsic(_bittestandreset64)
#pragma intrinsic(_bittestandset64)
#pragma intrinsic(__lzcnt64)
#pragma intrinsic(__popcnt64)
#elif defined(_M_ARM)
#endif
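/* Usage sketch (illustrative, not part of the original header):
 * _BitScanForward writes the index of the lowest set bit and returns 0
 * when no bit is set, so the return value must be checked before the
 * index is used. */
#if 0 /* illustration only */
static int lowest_set_bit(unsigned long mask)
{
    unsigned long index;
    if (!_BitScanForward(&index, mask))
        return -1; /* mask was zero */
    return (int)index;
}
#endif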
/*** 64/128-bit math ***/
#pragma intrinsic(_abs64)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__emul)
#pragma intrinsic(__emulu)
#endif
#ifdef _M_AMD64
#pragma intrinsic(__mulh)
#pragma intrinsic(__umulh)
#pragma intrinsic(_mul128)
#pragma intrinsic(_umul128)
#elif defined(_M_ARM)
#pragma intrinsic(_MulHigh)
#pragma intrinsic(_MulUnsignedHigh)
#endif
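/* Usage sketch (illustrative, not part of the original header): __emulu
 * yields the full 64-bit product of two 32-bit operands in a single MUL,
 * where a plain 32-bit multiply would truncate. */
#if 0 /* illustration only */
static unsigned __int64 full_product(unsigned int a, unsigned int b)
{
    return __emulu(a, b); /* 32x32 -> 64 without truncation */
}
#endif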
/** Floating point stuff **/
#if defined(_M_ARM)
#pragma intrinsic(_isunordered)
#pragma intrinsic(_isunorderedf)
#pragma intrinsic(_CopyDoubleFromInt64)
#pragma intrinsic(_CopyFloatFromInt32)
#pragma intrinsic(_CopyInt32FromFloat)
#pragma intrinsic(_CopyInt64FromDouble)
#endif
/*** Port I/O ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__inbyte)
#pragma intrinsic(__inword)
#pragma intrinsic(__indword)
#pragma intrinsic(__inbytestring)
#pragma intrinsic(__inwordstring)
#pragma intrinsic(__indwordstring)
#pragma intrinsic(__outbyte)
#pragma intrinsic(__outword)
#pragma intrinsic(__outdword)
#pragma intrinsic(__outbytestring)
#pragma intrinsic(__outwordstring)
#pragma intrinsic(__outdwordstring)
#pragma intrinsic(_inp)
#pragma intrinsic(_inpd)
#pragma intrinsic(_inpw)
#pragma intrinsic(inp)
#pragma intrinsic(inpd)
#pragma intrinsic(inpw)
#pragma intrinsic(_outp)
#pragma intrinsic(_outpd)
#pragma intrinsic(_outpw)
#pragma intrinsic(outp)
#pragma intrinsic(outpd)
#pragma intrinsic(outpw)
#endif
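/* Usage sketch (illustrative, not part of the original header): reading a
 * CMOS/RTC register by writing its number to index port 0x70 and reading
 * data port 0x71; requires ring 0. */
#if 0 /* illustration only */
static unsigned char cmos_read(unsigned char reg)
{
    __outbyte(0x70, reg);
    return __inbyte(0x71);
}
#endif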
/*** System information ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__cpuid)
#pragma intrinsic(__cpuidex)
#pragma intrinsic(__rdtsc)
#pragma intrinsic(__rdtscp)
#pragma intrinsic(__writeeflags)
#pragma intrinsic(__readeflags)
#endif
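/* Usage sketch (illustrative, not part of the original header): CPUID
 * leaf 0 returns the 12-byte vendor string split across EBX, EDX and ECX,
 * in that order. */
#if 0 /* illustration only */
static void cpu_vendor(char vendor[13])
{
    int regs[4]; /* EAX, EBX, ECX, EDX */
    __cpuid(regs, 0);
    ((int *)vendor)[0] = regs[1]; /* EBX: "Genu" / "Auth" / ... */
    ((int *)vendor)[1] = regs[3]; /* EDX */
    ((int *)vendor)[2] = regs[2]; /* ECX */
    vendor[12] = '\0';
}
#endif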
/*** Interrupts and traps ***/
#pragma intrinsic(__debugbreak)
#pragma intrinsic(_disable)
#pragma intrinsic(_enable)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__int2c)
#pragma intrinsic(__halt)
#pragma intrinsic(__ud2)
#if (_MSC_VER >= 1700)
#pragma intrinsic(__fastfail)
#else
#if defined(_M_IX86)
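/* Fallback for compilers without the intrinsic: raise INT 29h, the
   Windows fast-fail interrupt, with the fail code in ecx, matching what
   the real intrinsic emits. */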
__declspec(noreturn) __forceinline
void __fastfail(unsigned int Code)
{
__asm
{
mov ecx, Code
int 29h
}
}
#else
void __fastfail(unsigned int Code);
#endif // defined(_M_IX86)
#endif
#endif
#if defined(_M_ARM)
#endif
/*** Protected memory management ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__writecr0)
#pragma intrinsic(__writecr3)
#pragma intrinsic(__writecr4)
#pragma intrinsic(__writecr8)
#endif
#if defined(_M_IX86)
#pragma intrinsic(__readcr0)
#pragma intrinsic(__readcr2)
#pragma intrinsic(__readcr3)
//#pragma intrinsic(__readcr4)
// HACK: MSVC is broken
unsigned long __cdecl ___readcr4(void);
#define __readcr4 ___readcr4
#pragma intrinsic(__readcr8)
#pragma intrinsic(__readdr)
#pragma intrinsic(__writedr)
// This intrinsic is broken and generates wrong opcodes
// when optimization is enabled!
#pragma warning(push)
#pragma warning(disable:4711)
void __forceinline __invlpg_fixed(void * Address)
{
_ReadWriteBarrier();
__asm
{
mov eax, Address
invlpg [eax]
}
_ReadWriteBarrier();
}
#pragma warning(pop)
#define __invlpg __invlpg_fixed
#elif defined(_M_AMD64)
#pragma intrinsic(__invlpg)
#pragma intrinsic(__readcr0)
#pragma intrinsic(__readcr2)
#pragma intrinsic(__readcr3)
#pragma intrinsic(__readcr4)
#pragma intrinsic(__readcr8)
#pragma intrinsic(__readdr)
#pragma intrinsic(__writedr)
#elif defined(_M_ARM)
#pragma intrinsic(__prefetch)
#endif
/*** System operations ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__readmsr)
#pragma intrinsic(__writemsr)
#pragma intrinsic(__readpmc)
#pragma intrinsic(__segmentlimit)
#pragma intrinsic(__wbinvd)
#pragma intrinsic(__lidt)
#pragma intrinsic(__sidt)
#if (_MSC_VER >= 1800)
#pragma intrinsic(_sgdt)
#else
#if defined(_M_IX86)
__forceinline
void _sgdt(void *Destination)
{
__asm
{
mov eax, Destination
sgdt [eax]
}
}
#else
void _sgdt(void *Destination);
#endif // defined(_M_IX86)
#endif
#pragma intrinsic(_mm_pause)
#endif
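/* Usage sketch (illustrative, not part of the original header):
 * __readmsr/__writemsr access model-specific registers at CPL 0;
 * IA32_APIC_BASE (MSR 0x1B) carries the local APIC base plus flag bits in
 * its low 12 bits. */
#if 0 /* illustration only */
static unsigned __int64 apic_base(void)
{
    return __readmsr(0x1B) & ~0xFFFULL; /* mask off the flag bits */
}
#endif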
#if defined(_M_ARM)
#pragma intrinsic(_MoveFromCoprocessor)
#pragma intrinsic(_MoveFromCoprocessor2)
#pragma intrinsic(_MoveFromCoprocessor64)
#pragma intrinsic(_MoveToCoprocessor)
#pragma intrinsic(_MoveToCoprocessor2)
#pragma intrinsic(_MoveToCoprocessor64)
#pragma intrinsic(_ReadStatusReg)
#pragma intrinsic(_WriteStatusReg)
#pragma intrinsic(__yield)
#pragma intrinsic(__wfe)
#pragma intrinsic(__wfi)
#pragma intrinsic(__swi)
#pragma intrinsic(__hvc)
#pragma intrinsic(__ldrexd)
#pragma intrinsic(__rdpmccntr64)
#pragma intrinsic(__sev)
#endif
/** Secure virtual machine **/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__svm_clgi)
#pragma intrinsic(__svm_invlpga)
#pragma intrinsic(__svm_skinit)
#pragma intrinsic(__svm_stgi)
#pragma intrinsic(__svm_vmload)
#pragma intrinsic(__svm_vmrun)
#pragma intrinsic(__svm_vmsave)
#endif
/** Virtual machine extension **/
#if defined(_M_IX86) || defined(_M_AMD64)
#endif
#if defined(_M_AMD64)
#endif
/** Misc **/
#pragma intrinsic(__nop)
#if (_MSC_VER >= 1700)
#pragma intrinsic(__code_seg)
#endif
#ifdef _M_ARM
#pragma intrinsic(_AddSatInt)
#pragma intrinsic(_DAddSatInt)
#pragma intrinsic(_DSubSatInt)
#pragma intrinsic(_SubSatInt)
#pragma intrinsic(__emit)
#pragma intrinsic(__static_assert)
#endif
#ifdef __cplusplus
}
#endif
/* EOF */

@@ -0,0 +1,44 @@
#ifndef _FLOAT_H___
#define _FLOAT_H___
#define FLT_RADIX 2
#define FLT_MANT_DIG 24
#define DBL_MANT_DIG 53
#define LDBL_MANT_DIG DBL_MANT_DIG
#define FLT_DIG 6
#define DBL_DIG 15
#define LDBL_DIG 15
#define FLT_MIN_EXP (-125)
#define DBL_MIN_EXP (-1021)
#define LDBL_MIN_EXP (-1021)
#define FLT_MIN_10_EXP (-37)
#define DBL_MIN_10_EXP (-307)
#define LDBL_MIN_10_EXP (-307)
#define FLT_MAX_EXP 128
#define DBL_MAX_EXP 1024
#define LDBL_MAX_EXP 1024
#define FLT_MAX_10_EXP 38
#define DBL_MAX_10_EXP 308
#define LDBL_MAX_10_EXP 308
#define FLT_MAX 3.402823466e+38F
#define DBL_MAX 1.7976931348623158e+308
#define LDBL_MAX 1.7976931348623158e+308
#define FLT_EPSILON 1.192092896e-07F
#define DBL_EPSILON 2.2204460492503131e-016
#define LDBL_EPSILON 2.2204460492503131e-016
#define FLT_MIN 1.175494351e-38F
#define DBL_MIN 2.2250738585072014e-308
#define LDBL_MIN 2.2250738585072014e-308
#define FLT_ROUNDS 1
#endif
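/* Usage sketch (illustrative, not part of the original header): FLT_EPSILON
   is the gap between 1.0f and the next representable float, which makes it
   a unit for relative rather than absolute comparisons. */
#if 0 /* illustration only */
static int nearly_equal(float a, float b)
{
    float diff = (a > b) ? a - b : b - a;
    float aa = (a < 0.0f) ? -a : a;
    float ab = (b < 0.0f) ? -b : b;
    float largest = (aa > ab) ? aa : ab;
    return diff <= largest * 4.0f * FLT_EPSILON; /* a few ULPs of tolerance */
}
#endif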