[CRT/INTRIN_X86]

* Skip some intrinsics when compiling with Clang.

svn path=/trunk/; revision=64062
Commit: 6327d11e06
Parent: 85304acb22
Author: Amine Khaldi
Date:   2014-09-07 17:09:34 +00:00

File: intrin_x86.h

@@ -190,17 +190,19 @@ __INTRIN_INLINE short _InterlockedCompareExchange16(volatile short * const Desti
 }
 
 #ifndef __clang__
+
 __INTRIN_INLINE long _InterlockedCompareExchange(volatile long * const Destination, const long Exchange, const long Comperand)
 {
     return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
 }
 
+#endif
+
 __INTRIN_INLINE void * _InterlockedCompareExchangePointer(void * volatile * const Destination, void * const Exchange, void * const Comperand)
 {
     return (void *)__sync_val_compare_and_swap(Destination, Comperand, Exchange);
 }
-#endif
 
 __INTRIN_INLINE char _InterlockedExchange8(volatile char * const Target, const char Value)
 {
     /* NOTE: __sync_lock_test_and_set would be an acquire barrier, so we force a full barrier */
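
For context (an annotation, not part of the commit): the guard is narrowed here presumably because Clang already provides some of the Interlocked* routines as compiler builtins, which is what the commit title suggests, so an inline redefinition would collide with the builtin declarations. The mapping itself is direct: __sync_val_compare_and_swap stores the new value only when the destination still holds the comperand, and always returns the prior value, which is exactly the _InterlockedCompareExchange contract. A minimal standalone sketch of that behavior, assuming a GCC-compatible compiler (the variable names are illustrative, not from the commit):

#include <stdio.h>

static volatile long guard_word = 0;

int main(void)
{
    /* First CAS succeeds: the old value 0 matches the comperand 0. */
    long old = __sync_val_compare_and_swap(&guard_word, 0, 1);
    printf("CAS #1 returned %ld, guard_word is now %ld\n", old, guard_word);

    /* Second CAS fails: guard_word is 1, so nothing is stored. */
    old = __sync_val_compare_and_swap(&guard_word, 0, 2);
    printf("CAS #2 returned %ld, guard_word is still %ld\n", old, guard_word);
    return 0;
}
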
@@ -215,6 +217,8 @@ __INTRIN_INLINE short _InterlockedExchange16(volatile short * const Target, cons
     return __sync_lock_test_and_set(Target, Value);
 }
 
+#ifndef __clang__
+
 __INTRIN_INLINE long _InterlockedExchange(volatile long * const Target, const long Value)
 {
     /* NOTE: __sync_lock_test_and_set would be an acquire barrier, so we force a full barrier */
@@ -222,6 +226,15 @@ __INTRIN_INLINE long _InterlockedExchange(volatile long * const Target, const lo
     return __sync_lock_test_and_set(Target, Value);
 }
 
+__INTRIN_INLINE void * _InterlockedExchangePointer(void * volatile * const Target, void * const Value)
+{
+    /* NOTE: __sync_lock_test_and_set would be an acquire barrier, so we force a full barrier */
+    __sync_synchronize();
+    return (void *)__sync_lock_test_and_set(Target, Value);
+}
+
+#endif
+
 #if defined(_M_AMD64)
 __INTRIN_INLINE long long _InterlockedExchange64(volatile long long * const Target, const long long Value)
 {
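
The NOTE carried along with the moved function is the key detail here: __sync_lock_test_and_set is documented by GCC as an acquire barrier only, while the Win32 Interlocked* family implies a full barrier, so a __sync_synchronize() is issued first. A small self-contained sketch of the same pattern, again assuming GCC-style __sync builtins (the helper name full_barrier_exchange is mine, not the commit's):

#include <stdio.h>

static void * volatile shared_slot = NULL;

/* Mirror of the pattern in the diff: force a full memory barrier, then
 * perform the atomic swap, which by itself only guarantees acquire
 * semantics. */
static void *full_barrier_exchange(void * volatile *target, void *value)
{
    __sync_synchronize();
    return (void *)__sync_lock_test_and_set(target, value);
}

int main(void)
{
    static int payload = 42;
    void *previous = full_barrier_exchange(&shared_slot, &payload);
    printf("previous slot value: %p\n", previous); /* NULL on the first swap */
    return 0;
}
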
@@ -231,13 +244,6 @@ __INTRIN_INLINE long long _InterlockedExchange64(volatile long long * const Targ
 }
 #endif
 
-__INTRIN_INLINE void * _InterlockedExchangePointer(void * volatile * const Target, void * const Value)
-{
-    /* NOTE: __sync_lock_test_and_set would be an acquire barrier, so we force a full barrier */
-    __sync_synchronize();
-    return (void *)__sync_lock_test_and_set(Target, Value);
-}
-
 __INTRIN_INLINE long _InterlockedExchangeAdd16(volatile short * const Addend, const short Value)
 {
     return __sync_fetch_and_add(Addend, Value);
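
The surviving context in this last hunk shows the same builtin-mapping style for exchange-add: __sync_fetch_and_add performs the addition atomically and returns the addend's value from before the addition, matching what _InterlockedExchangeAdd16 is specified to return. A short sketch under the same GCC-builtin assumption:

#include <stdio.h>

static volatile short counter = 10;

int main(void)
{
    /* Atomically add 5; the return value is the counter *before* the add. */
    short before = __sync_fetch_and_add(&counter, 5);
    printf("before=%hd after=%hd\n", before, counter); /* before=10 after=15 */
    return 0;
}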