#ifndef __ATOMIC_OPS_MSVC_H__
#define __ATOMIC_OPS_MSVC_H__

#include "atomic_ops_utils.h" /* Assumed companion header providing ATOMIC_INLINE and LG_SIZEOF_*. */

#define WIN32_LEAN_AND_MEAN

#include <intrin.h>
#include <windows.h>

/* Silence pointer-type mismatch warnings from the Interlocked* calls below when compiling with Clang. */
#if defined(__clang__)
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wincompatible-pointer-types"
#endif

/* Generic full-barrier load/store helpers used by the sized atomic_load_*() /
 * atomic_store_*() functions below. */
#define __atomic_impl_load_generic(v) (MemoryBarrier(), *(v))
#define __atomic_impl_store_generic(p, v) \
  do { \
    *(p) = (v); \
    MemoryBarrier(); \
  } while (0)

/* 64-bit operations. */

/* Unsigned */
ATOMIC_INLINE uint64_t atomic_add_and_fetch_uint64(uint64_t *p, uint64_t x)
{
  return InterlockedExchangeAdd64((int64_t *)p, (int64_t)x) + x;
}

ATOMIC_INLINE uint64_t atomic_sub_and_fetch_uint64(uint64_t *p, uint64_t x)
{
  return InterlockedExchangeAdd64((int64_t *)p, -((int64_t)x)) - x;
}

ATOMIC_INLINE uint64_t atomic_cas_uint64(uint64_t *v, uint64_t old, uint64_t _new)
{
  return InterlockedCompareExchange64((int64_t *)v, _new, old);
}

ATOMIC_INLINE uint64_t atomic_load_uint64(const uint64_t *v)
{
  return __atomic_impl_load_generic(v);
}

ATOMIC_INLINE void atomic_store_uint64(uint64_t *p, uint64_t v)
{
  __atomic_impl_store_generic(p, v);
}

ATOMIC_INLINE uint64_t atomic_fetch_and_add_uint64(uint64_t *p, uint64_t x)
{
  return InterlockedExchangeAdd64((int64_t *)p, (int64_t)x);
}

ATOMIC_INLINE uint64_t atomic_fetch_and_sub_uint64(uint64_t *p, uint64_t x)
{
  return InterlockedExchangeAdd64((int64_t *)p, -((int64_t)x));
}

/* Signed */
ATOMIC_INLINE int64_t atomic_add_and_fetch_int64(int64_t *p, int64_t x)
{
  return InterlockedExchangeAdd64(p, x) + x;
}

ATOMIC_INLINE int64_t atomic_sub_and_fetch_int64(int64_t *p, int64_t x)
{
  return InterlockedExchangeAdd64(p, -x) - x;
}

ATOMIC_INLINE int64_t atomic_cas_int64(int64_t *v, int64_t old, int64_t _new)
{
  return InterlockedCompareExchange64(v, _new, old);
}

ATOMIC_INLINE int64_t atomic_load_int64(const int64_t *v)
{
  return __atomic_impl_load_generic(v);
}

ATOMIC_INLINE void atomic_store_int64(int64_t *p, int64_t v)
{
  __atomic_impl_store_generic(p, v);
}

ATOMIC_INLINE int64_t atomic_fetch_and_add_int64(int64_t *p, int64_t x)
{
  return InterlockedExchangeAdd64(p, x);
}

ATOMIC_INLINE int64_t atomic_fetch_and_sub_int64(int64_t *p, int64_t x)
{
  return InterlockedExchangeAdd64(p, -x);
}

/* 32-bit operations. */

/* Unsigned */
ATOMIC_INLINE uint32_t atomic_add_and_fetch_uint32(uint32_t *p, uint32_t x)
{
  return InterlockedExchangeAdd(p, x) + x;
}

ATOMIC_INLINE uint32_t atomic_sub_and_fetch_uint32(uint32_t *p, uint32_t x)
{
  return InterlockedExchangeAdd(p, -((int32_t)x)) - x;
}

ATOMIC_INLINE uint32_t atomic_cas_uint32(uint32_t *v, uint32_t old, uint32_t _new)
{
  return InterlockedCompareExchange((long *)v, _new, old);
}

ATOMIC_INLINE uint32_t atomic_load_uint32(const uint32_t *v)
{
  return __atomic_impl_load_generic(v);
}

ATOMIC_INLINE void atomic_store_uint32(uint32_t *p, uint32_t v)
{
  __atomic_impl_store_generic(p, v);
}

ATOMIC_INLINE uint32_t atomic_fetch_and_add_uint32(uint32_t *p, uint32_t x)
{
  return InterlockedExchangeAdd(p, x);
}

ATOMIC_INLINE uint32_t atomic_fetch_and_or_uint32(uint32_t *p, uint32_t x)
{
  return InterlockedOr((long *)p, x);
}

ATOMIC_INLINE uint32_t atomic_fetch_and_and_uint32(uint32_t *p, uint32_t x)
{
  return InterlockedAnd((long *)p, x);
}

/* Signed */
ATOMIC_INLINE int32_t atomic_add_and_fetch_int32(int32_t *p, int32_t x)
{
  return InterlockedExchangeAdd((long *)p, x) + x;
}

ATOMIC_INLINE int32_t atomic_sub_and_fetch_int32(int32_t *p, int32_t x)
{
  return InterlockedExchangeAdd((long *)p, -x) - x;
}

ATOMIC_INLINE int32_t atomic_cas_int32(int32_t *v, int32_t old, int32_t _new)
{
  return InterlockedCompareExchange((long *)v, _new, old);
}

ATOMIC_INLINE int32_t atomic_load_int32(const int32_t *v)
{
  return __atomic_impl_load_generic(v);
}

ATOMIC_INLINE void atomic_store_int32(int32_t *p, int32_t v)
{
  __atomic_impl_store_generic(p, v);
}

ATOMIC_INLINE int32_t atomic_fetch_and_add_int32(int32_t *p, int32_t x)
{
  return InterlockedExchangeAdd((long *)p, x);
}

ATOMIC_INLINE int32_t atomic_fetch_and_or_int32(int32_t *p, int32_t x)
{
  return InterlockedOr((long *)p, x);
}

ATOMIC_INLINE int32_t atomic_fetch_and_and_int32(int32_t *p, int32_t x)
{
  return InterlockedAnd((long *)p, x);
}

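/* Illustrative usage sketch, not part of the original header: reference counting built on
 * the 32-bit add/sub helpers above. The type and function names are hypothetical. */
typedef struct ExampleRefCounted {
  uint32_t refcount;
} ExampleRefCounted;

ATOMIC_INLINE void example_ref_acquire(ExampleRefCounted *object)
{
  (void)atomic_add_and_fetch_uint32(&object->refcount, 1);
}

ATOMIC_INLINE uint32_t example_ref_release(ExampleRefCounted *object)
{
  /* Returns the remaining reference count; zero means the caller dropped the last
   * reference and may free the object. */
  return atomic_sub_and_fetch_uint32(&object->refcount, 1);
}
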
/* 16-bit operations. */

/* Signed */
ATOMIC_INLINE int16_t atomic_fetch_and_or_int16(int16_t *p, int16_t x)
{
  return InterlockedOr16((short *)p, x);
}

ATOMIC_INLINE int16_t atomic_fetch_and_and_int16(int16_t *p, int16_t x)
{
  return InterlockedAnd16((short *)p, x);
}

/* 8-bit operations. */

/* Unsigned */
#pragma intrinsic(_InterlockedAnd8)
ATOMIC_INLINE uint8_t atomic_fetch_and_and_uint8(uint8_t *p, uint8_t b)
{
#if (LG_SIZEOF_PTR == 8 || LG_SIZEOF_INT == 8)
  return InterlockedAnd8((char *)p, (char)b);
#else
  return _InterlockedAnd8((char *)p, (char)b);
#endif
}

#pragma intrinsic(_InterlockedOr8)
ATOMIC_INLINE uint8_t atomic_fetch_and_or_uint8(uint8_t *p, uint8_t b)
{
#if (LG_SIZEOF_PTR == 8 || LG_SIZEOF_INT == 8)
  return InterlockedOr8((char *)p, (char)b);
#else
  return _InterlockedOr8((char *)p, (char)b);
#endif
}

/* Signed */
#pragma intrinsic(_InterlockedAnd8)
ATOMIC_INLINE int8_t atomic_fetch_and_and_int8(int8_t *p, int8_t b)
{
#if (LG_SIZEOF_PTR == 8 || LG_SIZEOF_INT == 8)
  return InterlockedAnd8((char *)p, (char)b);
#else
  return _InterlockedAnd8((char *)p, (char)b);
#endif
}

#pragma intrinsic(_InterlockedOr8)
ATOMIC_INLINE int8_t atomic_fetch_and_or_int8(int8_t *p, int8_t b)
{
#if (LG_SIZEOF_PTR == 8 || LG_SIZEOF_INT == 8)
  return InterlockedOr8((char *)p, (char)b);
#else
  return _InterlockedOr8((char *)p, (char)b);
#endif
}

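/* Illustrative usage sketch, not part of the original header: treating a byte as a set of
 * status flags with the 8-bit OR/AND helpers above. The flag value and function names are
 * hypothetical. */
#define EXAMPLE_FLAG_DIRTY ((uint8_t)(1 << 0))

ATOMIC_INLINE uint8_t example_flags_set_dirty(uint8_t *flags)
{
  /* Returns the flag byte as it was before the bit was set. */
  return atomic_fetch_and_or_uint8(flags, EXAMPLE_FLAG_DIRTY);
}

ATOMIC_INLINE uint8_t example_flags_clear_dirty(uint8_t *flags)
{
  /* AND with the complement clears the bit; again returns the previous value. */
  return atomic_fetch_and_and_uint8(flags, (uint8_t)~EXAMPLE_FLAG_DIRTY);
}
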
#undef __atomic_impl_load_generic
#undef __atomic_impl_store_generic

#if defined(__clang__)
# pragma GCC diagnostic pop
#endif

#endif /* __ATOMIC_OPS_MSVC_H__ */