author    midipix <writeonce@midipix.org>    2016-12-11 11:16:32 -0500
committer midipix <writeonce@midipix.org>    2016-12-11 11:16:32 -0500
commit    52d044bf2d8d38844819f682f16f64af8c57e42d (patch)
tree      d84c67981a95f9cf32ac9fa75f8a7adfad3754cd /include/ntapi/bits/nt32/nt_atomic_i386_asm__msvc.h
parent    826d41ad264857c31ec500bdf9ee4a0bf5b8eba7 (diff)
download  ntapi-52d044bf2d8d38844819f682f16f64af8c57e42d.tar.bz2
          ntapi-52d044bf2d8d38844819f682f16f64af8c57e42d.tar.xz
api headers: bits sub-dirs: i386 --> nt32, x86_64 --> nt64.
Diffstat (limited to 'include/ntapi/bits/nt32/nt_atomic_i386_asm__msvc.h')
 -rw-r--r--  include/ntapi/bits/nt32/nt_atomic_i386_asm__msvc.h | 350 +
 1 file changed, 350 insertions(+), 0 deletions(-)
diff --git a/include/ntapi/bits/nt32/nt_atomic_i386_asm__msvc.h b/include/ntapi/bits/nt32/nt_atomic_i386_asm__msvc.h
new file mode 100644
index 0000000..c0a0ba8
--- /dev/null
+++ b/include/ntapi/bits/nt32/nt_atomic_i386_asm__msvc.h
@@ -0,0 +1,350 @@
+#include <psxtypes/psxtypes.h>
+
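+/* prototypes for the msvc compiler intrinsics used below; these  */
+/* are normally provided by <intrin.h>, and are declared here so  */
+/* that the header remains self-contained.                        */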
+long _InterlockedIncrement(long volatile * ptr);
+int64_t _InterlockedIncrement64(int64_t volatile * ptr);
+long _InterlockedDecrement(long volatile * ptr);
+int64_t _InterlockedDecrement64(int64_t volatile * ptr);
+long _InterlockedExchangeAdd(long volatile * ptr, long val);
+int64_t _InterlockedExchangeAdd64(int64_t volatile * ptr, int64_t val);
+long _InterlockedCompareExchange(long volatile * dst, long xchg, long cmp);
+int64_t _InterlockedCompareExchange64(int64_t volatile * dst, int64_t xchg, int64_t cmp);
+long _InterlockedAnd(long volatile * dst, long mask);
+int64_t _InterlockedAnd64(int64_t volatile * dst, int64_t mask);
+long _InterlockedOr(long volatile * dst, long mask);
+int64_t _InterlockedOr64(int64_t volatile * dst, int64_t mask);
+long _InterlockedXor(long volatile * dst, long mask);
+int64_t _InterlockedXor64(int64_t volatile * dst, int64_t mask);
+uint16_t __popcnt16(uint16_t mask);
+unsigned int __popcnt(uint32_t mask);
+uint64_t __popcnt64(uint64_t mask);
+void _ReadWriteBarrier(void);
+unsigned char _BitScanForward(unsigned int * index, uintptr_t mask);
+unsigned char _BitScanReverse(unsigned int * index, uintptr_t mask);
+
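+/* at_locked_inc/at_locked_dec: atomic increment and decrement;   */
+/* the updated value returned by the intrinsic is discarded.      */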
+static __inline__ void at_locked_inc(
+ intptr_t volatile * ptr)
+{
+ _InterlockedIncrement(ptr);
+ return;
+}
+
+
+static __inline__ void at_locked_inc_32(
+ int32_t volatile * ptr)
+{
+ _InterlockedIncrement((long *)ptr);
+ return;
+}
+
+
+static __inline__ void at_locked_inc_64(
+ int64_t volatile * ptr)
+{
+ _InterlockedIncrement64(ptr);
+ return;
+}
+
+
+static __inline__ void at_locked_dec(
+ intptr_t volatile * ptr)
+{
+ _InterlockedDecrement(ptr);
+ return;
+}
+
+
+static __inline__ void at_locked_dec_32(
+ int32_t volatile * ptr)
+{
+ _InterlockedDecrement((long *)ptr);
+ return;
+}
+
+
+static __inline__ void at_locked_dec_64(
+ int64_t volatile * ptr)
+{
+ _InterlockedDecrement64(ptr);
+ return;
+}
+
+
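+/* at_locked_add/at_locked_sub: atomic addition and subtraction   */
+/* via _InterlockedExchangeAdd with the value or its negation;    */
+/* the previous value returned by the intrinsic is discarded.     */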
+static __inline__ void at_locked_add(
+ intptr_t volatile * ptr,
+ intptr_t val)
+{
+ _InterlockedExchangeAdd(ptr, val);
+ return;
+}
+
+
+static __inline__ void at_locked_add_32(
+ int32_t volatile * ptr,
+ int32_t val)
+{
+ _InterlockedExchangeAdd((long *)ptr, val);
+ return;
+}
+
+
+static __inline__ void at_locked_add_64(
+ int64_t volatile * ptr,
+ int64_t val)
+{
+ _InterlockedExchangeAdd64(ptr, val);
+ return;
+}
+
+
+static __inline__ void at_locked_sub(
+ intptr_t volatile * ptr,
+ intptr_t val)
+{
+ _InterlockedExchangeAdd(ptr, -val);
+ return;
+}
+
+
+static __inline__ void at_locked_sub_32(
+ int32_t volatile * ptr,
+ int32_t val)
+{
+ _InterlockedExchangeAdd((long *)ptr, -val);
+ return;
+}
+
+
+static __inline__ void at_locked_sub_64(
+ int64_t volatile * ptr,
+ int64_t val)
+{
+ _InterlockedExchangeAdd64(ptr, -val);
+ return;
+}
+
+
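+/* at_locked_xadd/at_locked_xsub: atomic exchange-and-add and     */
+/* exchange-and-subtract, returning the value of *ptr as it was   */
+/* prior to the operation.                                        */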
+static __inline__ intptr_t at_locked_xadd(
+ intptr_t volatile * ptr,
+ intptr_t val)
+{
+ return _InterlockedExchangeAdd(ptr, val);
+}
+
+
+static __inline__ int32_t at_locked_xadd_32(
+ int32_t volatile * ptr,
+ int32_t val)
+{
+ return _InterlockedExchangeAdd((long *)ptr, val);
+}
+
+
+static __inline__ int64_t at_locked_xadd_64(
+ int64_t volatile * ptr,
+ int64_t val)
+{
+ return _InterlockedExchangeAdd64(ptr, val);
+}
+
+
+static __inline__ intptr_t at_locked_xsub(
+ intptr_t volatile * ptr,
+ intptr_t val)
+{
+ return _InterlockedExchangeAdd(ptr, -val);
+}
+
+
+static __inline__ int32_t at_locked_xsub_32(
+ int32_t volatile * ptr,
+ int32_t val)
+{
+ return _InterlockedExchangeAdd((long *)ptr, -val);
+}
+
+
+static __inline__ int64_t at_locked_xsub_64(
+ int64_t volatile * ptr,
+ int64_t val)
+{
+ return _InterlockedExchangeAdd64(ptr, -val);
+}
+
+
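+/* at_locked_cas: atomic compare-and-swap. note the argument      */
+/* order: the wrappers take (dst,cmp,xchg), whereas the msvc      */
+/* intrinsics take (dst,xchg,cmp). the return value is the one    */
+/* observed at *dst before the operation, hence the swap took     */
+/* place if and only if the return value equals cmp; e.g. a       */
+/* (hypothetical) spin-lock acquisition:                          */
+/*                                                                */
+/*     while (at_locked_cas(&lock,0,1))                           */
+/*         ;                                                      */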
+static __inline__ intptr_t at_locked_cas(
+ intptr_t volatile * dst,
+ intptr_t cmp,
+ intptr_t xchg)
+{
+ return _InterlockedCompareExchange(dst,xchg,cmp);
+}
+
+
+static __inline__ int32_t at_locked_cas_32(
+ int32_t volatile * dst,
+ int32_t cmp,
+ int32_t xchg)
+{
+ return _InterlockedCompareExchange((long *)dst,xchg,cmp);
+}
+
+
+static __inline__ int64_t at_locked_cas_64(
+ int64_t volatile * dst,
+ int64_t cmp,
+ int64_t xchg)
+{
+ return _InterlockedCompareExchange64(dst,xchg,cmp);
+}
+
+
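+/* at_locked_and/at_locked_or/at_locked_xor: atomic bitwise       */
+/* operations, returning the value of *dst as it was prior to     */
+/* the operation.                                                 */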
+static __inline__ intptr_t at_locked_and(
+ intptr_t volatile * dst,
+ intptr_t mask)
+{
+ return _InterlockedAnd(dst,mask);
+}
+
+
+static __inline__ int32_t at_locked_and_32(
+ int32_t volatile * dst,
+ int32_t mask)
+{
+ return _InterlockedAnd((long *)dst,mask);
+}
+
+
+static __inline__ int64_t at_locked_and_64(
+ int64_t volatile * dst,
+ int64_t mask)
+{
+ return _InterlockedAnd64(dst,mask);
+}
+
+
+static __inline__ intptr_t at_locked_or(
+ intptr_t volatile * dst,
+ intptr_t mask)
+{
+ return _InterlockedOr(dst,mask);
+}
+
+
+static __inline__ int32_t at_locked_or_32(
+ int32_t volatile * dst,
+ int32_t mask)
+{
+ return _InterlockedOr((long *)dst,mask);
+}
+
+
+static __inline__ int64_t at_locked_or_64(
+ int64_t volatile * dst,
+ int64_t mask)
+{
+ return _InterlockedOr64(dst,mask);
+}
+
+
+static __inline__ intptr_t at_locked_xor(
+ intptr_t volatile * dst,
+ intptr_t mask)
+{
+ return _InterlockedXor(dst,mask);
+}
+
+
+static __inline__ int32_t at_locked_xor_32(
+ int32_t volatile * dst,
+ int32_t mask)
+{
+ return _InterlockedXor((long *)dst,mask);
+}
+
+
+static __inline__ int64_t at_locked_xor_64(
+ int64_t volatile * dst,
+ int64_t mask)
+{
+ return _InterlockedXor64(dst,mask);
+}
+
+
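+/* at_store: a plain store bracketed by _ReadWriteBarrier, which  */
+/* is a compiler-level fence only; ordering at the hardware level */
+/* relies on the strongly-ordered x86 memory model. note that on  */
+/* i386, the 64-bit variant is a plain (non-atomic) store.        */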
+static __inline__ void at_store(
+ volatile intptr_t * dst,
+ intptr_t val)
+{
+ _ReadWriteBarrier();
+ *dst = val;
+ _ReadWriteBarrier();
+
+ return;
+}
+
+
+static __inline__ void at_store_32(
+ volatile int32_t * dst,
+ int32_t val)
+{
+ _ReadWriteBarrier();
+ *dst = val;
+ _ReadWriteBarrier();
+
+ return;
+}
+
+
+static __inline__ void at_store_64(
+ volatile int64_t * dst,
+ int64_t val)
+{
+ _ReadWriteBarrier();
+ *dst = val;
+ _ReadWriteBarrier();
+
+ return;
+}
+
+
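+/* at_bsf/at_bsr: forward and reverse bit-scan; *index receives   */
+/* the position of the lowest (bsf) or highest (bsr) set bit,     */
+/* and the return value is non-zero if and only if mask has at    */
+/* least one bit set.                                             */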
+static __inline__ int at_bsf(
+ unsigned int * index,
+ uintptr_t mask)
+{
+ return (int)_BitScanForward(index,mask);
+}
+
+
+static __inline__ int at_bsr(
+ unsigned int * index,
+ uintptr_t mask)
+{
+ return (int)_BitScanReverse(index,mask);
+}
+
+
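+/* at_popcount: population count via the __popcnt intrinsics,     */
+/* which map to the popcnt instruction and thus assume support    */
+/* for it on the underlying processor.                            */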
+static __inline__ size_t at_popcount(
+ uintptr_t mask)
+{
+ return __popcnt(mask);
+}
+
+
+static __inline__ size_t at_popcount_16(
+ uint16_t mask)
+{
+ return __popcnt16(mask);
+}
+
+
+static __inline__ size_t at_popcount_32(
+ uint32_t mask)
+{
+ return __popcnt(mask);
+}
+
+
+static __inline__ size_t at_popcount_64(
+ uint64_t mask)
+{
+ return (size_t)__popcnt64(mask);
+}