summaryrefslogtreecommitdiffhomepage
path: root/include/ntapi/bits/nt32/nt_atomic_i386_asm__msvc.h
blob: c0a0ba8aa624dea41af7cfc855c5e756cd056a70 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
#include <psxtypes/psxtypes.h>

/* prototypes for the msvc compiler intrinsics used by the inline
 * wrappers below; declared here so this header does not depend on
 * <intrin.h>. the _Interlocked* family performs full-barrier atomic
 * read-modify-write operations per the msvc intrinsic contract. */
long		_InterlockedIncrement(long volatile * ptr);
int64_t		_InterlockedIncrement64(int64_t volatile * ptr);
long		_InterlockedDecrement(long volatile * ptr);
int64_t		_InterlockedDecrement64(int64_t volatile * ptr);
long		_InterlockedExchangeAdd(long volatile * ptr, long val);
int64_t		_InterlockedExchangeAdd64(int64_t volatile * ptr, int64_t val);
long		_InterlockedCompareExchange(long volatile * dst, long xchg, long cmp);
int64_t		_InterlockedCompareExchange64(int64_t volatile * dst, int64_t xchg, int64_t cmp);
long		_InterlockedAnd(long volatile * dst, long mask);
int64_t		_InterlockedAnd64(int64_t volatile * dst, int64_t mask);
long		_InterlockedOr(long volatile * dst, long mask);
int64_t		_InterlockedOr64(int64_t volatile * dst, int64_t mask);
long		_InterlockedXor(long volatile * dst, long mask);
int64_t		_InterlockedXor64(int64_t volatile * dst, int64_t mask);
uint16_t	__popcnt16(uint16_t mask);
unsigned int	__popcnt(uint32_t mask);
uint64_t	__popcnt64(uint64_t mask);
/* compiler-only (not cpu) reordering fence */
void		_ReadWriteBarrier(void);
/* NOTE(review): msvc documents the bit-scan intrinsics as taking
 * (unsigned long *, unsigned long); the types below match in size on
 * nt32 (i386) but differ in spelling — confirm against <intrin.h>. */
unsigned char	_BitScanForward(unsigned int * index, uintptr_t mask);
unsigned char	_BitScanReverse(unsigned int * index, uintptr_t mask);

/* locked increment: native-word, 32-bit, and 64-bit variants.
 * the intrinsic's return value is intentionally discarded. */
static __inline__ void at_locked_inc(
	intptr_t volatile * ptr)
{
	/* explicit cast: works whether intptr_t is typedef'd as int or
	 * long on nt32, and keeps the volatile qualifier intact */
	_InterlockedIncrement((long volatile *)ptr);
	return;
}


static __inline__ void at_locked_inc_32(
	int32_t volatile * ptr)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	_InterlockedIncrement((long volatile *)ptr);
	return;
}


static __inline__ void at_locked_inc_64(
	int64_t volatile * ptr)
{
	_InterlockedIncrement64(ptr);
	return;
}


/* locked decrement variants; intrinsic return value discarded. */
static __inline__ void at_locked_dec(
	intptr_t volatile * ptr)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	_InterlockedDecrement((long volatile *)ptr);
	return;
}


static __inline__ void at_locked_dec_32(
	int32_t volatile * ptr)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	_InterlockedDecrement((long volatile *)ptr);
	return;
}


static __inline__ void at_locked_dec_64(
	int64_t volatile * ptr)
{
	_InterlockedDecrement64(ptr);
	return;
}


/* locked add (fetch-and-add with the fetched value discarded). */
static __inline__ void at_locked_add(
	intptr_t volatile *	ptr,
	intptr_t		val)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	_InterlockedExchangeAdd((long volatile *)ptr, val);
	return;
}


static __inline__ void at_locked_add_32(
	int32_t volatile *	ptr,
	int32_t			val)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	_InterlockedExchangeAdd((long volatile *)ptr, val);
	return;
}


static __inline__ void at_locked_add_64(
	int64_t volatile *	ptr,
	int64_t			val)
{
	_InterlockedExchangeAdd64(ptr, val);
	return;
}


/* locked subtract, implemented as addition of the negated value.
 * NOTE(review): val == INTPTR_MIN/INT32_MIN/INT64_MIN makes -val
 * overflow; this matches the pre-existing xsub convention. */
static __inline__ void at_locked_sub(
	intptr_t volatile *	ptr,
	intptr_t		val)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	_InterlockedExchangeAdd((long volatile *)ptr, -val);
	return;
}


static __inline__ void at_locked_sub_32(
	int32_t volatile *	ptr,
	int32_t			val)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	_InterlockedExchangeAdd((long volatile *)ptr, -val);
	return;
}


static __inline__ void at_locked_sub_64(
	int64_t volatile *	ptr,
	int64_t			val)
{
	_InterlockedExchangeAdd64(ptr, -val);
	return;
}


/* locked exchange-and-add: returns the intrinsic's result (the value
 * previously stored at ptr, per the msvc intrinsic contract). */
static __inline__ intptr_t at_locked_xadd(
	intptr_t volatile *	ptr,
	intptr_t		val)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	return _InterlockedExchangeAdd((long volatile *)ptr, val);
}


static __inline__ int32_t at_locked_xadd_32(
	int32_t volatile *	ptr,
	int32_t			val)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	return _InterlockedExchangeAdd((long volatile *)ptr, val);
}


static __inline__ int64_t at_locked_xadd_64(
	int64_t volatile *	ptr,
	int64_t			val)
{
	return _InterlockedExchangeAdd64(ptr, val);
}


/* locked exchange-and-subtract: adds -val and returns the intrinsic's
 * result. NOTE(review): negating the minimum value overflows; matches
 * the sub variants. */
static __inline__ intptr_t at_locked_xsub(
	intptr_t volatile *	ptr,
	intptr_t		val)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	return _InterlockedExchangeAdd((long volatile *)ptr, -val);
}


static __inline__ int32_t at_locked_xsub_32(
	int32_t volatile *	ptr,
	int32_t			val)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	return _InterlockedExchangeAdd((long volatile *)ptr, -val);
}


static __inline__ int64_t at_locked_xsub_64(
	int64_t volatile *	ptr,
	int64_t			val)
{
	return _InterlockedExchangeAdd64(ptr, -val);
}


/* locked compare-and-swap. note the argument-order translation: this
 * interface takes (dst, cmp, xchg) while the msvc intrinsics take
 * (dst, xchg, cmp). returns the intrinsic's result. */
static __inline__ intptr_t at_locked_cas(
	intptr_t volatile *	dst,
	intptr_t		cmp,
	intptr_t		xchg)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	return _InterlockedCompareExchange((long volatile *)dst,xchg,cmp);
}


static __inline__ int32_t at_locked_cas_32(
	int32_t volatile *	dst,
	int32_t			cmp,
	int32_t			xchg)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	return _InterlockedCompareExchange((long volatile *)dst,xchg,cmp);
}


static __inline__ int64_t at_locked_cas_64(
	int64_t volatile *	dst,
	int64_t			cmp,
	int64_t			xchg)
{
	return _InterlockedCompareExchange64(dst,xchg,cmp);
}


/* locked bitwise-and; returns the intrinsic's result. */
static __inline__ intptr_t at_locked_and(
	intptr_t volatile *	dst,
	intptr_t		mask)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	return _InterlockedAnd((long volatile *)dst,mask);
}


static __inline__ int32_t at_locked_and_32(
	int32_t volatile *	dst,
	int32_t			mask)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	return _InterlockedAnd((long volatile *)dst,mask);
}


static __inline__ int64_t at_locked_and_64(
	int64_t volatile *	dst,
	int64_t			mask)
{
	return _InterlockedAnd64(dst,mask);
}


/* locked bitwise-or; returns the intrinsic's result. */
static __inline__ intptr_t at_locked_or(
	intptr_t volatile *	dst,
	intptr_t		mask)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	return _InterlockedOr((long volatile *)dst,mask);
}


static __inline__ int32_t at_locked_or_32(
	int32_t volatile *	dst,
	int32_t			mask)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	return _InterlockedOr((long volatile *)dst,mask);
}


static __inline__ int64_t at_locked_or_64(
	int64_t volatile *	dst,
	int64_t			mask)
{
	return _InterlockedOr64(dst,mask);
}


/* locked bitwise-xor; returns the intrinsic's result. */
static __inline__ intptr_t at_locked_xor(
	intptr_t volatile *	dst,
	intptr_t		mask)
{
	/* explicit cast: tolerates intptr_t being int or long, keeps volatile */
	return _InterlockedXor((long volatile *)dst,mask);
}


static __inline__ int32_t at_locked_xor_32(
	int32_t volatile *	dst,
	int32_t			mask)
{
	/* (long volatile *) rather than (long *): never cast away volatile */
	return _InterlockedXor((long volatile *)dst,mask);
}


static __inline__ int64_t at_locked_xor_64(
	int64_t volatile *	dst,
	int64_t			mask)
{
	return _InterlockedXor64(dst,mask);
}


/* at_store / at_store_32: plain store bracketed by compiler fences.
 * a naturally aligned 32-bit store is performed in a single memory
 * access on i386, so no lock prefix is needed; _ReadWriteBarrier only
 * prevents compiler reordering (assumes dst is naturally aligned). */
static __inline__ void at_store(
	volatile intptr_t *	dst,
	intptr_t		val)
{
	_ReadWriteBarrier();
	*dst = val;
	_ReadWriteBarrier();

	return;
}


static __inline__ void at_store_32(
	volatile int32_t *	dst,
	int32_t			val)
{
	_ReadWriteBarrier();
	*dst = val;
	_ReadWriteBarrier();

	return;
}


static __inline__ void at_store_64(
	volatile int64_t *	dst,
	int64_t			val)
{
	/* on i386 a plain 64-bit store is emitted as two 32-bit moves and
	 * may tear under concurrency; use a cmpxchg8b-based compare-exchange
	 * loop (via the intrinsic declared above) so readers can never
	 * observe a half-written value. */
	int64_t prev;
	int64_t seen;

	_ReadWriteBarrier();

	seen = *dst;

	do {
		prev = seen;
		seen = _InterlockedCompareExchange64(dst,val,prev);
	} while (seen != prev);

	_ReadWriteBarrier();

	return;
}


/* bit-scan-forward wrapper: per the msvc intrinsic contract, *index
 * receives the position of the lowest set bit and the return value is
 * non-zero iff mask had any bit set. */
static __inline__ int at_bsf(
	unsigned int *		index,
	uintptr_t		mask)
{
	unsigned char found;

	found = _BitScanForward(index,mask);

	return (int)found;
}


/* bit-scan-reverse wrapper: as above, but for the highest set bit. */
static __inline__ int at_bsr(
	unsigned int *		index,
	uintptr_t		mask)
{
	unsigned char found;

	found = _BitScanReverse(index,mask);

	return (int)found;
}


/* population-count wrappers over the msvc popcnt intrinsics; all
 * results are widened to size_t (counts are at most 64, so every
 * conversion below is value-preserving). */
static __inline__ size_t at_popcount(
	uintptr_t		mask)
{
	unsigned int ret;

	ret = __popcnt(mask);

	return (size_t)ret;
}


static __inline__ size_t at_popcount_16(
	uint16_t		mask)
{
	uint16_t ret;

	ret = __popcnt16(mask);

	return (size_t)ret;
}


static __inline__ size_t at_popcount_32(
	uint32_t		mask)
{
	return (size_t)__popcnt(mask);
}


static __inline__ size_t at_popcount_64(
	uint64_t		mask)
{
	uint64_t ret;

	ret = __popcnt64(mask);

	return (size_t)ret;
}