path: root/include/ntapi/nt_atomic.h
#ifndef _NT_ATOMIC_H_
#define _NT_ATOMIC_H_
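
/* Declarations of the ntapi atomic primitives; the architecture-specific
 * implementations are provided by bits/nt_atomic_inline_asm.h, which is
 * included at the bottom of this header. */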

#include "nt_abi.h"

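/* atomic increment of *ptr: pointer-sized, 32-bit, and 64-bit variants */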
static __inline__ void at_locked_inc(
	intptr_t volatile * ptr);

static __inline__ void at_locked_inc_32(
	int32_t volatile * ptr);

static __inline__ void at_locked_inc_64(
	int64_t volatile * ptr);

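/* atomic decrement of *ptr: pointer-sized, 32-bit, and 64-bit variants */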
static __inline__ void at_locked_dec(
	intptr_t volatile * ptr);

static __inline__ void at_locked_dec_32(
	int32_t volatile * ptr);

static __inline__ void at_locked_dec_64(
	int64_t volatile * ptr);

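/* atomic addition: add val to *ptr; no value is returned */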
static __inline__ void at_locked_add(
	intptr_t volatile *	ptr,
	intptr_t		val);

static __inline__ void at_locked_add_32(
	int32_t volatile *	ptr,
	int32_t			val);

static __inline__ void at_locked_add_64(
	int64_t volatile *	ptr,
	int64_t			val);

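/* atomic subtraction: subtract val from *ptr; no value is returned */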
static __inline__ void at_locked_sub(
	intptr_t volatile *	ptr,
	intptr_t		val);

static __inline__ void at_locked_sub_32(
	int32_t volatile *	ptr,
	int32_t			val);

static __inline__ void at_locked_sub_64(
	int64_t volatile *	ptr,
	int64_t			val);

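/* atomic exchange-and-add: add val to *ptr and return a value; the exact
 * return-value convention (conventionally the previous contents of *ptr)
 * is defined by the inline-asm implementation. */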
static __inline__ intptr_t at_locked_xadd(
	intptr_t volatile *	ptr,
	intptr_t		val);

static __inline__ int32_t at_locked_xadd_32(
	int32_t volatile *	ptr,
	int32_t			val);

static __inline__ int64_t at_locked_xadd_64(
	int64_t volatile *	ptr,
	int64_t			val);

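/* atomic exchange-and-subtract: subtract val from *ptr and return a value,
 * following the same convention as the xadd variants above. */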
static __inline__ intptr_t at_locked_xsub(
	intptr_t volatile *	ptr,
	intptr_t		val);

static __inline__ int32_t at_locked_xsub_32(
	int32_t volatile *	ptr,
	int32_t			val);

static __inline__ int64_t at_locked_xsub_64(
	int64_t volatile *	ptr,
	int64_t			val);

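/* atomic compare-and-swap: store xchg to *dst if *dst equals cmp; the
 * returned value (conventionally the previous contents of *dst) is
 * defined by the inline-asm implementation. */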
static __inline__ intptr_t at_locked_cas(
	intptr_t volatile *	dst,
	intptr_t		cmp,
	intptr_t		xchg);

static __inline__ int32_t at_locked_cas_32(
	int32_t volatile *	dst,
	int32_t			cmp,
	int32_t			xchg);

static __inline__ int64_t at_locked_cas_64(
	int64_t volatile *	dst,
	int64_t			cmp,
	int64_t			xchg);

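/* atomic bitwise and: *dst &= mask */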
static __inline__ void at_locked_and(
	intptr_t volatile *	dst,
	intptr_t		mask);


static __inline__ void at_locked_and_32(
	int32_t volatile *	dst,
	int32_t			mask);


static __inline__ void at_locked_and_64(
	int64_t volatile *	dst,
	int64_t			mask);


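/* atomic bitwise or: *dst |= mask */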
static __inline__ void at_locked_or(
	intptr_t volatile *	dst,
	intptr_t		mask);


static __inline__ void at_locked_or_32(
	int32_t volatile *	dst,
	int32_t			mask);


static __inline__ void at_locked_or_64(
	int64_t volatile *	dst,
	int64_t			mask);


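/* atomic bitwise xor: *dst ^= mask */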
static __inline__ void at_locked_xor(
	intptr_t volatile *	dst,
	intptr_t		mask);


static __inline__ void at_locked_xor_32(
	int32_t volatile *	dst,
	int32_t			mask);


static __inline__ void at_locked_xor_64(
	int64_t volatile *	dst,
	int64_t			mask);

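/* atomic store of val to *dst */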
static __inline__ void at_store(
	intptr_t volatile *	dst,
	intptr_t		val);

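/* bit scan forward/reverse: write the index of the lowest (at_bsf) or
 * highest (at_bsr) set bit in mask to *index; the int return value
 * indicates whether a set bit was found. */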
static __inline__ int at_bsf(
	unsigned int *		index,
	uintptr_t		mask);

static __inline__ int at_bsr(
	unsigned int *		index,
	uintptr_t		mask);

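/* population count: number of set bits in mask */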
static __inline__ size_t at_popcount(
	uintptr_t		mask);

static __inline__ size_t at_popcount_16(
	uint16_t		mask);

static __inline__ size_t at_popcount_32(
	uint32_t		mask);

static __inline__ size_t at_popcount_64(
	uint64_t		mask);

#include "bits/nt_atomic_inline_asm.h"
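
/* Usage sketch (illustrative only, not part of the interface): the helpers
 * below are hypothetical and assume the conventional fetch-and-modify
 * contract, namely that at_locked_cas() and at_locked_xsub() return the
 * previous contents of their target; consult the inline-asm implementation
 * for the authoritative semantics.
 *
 *	static __inline__ void ref_acquire(intptr_t volatile * refcnt)
 *	{
 *		at_locked_inc(refcnt);
 *	}
 *
 *	static __inline__ int ref_release(intptr_t volatile * refcnt)
 *	{
 *		return (at_locked_xsub(refcnt,1) == 1);
 *	}
 *
 *	static __inline__ int lock_try_acquire(intptr_t volatile * lock)
 *	{
 *		return at_locked_cas(lock,0,1) ? -1 : 0;
 *	}
 */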

#endif