12 #include_next <intrin0.h>
#if defined(__x86_64__) && !defined(__arm64ec__)
/* NOTE(review): the body of this guard (an #include in upstream intrin0.h)
   was lost in extraction; the guard is closed here so the unconditional
   prototypes below do not end up inside it. Confirm against upstream. */
#endif

/* Bit scanning and compiler-barrier intrinsics (available on all targets). */
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
void _ReadWriteBarrier(void);
#if defined(__aarch64__) || defined(__arm64ec__)
/* Count-leading-zeros intrinsics. */
unsigned int _CountLeadingZeros(unsigned long);
/* Was "unsigned _int64" — typo for "__int64", consistent with the rest of
   this header. */
unsigned int _CountLeadingZeros64(unsigned __int64);
/* 128-bit compare-exchange with acquire / no-fence / release ordering.
   The _ExchangeLow parameter was dropped in extraction and is restored to
   match _InterlockedCompareExchange128. */
unsigned char _InterlockedCompareExchange128_acq(__int64 volatile *_Destination,
                                                 __int64 _ExchangeHigh,
                                                 __int64 _ExchangeLow,
                                                 __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_nf(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_rel(__int64 volatile *_Destination,
                                                 __int64 _ExchangeHigh,
                                                 __int64 _ExchangeLow,
                                                 __int64 *_ComparandResult);
#endif
47 #ifdef __x86_64__ && !defined(__arm64ec__)
48 unsigned __int64 _umul128(
unsigned __int64,
unsigned __int64,
50 unsigned __int64 __shiftleft128(
unsigned __int64 _LowPart,
51 unsigned __int64 _HighPart,
52 unsigned char _Shift);
53 unsigned __int64 __shiftright128(
unsigned __int64 _LowPart,
54 unsigned __int64 _HighPart,
55 unsigned char _Shift);
#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
/* NOTE(review): the body of this guard was lost in extraction; per the
   stranded description at the end of this file, its occupant is _mm_pause
   (x86 PAUSE spin-loop hint). TODO: confirm against upstream intrin0.h. */
void _mm_pause(void);
#endif
62 #if defined(__x86_64__) || defined(__aarch64__)
63 unsigned char _InterlockedCompareExchange128(__int64
volatile *_Destination,
64 __int64 _ExchangeHigh,
66 __int64 *_ComparandResult);
69 #if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
70 unsigned char _BitScanForward64(
unsigned long *_Index,
unsigned __int64 _Mask);
71 unsigned char _BitScanReverse64(
unsigned long *_Index,
unsigned __int64 _Mask);
74 #if defined(__i386__) || defined(__x86_64__) || defined(__arm__) || \
76 __int64 _InterlockedDecrement64(__int64
volatile *_Addend);
77 __int64 _InterlockedExchange64(__int64
volatile *_Target, __int64 _Value);
78 __int64 _InterlockedExchangeAdd64(__int64
volatile *_Addend, __int64 _Value);
79 __int64 _InterlockedExchangeSub64(__int64
volatile *_Subend, __int64 _Value);
80 __int64 _InterlockedIncrement64(__int64
volatile *_Addend);
81 __int64 _InterlockedOr64(__int64
volatile *_Value, __int64 _Mask);
82 __int64 _InterlockedXor64(__int64
volatile *_Value, __int64 _Mask);
83 __int64 _InterlockedAnd64(__int64
volatile *_Value, __int64 _Mask);
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
char _InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value);
char _InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value);
char _InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value);
short _InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value);
short _InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value);
short _InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value);
long _InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value);
/* The "_acq" and "_rel" 64-bit variants lost their "__int64 _Value"
   parameter in extraction; restored to match the "_nf" variant. */
__int64 _InterlockedExchangeAdd64_acq(__int64 volatile *_Addend,
                                      __int64 _Value);
__int64 _InterlockedExchangeAdd64_nf(__int64 volatile *_Addend,
                                     __int64 _Value);
__int64 _InterlockedExchangeAdd64_rel(__int64 volatile *_Addend,
                                      __int64 _Value);
#endif
/* NOTE(review): the guard opening this family was dropped in extraction;
   reconstructed to match the sibling families. Confirm against upstream. */
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
short _InterlockedIncrement16_acq(short volatile *_Value);
short _InterlockedIncrement16_nf(short volatile *_Value);
short _InterlockedIncrement16_rel(short volatile *_Value);
long _InterlockedIncrement_acq(long volatile *_Value);
long _InterlockedIncrement_nf(long volatile *_Value);
long _InterlockedIncrement_rel(long volatile *_Value);
__int64 _InterlockedIncrement64_acq(__int64 volatile *_Value);
__int64 _InterlockedIncrement64_nf(__int64 volatile *_Value);
__int64 _InterlockedIncrement64_rel(__int64 volatile *_Value);
#endif
/* NOTE(review): the guard opening this family was dropped in extraction;
   reconstructed to match the sibling families. Confirm against upstream. */
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
short _InterlockedDecrement16_acq(short volatile *_Value);
short _InterlockedDecrement16_nf(short volatile *_Value);
short _InterlockedDecrement16_rel(short volatile *_Value);
long _InterlockedDecrement_acq(long volatile *_Value);
long _InterlockedDecrement_nf(long volatile *_Value);
long _InterlockedDecrement_rel(long volatile *_Value);
__int64 _InterlockedDecrement64_acq(__int64 volatile *_Value);
__int64 _InterlockedDecrement64_nf(__int64 volatile *_Value);
__int64 _InterlockedDecrement64_rel(__int64 volatile *_Value);
#endif
/* NOTE(review): the guard opening this family was dropped in extraction;
   reconstructed to match the sibling families. Confirm against upstream. */
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
char _InterlockedAnd8_acq(char volatile *_Value, char _Mask);
char _InterlockedAnd8_nf(char volatile *_Value, char _Mask);
char _InterlockedAnd8_rel(char volatile *_Value, char _Mask);
short _InterlockedAnd16_acq(short volatile *_Value, short _Mask);
short _InterlockedAnd16_nf(short volatile *_Value, short _Mask);
short _InterlockedAnd16_rel(short volatile *_Value, short _Mask);
long _InterlockedAnd_acq(long volatile *_Value, long _Mask);
long _InterlockedAnd_nf(long volatile *_Value, long _Mask);
long _InterlockedAnd_rel(long volatile *_Value, long _Mask);
__int64 _InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask);
#endif
/* NOTE(review): the guard opening this family was dropped in extraction;
   reconstructed to match the sibling families. Confirm against upstream. */
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked Bit Test and Set / Reset
\*----------------------------------------------------------------------------*/
/* Every variant lost its trailing "long _BitPos" parameter in extraction;
   restored per the MSVC intrinsic signatures. */
unsigned char _interlockedbittestandset_acq(long volatile *_BitBase,
                                            long _BitPos);
unsigned char _interlockedbittestandset_nf(long volatile *_BitBase,
                                           long _BitPos);
unsigned char _interlockedbittestandset_rel(long volatile *_BitBase,
                                            long _BitPos);
unsigned char _interlockedbittestandreset_acq(long volatile *_BitBase,
                                              long _BitPos);
unsigned char _interlockedbittestandreset_nf(long volatile *_BitBase,
                                             long _BitPos);
unsigned char _interlockedbittestandreset_rel(long volatile *_BitBase,
                                              long _BitPos);
#endif
/* NOTE(review): the guard opening this family was dropped in extraction;
   reconstructed to match the sibling families. Confirm against upstream. */
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
char _InterlockedOr8_acq(char volatile *_Value, char _Mask);
char _InterlockedOr8_nf(char volatile *_Value, char _Mask);
char _InterlockedOr8_rel(char volatile *_Value, char _Mask);
short _InterlockedOr16_acq(short volatile *_Value, short _Mask);
short _InterlockedOr16_nf(short volatile *_Value, short _Mask);
short _InterlockedOr16_rel(short volatile *_Value, short _Mask);
long _InterlockedOr_acq(long volatile *_Value, long _Mask);
long _InterlockedOr_nf(long volatile *_Value, long _Mask);
long _InterlockedOr_rel(long volatile *_Value, long _Mask);
__int64 _InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask);
#endif
/* NOTE(review): the guard opening this family was dropped in extraction;
   reconstructed to match the sibling families. Confirm against upstream. */
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
char _InterlockedXor8_acq(char volatile *_Value, char _Mask);
char _InterlockedXor8_nf(char volatile *_Value, char _Mask);
char _InterlockedXor8_rel(char volatile *_Value, char _Mask);
short _InterlockedXor16_acq(short volatile *_Value, short _Mask);
short _InterlockedXor16_nf(short volatile *_Value, short _Mask);
short _InterlockedXor16_rel(short volatile *_Value, short _Mask);
long _InterlockedXor_acq(long volatile *_Value, long _Mask);
long _InterlockedXor_nf(long volatile *_Value, long _Mask);
long _InterlockedXor_rel(long volatile *_Value, long _Mask);
__int64 _InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask);
#endif
/* NOTE(review): the guard opening this family was dropped in extraction;
   reconstructed to match the sibling families. Confirm against upstream. */
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
char _InterlockedExchange8_acq(char volatile *_Target, char _Value);
char _InterlockedExchange8_nf(char volatile *_Target, char _Value);
char _InterlockedExchange8_rel(char volatile *_Target, char _Value);
short _InterlockedExchange16_acq(short volatile *_Target, short _Value);
short _InterlockedExchange16_nf(short volatile *_Target, short _Value);
short _InterlockedExchange16_rel(short volatile *_Target, short _Value);
long _InterlockedExchange_acq(long volatile *_Target, long _Value);
long _InterlockedExchange_nf(long volatile *_Target, long _Value);
long _InterlockedExchange_rel(long volatile *_Target, long _Value);
__int64 _InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value);
#endif
/* NOTE(review): the guard opening this family was dropped in extraction;
   reconstructed to match the sibling families. Confirm against upstream. */
#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
char _InterlockedCompareExchange8_acq(char volatile *_Destination,
                                      char _Exchange, char _Comparand);
char _InterlockedCompareExchange8_nf(char volatile *_Destination,
                                     char _Exchange, char _Comparand);
char _InterlockedCompareExchange8_rel(char volatile *_Destination,
                                      char _Exchange, char _Comparand);
short _InterlockedCompareExchange16_acq(short volatile *_Destination,
                                        short _Exchange, short _Comparand);
short _InterlockedCompareExchange16_nf(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
short _InterlockedCompareExchange16_rel(short volatile *_Destination,
                                        short _Exchange, short _Comparand);
long _InterlockedCompareExchange_acq(long volatile *_Destination,
                                     long _Exchange, long _Comparand);
/* The "_nf" variant lost its trailing "long _Comparand);" in extraction;
   restored to match its siblings. */
long _InterlockedCompareExchange_nf(long volatile *_Destination,
                                    long _Exchange, long _Comparand);
long _InterlockedCompareExchange_rel(long volatile *_Destination,
                                     long _Exchange, long _Comparand);
/* The 64-bit "_acq" and "_rel" variants lost their _Exchange/_Comparand
   parameters in extraction; restored to match the "_nf" variant. */
__int64 _InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                          __int64 _Exchange,
                                          __int64 _Comparand);
__int64 _InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                         __int64 _Exchange,
                                         __int64 _Comparand);
__int64 _InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                          __int64 _Exchange,
                                          __int64 _Comparand);
#endif
/* void _mm_pause(void)
   Indicates that a spin-wait loop is being executed, for the purposes of
   optimizing power consumption and shared-core resource usage while the
   loop spins. */