/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the Windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
void __debugbreak(void);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __int2c(void);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
static __inline__
unsigned int __popcnt(unsigned int);
static __inline__
unsigned short __popcnt16(unsigned short);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned long __readfsdword(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
void *__slwpcb(void);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
void __ud2(void);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned long __cdecl _byteswap_ulong(unsigned long);
unsigned short __cdecl _byteswap_ushort(unsigned short);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
static __inline__
long _InterlockedAnd(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedAnd16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedAnd8(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset(long volatile *, long);
static __inline__
unsigned char _interlockedbittestandset(long volatile *, long);
static __inline__
long __cdecl _InterlockedCompareExchange(long volatile *_Destination,
                                         long _Exchange, long _Comparand);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
static __inline__
short _InterlockedCompareExchange16(short volatile *_Destination,
                                    short _Exchange, short _Comparand);
static __inline__
__int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
                                      __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
static __inline__
char _InterlockedCompareExchange8(char volatile *_Destination, char _Exchange,
                                  char _Comparand);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
static __inline__
long __cdecl _InterlockedDecrement(long volatile *_Addend);
static __inline__
short _InterlockedDecrement16(short volatile *_Addend);
long _InterlockedExchange(long volatile *_Target, long _Value);
static __inline__
short _InterlockedExchange16(short volatile *_Target, short _Value);
static __inline__
char _InterlockedExchange8(char volatile *_Target, char _Value);
static __inline__
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
static __inline__
short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
static __inline__
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
static __inline__
long __cdecl _InterlockedIncrement(long volatile *_Addend);
static __inline__
short _InterlockedIncrement16(short volatile *_Addend);
static __inline__
long _InterlockedOr(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedOr16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedOr8(char volatile *_Value, char _Mask);
static __inline__
long _InterlockedXor(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedXor16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedXor8(char volatile *_Value, char _Mask);
void __cdecl _invpcid(unsigned int, void *);
static __inline__
unsigned long __cdecl _lrotl(unsigned long, int);
static __inline__
unsigned long __cdecl _lrotr(unsigned long, int);
static __inline__
void _ReadBarrier(void);
static __inline__
void _ReadWriteBarrier(void);
static __inline__
void *_ReturnAddress(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
static __inline__
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
static __inline__
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__
void _WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __popcnt64(unsigned __int64);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer(void *volatile *_Destination,
                                         void *_Exchange, void *_Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
static __inline__
__int64 _mul128(__int64 _Multiplier, __int64 _Multiplicand,
                __int64 *_HighProduct);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmpex(jmp_buf);
#endif
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
/*
 * Multiply two 64-bit integers and obtain a 128-bit result.
 * The low half is returned directly and the high half is in an out parameter.
 */
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_umul128(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand,
         unsigned __int64 *_HighProduct) {
  unsigned __int128 _FullProduct =
      (unsigned __int128)_Multiplier * (unsigned __int128)_Multiplicand;
  *_HighProduct = _FullProduct >> 64;
  return _FullProduct;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__umulh(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand) {
  unsigned __int128 _FullProduct =
      (unsigned __int128)_Multiplier * (unsigned __int128)_Multiplicand;
  return _FullProduct >> 64;
}
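/* Illustrative usage (editor's sketch, not part of the original header):
 * computing a full 128-bit product of two 64-bit values. The variable names
 * are hypothetical.
 *
 *   unsigned __int64 __hi;
 *   unsigned __int64 __lo = _umul128(0xDEADBEEFCAFEBABEULL, 10ULL, &__hi);
 *   // __lo holds bits 0..63 of the product, __hi holds bits 64..127;
 *   // __umulh(a, b) returns just the high half.
 */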

#endif /* __x86_64__ */

/*----------------------------------------------------------------------------*\
|* Multiplication
\*----------------------------------------------------------------------------*/
static __inline__ __int64 __DEFAULT_FN_ATTRS
__emul(int __in1, int __in2) {
  return (__int64)__in1 * (__int64)__in2;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__emulu(unsigned int __in1, unsigned int __in2) {
  return (unsigned __int64)__in1 * (unsigned __int64)__in2;
}
/*----------------------------------------------------------------------------*\
|* Bit Twiddling
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_rotl8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_rotr8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
_rotl16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
_rotr16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
_rotl(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
_rotr(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
_lrotl(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
_lrotr(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_rotl64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_rotr64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
}
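/* Illustrative usage (editor's sketch): the rotate helpers mask the shift
 * count to the operand width, so a rotate by 0 (or by the full width) is an
 * identity. A few worked values:
 *
 *   unsigned char  __b = _rotl8(0x81, 1);        // 0x03: bit 7 wraps to bit 0
 *   unsigned short __w = _rotr16(0x0001, 4);     // 0x1000
 *   unsigned int   __d = _rotl(0x80000000u, 1);  // 0x00000001
 */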
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzl(_Mask);
  return 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 31 - __builtin_clzl(_Mask);
  return 1;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__popcnt16(unsigned short _Value) {
  return __builtin_popcount((int)_Value);
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
__popcnt(unsigned int _Value) {
  return __builtin_popcount(_Value);
}
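/* Illustrative usage (editor's sketch): _BitScanReverse stores the index of
 * the highest set bit (floor(log2)) and returns 0 on a zero mask, so the
 * return value must be checked before the index is used.
 *
 *   unsigned long __idx;
 *   if (_BitScanReverse(&__idx, 0x1000))      // sets __idx to 12
 *     ...use __idx...
 *   unsigned int __bits = __popcnt(0xF0F0u);  // 8 set bits
 */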
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
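/* Illustrative usage (editor's sketch): _interlockedbittestandset atomically
 * sets a bit and returns its previous value, which is enough for a simple
 * test-and-set spinlock. __lock is a hypothetical variable.
 *
 *   static long __lock = 0;
 *   while (_interlockedbittestandset(&__lock, 0))
 *     ;  // spin while bit 0 was already set
 *   ...critical section...
 *   _interlockedbittestandreset(&__lock, 0);  // release (declared above)
 */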
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzll(_Mask);
  return 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 63 - __builtin_clzll(_Mask);
  return 1;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__popcnt64(unsigned __int64 _Value) {
  return __builtin_popcountll(_Value);
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
#endif
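/* Illustrative usage (editor's sketch): the ExchangeAdd family returns the
 * value the addend held *before* the addition, so a fetch-and-increment
 * counter looks like this (__counter is hypothetical):
 *
 *   static short __counter = 0;
 *   short __prev = _InterlockedExchangeAdd16(&__counter, 1);
 *   // __prev is the pre-increment value; __counter is now __prev + 1.
 */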
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Sub
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
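/* Illustrative usage (editor's sketch): compare-exchange returns the value
 * observed at the destination, so a lock-free update retries until the
 * observed value matches the one the replacement was computed from. __val is
 * a hypothetical `short volatile *`.
 *
 *   short __old, __new;
 *   do {
 *     __old = *__val;
 *     __new = __old * 2 + 1;  // any pure transformation
 *   } while (_InterlockedCompareExchange16(__val, __new, __old) != __old);
 */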
/*----------------------------------------------------------------------------*\
|* Barriers
\*----------------------------------------------------------------------------*/
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void) {
  __atomic_signal_fence(__ATOMIC_SEQ_CST);
}
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void) {
  __atomic_signal_fence(__ATOMIC_SEQ_CST);
}
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void) {
  __atomic_signal_fence(__ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__faststorefence(void) {
  __atomic_thread_fence(__ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
    (__offset))

#ifdef __i386__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
/* __readfsdword is declared static __inline__ above, so it needs a definition
   here as well; it mirrors the other FS readers. */
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readfsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned long, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readgsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned char, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readgsword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned short, __offset);
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readgsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned long, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readgsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned __int64, __offset);
}
#endif
#undef __ptr_to_addr_space
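/* Illustrative usage (editor's sketch): on 32-bit Windows the FS segment maps
 * the current thread's TEB, whose leading NT_TIB block keeps a self pointer
 * at offset 0x18, so the classic TEB lookup is:
 *
 *   unsigned long __teb = __readfsdword(0x18);  // NT_TIB.Self on x86
 *
 * The x86-64 analogue goes through GS: __readgsqword(0x30).
 */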
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosb(unsigned char *__dst, unsigned char __x, size_t __n) {
  __asm__("rep stosb" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
#endif
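/* Illustrative usage (editor's sketch): the rep-string wrappers behave like
 * width-specialized memset/memcpy with the count given in elements, not
 * bytes. __buf and __copy are hypothetical.
 *
 *   unsigned char __buf[64], __copy[64];
 *   __stosb(__buf, 0xCC, sizeof(__buf));    // fill 64 bytes with 0xCC
 *   __movsb(__copy, __buf, sizeof(__buf));  // copy the 64 bytes
 */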
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
#endif

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
static __inline__ void * __DEFAULT_FN_ATTRS
_AddressOfReturnAddress(void) {
  return (void*)((char*)__builtin_frame_address(0) + sizeof(void*));
}
static __inline__ void * __DEFAULT_FN_ATTRS
_ReturnAddress(void) {
  return __builtin_return_address(0);
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
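/* Illustrative usage (editor's sketch): CPUID leaf 0 returns the maximum
 * basic leaf in EAX and the 12-byte vendor string in EBX, EDX, ECX (in that
 * order). __info and __vendor are hypothetical.
 *
 *   int __info[4];
 *   char __vendor[13];
 *   __cpuid(__info, 0);
 *   __builtin_memcpy(__vendor + 0, &__info[1], 4);  // EBX
 *   __builtin_memcpy(__vendor + 4, &__info[3], 4);  // EDX
 *   __builtin_memcpy(__vendor + 8, &__info[2], 4);  // ECX
 *   __vendor[12] = '\0';  // e.g. "GenuineIntel" or "AuthenticAMD"
 */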
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
#endif

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
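/* Illustrative usage (editor's sketch): RDMSR is privileged, so this only
 * works at CPL 0 (e.g. inside a driver); in user mode it raises #GP.
 * 0xC0000080 is the IA32_EFER MSR number.
 *
 *   unsigned __int64 __efer = __readmsr(0xC0000080);  // IA32_EFER
 *   // bit 8 (LME) set => long mode enabled
 */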

static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */