Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Kitware
GitHub Repository: Kitware/CMake
Path: blob/master/Utilities/cmlibrhash/librhash/byte_order.h
3150 views
1
/* byte_order.h - byte order detection and byte swapping helpers for librhash */
#ifndef BYTE_ORDER_H
#define BYTE_ORDER_H
#include "ustd.h"
#include <stdlib.h>

/* legacy glibc endian detection, superseded below (kept disabled) */
#if 0
#if defined(__GLIBC__)
# include <endian.h>
#endif
#endif

/* BSD-family systems expose endianness macros via these system headers */
#if defined(__FreeBSD__) || defined(__DragonFly__) || defined(__APPLE__)
# include <sys/types.h>
#elif defined (__NetBSD__) || defined(__OpenBSD__)
# include <sys/param.h>
#endif


#ifdef __cplusplus
extern "C" {
#endif
23
24
/* if x86 compatible cpu */
#if defined(i386) || defined(__i386__) || defined(__i486__) || \
	defined(__i586__) || defined(__i686__) || defined(__pentium__) || \
	defined(__pentiumpro__) || defined(__pentium4__) || \
	defined(__nocona__) || defined(prescott) || defined(__core2__) || \
	defined(__k6__) || defined(__k8__) || defined(__athlon__) || \
	defined(__amd64) || defined(__amd64__) || \
	defined(__x86_64) || defined(__x86_64__) || defined(_M_IX86) || \
	defined(_M_AMD64) || defined(_M_IA64) || defined(_M_X64)
/* detect if x86-64 instruction set is supported */
# if defined(_LP64) || defined(__LP64__) || defined(__x86_64) || \
	defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
#  define CPU_X64
# else
#  define CPU_IA32
# endif
#endif

/* endianness comes from CMake's KWIML ABI header instead of
 * librhash's own CPU-based heuristics (disabled further below) */
#include <cm3p/kwiml/abi.h>
#if KWIML_ABI_ENDIAN_ID == KWIML_ABI_ENDIAN_ID_LITTLE
# define CPU_LITTLE_ENDIAN
# define IS_BIG_ENDIAN 0
# define IS_LITTLE_ENDIAN 1
#elif KWIML_ABI_ENDIAN_ID == KWIML_ABI_ENDIAN_ID_BIG
# define CPU_BIG_ENDIAN
# define IS_BIG_ENDIAN 1
# define IS_LITTLE_ENDIAN 0
#endif
52
53
/* librhash's original endianness detection, disabled in this CMake bundle
 * in favor of the KWIML-based detection above */
#if 0
#define RHASH_BYTE_ORDER_LE 1234
#define RHASH_BYTE_ORDER_BE 4321

/* first try the toolchain-provided byte-order macros */
#if (defined(__BYTE_ORDER) && defined(__LITTLE_ENDIAN) && __BYTE_ORDER == __LITTLE_ENDIAN) || \
    (defined(__BYTE_ORDER__) && defined(__ORDER_LITTLE_ENDIAN__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_LE
#elif (defined(__BYTE_ORDER) && defined(__BIG_ENDIAN) && __BYTE_ORDER == __BIG_ENDIAN) || \
    (defined(__BYTE_ORDER__) && defined(__ORDER_BIG_ENDIAN__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_BE
#elif defined(_BYTE_ORDER)
# if defined(_LITTLE_ENDIAN) && (_BYTE_ORDER == _LITTLE_ENDIAN)
#  define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_LE
# elif defined(_BIG_ENDIAN) && (_BYTE_ORDER == _BIG_ENDIAN)
#  define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_BE
# endif
#elif defined(__sun) && defined(_LITTLE_ENDIAN)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_LE
#elif defined(__sun) && defined(_BIG_ENDIAN)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_BE
#endif

/* try detecting endianness by CPU */
#ifdef RHASH_BYTE_ORDER
#elif defined(CPU_IA32) || defined(CPU_X64) || defined(__ia64) || defined(__ia64__) || \
    defined(__alpha__) || defined(_M_ALPHA) || defined(vax) || defined(MIPSEL) || \
    defined(_ARM_) || defined(__arm__) || defined(_M_ARM64) || defined(_M_ARM64EC) || \
    defined(__loongarch64) || defined(__sw_64)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_LE
#elif defined(__sparc) || defined(__sparc__) || defined(sparc) || \
    defined(_ARCH_PPC) || defined(_ARCH_PPC64) || defined(_POWER) || \
    defined(__POWERPC__) || defined(POWERPC) || defined(__powerpc) || \
    defined(__powerpc__) || defined(__powerpc64__) || defined(__ppc__) || \
    defined(__hpux) || defined(_MIPSEB) || defined(mc68000) || \
    defined(__s390__) || defined(__s390x__) || defined(sel) || defined(__hppa__)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_BE
#else
# error "Can't detect CPU architecture"
#endif

#define IS_BIG_ENDIAN (RHASH_BYTE_ORDER == RHASH_BYTE_ORDER_BE)
#define IS_LITTLE_ENDIAN (RHASH_BYTE_ORDER == RHASH_BYTE_ORDER_LE)
#endif

/* compilers without __has_builtin get a stub that reports "not available" */
#ifndef __has_builtin
# define __has_builtin(x) 0
#endif
100
101
/* test whether pointer/address p is 4- or 8-byte aligned */
#define IS_ALIGNED_32(p) (0 == (3 & (uintptr_t)(p)))
#define IS_ALIGNED_64(p) (0 == (7 & (uintptr_t)(p)))

/* compiler-specific attribute to align an object to n bytes */
#if defined(_MSC_VER)
#define ALIGN_ATTR(n) __declspec(align(n))
#elif defined(__GNUC__)
#define ALIGN_ATTR(n) __attribute__((aligned (n)))
#else
#define ALIGN_ATTR(n) /* nothing */
#endif


/* portable 64-bit integer literal suffix */
#if defined(_MSC_VER) || defined(__BORLANDC__)
#define I64(x) x##ui64
#else
#define I64(x) x##ULL
#endif

/* compiler-specific inline keyword (plain `inline` only outside strict ANSI mode) */
#if defined(_MSC_VER)
#define RHASH_INLINE __inline
#elif defined(__GNUC__) && !defined(__STRICT_ANSI__)
#define RHASH_INLINE inline
#elif defined(__GNUC__)
#define RHASH_INLINE __inline__
#else
#define RHASH_INLINE
#endif
128
129
/* define rhash_ctz - count trailing zero bits */
#if (defined(__GNUC__) && (__GNUC__ >= 4 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))) || \
    (defined(__clang__) && __has_builtin(__builtin_ctz))
/* GCC >= 3.4 or clang */
# define rhash_ctz(x) __builtin_ctz(x)
#else
unsigned rhash_ctz(unsigned); /* define as function */
#endif
137
138
/* byte-swapping copy helpers; `index` is a byte offset into `to`
 * (implementations live in byte_order.c — presumably they copy `length`
 * bytes while swapping each 32/64-bit word; verify against that file) */
void rhash_swap_copy_str_to_u32(void* to, int index, const void* from, size_t length);
void rhash_swap_copy_str_to_u64(void* to, int index, const void* from, size_t length);
void rhash_swap_copy_u64_to_str(void* to, const void* from, size_t length);
void rhash_u32_mem_swap(unsigned* p, int length_in_u32);
143
/* bswap definitions: prefer compiler intrinsics, fall back to generic code */
#if (defined(__GNUC__) && (__GNUC__ >= 4) && (__GNUC__ > 4 || __GNUC_MINOR__ >= 3)) || \
    (defined(__clang__) && __has_builtin(__builtin_bswap32) && __has_builtin(__builtin_bswap64))
/* GCC >= 4.3 or clang */
# define bswap_32(x) __builtin_bswap32(x)
# define bswap_64(x) __builtin_bswap64(x)
#elif (_MSC_VER > 1300) && (defined(CPU_IA32) || defined(CPU_X64)) /* MS VC */
# define bswap_32(x) _byteswap_ulong((unsigned long)x)
# define bswap_64(x) _byteswap_uint64((__int64)x)
#else
/* fallback to generic bswap definition */
static RHASH_INLINE uint32_t bswap_32(uint32_t x)
{
# if defined(__GNUC__) && defined(CPU_IA32) && !defined(__i386__) && !defined(RHASH_NO_ASM)
	__asm("bswap\t%0" : "=r" (x) : "0" (x)); /* gcc x86 version */
	return x;
# else
	/* swap adjacent bytes, then swap 16-bit halves */
	x = ((x << 8) & 0xFF00FF00u) | ((x >> 8) & 0x00FF00FFu);
	return (x >> 16) | (x << 16);
# endif
}
static RHASH_INLINE uint64_t bswap_64(uint64_t x)
{
	/* swap the two 32-bit halves, byte-swapping each via bswap_32 */
	union {
		uint64_t ll;
		uint32_t l[2];
	} w, r;
	w.ll = x;
	r.l[0] = bswap_32(w.l[1]);
	r.l[1] = bswap_32(w.l[0]);
	return r.ll;
}
#endif /* bswap definitions */
176
177
/* big/little-endian <-> machine-endian conversions and endian-aware copies;
 * on the matching endianness each is a plain pass-through or memcpy,
 * on the opposite endianness it byte-swaps via bswap_* / rhash_swap_copy_* */
#if IS_BIG_ENDIAN
# define be2me_32(x) (x)
# define be2me_64(x) (x)
# define le2me_32(x) bswap_32(x)
# define le2me_64(x) bswap_64(x)

# define be32_copy(to, index, from, length) memcpy((char*)(to) + (index), (from), (length))
# define le32_copy(to, index, from, length) rhash_swap_copy_str_to_u32((to), (index), (from), (length))
# define be64_copy(to, index, from, length) memcpy((char*)(to) + (index), (from), (length))
# define le64_copy(to, index, from, length) rhash_swap_copy_str_to_u64((to), (index), (from), (length))
# define me64_to_be_str(to, from, length) memcpy((to), (from), (length))
# define me64_to_le_str(to, from, length) rhash_swap_copy_u64_to_str((to), (from), (length))

#else /* IS_BIG_ENDIAN */
# define be2me_32(x) bswap_32(x)
# define be2me_64(x) bswap_64(x)
# define le2me_32(x) (x)
# define le2me_64(x) (x)

# define be32_copy(to, index, from, length) rhash_swap_copy_str_to_u32((to), (index), (from), (length))
# define le32_copy(to, index, from, length) memcpy((char*)(to) + (index), (from), (length))
# define be64_copy(to, index, from, length) rhash_swap_copy_str_to_u64((to), (index), (from), (length))
# define le64_copy(to, index, from, length) memcpy((char*)(to) + (index), (from), (length))
# define me64_to_be_str(to, from, length) rhash_swap_copy_u64_to_str((to), (from), (length))
# define me64_to_le_str(to, from, length) memcpy((to), (from), (length))
#endif /* IS_BIG_ENDIAN */
203
204
/* ROTL/ROTR macros rotate a 32/64-bit word left/right by n bits
 * (n must be in 1..31 / 1..63: the (32 - n) / (64 - n) shift is UB at n == 0) */
#define ROTL32(dword, n) ((dword) << (n) ^ ((dword) >> (32 - (n))))
#define ROTR32(dword, n) ((dword) >> (n) ^ ((dword) << (32 - (n))))
#define ROTL64(qword, n) ((qword) << (n) ^ ((qword) >> (64 - (n))))
#define ROTR64(qword, n) ((qword) >> (n) ^ ((qword) << (64 - (n))))

/* feature bit passed to has_cpu_feature() */
#define CPU_FEATURE_SSE4_2 (52)

/* runtime CPUID-based feature detection is only available with
 * GCC >= 4.3 on x86/x86-64; elsewhere every feature reports absent */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) \
	&& (defined(CPU_X64) || defined(CPU_IA32))
# define HAS_INTEL_CPUID
int has_cpu_feature(unsigned feature_bit);
#else
# define has_cpu_feature(x) (0)
#endif
219
220
#ifdef __cplusplus
} /* extern "C" */
#endif /* __cplusplus */

#endif /* BYTE_ORDER_H */