#ifndef THIRD_PARTY_SNAPPY_OPENSOURCE_SNAPPY_STUBS_INTERNAL_H_
#define THIRD_PARTY_SNAPPY_OPENSOURCE_SNAPPY_STUBS_INTERNAL_H_

#include <string>

#include <assert.h>
#include <stdlib.h>
#include <string.h>

#ifdef HAVE_SYS_MMAN_H
#include <sys/mman.h>
#endif

#include "snappy-stubs-public.h"

#if defined(__x86_64__)
// Enable 64-bit optimized versions of some routines.
#define ARCH_K8 1
#endif

// Needed by OS X, among others.
#ifndef MAP_ANONYMOUS
#define MAP_ANONYMOUS MAP_ANON
#endif

// Pull in std::string, std::min, and the like. This is safe because this
// header file is never used from any public header files.
using namespace std;

// The size of an array, if known at compile time.
// Will give unexpected results if used on a pointer.
#define ARRAYSIZE(a) (sizeof(a) / sizeof(*(a)))

// Static branch-prediction hints.
#ifdef HAVE_BUILTIN_EXPECT
#define PREDICT_FALSE(x) (__builtin_expect(!!(x), 0))
#define PREDICT_TRUE(x) (__builtin_expect(!!(x), 1))
#else
#define PREDICT_FALSE(x) x
#define PREDICT_TRUE(x) x
#endif

// Minimal stand-ins for the gflags DEFINE_bool/DECLARE_bool macros:
// flags become plain global booleans.
#define DEFINE_bool(flag_name, default_value, description) \
    bool FLAGS_ ## flag_name = default_value
#define DECLARE_bool(flag_name) \
    extern bool FLAGS_ ## flag_name

namespace snappy {

static const uint32 kuint32max = static_cast<uint32>(0xFFFFFFFF);
static const int64 kint64max = static_cast<int64>(0x7FFFFFFFFFFFFFFFLL);
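// Usage sketch for the prediction macros above (hypothetical caller, not
// part of this header):
//
//   if (PREDICT_FALSE(input == NULL)) {  // error path, expected to be rare
//     return false;
//   }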
// Potentially unaligned loads and stores.

// x86 and PowerPC can simply do these loads and stores natively.

#if defined(__i386__) || defined(__x86_64__) || defined(__powerpc__)

#define UNALIGNED_LOAD16(_p) (*reinterpret_cast<const uint16 *>(_p))
#define UNALIGNED_LOAD32(_p) (*reinterpret_cast<const uint32 *>(_p))
#define UNALIGNED_LOAD64(_p) (*reinterpret_cast<const uint64 *>(_p))

#define UNALIGNED_STORE16(_p, _val) (*reinterpret_cast<uint16 *>(_p) = (_val))
#define UNALIGNED_STORE32(_p, _val) (*reinterpret_cast<uint32 *>(_p) = (_val))
#define UNALIGNED_STORE64(_p, _val) (*reinterpret_cast<uint64 *>(_p) = (_val))

// ARMv7 and newer support native unaligned accesses, but only of 16-bit
// and 32-bit values (not 64-bit). There is no simple #define that says
// "ARMv7 or higher", so we have to filter away all ARMv5 and ARMv6
// sub-architectures.

#elif defined(__arm__) && \
      !defined(__ARM_ARCH_4__) && \
      !defined(__ARM_ARCH_4T__) && \
      !defined(__ARM_ARCH_5__) && \
      !defined(__ARM_ARCH_5T__) && \
      !defined(__ARM_ARCH_5TE__) && \
      !defined(__ARM_ARCH_5TEJ__) && \
      !defined(__ARM_ARCH_6__) && \
      !defined(__ARM_ARCH_6J__) && \
      !defined(__ARM_ARCH_6K__) && \
      !defined(__ARM_ARCH_6Z__) && \
      !defined(__ARM_ARCH_6ZK__) && \
      !defined(__ARM_ARCH_6T2__)

#define UNALIGNED_LOAD16(_p) (*reinterpret_cast<const uint16 *>(_p))
#define UNALIGNED_LOAD32(_p) (*reinterpret_cast<const uint32 *>(_p))

#define UNALIGNED_STORE16(_p, _val) (*reinterpret_cast<uint16 *>(_p) = (_val))
#define UNALIGNED_STORE32(_p, _val) (*reinterpret_cast<uint32 *>(_p) = (_val))

// 64-bit accesses go through memcpy, which the compiler can lower to
// whatever the target supports.

inline uint64 UNALIGNED_LOAD64(const void *p) {
  uint64 t;
  memcpy(&t, p, sizeof t);
  return t;
}

inline void UNALIGNED_STORE64(void *p, uint64 v) {
  memcpy(p, &v, sizeof v);
}
#else

// These functions are provided for architectures that don't support
// unaligned loads and stores.

inline uint16 UNALIGNED_LOAD16(const void *p) {
  uint16 t;
  memcpy(&t, p, sizeof t);
  return t;
}

inline uint32 UNALIGNED_LOAD32(const void *p) {
  uint32 t;
  memcpy(&t, p, sizeof t);
  return t;
}

inline uint64 UNALIGNED_LOAD64(const void *p) {
  uint64 t;
  memcpy(&t, p, sizeof t);
  return t;
}

inline void UNALIGNED_STORE16(void *p, uint16 v) {
  memcpy(p, &v, sizeof v);
}

inline void UNALIGNED_STORE32(void *p, uint32 v) {
  memcpy(p, &v, sizeof v);
}

inline void UNALIGNED_STORE64(void *p, uint64 v) {
  memcpy(p, &v, sizeof v);
}

#endif
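// A minimal usage sketch (hypothetical caller): read a 32-bit value from an
// arbitrary byte offset without risking an alignment fault.
//
//   const char* buf = ...;
//   uint32 raw = UNALIGNED_LOAD32(buf + 3);  // offset need not be 4-aligned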
// This can be more efficient than UNALIGNED_LOAD64 + UNALIGNED_STORE64
// on some platforms, in particular ARM.
inline void UnalignedCopy64(const void *src, void *dst) {
  if (sizeof(void *) == 8) {
    // 64-bit platforms: one 64-bit copy.
    UNALIGNED_STORE64(dst, UNALIGNED_LOAD64(src));
  } else {
    // 32-bit platforms: two 32-bit copies.
    const char *src_char = reinterpret_cast<const char *>(src);
    char *dst_char = reinterpret_cast<char *>(dst);

    UNALIGNED_STORE32(dst_char, UNALIGNED_LOAD32(src_char));
    UNALIGNED_STORE32(dst_char + 4, UNALIGNED_LOAD32(src_char + 4));
  }
}
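// Usage sketch (hypothetical caller): copy eight bytes between buffers whose
// alignment is unknown.
//
//   UnalignedCopy64(src_buf + offset, dst_buf);  // offset may be odd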
// The following #defines guarantee that the byte-swap functions
// bswap_16/32/64 are available on big-endian targets.
#ifdef WORDS_BIGENDIAN

#ifdef HAVE_SYS_BYTEORDER_H
#include <sys/byteorder.h>
#endif

#ifdef HAVE_SYS_ENDIAN_H
#include <sys/endian.h>
#endif

#ifdef _MSC_VER
#include <stdlib.h>
#define bswap_16(x) _byteswap_ushort(x)
#define bswap_32(x) _byteswap_ulong(x)
#define bswap_64(x) _byteswap_uint64(x)

#elif defined(__APPLE__)
// Mac OS X / Darwin features.
#include <libkern/OSByteOrder.h>
#define bswap_16(x) OSSwapInt16(x)
#define bswap_32(x) OSSwapInt32(x)
#define bswap_64(x) OSSwapInt64(x)

#elif defined(HAVE_BYTESWAP_H)
#include <byteswap.h>

#elif defined(bswap32)
// FreeBSD defines bswap{16,32,64} in <sys/endian.h> (already #included).
#define bswap_16(x) bswap16(x)
#define bswap_32(x) bswap32(x)
#define bswap_64(x) bswap64(x)

#elif defined(BSWAP_64)
// Solaris 10 defines BSWAP_{16,32,64} in <sys/byteorder.h> (already #included).
#define bswap_16(x) BSWAP_16(x)
#define bswap_32(x) BSWAP_32(x)
#define bswap_64(x) BSWAP_64(x)

#else
// Portable fallbacks.

inline uint16 bswap_16(uint16 x) {
  return (x << 8) | (x >> 8);
}

inline uint32 bswap_32(uint32 x) {
  // Swap adjacent bytes, then adjacent 16-bit halves.
  x = ((x & 0xff00ff00UL) >> 8) | ((x & 0x00ff00ffUL) << 8);
  return (x >> 16) | (x << 16);
}

inline uint64 bswap_64(uint64 x) {
  // Swap bytes, then 16-bit halves, then 32-bit halves.
  x = ((x & 0xff00ff00ff00ff00ULL) >> 8) | ((x & 0x00ff00ff00ff00ffULL) << 8);
  x = ((x & 0xffff0000ffff0000ULL) >> 16) | ((x & 0x0000ffff0000ffffULL) << 16);
  return (x >> 32) | (x << 32);
}

#endif

#endif  // WORDS_BIGENDIAN
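// Worked example for the portable bswap_32 fallback above:
//   bswap_32(0x12345678)
//     after the byte swap:    0x34127856
//     after the 16-bit swap:  0x78563412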
// Conversions to/from little-endian storage order, plus unaligned loads and
// stores of little-endian values, independent of the host byte order.
class LittleEndian {
 public:
  // Conversion functions.
#ifdef WORDS_BIGENDIAN

  static uint16 FromHost16(uint16 x) { return bswap_16(x); }
  static uint16 ToHost16(uint16 x) { return bswap_16(x); }

  static uint32 FromHost32(uint32 x) { return bswap_32(x); }
  static uint32 ToHost32(uint32 x) { return bswap_32(x); }

  static bool IsLittleEndian() { return false; }

#else  // !defined(WORDS_BIGENDIAN)

  static uint16 FromHost16(uint16 x) { return x; }
  static uint16 ToHost16(uint16 x) { return x; }

  static uint32 FromHost32(uint32 x) { return x; }
  static uint32 ToHost32(uint32 x) { return x; }

  static bool IsLittleEndian() { return true; }

#endif  // !defined(WORDS_BIGENDIAN)

  // Functions to do unaligned loads and stores in little-endian order.
  static uint16 Load16(const void *p) {
    return ToHost16(UNALIGNED_LOAD16(p));
  }

  static void Store16(void *p, uint16 v) {
    UNALIGNED_STORE16(p, FromHost16(v));
  }

  static uint32 Load32(const void *p) {
    return ToHost32(UNALIGNED_LOAD32(p));
  }

  static void Store32(void *p, uint32 v) {
    UNALIGNED_STORE32(p, FromHost32(v));
  }
};
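// Round-trip sketch (hypothetical caller): store a value in little-endian
// wire format and read it back, portably across host byte orders.
//
//   char buf[4];
//   LittleEndian::Store32(buf, 0xCAFEBABE);  // buf = BE BA FE CA
//   uint32 v = LittleEndian::Load32(buf);    // v == 0xCAFEBABE again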
// Some bit-manipulation functions.
class Bits {
 public:
  // Return floor(log2(n)) for positive integer n.  Returns -1 iff n == 0.
  static int Log2Floor(uint32 n);

  // Return the index of the least significant set bit, 0-indexed.  Returns
  // an undefined value if n == 0.  FindLSBSetNonZero() is similar to ffs()
  // except that it is 0-indexed.
  static int FindLSBSetNonZero(uint32 n);
  static int FindLSBSetNonZero64(uint64 n);

 private:
  DISALLOW_COPY_AND_ASSIGN(Bits);
};
#ifdef HAVE_BUILTIN_CTZ

inline int Bits::Log2Floor(uint32 n) {
  // For n != 0, __builtin_clz(n) is in [0, 31], so 31 ^ clz(n) equals
  // 31 - clz(n) while avoiding the subtraction.
  return n == 0 ? -1 : 31 ^ __builtin_clz(n);
}

inline int Bits::FindLSBSetNonZero(uint32 n) {
  return __builtin_ctz(n);
}

inline int Bits::FindLSBSetNonZero64(uint64 n) {
  return __builtin_ctzll(n);
}
354 for (
int i = 4; i >= 0; --i) {
355 int shift = (1 << i);
356 uint32 x = value >> shift;
366 inline int Bits::FindLSBSetNonZero(uint32 n) {
368 for (
int i = 4, shift = 1 << 4; i >= 0; --i) {
369 const uint32 x = n << shift;
380 inline int Bits::FindLSBSetNonZero64(uint64 n) {
381 const uint32 bottombits =
static_cast<uint32
>(n);
382 if (bottombits == 0) {
384 return 32 + FindLSBSetNonZero(static_cast<uint32>(n >> 32));
386 return FindLSBSetNonZero(bottombits);
390 #endif // End portable versions. 396 static const int kMax32 = 5;
403 static const char* Parse32WithLimit(
const char* ptr,
const char* limit,
409 static char* Encode32(
char* ptr, uint32 v);
412 static void Append32(
string* s, uint32 value);
inline const char* Varint::Parse32WithLimit(const char* p,
                                            const char* l,
                                            uint32* OUTPUT) {
  const unsigned char* ptr = reinterpret_cast<const unsigned char*>(p);
  const unsigned char* limit = reinterpret_cast<const unsigned char*>(l);
  uint32 b, result;
  // Each byte contributes its low seven bits; the high bit marks continuation.
  if (ptr >= limit) return NULL;
  b = *(ptr++); result = b & 127;          if (b < 128) goto done;
  if (ptr >= limit) return NULL;
  b = *(ptr++); result |= (b & 127) <<  7; if (b < 128) goto done;
  if (ptr >= limit) return NULL;
  b = *(ptr++); result |= (b & 127) << 14; if (b < 128) goto done;
  if (ptr >= limit) return NULL;
  b = *(ptr++); result |= (b & 127) << 21; if (b < 128) goto done;
  if (ptr >= limit) return NULL;
  b = *(ptr++); result |= (b & 127) << 28; if (b < 16) goto done;
  return NULL;       // Value is too long to be a varint32.
 done:
  *OUTPUT = result;
  return reinterpret_cast<const char*>(ptr);
}
inline char* Varint::Encode32(char* sptr, uint32 v) {
  // Operate on characters as unsigneds.
  unsigned char* ptr = reinterpret_cast<unsigned char*>(sptr);
  static const int B = 128;  // Continuation bit.
  if (v < (1<<7)) {
    *(ptr++) = v;
  } else if (v < (1<<14)) {
    *(ptr++) = v | B;
    *(ptr++) = v>>7;
  } else if (v < (1<<21)) {
    *(ptr++) = v | B;
    *(ptr++) = (v>>7) | B;
    *(ptr++) = v>>14;
  } else if (v < (1<<28)) {
    *(ptr++) = v | B;
    *(ptr++) = (v>>7) | B;
    *(ptr++) = (v>>14) | B;
    *(ptr++) = v>>21;
  } else {
    *(ptr++) = v | B;
    *(ptr++) = (v>>7) | B;
    *(ptr++) = (v>>14) | B;
    *(ptr++) = (v>>21) | B;
    *(ptr++) = v>>28;
  }
  return reinterpret_cast<char*>(ptr);
}
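// Worked example: Encode32 writes 300 as the two bytes 0xAC 0x02
// (0xAC = low seven bits 44 plus the continuation bit; 0x02 = 300 >> 7),
// and Parse32WithLimit reverses it (hypothetical caller):
//
//   char buf[Varint::kMax32];
//   char* end = Varint::Encode32(buf, 300);    // end - buf == 2
//   uint32 out;
//   Varint::Parse32WithLimit(buf, end, &out);  // out == 300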
// If you know the internal layout of the std::string in use, you can
// replace this function with one that resizes the string without filling
// the new space with zeros (if applicable); that is non-standard but faster.
inline void STLStringResizeUninitialized(string* s, size_t new_size) {
  s->resize(new_size);
}

// Return a mutable char* pointing to a string's internal buffer, which may
// not be null-terminated.  Writing through this pointer will modify the
// string.
//
// string_as_array(&str)[i] is valid for 0 <= i < str.size() until the next
// call to a string method that invalidates iterators.
inline char* string_as_array(string* str) {
  return str->empty() ? NULL : &*str->begin();
}
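// Typical pairing (hypothetical caller): size a string, then fill it in
// place through the raw pointer.
//
//   string s;
//   STLStringResizeUninitialized(&s, n);
//   memcpy(string_as_array(&s), src, n);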
}  // namespace snappy

#endif  // THIRD_PARTY_SNAPPY_OPENSOURCE_SNAPPY_STUBS_INTERNAL_H_