CoCalc provides the best real-time collaborative environment for Jupyter Notebooks, LaTeX documents, and SageMath, scalable from individual users to large groups and classes!
CoCalc provides the best real-time collaborative environment for Jupyter Notebooks, LaTeX documents, and SageMath, scalable from individual users to large groups and classes!
Path: blob/master/ext/basis_universal/basisu_containers.h
Views: 1401
// basisu_containers.h1#pragma once23#undef new45#include <stdlib.h>6#include <stdio.h>7#include <stdint.h>8#include <assert.h>9#include <algorithm>1011#if defined(__linux__) && !defined(ANDROID)12// Only for malloc_usable_size() in basisu_containers_impl.h13#include <malloc.h>14#define HAS_MALLOC_USABLE_SIZE 115#endif1617// Set to 1 to always check vector operator[], front(), and back() even in release.18#define BASISU_VECTOR_FORCE_CHECKING 01920// If 1, the vector container will not query the CRT to get the size of resized memory blocks.21#define BASISU_VECTOR_DETERMINISTIC 12223#ifdef _MSC_VER24#define BASISU_FORCE_INLINE __forceinline25#else26#define BASISU_FORCE_INLINE inline27#endif2829namespace basisu30{31enum { cInvalidIndex = -1 };3233namespace helpers34{35inline bool is_power_of_2(uint32_t x) { return x && ((x & (x - 1U)) == 0U); }36inline bool is_power_of_2(uint64_t x) { return x && ((x & (x - 1U)) == 0U); }37template<class T> const T& minimum(const T& a, const T& b) { return (b < a) ? b : a; }38template<class T> const T& maximum(const T& a, const T& b) { return (a < b) ? b : a; }3940inline uint32_t floor_log2i(uint32_t v)41{42uint32_t l = 0;43while (v > 1U)44{45v >>= 1;46l++;47}48return l;49}5051inline uint32_t next_pow2(uint32_t val)52{53val--;54val |= val >> 16;55val |= val >> 8;56val |= val >> 4;57val |= val >> 2;58val |= val >> 1;59return val + 1;60}6162inline uint64_t next_pow2(uint64_t val)63{64val--;65val |= val >> 32;66val |= val >> 16;67val |= val >> 8;68val |= val >> 4;69val |= val >> 2;70val |= val >> 1;71return val + 1;72}73} // namespace helpers7475template <typename T>76inline T* construct(T* p)77{78return new (static_cast<void*>(p)) T;79}8081template <typename T, typename U>82inline T* construct(T* p, const U& init)83{84return new (static_cast<void*>(p)) T(init);85}8687template <typename T>88inline void construct_array(T* p, size_t n)89{90T* q = p + n;91for (; p != q; ++p)92new (static_cast<void*>(p)) T;93}9495template <typename T, typename U>96inline void construct_array(T* p, size_t n, const U& init)97{98T* q = p + n;99for (; p != q; ++p)100new (static_cast<void*>(p)) T(init);101}102103template <typename T>104inline void destruct(T* p)105{106(void)p;107p->~T();108}109110template <typename T> inline void destruct_array(T* p, size_t n)111{112T* q = p + n;113for (; p != q; ++p)114p->~T();115}116117template<typename T> struct int_traits { enum { cMin = INT32_MIN, cMax = INT32_MAX, cSigned = true }; };118119template<> struct int_traits<int8_t> { enum { cMin = INT8_MIN, cMax = INT8_MAX, cSigned = true }; };120template<> struct int_traits<int16_t> { enum { cMin = INT16_MIN, cMax = INT16_MAX, cSigned = true }; };121template<> struct int_traits<int32_t> { enum { cMin = INT32_MIN, cMax = INT32_MAX, cSigned = true }; };122123template<> struct int_traits<uint8_t> { enum { cMin = 0, cMax = UINT8_MAX, cSigned = false }; };124template<> struct int_traits<uint16_t> { enum { cMin = 0, cMax = UINT16_MAX, cSigned = false }; };125template<> struct int_traits<uint32_t> { enum { cMin = 0, cMax = UINT32_MAX, cSigned = false }; };126127template<typename T>128struct scalar_type129{130enum { cFlag = false };131static inline void construct(T* p) { basisu::construct(p); }132static inline void construct(T* p, const T& init) { basisu::construct(p, init); }133static inline void construct_array(T* p, size_t n) { basisu::construct_array(p, n); }134static inline void destruct(T* p) { basisu::destruct(p); }135static inline void destruct_array(T* p, size_t n) { basisu::destruct_array(p, n); }136};137138template<typename T> struct scalar_type<T*>139{140enum { cFlag = true };141static inline void construct(T** p) { memset(p, 0, sizeof(T*)); }142static inline void construct(T** p, T* init) { *p = init; }143static inline void construct_array(T** p, size_t n) { memset(p, 0, sizeof(T*) * n); }144static inline void destruct(T** p) { p; }145static inline void destruct_array(T** p, size_t n) { p, n; }146};147148#define BASISU_DEFINE_BUILT_IN_TYPE(X) \149template<> struct scalar_type<X> { \150enum { cFlag = true }; \151static inline void construct(X* p) { memset(p, 0, sizeof(X)); } \152static inline void construct(X* p, const X& init) { memcpy(p, &init, sizeof(X)); } \153static inline void construct_array(X* p, size_t n) { memset(p, 0, sizeof(X) * n); } \154static inline void destruct(X* p) { p; } \155static inline void destruct_array(X* p, size_t n) { p, n; } };156157BASISU_DEFINE_BUILT_IN_TYPE(bool)158BASISU_DEFINE_BUILT_IN_TYPE(char)159BASISU_DEFINE_BUILT_IN_TYPE(unsigned char)160BASISU_DEFINE_BUILT_IN_TYPE(short)161BASISU_DEFINE_BUILT_IN_TYPE(unsigned short)162BASISU_DEFINE_BUILT_IN_TYPE(int)163BASISU_DEFINE_BUILT_IN_TYPE(unsigned int)164BASISU_DEFINE_BUILT_IN_TYPE(long)165BASISU_DEFINE_BUILT_IN_TYPE(unsigned long)166#ifdef __GNUC__167BASISU_DEFINE_BUILT_IN_TYPE(long long)168BASISU_DEFINE_BUILT_IN_TYPE(unsigned long long)169#else170BASISU_DEFINE_BUILT_IN_TYPE(__int64)171BASISU_DEFINE_BUILT_IN_TYPE(unsigned __int64)172#endif173BASISU_DEFINE_BUILT_IN_TYPE(float)174BASISU_DEFINE_BUILT_IN_TYPE(double)175BASISU_DEFINE_BUILT_IN_TYPE(long double)176177#undef BASISU_DEFINE_BUILT_IN_TYPE178179template<typename T>180struct bitwise_movable { enum { cFlag = false }; };181182#define BASISU_DEFINE_BITWISE_MOVABLE(Q) template<> struct bitwise_movable<Q> { enum { cFlag = true }; };183184template<typename T>185struct bitwise_copyable { enum { cFlag = false }; };186187#define BASISU_DEFINE_BITWISE_COPYABLE(Q) template<> struct bitwise_copyable<Q> { enum { cFlag = true }; };188189#define BASISU_IS_POD(T) __is_pod(T)190191#define BASISU_IS_SCALAR_TYPE(T) (scalar_type<T>::cFlag)192193#if defined(__GNUC__) && __GNUC__<5194#define BASISU_IS_TRIVIALLY_COPYABLE(...) __has_trivial_copy(__VA_ARGS__)195#else196#define BASISU_IS_TRIVIALLY_COPYABLE(...) std::is_trivially_copyable<__VA_ARGS__>::value197#endif198199// TODO: clean this up200#define BASISU_IS_BITWISE_COPYABLE(T) (BASISU_IS_SCALAR_TYPE(T) || BASISU_IS_POD(T) || BASISU_IS_TRIVIALLY_COPYABLE(T) || (bitwise_copyable<T>::cFlag))201202#define BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) (BASISU_IS_BITWISE_COPYABLE(T) || (bitwise_movable<T>::cFlag))203204#define BASISU_HAS_DESTRUCTOR(T) ((!scalar_type<T>::cFlag) && (!__is_pod(T)))205206typedef char(&yes_t)[1];207typedef char(&no_t)[2];208209template <class U> yes_t class_test(int U::*);210template <class U> no_t class_test(...);211212template <class T> struct is_class213{214enum { value = (sizeof(class_test<T>(0)) == sizeof(yes_t)) };215};216217template <typename T> struct is_pointer218{219enum { value = false };220};221222template <typename T> struct is_pointer<T*>223{224enum { value = true };225};226227struct empty_type { };228229BASISU_DEFINE_BITWISE_COPYABLE(empty_type);230BASISU_DEFINE_BITWISE_MOVABLE(empty_type);231232template<typename T> struct rel_ops233{234friend bool operator!=(const T& x, const T& y) { return (!(x == y)); }235friend bool operator> (const T& x, const T& y) { return (y < x); }236friend bool operator<=(const T& x, const T& y) { return (!(y < x)); }237friend bool operator>=(const T& x, const T& y) { return (!(x < y)); }238};239240struct elemental_vector241{242void* m_p;243uint32_t m_size;244uint32_t m_capacity;245246typedef void (*object_mover)(void* pDst, void* pSrc, uint32_t num);247248bool increase_capacity(uint32_t min_new_capacity, bool grow_hint, uint32_t element_size, object_mover pRelocate, bool nofail);249};250251template<typename T>252class vector : public rel_ops< vector<T> >253{254public:255typedef T* iterator;256typedef const T* const_iterator;257typedef T value_type;258typedef T& reference;259typedef const T& const_reference;260typedef T* pointer;261typedef const T* const_pointer;262263inline vector() :264m_p(NULL),265m_size(0),266m_capacity(0)267{268}269270inline vector(uint32_t n, const T& init) :271m_p(NULL),272m_size(0),273m_capacity(0)274{275increase_capacity(n, false);276construct_array(m_p, n, init);277m_size = n;278}279280inline vector(const vector& other) :281m_p(NULL),282m_size(0),283m_capacity(0)284{285increase_capacity(other.m_size, false);286287m_size = other.m_size;288289if (BASISU_IS_BITWISE_COPYABLE(T))290{291if ((m_p) && (other.m_p))292memcpy(m_p, other.m_p, m_size * sizeof(T));293}294else295{296T* pDst = m_p;297const T* pSrc = other.m_p;298for (uint32_t i = m_size; i > 0; i--)299construct(pDst++, *pSrc++);300}301}302303inline explicit vector(size_t size) :304m_p(NULL),305m_size(0),306m_capacity(0)307{308resize(size);309}310311inline ~vector()312{313if (m_p)314{315scalar_type<T>::destruct_array(m_p, m_size);316free(m_p);317m_p = nullptr;318}319}320321inline vector& operator= (const vector& other)322{323if (this == &other)324return *this;325326if (m_capacity >= other.m_size)327resize(0);328else329{330clear();331increase_capacity(other.m_size, false);332}333334if (BASISU_IS_BITWISE_COPYABLE(T))335{336if ((m_p) && (other.m_p))337memcpy(m_p, other.m_p, other.m_size * sizeof(T));338}339else340{341T* pDst = m_p;342const T* pSrc = other.m_p;343for (uint32_t i = other.m_size; i > 0; i--)344construct(pDst++, *pSrc++);345}346347m_size = other.m_size;348349return *this;350}351352BASISU_FORCE_INLINE const T* begin() const { return m_p; }353BASISU_FORCE_INLINE T* begin() { return m_p; }354355BASISU_FORCE_INLINE const T* end() const { return m_p + m_size; }356BASISU_FORCE_INLINE T* end() { return m_p + m_size; }357358BASISU_FORCE_INLINE bool empty() const { return !m_size; }359BASISU_FORCE_INLINE uint32_t size() const { return m_size; }360BASISU_FORCE_INLINE uint32_t size_in_bytes() const { return m_size * sizeof(T); }361BASISU_FORCE_INLINE uint32_t capacity() const { return m_capacity; }362363// operator[] will assert on out of range indices, but in final builds there is (and will never be) any range checking on this method.364//BASISU_FORCE_INLINE const T& operator[] (uint32_t i) const { assert(i < m_size); return m_p[i]; }365//BASISU_FORCE_INLINE T& operator[] (uint32_t i) { assert(i < m_size); return m_p[i]; }366367#if !BASISU_VECTOR_FORCE_CHECKING368BASISU_FORCE_INLINE const T& operator[] (size_t i) const { assert(i < m_size); return m_p[i]; }369BASISU_FORCE_INLINE T& operator[] (size_t i) { assert(i < m_size); return m_p[i]; }370#else371BASISU_FORCE_INLINE const T& operator[] (size_t i) const372{373if (i >= m_size)374{375fprintf(stderr, "operator[] invalid index: %u, max entries %u, type size %u\n", (uint32_t)i, m_size, (uint32_t)sizeof(T));376abort();377}378return m_p[i];379}380BASISU_FORCE_INLINE T& operator[] (size_t i)381{382if (i >= m_size)383{384fprintf(stderr, "operator[] invalid index: %u, max entries %u, type size %u\n", (uint32_t)i, m_size, (uint32_t)sizeof(T));385abort();386}387return m_p[i];388}389#endif390391// at() always includes range checking, even in final builds, unlike operator [].392// The first element is returned if the index is out of range.393BASISU_FORCE_INLINE const T& at(size_t i) const { assert(i < m_size); return (i >= m_size) ? m_p[0] : m_p[i]; }394BASISU_FORCE_INLINE T& at(size_t i) { assert(i < m_size); return (i >= m_size) ? m_p[0] : m_p[i]; }395396#if !BASISU_VECTOR_FORCE_CHECKING397BASISU_FORCE_INLINE const T& front() const { assert(m_size); return m_p[0]; }398BASISU_FORCE_INLINE T& front() { assert(m_size); return m_p[0]; }399400BASISU_FORCE_INLINE const T& back() const { assert(m_size); return m_p[m_size - 1]; }401BASISU_FORCE_INLINE T& back() { assert(m_size); return m_p[m_size - 1]; }402#else403BASISU_FORCE_INLINE const T& front() const404{405if (!m_size)406{407fprintf(stderr, "front: vector is empty, type size %u\n", (uint32_t)sizeof(T));408abort();409}410return m_p[0];411}412BASISU_FORCE_INLINE T& front()413{414if (!m_size)415{416fprintf(stderr, "front: vector is empty, type size %u\n", (uint32_t)sizeof(T));417abort();418}419return m_p[0];420}421422BASISU_FORCE_INLINE const T& back() const423{424if(!m_size)425{426fprintf(stderr, "back: vector is empty, type size %u\n", (uint32_t)sizeof(T));427abort();428}429return m_p[m_size - 1];430}431BASISU_FORCE_INLINE T& back()432{433if (!m_size)434{435fprintf(stderr, "back: vector is empty, type size %u\n", (uint32_t)sizeof(T));436abort();437}438return m_p[m_size - 1];439}440#endif441442BASISU_FORCE_INLINE const T* get_ptr() const { return m_p; }443BASISU_FORCE_INLINE T* get_ptr() { return m_p; }444445BASISU_FORCE_INLINE const T* data() const { return m_p; }446BASISU_FORCE_INLINE T* data() { return m_p; }447448// clear() sets the container to empty, then frees the allocated block.449inline void clear()450{451if (m_p)452{453scalar_type<T>::destruct_array(m_p, m_size);454free(m_p);455m_p = NULL;456m_size = 0;457m_capacity = 0;458}459}460461inline void clear_no_destruction()462{463if (m_p)464{465free(m_p);466m_p = NULL;467m_size = 0;468m_capacity = 0;469}470}471472inline void reserve(size_t new_capacity_size_t)473{474if (new_capacity_size_t > UINT32_MAX)475{476assert(0);477return;478}479480uint32_t new_capacity = (uint32_t)new_capacity_size_t;481482if (new_capacity > m_capacity)483increase_capacity(new_capacity, false);484else if (new_capacity < m_capacity)485{486// Must work around the lack of a "decrease_capacity()" method.487// This case is rare enough in practice that it's probably not worth implementing an optimized in-place resize.488vector tmp;489tmp.increase_capacity(helpers::maximum(m_size, new_capacity), false);490tmp = *this;491swap(tmp);492}493}494495inline bool try_reserve(size_t new_capacity_size_t)496{497if (new_capacity_size_t > UINT32_MAX)498{499assert(0);500return false;501}502503uint32_t new_capacity = (uint32_t)new_capacity_size_t;504505if (new_capacity > m_capacity)506{507if (!increase_capacity(new_capacity, false))508return false;509}510else if (new_capacity < m_capacity)511{512// Must work around the lack of a "decrease_capacity()" method.513// This case is rare enough in practice that it's probably not worth implementing an optimized in-place resize.514vector tmp;515tmp.increase_capacity(helpers::maximum(m_size, new_capacity), false);516tmp = *this;517swap(tmp);518}519520return true;521}522523// resize(0) sets the container to empty, but does not free the allocated block.524inline void resize(size_t new_size_size_t, bool grow_hint = false)525{526if (new_size_size_t > UINT32_MAX)527{528assert(0);529return;530}531532uint32_t new_size = (uint32_t)new_size_size_t;533534if (m_size != new_size)535{536if (new_size < m_size)537scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);538else539{540if (new_size > m_capacity)541increase_capacity(new_size, (new_size == (m_size + 1)) || grow_hint);542543scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);544}545546m_size = new_size;547}548}549550inline bool try_resize(size_t new_size_size_t, bool grow_hint = false)551{552if (new_size_size_t > UINT32_MAX)553{554assert(0);555return false;556}557558uint32_t new_size = (uint32_t)new_size_size_t;559560if (m_size != new_size)561{562if (new_size < m_size)563scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);564else565{566if (new_size > m_capacity)567{568if (!increase_capacity(new_size, (new_size == (m_size + 1)) || grow_hint, true))569return false;570}571572scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);573}574575m_size = new_size;576}577578return true;579}580581// If size >= capacity/2, reset() sets the container's size to 0 but doesn't free the allocated block (because the container may be similarly loaded in the future).582// Otherwise it blows away the allocated block. See http://www.codercorner.com/blog/?p=494583inline void reset()584{585if (m_size >= (m_capacity >> 1))586resize(0);587else588clear();589}590591inline T* enlarge(uint32_t i)592{593uint32_t cur_size = m_size;594resize(cur_size + i, true);595return get_ptr() + cur_size;596}597598inline T* try_enlarge(uint32_t i)599{600uint32_t cur_size = m_size;601if (!try_resize(cur_size + i, true))602return NULL;603return get_ptr() + cur_size;604}605606BASISU_FORCE_INLINE void push_back(const T& obj)607{608assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));609610if (m_size >= m_capacity)611increase_capacity(m_size + 1, true);612613scalar_type<T>::construct(m_p + m_size, obj);614m_size++;615}616617inline bool try_push_back(const T& obj)618{619assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));620621if (m_size >= m_capacity)622{623if (!increase_capacity(m_size + 1, true, true))624return false;625}626627scalar_type<T>::construct(m_p + m_size, obj);628m_size++;629630return true;631}632633inline void push_back_value(T obj)634{635if (m_size >= m_capacity)636increase_capacity(m_size + 1, true);637638scalar_type<T>::construct(m_p + m_size, obj);639m_size++;640}641642inline void pop_back()643{644assert(m_size);645646if (m_size)647{648m_size--;649scalar_type<T>::destruct(&m_p[m_size]);650}651}652653inline void insert(uint32_t index, const T* p, uint32_t n)654{655assert(index <= m_size);656if (!n)657return;658659const uint32_t orig_size = m_size;660resize(m_size + n, true);661662const uint32_t num_to_move = orig_size - index;663664if (BASISU_IS_BITWISE_COPYABLE(T))665{666// This overwrites the destination object bits, but bitwise copyable means we don't need to worry about destruction.667memmove(m_p + index + n, m_p + index, sizeof(T) * num_to_move);668}669else670{671const T* pSrc = m_p + orig_size - 1;672T* pDst = const_cast<T*>(pSrc) + n;673674for (uint32_t i = 0; i < num_to_move; i++)675{676assert((pDst - m_p) < (int)m_size);677*pDst-- = *pSrc--;678}679}680681T* pDst = m_p + index;682683if (BASISU_IS_BITWISE_COPYABLE(T))684{685// This copies in the new bits, overwriting the existing objects, which is OK for copyable types that don't need destruction.686memcpy(pDst, p, sizeof(T) * n);687}688else689{690for (uint32_t i = 0; i < n; i++)691{692assert((pDst - m_p) < (int)m_size);693*pDst++ = *p++;694}695}696}697698inline void insert(T* p, const T& obj)699{700int64_t ofs = p - begin();701if ((ofs < 0) || (ofs > UINT32_MAX))702{703assert(0);704return;705}706707insert((uint32_t)ofs, &obj, 1);708}709710// push_front() isn't going to be very fast - it's only here for usability.711inline void push_front(const T& obj)712{713insert(0, &obj, 1);714}715716vector& append(const vector& other)717{718if (other.m_size)719insert(m_size, &other[0], other.m_size);720return *this;721}722723vector& append(const T* p, uint32_t n)724{725if (n)726insert(m_size, p, n);727return *this;728}729730inline void erase(uint32_t start, uint32_t n)731{732assert((start + n) <= m_size);733if ((start + n) > m_size)734return;735736if (!n)737return;738739const uint32_t num_to_move = m_size - (start + n);740741T* pDst = m_p + start;742743const T* pSrc = m_p + start + n;744745if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T))746{747// This test is overly cautious.748if ((!BASISU_IS_BITWISE_COPYABLE(T)) || (BASISU_HAS_DESTRUCTOR(T)))749{750// Type has been marked explictly as bitwise movable, which means we can move them around but they may need to be destructed.751// First destroy the erased objects.752scalar_type<T>::destruct_array(pDst, n);753}754755// Copy "down" the objects to preserve, filling in the empty slots.756memmove(pDst, pSrc, num_to_move * sizeof(T));757}758else759{760// Type is not bitwise copyable or movable.761// Move them down one at a time by using the equals operator, and destroying anything that's left over at the end.762T* pDst_end = pDst + num_to_move;763while (pDst != pDst_end)764*pDst++ = *pSrc++;765766scalar_type<T>::destruct_array(pDst_end, n);767}768769m_size -= n;770}771772inline void erase(uint32_t index)773{774erase(index, 1);775}776777inline void erase(T* p)778{779assert((p >= m_p) && (p < (m_p + m_size)));780erase(static_cast<uint32_t>(p - m_p));781}782783inline void erase(T *pFirst, T *pEnd)784{785assert(pFirst <= pEnd);786assert(pFirst >= begin() && pFirst <= end());787assert(pEnd >= begin() && pEnd <= end());788789int64_t ofs = pFirst - begin();790if ((ofs < 0) || (ofs > UINT32_MAX))791{792assert(0);793return;794}795796int64_t n = pEnd - pFirst;797if ((n < 0) || (n > UINT32_MAX))798{799assert(0);800return;801}802803erase((uint32_t)ofs, (uint32_t)n);804}805806void erase_unordered(uint32_t index)807{808assert(index < m_size);809810if ((index + 1) < m_size)811(*this)[index] = back();812813pop_back();814}815816inline bool operator== (const vector& rhs) const817{818if (m_size != rhs.m_size)819return false;820else if (m_size)821{822if (scalar_type<T>::cFlag)823return memcmp(m_p, rhs.m_p, sizeof(T) * m_size) == 0;824else825{826const T* pSrc = m_p;827const T* pDst = rhs.m_p;828for (uint32_t i = m_size; i; i--)829if (!(*pSrc++ == *pDst++))830return false;831}832}833834return true;835}836837inline bool operator< (const vector& rhs) const838{839const uint32_t min_size = helpers::minimum(m_size, rhs.m_size);840841const T* pSrc = m_p;842const T* pSrc_end = m_p + min_size;843const T* pDst = rhs.m_p;844845while ((pSrc < pSrc_end) && (*pSrc == *pDst))846{847pSrc++;848pDst++;849}850851if (pSrc < pSrc_end)852return *pSrc < *pDst;853854return m_size < rhs.m_size;855}856857inline void swap(vector& other)858{859std::swap(m_p, other.m_p);860std::swap(m_size, other.m_size);861std::swap(m_capacity, other.m_capacity);862}863864inline void sort()865{866std::sort(begin(), end());867}868869inline void unique()870{871if (!empty())872{873sort();874875resize(std::unique(begin(), end()) - begin());876}877}878879inline void reverse()880{881uint32_t j = m_size >> 1;882for (uint32_t i = 0; i < j; i++)883std::swap(m_p[i], m_p[m_size - 1 - i]);884}885886inline int find(const T& key) const887{888const T* p = m_p;889const T* p_end = m_p + m_size;890891uint32_t index = 0;892893while (p != p_end)894{895if (key == *p)896return index;897898p++;899index++;900}901902return cInvalidIndex;903}904905inline int find_sorted(const T& key) const906{907if (m_size)908{909// Uniform binary search - Knuth Algorithm 6.2.1 U, unrolled twice.910int i = ((m_size + 1) >> 1) - 1;911int m = m_size;912913for (; ; )914{915assert(i >= 0 && i < (int)m_size);916const T* pKey_i = m_p + i;917int cmp = key < *pKey_i;918#if defined(_DEBUG) || defined(DEBUG)919int cmp2 = *pKey_i < key;920assert((cmp != cmp2) || (key == *pKey_i));921#endif922if ((!cmp) && (key == *pKey_i)) return i;923m >>= 1;924if (!m) break;925cmp = -cmp;926i += (((m + 1) >> 1) ^ cmp) - cmp;927if (i < 0)928break;929930assert(i >= 0 && i < (int)m_size);931pKey_i = m_p + i;932cmp = key < *pKey_i;933#if defined(_DEBUG) || defined(DEBUG)934cmp2 = *pKey_i < key;935assert((cmp != cmp2) || (key == *pKey_i));936#endif937if ((!cmp) && (key == *pKey_i)) return i;938m >>= 1;939if (!m) break;940cmp = -cmp;941i += (((m + 1) >> 1) ^ cmp) - cmp;942if (i < 0)943break;944}945}946947return cInvalidIndex;948}949950template<typename Q>951inline int find_sorted(const T& key, Q less_than) const952{953if (m_size)954{955// Uniform binary search - Knuth Algorithm 6.2.1 U, unrolled twice.956int i = ((m_size + 1) >> 1) - 1;957int m = m_size;958959for (; ; )960{961assert(i >= 0 && i < (int)m_size);962const T* pKey_i = m_p + i;963int cmp = less_than(key, *pKey_i);964if ((!cmp) && (!less_than(*pKey_i, key))) return i;965m >>= 1;966if (!m) break;967cmp = -cmp;968i += (((m + 1) >> 1) ^ cmp) - cmp;969if (i < 0)970break;971972assert(i >= 0 && i < (int)m_size);973pKey_i = m_p + i;974cmp = less_than(key, *pKey_i);975if ((!cmp) && (!less_than(*pKey_i, key))) return i;976m >>= 1;977if (!m) break;978cmp = -cmp;979i += (((m + 1) >> 1) ^ cmp) - cmp;980if (i < 0)981break;982}983}984985return cInvalidIndex;986}987988inline uint32_t count_occurences(const T& key) const989{990uint32_t c = 0;991992const T* p = m_p;993const T* p_end = m_p + m_size;994995while (p != p_end)996{997if (key == *p)998c++;9991000p++;1001}10021003return c;1004}10051006inline void set_all(const T& o)1007{1008if ((sizeof(T) == 1) && (scalar_type<T>::cFlag))1009memset(m_p, *reinterpret_cast<const uint8_t*>(&o), m_size);1010else1011{1012T* pDst = m_p;1013T* pDst_end = pDst + m_size;1014while (pDst != pDst_end)1015*pDst++ = o;1016}1017}10181019// Caller assumes ownership of the heap block associated with the container. Container is cleared.1020inline void* assume_ownership()1021{1022T* p = m_p;1023m_p = NULL;1024m_size = 0;1025m_capacity = 0;1026return p;1027}10281029// Caller is granting ownership of the indicated heap block.1030// Block must have size constructed elements, and have enough room for capacity elements.1031// The block must have been allocated using malloc().1032// Important: This method is used in Basis Universal. If you change how this container allocates memory, you'll need to change any users of this method.1033inline bool grant_ownership(T* p, uint32_t size, uint32_t capacity)1034{1035// To to prevent the caller from obviously shooting themselves in the foot.1036if (((p + capacity) > m_p) && (p < (m_p + m_capacity)))1037{1038// Can grant ownership of a block inside the container itself!1039assert(0);1040return false;1041}10421043if (size > capacity)1044{1045assert(0);1046return false;1047}10481049if (!p)1050{1051if (capacity)1052{1053assert(0);1054return false;1055}1056}1057else if (!capacity)1058{1059assert(0);1060return false;1061}10621063clear();1064m_p = p;1065m_size = size;1066m_capacity = capacity;1067return true;1068}10691070private:1071T* m_p;1072uint32_t m_size;1073uint32_t m_capacity;10741075template<typename Q> struct is_vector { enum { cFlag = false }; };1076template<typename Q> struct is_vector< vector<Q> > { enum { cFlag = true }; };10771078static void object_mover(void* pDst_void, void* pSrc_void, uint32_t num)1079{1080T* pSrc = static_cast<T*>(pSrc_void);1081T* const pSrc_end = pSrc + num;1082T* pDst = static_cast<T*>(pDst_void);10831084while (pSrc != pSrc_end)1085{1086// placement new1087new (static_cast<void*>(pDst)) T(*pSrc);1088pSrc->~T();1089++pSrc;1090++pDst;1091}1092}10931094inline bool increase_capacity(uint32_t min_new_capacity, bool grow_hint, bool nofail = false)1095{1096return reinterpret_cast<elemental_vector*>(this)->increase_capacity(1097min_new_capacity, grow_hint, sizeof(T),1098(BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) || (is_vector<T>::cFlag)) ? NULL : object_mover, nofail);1099}1100};11011102template<typename T> struct bitwise_movable< vector<T> > { enum { cFlag = true }; };11031104// Hash map11051106template <typename T>1107struct hasher1108{1109inline size_t operator() (const T& key) const { return static_cast<size_t>(key); }1110};11111112template <typename T>1113struct equal_to1114{1115inline bool operator()(const T& a, const T& b) const { return a == b; }1116};11171118// Important: The Hasher and Equals objects must be bitwise movable!1119template<typename Key, typename Value = empty_type, typename Hasher = hasher<Key>, typename Equals = equal_to<Key> >1120class hash_map1121{1122public:1123class iterator;1124class const_iterator;11251126private:1127friend class iterator;1128friend class const_iterator;11291130enum state1131{1132cStateInvalid = 0,1133cStateValid = 11134};11351136enum1137{1138cMinHashSize = 4U1139};11401141public:1142typedef hash_map<Key, Value, Hasher, Equals> hash_map_type;1143typedef std::pair<Key, Value> value_type;1144typedef Key key_type;1145typedef Value referent_type;1146typedef Hasher hasher_type;1147typedef Equals equals_type;11481149hash_map() :1150m_hash_shift(32), m_num_valid(0), m_grow_threshold(0)1151{1152}11531154hash_map(const hash_map& other) :1155m_values(other.m_values),1156m_hash_shift(other.m_hash_shift),1157m_hasher(other.m_hasher),1158m_equals(other.m_equals),1159m_num_valid(other.m_num_valid),1160m_grow_threshold(other.m_grow_threshold)1161{1162}11631164hash_map& operator= (const hash_map& other)1165{1166if (this == &other)1167return *this;11681169clear();11701171m_values = other.m_values;1172m_hash_shift = other.m_hash_shift;1173m_num_valid = other.m_num_valid;1174m_grow_threshold = other.m_grow_threshold;1175m_hasher = other.m_hasher;1176m_equals = other.m_equals;11771178return *this;1179}11801181inline ~hash_map()1182{1183clear();1184}11851186const Equals& get_equals() const { return m_equals; }1187Equals& get_equals() { return m_equals; }11881189void set_equals(const Equals& equals) { m_equals = equals; }11901191const Hasher& get_hasher() const { return m_hasher; }1192Hasher& get_hasher() { return m_hasher; }11931194void set_hasher(const Hasher& hasher) { m_hasher = hasher; }11951196inline void clear()1197{1198if (!m_values.empty())1199{1200if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))1201{1202node* p = &get_node(0);1203node* p_end = p + m_values.size();12041205uint32_t num_remaining = m_num_valid;1206while (p != p_end)1207{1208if (p->state)1209{1210destruct_value_type(p);1211num_remaining--;1212if (!num_remaining)1213break;1214}12151216p++;1217}1218}12191220m_values.clear_no_destruction();12211222m_hash_shift = 32;1223m_num_valid = 0;1224m_grow_threshold = 0;1225}1226}12271228inline void reset()1229{1230if (!m_num_valid)1231return;12321233if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))1234{1235node* p = &get_node(0);1236node* p_end = p + m_values.size();12371238uint32_t num_remaining = m_num_valid;1239while (p != p_end)1240{1241if (p->state)1242{1243destruct_value_type(p);1244p->state = cStateInvalid;12451246num_remaining--;1247if (!num_remaining)1248break;1249}12501251p++;1252}1253}1254else if (sizeof(node) <= 32)1255{1256memset(&m_values[0], 0, m_values.size_in_bytes());1257}1258else1259{1260node* p = &get_node(0);1261node* p_end = p + m_values.size();12621263uint32_t num_remaining = m_num_valid;1264while (p != p_end)1265{1266if (p->state)1267{1268p->state = cStateInvalid;12691270num_remaining--;1271if (!num_remaining)1272break;1273}12741275p++;1276}1277}12781279m_num_valid = 0;1280}12811282inline uint32_t size()1283{1284return m_num_valid;1285}12861287inline uint32_t get_table_size()1288{1289return m_values.size();1290}12911292inline bool empty()1293{1294return !m_num_valid;1295}12961297inline void reserve(uint32_t new_capacity)1298{1299uint64_t new_hash_size = helpers::maximum(1U, new_capacity);13001301new_hash_size = new_hash_size * 2ULL;13021303if (!helpers::is_power_of_2(new_hash_size))1304new_hash_size = helpers::next_pow2(new_hash_size);13051306new_hash_size = helpers::maximum<uint64_t>(cMinHashSize, new_hash_size);13071308new_hash_size = helpers::minimum<uint64_t>(0x80000000UL, new_hash_size);13091310if (new_hash_size > m_values.size())1311rehash((uint32_t)new_hash_size);1312}13131314class iterator1315{1316friend class hash_map<Key, Value, Hasher, Equals>;1317friend class hash_map<Key, Value, Hasher, Equals>::const_iterator;13181319public:1320inline iterator() : m_pTable(NULL), m_index(0) { }1321inline iterator(hash_map_type& table, uint32_t index) : m_pTable(&table), m_index(index) { }1322inline iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }13231324inline iterator& operator= (const iterator& other)1325{1326m_pTable = other.m_pTable;1327m_index = other.m_index;1328return *this;1329}13301331// post-increment1332inline iterator operator++(int)1333{1334iterator result(*this);1335++*this;1336return result;1337}13381339// pre-increment1340inline iterator& operator++()1341{1342probe();1343return *this;1344}13451346inline value_type& operator*() const { return *get_cur(); }1347inline value_type* operator->() const { return get_cur(); }13481349inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }1350inline bool operator != (const iterator& b) const { return !(*this == b); }1351inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }1352inline bool operator != (const const_iterator& b) const { return !(*this == b); }13531354private:1355hash_map_type* m_pTable;1356uint32_t m_index;13571358inline value_type* get_cur() const1359{1360assert(m_pTable && (m_index < m_pTable->m_values.size()));1361assert(m_pTable->get_node_state(m_index) == cStateValid);13621363return &m_pTable->get_node(m_index);1364}13651366inline void probe()1367{1368assert(m_pTable);1369m_index = m_pTable->find_next(m_index);1370}1371};13721373class const_iterator1374{1375friend class hash_map<Key, Value, Hasher, Equals>;1376friend class hash_map<Key, Value, Hasher, Equals>::iterator;13771378public:1379inline const_iterator() : m_pTable(NULL), m_index(0) { }1380inline const_iterator(const hash_map_type& table, uint32_t index) : m_pTable(&table), m_index(index) { }1381inline const_iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }1382inline const_iterator(const const_iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }13831384inline const_iterator& operator= (const const_iterator& other)1385{1386m_pTable = other.m_pTable;1387m_index = other.m_index;1388return *this;1389}13901391inline const_iterator& operator= (const iterator& other)1392{1393m_pTable = other.m_pTable;1394m_index = other.m_index;1395return *this;1396}13971398// post-increment1399inline const_iterator operator++(int)1400{1401const_iterator result(*this);1402++*this;1403return result;1404}14051406// pre-increment1407inline const_iterator& operator++()1408{1409probe();1410return *this;1411}14121413inline const value_type& operator*() const { return *get_cur(); }1414inline const value_type* operator->() const { return get_cur(); }14151416inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }1417inline bool operator != (const const_iterator& b) const { return !(*this == b); }1418inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }1419inline bool operator != (const iterator& b) const { return !(*this == b); }14201421private:1422const hash_map_type* m_pTable;1423uint32_t m_index;14241425inline const value_type* get_cur() const1426{1427assert(m_pTable && (m_index < m_pTable->m_values.size()));1428assert(m_pTable->get_node_state(m_index) == cStateValid);14291430return &m_pTable->get_node(m_index);1431}14321433inline void probe()1434{1435assert(m_pTable);1436m_index = m_pTable->find_next(m_index);1437}1438};14391440inline const_iterator begin() const1441{1442if (!m_num_valid)1443return end();14441445return const_iterator(*this, find_next(UINT32_MAX));1446}14471448inline const_iterator end() const1449{1450return const_iterator(*this, m_values.size());1451}14521453inline iterator begin()1454{1455if (!m_num_valid)1456return end();14571458return iterator(*this, find_next(UINT32_MAX));1459}14601461inline iterator end()1462{1463return iterator(*this, m_values.size());1464}14651466// insert_result.first will always point to inserted key/value (or the already existing key/value).1467// insert_resutt.second will be true if a new key/value was inserted, or false if the key already existed (in which case first will point to the already existing value).1468typedef std::pair<iterator, bool> insert_result;14691470inline insert_result insert(const Key& k, const Value& v = Value())1471{1472insert_result result;1473if (!insert_no_grow(result, k, v))1474{1475grow();14761477// This must succeed.1478if (!insert_no_grow(result, k, v))1479{1480fprintf(stderr, "insert() failed");1481abort();1482}1483}14841485return result;1486}14871488inline insert_result insert(const value_type& v)1489{1490return insert(v.first, v.second);1491}14921493inline const_iterator find(const Key& k) const1494{1495return const_iterator(*this, find_index(k));1496}14971498inline iterator find(const Key& k)1499{1500return iterator(*this, find_index(k));1501}15021503inline bool erase(const Key& k)1504{1505uint32_t i = find_index(k);15061507if (i >= m_values.size())1508return false;15091510node* pDst = &get_node(i);1511destruct_value_type(pDst);1512pDst->state = cStateInvalid;15131514m_num_valid--;15151516for (; ; )1517{1518uint32_t r, j = i;15191520node* pSrc = pDst;15211522do1523{1524if (!i)1525{1526i = m_values.size() - 1;1527pSrc = &get_node(i);1528}1529else1530{1531i--;1532pSrc--;1533}15341535if (!pSrc->state)1536return true;15371538r = hash_key(pSrc->first);15391540} while ((i <= r && r < j) || (r < j && j < i) || (j < i && i <= r));15411542move_node(pDst, pSrc);15431544pDst = pSrc;1545}1546}15471548inline void swap(hash_map_type& other)1549{1550m_values.swap(other.m_values);1551std::swap(m_hash_shift, other.m_hash_shift);1552std::swap(m_num_valid, other.m_num_valid);1553std::swap(m_grow_threshold, other.m_grow_threshold);1554std::swap(m_hasher, other.m_hasher);1555std::swap(m_equals, other.m_equals);1556}15571558private:1559struct node : public value_type1560{1561uint8_t state;1562};15631564static inline void construct_value_type(value_type* pDst, const Key& k, const Value& v)1565{1566if (BASISU_IS_BITWISE_COPYABLE(Key))1567memcpy(&pDst->first, &k, sizeof(Key));1568else1569scalar_type<Key>::construct(&pDst->first, k);15701571if (BASISU_IS_BITWISE_COPYABLE(Value))1572memcpy(&pDst->second, &v, sizeof(Value));1573else1574scalar_type<Value>::construct(&pDst->second, v);1575}15761577static inline void construct_value_type(value_type* pDst, const value_type* pSrc)1578{1579if ((BASISU_IS_BITWISE_COPYABLE(Key)) && (BASISU_IS_BITWISE_COPYABLE(Value)))1580{1581memcpy(pDst, pSrc, sizeof(value_type));1582}1583else1584{1585if (BASISU_IS_BITWISE_COPYABLE(Key))1586memcpy(&pDst->first, &pSrc->first, sizeof(Key));1587else1588scalar_type<Key>::construct(&pDst->first, pSrc->first);15891590if (BASISU_IS_BITWISE_COPYABLE(Value))1591memcpy(&pDst->second, &pSrc->second, sizeof(Value));1592else1593scalar_type<Value>::construct(&pDst->second, pSrc->second);1594}1595}15961597static inline void destruct_value_type(value_type* p)1598{1599scalar_type<Key>::destruct(&p->first);1600scalar_type<Value>::destruct(&p->second);1601}16021603// Moves *pSrc to *pDst efficiently.1604// pDst should NOT be constructed on entry.1605static inline void move_node(node* pDst, node* pSrc, bool update_src_state = true)1606{1607assert(!pDst->state);16081609if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key) && BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))1610{1611memcpy(pDst, pSrc, sizeof(node));1612}1613else1614{1615if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key))1616memcpy(&pDst->first, &pSrc->first, sizeof(Key));1617else1618{1619scalar_type<Key>::construct(&pDst->first, pSrc->first);1620scalar_type<Key>::destruct(&pSrc->first);1621}16221623if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))1624memcpy(&pDst->second, &pSrc->second, sizeof(Value));1625else1626{1627scalar_type<Value>::construct(&pDst->second, pSrc->second);1628scalar_type<Value>::destruct(&pSrc->second);1629}16301631pDst->state = cStateValid;1632}16331634if (update_src_state)1635pSrc->state = cStateInvalid;1636}16371638struct raw_node1639{1640inline raw_node()1641{1642node* p = reinterpret_cast<node*>(this);1643p->state = cStateInvalid;1644}16451646inline ~raw_node()1647{1648node* p = reinterpret_cast<node*>(this);1649if (p->state)1650hash_map_type::destruct_value_type(p);1651}16521653inline raw_node(const raw_node& other)1654{1655node* pDst = reinterpret_cast<node*>(this);1656const node* pSrc = reinterpret_cast<const node*>(&other);16571658if (pSrc->state)1659{1660hash_map_type::construct_value_type(pDst, pSrc);1661pDst->state = cStateValid;1662}1663else1664pDst->state = cStateInvalid;1665}16661667inline raw_node& operator= (const raw_node& rhs)1668{1669if (this == &rhs)1670return *this;16711672node* pDst = reinterpret_cast<node*>(this);1673const node* pSrc = reinterpret_cast<const node*>(&rhs);16741675if (pSrc->state)1676{1677if (pDst->state)1678{1679pDst->first = pSrc->first;1680pDst->second = pSrc->second;1681}1682else1683{1684hash_map_type::construct_value_type(pDst, pSrc);1685pDst->state = cStateValid;1686}1687}1688else if (pDst->state)1689{1690hash_map_type::destruct_value_type(pDst);1691pDst->state = cStateInvalid;1692}16931694return *this;1695}16961697uint8_t m_bits[sizeof(node)];1698};16991700typedef basisu::vector<raw_node> node_vector;17011702node_vector m_values;1703uint32_t m_hash_shift;17041705Hasher m_hasher;1706Equals m_equals;17071708uint32_t m_num_valid;17091710uint32_t m_grow_threshold;17111712inline uint32_t hash_key(const Key& k) const1713{1714assert((1U << (32U - m_hash_shift)) == m_values.size());17151716uint32_t hash = static_cast<uint32_t>(m_hasher(k));17171718// Fibonacci hashing1719hash = (2654435769U * hash) >> m_hash_shift;17201721assert(hash < m_values.size());1722return hash;1723}17241725inline const node& get_node(uint32_t index) const1726{1727return *reinterpret_cast<const node*>(&m_values[index]);1728}17291730inline node& get_node(uint32_t index)1731{1732return *reinterpret_cast<node*>(&m_values[index]);1733}17341735inline state get_node_state(uint32_t index) const1736{1737return static_cast<state>(get_node(index).state);1738}17391740inline void set_node_state(uint32_t index, bool valid)1741{1742get_node(index).state = valid;1743}17441745inline void grow()1746{1747uint64_t n = m_values.size() * 3ULL; // was * 217481749if (!helpers::is_power_of_2(n))1750n = helpers::next_pow2(n);17511752if (n > 0x80000000UL)1753n = 0x80000000UL;17541755rehash(helpers::maximum<uint32_t>(cMinHashSize, (uint32_t)n));1756}17571758inline void rehash(uint32_t new_hash_size)1759{1760assert(new_hash_size >= m_num_valid);1761assert(helpers::is_power_of_2(new_hash_size));17621763if ((new_hash_size < m_num_valid) || (new_hash_size == m_values.size()))1764return;17651766hash_map new_map;1767new_map.m_values.resize(new_hash_size);1768new_map.m_hash_shift = 32U - helpers::floor_log2i(new_hash_size);1769assert(new_hash_size == (1U << (32U - new_map.m_hash_shift)));1770new_map.m_grow_threshold = UINT_MAX;17711772node* pNode = reinterpret_cast<node*>(m_values.begin());1773node* pNode_end = pNode + m_values.size();17741775while (pNode != pNode_end)1776{1777if (pNode->state)1778{1779new_map.move_into(pNode);17801781if (new_map.m_num_valid == m_num_valid)1782break;1783}17841785pNode++;1786}17871788new_map.m_grow_threshold = (new_hash_size + 1U) >> 1U;17891790m_values.clear_no_destruction();1791m_hash_shift = 32;17921793swap(new_map);1794}17951796inline uint32_t find_next(uint32_t index) const1797{1798index++;17991800if (index >= m_values.size())1801return index;18021803const node* pNode = &get_node(index);18041805for (; ; )1806{1807if (pNode->state)1808break;18091810if (++index >= m_values.size())1811break;18121813pNode++;1814}18151816return index;1817}18181819inline uint32_t find_index(const Key& k) const1820{1821if (m_num_valid)1822{1823uint32_t index = hash_key(k);1824const node* pNode = &get_node(index);18251826if (pNode->state)1827{1828if (m_equals(pNode->first, k))1829return index;18301831const uint32_t orig_index = index;18321833for (; ; )1834{1835if (!index)1836{1837index = m_values.size() - 1;1838pNode = &get_node(index);1839}1840else1841{1842index--;1843pNode--;1844}18451846if (index == orig_index)1847break;18481849if (!pNode->state)1850break;18511852if (m_equals(pNode->first, k))1853return index;1854}1855}1856}18571858return m_values.size();1859}18601861inline bool insert_no_grow(insert_result& result, const Key& k, const Value& v = Value())1862{1863if (!m_values.size())1864return false;18651866uint32_t index = hash_key(k);1867node* pNode = &get_node(index);18681869if (pNode->state)1870{1871if (m_equals(pNode->first, k))1872{1873result.first = iterator(*this, index);1874result.second = false;1875return true;1876}18771878const uint32_t orig_index = index;18791880for (; ; )1881{1882if (!index)1883{1884index = m_values.size() - 1;1885pNode = &get_node(index);1886}1887else1888{1889index--;1890pNode--;1891}18921893if (orig_index == index)1894return false;18951896if (!pNode->state)1897break;18981899if (m_equals(pNode->first, k))1900{1901result.first = iterator(*this, index);1902result.second = false;1903return true;1904}1905}1906}19071908if (m_num_valid >= m_grow_threshold)1909return false;19101911construct_value_type(pNode, k, v);19121913pNode->state = cStateValid;19141915m_num_valid++;1916assert(m_num_valid <= m_values.size());19171918result.first = iterator(*this, index);1919result.second = true;19201921return true;1922}19231924inline void move_into(node* pNode)1925{1926uint32_t index = hash_key(pNode->first);1927node* pDst_node = &get_node(index);19281929if (pDst_node->state)1930{1931const uint32_t orig_index = index;19321933for (; ; )1934{1935if (!index)1936{1937index = m_values.size() - 1;1938pDst_node = &get_node(index);1939}1940else1941{1942index--;1943pDst_node--;1944}19451946if (index == orig_index)1947{1948assert(false);1949return;1950}19511952if (!pDst_node->state)1953break;1954}1955}19561957move_node(pDst_node, pNode, false);19581959m_num_valid++;1960}1961};19621963template<typename Key, typename Value, typename Hasher, typename Equals>1964struct bitwise_movable< hash_map<Key, Value, Hasher, Equals> > { enum { cFlag = true }; };19651966#if BASISU_HASHMAP_TEST1967extern void hash_map_test();1968#endif19691970} // namespace basisu19711972namespace std1973{1974template<typename T>1975inline void swap(basisu::vector<T>& a, basisu::vector<T>& b)1976{1977a.swap(b);1978}19791980template<typename Key, typename Value, typename Hasher, typename Equals>1981inline void swap(basisu::hash_map<Key, Value, Hasher, Equals>& a, basisu::hash_map<Key, Value, Hasher, Equals>& b)1982{1983a.swap(b);1984}19851986} // namespace std198719881989