// Path: blob/master/thirdparty/basis_universal/transcoder/basisu_containers.h
// (9905 views)
// basisu_containers.h1#pragma once2#include <stdlib.h>3#include <stdio.h>4#include <stdint.h>5#include <assert.h>6#include <algorithm>78#if defined(__linux__) && !defined(ANDROID)9// Only for malloc_usable_size() in basisu_containers_impl.h10#include <malloc.h>11#define HAS_MALLOC_USABLE_SIZE 112#endif1314// Set to 1 to always check vector operator[], front(), and back() even in release.15#define BASISU_VECTOR_FORCE_CHECKING 01617// If 1, the vector container will not query the CRT to get the size of resized memory blocks.18#define BASISU_VECTOR_DETERMINISTIC 11920#ifdef _MSC_VER21#define BASISU_FORCE_INLINE __forceinline22#else23#define BASISU_FORCE_INLINE inline24#endif2526#define BASISU_HASHMAP_TEST 02728namespace basisu29{30enum { cInvalidIndex = -1 };3132template <typename S> inline S clamp(S value, S low, S high) { return (value < low) ? low : ((value > high) ? high : value); }3334template <typename S> inline S maximum(S a, S b) { return (a > b) ? a : b; }35template <typename S> inline S maximum(S a, S b, S c) { return maximum(maximum(a, b), c); }36template <typename S> inline S maximum(S a, S b, S c, S d) { return maximum(maximum(maximum(a, b), c), d); }3738template <typename S> inline S minimum(S a, S b) { return (a < b) ? a : b; }39template <typename S> inline S minimum(S a, S b, S c) { return minimum(minimum(a, b), c); }40template <typename S> inline S minimum(S a, S b, S c, S d) { return minimum(minimum(minimum(a, b), c), d); }4142#ifdef _MSC_VER43__declspec(noreturn)44#else45[[noreturn]]46#endif47void container_abort(const char* pMsg, ...);4849namespace helpers50{51inline bool is_power_of_2(uint32_t x) { return x && ((x & (x - 1U)) == 0U); }52inline bool is_power_of_2(uint64_t x) { return x && ((x & (x - 1U)) == 0U); }5354template<class T> const T& minimum(const T& a, const T& b) { return (b < a) ? b : a; }55template<class T> const T& maximum(const T& a, const T& b) { return (a < b) ? 
b : a; }5657inline uint32_t floor_log2i(uint32_t v)58{59uint32_t l = 0;60while (v > 1U)61{62v >>= 1;63l++;64}65return l;66}6768inline uint32_t floor_log2i(uint64_t v)69{70uint32_t l = 0;71while (v > 1U)72{73v >>= 1;74l++;75}76return l;77}7879inline uint32_t next_pow2(uint32_t val)80{81val--;82val |= val >> 16;83val |= val >> 8;84val |= val >> 4;85val |= val >> 2;86val |= val >> 1;87return val + 1;88}8990inline uint64_t next_pow2(uint64_t val)91{92val--;93val |= val >> 32;94val |= val >> 16;95val |= val >> 8;96val |= val >> 4;97val |= val >> 2;98val |= val >> 1;99return val + 1;100}101} // namespace helpers102103template <typename T>104inline T* construct(T* p)105{106return new (static_cast<void*>(p)) T;107}108109template <typename T, typename U>110inline T* construct(T* p, const U& init)111{112return new (static_cast<void*>(p)) T(init);113}114115template <typename T>116inline void construct_array(T* p, size_t n)117{118T* q = p + n;119for (; p != q; ++p)120new (static_cast<void*>(p)) T;121}122123template <typename T, typename U>124inline void construct_array(T* p, size_t n, const U& init)125{126T* q = p + n;127for (; p != q; ++p)128new (static_cast<void*>(p)) T(init);129}130131template <typename T>132inline void destruct(T* p)133{134p->~T();135}136137template <typename T> inline void destruct_array(T* p, size_t n)138{139T* q = p + n;140for (; p != q; ++p)141p->~T();142}143144template<typename T>145struct scalar_type146{147enum { cFlag = false };148static inline void construct(T* p) { basisu::construct(p); }149static inline void construct(T* p, const T& init) { basisu::construct(p, init); }150static inline void construct_array(T* p, size_t n) { basisu::construct_array(p, n); }151static inline void destruct(T* p) { basisu::destruct(p); }152static inline void destruct_array(T* p, size_t n) { basisu::destruct_array(p, n); }153};154155template<typename T> struct scalar_type<T*>156{157enum { cFlag = true };158static inline void construct(T** p) { memset(p, 0, sizeof(T*)); 
}159static inline void construct(T** p, T* init) { *p = init; }160static inline void construct_array(T** p, size_t n) { memset(p, 0, sizeof(T*) * n); }161static inline void destruct(T** p) { p; }162static inline void destruct_array(T** p, size_t n) { p, n; }163};164165#define BASISU_DEFINE_BUILT_IN_TYPE(X) \166template<> struct scalar_type<X> { \167enum { cFlag = true }; \168static inline void construct(X* p) { memset(p, 0, sizeof(X)); } \169static inline void construct(X* p, const X& init) { memcpy(p, &init, sizeof(X)); } \170static inline void construct_array(X* p, size_t n) { memset(p, 0, sizeof(X) * n); } \171static inline void destruct(X* p) { p; } \172static inline void destruct_array(X* p, size_t n) { p, n; } };173174BASISU_DEFINE_BUILT_IN_TYPE(bool)175BASISU_DEFINE_BUILT_IN_TYPE(char)176BASISU_DEFINE_BUILT_IN_TYPE(unsigned char)177BASISU_DEFINE_BUILT_IN_TYPE(short)178BASISU_DEFINE_BUILT_IN_TYPE(unsigned short)179BASISU_DEFINE_BUILT_IN_TYPE(int)180BASISU_DEFINE_BUILT_IN_TYPE(unsigned int)181BASISU_DEFINE_BUILT_IN_TYPE(long)182BASISU_DEFINE_BUILT_IN_TYPE(unsigned long)183#ifdef __GNUC__184BASISU_DEFINE_BUILT_IN_TYPE(long long)185BASISU_DEFINE_BUILT_IN_TYPE(unsigned long long)186#else187BASISU_DEFINE_BUILT_IN_TYPE(__int64)188BASISU_DEFINE_BUILT_IN_TYPE(unsigned __int64)189#endif190BASISU_DEFINE_BUILT_IN_TYPE(float)191BASISU_DEFINE_BUILT_IN_TYPE(double)192BASISU_DEFINE_BUILT_IN_TYPE(long double)193194#undef BASISU_DEFINE_BUILT_IN_TYPE195196template<typename T>197struct bitwise_movable { enum { cFlag = false }; };198199#define BASISU_DEFINE_BITWISE_MOVABLE(Q) template<> struct bitwise_movable<Q> { enum { cFlag = true }; };200201template<typename T>202struct bitwise_copyable { enum { cFlag = false }; };203204#define BASISU_DEFINE_BITWISE_COPYABLE(Q) template<> struct bitwise_copyable<Q> { enum { cFlag = true }; };205206#define BASISU_IS_POD(T) __is_pod(T)207208#define BASISU_IS_SCALAR_TYPE(T) (scalar_type<T>::cFlag)209210#if 
!defined(BASISU_HAVE_STD_TRIVIALLY_COPYABLE) && defined(__GNUC__) && (__GNUC__ < 5)211#define BASISU_IS_TRIVIALLY_COPYABLE(...) __is_trivially_copyable(__VA_ARGS__)212#else213#define BASISU_IS_TRIVIALLY_COPYABLE(...) std::is_trivially_copyable<__VA_ARGS__>::value214#endif215216// TODO: clean this up, it's still confusing (copying vs. movable).217#define BASISU_IS_BITWISE_COPYABLE(T) (BASISU_IS_SCALAR_TYPE(T) || BASISU_IS_POD(T) || BASISU_IS_TRIVIALLY_COPYABLE(T) || std::is_trivial<T>::value || (bitwise_copyable<T>::cFlag))218219#define BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) (BASISU_IS_BITWISE_COPYABLE(T) || (bitwise_movable<T>::cFlag))220221#define BASISU_HAS_DESTRUCTOR(T) ((!scalar_type<T>::cFlag) && (!__is_pod(T)) && (!std::is_trivially_destructible<T>::value))222223typedef char(&yes_t)[1];224typedef char(&no_t)[2];225226template <class U> yes_t class_test(int U::*);227template <class U> no_t class_test(...);228229template <class T> struct is_class230{231enum { value = (sizeof(class_test<T>(0)) == sizeof(yes_t)) };232};233234template <typename T> struct is_pointer235{236enum { value = false };237};238239template <typename T> struct is_pointer<T*>240{241enum { value = true };242};243244struct empty_type { };245246BASISU_DEFINE_BITWISE_COPYABLE(empty_type);247BASISU_DEFINE_BITWISE_MOVABLE(empty_type);248249template<typename T> struct rel_ops250{251friend bool operator!=(const T& x, const T& y) { return (!(x == y)); }252friend bool operator> (const T& x, const T& y) { return (y < x); }253friend bool operator<=(const T& x, const T& y) { return (!(y < x)); }254friend bool operator>=(const T& x, const T& y) { return (!(x < y)); }255};256257struct elemental_vector258{259void* m_p;260size_t m_size;261size_t m_capacity;262263typedef void (*object_mover)(void* pDst, void* pSrc, size_t num);264265bool increase_capacity(size_t min_new_capacity, bool grow_hint, size_t element_size, object_mover pRelocate, bool nofail);266};267268// Returns true if a+b would overflow a 
size_t.269inline bool add_overflow_check(size_t a, size_t b)270{271size_t c = a + b;272return c < a;273}274275// Returns false on overflow, true if OK.276template<typename T>277inline bool can_fit_into_size_t(T val)278{279static_assert(std::is_integral<T>::value, "T must be an integral type");280281return (val >= 0) && (static_cast<size_t>(val) == val);282}283284// Returns true if a*b would overflow a size_t.285inline bool mul_overflow_check(size_t a, size_t b)286{287// Avoid the division on 32-bit platforms288if (sizeof(size_t) == sizeof(uint32_t))289return !can_fit_into_size_t(static_cast<uint64_t>(a) * b);290else291return b && (a > (SIZE_MAX / b));292}293294template<typename T>295class writable_span;296297template<typename T>298class readable_span299{300public:301using value_type = T;302using size_type = size_t;303using const_pointer = const T*;304using const_reference = const T&;305using const_iterator = const T*;306307inline readable_span() :308m_p(nullptr),309m_size(0)310{311}312313inline readable_span(const writable_span<T>& other);314inline readable_span& operator= (const writable_span<T>& rhs);315316inline readable_span(const_pointer p, size_t n)317{318set(p, n);319}320321inline readable_span(const_pointer s, const_pointer e)322{323set(s, e);324}325326inline readable_span(const readable_span& other) :327m_p(other.m_p),328m_size(other.m_size)329{330assert(!m_size || m_p);331}332333inline readable_span(readable_span&& other) :334m_p(other.m_p),335m_size(other.m_size)336{337assert(!m_size || m_p);338339other.m_p = nullptr;340other.m_size = 0;341}342343template <size_t N>344inline readable_span(const T(&arr)[N]) :345m_p(arr),346m_size(N)347{348}349350template <size_t N>351inline readable_span& set(const T(&arr)[N])352{353m_p = arr;354m_size = N;355return *this;356}357358inline readable_span& set(const_pointer p, size_t n)359{360if (!p && n)361{362assert(0);363m_p = nullptr;364m_size = 0;365}366else367{368m_p = p;369m_size = n;370}371372return 
*this;373}374375inline readable_span& set(const_pointer s, const_pointer e)376{377if ((e < s) || (!s && e))378{379assert(0);380m_p = nullptr;381m_size = 0;382}383else384{385m_p = s;386m_size = e - s;387}388389return *this;390}391392inline bool operator== (const readable_span& rhs) const393{394return (m_p == rhs.m_p) && (m_size == rhs.m_size);395}396397inline bool operator!= (const readable_span& rhs) const398{399return (m_p != rhs.m_p) || (m_size != rhs.m_size);400}401402// only true if the region is totally inside the span403inline bool is_inside_ptr(const_pointer p, size_t n) const404{405if (!is_valid())406{407assert(0);408return false;409}410411if (!p)412{413assert(!n);414return false;415}416417return (p >= m_p) && ((p + n) <= end());418}419420inline bool is_inside(size_t ofs, size_t size) const421{422if (add_overflow_check(ofs, size))423{424assert(0);425return false;426}427428if (!is_valid())429{430assert(0);431return false;432}433434if ((ofs + size) > m_size)435return false;436437return true;438}439440inline readable_span subspan(size_t ofs, size_t n) const441{442if (!is_valid())443{444assert(0);445return readable_span((const_pointer)nullptr, (size_t)0);446}447448if (add_overflow_check(ofs, n))449{450assert(0);451return readable_span((const_pointer)nullptr, (size_t)0);452}453454if ((ofs + n) > m_size)455{456assert(0);457return readable_span((const_pointer)nullptr, (size_t)0);458}459460return readable_span(m_p + ofs, n);461}462463void clear()464{465m_p = nullptr;466m_size = 0;467}468469inline bool empty() const { return !m_size; }470471// true if the span is non-nullptr and is not empty472inline bool is_valid() const { return m_p && m_size; }473474inline bool is_nullptr() const { return m_p == nullptr; }475476inline size_t size() const { return m_size; }477inline size_t size_in_bytes() const { assert(can_fit_into_size_t((uint64_t)m_size * sizeof(T))); return m_size * sizeof(T); }478479inline const_pointer get_ptr() const { return m_p; }480481inline 
const_iterator begin() const { return m_p; }482inline const_iterator end() const { assert(m_p || !m_size); return m_p + m_size; }483484inline const_iterator cbegin() const { return m_p; }485inline const_iterator cend() const { assert(m_p || !m_size); return m_p + m_size; }486487inline const_reference front() const488{489if (!(m_p && m_size))490container_abort("readable_span invalid\n");491492return m_p[0];493}494495inline const_reference back() const496{497if (!(m_p && m_size))498container_abort("readable_span invalid\n");499500return m_p[m_size - 1];501}502503inline readable_span& operator= (const readable_span& rhs)504{505m_p = rhs.m_p;506m_size = rhs.m_size;507return *this;508}509510inline readable_span& operator= (readable_span&& rhs)511{512if (this != &rhs)513{514m_p = rhs.m_p;515m_size = rhs.m_size;516rhs.m_p = nullptr;517rhs.m_size = 0;518}519520return *this;521}522523inline const_reference operator* () const524{525if (!(m_p && m_size))526container_abort("readable_span invalid\n");527528return *m_p;529}530531inline const_pointer operator-> () const532{533if (!(m_p && m_size))534container_abort("readable_span invalid\n");535536return m_p;537}538539inline readable_span& remove_prefix(size_t n)540{541if ((!m_p) || (n > m_size))542{543assert(0);544return *this;545}546547m_p += n;548m_size -= n;549return *this;550}551552inline readable_span& remove_suffix(size_t n)553{554if ((!m_p) || (n > m_size))555{556assert(0);557return *this;558}559560m_size -= n;561return *this;562}563564inline readable_span& enlarge(size_t n)565{566if (!m_p)567{568assert(0);569return *this;570}571572if (add_overflow_check(m_size, n))573{574assert(0);575return *this;576}577578m_size += n;579return *this;580}581582bool copy_from(size_t src_ofs, size_t src_size, T* pDst, size_t dst_ofs) const583{584if (!src_size)585return true;586587if (!pDst)588{589assert(0);590return false;591}592593if (!is_inside(src_ofs, src_size))594{595assert(0);596return false;597}598599const_pointer pS = m_p + 
src_ofs;600601if (BASISU_IS_BITWISE_COPYABLE(T))602{603const uint64_t num_bytes = (uint64_t)src_size * sizeof(T);604605if (!can_fit_into_size_t(num_bytes))606{607assert(0);608return false;609}610611memcpy(pDst, pS, (size_t)num_bytes);612}613else614{615T* pD = pDst + dst_ofs;616T* pDst_end = pD + src_size;617618while (pD != pDst_end)619*pD++ = *pS++;620}621622return true;623}624625inline const_reference operator[] (size_t idx) const626{627if ((!is_valid()) || (idx >= m_size))628container_abort("readable_span: invalid span or index\n");629630return m_p[idx];631}632633inline uint16_t read_le16(size_t ofs) const634{635static_assert(sizeof(T) == 1, "T must be byte size");636637if (!is_inside(ofs, sizeof(uint16_t)))638{639assert(0);640return false;641}642643const uint8_t a = (uint8_t)m_p[ofs];644const uint8_t b = (uint8_t)m_p[ofs + 1];645return a | (b << 8u);646}647648template<typename R>649inline R read_val(size_t ofs) const650{651static_assert(sizeof(T) == 1, "T must be byte size");652653if (!is_inside(ofs, sizeof(R)))654{655assert(0);656return (R)0;657}658659return *reinterpret_cast<const R*>(&m_p[ofs]);660}661662inline uint16_t read_be16(size_t ofs) const663{664static_assert(sizeof(T) == 1, "T must be byte size");665666if (!is_inside(ofs, sizeof(uint16_t)))667{668assert(0);669return 0;670}671672const uint8_t b = (uint8_t)m_p[ofs];673const uint8_t a = (uint8_t)m_p[ofs + 1];674return a | (b << 8u);675}676677inline uint32_t read_le32(size_t ofs) const678{679static_assert(sizeof(T) == 1, "T must be byte size");680681if (!is_inside(ofs, sizeof(uint32_t)))682{683assert(0);684return 0;685}686687const uint8_t a = (uint8_t)m_p[ofs];688const uint8_t b = (uint8_t)m_p[ofs + 1];689const uint8_t c = (uint8_t)m_p[ofs + 2];690const uint8_t d = (uint8_t)m_p[ofs + 3];691return a | (b << 8u) | (c << 16u) | (d << 24u);692}693694inline uint32_t read_be32(size_t ofs) const695{696static_assert(sizeof(T) == 1, "T must be byte size");697698if (!is_inside(ofs, 
sizeof(uint32_t)))699{700assert(0);701return 0;702}703704const uint8_t d = (uint8_t)m_p[ofs];705const uint8_t c = (uint8_t)m_p[ofs + 1];706const uint8_t b = (uint8_t)m_p[ofs + 2];707const uint8_t a = (uint8_t)m_p[ofs + 3];708return a | (b << 8u) | (c << 16u) | (d << 24u);709}710711inline uint64_t read_le64(size_t ofs) const712{713if (!add_overflow_check(ofs, sizeof(uint64_t)))714{715assert(0);716return 0;717}718const uint64_t l = read_le32(ofs);719const uint64_t h = read_le32(ofs + sizeof(uint32_t));720return l | (h << 32u);721}722723inline uint64_t read_be64(size_t ofs) const724{725if (!add_overflow_check(ofs, sizeof(uint64_t)))726{727assert(0);728return 0;729}730const uint64_t h = read_be32(ofs);731const uint64_t l = read_be32(ofs + sizeof(uint32_t));732return l | (h << 32u);733}734735private:736const_pointer m_p;737size_t m_size;738};739740template<typename T>741class writable_span742{743friend readable_span<T>;744745public:746using value_type = T;747using size_type = size_t;748using const_pointer = const T*;749using const_reference = const T&;750using const_iterator = const T*;751using pointer = T*;752using reference = T&;753using iterator = T*;754755inline writable_span() :756m_p(nullptr),757m_size(0)758{759}760761inline writable_span(T* p, size_t n)762{763set(p, n);764}765766inline writable_span(T* s, T* e)767{768set(s, e);769}770771inline writable_span(const writable_span& other) :772m_p(other.m_p),773m_size(other.m_size)774{775assert(!m_size || m_p);776}777778inline writable_span(writable_span&& other) :779m_p(other.m_p),780m_size(other.m_size)781{782assert(!m_size || m_p);783784other.m_p = nullptr;785other.m_size = 0;786}787788template <size_t N>789inline writable_span(T(&arr)[N]) :790m_p(arr),791m_size(N)792{793}794795readable_span<T> get_readable_span() const796{797return readable_span<T>(m_p, m_size);798}799800template <size_t N>801inline writable_span& set(T(&arr)[N])802{803m_p = arr;804m_size = N;805return *this;806}807808inline writable_span& set(T* 
p, size_t n)809{810if (!p && n)811{812assert(0);813m_p = nullptr;814m_size = 0;815}816else817{818m_p = p;819m_size = n;820}821822return *this;823}824825inline writable_span& set(T* s, T* e)826{827if ((e < s) || (!s && e))828{829assert(0);830m_p = nullptr;831m_size = 0;832}833else834{835m_p = s;836m_size = e - s;837}838839return *this;840}841842inline bool operator== (const writable_span& rhs) const843{844return (m_p == rhs.m_p) && (m_size == rhs.m_size);845}846847inline bool operator== (const readable_span<T>& rhs) const848{849return (m_p == rhs.m_p) && (m_size == rhs.m_size);850}851852inline bool operator!= (const writable_span& rhs) const853{854return (m_p != rhs.m_p) || (m_size != rhs.m_size);855}856857inline bool operator!= (const readable_span<T>& rhs) const858{859return (m_p != rhs.m_p) || (m_size != rhs.m_size);860}861862// only true if the region is totally inside the span863inline bool is_inside_ptr(const_pointer p, size_t n) const864{865if (!is_valid())866{867assert(0);868return false;869}870871if (!p)872{873assert(!n);874return false;875}876877return (p >= m_p) && ((p + n) <= end());878}879880inline bool is_inside(size_t ofs, size_t size) const881{882if (add_overflow_check(ofs, size))883{884assert(0);885return false;886}887888if (!is_valid())889{890assert(0);891return false;892}893894if ((ofs + size) > m_size)895return false;896897return true;898}899900inline writable_span subspan(size_t ofs, size_t n) const901{902if (!is_valid())903{904assert(0);905return writable_span((T*)nullptr, (size_t)0);906}907908if (add_overflow_check(ofs, n))909{910assert(0);911return writable_span((T*)nullptr, (size_t)0);912}913914if ((ofs + n) > m_size)915{916assert(0);917return writable_span((T*)nullptr, (size_t)0);918}919920return writable_span(m_p + ofs, n);921}922923void clear()924{925m_p = nullptr;926m_size = 0;927}928929inline bool empty() const { return !m_size; }930931// true if the span is non-nullptr and is not empty932inline bool is_valid() const { return m_p && 
m_size; }933934inline bool is_nullptr() const { return m_p == nullptr; }935936inline size_t size() const { return m_size; }937inline size_t size_in_bytes() const { assert(can_fit_into_size_t((uint64_t)m_size * sizeof(T))); return m_size * sizeof(T); }938939inline T* get_ptr() const { return m_p; }940941inline iterator begin() const { return m_p; }942inline iterator end() const { assert(m_p || !m_size); return m_p + m_size; }943944inline const_iterator cbegin() const { return m_p; }945inline const_iterator cend() const { assert(m_p || !m_size); return m_p + m_size; }946947inline T& front() const948{949if (!(m_p && m_size))950container_abort("writable_span invalid\n");951952return m_p[0];953}954955inline T& back() const956{957if (!(m_p && m_size))958container_abort("writable_span invalid\n");959960return m_p[m_size - 1];961}962963inline writable_span& operator= (const writable_span& rhs)964{965m_p = rhs.m_p;966m_size = rhs.m_size;967return *this;968}969970inline writable_span& operator= (writable_span&& rhs)971{972if (this != &rhs)973{974m_p = rhs.m_p;975m_size = rhs.m_size;976rhs.m_p = nullptr;977rhs.m_size = 0;978}979980return *this;981}982983inline T& operator* () const984{985if (!(m_p && m_size))986container_abort("writable_span invalid\n");987988return *m_p;989}990991inline T* operator-> () const992{993if (!(m_p && m_size))994container_abort("writable_span invalid\n");995996return m_p;997}998999inline bool set_all(size_t ofs, size_t size, const_reference val)1000{1001if (!size)1002return true;10031004if (!is_inside(ofs, size))1005{1006assert(0);1007return false;1008}10091010T* pDst = m_p + ofs;10111012if ((sizeof(T) == sizeof(uint8_t)) && (BASISU_IS_BITWISE_COPYABLE(T)))1013{1014memset(pDst, (int)((uint8_t)val), size);1015}1016else1017{10181019T* pDst_end = pDst + size;10201021while (pDst != pDst_end)1022*pDst++ = val;1023}10241025return true;1026}10271028inline bool set_all(const_reference val)1029{1030return set_all(0, m_size, val);1031}10321033inline 
writable_span& remove_prefix(size_t n)1034{1035if ((!m_p) || (n > m_size))1036{1037assert(0);1038return *this;1039}10401041m_p += n;1042m_size -= n;1043return *this;1044}10451046inline writable_span& remove_suffix(size_t n)1047{1048if ((!m_p) || (n > m_size))1049{1050assert(0);1051return *this;1052}10531054m_size -= n;1055return *this;1056}10571058inline writable_span& enlarge(size_t n)1059{1060if (!m_p)1061{1062assert(0);1063return *this;1064}10651066if (add_overflow_check(m_size, n))1067{1068assert(0);1069return *this;1070}10711072m_size += n;1073return *this;1074}10751076// copy from this span to the destination ptr1077bool copy_from(size_t src_ofs, size_t src_size, T* pDst, size_t dst_ofs) const1078{1079if (!src_size)1080return true;10811082if (!pDst)1083{1084assert(0);1085return false;1086}10871088if (!is_inside(src_ofs, src_size))1089{1090assert(0);1091return false;1092}10931094const_pointer pS = m_p + src_ofs;10951096if (BASISU_IS_BITWISE_COPYABLE(T))1097{1098const uint64_t num_bytes = (uint64_t)src_size * sizeof(T);10991100if (!can_fit_into_size_t(num_bytes))1101{1102assert(0);1103return false;1104}11051106memcpy(pDst, pS, (size_t)num_bytes);1107}1108else1109{1110T* pD = pDst + dst_ofs;1111T* pDst_end = pD + src_size;11121113while (pD != pDst_end)1114*pD++ = *pS++;1115}11161117return true;1118}11191120// copy from the source ptr into this span1121bool copy_into(const_pointer pSrc, size_t src_ofs, size_t src_size, size_t dst_ofs) const1122{1123if (!src_size)1124return true;11251126if (!pSrc)1127{1128assert(0);1129return false;1130}11311132if (add_overflow_check(src_ofs, src_size) || add_overflow_check(dst_ofs, src_size))1133{1134assert(0);1135return false;1136}11371138if (!is_valid())1139{1140assert(0);1141return false;1142}11431144if (!is_inside(dst_ofs, src_size))1145{1146assert(0);1147return false;1148}11491150const_pointer pS = pSrc + src_ofs;1151T* pD = m_p + dst_ofs;11521153if (BASISU_IS_BITWISE_COPYABLE(T))1154{1155const uint64_t num_bytes = 
(uint64_t)src_size * sizeof(T);11561157if (!can_fit_into_size_t(num_bytes))1158{1159assert(0);1160return false;1161}11621163memcpy(pD, pS, (size_t)num_bytes);1164}1165else1166{1167T* pDst_end = pD + src_size;11681169while (pD != pDst_end)1170*pD++ = *pS++;1171}11721173return true;1174}11751176// copy from a source span into this span1177bool copy_into(const readable_span<T>& src, size_t src_ofs, size_t src_size, size_t dst_ofs) const1178{1179if (!src.is_inside(src_ofs, src_size))1180{1181assert(0);1182return false;1183}11841185return copy_into(src.get_ptr(), src_ofs, src_size, dst_ofs);1186}11871188// copy from a source span into this span1189bool copy_into(const writable_span& src, size_t src_ofs, size_t src_size, size_t dst_ofs) const1190{1191if (!src.is_inside(src_ofs, src_size))1192{1193assert(0);1194return false;1195}11961197return copy_into(src.get_ptr(), src_ofs, src_size, dst_ofs);1198}11991200inline T& operator[] (size_t idx) const1201{1202if ((!is_valid()) || (idx >= m_size))1203container_abort("writable_span: invalid span or index\n");12041205return m_p[idx];1206}12071208template<typename R>1209inline R read_val(size_t ofs) const1210{1211static_assert(sizeof(T) == 1, "T must be byte size");12121213if (!is_inside(ofs, sizeof(R)))1214{1215assert(0);1216return (R)0;1217}12181219return *reinterpret_cast<const R*>(&m_p[ofs]);1220}12211222template<typename R>1223inline bool write_val(size_t ofs, R val) const1224{1225static_assert(sizeof(T) == 1, "T must be byte size");12261227if (!is_inside(ofs, sizeof(R)))1228{1229assert(0);1230return false;1231}12321233*reinterpret_cast<R*>(&m_p[ofs]) = val;1234return true;1235}12361237inline bool write_le16(size_t ofs, uint16_t val) const1238{1239static_assert(sizeof(T) == 1, "T must be byte size");12401241if (!is_inside(ofs, sizeof(uint16_t)))1242{1243assert(0);1244return false;1245}12461247m_p[ofs] = (uint8_t)val;1248m_p[ofs + 1] = (uint8_t)(val >> 8u);1249return true;1250}12511252inline bool write_be16(size_t ofs, 
uint16_t val) const1253{1254static_assert(sizeof(T) == 1, "T must be byte size");12551256if (!is_inside(ofs, sizeof(uint16_t)))1257{1258assert(0);1259return false;1260}12611262m_p[ofs + 1] = (uint8_t)val;1263m_p[ofs] = (uint8_t)(val >> 8u);1264return true;1265}12661267inline bool write_le32(size_t ofs, uint32_t val) const1268{1269static_assert(sizeof(T) == 1, "T must be byte size");12701271if (!is_inside(ofs, sizeof(uint32_t)))1272{1273assert(0);1274return false;1275}12761277m_p[ofs] = (uint8_t)val;1278m_p[ofs + 1] = (uint8_t)(val >> 8u);1279m_p[ofs + 2] = (uint8_t)(val >> 16u);1280m_p[ofs + 3] = (uint8_t)(val >> 24u);1281return true;1282}12831284inline bool write_be32(size_t ofs, uint32_t val) const1285{1286static_assert(sizeof(T) == 1, "T must be byte size");12871288if (!is_inside(ofs, sizeof(uint32_t)))1289{1290assert(0);1291return false;1292}12931294m_p[ofs + 3] = (uint8_t)val;1295m_p[ofs + 2] = (uint8_t)(val >> 8u);1296m_p[ofs + 1] = (uint8_t)(val >> 16u);1297m_p[ofs] = (uint8_t)(val >> 24u);1298return true;1299}13001301inline bool write_le64(size_t ofs, uint64_t val) const1302{1303if (!add_overflow_check(ofs, sizeof(uint64_t)))1304{1305assert(0);1306return false;1307}13081309return write_le32(ofs, (uint32_t)val) && write_le32(ofs + sizeof(uint32_t), (uint32_t)(val >> 32u));1310}13111312inline bool write_be64(size_t ofs, uint64_t val) const1313{1314if (!add_overflow_check(ofs, sizeof(uint64_t)))1315{1316assert(0);1317return false;1318}13191320return write_be32(ofs + sizeof(uint32_t), (uint32_t)val) && write_be32(ofs, (uint32_t)(val >> 32u));1321}13221323inline uint16_t read_le16(size_t ofs) const1324{1325static_assert(sizeof(T) == 1, "T must be byte size");13261327if (!is_inside(ofs, sizeof(uint16_t)))1328{1329assert(0);1330return 0;1331}13321333const uint8_t a = (uint8_t)m_p[ofs];1334const uint8_t b = (uint8_t)m_p[ofs + 1];1335return a | (b << 8u);1336}13371338inline uint16_t read_be16(size_t ofs) const1339{1340static_assert(sizeof(T) == 1, "T must be byte 
size");13411342if (!is_inside(ofs, sizeof(uint16_t)))1343{1344assert(0);1345return 0;1346}13471348const uint8_t b = (uint8_t)m_p[ofs];1349const uint8_t a = (uint8_t)m_p[ofs + 1];1350return a | (b << 8u);1351}13521353inline uint32_t read_le32(size_t ofs) const1354{1355static_assert(sizeof(T) == 1, "T must be byte size");13561357if (!is_inside(ofs, sizeof(uint32_t)))1358{1359assert(0);1360return 0;1361}13621363const uint8_t a = (uint8_t)m_p[ofs];1364const uint8_t b = (uint8_t)m_p[ofs + 1];1365const uint8_t c = (uint8_t)m_p[ofs + 2];1366const uint8_t d = (uint8_t)m_p[ofs + 3];1367return a | (b << 8u) | (c << 16u) | (d << 24u);1368}13691370inline uint32_t read_be32(size_t ofs) const1371{1372static_assert(sizeof(T) == 1, "T must be byte size");13731374if (!is_inside(ofs, sizeof(uint32_t)))1375{1376assert(0);1377return 0;1378}13791380const uint8_t d = (uint8_t)m_p[ofs];1381const uint8_t c = (uint8_t)m_p[ofs + 1];1382const uint8_t b = (uint8_t)m_p[ofs + 2];1383const uint8_t a = (uint8_t)m_p[ofs + 3];1384return a | (b << 8u) | (c << 16u) | (d << 24u);1385}13861387inline uint64_t read_le64(size_t ofs) const1388{1389if (!add_overflow_check(ofs, sizeof(uint64_t)))1390{1391assert(0);1392return 0;1393}1394const uint64_t l = read_le32(ofs);1395const uint64_t h = read_le32(ofs + sizeof(uint32_t));1396return l | (h << 32u);1397}13981399inline uint64_t read_be64(size_t ofs) const1400{1401if (!add_overflow_check(ofs, sizeof(uint64_t)))1402{1403assert(0);1404return 0;1405}1406const uint64_t h = read_be32(ofs);1407const uint64_t l = read_be32(ofs + sizeof(uint32_t));1408return l | (h << 32u);1409}14101411private:1412T* m_p;1413size_t m_size;1414};14151416template<typename T>1417inline readable_span<T>::readable_span(const writable_span<T>& other) :1418m_p(other.m_p),1419m_size(other.m_size)1420{1421}14221423template<typename T>1424inline readable_span<T>& readable_span<T>::operator= (const writable_span<T>& rhs)1425{1426m_p = rhs.m_p;1427m_size = rhs.m_size;1428return 
*this;1429}14301431template<typename T>1432inline bool span_copy(const writable_span<T>& dst, const readable_span<T>& src)1433{1434return dst.copy_into(src, 0, src.size(), 0);1435}14361437template<typename T>1438inline bool span_copy(const writable_span<T>& dst, const writable_span<T>& src)1439{1440return dst.copy_into(src, 0, src.size(), 0);1441}14421443template<typename T>1444inline bool span_copy(const writable_span<T>& dst, size_t dst_ofs, const writable_span<T>& src, size_t src_ofs, size_t len)1445{1446return dst.copy_into(src, src_ofs, len, dst_ofs);1447}14481449template<typename T>1450inline bool span_copy(const writable_span<T>& dst, size_t dst_ofs, const readable_span<T>& src, size_t src_ofs, size_t len)1451{1452return dst.copy_into(src, src_ofs, len, dst_ofs);1453}14541455template<typename T>1456class vector : public rel_ops< vector<T> >1457{1458public:1459typedef T* iterator;1460typedef const T* const_iterator;1461typedef T value_type;1462typedef T& reference;1463typedef const T& const_reference;1464typedef T* pointer;1465typedef const T* const_pointer;14661467inline vector() :1468m_p(nullptr),1469m_size(0),1470m_capacity(0)1471{1472}14731474inline vector(size_t n, const T& init) :1475m_p(nullptr),1476m_size(0),1477m_capacity(0)1478{1479increase_capacity(n, false);1480construct_array(m_p, n, init);1481m_size = n;1482}14831484inline vector(vector&& other) :1485m_p(other.m_p),1486m_size(other.m_size),1487m_capacity(other.m_capacity)1488{1489other.m_p = nullptr;1490other.m_size = 0;1491other.m_capacity = 0;1492}14931494inline vector(const vector& other) :1495m_p(nullptr),1496m_size(0),1497m_capacity(0)1498{1499increase_capacity(other.m_size, false);15001501m_size = other.m_size;15021503if (BASISU_IS_BITWISE_COPYABLE(T))1504{15051506#ifndef __EMSCRIPTEN__1507#ifdef __GNUC__1508#pragma GCC diagnostic push1509#pragma GCC diagnostic ignored "-Wclass-memaccess"1510#endif1511#endif1512if ((m_p) && (other.m_p))1513{1514memcpy(m_p, other.m_p, m_size * 
sizeof(T));1515}1516#ifndef __EMSCRIPTEN__1517#ifdef __GNUC__1518#pragma GCC diagnostic pop1519#endif1520#endif1521}1522else1523{1524T* pDst = m_p;1525const T* pSrc = other.m_p;1526for (size_t i = m_size; i > 0; i--)1527construct(pDst++, *pSrc++);1528}1529}15301531inline explicit vector(size_t size) :1532m_p(nullptr),1533m_size(0),1534m_capacity(0)1535{1536resize(size);1537}15381539inline explicit vector(std::initializer_list<T> init_list) :1540m_p(nullptr),1541m_size(0),1542m_capacity(0)1543{1544resize(init_list.size());15451546size_t idx = 0;1547for (const T& elem : init_list)1548m_p[idx++] = elem;15491550assert(idx == m_size);1551}15521553inline vector(const readable_span<T>& rs) :1554m_p(nullptr),1555m_size(0),1556m_capacity(0)1557{1558set(rs);1559}15601561inline vector(const writable_span<T>& ws) :1562m_p(nullptr),1563m_size(0),1564m_capacity(0)1565{1566set(ws);1567}15681569// Set contents of vector to contents of the readable span1570bool set(const readable_span<T>& rs)1571{1572if (!rs.is_valid())1573{1574assert(0);1575return false;1576}15771578const size_t new_size = rs.size();15791580// Could call resize(), but it'll redundantly construct trivial types.1581if (m_size != new_size)1582{1583if (new_size < m_size)1584{1585if (BASISU_HAS_DESTRUCTOR(T))1586{1587scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);1588}1589}1590else1591{1592if (new_size > m_capacity)1593{1594if (!increase_capacity(new_size, false, true))1595return false;1596}1597}15981599// Don't bother constructing trivial types, because we're going to memcpy() over them anyway.1600if (!BASISU_IS_BITWISE_COPYABLE(T))1601{1602scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);1603}16041605m_size = new_size;1606}16071608if (!rs.copy_from(0, rs.size(), m_p, 0))1609{1610assert(0);1611return false;1612}16131614return true;1615}16161617// Set contents of vector to contents of the writable span1618inline bool set(const writable_span<T>& ws)1619{1620return 
set(ws.get_readable_span());1621}16221623inline ~vector()1624{1625if (m_p)1626{1627if (BASISU_HAS_DESTRUCTOR(T))1628{1629scalar_type<T>::destruct_array(m_p, m_size);1630}16311632free(m_p);1633}1634}16351636inline vector& operator= (const vector& other)1637{1638if (this == &other)1639return *this;16401641if (m_capacity >= other.m_size)1642resize(0);1643else1644{1645clear();1646increase_capacity(other.m_size, false);1647}16481649if (BASISU_IS_BITWISE_COPYABLE(T))1650{1651#ifndef __EMSCRIPTEN__1652#ifdef __GNUC__1653#pragma GCC diagnostic push1654#pragma GCC diagnostic ignored "-Wclass-memaccess"1655#endif1656#endif1657if ((m_p) && (other.m_p))1658memcpy(m_p, other.m_p, other.m_size * sizeof(T));1659#ifndef __EMSCRIPTEN__1660#ifdef __GNUC__1661#pragma GCC diagnostic pop1662#endif1663#endif1664}1665else1666{1667T* pDst = m_p;1668const T* pSrc = other.m_p;1669for (size_t i = other.m_size; i > 0; i--)1670construct(pDst++, *pSrc++);1671}16721673m_size = other.m_size;16741675return *this;1676}16771678inline vector& operator= (vector&& rhs)1679{1680if (this != &rhs)1681{1682clear();16831684m_p = rhs.m_p;1685m_size = rhs.m_size;1686m_capacity = rhs.m_capacity;16871688rhs.m_p = nullptr;1689rhs.m_size = 0;1690rhs.m_capacity = 0;1691}1692return *this;1693}16941695BASISU_FORCE_INLINE const T* begin() const { return m_p; }1696BASISU_FORCE_INLINE T* begin() { return m_p; }16971698BASISU_FORCE_INLINE const T* end() const { return m_p + m_size; }1699BASISU_FORCE_INLINE T* end() { return m_p + m_size; }17001701BASISU_FORCE_INLINE bool empty() const { return !m_size; }17021703BASISU_FORCE_INLINE size_t size() const { return m_size; }1704BASISU_FORCE_INLINE uint32_t size_u32() const { assert(m_size <= UINT32_MAX); return static_cast<uint32_t>(m_size); }17051706BASISU_FORCE_INLINE size_t size_in_bytes() const { return m_size * sizeof(T); }1707BASISU_FORCE_INLINE uint32_t size_in_bytes_u32() const { assert((m_size * sizeof(T)) <= UINT32_MAX); return static_cast<uint32_t>(m_size * 
sizeof(T)); }17081709BASISU_FORCE_INLINE size_t capacity() const { return m_capacity; }17101711#if !BASISU_VECTOR_FORCE_CHECKING1712BASISU_FORCE_INLINE const T& operator[] (size_t i) const { assert(i < m_size); return m_p[i]; }1713BASISU_FORCE_INLINE T& operator[] (size_t i) { assert(i < m_size); return m_p[i]; }1714#else1715BASISU_FORCE_INLINE const T& operator[] (size_t i) const1716{1717if (i >= m_size)1718container_abort("vector::operator[] invalid index: %zu, max entries %u, type size %zu\n", i, m_size, sizeof(T));17191720return m_p[i];1721}1722BASISU_FORCE_INLINE T& operator[] (size_t i)1723{1724if (i >= m_size)1725container_abort("vector::operator[] invalid index: %zu, max entries %u, type size %zu\n", i, m_size, sizeof(T));17261727return m_p[i];1728}1729#endif17301731// at() always includes range checking, even in final builds, unlike operator [].1732BASISU_FORCE_INLINE const T& at(size_t i) const1733{1734if (i >= m_size)1735container_abort("vector::at() invalid index: %zu, max entries %u, type size %zu\n", i, m_size, sizeof(T));17361737return m_p[i];1738}1739BASISU_FORCE_INLINE T& at(size_t i)1740{1741if (i >= m_size)1742container_abort("vector::at() invalid index: %zu, max entries %u, type size %zu\n", i, m_size, sizeof(T));17431744return m_p[i];1745}17461747#if !BASISU_VECTOR_FORCE_CHECKING1748BASISU_FORCE_INLINE const T& front() const { assert(m_size); return m_p[0]; }1749BASISU_FORCE_INLINE T& front() { assert(m_size); return m_p[0]; }17501751BASISU_FORCE_INLINE const T& back() const { assert(m_size); return m_p[m_size - 1]; }1752BASISU_FORCE_INLINE T& back() { assert(m_size); return m_p[m_size - 1]; }1753#else1754BASISU_FORCE_INLINE const T& front() const1755{1756if (!m_size)1757container_abort("front: vector is empty, type size %zu\n", sizeof(T));17581759return m_p[0];1760}1761BASISU_FORCE_INLINE T& front()1762{1763if (!m_size)1764container_abort("front: vector is empty, type size %zu\n", sizeof(T));17651766return 
m_p[0];1767}17681769BASISU_FORCE_INLINE const T& back() const1770{1771if (!m_size)1772container_abort("back: vector is empty, type size %zu\n", sizeof(T));17731774return m_p[m_size - 1];1775}1776BASISU_FORCE_INLINE T& back()1777{1778if (!m_size)1779container_abort("back: vector is empty, type size %zu\n", sizeof(T));17801781return m_p[m_size - 1];1782}1783#endif17841785BASISU_FORCE_INLINE const T* get_ptr() const { return m_p; }1786BASISU_FORCE_INLINE T* get_ptr() { return m_p; }17871788BASISU_FORCE_INLINE const T* data() const { return m_p; }1789BASISU_FORCE_INLINE T* data() { return m_p; }17901791// clear() sets the container to empty, then frees the allocated block.1792inline void clear()1793{1794if (m_p)1795{1796if (BASISU_HAS_DESTRUCTOR(T))1797{1798scalar_type<T>::destruct_array(m_p, m_size);1799}18001801free(m_p);18021803m_p = nullptr;1804m_size = 0;1805m_capacity = 0;1806}1807}18081809inline void clear_no_destruction()1810{1811if (m_p)1812{1813free(m_p);1814m_p = nullptr;1815m_size = 0;1816m_capacity = 0;1817}1818}18191820inline void reserve(size_t new_capacity)1821{1822if (!try_reserve(new_capacity))1823container_abort("vector:reserve: try_reserve failed!\n");1824}18251826inline bool try_reserve(size_t new_capacity)1827{1828if (new_capacity > m_capacity)1829{1830if (!increase_capacity(new_capacity, false, true))1831return false;1832}1833else if (new_capacity < m_capacity)1834{1835// Must work around the lack of a "decrease_capacity()" method.1836// This case is rare enough in practice that it's probably not worth implementing an optimized in-place resize.1837vector tmp;1838if (!tmp.increase_capacity(helpers::maximum(m_size, new_capacity), false, true))1839return false;18401841tmp = *this;1842swap(tmp);1843}18441845return true;1846}18471848// try_resize(0) sets the container to empty, but does not free the allocated block.1849inline bool try_resize(size_t new_size, bool grow_hint = false)1850{1851if (m_size != new_size)1852{1853if (new_size < 
m_size)1854{1855if (BASISU_HAS_DESTRUCTOR(T))1856{1857scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);1858}1859}1860else1861{1862if (new_size > m_capacity)1863{1864if (!increase_capacity(new_size, (new_size == (m_size + 1)) || grow_hint, true))1865return false;1866}18671868scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);1869}18701871m_size = new_size;1872}18731874return true;1875}18761877// resize(0) sets the container to empty, but does not free the allocated block.1878inline void resize(size_t new_size, bool grow_hint = false)1879{1880if (!try_resize(new_size, grow_hint))1881container_abort("vector::resize failed, new size %zu\n", new_size);1882}18831884// If size >= capacity/2, reset() sets the container's size to 0 but doesn't free the allocated block (because the container may be similarly loaded in the future).1885// Otherwise it blows away the allocated block. See http://www.codercorner.com/blog/?p=4941886inline void reset()1887{1888if (m_size >= (m_capacity >> 1))1889resize(0);1890else1891clear();1892}18931894inline T* try_enlarge(size_t i)1895{1896size_t cur_size = m_size;18971898if (add_overflow_check(cur_size, i))1899return nullptr;19001901if (!try_resize(cur_size + i, true))1902return nullptr;19031904return get_ptr() + cur_size;1905}19061907inline T* enlarge(size_t i)1908{1909T* p = try_enlarge(i);1910if (!p)1911container_abort("vector::enlarge failed, amount %zu!\n", i);1912return p;1913}19141915BASISU_FORCE_INLINE void push_back(const T& obj)1916{1917assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));19181919if (m_size >= m_capacity)1920{1921if (add_overflow_check(m_size, 1))1922container_abort("vector::push_back: vector too large\n");19231924increase_capacity(m_size + 1, true);1925}19261927scalar_type<T>::construct(m_p + m_size, obj);1928m_size++;1929}19301931BASISU_FORCE_INLINE void push_back_value(T&& obj)1932{1933assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));19341935if (m_size >= 
m_capacity)1936{1937if (add_overflow_check(m_size, 1))1938container_abort("vector::push_back_value: vector too large\n");19391940increase_capacity(m_size + 1, true);1941}19421943new ((void*)(m_p + m_size)) T(std::move(obj));1944m_size++;1945}19461947inline bool try_push_back(const T& obj)1948{1949assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));19501951if (m_size >= m_capacity)1952{1953if (add_overflow_check(m_size, 1))1954return false;19551956if (!increase_capacity(m_size + 1, true, true))1957return false;1958}19591960scalar_type<T>::construct(m_p + m_size, obj);1961m_size++;19621963return true;1964}19651966inline bool try_push_back(T&& obj)1967{1968assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));19691970if (m_size >= m_capacity)1971{1972if (add_overflow_check(m_size, 1))1973return false;19741975if (!increase_capacity(m_size + 1, true, true))1976return false;1977}19781979new ((void*)(m_p + m_size)) T(std::move(obj));1980m_size++;19811982return true;1983}19841985// obj is explictly passed in by value, not ref1986inline void push_back_value(T obj)1987{1988if (m_size >= m_capacity)1989{1990if (add_overflow_check(m_size, 1))1991container_abort("vector::push_back_value: vector too large\n");19921993increase_capacity(m_size + 1, true);1994}19951996scalar_type<T>::construct(m_p + m_size, obj);1997m_size++;1998}19992000// obj is explictly passed in by value, not ref2001inline bool try_push_back_value(T obj)2002{2003if (m_size >= m_capacity)2004{2005if (add_overflow_check(m_size, 1))2006return false;20072008if (!increase_capacity(m_size + 1, true, true))2009return false;2010}20112012scalar_type<T>::construct(m_p + m_size, obj);2013m_size++;20142015return true;2016}20172018template<typename... Args>2019BASISU_FORCE_INLINE void emplace_back(Args&&... 
args)2020{2021if (m_size >= m_capacity)2022{2023if (add_overflow_check(m_size, 1))2024container_abort("vector::enlarge: vector too large\n");20252026increase_capacity(m_size + 1, true);2027}20282029new ((void*)(m_p + m_size)) T(std::forward<Args>(args)...); // perfect forwarding2030m_size++;2031}20322033template<typename... Args>2034BASISU_FORCE_INLINE bool try_emplace_back(Args&&... args)2035{2036if (m_size >= m_capacity)2037{2038if (add_overflow_check(m_size, 1))2039return false;20402041if (!increase_capacity(m_size + 1, true, true))2042return false;2043}20442045new ((void*)(m_p + m_size)) T(std::forward<Args>(args)...); // perfect forwarding2046m_size++;20472048return true;2049}20502051inline void pop_back()2052{2053assert(m_size);20542055if (m_size)2056{2057m_size--;2058scalar_type<T>::destruct(&m_p[m_size]);2059}2060}20612062inline bool try_insert(size_t index, const T* p, size_t n)2063{2064assert(index <= m_size);20652066if (index > m_size)2067return false;20682069if (!n)2070return true;20712072const size_t orig_size = m_size;20732074if (add_overflow_check(m_size, n))2075return false;20762077if (!try_resize(m_size + n, true))2078return false;20792080const size_t num_to_move = orig_size - index;20812082if (BASISU_IS_BITWISE_COPYABLE(T))2083{2084// This overwrites the destination object bits, but bitwise copyable means we don't need to worry about destruction.2085memmove(m_p + index + n, m_p + index, sizeof(T) * num_to_move);2086}2087else2088{2089const T* pSrc = m_p + orig_size - 1;2090T* pDst = const_cast<T*>(pSrc) + n;20912092for (size_t i = 0; i < num_to_move; i++)2093{2094assert((uint64_t)(pDst - m_p) < (uint64_t)m_size);20952096*pDst = std::move(*pSrc);2097pDst--;2098pSrc--;2099}2100}21012102T* pDst = m_p + index;21032104if (BASISU_IS_BITWISE_COPYABLE(T))2105{2106// This copies in the new bits, overwriting the existing objects, which is OK for copyable types that don't need destruction.2107memcpy(pDst, p, sizeof(T) * n);2108}2109else2110{2111for (size_t i 
= 0; i < n; i++)2112{2113assert((uint64_t)(pDst - m_p) < (uint64_t)m_size);2114*pDst++ = *p++;2115}2116}21172118return true;2119}21202121inline void insert(size_t index, const T* p, size_t n)2122{2123if (!try_insert(index, p, n))2124container_abort("vector::insert() failed!\n");2125}21262127inline bool try_insert(T* p, const T& obj)2128{2129if (p < begin())2130{2131assert(0);2132return false;2133}21342135uint64_t ofs = p - begin();21362137if (ofs > m_size)2138{2139assert(0);2140return false;2141}21422143if ((size_t)ofs != ofs)2144{2145assert(0);2146return false;2147}21482149return try_insert((size_t)ofs, &obj, 1);2150}21512152inline void insert(T* p, const T& obj)2153{2154if (!try_insert(p, obj))2155container_abort("vector::insert() failed!\n");2156}21572158// push_front() isn't going to be very fast - it's only here for usability.2159inline void push_front(const T& obj)2160{2161insert(0, &obj, 1);2162}21632164inline bool try_push_front(const T& obj)2165{2166return try_insert(0, &obj, 1);2167}21682169vector& append(const vector& other)2170{2171if (other.m_size)2172insert(m_size, &other[0], other.m_size);2173return *this;2174}21752176bool try_append(const vector& other)2177{2178if (other.m_size)2179return try_insert(m_size, &other[0], other.m_size);21802181return true;2182}21832184vector& append(const T* p, size_t n)2185{2186if (n)2187insert(m_size, p, n);2188return *this;2189}21902191bool try_append(const T* p, size_t n)2192{2193if (n)2194return try_insert(m_size, p, n);21952196return true;2197}21982199inline bool erase(size_t start, size_t n)2200{2201if (add_overflow_check(start, n))2202{2203assert(0);2204return false;2205}22062207assert((start + n) <= m_size);22082209if ((start + n) > m_size)2210{2211assert(0);2212return false;2213}22142215if (!n)2216return true;22172218const size_t num_to_move = m_size - (start + n);22192220T* pDst = m_p + start;22212222const T* pSrc = m_p + start + n;22232224if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T))2225{2226// This test is 
overly cautious.2227if ((!BASISU_IS_BITWISE_COPYABLE(T)) || (BASISU_HAS_DESTRUCTOR(T)))2228{2229// Type has been marked explictly as bitwise movable, which means we can move them around but they may need to be destructed.2230// First destroy the erased objects.2231scalar_type<T>::destruct_array(pDst, n);2232}22332234// Copy "down" the objects to preserve, filling in the empty slots.22352236#ifndef __EMSCRIPTEN__2237#ifdef __GNUC__2238#pragma GCC diagnostic push2239#pragma GCC diagnostic ignored "-Wclass-memaccess"2240#endif2241#endif22422243memmove(pDst, pSrc, num_to_move * sizeof(T));22442245#ifndef __EMSCRIPTEN__2246#ifdef __GNUC__2247#pragma GCC diagnostic pop2248#endif2249#endif2250}2251else2252{2253// Type is not bitwise copyable or movable.2254// Move them down one at a time by using the equals operator, and destroying anything that's left over at the end.2255T* pDst_end = pDst + num_to_move;22562257while (pDst != pDst_end)2258{2259*pDst = std::move(*pSrc);22602261++pDst;2262++pSrc;2263}22642265scalar_type<T>::destruct_array(pDst_end, n);2266}22672268m_size -= n;22692270return true;2271}22722273inline bool erase_index(size_t index)2274{2275return erase(index, 1);2276}22772278inline bool erase(T* p)2279{2280assert((p >= m_p) && (p < (m_p + m_size)));22812282if (p < m_p)2283return false;22842285return erase_index(static_cast<size_t>(p - m_p));2286}22872288inline bool erase(T* pFirst, T* pEnd)2289{2290assert(pFirst <= pEnd);2291assert(pFirst >= begin() && pFirst <= end());2292assert(pEnd >= begin() && pEnd <= end());22932294if ((pFirst < begin()) || (pEnd < pFirst))2295{2296assert(0);2297return false;2298}22992300uint64_t ofs = pFirst - begin();2301if ((size_t)ofs != ofs)2302{2303assert(0);2304return false;2305}23062307uint64_t n = pEnd - pFirst;2308if ((size_t)n != n)2309{2310assert(0);2311return false;2312}23132314return erase((size_t)ofs, (size_t)n);2315}23162317bool erase_unordered(size_t index)2318{2319if (index >= m_size)2320{2321assert(0);2322return 
false;2323}23242325if ((index + 1) < m_size)2326{2327(*this)[index] = std::move(back());2328}23292330pop_back();2331return true;2332}23332334inline bool operator== (const vector& rhs) const2335{2336if (m_size != rhs.m_size)2337return false;2338else if (m_size)2339{2340if (scalar_type<T>::cFlag)2341return memcmp(m_p, rhs.m_p, sizeof(T) * m_size) == 0;2342else2343{2344const T* pSrc = m_p;2345const T* pDst = rhs.m_p;2346for (size_t i = m_size; i; i--)2347if (!(*pSrc++ == *pDst++))2348return false;2349}2350}23512352return true;2353}23542355inline bool operator< (const vector& rhs) const2356{2357const size_t min_size = helpers::minimum(m_size, rhs.m_size);23582359const T* pSrc = m_p;2360const T* pSrc_end = m_p + min_size;2361const T* pDst = rhs.m_p;23622363while ((pSrc < pSrc_end) && (*pSrc == *pDst))2364{2365pSrc++;2366pDst++;2367}23682369if (pSrc < pSrc_end)2370return *pSrc < *pDst;23712372return m_size < rhs.m_size;2373}23742375inline void swap(vector& other)2376{2377std::swap(m_p, other.m_p);2378std::swap(m_size, other.m_size);2379std::swap(m_capacity, other.m_capacity);2380}23812382inline void sort()2383{2384std::sort(begin(), end());2385}23862387inline void unique()2388{2389if (!empty())2390{2391sort();23922393resize(std::unique(begin(), end()) - begin());2394}2395}23962397inline void reverse()2398{2399const size_t j = m_size >> 1;24002401for (size_t i = 0; i < j; i++)2402std::swap(m_p[i], m_p[m_size - 1 - i]);2403}24042405inline bool find(const T& key, size_t &idx) const2406{2407idx = 0;24082409const T* p = m_p;2410const T* p_end = m_p + m_size;24112412size_t index = 0;24132414while (p != p_end)2415{2416if (key == *p)2417{2418idx = index;2419return true;2420}24212422p++;2423index++;2424}24252426return false;2427}24282429inline bool find_sorted(const T& key, size_t& idx) const2430{2431idx = 0;24322433if (!m_size)2434return false;24352436// Inclusive range2437size_t low = 0, high = m_size - 1;24382439while (low <= high)2440{2441size_t mid = (size_t)(((uint64_t)low 
+ (uint64_t)high) >> 1);24422443const T* pTrial_key = m_p + mid;24442445// Sanity check comparison operator2446assert(!((*pTrial_key < key) && (key < *pTrial_key)));24472448if (*pTrial_key < key)2449{2450if (add_overflow_check(mid, 1))2451break;24522453low = mid + 1;2454}2455else if (key < *pTrial_key)2456{2457if (!mid)2458break;24592460high = mid - 1;2461}2462else2463{2464idx = mid;2465return true;2466}2467}24682469return false;2470}24712472inline size_t count_occurences(const T& key) const2473{2474size_t c = 0;24752476const T* p = m_p;2477const T* p_end = m_p + m_size;24782479while (p != p_end)2480{2481if (key == *p)2482c++;24832484p++;2485}24862487return c;2488}24892490inline void set_all(const T& o)2491{2492if ((sizeof(T) == 1) && (scalar_type<T>::cFlag))2493{2494#ifndef __EMSCRIPTEN__2495#ifdef __GNUC__2496#pragma GCC diagnostic push2497#pragma GCC diagnostic ignored "-Wclass-memaccess"2498#endif2499#endif2500memset(m_p, *reinterpret_cast<const uint8_t*>(&o), m_size);25012502#ifndef __EMSCRIPTEN__2503#ifdef __GNUC__2504#pragma GCC diagnostic pop2505#endif2506#endif2507}2508else2509{2510T* pDst = m_p;2511T* pDst_end = pDst + m_size;2512while (pDst != pDst_end)2513*pDst++ = o;2514}2515}25162517// Caller assumes ownership of the heap block associated with the container. Container is cleared.2518// Caller must use free() on the returned pointer.2519inline void* assume_ownership()2520{2521T* p = m_p;2522m_p = nullptr;2523m_size = 0;2524m_capacity = 0;2525return p;2526}25272528// Caller is granting ownership of the indicated heap block.2529// Block must have size constructed elements, and have enough room for capacity elements.2530// The block must have been allocated using malloc().2531// Important: This method is used in Basis Universal. 
If you change how this container allocates memory, you'll need to change any users of this method.2532inline bool grant_ownership(T* p, size_t size, size_t capacity)2533{2534// To prevent the caller from obviously shooting themselves in the foot.2535if (((p + capacity) > m_p) && (p < (m_p + m_capacity)))2536{2537// Can grant ownership of a block inside the container itself!2538assert(0);2539return false;2540}25412542if (size > capacity)2543{2544assert(0);2545return false;2546}25472548if (!p)2549{2550if (capacity)2551{2552assert(0);2553return false;2554}2555}2556else if (!capacity)2557{2558assert(0);2559return false;2560}25612562clear();2563m_p = p;2564m_size = size;2565m_capacity = capacity;2566return true;2567}25682569readable_span<T> get_readable_span() const2570{2571return readable_span<T>(m_p, m_size);2572}25732574writable_span<T> get_writable_span()2575{2576return writable_span<T>(m_p, m_size);2577}25782579private:2580T* m_p;2581size_t m_size; // the number of constructed objects2582size_t m_capacity; // the size of the allocation25832584template<typename Q> struct is_vector { enum { cFlag = false }; };2585template<typename Q> struct is_vector< vector<Q> > { enum { cFlag = true }; };25862587static void object_mover(void* pDst_void, void* pSrc_void, size_t num)2588{2589T* pSrc = static_cast<T*>(pSrc_void);2590T* const pSrc_end = pSrc + num;2591T* pDst = static_cast<T*>(pDst_void);25922593while (pSrc != pSrc_end)2594{2595new ((void*)(pDst)) T(std::move(*pSrc));2596scalar_type<T>::destruct(pSrc);25972598++pSrc;2599++pDst;2600}2601}26022603inline bool increase_capacity(size_t min_new_capacity, bool grow_hint, bool nofail = false)2604{2605return reinterpret_cast<elemental_vector*>(this)->increase_capacity(2606min_new_capacity, grow_hint, sizeof(T),2607(BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) || (is_vector<T>::cFlag)) ? 
nullptr : object_mover, nofail);
    }
};

// Allow vector-of-vector (and similar nestings) to be relocated with memcpy() by the containers above.
template<typename T> struct bitwise_movable< vector<T> > { enum { cFlag = true }; };

// Hash map
// rg TODO 9/8/2024: I've upgraded this class to support 64-bit size_t, and it needs a lot more testing.

const uint32_t SIZE_T_BITS = sizeof(size_t) * 8U;

// Shift helpers that return 0 instead of invoking undefined behavior when the shift
// amount equals or exceeds the operand's bit width.
inline uint32_t safe_shift_left(uint32_t v, uint32_t l)
{
    return (l < 32U) ? (v << l) : 0;
}

inline uint64_t safe_shift_left(uint64_t v, uint32_t l)
{
    return (l < 64U) ? (v << l) : 0;
}

// Default hash functor: casts the key to size_t, so it's only usable for keys convertible to size_t.
template <typename T>
struct hasher
{
    inline size_t operator() (const T& key) const { return static_cast<size_t>(key); }
};

// Default key equality functor, using the key's operator==.
template <typename T>
struct equal_to
{
    inline bool operator()(const T& a, const T& b) const { return a == b; }
};

// Hash table mapping Key to Value (Value defaults to empty_type).
// Important: The Hasher and Equals objects must be bitwise movable!
template<typename Key, typename Value = empty_type, typename Hasher = hasher<Key>, typename Equals = equal_to<Key> >
class hash_map
{
public:
    class iterator;
    class const_iterator;

private:
    friend class iterator;
    friend class const_iterator;

    // Per-node occupancy flag.
    enum state
    {
        cStateInvalid = 0,
        cStateValid = 1
    };

    enum
    {
        cMinHashSize = 4U
    };

public:
    typedef hash_map<Key, Value, Hasher, Equals> hash_map_type;
    typedef std::pair<Key, Value> value_type;
    typedef Key key_type;
    typedef Value referent_type;
    typedef Hasher hasher_type;
    typedef Equals equals_type;

    // Default construction: empty table, no allocation (m_hash_shift == SIZE_T_BITS marks an empty table).
    hash_map() :
        m_num_valid(0),
        m_grow_threshold(0),
        m_hash_shift(SIZE_T_BITS)
    {
        static_assert((SIZE_T_BITS == 32) || (SIZE_T_BITS == 64), "SIZE_T_BITS must be 32 or 64");
    }

    hash_map(const hash_map& other) :
        m_values(other.m_values),
        m_num_valid(other.m_num_valid),
        m_grow_threshold(other.m_grow_threshold),
        m_hash_shift(other.m_hash_shift),
        m_hasher(other.m_hasher),
        m_equals(other.m_equals)
    {
        static_assert((SIZE_T_BITS ==
32) || (SIZE_T_BITS == 64), "SIZE_T_BITS must be 32 or 64");2688}26892690hash_map(hash_map&& other) :2691m_values(std::move(other.m_values)),2692m_num_valid(other.m_num_valid),2693m_grow_threshold(other.m_grow_threshold),2694m_hash_shift(other.m_hash_shift),2695m_hasher(std::move(other.m_hasher)),2696m_equals(std::move(other.m_equals))2697{2698static_assert((SIZE_T_BITS == 32) || (SIZE_T_BITS == 64), "SIZE_T_BITS must be 32 or 64");26992700other.m_hash_shift = SIZE_T_BITS;2701other.m_num_valid = 0;2702other.m_grow_threshold = 0;2703}27042705hash_map& operator= (const hash_map& other)2706{2707if (this == &other)2708return *this;27092710clear();27112712m_values = other.m_values;2713m_hash_shift = other.m_hash_shift;2714m_num_valid = other.m_num_valid;2715m_grow_threshold = other.m_grow_threshold;2716m_hasher = other.m_hasher;2717m_equals = other.m_equals;27182719return *this;2720}27212722hash_map& operator= (hash_map&& other)2723{2724if (this == &other)2725return *this;27262727clear();27282729m_values = std::move(other.m_values);2730m_hash_shift = other.m_hash_shift;2731m_num_valid = other.m_num_valid;2732m_grow_threshold = other.m_grow_threshold;2733m_hasher = std::move(other.m_hasher);2734m_equals = std::move(other.m_equals);27352736other.m_hash_shift = SIZE_T_BITS;2737other.m_num_valid = 0;2738other.m_grow_threshold = 0;27392740return *this;2741}27422743inline ~hash_map()2744{2745clear();2746}27472748inline const Equals& get_equals() const { return m_equals; }2749inline Equals& get_equals() { return m_equals; }2750inline void set_equals(const Equals& equals) { m_equals = equals; }27512752inline const Hasher& get_hasher() const { return m_hasher; }2753inline Hasher& get_hasher() { return m_hasher; }2754inline void set_hasher(const Hasher& hasher) { m_hasher = hasher; }27552756inline void clear()2757{2758if (m_values.empty())2759return;27602761if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))2762{2763node* p = &get_node(0);2764node* p_end = p + 
m_values.size();27652766size_t num_remaining = m_num_valid;2767while (p != p_end)2768{2769if (p->state)2770{2771destruct_value_type(p);2772num_remaining--;2773if (!num_remaining)2774break;2775}27762777p++;2778}2779}27802781m_values.clear_no_destruction();27822783m_hash_shift = SIZE_T_BITS;2784m_num_valid = 0;2785m_grow_threshold = 0;2786}27872788inline void reset()2789{2790if (!m_num_valid)2791return;27922793if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))2794{2795node* p = &get_node(0);2796node* p_end = p + m_values.size();27972798size_t num_remaining = m_num_valid;2799while (p != p_end)2800{2801if (p->state)2802{2803destruct_value_type(p);2804p->state = cStateInvalid;28052806num_remaining--;2807if (!num_remaining)2808break;2809}28102811p++;2812}2813}2814else if (sizeof(node) <= 16)2815{2816memset(&m_values[0], 0, m_values.size_in_bytes());2817}2818else2819{2820node* p = &get_node(0);2821node* p_end = p + m_values.size();28222823size_t num_remaining = m_num_valid;2824while (p != p_end)2825{2826if (p->state)2827{2828p->state = cStateInvalid;28292830num_remaining--;2831if (!num_remaining)2832break;2833}28342835p++;2836}2837}28382839m_num_valid = 0;2840}28412842inline size_t size()2843{2844return m_num_valid;2845}28462847inline size_t get_table_size()2848{2849return m_values.size();2850}28512852inline bool empty()2853{2854return !m_num_valid;2855}28562857inline bool reserve(size_t new_capacity)2858{2859if (!new_capacity)2860return true;28612862uint64_t new_hash_size = new_capacity;28632864new_hash_size = new_hash_size * 2ULL;28652866if (!helpers::is_power_of_2(new_hash_size))2867new_hash_size = helpers::next_pow2(new_hash_size);28682869new_hash_size = helpers::maximum<uint64_t>(cMinHashSize, new_hash_size);28702871if (!can_fit_into_size_t(new_hash_size))2872{2873assert(0);2874return false;2875}28762877assert(new_hash_size >= new_capacity);28782879if (new_hash_size <= m_values.size())2880return true;28812882return 
rehash((size_t)new_hash_size);2883}28842885class iterator2886{2887friend class hash_map<Key, Value, Hasher, Equals>;2888friend class hash_map<Key, Value, Hasher, Equals>::const_iterator;28892890public:2891inline iterator() : m_pTable(nullptr), m_index(0) { }2892inline iterator(hash_map_type& table, size_t index) : m_pTable(&table), m_index(index) { }2893inline iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }28942895inline iterator& operator= (const iterator& other)2896{2897m_pTable = other.m_pTable;2898m_index = other.m_index;2899return *this;2900}29012902// post-increment2903inline iterator operator++(int)2904{2905iterator result(*this);2906++*this;2907return result;2908}29092910// pre-increment2911inline iterator& operator++()2912{2913probe();2914return *this;2915}29162917inline value_type& operator*() const { return *get_cur(); }2918inline value_type* operator->() const { return get_cur(); }29192920inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }2921inline bool operator != (const iterator& b) const { return !(*this == b); }2922inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }2923inline bool operator != (const const_iterator& b) const { return !(*this == b); }29242925private:2926hash_map_type* m_pTable;2927size_t m_index;29282929inline value_type* get_cur() const2930{2931assert(m_pTable && (m_index < m_pTable->m_values.size()));2932assert(m_pTable->get_node_state(m_index) == cStateValid);29332934return &m_pTable->get_node(m_index);2935}29362937inline void probe()2938{2939assert(m_pTable);2940m_index = m_pTable->find_next(m_index);2941}2942};29432944class const_iterator2945{2946friend class hash_map<Key, Value, Hasher, Equals>;2947friend class hash_map<Key, Value, Hasher, Equals>::iterator;29482949public:2950inline const_iterator() : m_pTable(nullptr), m_index(0) { }2951inline 
const_iterator(const hash_map_type& table, size_t index) : m_pTable(&table), m_index(index) { }
inline const_iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }
inline const_iterator(const const_iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }

inline const_iterator& operator= (const const_iterator& other)
{
	m_pTable = other.m_pTable;
	m_index = other.m_index;
	return *this;
}

// Allows assigning a mutable iterator to a const_iterator.
inline const_iterator& operator= (const iterator& other)
{
	m_pTable = other.m_pTable;
	m_index = other.m_index;
	return *this;
}

// post-increment
inline const_iterator operator++(int)
{
	const_iterator result(*this);
	++*this;
	return result;
}

// pre-increment
inline const_iterator& operator++()
{
	probe();
	return *this;
}

inline const value_type& operator*() const { return *get_cur(); }
inline const value_type* operator->() const { return get_cur(); }

// Iterators compare equal when they reference the same table and slot index.
inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
inline bool operator != (const const_iterator& b) const { return !(*this == b); }
inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
inline bool operator != (const iterator& b) const { return !(*this == b); }

private:
	const hash_map_type* m_pTable;
	size_t m_index;

	// Returns the current key/value; asserts the slot is in range and valid.
	inline const value_type* get_cur() const
	{
		assert(m_pTable && (m_index < m_pTable->m_values.size()));
		assert(m_pTable->get_node_state(m_index) == cStateValid);

		return &m_pTable->get_node(m_index);
	}

	// Advances m_index to the next valid slot (or to m_values.size() == end()).
	inline void probe()
	{
		assert(m_pTable);
		m_index = m_pTable->find_next(m_index);
	}
};

// Returns an iterator to the first valid slot, or end() if the map is empty.
// Passing SIZE_MAX to find_next makes the scan start at slot 0 (the internal index++ wraps).
inline const_iterator begin() const
{
	if (!m_num_valid)
		return end();

	return const_iterator(*this, find_next(std::numeric_limits<size_t>::max()));
}

// end() is represented by the one-past-the-last slot index (m_values.size()).
inline const_iterator end() const
{
	return const_iterator(*this, m_values.size());
}

inline iterator begin()
{
	if (!m_num_valid)
		return end();

	return iterator(*this, find_next(std::numeric_limits<size_t>::max()));
}

inline iterator end()
{
	return iterator(*this, m_values.size());
}

// insert_result.first will always point to inserted key/value (or the already existing key/value).
// insert_result.second will be true if a new key/value was inserted, or false if the key already existed (in which case first will point to the already existing value).
typedef std::pair<iterator, bool> insert_result;

// Copy-inserts (k, v), growing the table if needed. Aborts the process (container_abort)
// if growth fails; use try_insert() for a non-aborting variant.
inline insert_result insert(const Key& k, const Value& v = Value())
{
	insert_result result;
	if (!insert_no_grow(result, k, v))
	{
		if (!try_grow())
			container_abort("hash_map::try_grow() failed");

		// This must succeed.
		if (!insert_no_grow(result, k, v))
			container_abort("hash_map::insert() failed");
	}

	return result;
}

// Non-aborting insert: returns false if the table could not grow or the insert failed.
inline bool try_insert(insert_result& result, const Key& k, const Value& v = Value())
{
	if (!insert_no_grow(result, k, v))
	{
		if (!try_grow())
			return false;

		if (!insert_no_grow(result, k, v))
			return false;
	}

	return true;
}

// Move-inserts (k, v). NOTE(review): if the first insert_no_grow_move attempt fails
// after partially consuming k/v the retry re-moves them — presumably a failed attempt
// leaves k/v untouched (it only fails before constructing the node); verify against
// insert_no_grow_move below.
inline insert_result insert(Key&& k, Value&& v = Value())
{
	insert_result result;
	if (!insert_no_grow_move(result, std::move(k), std::move(v)))
	{
		if (!try_grow())
			container_abort("hash_map::try_grow() failed");

		// This must succeed.
		if (!insert_no_grow_move(result, std::move(k), std::move(v)))
			container_abort("hash_map::insert() failed");
	}

	return result;
}

inline bool try_insert(insert_result& result, Key&& k, Value&& v = Value())
{
	if (!insert_no_grow_move(result, std::move(k), std::move(v)))
	{
		if (!try_grow())
			return false;

		if (!insert_no_grow_move(result, std::move(k), std::move(v)))
			return false;
	}

	return true;
}

// std::pair-style convenience overloads.
inline insert_result insert(const value_type& v)
{
	return insert(v.first, v.second);
}

inline bool try_insert(insert_result& result, const value_type& v)
{
	return try_insert(result, v.first, v.second);
}

inline insert_result insert(value_type&& v)
{
	return insert(std::move(v.first), std::move(v.second));
}

inline bool try_insert(insert_result& result, value_type&& v)
{
	return try_insert(result, std::move(v.first), std::move(v.second));
}

inline const_iterator find(const Key& k) const
{
	return const_iterator(*this, find_index(k));
}

inline iterator find(const Key& k)
{
	return iterator(*this, find_index(k));
}

// find_index() returns m_values.size() (the end() index) when the key is absent.
inline bool contains(const Key& k) const
{
	const size_t idx = find_index(k);
	return idx != m_values.size();
}

// Erases key k, returning false if it wasn't present. After destructing the node,
// the loop repairs the probe chain by shifting displaced entries into the freed slot
// (backward-shift deletion) so no tombstones are needed. Probing here walks indices
// DOWNWARD with wraparound, matching find_index()/insert_no_grow(); the three-way
// cyclic-interval test decides whether a candidate's home slot r still reaches it
// if it is moved into the hole at j.
inline bool erase(const Key& k)
{
	size_t i = find_index(k);

	if (i >= m_values.size())
		return false;

	node* pDst = &get_node(i);
	destruct_value_type(pDst);
	pDst->state = cStateInvalid;

	m_num_valid--;

	for (; ; )
	{
		size_t r, j = i;

		node* pSrc = pDst;

		do
		{
			if (!i)
			{
				// Wrap from slot 0 back to the last slot.
				i = m_values.size() - 1;
				pSrc = &get_node(i);
			}
			else
			{
				i--;
				pSrc--;
			}

			// Hit an empty slot: the probe chain ends here, nothing more to shift.
			if (!pSrc->state)
				return true;

			r = hash_key(pSrc->first);

		} while ((i <= r && r < j) || (r < j && j < i) || (j < i && i <= r));

		// pSrc's entry would become unreachable with the hole at j — move it into the hole.
		move_node(pDst, pSrc);

		pDst = pSrc;
	}
}

// O(1) swap of all state with another map.
inline void swap(hash_map_type& other)
{
	m_values.swap(other.m_values);
	std::swap(m_hash_shift, other.m_hash_shift);
	std::swap(m_num_valid, other.m_num_valid);
	std::swap(m_grow_threshold, other.m_grow_threshold);
	std::swap(m_hasher, other.m_hasher);
	std::swap(m_equals, other.m_equals);
}

private:
// A slot: the key/value pair plus a 1-byte occupancy state (cStateValid/cStateInvalid).
struct node : public value_type
{
	uint8_t state;
};

// Copy-constructs a key/value into uninitialized node storage, using memcpy for
// bitwise-copyable types and placement construction otherwise.
static inline void construct_value_type(value_type* pDst, const Key& k, const Value& v)
{
	if (BASISU_IS_BITWISE_COPYABLE(Key))
		memcpy(&pDst->first, &k, sizeof(Key));
	else
		scalar_type<Key>::construct(&pDst->first, k);

	if (BASISU_IS_BITWISE_COPYABLE(Value))
		memcpy(&pDst->second, &v, sizeof(Value));
	else
		scalar_type<Value>::construct(&pDst->second, v);
}

static inline void construct_value_type(value_type* pDst, const value_type* pSrc)
{
	if ((BASISU_IS_BITWISE_COPYABLE(Key)) && (BASISU_IS_BITWISE_COPYABLE(Value)))
	{
		// Both halves trivially copyable: copy the whole pair at once.
		memcpy(pDst, pSrc, sizeof(value_type));
	}
	else
	{
		if (BASISU_IS_BITWISE_COPYABLE(Key))
			memcpy(&pDst->first, &pSrc->first, sizeof(Key));
		else
			scalar_type<Key>::construct(&pDst->first, pSrc->first);

		if (BASISU_IS_BITWISE_COPYABLE(Value))
			memcpy(&pDst->second, &pSrc->second, sizeof(Value));
		else
			scalar_type<Value>::construct(&pDst->second, pSrc->second);
	}
}

static inline void destruct_value_type(value_type* p)
{
	scalar_type<Key>::destruct(&p->first);
	scalar_type<Value>::destruct(&p->second);
}

// Moves nodes *pSrc to *pDst efficiently from one hashmap to another.
// pDst should NOT be constructed on entry.
static inline void move_node(node* pDst, node* pSrc, bool update_src_state = true)
{
	assert(!pDst->state);

	if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key) && BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))
	{
		// memcpy also copies the state byte, which must have been cStateValid.
		memcpy(pDst, pSrc, sizeof(node));

		assert(pDst->state == cStateValid);
	}
	else
	{
		if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key))
			memcpy(&pDst->first, &pSrc->first, sizeof(Key));
		else
		{
			new ((void*)&pDst->first) Key(std::move(pSrc->first));
			scalar_type<Key>::destruct(&pSrc->first);
		}

		if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))
			memcpy(&pDst->second, &pSrc->second, sizeof(Value));
		else
		{
			new ((void*)&pDst->second) Value(std::move(pSrc->second));
			scalar_type<Value>::destruct(&pSrc->second);
		}

		pDst->state = cStateValid;
	}

	if (update_src_state)
		pSrc->state = cStateInvalid;
}

// Raw, untyped backing storage for one node. Construction/destruction of the contained
// key/value is managed manually through the reinterpret_casts below, so that the vector
// of raw_nodes can hold a mix of live and empty slots.
struct raw_node
{
	inline raw_node()
	{
		node* p = reinterpret_cast<node*>(this);
		p->state = cStateInvalid;
	}

	// In practice, this should never be called (right?). We manage destruction ourselves.
	inline ~raw_node()
	{
		node* p = reinterpret_cast<node*>(this);
		if (p->state)
			hash_map_type::destruct_value_type(p);
	}

	inline raw_node(const raw_node& other)
	{
		node* pDst = reinterpret_cast<node*>(this);
		const node* pSrc = reinterpret_cast<const node*>(&other);

		if (pSrc->state)
		{
			hash_map_type::construct_value_type(pDst, pSrc);
			pDst->state = cStateValid;
		}
		else
			pDst->state = cStateInvalid;
	}

	inline raw_node& operator= (const raw_node& rhs)
	{
		if (this == &rhs)
			return *this;

		node* pDst = reinterpret_cast<node*>(this);
		const node* pSrc = reinterpret_cast<const node*>(&rhs);

		if (pSrc->state)
		{
			if (pDst->state)
			{
				// Both live: plain member-wise assignment.
				pDst->first = pSrc->first;
				pDst->second = pSrc->second;
			}
			else
			{
				// Destination empty: construct in place.
				hash_map_type::construct_value_type(pDst, pSrc);
				pDst->state = cStateValid;
			}
		}
		else if (pDst->state)
		{
			// Source empty, destination live: destroy the destination.
			hash_map_type::destruct_value_type(pDst);
			pDst->state = cStateInvalid;
		}

		return *this;
	}

	uint8_t m_bits[sizeof(node)];
};

typedef basisu::vector<raw_node> node_vector;

// The slot array; its size is always 0 or a power of 2 (see rehash()).
node_vector m_values;

size_t m_num_valid;
size_t m_grow_threshold;

// Right-shift applied after the Fibonacci multiply; SIZE_T_BITS - log2(m_values.size()).
uint32_t m_hash_shift;

Hasher m_hasher;
Equals m_equals;

// Maps a key to its home slot index in [0, m_values.size()).
inline size_t hash_key(const Key& k) const
{
	assert((safe_shift_left(static_cast<uint64_t>(1), (SIZE_T_BITS - m_hash_shift))) == m_values.size());

	// Fibonacci hashing: multiply by 2^N/phi and keep the top log2(table size) bits.
	if (SIZE_T_BITS == 32)
	{
		assert(m_hash_shift != 32);

		uint32_t hash = static_cast<uint32_t>(m_hasher(k));
		hash = (2654435769U * hash) >> m_hash_shift;

		assert(hash < m_values.size());
		return (size_t)hash;
	}
	else
	{
		assert(m_hash_shift != 64);

		uint64_t hash = static_cast<uint64_t>(m_hasher(k));
		hash = (0x9E3779B97F4A7C15ULL * hash) >> m_hash_shift;

		assert(hash < m_values.size());
		return (size_t)hash;
	}
}

inline const node& get_node(size_t index) const
{
	return *reinterpret_cast<const node*>(&m_values[index]);
}

inline node& get_node(size_t index)
{
	return *reinterpret_cast<node*>(&m_values[index]);
}

inline state get_node_state(size_t index) const
{
	return static_cast<state>(get_node(index).state);
}

inline void set_node_state(size_t index, bool valid)
{
	get_node(index).state = valid;
}

// Doubles the table size (rounded up to a power of 2); returns false on overflow
// or allocation failure.
inline bool try_grow()
{
	uint64_t n = m_values.size() * 2ULL;

	if (!helpers::is_power_of_2(n))
		n = helpers::next_pow2(n);

	if (!can_fit_into_size_t(n))
	{
		assert(0);
		return false;
	}

	return rehash(helpers::maximum<size_t>(cMinHashSize, (size_t)n));
}

// new_hash_size must be a power of 2.
// Builds a new map of the requested size, moves every valid node into it, then swaps
// it with *this. Returns false (leaving *this unchanged) if allocation fails.
inline bool rehash(size_t new_hash_size)
{
	if (!helpers::is_power_of_2((uint64_t)new_hash_size))
	{
		assert(0);
		return false;
	}

	if (new_hash_size < m_num_valid)
	{
		assert(0);
		return false;
	}

	if (new_hash_size == m_values.size())
		return true;

	hash_map new_map;
	if (!new_map.m_values.try_resize(new_hash_size))
		return false;

	new_map.m_hash_shift = SIZE_T_BITS - helpers::floor_log2i((uint64_t)new_hash_size);
	assert(new_hash_size == safe_shift_left(static_cast<uint64_t>(1), SIZE_T_BITS - new_map.m_hash_shift));

	// Disable growth while we re-insert (move_into never grows anyway).
	new_map.m_grow_threshold = std::numeric_limits<size_t>::max();

	node* pNode = reinterpret_cast<node*>(m_values.begin());
	node* pNode_end = pNode + m_values.size();

	while (pNode != pNode_end)
	{
		if (pNode->state)
		{
			new_map.move_into(pNode);

			// Early out once every valid node has been moved.
			if (new_map.m_num_valid == m_num_valid)
				break;
		}

		pNode++;
	}

	// Grow when the table becomes half full (rounded up).
	new_map.m_grow_threshold = new_hash_size >> 1U;
	if (new_hash_size & 1)
		new_map.m_grow_threshold++;

	// Nodes were moved out, so drop the old storage without running destructors.
	m_values.clear_no_destruction();
	m_hash_shift = SIZE_T_BITS;

	swap(new_map);

	return true;
}

// Returns the index of the first valid slot strictly after 'index', or
// m_values.size() (== end()) if there is none.
inline size_t find_next(size_t index) const
{
	index++;

	if (index >= m_values.size())
		return index;

	const node* pNode = &get_node(index);

	for (; ; )
	{
		if (pNode->state)
			break;

		if (++index >= m_values.size())
			break;

		pNode++;
	}

	return index;
}

// Returns the slot index holding key k, or m_values.size() if absent.
// Probes from the home slot by decrementing the index with wraparound,
// stopping at an empty slot or after a full cycle.
inline size_t find_index(const Key& k) const
{
	if (m_num_valid)
	{
		size_t index = hash_key(k);
		const node* pNode = &get_node(index);

		if (pNode->state)
		{
			if (m_equals(pNode->first, k))
				return index;

			const size_t orig_index = index;

			for (; ; )
			{
				if (!index)
				{
					index = m_values.size() - 1;
					pNode = &get_node(index);
				}
				else
				{
					index--;
					pNode--;
				}

				// Wrapped all the way around: key not present.
				if (index == orig_index)
					break;

				// Empty slot terminates the probe chain.
				if (!pNode->state)
					break;

				if (m_equals(pNode->first, k))
					return index;
			}
		}
	}

	return m_values.size();
}

// Copy-inserts without growing. Returns false if the table is empty, completely
// full, or at the grow threshold; returns true with result.second == false if the
// key already exists.
inline bool insert_no_grow(insert_result& result, const Key& k, const Value& v)
{
	if (!m_values.size())
		return false;

	size_t index = hash_key(k);
	node* pNode = &get_node(index);

	if (pNode->state)
	{
		if (m_equals(pNode->first, k))
		{
			result.first = iterator(*this, index);
			result.second = false;
			return true;
		}

		const size_t orig_index = index;

		for (; ; )
		{
			if (!index)
			{
				index = m_values.size() - 1;
				pNode = &get_node(index);
			}
			else
			{
				index--;
				pNode--;
			}

			// Table is completely full.
			if (orig_index == index)
				return false;

			// Found a free slot for the new node.
			if (!pNode->state)
				break;

			if (m_equals(pNode->first, k))
			{
				result.first = iterator(*this, index);
				result.second = false;
				return true;
			}
		}
	}

	if (m_num_valid >= m_grow_threshold)
		return false;

	construct_value_type(pNode, k, v);

	pNode->state = cStateValid;

	m_num_valid++;
	assert(m_num_valid <= m_values.size());

	result.first = iterator(*this, index);
	result.second = true;

	return true;
}

// Move user supplied key/value into a node.
static inline void move_value_type(value_type* pDst, Key&& k, Value&& v)
{
	// Not checking for is MOVABLE because the caller could later destruct k and/or v (what state do we set them to?)
	if (BASISU_IS_BITWISE_COPYABLE(Key))
	{
		memcpy(&pDst->first, &k, sizeof(Key));
	}
	else
	{
		new ((void*)&pDst->first) Key(std::move(k));
		// No destruction - user will do that (we don't own k).
	}

	if (BASISU_IS_BITWISE_COPYABLE(Value))
	{
		memcpy(&pDst->second, &v, sizeof(Value));
	}
	else
	{
		new ((void*)&pDst->second) Value(std::move(v));
		// No destruction - user will do that (we don't own v).
	}
}

// Insert user provided k/v, by moving, into the current hash table
// Same probe logic as insert_no_grow(); k/v are only consumed once a free slot is found.
inline bool insert_no_grow_move(insert_result& result, Key&& k, Value&& v)
{
	if (!m_values.size())
		return false;

	size_t index = hash_key(k);
	node* pNode = &get_node(index);

	if (pNode->state)
	{
		if (m_equals(pNode->first, k))
		{
			result.first = iterator(*this, index);
			result.second = false;
			return true;
		}

		const size_t orig_index = index;

		for (; ; )
		{
			if (!index)
			{
				index = m_values.size() - 1;
				pNode = &get_node(index);
			}
			else
			{
				index--;
				pNode--;
			}

			if (orig_index == index)
				return false;

			if (!pNode->state)
				break;

			if (m_equals(pNode->first, k))
			{
				result.first = iterator(*this, index);
				result.second = false;
				return true;
			}
		}
	}

	if (m_num_valid >= m_grow_threshold)
		return false;

	move_value_type(pNode, std::move(k), std::move(v));

	pNode->state = cStateValid;

	m_num_valid++;
	assert(m_num_valid <= m_values.size());

	result.first = iterator(*this, index);
	result.second = true;

	return true;
}

// Insert pNode by moving into the current hash table
// Used only by rehash(); assumes the key is not already present and a free slot exists.
inline void move_into(node* pNode)
{
	size_t index = hash_key(pNode->first);
	node* pDst_node = &get_node(index);

	if (pDst_node->state)
	{
		const size_t orig_index = index;

		for (; ; )
		{
			if (!index)
			{
				index = m_values.size() - 1;
				pDst_node = &get_node(index);
			}
			else
			{
				index--;
				pDst_node--;
			}

			// Should be impossible: rehash() sizes the table to fit all nodes.
			if (index == orig_index)
			{
				assert(false);
				return;
			}

			if (!pDst_node->state)
				break;
		}
	}

	// No need to update the source node's state (it's going away)
	move_node(pDst_node, pNode, false);

	m_num_valid++;
}
};

// hash_map can be moved bitwise (its members contain no self-references).
template<typename Key, typename Value, typename Hasher, typename Equals>
struct bitwise_movable< hash_map<Key, Value, Hasher, Equals> > { enum { cFlag = true }; };

#if BASISU_HASHMAP_TEST
extern void hash_map_test();
#endif

// String formatting
// printf-style formatting into a std::string. Output longer than the 2KB buffer is
// truncated (vsnprintf) — on Windows, vsprintf_s instead invokes the CRT's invalid
// parameter handler on overflow.
inline std::string string_format(const char* pFmt, ...)
{
	char buf[2048];

	va_list args;
	va_start(args, pFmt);
#ifdef _WIN32
	vsprintf_s(buf, sizeof(buf), pFmt, args);
#else
	vsnprintf(buf, sizeof(buf), pFmt, args);
#endif
	va_end(args);

	return std::string(buf);
}

// Discriminator for fmt_variant's union below.
enum class variant_type
{
	cInvalid,
	cI32, cU32,
	cI64, cU64,
	cFlt, cDbl, cBool,
	cStrPtr, cStdStr
};

// Tagged-union value holder used by the fmt_string()/fmt_printf() family.
// Scalars live in the union; std::string payloads live in m_str (m_type == cStdStr).
struct fmt_variant
{
	union
	{
		int32_t m_i32;
		uint32_t m_u32;
		int64_t m_i64;
		uint64_t m_u64;
		float m_flt;
		double m_dbl;
		bool m_bool;
		const char* m_pStr;
	};

	std::string m_str;

	variant_type m_type;

	inline fmt_variant() :
		m_u64(0),
		m_type(variant_type::cInvalid)
	{
	}

	// Copying m_u64 copies the largest union member, i.e. the whole union.
	inline fmt_variant(const fmt_variant& other) :
		m_u64(other.m_u64),
		m_str(other.m_str),
		m_type(other.m_type)
	{
	}

	inline fmt_variant(fmt_variant&& other) :
		m_u64(other.m_u64),
		m_str(std::move(other.m_str)),
		m_type(other.m_type)
	{
		other.m_type = variant_type::cInvalid;
		other.m_u64 = 0;
	}

	inline fmt_variant& operator= (fmt_variant&& other)
	{
		if (this == &other)
			return *this;

		m_type = other.m_type;
		m_u64 = other.m_u64;
		m_str = std::move(other.m_str);

		other.m_type = variant_type::cInvalid;
		other.m_u64 = 0;

		return *this;
	}

	inline fmt_variant& operator= (const fmt_variant& rhs)
	{
		if (this == &rhs)
			return *this;

		m_u64 = rhs.m_u64;
		m_type = rhs.m_type;
		m_str = rhs.m_str;

		return *this;
	}

	inline fmt_variant(int32_t v) : m_i32(v), m_type(variant_type::cI32) { }
	inline fmt_variant(uint32_t v) : m_u32(v), m_type(variant_type::cU32) { }
	inline fmt_variant(int64_t v) : m_i64(v), m_type(variant_type::cI64) { }
	inline fmt_variant(uint64_t v) : m_u64(v), m_type(variant_type::cU64) { }
#ifdef _MSC_VER
	// On MSVC, long/unsigned long are distinct from the fixed-width types above.
	inline fmt_variant(unsigned long v) : m_u64(v), m_type(variant_type::cU64) {}
	inline fmt_variant(long v) : m_i64(v), m_type(variant_type::cI64) {}
#endif
	inline fmt_variant(float v) : m_flt(v), m_type(variant_type::cFlt) { }
	inline fmt_variant(double v) : m_dbl(v), m_type(variant_type::cDbl) { }
	inline fmt_variant(const char* pStr) : m_pStr(pStr), m_type(variant_type::cStrPtr) { }
	inline fmt_variant(const std::string& str) : m_u64(0), m_str(str), m_type(variant_type::cStdStr) { }
	inline fmt_variant(bool val) : m_bool(val), m_type(variant_type::cBool) { }

	// Renders this variant using format spec 'fmt'; implemented in basisu_containers_impl.
	bool to_string(std::string& res, std::string& fmt) const;
};

typedef basisu::vector<fmt_variant> fmt_variant_vec;

// Core formatter: expands pFmt with the supplied variants into res. Implemented elsewhere.
bool fmt_variants(std::string& res, const char* pFmt, const fmt_variant_vec& variants);

// Type-safe printf-style helpers; each argument is captured as a fmt_variant.
template <typename... Args>
inline bool fmt_string(std::string& res, const char* pFmt, Args&&... args)
{
	return fmt_variants(res, pFmt, fmt_variant_vec{ fmt_variant(std::forward<Args>(args))... });
}

// Returns the formatted string directly; formatting errors yield an empty/partial result.
template <typename... Args>
inline std::string fmt_string(const char* pFmt, Args&&... args)
{
	std::string res;
	fmt_variants(res, pFmt, fmt_variant_vec{ fmt_variant(std::forward<Args>(args))... });
	return res;
}

// Formats and writes to stdout; returns EOF on formatting failure (mirroring fputs).
template <typename... Args>
inline int fmt_printf(const char* pFmt, Args&&... args)
{
	std::string res;
	if (!fmt_variants(res, pFmt, fmt_variant_vec{ fmt_variant(std::forward<Args>(args))... }))
		return EOF;

	return fputs(res.c_str(), stdout);
}

// Formats and writes to pFile; returns EOF on formatting failure.
template <typename... Args>
inline int fmt_fprintf(FILE* pFile, const char* pFmt, Args&&... args)
{
	std::string res;
	if (!fmt_variants(res, pFmt, fmt_variant_vec{ fmt_variant(std::forward<Args>(args))... }))
		return EOF;

	return fputs(res.c_str(), pFile);
}

// fixed_array - zero initialized by default, operator[] is always bounds checked.
template <std::size_t N, typename T>
class fixed_array
{
	static_assert(N >= 1, "fixed_array size must be at least 1");

public:
	using value_type = T;
	using size_type = std::size_t;
	using difference_type = std::ptrdiff_t;
	using reference = T&;
	using const_reference = const T&;
	using pointer = T*;
	using const_pointer = const T*;
	using iterator = T*;
	using const_iterator = const T*;

	T m_data[N];

	BASISU_FORCE_INLINE fixed_array()
	{
		initialize_array();
	}

	// Extra initializer elements beyond N are dropped (asserted in debug);
	// missing elements are value-initialized.
	BASISU_FORCE_INLINE fixed_array(std::initializer_list<T> list)
	{
		assert(list.size() <= N);

		std::size_t copy_size = std::min(list.size(), N);
		std::copy_n(list.begin(), copy_size, m_data); // Copy up to min(list.size(), N)

		if (list.size() < N)
		{
			// Initialize the rest of the array
			std::fill(m_data + copy_size, m_data + N, T{});
		}
	}

	// Bounds checked even in release builds; out-of-range access aborts.
	BASISU_FORCE_INLINE T& operator[](std::size_t index)
	{
		if (index >= N)
			container_abort("fixed_array: Index out of bounds.");
		return m_data[index];
	}

	BASISU_FORCE_INLINE const T& operator[](std::size_t index) const
	{
		if (index >= N)
			container_abort("fixed_array: Index out of bounds.");
		return m_data[index];
	}

	BASISU_FORCE_INLINE T* begin() { return m_data; }
	BASISU_FORCE_INLINE const T* begin() const { return m_data; }

	BASISU_FORCE_INLINE T* end() { return m_data + N; }
	BASISU_FORCE_INLINE const T* end() const { return m_data + N; }

	BASISU_FORCE_INLINE const T* data() const { return m_data; }
	BASISU_FORCE_INLINE T* data() { return m_data; }

	BASISU_FORCE_INLINE const T& front() const { return m_data[0]; }
	BASISU_FORCE_INLINE T& front() { return m_data[0]; }

	BASISU_FORCE_INLINE const T& back() const { return m_data[N - 1]; }
	BASISU_FORCE_INLINE T& back() { return m_data[N - 1]; }

	BASISU_FORCE_INLINE constexpr std::size_t size() const { return N; }

	// clear() resets every element to zero/default, it does not change size().
	BASISU_FORCE_INLINE void clear()
	{
		initialize_array(); // Reinitialize the array
	}

	BASISU_FORCE_INLINE void set_all(const T& value)
	{
		std::fill(m_data, m_data + N, value);
	}

	BASISU_FORCE_INLINE readable_span<T> get_readable_span() const
	{
		return readable_span<T>(m_data, N);
	}

	BASISU_FORCE_INLINE writable_span<T> get_writable_span()
	{
		return writable_span<T>(m_data, N);
	}

private:
	// Zero-fills trivially-zeroable element types via memset; value-initializes otherwise.
	BASISU_FORCE_INLINE void initialize_array()
	{
		if constexpr (std::is_integral<T>::value || std::is_floating_point<T>::value)
			memset(m_data, 0, sizeof(m_data));
		else
			std::fill(m_data, m_data + N, T{});
	}

	BASISU_FORCE_INLINE T& access_element(std::size_t index)
	{
		if (index >= N)
			container_abort("fixed_array: Index out of bounds.");
		return m_data[index];
	}

	BASISU_FORCE_INLINE const T& access_element(std::size_t index) const
	{
		if (index >= N)
			container_abort("fixed_array: Index out of bounds.");
		return m_data[index];
	}
};

// 2D array
// Row-major w x h grid stored in a basisu::vector; element (x, y) is at x + y * width.
template<typename T>
class vector2D
{
	typedef basisu::vector<T> vec_type;

	uint32_t m_width, m_height;
	vec_type m_values;

public:
	vector2D() :
		m_width(0),
		m_height(0)
	{
	}

	vector2D(uint32_t w, uint32_t h) :
		m_width(0),
		m_height(0)
	{
		resize(w, h);
	}

	vector2D(const vector2D& other)
	{
		*this = other;
	}

	vector2D(vector2D&& other) :
		m_width(0),
		m_height(0)
	{
		*this = std::move(other);
	}

	vector2D& operator= (const vector2D& other)
	{
		if (this != &other)
		{
			m_width = other.m_width;
			m_height = other.m_height;
			m_values = other.m_values;
		}
		return *this;
	}

	vector2D& operator= (vector2D&& other)
	{
		if (this != &other)
		{
			m_width = other.m_width;
			m_height = other.m_height;
			m_values = std::move(other.m_values);

			other.m_width = 0;
			other.m_height = 0;
		}
		return *this;
	}

	inline bool operator== (const vector2D& rhs) const
	{
		return (m_width == rhs.m_width) && (m_height == rhs.m_height) && (m_values == rhs.m_values);
	}

	inline size_t size_in_bytes() const { return m_values.size_in_bytes(); }

	inline uint32_t get_width() const { return m_width; }
	inline uint32_t get_height() const { return m_height; }

	// 2D element access (debug-asserted bounds only).
	inline const T& operator() (uint32_t x, uint32_t y) const { assert(x < m_width && y < m_height); return m_values[x + y * m_width]; }
	inline T& operator() (uint32_t x, uint32_t y) { assert(x < m_width && y < m_height); return m_values[x + y * m_width]; }

	inline size_t size() const { return m_values.size(); }

	// Flat (1D) element access.
	inline const T& operator[] (uint32_t i) const { return m_values[i]; }
	inline T& operator[] (uint32_t i) { return m_values[i]; }

	// Access with coordinates clamped to the valid range (edge replication).
	inline const T& at_clamped(int x, int y) const { return (*this)(clamp<int>(x, 0, m_width - 1), clamp<int>(y, 0, m_height - 1)); }
	inline T& at_clamped(int x, int y) { return (*this)(clamp<int>(x, 0, m_width - 1), clamp<int>(y, 0, m_height - 1)); }

	void clear()
	{
		m_width = 0;
		m_height = 0;
		m_values.clear();
	}

	void set_all(const T& val)
	{
		vector_set_all(m_values, val);
	}

	inline const T* get_ptr() const { return m_values.data(); }
	inline T* get_ptr() { return m_values.data(); }

	// Resizes to new_width x new_height, preserving the overlapping region.
	// On size_t overflow it asserts and leaves the object unchanged.
	vector2D& resize(uint32_t new_width, uint32_t new_height)
	{
		if ((m_width == new_width) && (m_height == new_height))
			return *this;

		const uint64_t total_vals = (uint64_t)new_width * new_height;

		if (!can_fit_into_size_t(total_vals))
		{
			// What can we do?
			assert(0);
			return *this;
		}

		// Allocate the new storage, then swap so oldVals holds the previous contents.
		vec_type oldVals((size_t)total_vals);
		oldVals.swap(m_values);

		const uint32_t w = minimum(m_width, new_width);
		const uint32_t h = minimum(m_height, new_height);

		if ((w) && (h))
		{
			for (uint32_t y = 0; y < h; y++)
				for (uint32_t x = 0; x < w; x++)
					m_values[x + y * new_width] = oldVals[x + y * m_width];
		}

		m_width = new_width;
		m_height = new_height;

		return *this;
	}

	// Non-aborting resize variant: returns false (object unchanged) on overflow
	// or allocation failure.
	bool try_resize(uint32_t new_width, uint32_t new_height)
	{
		if ((m_width == new_width) && (m_height == new_height))
			return true;

		const uint64_t total_vals = (uint64_t)new_width * new_height;

		if (!can_fit_into_size_t(total_vals))
		{
			// What can we do?
			assert(0);
			return false;
		}

		vec_type oldVals;
		if (!oldVals.try_resize((size_t)total_vals))
			return false;

		oldVals.swap(m_values);

		const uint32_t w = minimum(m_width, new_width);
		const uint32_t h = minimum(m_height, new_height);

		if ((w) && (h))
		{
			for (uint32_t y = 0; y < h; y++)
				for (uint32_t x = 0; x < w; x++)
					m_values[x + y * new_width] = oldVals[x + y * m_width];
		}

		m_width = new_width;
		m_height = new_height;

		return true;
	}

	// Copies a w x h block starting at (src_x, src_y) into pDst (row-major, tightly
	// packed). Blocks extending past the edges use clamped (edge-replicated) samples;
	// fully-interior blocks take the fast memcpy-per-row path.
	const vector2D& extract_block_clamped(T* pDst, uint32_t src_x, uint32_t src_y, uint32_t w, uint32_t h) const
	{
		// HACK HACK
		if (((src_x + w) > m_width) || ((src_y + h) > m_height))
		{
			// Slower clamping case
			for (uint32_t y = 0; y < h; y++)
				for (uint32_t x = 0; x < w; x++)
					*pDst++ = at_clamped(src_x + x, src_y + y);
		}
		else
		{
			// NOTE(review): memcpy path assumes T is bitwise copyable — confirm callers
			// only instantiate vector2D with trivially copyable types.
			const T* pSrc = &m_values[src_x + src_y * m_width];

			for (uint32_t y = 0; y < h; y++)
			{
				memcpy(pDst, pSrc, w * sizeof(T));
				pSrc += m_width;
				pDst += w;
			}
		}

		return *this;
	}
};

} // namespace basisu

namespace std
{
	// ADL-friendly swap overloads so generic code picks up the containers' O(1) swaps.
	template<typename T>
	inline void swap(basisu::vector<T>& a, basisu::vector<T>& b)
	{
		a.swap(b);
	}

	template<typename Key, typename Value, typename Hasher, typename Equals>
	inline void swap(basisu::hash_map<Key, Value, Hasher, Equals>& a, basisu::hash_map<Key, Value, Hasher, Equals>& b)
	{
		a.swap(b);
	}

} // namespace std