// Path: blob/master/thirdparty/basis_universal/transcoder/basisu_containers.h
// basisu_containers.h1#pragma once2#include <stdlib.h>3#include <stdio.h>4#include <stdint.h>5#include <assert.h>6#include <algorithm>78#if defined(__linux__) && !defined(ANDROID)9// Only for malloc_usable_size() in basisu_containers_impl.h10#include <malloc.h>11#define HAS_MALLOC_USABLE_SIZE 112#endif1314// Set to 1 to always check vector operator[], front(), and back() even in release.15#define BASISU_VECTOR_FORCE_CHECKING 01617// If 1, the vector container will not query the CRT to get the size of resized memory blocks.18#define BASISU_VECTOR_DETERMINISTIC 11920#ifdef _MSC_VER21#define BASISU_FORCE_INLINE __forceinline22#else23#define BASISU_FORCE_INLINE inline24#endif2526#define BASISU_HASHMAP_TEST 02728namespace basisu29{30enum { cInvalidIndex = -1 };3132template <typename S> inline S clamp(S value, S low, S high) { return (value < low) ? low : ((value > high) ? high : value); }3334template <typename S> inline S maximum(S a, S b) { return (a > b) ? a : b; }35template <typename S> inline S maximum(S a, S b, S c) { return maximum(maximum(a, b), c); }36template <typename S> inline S maximum(S a, S b, S c, S d) { return maximum(maximum(maximum(a, b), c), d); }3738template <typename S> inline S minimum(S a, S b) { return (a < b) ? a : b; }39template <typename S> inline S minimum(S a, S b, S c) { return minimum(minimum(a, b), c); }40template <typename S> inline S minimum(S a, S b, S c, S d) { return minimum(minimum(minimum(a, b), c), d); }4142#ifdef _MSC_VER43__declspec(noreturn)44#else45[[noreturn]]46#endif47void container_abort(const char* pMsg, ...);4849namespace helpers50{51inline bool is_power_of_2(uint32_t x) { return x && ((x & (x - 1U)) == 0U); }52inline bool is_power_of_2(uint64_t x) { return x && ((x & (x - 1U)) == 0U); }5354template<class T> const T& minimum(const T& a, const T& b) { return (b < a) ? b : a; }55template<class T> const T& maximum(const T& a, const T& b) { return (a < b) ? 
b : a; }5657inline uint32_t floor_log2i(uint32_t v)58{59uint32_t l = 0;60while (v > 1U)61{62v >>= 1;63l++;64}65return l;66}6768inline uint32_t floor_log2i(uint64_t v)69{70uint32_t l = 0;71while (v > 1U)72{73v >>= 1;74l++;75}76return l;77}7879inline uint32_t next_pow2(uint32_t val)80{81val--;82val |= val >> 16;83val |= val >> 8;84val |= val >> 4;85val |= val >> 2;86val |= val >> 1;87return val + 1;88}8990inline uint64_t next_pow2(uint64_t val)91{92val--;93val |= val >> 32;94val |= val >> 16;95val |= val >> 8;96val |= val >> 4;97val |= val >> 2;98val |= val >> 1;99return val + 1;100}101} // namespace helpers102103template <typename T>104inline T* construct(T* p)105{106return new (static_cast<void*>(p)) T;107}108109template <typename T, typename U>110inline T* construct(T* p, const U& init)111{112return new (static_cast<void*>(p)) T(init);113}114115template <typename T>116inline void construct_array(T* p, size_t n)117{118T* q = p + n;119for (; p != q; ++p)120new (static_cast<void*>(p)) T;121}122123template <typename T, typename U>124inline void construct_array(T* p, size_t n, const U& init)125{126T* q = p + n;127for (; p != q; ++p)128new (static_cast<void*>(p)) T(init);129}130131template <typename T>132inline void destruct(T* p)133{134p->~T();135}136137template <typename T> inline void destruct_array(T* p, size_t n)138{139T* q = p + n;140for (; p != q; ++p)141p->~T();142}143144template<typename T>145struct scalar_type146{147enum { cFlag = false };148static inline void construct(T* p) { basisu::construct(p); }149static inline void construct(T* p, const T& init) { basisu::construct(p, init); }150static inline void construct_array(T* p, size_t n) { basisu::construct_array(p, n); }151static inline void destruct(T* p) { basisu::destruct(p); }152static inline void destruct_array(T* p, size_t n) { basisu::destruct_array(p, n); }153};154155template<typename T> struct scalar_type<T*>156{157enum { cFlag = true };158static inline void construct(T** p) { memset(p, 0, sizeof(T*)); 
}159static inline void construct(T** p, T* init) { *p = init; }160static inline void construct_array(T** p, size_t n) { memset(p, 0, sizeof(T*) * n); }161static inline void destruct(T** p) { p; }162static inline void destruct_array(T** p, size_t n) { p, n; }163};164165#define BASISU_DEFINE_BUILT_IN_TYPE(X) \166template<> struct scalar_type<X> { \167enum { cFlag = true }; \168static inline void construct(X* p) { memset(p, 0, sizeof(X)); } \169static inline void construct(X* p, const X& init) { memcpy(p, &init, sizeof(X)); } \170static inline void construct_array(X* p, size_t n) { memset(p, 0, sizeof(X) * n); } \171static inline void destruct(X* p) { p; } \172static inline void destruct_array(X* p, size_t n) { p, n; } };173174BASISU_DEFINE_BUILT_IN_TYPE(bool)175BASISU_DEFINE_BUILT_IN_TYPE(char)176BASISU_DEFINE_BUILT_IN_TYPE(unsigned char)177BASISU_DEFINE_BUILT_IN_TYPE(short)178BASISU_DEFINE_BUILT_IN_TYPE(unsigned short)179BASISU_DEFINE_BUILT_IN_TYPE(int)180BASISU_DEFINE_BUILT_IN_TYPE(unsigned int)181BASISU_DEFINE_BUILT_IN_TYPE(long)182BASISU_DEFINE_BUILT_IN_TYPE(unsigned long)183#ifdef __GNUC__184BASISU_DEFINE_BUILT_IN_TYPE(long long)185BASISU_DEFINE_BUILT_IN_TYPE(unsigned long long)186#else187BASISU_DEFINE_BUILT_IN_TYPE(__int64)188BASISU_DEFINE_BUILT_IN_TYPE(unsigned __int64)189#endif190BASISU_DEFINE_BUILT_IN_TYPE(float)191BASISU_DEFINE_BUILT_IN_TYPE(double)192BASISU_DEFINE_BUILT_IN_TYPE(long double)193194#undef BASISU_DEFINE_BUILT_IN_TYPE195196template<typename T>197struct bitwise_movable { enum { cFlag = false }; };198199#define BASISU_DEFINE_BITWISE_MOVABLE(Q) template<> struct bitwise_movable<Q> { enum { cFlag = true }; };200201template<typename T>202struct bitwise_copyable { enum { cFlag = false }; };203204#define BASISU_DEFINE_BITWISE_COPYABLE(Q) template<> struct bitwise_copyable<Q> { enum { cFlag = true }; };205206#define BASISU_IS_POD(T) __is_pod(T)207208#define BASISU_IS_SCALAR_TYPE(T) (scalar_type<T>::cFlag)209210#if 
!defined(BASISU_HAVE_STD_TRIVIALLY_COPYABLE) && defined(__GNUC__) && (__GNUC__ < 5)211#define BASISU_IS_TRIVIALLY_COPYABLE(...) __is_trivially_copyable(__VA_ARGS__)212#else213#define BASISU_IS_TRIVIALLY_COPYABLE(...) std::is_trivially_copyable<__VA_ARGS__>::value214#endif215216// TODO: clean this up, it's still confusing (copying vs. movable).217#define BASISU_IS_BITWISE_COPYABLE(T) (BASISU_IS_SCALAR_TYPE(T) || BASISU_IS_POD(T) || BASISU_IS_TRIVIALLY_COPYABLE(T) || std::is_trivial<T>::value || (bitwise_copyable<T>::cFlag))218219#define BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) (BASISU_IS_BITWISE_COPYABLE(T) || (bitwise_movable<T>::cFlag))220221#define BASISU_HAS_DESTRUCTOR(T) ((!scalar_type<T>::cFlag) && (!__is_pod(T)) && (!std::is_trivially_destructible<T>::value))222223typedef char(&yes_t)[1];224typedef char(&no_t)[2];225226template <class U> yes_t class_test(int U::*);227template <class U> no_t class_test(...);228229template <class T> struct is_class230{231enum { value = (sizeof(class_test<T>(0)) == sizeof(yes_t)) };232};233234template <typename T> struct is_pointer235{236enum { value = false };237};238239template <typename T> struct is_pointer<T*>240{241enum { value = true };242};243244struct empty_type { };245246BASISU_DEFINE_BITWISE_COPYABLE(empty_type);247BASISU_DEFINE_BITWISE_MOVABLE(empty_type);248249template<typename T> struct rel_ops250{251friend bool operator!=(const T& x, const T& y) { return (!(x == y)); }252friend bool operator> (const T& x, const T& y) { return (y < x); }253friend bool operator<=(const T& x, const T& y) { return (!(y < x)); }254friend bool operator>=(const T& x, const T& y) { return (!(x < y)); }255};256257struct elemental_vector258{259void* m_p;260size_t m_size;261size_t m_capacity;262263typedef void (*object_mover)(void* pDst, void* pSrc, size_t num);264265bool increase_capacity(size_t min_new_capacity, bool grow_hint, size_t element_size, object_mover pRelocate, bool nofail);266};267268// Returns true if a+b would overflow a 
size_t.269inline bool add_overflow_check(size_t a, size_t b)270{271size_t c = a + b;272return c < a;273}274275// Returns false on overflow, true if OK.276template<typename T>277inline bool can_fit_into_size_t(T val)278{279static_assert(std::is_integral<T>::value, "T must be an integral type");280281return (val >= 0) && (static_cast<size_t>(val) == val);282}283284// Returns true if a*b would overflow a size_t.285inline bool mul_overflow_check(size_t a, size_t b)286{287// Avoid the division on 32-bit platforms288if (sizeof(size_t) == sizeof(uint32_t))289return !can_fit_into_size_t(static_cast<uint64_t>(a) * b);290else291return b && (a > (SIZE_MAX / b));292}293294template<typename T>295class writable_span;296297template<typename T>298class readable_span299{300public:301using value_type = T;302using size_type = size_t;303using const_pointer = const T*;304using const_reference = const T&;305using const_iterator = const T*;306307inline readable_span() :308m_p(nullptr),309m_size(0)310{311}312313inline readable_span(const writable_span<T>& other);314inline readable_span& operator= (const writable_span<T>& rhs);315316inline readable_span(const_pointer p, size_t n)317{318set(p, n);319}320321inline readable_span(const_pointer s, const_pointer e)322{323set(s, e);324}325326inline readable_span(const readable_span& other) :327m_p(other.m_p),328m_size(other.m_size)329{330assert(!m_size || m_p);331}332333inline readable_span(readable_span&& other) :334m_p(other.m_p),335m_size(other.m_size)336{337assert(!m_size || m_p);338339other.m_p = nullptr;340other.m_size = 0;341}342343template <size_t N>344inline readable_span(const T(&arr)[N]) :345m_p(arr),346m_size(N)347{348}349350template <size_t N>351inline readable_span& set(const T(&arr)[N])352{353m_p = arr;354m_size = N;355return *this;356}357358inline readable_span& set(const_pointer p, size_t n)359{360if (!p && n)361{362assert(0);363m_p = nullptr;364m_size = 0;365}366else367{368m_p = p;369m_size = n;370}371372return 
*this;373}374375inline readable_span& set(const_pointer s, const_pointer e)376{377if ((e < s) || (!s && e))378{379assert(0);380m_p = nullptr;381m_size = 0;382}383else384{385m_p = s;386m_size = e - s;387}388389return *this;390}391392inline bool operator== (const readable_span& rhs) const393{394return (m_p == rhs.m_p) && (m_size == rhs.m_size);395}396397inline bool operator!= (const readable_span& rhs) const398{399return (m_p != rhs.m_p) || (m_size != rhs.m_size);400}401402// only true if the region is totally inside the span403inline bool is_inside_ptr(const_pointer p, size_t n) const404{405if (!is_valid())406{407assert(0);408return false;409}410411if (!p)412{413assert(!n);414return false;415}416417return (p >= m_p) && ((p + n) <= end());418}419420inline bool is_inside(size_t ofs, size_t size) const421{422if (add_overflow_check(ofs, size))423{424assert(0);425return false;426}427428if (!is_valid())429{430assert(0);431return false;432}433434if ((ofs + size) > m_size)435return false;436437return true;438}439440inline readable_span subspan(size_t ofs, size_t n) const441{442if (!is_valid())443{444assert(0);445return readable_span((const_pointer)nullptr, (size_t)0);446}447448if (add_overflow_check(ofs, n))449{450assert(0);451return readable_span((const_pointer)nullptr, (size_t)0);452}453454if ((ofs + n) > m_size)455{456assert(0);457return readable_span((const_pointer)nullptr, (size_t)0);458}459460return readable_span(m_p + ofs, n);461}462463void clear()464{465m_p = nullptr;466m_size = 0;467}468469inline bool empty() const { return !m_size; }470471// true if the span is non-nullptr and is not empty472inline bool is_valid() const { return m_p && m_size; }473474inline bool is_nullptr() const { return m_p == nullptr; }475476inline size_t size() const { return m_size; }477inline size_t size_in_bytes() const { assert(can_fit_into_size_t((uint64_t)m_size * sizeof(T))); return m_size * sizeof(T); }478479inline const_pointer get_ptr() const { return m_p; }480481inline 
const_iterator begin() const { return m_p; }482inline const_iterator end() const { assert(m_p || !m_size); return m_p + m_size; }483484inline const_iterator cbegin() const { return m_p; }485inline const_iterator cend() const { assert(m_p || !m_size); return m_p + m_size; }486487inline const_reference front() const488{489if (!(m_p && m_size))490container_abort("readable_span invalid\n");491492return m_p[0];493}494495inline const_reference back() const496{497if (!(m_p && m_size))498container_abort("readable_span invalid\n");499500return m_p[m_size - 1];501}502503inline readable_span& operator= (const readable_span& rhs)504{505m_p = rhs.m_p;506m_size = rhs.m_size;507return *this;508}509510inline readable_span& operator= (readable_span&& rhs)511{512if (this != &rhs)513{514m_p = rhs.m_p;515m_size = rhs.m_size;516rhs.m_p = nullptr;517rhs.m_size = 0;518}519520return *this;521}522523inline const_reference operator* () const524{525if (!(m_p && m_size))526container_abort("readable_span invalid\n");527528return *m_p;529}530531inline const_pointer operator-> () const532{533if (!(m_p && m_size))534container_abort("readable_span invalid\n");535536return m_p;537}538539inline readable_span& remove_prefix(size_t n)540{541if ((!m_p) || (n > m_size))542{543assert(0);544return *this;545}546547m_p += n;548m_size -= n;549return *this;550}551552inline readable_span& remove_suffix(size_t n)553{554if ((!m_p) || (n > m_size))555{556assert(0);557return *this;558}559560m_size -= n;561return *this;562}563564inline readable_span& enlarge(size_t n)565{566if (!m_p)567{568assert(0);569return *this;570}571572if (add_overflow_check(m_size, n))573{574assert(0);575return *this;576}577578m_size += n;579return *this;580}581582bool copy_from(size_t src_ofs, size_t src_size, T* pDst, size_t dst_ofs) const583{584if (!src_size)585return true;586587if (!pDst)588{589assert(0);590return false;591}592593if (!is_inside(src_ofs, src_size))594{595assert(0);596return false;597}598599const_pointer pS = m_p + 
src_ofs;600601if (BASISU_IS_BITWISE_COPYABLE(T))602{603const uint64_t num_bytes = (uint64_t)src_size * sizeof(T);604605if (!can_fit_into_size_t(num_bytes))606{607assert(0);608return false;609}610611memcpy(pDst, pS, (size_t)num_bytes);612}613else614{615T* pD = pDst + dst_ofs;616T* pDst_end = pD + src_size;617618while (pD != pDst_end)619*pD++ = *pS++;620}621622return true;623}624625inline const_reference operator[] (size_t idx) const626{627if ((!is_valid()) || (idx >= m_size))628container_abort("readable_span: invalid span or index\n");629630return m_p[idx];631}632633inline uint16_t read_le16(size_t ofs) const634{635static_assert(sizeof(T) == 1, "T must be byte size");636637if (!is_inside(ofs, sizeof(uint16_t)))638{639assert(0);640return false;641}642643const uint8_t a = (uint8_t)m_p[ofs];644const uint8_t b = (uint8_t)m_p[ofs + 1];645return a | (b << 8u);646}647648template<typename R>649inline R read_val(size_t ofs) const650{651static_assert(sizeof(T) == 1, "T must be byte size");652653if (!is_inside(ofs, sizeof(R)))654{655assert(0);656return (R)0;657}658659return *reinterpret_cast<const R*>(&m_p[ofs]);660}661662inline uint16_t read_be16(size_t ofs) const663{664static_assert(sizeof(T) == 1, "T must be byte size");665666if (!is_inside(ofs, sizeof(uint16_t)))667{668assert(0);669return 0;670}671672const uint8_t b = (uint8_t)m_p[ofs];673const uint8_t a = (uint8_t)m_p[ofs + 1];674return a | (b << 8u);675}676677inline uint32_t read_le32(size_t ofs) const678{679static_assert(sizeof(T) == 1, "T must be byte size");680681if (!is_inside(ofs, sizeof(uint32_t)))682{683assert(0);684return 0;685}686687const uint8_t a = (uint8_t)m_p[ofs];688const uint8_t b = (uint8_t)m_p[ofs + 1];689const uint8_t c = (uint8_t)m_p[ofs + 2];690const uint8_t d = (uint8_t)m_p[ofs + 3];691return a | (b << 8u) | (c << 16u) | (d << 24u);692}693694inline uint32_t read_be32(size_t ofs) const695{696static_assert(sizeof(T) == 1, "T must be byte size");697698if (!is_inside(ofs, 
sizeof(uint32_t)))699{700assert(0);701return 0;702}703704const uint8_t d = (uint8_t)m_p[ofs];705const uint8_t c = (uint8_t)m_p[ofs + 1];706const uint8_t b = (uint8_t)m_p[ofs + 2];707const uint8_t a = (uint8_t)m_p[ofs + 3];708return a | (b << 8u) | (c << 16u) | (d << 24u);709}710711inline uint64_t read_le64(size_t ofs) const712{713if (!add_overflow_check(ofs, sizeof(uint64_t)))714{715assert(0);716return 0;717}718const uint64_t l = read_le32(ofs);719const uint64_t h = read_le32(ofs + sizeof(uint32_t));720return l | (h << 32u);721}722723inline uint64_t read_be64(size_t ofs) const724{725if (!add_overflow_check(ofs, sizeof(uint64_t)))726{727assert(0);728return 0;729}730const uint64_t h = read_be32(ofs);731const uint64_t l = read_be32(ofs + sizeof(uint32_t));732return l | (h << 32u);733}734735private:736const_pointer m_p;737size_t m_size;738};739740template<typename T>741class writable_span742{743friend readable_span<T>;744745public:746using value_type = T;747using size_type = size_t;748using const_pointer = const T*;749using const_reference = const T&;750using const_iterator = const T*;751using pointer = T*;752using reference = T&;753using iterator = T*;754755inline writable_span() :756m_p(nullptr),757m_size(0)758{759}760761inline writable_span(T* p, size_t n)762{763set(p, n);764}765766inline writable_span(T* s, T* e)767{768set(s, e);769}770771inline writable_span(const writable_span& other) :772m_p(other.m_p),773m_size(other.m_size)774{775assert(!m_size || m_p);776}777778inline writable_span(writable_span&& other) :779m_p(other.m_p),780m_size(other.m_size)781{782assert(!m_size || m_p);783784other.m_p = nullptr;785other.m_size = 0;786}787788template <size_t N>789inline writable_span(T(&arr)[N]) :790m_p(arr),791m_size(N)792{793}794795readable_span<T> get_readable_span() const796{797return readable_span<T>(m_p, m_size);798}799800template <size_t N>801inline writable_span& set(T(&arr)[N])802{803m_p = arr;804m_size = N;805return *this;806}807808inline writable_span& set(T* 
p, size_t n)809{810if (!p && n)811{812assert(0);813m_p = nullptr;814m_size = 0;815}816else817{818m_p = p;819m_size = n;820}821822return *this;823}824825inline writable_span& set(T* s, T* e)826{827if ((e < s) || (!s && e))828{829assert(0);830m_p = nullptr;831m_size = 0;832}833else834{835m_p = s;836m_size = e - s;837}838839return *this;840}841842inline bool operator== (const writable_span& rhs) const843{844return (m_p == rhs.m_p) && (m_size == rhs.m_size);845}846847inline bool operator== (const readable_span<T>& rhs) const848{849return (m_p == rhs.m_p) && (m_size == rhs.m_size);850}851852inline bool operator!= (const writable_span& rhs) const853{854return (m_p != rhs.m_p) || (m_size != rhs.m_size);855}856857inline bool operator!= (const readable_span<T>& rhs) const858{859return (m_p != rhs.m_p) || (m_size != rhs.m_size);860}861862// only true if the region is totally inside the span863inline bool is_inside_ptr(const_pointer p, size_t n) const864{865if (!is_valid())866{867assert(0);868return false;869}870871if (!p)872{873assert(!n);874return false;875}876877return (p >= m_p) && ((p + n) <= end());878}879880inline bool is_inside(size_t ofs, size_t size) const881{882if (add_overflow_check(ofs, size))883{884assert(0);885return false;886}887888if (!is_valid())889{890assert(0);891return false;892}893894if ((ofs + size) > m_size)895return false;896897return true;898}899900inline writable_span subspan(size_t ofs, size_t n) const901{902if (!is_valid())903{904assert(0);905return writable_span((T*)nullptr, (size_t)0);906}907908if (add_overflow_check(ofs, n))909{910assert(0);911return writable_span((T*)nullptr, (size_t)0);912}913914if ((ofs + n) > m_size)915{916assert(0);917return writable_span((T*)nullptr, (size_t)0);918}919920return writable_span(m_p + ofs, n);921}922923void clear()924{925m_p = nullptr;926m_size = 0;927}928929inline bool empty() const { return !m_size; }930931// true if the span is non-nullptr and is not empty932inline bool is_valid() const { return m_p && 
m_size; }933934inline bool is_nullptr() const { return m_p == nullptr; }935936inline size_t size() const { return m_size; }937inline size_t size_in_bytes() const { assert(can_fit_into_size_t((uint64_t)m_size * sizeof(T))); return m_size * sizeof(T); }938939inline T* get_ptr() const { return m_p; }940941inline iterator begin() const { return m_p; }942inline iterator end() const { assert(m_p || !m_size); return m_p + m_size; }943944inline const_iterator cbegin() const { return m_p; }945inline const_iterator cend() const { assert(m_p || !m_size); return m_p + m_size; }946947inline T& front() const948{949if (!(m_p && m_size))950container_abort("writable_span invalid\n");951952return m_p[0];953}954955inline T& back() const956{957if (!(m_p && m_size))958container_abort("writable_span invalid\n");959960return m_p[m_size - 1];961}962963inline writable_span& operator= (const writable_span& rhs)964{965m_p = rhs.m_p;966m_size = rhs.m_size;967return *this;968}969970inline writable_span& operator= (writable_span&& rhs)971{972if (this != &rhs)973{974m_p = rhs.m_p;975m_size = rhs.m_size;976rhs.m_p = nullptr;977rhs.m_size = 0;978}979980return *this;981}982983inline T& operator* () const984{985if (!(m_p && m_size))986container_abort("writable_span invalid\n");987988return *m_p;989}990991inline T* operator-> () const992{993if (!(m_p && m_size))994container_abort("writable_span invalid\n");995996return m_p;997}998999inline bool set_all(size_t ofs, size_t size, const_reference val)1000{1001if (!size)1002return true;10031004if (!is_inside(ofs, size))1005{1006assert(0);1007return false;1008}10091010T* pDst = m_p + ofs;10111012if ((sizeof(T) == sizeof(uint8_t)) && (BASISU_IS_BITWISE_COPYABLE(T)))1013{1014memset(pDst, (int)((uint8_t)val), size);1015}1016else1017{10181019T* pDst_end = pDst + size;10201021while (pDst != pDst_end)1022*pDst++ = val;1023}10241025return true;1026}10271028inline bool set_all(const_reference val)1029{1030return set_all(0, m_size, val);1031}10321033inline 
writable_span& remove_prefix(size_t n)1034{1035if ((!m_p) || (n > m_size))1036{1037assert(0);1038return *this;1039}10401041m_p += n;1042m_size -= n;1043return *this;1044}10451046inline writable_span& remove_suffix(size_t n)1047{1048if ((!m_p) || (n > m_size))1049{1050assert(0);1051return *this;1052}10531054m_size -= n;1055return *this;1056}10571058inline writable_span& enlarge(size_t n)1059{1060if (!m_p)1061{1062assert(0);1063return *this;1064}10651066if (add_overflow_check(m_size, n))1067{1068assert(0);1069return *this;1070}10711072m_size += n;1073return *this;1074}10751076// copy from this span to the destination ptr1077bool copy_from(size_t src_ofs, size_t src_size, T* pDst, size_t dst_ofs) const1078{1079if (!src_size)1080return true;10811082if (!pDst)1083{1084assert(0);1085return false;1086}10871088if (!is_inside(src_ofs, src_size))1089{1090assert(0);1091return false;1092}10931094const_pointer pS = m_p + src_ofs;10951096if (BASISU_IS_BITWISE_COPYABLE(T))1097{1098const uint64_t num_bytes = (uint64_t)src_size * sizeof(T);10991100if (!can_fit_into_size_t(num_bytes))1101{1102assert(0);1103return false;1104}11051106memcpy(pDst, pS, (size_t)num_bytes);1107}1108else1109{1110T* pD = pDst + dst_ofs;1111T* pDst_end = pD + src_size;11121113while (pD != pDst_end)1114*pD++ = *pS++;1115}11161117return true;1118}11191120// copy from the source ptr into this span1121bool copy_into(const_pointer pSrc, size_t src_ofs, size_t src_size, size_t dst_ofs) const1122{1123if (!src_size)1124return true;11251126if (!pSrc)1127{1128assert(0);1129return false;1130}11311132if (add_overflow_check(src_ofs, src_size) || add_overflow_check(dst_ofs, src_size))1133{1134assert(0);1135return false;1136}11371138if (!is_valid())1139{1140assert(0);1141return false;1142}11431144if (!is_inside(dst_ofs, src_size))1145{1146assert(0);1147return false;1148}11491150const_pointer pS = pSrc + src_ofs;1151T* pD = m_p + dst_ofs;11521153if (BASISU_IS_BITWISE_COPYABLE(T))1154{1155const uint64_t num_bytes = 
(uint64_t)src_size * sizeof(T);11561157if (!can_fit_into_size_t(num_bytes))1158{1159assert(0);1160return false;1161}11621163memcpy(pD, pS, (size_t)num_bytes);1164}1165else1166{1167T* pDst_end = pD + src_size;11681169while (pD != pDst_end)1170*pD++ = *pS++;1171}11721173return true;1174}11751176// copy from a source span into this span1177bool copy_into(const readable_span<T>& src, size_t src_ofs, size_t src_size, size_t dst_ofs) const1178{1179if (!src.is_inside(src_ofs, src_size))1180{1181assert(0);1182return false;1183}11841185return copy_into(src.get_ptr(), src_ofs, src_size, dst_ofs);1186}11871188// copy from a source span into this span1189bool copy_into(const writable_span& src, size_t src_ofs, size_t src_size, size_t dst_ofs) const1190{1191if (!src.is_inside(src_ofs, src_size))1192{1193assert(0);1194return false;1195}11961197return copy_into(src.get_ptr(), src_ofs, src_size, dst_ofs);1198}11991200inline T& operator[] (size_t idx) const1201{1202if ((!is_valid()) || (idx >= m_size))1203container_abort("writable_span: invalid span or index\n");12041205return m_p[idx];1206}12071208template<typename R>1209inline R read_val(size_t ofs) const1210{1211static_assert(sizeof(T) == 1, "T must be byte size");12121213if (!is_inside(ofs, sizeof(R)))1214{1215assert(0);1216return (R)0;1217}12181219return *reinterpret_cast<const R*>(&m_p[ofs]);1220}12211222template<typename R>1223inline bool write_val(size_t ofs, R val) const1224{1225static_assert(sizeof(T) == 1, "T must be byte size");12261227if (!is_inside(ofs, sizeof(R)))1228{1229assert(0);1230return false;1231}12321233*reinterpret_cast<R*>(&m_p[ofs]) = val;1234return true;1235}12361237inline bool write_le16(size_t ofs, uint16_t val) const1238{1239static_assert(sizeof(T) == 1, "T must be byte size");12401241if (!is_inside(ofs, sizeof(uint16_t)))1242{1243assert(0);1244return false;1245}12461247m_p[ofs] = (uint8_t)val;1248m_p[ofs + 1] = (uint8_t)(val >> 8u);1249return true;1250}12511252inline bool write_be16(size_t ofs, 
uint16_t val) const1253{1254static_assert(sizeof(T) == 1, "T must be byte size");12551256if (!is_inside(ofs, sizeof(uint16_t)))1257{1258assert(0);1259return false;1260}12611262m_p[ofs + 1] = (uint8_t)val;1263m_p[ofs] = (uint8_t)(val >> 8u);1264return true;1265}12661267inline bool write_le32(size_t ofs, uint32_t val) const1268{1269static_assert(sizeof(T) == 1, "T must be byte size");12701271if (!is_inside(ofs, sizeof(uint32_t)))1272{1273assert(0);1274return false;1275}12761277m_p[ofs] = (uint8_t)val;1278m_p[ofs + 1] = (uint8_t)(val >> 8u);1279m_p[ofs + 2] = (uint8_t)(val >> 16u);1280m_p[ofs + 3] = (uint8_t)(val >> 24u);1281return true;1282}12831284inline bool write_be32(size_t ofs, uint32_t val) const1285{1286static_assert(sizeof(T) == 1, "T must be byte size");12871288if (!is_inside(ofs, sizeof(uint32_t)))1289{1290assert(0);1291return false;1292}12931294m_p[ofs + 3] = (uint8_t)val;1295m_p[ofs + 2] = (uint8_t)(val >> 8u);1296m_p[ofs + 1] = (uint8_t)(val >> 16u);1297m_p[ofs] = (uint8_t)(val >> 24u);1298return true;1299}13001301inline bool write_le64(size_t ofs, uint64_t val) const1302{1303if (!add_overflow_check(ofs, sizeof(uint64_t)))1304{1305assert(0);1306return false;1307}13081309return write_le32(ofs, (uint32_t)val) && write_le32(ofs + sizeof(uint32_t), (uint32_t)(val >> 32u));1310}13111312inline bool write_be64(size_t ofs, uint64_t val) const1313{1314if (!add_overflow_check(ofs, sizeof(uint64_t)))1315{1316assert(0);1317return false;1318}13191320return write_be32(ofs + sizeof(uint32_t), (uint32_t)val) && write_be32(ofs, (uint32_t)(val >> 32u));1321}13221323inline uint16_t read_le16(size_t ofs) const1324{1325static_assert(sizeof(T) == 1, "T must be byte size");13261327if (!is_inside(ofs, sizeof(uint16_t)))1328{1329assert(0);1330return 0;1331}13321333const uint8_t a = (uint8_t)m_p[ofs];1334const uint8_t b = (uint8_t)m_p[ofs + 1];1335return a | (b << 8u);1336}13371338inline uint16_t read_be16(size_t ofs) const1339{1340static_assert(sizeof(T) == 1, "T must be byte 
size");13411342if (!is_inside(ofs, sizeof(uint16_t)))1343{1344assert(0);1345return 0;1346}13471348const uint8_t b = (uint8_t)m_p[ofs];1349const uint8_t a = (uint8_t)m_p[ofs + 1];1350return a | (b << 8u);1351}13521353inline uint32_t read_le32(size_t ofs) const1354{1355static_assert(sizeof(T) == 1, "T must be byte size");13561357if (!is_inside(ofs, sizeof(uint32_t)))1358{1359assert(0);1360return 0;1361}13621363const uint8_t a = (uint8_t)m_p[ofs];1364const uint8_t b = (uint8_t)m_p[ofs + 1];1365const uint8_t c = (uint8_t)m_p[ofs + 2];1366const uint8_t d = (uint8_t)m_p[ofs + 3];1367return a | (b << 8u) | (c << 16u) | (d << 24u);1368}13691370inline uint32_t read_be32(size_t ofs) const1371{1372static_assert(sizeof(T) == 1, "T must be byte size");13731374if (!is_inside(ofs, sizeof(uint32_t)))1375{1376assert(0);1377return 0;1378}13791380const uint8_t d = (uint8_t)m_p[ofs];1381const uint8_t c = (uint8_t)m_p[ofs + 1];1382const uint8_t b = (uint8_t)m_p[ofs + 2];1383const uint8_t a = (uint8_t)m_p[ofs + 3];1384return a | (b << 8u) | (c << 16u) | (d << 24u);1385}13861387inline uint64_t read_le64(size_t ofs) const1388{1389if (!add_overflow_check(ofs, sizeof(uint64_t)))1390{1391assert(0);1392return 0;1393}1394const uint64_t l = read_le32(ofs);1395const uint64_t h = read_le32(ofs + sizeof(uint32_t));1396return l | (h << 32u);1397}13981399inline uint64_t read_be64(size_t ofs) const1400{1401if (!add_overflow_check(ofs, sizeof(uint64_t)))1402{1403assert(0);1404return 0;1405}1406const uint64_t h = read_be32(ofs);1407const uint64_t l = read_be32(ofs + sizeof(uint32_t));1408return l | (h << 32u);1409}14101411private:1412T* m_p;1413size_t m_size;1414};14151416template<typename T>1417inline readable_span<T>::readable_span(const writable_span<T>& other) :1418m_p(other.m_p),1419m_size(other.m_size)1420{1421}14221423template<typename T>1424inline readable_span<T>& readable_span<T>::operator= (const writable_span<T>& rhs)1425{1426m_p = rhs.m_p;1427m_size = rhs.m_size;1428return 
*this;1429}14301431template<typename T>1432inline bool span_copy(const writable_span<T>& dst, const readable_span<T>& src)1433{1434return dst.copy_into(src, 0, src.size(), 0);1435}14361437template<typename T>1438inline bool span_copy(const writable_span<T>& dst, const writable_span<T>& src)1439{1440return dst.copy_into(src, 0, src.size(), 0);1441}14421443template<typename T>1444inline bool span_copy(const writable_span<T>& dst, size_t dst_ofs, const writable_span<T>& src, size_t src_ofs, size_t len)1445{1446return dst.copy_into(src, src_ofs, len, dst_ofs);1447}14481449template<typename T>1450inline bool span_copy(const writable_span<T>& dst, size_t dst_ofs, const readable_span<T>& src, size_t src_ofs, size_t len)1451{1452return dst.copy_into(src, src_ofs, len, dst_ofs);1453}14541455template<typename T>1456class vector : public rel_ops< vector<T> >1457{1458public:1459typedef T* iterator;1460typedef const T* const_iterator;1461typedef T value_type;1462typedef T& reference;1463typedef const T& const_reference;1464typedef T* pointer;1465typedef const T* const_pointer;14661467inline vector() :1468m_p(nullptr),1469m_size(0),1470m_capacity(0)1471{1472}14731474inline vector(size_t n, const T& init) :1475m_p(nullptr),1476m_size(0),1477m_capacity(0)1478{1479increase_capacity(n, false);1480construct_array(m_p, n, init);1481m_size = n;1482}14831484inline vector(vector&& other) :1485m_p(other.m_p),1486m_size(other.m_size),1487m_capacity(other.m_capacity)1488{1489other.m_p = nullptr;1490other.m_size = 0;1491other.m_capacity = 0;1492}14931494inline vector(const vector& other) :1495m_p(nullptr),1496m_size(0),1497m_capacity(0)1498{1499increase_capacity(other.m_size, false);15001501m_size = other.m_size;15021503if (BASISU_IS_BITWISE_COPYABLE(T))1504{1505if ((m_p) && (other.m_p))1506{1507memcpy((void *)m_p, other.m_p, m_size * sizeof(T));1508}1509}1510else1511{1512T* pDst = m_p;1513const T* pSrc = other.m_p;1514for (size_t i = m_size; i > 0; i--)1515construct(pDst++, 
*pSrc++);1516}1517}15181519inline explicit vector(size_t size) :1520m_p(nullptr),1521m_size(0),1522m_capacity(0)1523{1524resize(size);1525}15261527inline explicit vector(std::initializer_list<T> init_list) :1528m_p(nullptr),1529m_size(0),1530m_capacity(0)1531{1532resize(init_list.size());15331534size_t idx = 0;1535for (const T& elem : init_list)1536m_p[idx++] = elem;15371538assert(idx == m_size);1539}15401541inline vector(const readable_span<T>& rs) :1542m_p(nullptr),1543m_size(0),1544m_capacity(0)1545{1546set(rs);1547}15481549inline vector(const writable_span<T>& ws) :1550m_p(nullptr),1551m_size(0),1552m_capacity(0)1553{1554set(ws);1555}15561557// Set contents of vector to contents of the readable span1558bool set(const readable_span<T>& rs)1559{1560if (!rs.is_valid())1561{1562assert(0);1563return false;1564}15651566const size_t new_size = rs.size();15671568// Could call resize(), but it'll redundantly construct trivial types.1569if (m_size != new_size)1570{1571if (new_size < m_size)1572{1573if (BASISU_HAS_DESTRUCTOR(T))1574{1575scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);1576}1577}1578else1579{1580if (new_size > m_capacity)1581{1582if (!increase_capacity(new_size, false, true))1583return false;1584}1585}15861587// Don't bother constructing trivial types, because we're going to memcpy() over them anyway.1588if (!BASISU_IS_BITWISE_COPYABLE(T))1589{1590scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);1591}15921593m_size = new_size;1594}15951596if (!rs.copy_from(0, rs.size(), m_p, 0))1597{1598assert(0);1599return false;1600}16011602return true;1603}16041605// Set contents of vector to contents of the writable span1606inline bool set(const writable_span<T>& ws)1607{1608return set(ws.get_readable_span());1609}16101611inline ~vector()1612{1613if (m_p)1614{1615if (BASISU_HAS_DESTRUCTOR(T))1616{1617scalar_type<T>::destruct_array(m_p, m_size);1618}16191620free(m_p);1621}1622}16231624inline vector& operator= (const vector& 
other)1625{1626if (this == &other)1627return *this;16281629if (m_capacity >= other.m_size)1630resize(0);1631else1632{1633clear();1634increase_capacity(other.m_size, false);1635}16361637if (BASISU_IS_BITWISE_COPYABLE(T))1638{1639if ((m_p) && (other.m_p))1640memcpy((void *)m_p, other.m_p, other.m_size * sizeof(T));1641}1642else1643{1644T* pDst = m_p;1645const T* pSrc = other.m_p;1646for (size_t i = other.m_size; i > 0; i--)1647construct(pDst++, *pSrc++);1648}16491650m_size = other.m_size;16511652return *this;1653}16541655inline vector& operator= (vector&& rhs)1656{1657if (this != &rhs)1658{1659clear();16601661m_p = rhs.m_p;1662m_size = rhs.m_size;1663m_capacity = rhs.m_capacity;16641665rhs.m_p = nullptr;1666rhs.m_size = 0;1667rhs.m_capacity = 0;1668}1669return *this;1670}16711672BASISU_FORCE_INLINE const T* begin() const { return m_p; }1673BASISU_FORCE_INLINE T* begin() { return m_p; }16741675BASISU_FORCE_INLINE const T* end() const { return m_p + m_size; }1676BASISU_FORCE_INLINE T* end() { return m_p + m_size; }16771678BASISU_FORCE_INLINE bool empty() const { return !m_size; }16791680BASISU_FORCE_INLINE size_t size() const { return m_size; }1681BASISU_FORCE_INLINE uint32_t size_u32() const { assert(m_size <= UINT32_MAX); return static_cast<uint32_t>(m_size); }16821683BASISU_FORCE_INLINE size_t size_in_bytes() const { return m_size * sizeof(T); }1684BASISU_FORCE_INLINE uint32_t size_in_bytes_u32() const { assert((m_size * sizeof(T)) <= UINT32_MAX); return static_cast<uint32_t>(m_size * sizeof(T)); }16851686BASISU_FORCE_INLINE size_t capacity() const { return m_capacity; }16871688#if !BASISU_VECTOR_FORCE_CHECKING1689BASISU_FORCE_INLINE const T& operator[] (size_t i) const { assert(i < m_size); return m_p[i]; }1690BASISU_FORCE_INLINE T& operator[] (size_t i) { assert(i < m_size); return m_p[i]; }1691#else1692BASISU_FORCE_INLINE const T& operator[] (size_t i) const1693{1694if (i >= m_size)1695container_abort("vector::operator[] invalid index: %zu, max entries %u, type 
size %zu\n", i, m_size, sizeof(T));16961697return m_p[i];1698}1699BASISU_FORCE_INLINE T& operator[] (size_t i)1700{1701if (i >= m_size)1702container_abort("vector::operator[] invalid index: %zu, max entries %u, type size %zu\n", i, m_size, sizeof(T));17031704return m_p[i];1705}1706#endif17071708// at() always includes range checking, even in final builds, unlike operator [].1709BASISU_FORCE_INLINE const T& at(size_t i) const1710{1711if (i >= m_size)1712container_abort("vector::at() invalid index: %zu, max entries %u, type size %zu\n", i, m_size, sizeof(T));17131714return m_p[i];1715}1716BASISU_FORCE_INLINE T& at(size_t i)1717{1718if (i >= m_size)1719container_abort("vector::at() invalid index: %zu, max entries %u, type size %zu\n", i, m_size, sizeof(T));17201721return m_p[i];1722}17231724#if !BASISU_VECTOR_FORCE_CHECKING1725BASISU_FORCE_INLINE const T& front() const { assert(m_size); return m_p[0]; }1726BASISU_FORCE_INLINE T& front() { assert(m_size); return m_p[0]; }17271728BASISU_FORCE_INLINE const T& back() const { assert(m_size); return m_p[m_size - 1]; }1729BASISU_FORCE_INLINE T& back() { assert(m_size); return m_p[m_size - 1]; }1730#else1731BASISU_FORCE_INLINE const T& front() const1732{1733if (!m_size)1734container_abort("front: vector is empty, type size %zu\n", sizeof(T));17351736return m_p[0];1737}1738BASISU_FORCE_INLINE T& front()1739{1740if (!m_size)1741container_abort("front: vector is empty, type size %zu\n", sizeof(T));17421743return m_p[0];1744}17451746BASISU_FORCE_INLINE const T& back() const1747{1748if (!m_size)1749container_abort("back: vector is empty, type size %zu\n", sizeof(T));17501751return m_p[m_size - 1];1752}1753BASISU_FORCE_INLINE T& back()1754{1755if (!m_size)1756container_abort("back: vector is empty, type size %zu\n", sizeof(T));17571758return m_p[m_size - 1];1759}1760#endif17611762BASISU_FORCE_INLINE const T* get_ptr() const { return m_p; }1763BASISU_FORCE_INLINE T* get_ptr() { return m_p; }17641765BASISU_FORCE_INLINE const T* 
data() const { return m_p; }1766BASISU_FORCE_INLINE T* data() { return m_p; }17671768// clear() sets the container to empty, then frees the allocated block.1769inline void clear()1770{1771if (m_p)1772{1773if (BASISU_HAS_DESTRUCTOR(T))1774{1775scalar_type<T>::destruct_array(m_p, m_size);1776}17771778free(m_p);17791780m_p = nullptr;1781m_size = 0;1782m_capacity = 0;1783}1784}17851786inline void clear_no_destruction()1787{1788if (m_p)1789{1790free(m_p);1791m_p = nullptr;1792m_size = 0;1793m_capacity = 0;1794}1795}17961797inline void reserve(size_t new_capacity)1798{1799if (!try_reserve(new_capacity))1800container_abort("vector:reserve: try_reserve failed!\n");1801}18021803inline bool try_reserve(size_t new_capacity)1804{1805if (new_capacity > m_capacity)1806{1807if (!increase_capacity(new_capacity, false, true))1808return false;1809}1810else if (new_capacity < m_capacity)1811{1812// Must work around the lack of a "decrease_capacity()" method.1813// This case is rare enough in practice that it's probably not worth implementing an optimized in-place resize.1814vector tmp;1815if (!tmp.increase_capacity(helpers::maximum(m_size, new_capacity), false, true))1816return false;18171818tmp = *this;1819swap(tmp);1820}18211822return true;1823}18241825// try_resize(0) sets the container to empty, but does not free the allocated block.1826inline bool try_resize(size_t new_size, bool grow_hint = false)1827{1828if (m_size != new_size)1829{1830if (new_size < m_size)1831{1832if (BASISU_HAS_DESTRUCTOR(T))1833{1834scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);1835}1836}1837else1838{1839if (new_size > m_capacity)1840{1841if (!increase_capacity(new_size, (new_size == (m_size + 1)) || grow_hint, true))1842return false;1843}18441845scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);1846}18471848m_size = new_size;1849}18501851return true;1852}18531854// resize(0) sets the container to empty, but does not free the allocated block.1855inline void resize(size_t 
new_size, bool grow_hint = false)1856{1857if (!try_resize(new_size, grow_hint))1858container_abort("vector::resize failed, new size %zu\n", new_size);1859}18601861// If size >= capacity/2, reset() sets the container's size to 0 but doesn't free the allocated block (because the container may be similarly loaded in the future).1862// Otherwise it blows away the allocated block. See http://www.codercorner.com/blog/?p=4941863inline void reset()1864{1865if (m_size >= (m_capacity >> 1))1866resize(0);1867else1868clear();1869}18701871inline T* try_enlarge(size_t i)1872{1873size_t cur_size = m_size;18741875if (add_overflow_check(cur_size, i))1876return nullptr;18771878if (!try_resize(cur_size + i, true))1879return nullptr;18801881return get_ptr() + cur_size;1882}18831884inline T* enlarge(size_t i)1885{1886T* p = try_enlarge(i);1887if (!p)1888container_abort("vector::enlarge failed, amount %zu!\n", i);1889return p;1890}18911892BASISU_FORCE_INLINE void push_back(const T& obj)1893{1894assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));18951896if (m_size >= m_capacity)1897{1898if (add_overflow_check(m_size, 1))1899container_abort("vector::push_back: vector too large\n");19001901increase_capacity(m_size + 1, true);1902}19031904scalar_type<T>::construct(m_p + m_size, obj);1905m_size++;1906}19071908BASISU_FORCE_INLINE void push_back_value(T&& obj)1909{1910assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));19111912if (m_size >= m_capacity)1913{1914if (add_overflow_check(m_size, 1))1915container_abort("vector::push_back_value: vector too large\n");19161917increase_capacity(m_size + 1, true);1918}19191920new ((void*)(m_p + m_size)) T(std::move(obj));1921m_size++;1922}19231924inline bool try_push_back(const T& obj)1925{1926assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));19271928if (m_size >= m_capacity)1929{1930if (add_overflow_check(m_size, 1))1931return false;19321933if (!increase_capacity(m_size + 1, true, true))1934return 
false;1935}19361937scalar_type<T>::construct(m_p + m_size, obj);1938m_size++;19391940return true;1941}19421943inline bool try_push_back(T&& obj)1944{1945assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));19461947if (m_size >= m_capacity)1948{1949if (add_overflow_check(m_size, 1))1950return false;19511952if (!increase_capacity(m_size + 1, true, true))1953return false;1954}19551956new ((void*)(m_p + m_size)) T(std::move(obj));1957m_size++;19581959return true;1960}19611962// obj is explictly passed in by value, not ref1963inline void push_back_value(T obj)1964{1965if (m_size >= m_capacity)1966{1967if (add_overflow_check(m_size, 1))1968container_abort("vector::push_back_value: vector too large\n");19691970increase_capacity(m_size + 1, true);1971}19721973scalar_type<T>::construct(m_p + m_size, obj);1974m_size++;1975}19761977// obj is explictly passed in by value, not ref1978inline bool try_push_back_value(T obj)1979{1980if (m_size >= m_capacity)1981{1982if (add_overflow_check(m_size, 1))1983return false;19841985if (!increase_capacity(m_size + 1, true, true))1986return false;1987}19881989scalar_type<T>::construct(m_p + m_size, obj);1990m_size++;19911992return true;1993}19941995template<typename... Args>1996BASISU_FORCE_INLINE void emplace_back(Args&&... args)1997{1998if (m_size >= m_capacity)1999{2000if (add_overflow_check(m_size, 1))2001container_abort("vector::enlarge: vector too large\n");20022003increase_capacity(m_size + 1, true);2004}20052006new ((void*)(m_p + m_size)) T(std::forward<Args>(args)...); // perfect forwarding2007m_size++;2008}20092010template<typename... Args>2011BASISU_FORCE_INLINE bool try_emplace_back(Args&&... 
args)2012{2013if (m_size >= m_capacity)2014{2015if (add_overflow_check(m_size, 1))2016return false;20172018if (!increase_capacity(m_size + 1, true, true))2019return false;2020}20212022new ((void*)(m_p + m_size)) T(std::forward<Args>(args)...); // perfect forwarding2023m_size++;20242025return true;2026}20272028inline void pop_back()2029{2030assert(m_size);20312032if (m_size)2033{2034m_size--;2035scalar_type<T>::destruct(&m_p[m_size]);2036}2037}20382039inline bool try_insert(size_t index, const T* p, size_t n)2040{2041assert(index <= m_size);20422043if (index > m_size)2044return false;20452046if (!n)2047return true;20482049const size_t orig_size = m_size;20502051if (add_overflow_check(m_size, n))2052return false;20532054if (!try_resize(m_size + n, true))2055return false;20562057const size_t num_to_move = orig_size - index;20582059if (BASISU_IS_BITWISE_COPYABLE(T))2060{2061// This overwrites the destination object bits, but bitwise copyable means we don't need to worry about destruction.2062memmove(m_p + index + n, m_p + index, sizeof(T) * num_to_move);2063}2064else2065{2066const T* pSrc = m_p + orig_size - 1;2067T* pDst = const_cast<T*>(pSrc) + n;20682069for (size_t i = 0; i < num_to_move; i++)2070{2071assert((uint64_t)(pDst - m_p) < (uint64_t)m_size);20722073*pDst = std::move(*pSrc);2074pDst--;2075pSrc--;2076}2077}20782079T* pDst = m_p + index;20802081if (BASISU_IS_BITWISE_COPYABLE(T))2082{2083// This copies in the new bits, overwriting the existing objects, which is OK for copyable types that don't need destruction.2084memcpy(pDst, p, sizeof(T) * n);2085}2086else2087{2088for (size_t i = 0; i < n; i++)2089{2090assert((uint64_t)(pDst - m_p) < (uint64_t)m_size);2091*pDst++ = *p++;2092}2093}20942095return true;2096}20972098inline void insert(size_t index, const T* p, size_t n)2099{2100if (!try_insert(index, p, n))2101container_abort("vector::insert() failed!\n");2102}21032104inline bool try_insert(T* p, const T& obj)2105{2106if (p < 
begin())2107{2108assert(0);2109return false;2110}21112112uint64_t ofs = p - begin();21132114if (ofs > m_size)2115{2116assert(0);2117return false;2118}21192120if ((size_t)ofs != ofs)2121{2122assert(0);2123return false;2124}21252126return try_insert((size_t)ofs, &obj, 1);2127}21282129inline void insert(T* p, const T& obj)2130{2131if (!try_insert(p, obj))2132container_abort("vector::insert() failed!\n");2133}21342135// push_front() isn't going to be very fast - it's only here for usability.2136inline void push_front(const T& obj)2137{2138insert(0, &obj, 1);2139}21402141inline bool try_push_front(const T& obj)2142{2143return try_insert(0, &obj, 1);2144}21452146vector& append(const vector& other)2147{2148if (other.m_size)2149insert(m_size, &other[0], other.m_size);2150return *this;2151}21522153bool try_append(const vector& other)2154{2155if (other.m_size)2156return try_insert(m_size, &other[0], other.m_size);21572158return true;2159}21602161vector& append(const T* p, size_t n)2162{2163if (n)2164insert(m_size, p, n);2165return *this;2166}21672168bool try_append(const T* p, size_t n)2169{2170if (n)2171return try_insert(m_size, p, n);21722173return true;2174}21752176inline bool erase(size_t start, size_t n)2177{2178if (add_overflow_check(start, n))2179{2180assert(0);2181return false;2182}21832184assert((start + n) <= m_size);21852186if ((start + n) > m_size)2187{2188assert(0);2189return false;2190}21912192if (!n)2193return true;21942195const size_t num_to_move = m_size - (start + n);21962197T* pDst = m_p + start;21982199const T* pSrc = m_p + start + n;22002201if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T))2202{2203// This test is overly cautious.2204if ((!BASISU_IS_BITWISE_COPYABLE(T)) || (BASISU_HAS_DESTRUCTOR(T)))2205{2206// Type has been marked explictly as bitwise movable, which means we can move them around but they may need to be destructed.2207// First destroy the erased objects.2208scalar_type<T>::destruct_array(pDst, n);2209}22102211// Copy "down" the objects to 
preserve, filling in the empty slots.2212memmove((void *)pDst, pSrc, num_to_move * sizeof(T));2213}2214else2215{2216// Type is not bitwise copyable or movable.2217// Move them down one at a time by using the equals operator, and destroying anything that's left over at the end.2218T* pDst_end = pDst + num_to_move;22192220while (pDst != pDst_end)2221{2222*pDst = std::move(*pSrc);22232224++pDst;2225++pSrc;2226}22272228scalar_type<T>::destruct_array(pDst_end, n);2229}22302231m_size -= n;22322233return true;2234}22352236inline bool erase_index(size_t index)2237{2238return erase(index, 1);2239}22402241inline bool erase(T* p)2242{2243assert((p >= m_p) && (p < (m_p + m_size)));22442245if (p < m_p)2246return false;22472248return erase_index(static_cast<size_t>(p - m_p));2249}22502251inline bool erase(T* pFirst, T* pEnd)2252{2253assert(pFirst <= pEnd);2254assert(pFirst >= begin() && pFirst <= end());2255assert(pEnd >= begin() && pEnd <= end());22562257if ((pFirst < begin()) || (pEnd < pFirst))2258{2259assert(0);2260return false;2261}22622263uint64_t ofs = pFirst - begin();2264if ((size_t)ofs != ofs)2265{2266assert(0);2267return false;2268}22692270uint64_t n = pEnd - pFirst;2271if ((size_t)n != n)2272{2273assert(0);2274return false;2275}22762277return erase((size_t)ofs, (size_t)n);2278}22792280bool erase_unordered(size_t index)2281{2282if (index >= m_size)2283{2284assert(0);2285return false;2286}22872288if ((index + 1) < m_size)2289{2290(*this)[index] = std::move(back());2291}22922293pop_back();2294return true;2295}22962297inline bool operator== (const vector& rhs) const2298{2299if (m_size != rhs.m_size)2300return false;2301else if (m_size)2302{2303if (scalar_type<T>::cFlag)2304return memcmp(m_p, rhs.m_p, sizeof(T) * m_size) == 0;2305else2306{2307const T* pSrc = m_p;2308const T* pDst = rhs.m_p;2309for (size_t i = m_size; i; i--)2310if (!(*pSrc++ == *pDst++))2311return false;2312}2313}23142315return true;2316}23172318inline bool operator< (const vector& rhs) 
const2319{2320const size_t min_size = helpers::minimum(m_size, rhs.m_size);23212322const T* pSrc = m_p;2323const T* pSrc_end = m_p + min_size;2324const T* pDst = rhs.m_p;23252326while ((pSrc < pSrc_end) && (*pSrc == *pDst))2327{2328pSrc++;2329pDst++;2330}23312332if (pSrc < pSrc_end)2333return *pSrc < *pDst;23342335return m_size < rhs.m_size;2336}23372338inline void swap(vector& other)2339{2340std::swap(m_p, other.m_p);2341std::swap(m_size, other.m_size);2342std::swap(m_capacity, other.m_capacity);2343}23442345inline void sort()2346{2347std::sort(begin(), end());2348}23492350inline void unique()2351{2352if (!empty())2353{2354sort();23552356resize(std::unique(begin(), end()) - begin());2357}2358}23592360inline void reverse()2361{2362const size_t j = m_size >> 1;23632364for (size_t i = 0; i < j; i++)2365std::swap(m_p[i], m_p[m_size - 1 - i]);2366}23672368inline bool find(const T& key, size_t &idx) const2369{2370idx = 0;23712372const T* p = m_p;2373const T* p_end = m_p + m_size;23742375size_t index = 0;23762377while (p != p_end)2378{2379if (key == *p)2380{2381idx = index;2382return true;2383}23842385p++;2386index++;2387}23882389return false;2390}23912392inline bool find_sorted(const T& key, size_t& idx) const2393{2394idx = 0;23952396if (!m_size)2397return false;23982399// Inclusive range2400size_t low = 0, high = m_size - 1;24012402while (low <= high)2403{2404size_t mid = (size_t)(((uint64_t)low + (uint64_t)high) >> 1);24052406const T* pTrial_key = m_p + mid;24072408// Sanity check comparison operator2409assert(!((*pTrial_key < key) && (key < *pTrial_key)));24102411if (*pTrial_key < key)2412{2413if (add_overflow_check(mid, 1))2414break;24152416low = mid + 1;2417}2418else if (key < *pTrial_key)2419{2420if (!mid)2421break;24222423high = mid - 1;2424}2425else2426{2427idx = mid;2428return true;2429}2430}24312432return false;2433}24342435inline size_t count_occurences(const T& key) const2436{2437size_t c = 0;24382439const T* p = m_p;2440const T* p_end = m_p + 
m_size;24412442while (p != p_end)2443{2444if (key == *p)2445c++;24462447p++;2448}24492450return c;2451}24522453inline void set_all(const T& o)2454{2455if ((sizeof(T) == 1) && (scalar_type<T>::cFlag))2456{2457#if defined(__GNUC__) && !defined(__clang__)2458#pragma GCC diagnostic push2459#pragma GCC diagnostic ignored "-Wclass-memaccess"2460#endif2461memset(m_p, *reinterpret_cast<const uint8_t*>(&o), m_size);2462#if defined(__GNUC__) && !defined(__clang__)2463#pragma GCC diagnostic pop2464#endif2465}2466else2467{2468T* pDst = m_p;2469T* pDst_end = pDst + m_size;2470while (pDst != pDst_end)2471*pDst++ = o;2472}2473}24742475// Caller assumes ownership of the heap block associated with the container. Container is cleared.2476// Caller must use free() on the returned pointer.2477inline void* assume_ownership()2478{2479T* p = m_p;2480m_p = nullptr;2481m_size = 0;2482m_capacity = 0;2483return p;2484}24852486// Caller is granting ownership of the indicated heap block.2487// Block must have size constructed elements, and have enough room for capacity elements.2488// The block must have been allocated using malloc().2489// Important: This method is used in Basis Universal. 
If you change how this container allocates memory, you'll need to change any users of this method.2490inline bool grant_ownership(T* p, size_t size, size_t capacity)2491{2492// To prevent the caller from obviously shooting themselves in the foot.2493if (((p + capacity) > m_p) && (p < (m_p + m_capacity)))2494{2495// Can grant ownership of a block inside the container itself!2496assert(0);2497return false;2498}24992500if (size > capacity)2501{2502assert(0);2503return false;2504}25052506if (!p)2507{2508if (capacity)2509{2510assert(0);2511return false;2512}2513}2514else if (!capacity)2515{2516assert(0);2517return false;2518}25192520clear();2521m_p = p;2522m_size = size;2523m_capacity = capacity;2524return true;2525}25262527readable_span<T> get_readable_span() const2528{2529return readable_span<T>(m_p, m_size);2530}25312532writable_span<T> get_writable_span()2533{2534return writable_span<T>(m_p, m_size);2535}25362537private:2538T* m_p;2539size_t m_size; // the number of constructed objects2540size_t m_capacity; // the size of the allocation25412542template<typename Q> struct is_vector { enum { cFlag = false }; };2543template<typename Q> struct is_vector< vector<Q> > { enum { cFlag = true }; };25442545static void object_mover(void* pDst_void, void* pSrc_void, size_t num)2546{2547T* pSrc = static_cast<T*>(pSrc_void);2548T* const pSrc_end = pSrc + num;2549T* pDst = static_cast<T*>(pDst_void);25502551while (pSrc != pSrc_end)2552{2553new ((void*)(pDst)) T(std::move(*pSrc));2554scalar_type<T>::destruct(pSrc);25552556++pSrc;2557++pDst;2558}2559}25602561inline bool increase_capacity(size_t min_new_capacity, bool grow_hint, bool nofail = false)2562{2563return reinterpret_cast<elemental_vector*>(this)->increase_capacity(2564min_new_capacity, grow_hint, sizeof(T),2565(BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) || (is_vector<T>::cFlag)) ? 
nullptr : object_mover, nofail);2566}2567};25682569template<typename T> struct bitwise_movable< vector<T> > { enum { cFlag = true }; };25702571// Hash map2572// rg TODO 9/8/2024: I've upgraded this class to support 64-bit size_t, and it needs a lot more testing.25732574const uint32_t SIZE_T_BITS = sizeof(size_t) * 8U;25752576inline uint32_t safe_shift_left(uint32_t v, uint32_t l)2577{2578return (l < 32U) ? (v << l) : 0;2579}25802581inline uint64_t safe_shift_left(uint64_t v, uint32_t l)2582{2583return (l < 64U) ? (v << l) : 0;2584}25852586template <typename T>2587struct hasher2588{2589inline size_t operator() (const T& key) const { return static_cast<size_t>(key); }2590};25912592template <typename T>2593struct equal_to2594{2595inline bool operator()(const T& a, const T& b) const { return a == b; }2596};25972598// Important: The Hasher and Equals objects must be bitwise movable!2599template<typename Key, typename Value = empty_type, typename Hasher = hasher<Key>, typename Equals = equal_to<Key> >2600class hash_map2601{2602public:2603class iterator;2604class const_iterator;26052606private:2607friend class iterator;2608friend class const_iterator;26092610enum state2611{2612cStateInvalid = 0,2613cStateValid = 12614};26152616enum2617{2618cMinHashSize = 4U2619};26202621public:2622typedef hash_map<Key, Value, Hasher, Equals> hash_map_type;2623typedef std::pair<Key, Value> value_type;2624typedef Key key_type;2625typedef Value referent_type;2626typedef Hasher hasher_type;2627typedef Equals equals_type;26282629hash_map() :2630m_num_valid(0),2631m_grow_threshold(0),2632m_hash_shift(SIZE_T_BITS)2633{2634static_assert((SIZE_T_BITS == 32) || (SIZE_T_BITS == 64), "SIZE_T_BITS must be 32 or 64");2635}26362637hash_map(const hash_map& other) :2638m_values(other.m_values),2639m_num_valid(other.m_num_valid),2640m_grow_threshold(other.m_grow_threshold),2641m_hash_shift(other.m_hash_shift),2642m_hasher(other.m_hasher),2643m_equals(other.m_equals)2644{2645static_assert((SIZE_T_BITS == 
32) || (SIZE_T_BITS == 64), "SIZE_T_BITS must be 32 or 64");2646}26472648hash_map(hash_map&& other) :2649m_values(std::move(other.m_values)),2650m_num_valid(other.m_num_valid),2651m_grow_threshold(other.m_grow_threshold),2652m_hash_shift(other.m_hash_shift),2653m_hasher(std::move(other.m_hasher)),2654m_equals(std::move(other.m_equals))2655{2656static_assert((SIZE_T_BITS == 32) || (SIZE_T_BITS == 64), "SIZE_T_BITS must be 32 or 64");26572658other.m_hash_shift = SIZE_T_BITS;2659other.m_num_valid = 0;2660other.m_grow_threshold = 0;2661}26622663hash_map& operator= (const hash_map& other)2664{2665if (this == &other)2666return *this;26672668clear();26692670m_values = other.m_values;2671m_hash_shift = other.m_hash_shift;2672m_num_valid = other.m_num_valid;2673m_grow_threshold = other.m_grow_threshold;2674m_hasher = other.m_hasher;2675m_equals = other.m_equals;26762677return *this;2678}26792680hash_map& operator= (hash_map&& other)2681{2682if (this == &other)2683return *this;26842685clear();26862687m_values = std::move(other.m_values);2688m_hash_shift = other.m_hash_shift;2689m_num_valid = other.m_num_valid;2690m_grow_threshold = other.m_grow_threshold;2691m_hasher = std::move(other.m_hasher);2692m_equals = std::move(other.m_equals);26932694other.m_hash_shift = SIZE_T_BITS;2695other.m_num_valid = 0;2696other.m_grow_threshold = 0;26972698return *this;2699}27002701inline ~hash_map()2702{2703clear();2704}27052706inline const Equals& get_equals() const { return m_equals; }2707inline Equals& get_equals() { return m_equals; }2708inline void set_equals(const Equals& equals) { m_equals = equals; }27092710inline const Hasher& get_hasher() const { return m_hasher; }2711inline Hasher& get_hasher() { return m_hasher; }2712inline void set_hasher(const Hasher& hasher) { m_hasher = hasher; }27132714inline void clear()2715{2716if (m_values.empty())2717return;27182719if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))2720{2721node* p = &get_node(0);2722node* p_end = p + 
m_values.size();27232724size_t num_remaining = m_num_valid;2725while (p != p_end)2726{2727if (p->state)2728{2729destruct_value_type(p);2730num_remaining--;2731if (!num_remaining)2732break;2733}27342735p++;2736}2737}27382739m_values.clear_no_destruction();27402741m_hash_shift = SIZE_T_BITS;2742m_num_valid = 0;2743m_grow_threshold = 0;2744}27452746inline void reset()2747{2748if (!m_num_valid)2749return;27502751if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))2752{2753node* p = &get_node(0);2754node* p_end = p + m_values.size();27552756size_t num_remaining = m_num_valid;2757while (p != p_end)2758{2759if (p->state)2760{2761destruct_value_type(p);2762p->state = cStateInvalid;27632764num_remaining--;2765if (!num_remaining)2766break;2767}27682769p++;2770}2771}2772else if (sizeof(node) <= 16)2773{2774memset(&m_values[0], 0, m_values.size_in_bytes());2775}2776else2777{2778node* p = &get_node(0);2779node* p_end = p + m_values.size();27802781size_t num_remaining = m_num_valid;2782while (p != p_end)2783{2784if (p->state)2785{2786p->state = cStateInvalid;27872788num_remaining--;2789if (!num_remaining)2790break;2791}27922793p++;2794}2795}27962797m_num_valid = 0;2798}27992800inline size_t size()2801{2802return m_num_valid;2803}28042805inline size_t get_table_size()2806{2807return m_values.size();2808}28092810inline bool empty()2811{2812return !m_num_valid;2813}28142815inline bool reserve(size_t new_capacity)2816{2817if (!new_capacity)2818return true;28192820uint64_t new_hash_size = new_capacity;28212822new_hash_size = new_hash_size * 2ULL;28232824if (!helpers::is_power_of_2(new_hash_size))2825new_hash_size = helpers::next_pow2(new_hash_size);28262827new_hash_size = helpers::maximum<uint64_t>(cMinHashSize, new_hash_size);28282829if (!can_fit_into_size_t(new_hash_size))2830{2831assert(0);2832return false;2833}28342835assert(new_hash_size >= new_capacity);28362837if (new_hash_size <= m_values.size())2838return true;28392840return 
rehash((size_t)new_hash_size);2841}28422843class iterator2844{2845friend class hash_map<Key, Value, Hasher, Equals>;2846friend class hash_map<Key, Value, Hasher, Equals>::const_iterator;28472848public:2849inline iterator() : m_pTable(nullptr), m_index(0) { }2850inline iterator(hash_map_type& table, size_t index) : m_pTable(&table), m_index(index) { }2851inline iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }28522853inline iterator& operator= (const iterator& other)2854{2855m_pTable = other.m_pTable;2856m_index = other.m_index;2857return *this;2858}28592860// post-increment2861inline iterator operator++(int)2862{2863iterator result(*this);2864++*this;2865return result;2866}28672868// pre-increment2869inline iterator& operator++()2870{2871probe();2872return *this;2873}28742875inline value_type& operator*() const { return *get_cur(); }2876inline value_type* operator->() const { return get_cur(); }28772878inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }2879inline bool operator != (const iterator& b) const { return !(*this == b); }2880inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }2881inline bool operator != (const const_iterator& b) const { return !(*this == b); }28822883private:2884hash_map_type* m_pTable;2885size_t m_index;28862887inline value_type* get_cur() const2888{2889assert(m_pTable && (m_index < m_pTable->m_values.size()));2890assert(m_pTable->get_node_state(m_index) == cStateValid);28912892return &m_pTable->get_node(m_index);2893}28942895inline void probe()2896{2897assert(m_pTable);2898m_index = m_pTable->find_next(m_index);2899}2900};29012902class const_iterator2903{2904friend class hash_map<Key, Value, Hasher, Equals>;2905friend class hash_map<Key, Value, Hasher, Equals>::iterator;29062907public:2908inline const_iterator() : m_pTable(nullptr), m_index(0) { }2909inline 
// (Continuation of hash_map<Key, Value>::const_iterator, begun earlier in this header.)
const_iterator(const hash_map_type& table, size_t index) : m_pTable(&table), m_index(index) { }
inline const_iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }
inline const_iterator(const const_iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }

inline const_iterator& operator= (const const_iterator& other)
{
	m_pTable = other.m_pTable;
	m_index = other.m_index;
	return *this;
}

// Allows assigning a mutable iterator to a const_iterator.
inline const_iterator& operator= (const iterator& other)
{
	m_pTable = other.m_pTable;
	m_index = other.m_index;
	return *this;
}

// post-increment
inline const_iterator operator++(int)
{
	const_iterator result(*this);
	++*this;
	return result;
}

// pre-increment
inline const_iterator& operator++()
{
	probe();
	return *this;
}

inline const value_type& operator*() const { return *get_cur(); }
inline const value_type* operator->() const { return get_cur(); }

// Iterators compare equal only when they reference the same table AND the same slot.
inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
inline bool operator != (const const_iterator& b) const { return !(*this == b); }
inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
inline bool operator != (const iterator& b) const { return !(*this == b); }

private:
const hash_map_type* m_pTable;
size_t m_index;

// Returns the key/value at the current slot; asserts the slot is in range and holds a live node.
inline const value_type* get_cur() const
{
	assert(m_pTable && (m_index < m_pTable->m_values.size()));
	assert(m_pTable->get_node_state(m_index) == cStateValid);

	return &m_pTable->get_node(m_index);
}

// Advances m_index to the next occupied slot (or to m_values.size() == end()).
inline void probe()
{
	assert(m_pTable);
	m_index = m_pTable->find_next(m_index);
}
};

// Returns an iterator to the first occupied slot, or end() if the map is empty.
// find_next(SIZE_MAX) wraps to slot 0 via the index++ inside find_next.
inline const_iterator begin() const
{
	if (!m_num_valid)
		return end();

	return const_iterator(*this, find_next(std::numeric_limits<size_t>::max()));
}

// end() is represented by index == m_values.size().
inline const_iterator end() const
{
	return const_iterator(*this, m_values.size());
}

inline iterator begin()
{
	if (!m_num_valid)
		return end();

	return iterator(*this, find_next(std::numeric_limits<size_t>::max()));
}

inline iterator end()
{
	return iterator(*this, m_values.size());
}

// insert_result.first will always point to inserted key/value (or the already existing key/value).
// insert_result.second will be true if a new key/value was inserted, or false if the key already existed (in which case first will point to the already existing value).
typedef std::pair<iterator, bool> insert_result;

// Inserts (k, v), growing the table if needed. Aborts the process (container_abort) if growth
// or the post-growth insert fails; use try_insert() for a non-aborting variant.
inline insert_result insert(const Key& k, const Value& v = Value())
{
	insert_result result;
	if (!insert_no_grow(result, k, v))
	{
		if (!try_grow())
			container_abort("hash_map::try_grow() failed");

		// This must succeed.
		if (!insert_no_grow(result, k, v))
			container_abort("hash_map::insert() failed");
	}

	return result;
}

// Non-aborting insert: returns false if the table could not be grown or the insert failed.
inline bool try_insert(insert_result& result, const Key& k, const Value& v = Value())
{
	if (!insert_no_grow(result, k, v))
	{
		if (!try_grow())
			return false;

		if (!insert_no_grow(result, k, v))
			return false;
	}

	return true;
}

// Move-insert overload. Note: if the first insert_no_grow_move() fails the table was full,
// so k/v have not been consumed and can safely be moved again after growing.
inline insert_result insert(Key&& k, Value&& v = Value())
{
	insert_result result;
	if (!insert_no_grow_move(result, std::move(k), std::move(v)))
	{
		if (!try_grow())
			container_abort("hash_map::try_grow() failed");

		// This must succeed.
		if (!insert_no_grow_move(result, std::move(k), std::move(v)))
			container_abort("hash_map::insert() failed");
	}

	return result;
}

inline bool try_insert(insert_result& result, Key&& k, Value&& v = Value())
{
	if (!insert_no_grow_move(result, std::move(k), std::move(v)))
	{
		if (!try_grow())
			return false;

		if (!insert_no_grow_move(result, std::move(k), std::move(v)))
			return false;
	}

	return true;
}

// Pair-based convenience wrappers over the (key, value) overloads above.
inline insert_result insert(const value_type& v)
{
	return insert(v.first, v.second);
}

inline bool try_insert(insert_result& result, const value_type& v)
{
	return try_insert(result, v.first, v.second);
}

inline insert_result insert(value_type&& v)
{
	return insert(std::move(v.first), std::move(v.second));
}

inline bool try_insert(insert_result& result, value_type&& v)
{
	return try_insert(result, std::move(v.first), std::move(v.second));
}

// find_index() returns m_values.size() when not found, which equals end().
inline const_iterator find(const Key& k) const
{
	return const_iterator(*this, find_index(k));
}

inline iterator find(const Key& k)
{
	return iterator(*this, find_index(k));
}

inline bool contains(const Key& k) const
{
	const size_t idx = find_index(k);
	return idx != m_values.size();
}

// Erases key k if present; returns true if a node was removed.
// After destroying the node this performs the standard open-addressing deletion fixup
// (cf. Knuth's Algorithm R): it walks the probe chain (which probes DOWNWARD with wraparound
// here) and shifts displaced nodes back into the emptied slot so later lookups don't stop
// early at the hole. The cyclic-range test below decides whether a node's home slot r
// allows it to fill gap j.
inline bool erase(const Key& k)
{
	size_t i = find_index(k);

	if (i >= m_values.size())
		return false;

	node* pDst = &get_node(i);
	destruct_value_type(pDst);
	pDst->state = cStateInvalid;

	m_num_valid--;

	for (; ; )
	{
		size_t r, j = i;

		node* pSrc = pDst;

		do
		{
			if (!i)
			{
				i = m_values.size() - 1;
				pSrc = &get_node(i);
			}
			else
			{
				i--;
				pSrc--;
			}

			// Hit an empty slot: the probe chain ends, nothing more can be displaced.
			if (!pSrc->state)
				return true;

			r = hash_key(pSrc->first);

		} while ((i <= r && r < j) || (r < j && j < i) || (j < i && i <= r));

		move_node(pDst, pSrc);

		pDst = pSrc;
	}
}

// O(1) swap of all state with another map.
inline void swap(hash_map_type& other)
{
	m_values.swap(other.m_values);
	std::swap(m_hash_shift, other.m_hash_shift);
	std::swap(m_num_valid, other.m_num_valid);
	std::swap(m_grow_threshold, other.m_grow_threshold);
	std::swap(m_hasher, other.m_hasher);
	std::swap(m_equals, other.m_equals);
}

private:
// A slot: the key/value pair plus a 1-byte occupancy state (cStateValid/cStateInvalid).
struct node : public value_type
{
	uint8_t state;
};

// Copy-constructs k/v into raw (unconstructed) pair storage, using memcpy for
// bitwise-copyable types and scalar_type<>::construct otherwise.
static inline void construct_value_type(value_type* pDst, const Key& k, const Value& v)
{
	if (BASISU_IS_BITWISE_COPYABLE(Key))
		memcpy(&pDst->first, &k, sizeof(Key));
	else
		scalar_type<Key>::construct(&pDst->first, k);

	if (BASISU_IS_BITWISE_COPYABLE(Value))
		memcpy(&pDst->second, &v, sizeof(Value));
	else
		scalar_type<Value>::construct(&pDst->second, v);
}

// Copy-constructs *pSrc into raw pair storage; whole-pair memcpy when both halves are
// bitwise copyable, otherwise per-member.
static inline void construct_value_type(value_type* pDst, const value_type* pSrc)
{
	if ((BASISU_IS_BITWISE_COPYABLE(Key)) && (BASISU_IS_BITWISE_COPYABLE(Value)))
	{
		memcpy(pDst, pSrc, sizeof(value_type));
	}
	else
	{
		if (BASISU_IS_BITWISE_COPYABLE(Key))
			memcpy(&pDst->first, &pSrc->first, sizeof(Key));
		else
			scalar_type<Key>::construct(&pDst->first, pSrc->first);

		if (BASISU_IS_BITWISE_COPYABLE(Value))
			memcpy(&pDst->second, &pSrc->second, sizeof(Value));
		else
			scalar_type<Value>::construct(&pDst->second, pSrc->second);
	}
}

static inline void destruct_value_type(value_type* p)
{
	scalar_type<Key>::destruct(&p->first);
	scalar_type<Value>::destruct(&p->second);
}

// Moves nodes *pSrc to *pDst efficiently from one hashmap to another.
// pDst should NOT be constructed on entry.
static inline void move_node(node* pDst, node* pSrc, bool update_src_state = true)
{
	assert(!pDst->state);

	if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key) && BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))
	{
		// memcpy of the whole node also copies pSrc's state byte, hence the assert.
		memcpy(pDst, pSrc, sizeof(node));

		assert(pDst->state == cStateValid);
	}
	else
	{
		if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key))
			memcpy(&pDst->first, &pSrc->first, sizeof(Key));
		else
		{
			new ((void*)&pDst->first) Key(std::move(pSrc->first));
			scalar_type<Key>::destruct(&pSrc->first);
		}

		if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))
			memcpy(&pDst->second, &pSrc->second, sizeof(Value));
		else
		{
			new ((void*)&pDst->second) Value(std::move(pSrc->second));
			scalar_type<Value>::destruct(&pSrc->second);
		}

		pDst->state = cStateValid;
	}

	if (update_src_state)
		pSrc->state = cStateInvalid;
}

// Raw storage for one node. Lets the table live in a basisu::vector without the vector
// constructing/destructing live Key/Value objects: construction only stamps the state
// byte invalid, and copy/assign/destruct forward to the node helpers above.
struct raw_node
{
	inline raw_node()
	{
		node* p = reinterpret_cast<node*>(this);
		p->state = cStateInvalid;
	}

	// In practice, this should never be called (right?). We manage destruction ourselves.
	inline ~raw_node()
	{
		node* p = reinterpret_cast<node*>(this);
		if (p->state)
			hash_map_type::destruct_value_type(p);
	}

	inline raw_node(const raw_node& other)
	{
		node* pDst = reinterpret_cast<node*>(this);
		const node* pSrc = reinterpret_cast<const node*>(&other);

		if (pSrc->state)
		{
			hash_map_type::construct_value_type(pDst, pSrc);
			pDst->state = cStateValid;
		}
		else
			pDst->state = cStateInvalid;
	}

	inline raw_node& operator= (const raw_node& rhs)
	{
		if (this == &rhs)
			return *this;

		node* pDst = reinterpret_cast<node*>(this);
		const node* pSrc = reinterpret_cast<const node*>(&rhs);

		if (pSrc->state)
		{
			if (pDst->state)
			{
				pDst->first = pSrc->first;
				pDst->second = pSrc->second;
			}
			else
			{
				hash_map_type::construct_value_type(pDst, pSrc);
				pDst->state = cStateValid;
			}
		}
		else if (pDst->state)
		{
			hash_map_type::destruct_value_type(pDst);
			pDst->state = cStateInvalid;
		}

		return *this;
	}

	uint8_t m_bits[sizeof(node)];
};

typedef basisu::vector<raw_node> node_vector;

node_vector m_values;			// slot array; size is always a power of 2 (see rehash())

size_t m_num_valid;				// number of occupied slots
size_t m_grow_threshold;		// grow when m_num_valid reaches this (~half the table)

uint32_t m_hash_shift;			// SIZE_T_BITS - log2(m_values.size()); see rehash()

Hasher m_hasher;
Equals m_equals;

// Maps a key to its home slot via multiplicative (Fibonacci) hashing: multiply the
// user hash by 2^bits/phi and keep the top log2(table size) bits.
inline size_t hash_key(const Key& k) const
{
	assert((safe_shift_left(static_cast<uint64_t>(1), (SIZE_T_BITS - m_hash_shift))) == m_values.size());

	// Fibonacci hashing
	if (SIZE_T_BITS == 32)
	{
		assert(m_hash_shift != 32);

		uint32_t hash = static_cast<uint32_t>(m_hasher(k));
		hash = (2654435769U * hash) >> m_hash_shift;

		assert(hash < m_values.size());
		return
(size_t)hash;
	}
	else
	{
		assert(m_hash_shift != 64);

		uint64_t hash = static_cast<uint64_t>(m_hasher(k));
		// 64-bit Fibonacci multiplier (2^64 / golden ratio).
		hash = (0x9E3779B97F4A7C15ULL * hash) >> m_hash_shift;

		assert(hash < m_values.size());
		return (size_t)hash;
	}
}

// Reinterprets the raw slot storage as a node.
inline const node& get_node(size_t index) const
{
	return *reinterpret_cast<const node*>(&m_values[index]);
}

inline node& get_node(size_t index)
{
	return *reinterpret_cast<node*>(&m_values[index]);
}

inline state get_node_state(size_t index) const
{
	return static_cast<state>(get_node(index).state);
}

inline void set_node_state(size_t index, bool valid)
{
	get_node(index).state = valid;
}

// Doubles the table (rounded up to a power of 2, at least cMinHashSize).
// Returns false if the new size can't fit in size_t or allocation fails.
inline bool try_grow()
{
	uint64_t n = m_values.size() * 2ULL;

	if (!helpers::is_power_of_2(n))
		n = helpers::next_pow2(n);

	if (!can_fit_into_size_t(n))
	{
		assert(0);
		return false;
	}

	return rehash(helpers::maximum<size_t>(cMinHashSize, (size_t)n));
}

// new_hash_size must be a power of 2.
// Builds a new table of the requested size, moves every live node into it, then swaps it in.
// Returns false on invalid size or allocation failure (leaving *this unchanged in that case).
inline bool rehash(size_t new_hash_size)
{
	if (!helpers::is_power_of_2((uint64_t)new_hash_size))
	{
		assert(0);
		return false;
	}

	if (new_hash_size < m_num_valid)
	{
		assert(0);
		return false;
	}

	if (new_hash_size == m_values.size())
		return true;

	hash_map new_map;
	if (!new_map.m_values.try_resize(new_hash_size))
		return false;

	new_map.m_hash_shift = SIZE_T_BITS - helpers::floor_log2i((uint64_t)new_hash_size);
	assert(new_hash_size == safe_shift_left(static_cast<uint64_t>(1), SIZE_T_BITS - new_map.m_hash_shift));

	// Disable growth while repopulating; the real threshold is computed below.
	new_map.m_grow_threshold = std::numeric_limits<size_t>::max();

	node* pNode = reinterpret_cast<node*>(m_values.begin());
	node* pNode_end = pNode + m_values.size();

	while (pNode != pNode_end)
	{
		if (pNode->state)
		{
			new_map.move_into(pNode);

			// Early out once every live node has been moved.
			if (new_map.m_num_valid == m_num_valid)
				break;
		}

		pNode++;
	}

	// Grow at ~50% load.
	new_map.m_grow_threshold = new_hash_size >> 1U;
	if (new_hash_size & 1)
		new_map.m_grow_threshold++;

	// Old nodes were moved out, so skip per-node destruction.
	m_values.clear_no_destruction();
	m_hash_shift = SIZE_T_BITS;

	swap(new_map);

	return true;
}

// Returns the index of the first occupied slot strictly after `index`,
// or m_values.size() (== end()) if there is none. Used by the iterators.
inline size_t find_next(size_t index) const
{
	index++;

	if (index >= m_values.size())
		return index;

	const node* pNode = &get_node(index);

	for (; ; )
	{
		if (pNode->state)
			break;

		if (++index >= m_values.size())
			break;

		pNode++;
	}

	return index;
}

// Returns the slot index holding key k, or m_values.size() if not found.
// Probes DOWNWARD from the home slot, wrapping from 0 to size-1; stops at an
// empty slot or after a full cycle.
inline size_t find_index(const Key& k) const
{
	if (m_num_valid)
	{
		size_t index = hash_key(k);
		const node* pNode = &get_node(index);

		if (pNode->state)
		{
			if (m_equals(pNode->first, k))
				return index;

			const size_t orig_index = index;

			for (; ; )
			{
				if (!index)
				{
					index = m_values.size() - 1;
					pNode = &get_node(index);
				}
				else
				{
					index--;
					pNode--;
				}

				if (index == orig_index)
					break;

				if (!pNode->state)
					break;

				if (m_equals(pNode->first, k))
					return index;
			}
		}
	}

	return m_values.size();
}

// Copy-inserts (k, v) without growing. Returns false if the table is empty, the probe
// cycled without finding a free slot, or the load threshold was reached; returns true with
// result.second == false if the key already exists.
inline bool insert_no_grow(insert_result& result, const Key& k, const Value& v)
{
	if (!m_values.size())
		return false;

	size_t index = hash_key(k);
	node* pNode = &get_node(index);

	if (pNode->state)
	{
		if (m_equals(pNode->first, k))
		{
			result.first = iterator(*this, index);
			result.second = false;
			return true;
		}

		const size_t orig_index = index;

		for (; ; )
		{
			if (!index)
			{
				index = m_values.size() - 1;
				pNode = &get_node(index);
			}
			else
			{
				index--;
				pNode--;
			}

			if (orig_index == index)
				return false;

			if (!pNode->state)
				break;

			if (m_equals(pNode->first, k))
			{
				result.first = iterator(*this, index);
				result.second = false;
				return true;
			}
		}
	}

	// Checked only for NEW keys, after the duplicate scan above.
	if (m_num_valid >= m_grow_threshold)
		return false;

	construct_value_type(pNode, k, v);

	pNode->state = cStateValid;

	m_num_valid++;
	assert(m_num_valid <= m_values.size());

	result.first = iterator(*this, index);
	result.second = true;

	return true;
}

// Move user supplied key/value into a node.
static inline void move_value_type(value_type* pDst, Key&& k, Value&& v)
{
	// Not checking for is MOVABLE because the caller could later destruct k and/or v (what state do we set them to?)
	if (BASISU_IS_BITWISE_COPYABLE(Key))
	{
		memcpy(&pDst->first, &k, sizeof(Key));
	}
	else
	{
		new ((void*)&pDst->first) Key(std::move(k));
		// No destruction - user will do that (we don't own k).
	}

	if (BASISU_IS_BITWISE_COPYABLE(Value))
	{
		memcpy(&pDst->second, &v, sizeof(Value));
	}
	else
	{
		new ((void*)&pDst->second) Value(std::move(v));
		// No destruction - user will do that (we don't own v).
	}
}

// Insert user provided k/v, by moving, into the current hash table
// Same probe logic as insert_no_grow(); k/v are only consumed (moved) on success.
inline bool insert_no_grow_move(insert_result& result, Key&& k, Value&& v)
{
	if (!m_values.size())
		return false;

	size_t index = hash_key(k);
	node* pNode = &get_node(index);

	if (pNode->state)
	{
		if (m_equals(pNode->first, k))
		{
			result.first = iterator(*this, index);
			result.second = false;
			return true;
		}

		const size_t orig_index = index;

		for (; ; )
		{
			if (!index)
			{
				index = m_values.size() - 1;
				pNode = &get_node(index);
			}
			else
			{
				index--;
				pNode--;
			}

			if (orig_index == index)
				return false;

			if (!pNode->state)
				break;

			if (m_equals(pNode->first, k))
			{
				result.first = iterator(*this, index);
				result.second = false;
				return true;
			}
		}
	}

	if (m_num_valid >= m_grow_threshold)
		return false;

	move_value_type(pNode, std::move(k), std::move(v));

	pNode->state = cStateValid;

	m_num_valid++;
	assert(m_num_valid <= m_values.size());

	result.first = iterator(*this, index);
	result.second = true;

	return true;
}

// Insert pNode by moving into the current hash table
// Used by rehash(); assumes the key is NOT already present, so no duplicate checks.
inline void move_into(node* pNode)
{
	size_t index = hash_key(pNode->first);
	node* pDst_node = &get_node(index);

	if (pDst_node->state)
	{
		const size_t orig_index = index;

		for (; ; )
		{
			if (!index)
			{
				index = m_values.size() - 1;
				pDst_node = &get_node(index);
			}
			else
			{
				index--;
				pDst_node--;
			}

			// Full cycle with no free slot - should be impossible during rehash.
			if (index == orig_index)
			{
				assert(false);
				return;
			}

			if (!pDst_node->state)
				break;
		}
	}

	// No need to update the source node's state (it's going away)
	move_node(pDst_node, pNode, false);

	m_num_valid++;
}
};

// hash_map can be moved by memcpy (basisu::vector bitwise-movable trait opt-in).
template<typename Key, typename Value, typename Hasher, typename Equals>
struct bitwise_movable< hash_map<Key, Value, Hasher, Equals> > { enum { cFlag = true }; };

#if BASISU_HASHMAP_TEST
extern void hash_map_test();
#endif

// String formatting
// printf-style formatting into a std::string. NOTE: output is silently truncated
// to the 2048-byte stack buffer.
inline std::string string_format(const char* pFmt, ...)
{
	char buf[2048];

	va_list args;
	va_start(args, pFmt);
#ifdef _WIN32
	vsprintf_s(buf, sizeof(buf), pFmt, args);
#else
	vsnprintf(buf, sizeof(buf), pFmt, args);
#endif
	va_end(args);

	return std::string(buf);
}

// Tag for the value currently stored in fmt_variant.
enum class variant_type
{
	cInvalid,
	cI32, cU32,
	cI64, cU64,
	cFlt, cDbl, cBool,
	cStrPtr, cStdStr
};

// Tagged value used by the fmt_string()/fmt_printf() family: a POD union plus an
// out-of-union std::string (for cStdStr), discriminated by m_type.
struct fmt_variant
{
	union
	{
		int32_t m_i32;
		uint32_t m_u32;
		int64_t m_i64;
		uint64_t m_u64;
		float m_flt;
		double m_dbl;
		bool m_bool;
		const char* m_pStr;
	};

	std::string m_str;

	variant_type m_type;

	inline fmt_variant() :
		m_u64(0),
		m_type(variant_type::cInvalid)
	{
	}

	// Copying m_u64 (the largest member) copies whichever union member is active.
	inline fmt_variant(const fmt_variant& other)
:
	m_u64(other.m_u64),
	m_str(other.m_str),
	m_type(other.m_type)
{
}

// Move ctor: steals the string and resets the source to the empty/invalid state.
inline fmt_variant(fmt_variant&& other) :
	m_u64(other.m_u64),
	m_str(std::move(other.m_str)),
	m_type(other.m_type)
{
	other.m_type = variant_type::cInvalid;
	other.m_u64 = 0;
}

inline fmt_variant& operator= (fmt_variant&& other)
{
	if (this == &other)
		return *this;

	m_type = other.m_type;
	m_u64 = other.m_u64;
	m_str = std::move(other.m_str);

	other.m_type = variant_type::cInvalid;
	other.m_u64 = 0;

	return *this;
}

inline fmt_variant& operator= (const fmt_variant& rhs)
{
	if (this == &rhs)
		return *this;

	m_u64 = rhs.m_u64;
	m_type = rhs.m_type;
	m_str = rhs.m_str;

	return *this;
}

// Implicit converting constructors - one per supported argument type.
inline fmt_variant(int32_t v) : m_i32(v), m_type(variant_type::cI32) { }
inline fmt_variant(uint32_t v) : m_u32(v), m_type(variant_type::cU32) { }
inline fmt_variant(int64_t v) : m_i64(v), m_type(variant_type::cI64) { }
inline fmt_variant(uint64_t v) : m_u64(v), m_type(variant_type::cU64) { }
#ifdef _MSC_VER
// On MSVC, long/unsigned long are distinct from the fixed-width types above.
inline fmt_variant(unsigned long v) : m_u64(v), m_type(variant_type::cU64) {}
inline fmt_variant(long v) : m_i64(v), m_type(variant_type::cI64) {}
#endif
inline fmt_variant(float v) : m_flt(v), m_type(variant_type::cFlt) { }
inline fmt_variant(double v) : m_dbl(v), m_type(variant_type::cDbl) { }
inline fmt_variant(const char* pStr) : m_pStr(pStr), m_type(variant_type::cStrPtr) { }
inline fmt_variant(const std::string& str) : m_u64(0), m_str(str), m_type(variant_type::cStdStr) { }
inline fmt_variant(bool val) : m_bool(val), m_type(variant_type::cBool) { }

// Renders this value into res using format spec fmt; defined in the .cpp.
bool to_string(std::string& res, std::string& fmt) const;
};

typedef basisu::vector<fmt_variant> fmt_variant_vec;

// Core formatter: expands pFmt with the given variants into res. Defined in the .cpp.
bool fmt_variants(std::string& res, const char* pFmt, const fmt_variant_vec& variants);

// Type-safe printf replacement: packs the arguments into fmt_variants and formats into res.
// Returns false on a bad format string/argument mismatch.
template <typename... Args>
inline bool fmt_string(std::string& res, const char* pFmt, Args&&... args)
{
	return fmt_variants(res, pFmt, fmt_variant_vec{ fmt_variant(std::forward<Args>(args))... });
}

// Convenience overload returning the formatted string (empty/partial on failure -
// the fmt_variants() result is intentionally ignored here).
template <typename... Args>
inline std::string fmt_string(const char* pFmt, Args&&... args)
{
	std::string res;
	fmt_variants(res, pFmt, fmt_variant_vec{ fmt_variant(std::forward<Args>(args))... });
	return res;
}

// Formats and writes to stdout; returns EOF on formatting failure, else fputs()'s result.
template <typename... Args>
inline int fmt_printf(const char* pFmt, Args&&... args)
{
	std::string res;
	if (!fmt_variants(res, pFmt, fmt_variant_vec{ fmt_variant(std::forward<Args>(args))... }))
		return EOF;

	return fputs(res.c_str(), stdout);
}

// Formats and writes to pFile; returns EOF on formatting failure, else fputs()'s result.
template <typename... Args>
inline int fmt_fprintf(FILE* pFile, const char* pFmt, Args&&... args)
{
	std::string res;
	if (!fmt_variants(res, pFmt, fmt_variant_vec{ fmt_variant(std::forward<Args>(args))... }))
		return EOF;

	return fputs(res.c_str(), pFile);
}

// fixed_array - zero initialized by default, operator[] is always bounds checked.
template <std::size_t N, typename T>
class fixed_array
{
	static_assert(N >= 1, "fixed_array size must be at least 1");

public:
	using value_type = T;
	using size_type = std::size_t;
	using difference_type = std::ptrdiff_t;
	using reference = T&;
	using const_reference = const T&;
	using pointer = T*;
	using const_pointer = const T*;
	using iterator = T*;
	using const_iterator = const T*;

	T m_data[N];

	BASISU_FORCE_INLINE fixed_array()
	{
		initialize_array();
	}

	// Excess initializer elements are dropped (asserts in debug); missing ones are
	// value-initialized.
	BASISU_FORCE_INLINE fixed_array(std::initializer_list<T> list)
	{
		assert(list.size() <= N);

		std::size_t copy_size = std::min(list.size(), N);
		std::copy_n(list.begin(), copy_size, m_data); // Copy up to min(list.size(), N)

		if (list.size() < N)
		{
			// Initialize the rest of the array
			std::fill(m_data + copy_size, m_data + N,
T{});
		}
	}

	// Bounds-checked element access: aborts the process on an out-of-range index.
	BASISU_FORCE_INLINE T& operator[](std::size_t index)
	{
		if (index >= N)
			container_abort("fixed_array: Index out of bounds.");
		return m_data[index];
	}

	BASISU_FORCE_INLINE const T& operator[](std::size_t index) const
	{
		if (index >= N)
			container_abort("fixed_array: Index out of bounds.");
		return m_data[index];
	}

	BASISU_FORCE_INLINE T* begin() { return m_data; }
	BASISU_FORCE_INLINE const T* begin() const { return m_data; }

	BASISU_FORCE_INLINE T* end() { return m_data + N; }
	BASISU_FORCE_INLINE const T* end() const { return m_data + N; }

	BASISU_FORCE_INLINE const T* data() const { return m_data; }
	BASISU_FORCE_INLINE T* data() { return m_data; }

	// front()/back() are safe because N >= 1 is enforced by the static_assert.
	BASISU_FORCE_INLINE const T& front() const { return m_data[0]; }
	BASISU_FORCE_INLINE T& front() { return m_data[0]; }

	BASISU_FORCE_INLINE const T& back() const { return m_data[N - 1]; }
	BASISU_FORCE_INLINE T& back() { return m_data[N - 1]; }

	BASISU_FORCE_INLINE constexpr std::size_t size() const { return N; }

	// clear() resets elements to their zero/default state; the size stays N.
	BASISU_FORCE_INLINE void clear()
	{
		initialize_array(); // Reinitialize the array
	}

	BASISU_FORCE_INLINE void set_all(const T& value)
	{
		std::fill(m_data, m_data + N, value);
	}

	BASISU_FORCE_INLINE readable_span<T> get_readable_span() const
	{
		return readable_span<T>(m_data, N);
	}

	BASISU_FORCE_INLINE writable_span<T> get_writable_span()
	{
		return writable_span<T>(m_data, N);
	}

private:
	// memset fast path for arithmetic types, value-init fill otherwise.
	BASISU_FORCE_INLINE void initialize_array()
	{
		if constexpr (std::is_integral<T>::value || std::is_floating_point<T>::value)
			memset(m_data, 0, sizeof(m_data));
		else
			std::fill(m_data, m_data + N, T{});
	}

	BASISU_FORCE_INLINE T& access_element(std::size_t index)
	{
		if (index >= N)
			container_abort("fixed_array: Index out of bounds.");
		return m_data[index];
	}

	BASISU_FORCE_INLINE const T& access_element(std::size_t index) const
	{
		if (index >= N)
			container_abort("fixed_array: Index out of bounds.");
		return m_data[index];
	}
};

// 2D array
// Row-major 2D grid over a basisu::vector: element (x, y) lives at index x + y * m_width.
template<typename T>
class vector2D
{
	typedef basisu::vector<T> vec_type;

	uint32_t m_width, m_height;
	vec_type m_values;

public:
	vector2D() :
		m_width(0),
		m_height(0)
	{
	}

	vector2D(uint32_t w, uint32_t h) :
		m_width(0),
		m_height(0)
	{
		resize(w, h);
	}

	vector2D(const vector2D& other)
	{
		*this = other;
	}

	vector2D(vector2D&& other) :
		m_width(0),
		m_height(0)
	{
		*this = std::move(other);
	}

	vector2D& operator= (const vector2D& other)
	{
		if (this != &other)
		{
			m_width = other.m_width;
			m_height = other.m_height;
			m_values = other.m_values;
		}
		return *this;
	}

	// Move assignment leaves the source empty (0 x 0).
	vector2D& operator= (vector2D&& other)
	{
		if (this != &other)
		{
			m_width = other.m_width;
			m_height = other.m_height;
			m_values = std::move(other.m_values);

			other.m_width = 0;
			other.m_height = 0;
		}
		return *this;
	}

	inline bool operator== (const vector2D& rhs) const
	{
		return (m_width == rhs.m_width) && (m_height == rhs.m_height) && (m_values == rhs.m_values);
	}

	inline size_t size_in_bytes() const { return m_values.size_in_bytes(); }

	inline uint32_t get_width() const { return m_width; }
	inline uint32_t get_height() const { return m_height; }

	// 2D access; bounds asserted in debug only.
	inline const T& operator() (uint32_t x, uint32_t y) const { assert(x < m_width && y < m_height); return m_values[x + y * m_width]; }
	inline T& operator() (uint32_t x, uint32_t y) { assert(x < m_width && y < m_height); return m_values[x + y * m_width]; }

	inline size_t size() const { return m_values.size(); }

	// Flat (linear) access into the underlying vector.
	inline const T& operator[] (uint32_t i) const { return m_values[i]; }
	inline T& operator[] (uint32_t i) { return m_values[i]; }

	// Access with coordinates clamped to the valid range (edge replication).
	inline const T& at_clamped(int x, int y) const { return (*this)(clamp<int>(x, 0, m_width - 1), clamp<int>(y, 0, m_height - 1)); }
	inline T& at_clamped(int x, int y) { return (*this)(clamp<int>(x, 0, m_width - 1), clamp<int>(y, 0, m_height - 1)); }

	void clear()
	{
		m_width = 0;
		m_height = 0;
		m_values.clear();
	}

	void set_all(const T& val)
	{
		vector_set_all(m_values, val);
	}

	inline const T* get_ptr() const { return m_values.data(); }
	inline T* get_ptr() { return m_values.data(); }

	// Resizes to new_width x new_height, preserving the overlapping top-left region.
	// On overflow of the element count it asserts and leaves the object unchanged.
	vector2D& resize(uint32_t new_width, uint32_t new_height)
	{
		if ((m_width == new_width) && (m_height == new_height))
			return *this;

		const uint64_t total_vals = (uint64_t)new_width * new_height;

		if (!can_fit_into_size_t(total_vals))
		{
			// What can we do?
			assert(0);
			return *this;
		}

		vec_type oldVals((size_t)total_vals);
		oldVals.swap(m_values);

		const uint32_t w = minimum(m_width, new_width);
		const uint32_t h = minimum(m_height, new_height);

		if ((w) && (h))
		{
			for (uint32_t y = 0; y < h; y++)
				for (uint32_t x = 0; x < w; x++)
					m_values[x + y * new_width] = oldVals[x + y * m_width];
		}

		m_width = new_width;
		m_height = new_height;

		return *this;
	}

	// Non-aborting resize variant: returns false if the allocation fails or the
	// element count overflows size_t.
	bool try_resize(uint32_t new_width, uint32_t new_height)
	{
		if ((m_width == new_width) && (m_height == new_height))
			return true;

		const uint64_t total_vals = (uint64_t)new_width * new_height;

		if (!can_fit_into_size_t(total_vals))
		{
			// What can we do?
			assert(0);
			return false;
		}

		vec_type oldVals;
		if (!oldVals.try_resize((size_t)total_vals))
			return false;

		oldVals.swap(m_values);

		const uint32_t w = minimum(m_width, new_width);
		const uint32_t h = minimum(m_height, new_height);

		if ((w) && (h))
		{
			for (uint32_t y = 0; y < h; y++)
				for (uint32_t x = 0; x < w; x++)
					m_values[x + y * new_width] = oldVals[x + y * m_width];
		}

		m_width = new_width;
		m_height = new_height;

		return true;
	}

	// Copies a w x h block starting at (src_x, src_y) into pDst (row-major, tightly packed).
	// Blocks that hang off the right/bottom edge are handled by clamping (edge replication);
	// fully-inside blocks take the fast memcpy-per-row path.
	const vector2D& extract_block_clamped(T* pDst, uint32_t src_x, uint32_t src_y, uint32_t w, uint32_t h) const
	{
		// HACK HACK
		if (((src_x + w) > m_width) || ((src_y + h) > m_height))
		{
			// Slower clamping case
			for (uint32_t y = 0; y < h; y++)
				for (uint32_t x = 0; x < w; x++)
					*pDst++ = at_clamped(src_x + x, src_y + y);
		}
		else
		{
			const T* pSrc = &m_values[src_x + src_y * m_width];

			for (uint32_t y = 0; y < h; y++)
			{
				memcpy(pDst, pSrc, w * sizeof(T));
				pSrc += m_width;
				pDst += w;
			}
		}

		return *this;
	}
};

} // namespace basisu

namespace std
{
	// std::swap overloads so generic code swaps basisu containers in O(1).
	template<typename T>
	inline void swap(basisu::vector<T>& a, basisu::vector<T>& b)
	{
		a.swap(b);
	}

	template<typename Key, typename Value, typename Hasher, typename Equals>
	inline void swap(basisu::hash_map<Key, Value, Hasher, Equals>& a, basisu::hash_map<Key, Value, Hasher, Equals>& b)
	{
		a.swap(b);
	}

} // namespace std