// basisu_containers.h
#pragma once

#include <stdlib.h>
#include <stdio.h>
#include <stdint.h>
#include <string.h> // for memset()/memcpy()/memmove(), used throughout
#include <assert.h>
#include <algorithm>
#include <type_traits> // for std::is_trivially_copyable

#if defined(__linux__) && !defined(ANDROID)
// Only for malloc_usable_size() in basisu_containers_impl.h
#include <malloc.h>
#define HAS_MALLOC_USABLE_SIZE 1
#endif

// Set to 1 to always check vector operator[], front(), and back(), even in release builds.
#define BASISU_VECTOR_FORCE_CHECKING 0

// If 1, the vector container will not query the CRT to get the size of resized memory blocks.
#define BASISU_VECTOR_DETERMINISTIC 1

#ifdef _MSC_VER
#define BASISU_FORCE_INLINE __forceinline
#else
#define BASISU_FORCE_INLINE inline
#endif

namespace basisu
{
    enum { cInvalidIndex = -1 };

    namespace helpers
    {
        inline bool is_power_of_2(uint32_t x) { return x && ((x & (x - 1U)) == 0U); }
        inline bool is_power_of_2(uint64_t x) { return x && ((x & (x - 1U)) == 0U); }

        template<class T> const T& minimum(const T& a, const T& b) { return (b < a) ? b : a; }
        template<class T> const T& maximum(const T& a, const T& b) { return (a < b) ? b : a; }

        inline uint32_t floor_log2i(uint32_t v)
        {
            uint32_t l = 0;
            while (v > 1U)
            {
                v >>= 1;
                l++;
            }
            return l;
        }

        inline uint32_t next_pow2(uint32_t val)
        {
            val--;
            val |= val >> 16;
            val |= val >> 8;
            val |= val >> 4;
            val |= val >> 2;
            val |= val >> 1;
            return val + 1;
        }

        inline uint64_t next_pow2(uint64_t val)
        {
            val--;
            val |= val >> 32;
            val |= val >> 16;
            val |= val >> 8;
            val |= val >> 4;
            val |= val >> 2;
            val |= val >> 1;
            return val + 1;
        }
    } // namespace helpers
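
    // Worked examples (editor's note, not part of the original header):
    //   floor_log2i(17) == 4, next_pow2(17U) == 32, next_pow2(32U) == 32,
    //   and is_power_of_2(0) is false by construction (the leading "x &&" test).
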
    template <typename T>
    inline T* construct(T* p)
    {
        return new (static_cast<void*>(p)) T;
    }

    template <typename T, typename U>
    inline T* construct(T* p, const U& init)
    {
        return new (static_cast<void*>(p)) T(init);
    }

    template <typename T>
    inline void construct_array(T* p, size_t n)
    {
        T* q = p + n;
        for (; p != q; ++p)
            new (static_cast<void*>(p)) T;
    }

    template <typename T, typename U>
    inline void construct_array(T* p, size_t n, const U& init)
    {
        T* q = p + n;
        for (; p != q; ++p)
            new (static_cast<void*>(p)) T(init);
    }

    template <typename T>
    inline void destruct(T* p)
    {
        (void)p;
        p->~T();
    }

    template <typename T> inline void destruct_array(T* p, size_t n)
    {
        T* q = p + n;
        for (; p != q; ++p)
            p->~T();
    }

    template<typename T> struct int_traits { enum { cMin = INT32_MIN, cMax = INT32_MAX, cSigned = true }; };
    template<> struct int_traits<int8_t> { enum { cMin = INT8_MIN, cMax = INT8_MAX, cSigned = true }; };
    template<> struct int_traits<int16_t> { enum { cMin = INT16_MIN, cMax = INT16_MAX, cSigned = true }; };
    template<> struct int_traits<int32_t> { enum { cMin = INT32_MIN, cMax = INT32_MAX, cSigned = true }; };
    template<> struct int_traits<uint8_t> { enum { cMin = 0, cMax = UINT8_MAX, cSigned = false }; };
    template<> struct int_traits<uint16_t> { enum { cMin = 0, cMax = UINT16_MAX, cSigned = false }; };
    template<> struct int_traits<uint32_t> { enum { cMin = 0, cMax = UINT32_MAX, cSigned = false }; };

    template<typename T>
    struct scalar_type
    {
        enum { cFlag = false };
        static inline void construct(T* p) { basisu::construct(p); }
        static inline void construct(T* p, const T& init) { basisu::construct(p, init); }
        static inline void construct_array(T* p, size_t n) { basisu::construct_array(p, n); }
        static inline void destruct(T* p) { basisu::destruct(p); }
        static inline void destruct_array(T* p, size_t n) { basisu::destruct_array(p, n); }
    };

    template<typename T> struct scalar_type<T*>
    {
        enum { cFlag = true };
        static inline void construct(T** p) { memset(p, 0, sizeof(T*)); }
        static inline void construct(T** p, T* init) { *p = init; }
        static inline void construct_array(T** p, size_t n) { memset(p, 0, sizeof(T*) * n); }
        static inline void destruct(T** p) { (void)p; }
        static inline void destruct_array(T** p, size_t n) { (void)p; (void)n; }
    };

#define BASISU_DEFINE_BUILT_IN_TYPE(X) \
    template<> struct scalar_type<X> { \
        enum { cFlag = true }; \
        static inline void construct(X* p) { memset(p, 0, sizeof(X)); } \
        static inline void construct(X* p, const X& init) { memcpy(p, &init, sizeof(X)); } \
        static inline void construct_array(X* p, size_t n) { memset(p, 0, sizeof(X) * n); } \
        static inline void destruct(X* p) { (void)p; } \
        static inline void destruct_array(X* p, size_t n) { (void)p; (void)n; } };

    BASISU_DEFINE_BUILT_IN_TYPE(bool)
    BASISU_DEFINE_BUILT_IN_TYPE(char)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned char)
    BASISU_DEFINE_BUILT_IN_TYPE(short)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned short)
    BASISU_DEFINE_BUILT_IN_TYPE(int)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned int)
    BASISU_DEFINE_BUILT_IN_TYPE(long)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned long)
#ifdef __GNUC__
    BASISU_DEFINE_BUILT_IN_TYPE(long long)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned long long)
#else
    BASISU_DEFINE_BUILT_IN_TYPE(__int64)
    BASISU_DEFINE_BUILT_IN_TYPE(unsigned __int64)
#endif
    BASISU_DEFINE_BUILT_IN_TYPE(float)
    BASISU_DEFINE_BUILT_IN_TYPE(double)
    BASISU_DEFINE_BUILT_IN_TYPE(long double)
#undef BASISU_DEFINE_BUILT_IN_TYPE

    template<typename T>
    struct bitwise_movable { enum { cFlag = false }; };
#define BASISU_DEFINE_BITWISE_MOVABLE(Q) template<> struct bitwise_movable<Q> { enum { cFlag = true }; };

    template<typename T>
    struct bitwise_copyable { enum { cFlag = false }; };
#define BASISU_DEFINE_BITWISE_COPYABLE(Q) template<> struct bitwise_copyable<Q> { enum { cFlag = true }; };

#define BASISU_IS_POD(T) __is_pod(T)
#define BASISU_IS_SCALAR_TYPE(T) (scalar_type<T>::cFlag)

#if !defined(BASISU_HAVE_STD_TRIVIALLY_COPYABLE) && defined(__GNUC__) && __GNUC__<5
    //#define BASISU_IS_TRIVIALLY_COPYABLE(...) __has_trivial_copy(__VA_ARGS__)
    #define BASISU_IS_TRIVIALLY_COPYABLE(...) __is_trivially_copyable(__VA_ARGS__)
#else
    #define BASISU_IS_TRIVIALLY_COPYABLE(...) std::is_trivially_copyable<__VA_ARGS__>::value
#endif

    // TODO: clean this up
#define BASISU_IS_BITWISE_COPYABLE(T) (BASISU_IS_SCALAR_TYPE(T) || BASISU_IS_POD(T) || BASISU_IS_TRIVIALLY_COPYABLE(T) || (bitwise_copyable<T>::cFlag))
#define BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) (BASISU_IS_BITWISE_COPYABLE(T) || (bitwise_movable<T>::cFlag))
#define BASISU_HAS_DESTRUCTOR(T) ((!scalar_type<T>::cFlag) && (!__is_pod(T)))
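
    // Usage sketch (editor's note): user types that are safe to memcpy()/memmove() can opt in
    // via these macros, letting vector<T> take the fast bulk-copy paths instead of per-element
    // construction/destruction. Example with a hypothetical POD pixel struct:
    //
    //   struct my_rgba { uint8_t c[4]; };
    //   BASISU_DEFINE_BITWISE_COPYABLE(my_rgba);
    //   BASISU_DEFINE_BITWISE_MOVABLE(my_rgba);
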
    typedef char(&yes_t)[1];
    typedef char(&no_t)[2];

    template <class U> yes_t class_test(int U::*);
    template <class U> no_t class_test(...);

    template <class T> struct is_class
    {
        enum { value = (sizeof(class_test<T>(0)) == sizeof(yes_t)) };
    };

    template <typename T> struct is_pointer
    {
        enum { value = false };
    };

    template <typename T> struct is_pointer<T*>
    {
        enum { value = true };
    };

    struct empty_type { };

    BASISU_DEFINE_BITWISE_COPYABLE(empty_type);
    BASISU_DEFINE_BITWISE_MOVABLE(empty_type);

    template<typename T> struct rel_ops
    {
        friend bool operator!=(const T& x, const T& y) { return (!(x == y)); }
        friend bool operator> (const T& x, const T& y) { return (y < x); }
        friend bool operator<=(const T& x, const T& y) { return (!(y < x)); }
        friend bool operator>=(const T& x, const T& y) { return (!(x < y)); }
    };

    struct elemental_vector
    {
        void* m_p;
        uint32_t m_size;
        uint32_t m_capacity;

        typedef void (*object_mover)(void* pDst, void* pSrc, uint32_t num);

        bool increase_capacity(uint32_t min_new_capacity, bool grow_hint, uint32_t element_size, object_mover pRelocate, bool nofail);
    };
    template<typename T>
    class vector : public rel_ops< vector<T> >
    {
    public:
        typedef T* iterator;
        typedef const T* const_iterator;
        typedef T value_type;
        typedef T& reference;
        typedef const T& const_reference;
        typedef T* pointer;
        typedef const T* const_pointer;

        inline vector() :
            m_p(NULL),
            m_size(0),
            m_capacity(0)
        {
        }

        inline vector(uint32_t n, const T& init) :
            m_p(NULL),
            m_size(0),
            m_capacity(0)
        {
            increase_capacity(n, false);
            construct_array(m_p, n, init);
            m_size = n;
        }

        inline vector(const vector& other) :
            m_p(NULL),
            m_size(0),
            m_capacity(0)
        {
            increase_capacity(other.m_size, false);
            m_size = other.m_size;
            if (BASISU_IS_BITWISE_COPYABLE(T))
            {
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif
#endif
                if ((m_p) && (other.m_p))
                    memcpy(m_p, other.m_p, m_size * sizeof(T));
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#endif
            }
            else
            {
                T* pDst = m_p;
                const T* pSrc = other.m_p;
                for (uint32_t i = m_size; i > 0; i--)
                    construct(pDst++, *pSrc++);
            }
        }

        inline explicit vector(size_t size) :
            m_p(NULL),
            m_size(0),
            m_capacity(0)
        {
            resize(size);
        }

        inline ~vector()
        {
            if (m_p)
            {
                scalar_type<T>::destruct_array(m_p, m_size);
                free(m_p);
            }
        }

        inline vector& operator= (const vector& other)
        {
            if (this == &other)
                return *this;
            if (m_capacity >= other.m_size)
                resize(0);
            else
            {
                clear();
                increase_capacity(other.m_size, false);
            }
            if (BASISU_IS_BITWISE_COPYABLE(T))
            {
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif
#endif
                if ((m_p) && (other.m_p))
                    memcpy(m_p, other.m_p, other.m_size * sizeof(T));
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#endif
            }
            else
            {
                T* pDst = m_p;
                const T* pSrc = other.m_p;
                for (uint32_t i = other.m_size; i > 0; i--)
                    construct(pDst++, *pSrc++);
            }
            m_size = other.m_size;
            return *this;
        }

        BASISU_FORCE_INLINE const T* begin() const { return m_p; }
        BASISU_FORCE_INLINE T* begin() { return m_p; }
        BASISU_FORCE_INLINE const T* end() const { return m_p + m_size; }
        BASISU_FORCE_INLINE T* end() { return m_p + m_size; }

        BASISU_FORCE_INLINE bool empty() const { return !m_size; }
        BASISU_FORCE_INLINE uint32_t size() const { return m_size; }
        BASISU_FORCE_INLINE uint32_t size_in_bytes() const { return m_size * sizeof(T); }
        BASISU_FORCE_INLINE uint32_t capacity() const { return m_capacity; }

        // operator[] asserts on out of range indices, but in final builds there is no range
        // checking on this method (and never will be), unless BASISU_VECTOR_FORCE_CHECKING is set to 1.
        //BASISU_FORCE_INLINE const T& operator[] (uint32_t i) const { assert(i < m_size); return m_p[i]; }
        //BASISU_FORCE_INLINE T& operator[] (uint32_t i) { assert(i < m_size); return m_p[i]; }

#if !BASISU_VECTOR_FORCE_CHECKING
        BASISU_FORCE_INLINE const T& operator[] (size_t i) const { assert(i < m_size); return m_p[i]; }
        BASISU_FORCE_INLINE T& operator[] (size_t i) { assert(i < m_size); return m_p[i]; }
#else
        BASISU_FORCE_INLINE const T& operator[] (size_t i) const
        {
            if (i >= m_size)
            {
                fprintf(stderr, "operator[] invalid index: %u, max entries %u, type size %u\n", (uint32_t)i, m_size, (uint32_t)sizeof(T));
                abort();
            }
            return m_p[i];
        }
        BASISU_FORCE_INLINE T& operator[] (size_t i)
        {
            if (i >= m_size)
            {
                fprintf(stderr, "operator[] invalid index: %u, max entries %u, type size %u\n", (uint32_t)i, m_size, (uint32_t)sizeof(T));
                abort();
            }
            return m_p[i];
        }
#endif

        // at() always includes range checking, even in final builds, unlike operator [].
        // The first element is returned if the index is out of range.
        BASISU_FORCE_INLINE const T& at(size_t i) const { assert(i < m_size); return (i >= m_size) ? m_p[0] : m_p[i]; }
        BASISU_FORCE_INLINE T& at(size_t i) { assert(i < m_size); return (i >= m_size) ? m_p[0] : m_p[i]; }
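
        // Example (editor's sketch): with a 3-element vector, v[5] is unchecked in release
        // builds (assert-only), while v.at(5) degrades to returning the first element:
        //
        //   basisu::vector<int> v(3U, 42);   // { 42, 42, 42 }
        //   int a = v.at(2);                 // 42
        //   int b = v.at(5);                 // asserts in debug; returns v[0] (42) in release
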
#if !BASISU_VECTOR_FORCE_CHECKING
        BASISU_FORCE_INLINE const T& front() const { assert(m_size); return m_p[0]; }
        BASISU_FORCE_INLINE T& front() { assert(m_size); return m_p[0]; }
        BASISU_FORCE_INLINE const T& back() const { assert(m_size); return m_p[m_size - 1]; }
        BASISU_FORCE_INLINE T& back() { assert(m_size); return m_p[m_size - 1]; }
#else
        BASISU_FORCE_INLINE const T& front() const
        {
            if (!m_size)
            {
                fprintf(stderr, "front: vector is empty, type size %u\n", (uint32_t)sizeof(T));
                abort();
            }
            return m_p[0];
        }
        BASISU_FORCE_INLINE T& front()
        {
            if (!m_size)
            {
                fprintf(stderr, "front: vector is empty, type size %u\n", (uint32_t)sizeof(T));
                abort();
            }
            return m_p[0];
        }
        BASISU_FORCE_INLINE const T& back() const
        {
            if (!m_size)
            {
                fprintf(stderr, "back: vector is empty, type size %u\n", (uint32_t)sizeof(T));
                abort();
            }
            return m_p[m_size - 1];
        }
        BASISU_FORCE_INLINE T& back()
        {
            if (!m_size)
            {
                fprintf(stderr, "back: vector is empty, type size %u\n", (uint32_t)sizeof(T));
                abort();
            }
            return m_p[m_size - 1];
        }
#endif

        BASISU_FORCE_INLINE const T* get_ptr() const { return m_p; }
        BASISU_FORCE_INLINE T* get_ptr() { return m_p; }

        BASISU_FORCE_INLINE const T* data() const { return m_p; }
        BASISU_FORCE_INLINE T* data() { return m_p; }

        // clear() sets the container to empty, then frees the allocated block.
        inline void clear()
        {
            if (m_p)
            {
                scalar_type<T>::destruct_array(m_p, m_size);
                free(m_p);
                m_p = NULL;
                m_size = 0;
                m_capacity = 0;
            }
        }

        inline void clear_no_destruction()
        {
            if (m_p)
            {
                free(m_p);
                m_p = NULL;
                m_size = 0;
                m_capacity = 0;
            }
        }

        inline void reserve(size_t new_capacity_size_t)
        {
            if (new_capacity_size_t > UINT32_MAX)
            {
                assert(0);
                return;
            }
            uint32_t new_capacity = (uint32_t)new_capacity_size_t;
            if (new_capacity > m_capacity)
                increase_capacity(new_capacity, false);
            else if (new_capacity < m_capacity)
            {
                // Must work around the lack of a "decrease_capacity()" method.
                // This case is rare enough in practice that it's probably not worth implementing an optimized in-place resize.
                vector tmp;
                tmp.increase_capacity(helpers::maximum(m_size, new_capacity), false);
                tmp = *this;
                swap(tmp);
            }
        }

        inline bool try_reserve(size_t new_capacity_size_t)
        {
            if (new_capacity_size_t > UINT32_MAX)
            {
                assert(0);
                return false;
            }
            uint32_t new_capacity = (uint32_t)new_capacity_size_t;
            if (new_capacity > m_capacity)
            {
                if (!increase_capacity(new_capacity, false, true))
                    return false;
            }
            else if (new_capacity < m_capacity)
            {
                // Must work around the lack of a "decrease_capacity()" method.
                // This case is rare enough in practice that it's probably not worth implementing an optimized in-place resize.
                vector tmp;
                if (!tmp.increase_capacity(helpers::maximum(m_size, new_capacity), false, true))
                    return false;
                tmp = *this;
                swap(tmp);
            }
            return true;
        }

        // resize(0) sets the container to empty, but does not free the allocated block.
        inline void resize(size_t new_size_size_t, bool grow_hint = false)
        {
            if (new_size_size_t > UINT32_MAX)
            {
                assert(0);
                return;
            }
            uint32_t new_size = (uint32_t)new_size_size_t;
            if (m_size != new_size)
            {
                if (new_size < m_size)
                    scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);
                else
                {
                    if (new_size > m_capacity)
                        increase_capacity(new_size, (new_size == (m_size + 1)) || grow_hint);
                    scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);
                }
                m_size = new_size;
            }
        }

        inline bool try_resize(size_t new_size_size_t, bool grow_hint = false)
        {
            if (new_size_size_t > UINT32_MAX)
            {
                assert(0);
                return false;
            }
            uint32_t new_size = (uint32_t)new_size_size_t;
            if (m_size != new_size)
            {
                if (new_size < m_size)
                    scalar_type<T>::destruct_array(m_p + new_size, m_size - new_size);
                else
                {
                    if (new_size > m_capacity)
                    {
                        if (!increase_capacity(new_size, (new_size == (m_size + 1)) || grow_hint, true))
                            return false;
                    }
                    scalar_type<T>::construct_array(m_p + m_size, new_size - m_size);
                }
                m_size = new_size;
            }
            return true;
        }

        // If size >= capacity/2, reset() sets the container's size to 0 but doesn't free the allocated block
        // (because the container may be similarly loaded in the future).
        // Otherwise it blows away the allocated block. See http://www.codercorner.com/blog/?p=494
        inline void reset()
        {
            if (m_size >= (m_capacity >> 1))
                resize(0);
            else
                clear();
        }
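
        // Worked example of the reset() heuristic (editor's note): with capacity 100 and
        // size 60, 60 >= 50 so the block is kept for reuse; with size 10, 10 < 50 so the
        // block is freed. This keeps heavily-used containers warm without pinning the
        // memory of mostly-empty ones.
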
        inline T* enlarge(uint32_t i)
        {
            uint32_t cur_size = m_size;
            resize(cur_size + i, true);
            return get_ptr() + cur_size;
        }

        inline T* try_enlarge(uint32_t i)
        {
            uint32_t cur_size = m_size;
            if (!try_resize(cur_size + i, true))
                return NULL;
            return get_ptr() + cur_size;
        }

        BASISU_FORCE_INLINE void push_back(const T& obj)
        {
            assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));
            if (m_size >= m_capacity)
                increase_capacity(m_size + 1, true);
            scalar_type<T>::construct(m_p + m_size, obj);
            m_size++;
        }

        inline bool try_push_back(const T& obj)
        {
            assert(!m_p || (&obj < m_p) || (&obj >= (m_p + m_size)));
            if (m_size >= m_capacity)
            {
                if (!increase_capacity(m_size + 1, true, true))
                    return false;
            }
            scalar_type<T>::construct(m_p + m_size, obj);
            m_size++;
            return true;
        }

        inline void push_back_value(T obj)
        {
            if (m_size >= m_capacity)
                increase_capacity(m_size + 1, true);
            scalar_type<T>::construct(m_p + m_size, obj);
            m_size++;
        }

        inline void pop_back()
        {
            assert(m_size);
            if (m_size)
            {
                m_size--;
                scalar_type<T>::destruct(&m_p[m_size]);
            }
        }

        inline void insert(uint32_t index, const T* p, uint32_t n)
        {
            assert(index <= m_size);
            if (!n)
                return;
            const uint32_t orig_size = m_size;
            resize(m_size + n, true);
            const uint32_t num_to_move = orig_size - index;
            if (BASISU_IS_BITWISE_COPYABLE(T))
            {
                // This overwrites the destination object bits, but bitwise copyable means we don't need to worry about destruction.
                memmove(m_p + index + n, m_p + index, sizeof(T) * num_to_move);
            }
            else
            {
                const T* pSrc = m_p + orig_size - 1;
                T* pDst = const_cast<T*>(pSrc) + n;
                for (uint32_t i = 0; i < num_to_move; i++)
                {
                    assert((pDst - m_p) < (int)m_size);
                    *pDst-- = *pSrc--;
                }
            }
            T* pDst = m_p + index;
            if (BASISU_IS_BITWISE_COPYABLE(T))
            {
                // This copies in the new bits, overwriting the existing objects, which is OK for copyable types that don't need destruction.
                memcpy(pDst, p, sizeof(T) * n);
            }
            else
            {
                for (uint32_t i = 0; i < n; i++)
                {
                    assert((pDst - m_p) < (int)m_size);
                    *pDst++ = *p++;
                }
            }
        }

        inline void insert(T* p, const T& obj)
        {
            int64_t ofs = p - begin();
            if ((ofs < 0) || (ofs > UINT32_MAX))
            {
                assert(0);
                return;
            }
            insert((uint32_t)ofs, &obj, 1);
        }

        // push_front() isn't going to be very fast - it's only here for usability.
        inline void push_front(const T& obj)
        {
            insert(0, &obj, 1);
        }

        vector& append(const vector& other)
        {
            if (other.m_size)
                insert(m_size, &other[0], other.m_size);
            return *this;
        }

        vector& append(const T* p, uint32_t n)
        {
            if (n)
                insert(m_size, p, n);
            return *this;
        }

        inline void erase(uint32_t start, uint32_t n)
        {
            assert((start + n) <= m_size);
            if ((start + n) > m_size)
                return;
            if (!n)
                return;
            const uint32_t num_to_move = m_size - (start + n);
            T* pDst = m_p + start;
            const T* pSrc = m_p + start + n;
            if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T))
            {
                // This test is overly cautious.
                if ((!BASISU_IS_BITWISE_COPYABLE(T)) || (BASISU_HAS_DESTRUCTOR(T)))
                {
                    // Type has been marked explicitly as bitwise movable, which means we can move them around but they may need to be destructed.
                    // First destroy the erased objects.
                    scalar_type<T>::destruct_array(pDst, n);
                }
                // Copy "down" the objects to preserve, filling in the empty slots.
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif
#endif
                memmove(pDst, pSrc, num_to_move * sizeof(T));
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#endif
            }
            else
            {
                // Type is not bitwise copyable or movable.
                // Move them down one at a time by using the equals operator, and destroying anything that's left over at the end.
                T* pDst_end = pDst + num_to_move;
                while (pDst != pDst_end)
                    *pDst++ = *pSrc++;
                scalar_type<T>::destruct_array(pDst_end, n);
            }
            m_size -= n;
        }

        inline void erase(uint32_t index)
        {
            erase(index, 1);
        }

        inline void erase(T* p)
        {
            assert((p >= m_p) && (p < (m_p + m_size)));
            erase(static_cast<uint32_t>(p - m_p));
        }

        inline void erase(T* pFirst, T* pEnd)
        {
            assert(pFirst <= pEnd);
            assert(pFirst >= begin() && pFirst <= end());
            assert(pEnd >= begin() && pEnd <= end());
            int64_t ofs = pFirst - begin();
            if ((ofs < 0) || (ofs > UINT32_MAX))
            {
                assert(0);
                return;
            }
            int64_t n = pEnd - pFirst;
            if ((n < 0) || (n > UINT32_MAX))
            {
                assert(0);
                return;
            }
            erase((uint32_t)ofs, (uint32_t)n);
        }

        void erase_unordered(uint32_t index)
        {
            assert(index < m_size);
            if ((index + 1) < m_size)
                (*this)[index] = back();
            pop_back();
        }

        inline bool operator== (const vector& rhs) const
        {
            if (m_size != rhs.m_size)
                return false;
            else if (m_size)
            {
                if (scalar_type<T>::cFlag)
                    return memcmp(m_p, rhs.m_p, sizeof(T) * m_size) == 0;
                else
                {
                    const T* pSrc = m_p;
                    const T* pDst = rhs.m_p;
                    for (uint32_t i = m_size; i; i--)
                        if (!(*pSrc++ == *pDst++))
                            return false;
                }
            }
            return true;
        }

        inline bool operator< (const vector& rhs) const
        {
            const uint32_t min_size = helpers::minimum(m_size, rhs.m_size);
            const T* pSrc = m_p;
            const T* pSrc_end = m_p + min_size;
            const T* pDst = rhs.m_p;
            while ((pSrc < pSrc_end) && (*pSrc == *pDst))
            {
                pSrc++;
                pDst++;
            }
            if (pSrc < pSrc_end)
                return *pSrc < *pDst;
            return m_size < rhs.m_size;
        }

        inline void swap(vector& other)
        {
            std::swap(m_p, other.m_p);
            std::swap(m_size, other.m_size);
            std::swap(m_capacity, other.m_capacity);
        }

        inline void sort()
        {
            std::sort(begin(), end());
        }

        inline void unique()
        {
            if (!empty())
            {
                sort();
                resize(std::unique(begin(), end()) - begin());
            }
        }

        inline void reverse()
        {
            uint32_t j = m_size >> 1;
            for (uint32_t i = 0; i < j; i++)
                std::swap(m_p[i], m_p[m_size - 1 - i]);
        }

        inline int find(const T& key) const
        {
            const T* p = m_p;
            const T* p_end = m_p + m_size;
            uint32_t index = 0;
            while (p != p_end)
            {
                if (key == *p)
                    return index;
                p++;
                index++;
            }
            return cInvalidIndex;
        }

        inline int find_sorted(const T& key) const
        {
            if (m_size)
            {
                // Uniform binary search - Knuth Algorithm 6.2.1 U, unrolled twice.
                int i = ((m_size + 1) >> 1) - 1;
                int m = m_size;
                for (; ; )
                {
                    assert(i >= 0 && i < (int)m_size);
                    const T* pKey_i = m_p + i;
                    int cmp = key < *pKey_i;
#if defined(_DEBUG) || defined(DEBUG)
                    int cmp2 = *pKey_i < key;
                    assert((cmp != cmp2) || (key == *pKey_i));
#endif
                    if ((!cmp) && (key == *pKey_i)) return i;
                    m >>= 1;
                    if (!m) break;
                    cmp = -cmp;
                    i += (((m + 1) >> 1) ^ cmp) - cmp;
                    if (i < 0)
                        break;
                    assert(i >= 0 && i < (int)m_size);
                    pKey_i = m_p + i;
                    cmp = key < *pKey_i;
#if defined(_DEBUG) || defined(DEBUG)
                    cmp2 = *pKey_i < key;
                    assert((cmp != cmp2) || (key == *pKey_i));
#endif
                    if ((!cmp) && (key == *pKey_i)) return i;
                    m >>= 1;
                    if (!m) break;
                    cmp = -cmp;
                    i += (((m + 1) >> 1) ^ cmp) - cmp;
                    if (i < 0)
                        break;
                }
            }
            return cInvalidIndex;
        }
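
        // Usage sketch (editor's note): find() is a linear scan; find_sorted() requires the
        // vector to already be sorted ascending (e.g. via sort()) and returns cInvalidIndex
        // (-1) when the key is absent:
        //
        //   basisu::vector<int> v;
        //   v.push_back(7); v.push_back(3); v.push_back(9);
        //   v.sort();                        // { 3, 7, 9 }
        //   int i = v.find_sorted(7);        // 1
        //   int j = v.find_sorted(4);        // cInvalidIndex
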
        template<typename Q>
        inline int find_sorted(const T& key, Q less_than) const
        {
            if (m_size)
            {
                // Uniform binary search - Knuth Algorithm 6.2.1 U, unrolled twice.
                int i = ((m_size + 1) >> 1) - 1;
                int m = m_size;
                for (; ; )
                {
                    assert(i >= 0 && i < (int)m_size);
                    const T* pKey_i = m_p + i;
                    int cmp = less_than(key, *pKey_i);
                    if ((!cmp) && (!less_than(*pKey_i, key))) return i;
                    m >>= 1;
                    if (!m) break;
                    cmp = -cmp;
                    i += (((m + 1) >> 1) ^ cmp) - cmp;
                    if (i < 0)
                        break;
                    assert(i >= 0 && i < (int)m_size);
                    pKey_i = m_p + i;
                    cmp = less_than(key, *pKey_i);
                    if ((!cmp) && (!less_than(*pKey_i, key))) return i;
                    m >>= 1;
                    if (!m) break;
                    cmp = -cmp;
                    i += (((m + 1) >> 1) ^ cmp) - cmp;
                    if (i < 0)
                        break;
                }
            }
            return cInvalidIndex;
        }

        inline uint32_t count_occurences(const T& key) const
        {
            uint32_t c = 0;
            const T* p = m_p;
            const T* p_end = m_p + m_size;
            while (p != p_end)
            {
                if (key == *p)
                    c++;
                p++;
            }
            return c;
        }

        inline void set_all(const T& o)
        {
            if ((sizeof(T) == 1) && (scalar_type<T>::cFlag))
            {
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif
#endif
                memset(m_p, *reinterpret_cast<const uint8_t*>(&o), m_size);
#ifndef __EMSCRIPTEN__
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#endif
            }
            else
            {
                T* pDst = m_p;
                T* pDst_end = pDst + m_size;
                while (pDst != pDst_end)
                    *pDst++ = o;
            }
        }

        // Caller assumes ownership of the heap block associated with the container. Container is cleared.
        inline void* assume_ownership()
        {
            T* p = m_p;
            m_p = NULL;
            m_size = 0;
            m_capacity = 0;
            return p;
        }

        // Caller is granting ownership of the indicated heap block.
        // Block must have size constructed elements, and have enough room for capacity elements.
        // The block must have been allocated using malloc().
        // Important: This method is used in Basis Universal. If you change how this container allocates memory, you'll need to change any users of this method.
        inline bool grant_ownership(T* p, uint32_t size, uint32_t capacity)
        {
            // To prevent the caller from obviously shooting themselves in the foot.
            if (((p + capacity) > m_p) && (p < (m_p + m_capacity)))
            {
                // Can't grant ownership of a block that lies inside the container itself!
                assert(0);
                return false;
            }
            if (size > capacity)
            {
                assert(0);
                return false;
            }
            if (!p)
            {
                if (capacity)
                {
                    assert(0);
                    return false;
                }
            }
            else if (!capacity)
            {
                assert(0);
                return false;
            }
            clear();
            m_p = p;
            m_size = size;
            m_capacity = capacity;
            return true;
        }
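
        // Usage sketch (editor's note): assume_ownership()/grant_ownership() let a malloc'd
        // block round-trip through the container without copying:
        //
        //   basisu::vector<uint8_t> v(256);                    // 256 zeroed bytes
        //   uint8_t* pBlock = (uint8_t*)v.assume_ownership();  // v is now empty; caller owns pBlock
        //   basisu::vector<uint8_t> w;
        //   w.grant_ownership(pBlock, 256, 256);               // w now owns (and will free()) pBlock
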
    private:
        T* m_p;
        uint32_t m_size;
        uint32_t m_capacity;

        template<typename Q> struct is_vector { enum { cFlag = false }; };
        template<typename Q> struct is_vector< vector<Q> > { enum { cFlag = true }; };

        static void object_mover(void* pDst_void, void* pSrc_void, uint32_t num)
        {
            T* pSrc = static_cast<T*>(pSrc_void);
            T* const pSrc_end = pSrc + num;
            T* pDst = static_cast<T*>(pDst_void);
            while (pSrc != pSrc_end)
            {
                // placement new
                new (static_cast<void*>(pDst)) T(*pSrc);
                pSrc->~T();
                ++pSrc;
                ++pDst;
            }
        }

        inline bool increase_capacity(uint32_t min_new_capacity, bool grow_hint, bool nofail = false)
        {
            return reinterpret_cast<elemental_vector*>(this)->increase_capacity(
                min_new_capacity, grow_hint, sizeof(T),
                (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(T) || (is_vector<T>::cFlag)) ? NULL : object_mover, nofail);
        }
    };

    template<typename T> struct bitwise_movable< vector<T> > { enum { cFlag = true }; };
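
    // Usage sketch (editor's note, not part of the original header): basisu::vector mirrors a
    // subset of std::vector, with 32-bit sizes and malloc()-backed storage:
    //
    //   basisu::vector<float> v;
    //   v.reserve(8);
    //   for (uint32_t i = 0; i < 8; i++)
    //       v.push_back((float)i);
    //   v.erase_unordered(0);            // O(1) erase: moves back() into slot 0
    //   if (!v.try_push_back(42.0f))
    //   {
    //       // allocation failed; v is unchanged
    //   }
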
    // Hash map
    template <typename T>
    struct hasher
    {
        inline size_t operator() (const T& key) const { return static_cast<size_t>(key); }
    };

    template <typename T>
    struct equal_to
    {
        inline bool operator()(const T& a, const T& b) const { return a == b; }
    };

    // Important: The Hasher and Equals objects must be bitwise movable!
    template<typename Key, typename Value = empty_type, typename Hasher = hasher<Key>, typename Equals = equal_to<Key> >
    class hash_map
    {
    public:
        class iterator;
        class const_iterator;

    private:
        friend class iterator;
        friend class const_iterator;

        enum state
        {
            cStateInvalid = 0,
            cStateValid = 1
        };

        enum
        {
            cMinHashSize = 4U
        };

    public:
        typedef hash_map<Key, Value, Hasher, Equals> hash_map_type;
        typedef std::pair<Key, Value> value_type;
        typedef Key key_type;
        typedef Value referent_type;
        typedef Hasher hasher_type;
        typedef Equals equals_type;

        hash_map() :
            m_hash_shift(32), m_num_valid(0), m_grow_threshold(0)
        {
        }

        hash_map(const hash_map& other) :
            m_values(other.m_values),
            m_hash_shift(other.m_hash_shift),
            m_hasher(other.m_hasher),
            m_equals(other.m_equals),
            m_num_valid(other.m_num_valid),
            m_grow_threshold(other.m_grow_threshold)
        {
        }

        hash_map& operator= (const hash_map& other)
        {
            if (this == &other)
                return *this;
            clear();
            m_values = other.m_values;
            m_hash_shift = other.m_hash_shift;
            m_num_valid = other.m_num_valid;
            m_grow_threshold = other.m_grow_threshold;
            m_hasher = other.m_hasher;
            m_equals = other.m_equals;
            return *this;
        }

        inline ~hash_map()
        {
            clear();
        }

        const Equals& get_equals() const { return m_equals; }
        Equals& get_equals() { return m_equals; }
        void set_equals(const Equals& equals) { m_equals = equals; }

        const Hasher& get_hasher() const { return m_hasher; }
        Hasher& get_hasher() { return m_hasher; }
        void set_hasher(const Hasher& hasher) { m_hasher = hasher; }

        inline void clear()
        {
            if (!m_values.empty())
            {
                if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))
                {
                    node* p = &get_node(0);
                    node* p_end = p + m_values.size();
                    uint32_t num_remaining = m_num_valid;
                    while (p != p_end)
                    {
                        if (p->state)
                        {
                            destruct_value_type(p);
                            num_remaining--;
                            if (!num_remaining)
                                break;
                        }
                        p++;
                    }
                }
                m_values.clear_no_destruction();
                m_hash_shift = 32;
                m_num_valid = 0;
                m_grow_threshold = 0;
            }
        }

        inline void reset()
        {
            if (!m_num_valid)
                return;
            if (BASISU_HAS_DESTRUCTOR(Key) || BASISU_HAS_DESTRUCTOR(Value))
            {
                node* p = &get_node(0);
                node* p_end = p + m_values.size();
                uint32_t num_remaining = m_num_valid;
                while (p != p_end)
                {
                    if (p->state)
                    {
                        destruct_value_type(p);
                        p->state = cStateInvalid;
                        num_remaining--;
                        if (!num_remaining)
                            break;
                    }
                    p++;
                }
            }
            else if (sizeof(node) <= 32)
            {
                memset(&m_values[0], 0, m_values.size_in_bytes());
            }
            else
            {
                node* p = &get_node(0);
                node* p_end = p + m_values.size();
                uint32_t num_remaining = m_num_valid;
                while (p != p_end)
                {
                    if (p->state)
                    {
                        p->state = cStateInvalid;
                        num_remaining--;
                        if (!num_remaining)
                            break;
                    }
                    p++;
                }
            }
            m_num_valid = 0;
        }

        inline uint32_t size()
        {
            return m_num_valid;
        }

        inline uint32_t get_table_size()
        {
            return m_values.size();
        }

        inline bool empty()
        {
            return !m_num_valid;
        }

        inline void reserve(uint32_t new_capacity)
        {
            uint64_t new_hash_size = helpers::maximum(1U, new_capacity);
            new_hash_size = new_hash_size * 2ULL;
            if (!helpers::is_power_of_2(new_hash_size))
                new_hash_size = helpers::next_pow2(new_hash_size);
            new_hash_size = helpers::maximum<uint64_t>(cMinHashSize, new_hash_size);
            new_hash_size = helpers::minimum<uint64_t>(0x80000000UL, new_hash_size);
            if (new_hash_size > m_values.size())
                rehash((uint32_t)new_hash_size);
        }

        class iterator
        {
            friend class hash_map<Key, Value, Hasher, Equals>;
            friend class hash_map<Key, Value, Hasher, Equals>::const_iterator;

        public:
            inline iterator() : m_pTable(NULL), m_index(0) { }
            inline iterator(hash_map_type& table, uint32_t index) : m_pTable(&table), m_index(index) { }
            inline iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }

            inline iterator& operator= (const iterator& other)
            {
                m_pTable = other.m_pTable;
                m_index = other.m_index;
                return *this;
            }

            // post-increment
            inline iterator operator++(int)
            {
                iterator result(*this);
                ++*this;
                return result;
            }

            // pre-increment
            inline iterator& operator++()
            {
                probe();
                return *this;
            }

            inline value_type& operator*() const { return *get_cur(); }
            inline value_type* operator->() const { return get_cur(); }

            inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
            inline bool operator != (const iterator& b) const { return !(*this == b); }
            inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
            inline bool operator != (const const_iterator& b) const { return !(*this == b); }

        private:
            hash_map_type* m_pTable;
            uint32_t m_index;

            inline value_type* get_cur() const
            {
                assert(m_pTable && (m_index < m_pTable->m_values.size()));
                assert(m_pTable->get_node_state(m_index) == cStateValid);
                return &m_pTable->get_node(m_index);
            }

            inline void probe()
            {
                assert(m_pTable);
                m_index = m_pTable->find_next(m_index);
            }
        };

        class const_iterator
        {
            friend class hash_map<Key, Value, Hasher, Equals>;
            friend class hash_map<Key, Value, Hasher, Equals>::iterator;

        public:
            inline const_iterator() : m_pTable(NULL), m_index(0) { }
            inline const_iterator(const hash_map_type& table, uint32_t index) : m_pTable(&table), m_index(index) { }
            inline const_iterator(const iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }
            inline const_iterator(const const_iterator& other) : m_pTable(other.m_pTable), m_index(other.m_index) { }

            inline const_iterator& operator= (const const_iterator& other)
            {
                m_pTable = other.m_pTable;
                m_index = other.m_index;
                return *this;
            }

            inline const_iterator& operator= (const iterator& other)
            {
                m_pTable = other.m_pTable;
                m_index = other.m_index;
                return *this;
            }

            // post-increment
            inline const_iterator operator++(int)
            {
                const_iterator result(*this);
                ++*this;
                return result;
            }

            // pre-increment
            inline const_iterator& operator++()
            {
                probe();
                return *this;
            }

            inline const value_type& operator*() const { return *get_cur(); }
            inline const value_type* operator->() const { return get_cur(); }

            inline bool operator == (const const_iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
            inline bool operator != (const const_iterator& b) const { return !(*this == b); }
            inline bool operator == (const iterator& b) const { return (m_pTable == b.m_pTable) && (m_index == b.m_index); }
            inline bool operator != (const iterator& b) const { return !(*this == b); }

        private:
            const hash_map_type* m_pTable;
            uint32_t m_index;

            inline const value_type* get_cur() const
            {
                assert(m_pTable && (m_index < m_pTable->m_values.size()));
                assert(m_pTable->get_node_state(m_index) == cStateValid);
                return &m_pTable->get_node(m_index);
            }

            inline void probe()
            {
                assert(m_pTable);
                m_index = m_pTable->find_next(m_index);
            }
        };

        inline const_iterator begin() const
        {
            if (!m_num_valid)
                return end();
            return const_iterator(*this, find_next(UINT32_MAX));
        }

        inline const_iterator end() const
        {
            return const_iterator(*this, m_values.size());
        }

        inline iterator begin()
        {
            if (!m_num_valid)
                return end();
            return iterator(*this, find_next(UINT32_MAX));
        }

        inline iterator end()
        {
            return iterator(*this, m_values.size());
        }

        // insert_result.first will always point to the inserted key/value (or the already existing key/value).
        // insert_result.second will be true if a new key/value was inserted, or false if the key already existed (in which case first will point to the already existing value).
        typedef std::pair<iterator, bool> insert_result;

        inline insert_result insert(const Key& k, const Value& v = Value())
        {
            insert_result result;
            if (!insert_no_grow(result, k, v))
            {
                grow();
                // This must succeed.
                if (!insert_no_grow(result, k, v))
                {
                    fprintf(stderr, "insert() failed\n");
                    abort();
                }
            }
            return result;
        }

        inline insert_result insert(const value_type& v)
        {
            return insert(v.first, v.second);
        }
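
        // Usage sketch (editor's note): insert() never overwrites an existing value; check
        // .second to tell the two cases apart:
        //
        //   basisu::hash_map<uint32_t, uint32_t> m;
        //   auto res = m.insert(17, 100);   // res.second == true, new entry
        //   res = m.insert(17, 200);        // res.second == false, res.first->second is still 100
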
        inline const_iterator find(const Key& k) const
        {
            return const_iterator(*this, find_index(k));
        }

        inline iterator find(const Key& k)
        {
            return iterator(*this, find_index(k));
        }

        inline bool erase(const Key& k)
        {
            uint32_t i = find_index(k);
            if (i >= m_values.size())
                return false;
            node* pDst = &get_node(i);
            destruct_value_type(pDst);
            pDst->state = cStateInvalid;
            m_num_valid--;
            for (; ; )
            {
                uint32_t r, j = i;
                node* pSrc = pDst;
                do
                {
                    if (!i)
                    {
                        i = m_values.size() - 1;
                        pSrc = &get_node(i);
                    }
                    else
                    {
                        i--;
                        pSrc--;
                    }
                    if (!pSrc->state)
                        return true;
                    r = hash_key(pSrc->first);
                } while ((i <= r && r < j) || (r < j && j < i) || (j < i && i <= r));
                move_node(pDst, pSrc);
                pDst = pSrc;
            }
        }
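
        // Editor's note on erase(): this is the classic open-addressing "backward shift"
        // deletion. After emptying slot j, it scans backwards (with wraparound, matching the
        // downward probing in insert_no_grow()) and the cyclic test
        //   (i <= r && r < j) || (r < j && j < i) || (j < i && i <= r)
        // is true exactly when home slot r lies cyclically in [i, j), meaning the entry at
        // slot i can stay put. Worked example with table size 8 and a hole at j = 2, scan at
        // i = 1: an entry whose home is r = 1 stays, but an entry whose home is r = 6 has
        // probe path 6 -> 5 -> 4 -> 3 -> 2 -> 1 crossing the hole, so it is shifted into slot 2.
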
        inline void swap(hash_map_type& other)
        {
            m_values.swap(other.m_values);
            std::swap(m_hash_shift, other.m_hash_shift);
            std::swap(m_num_valid, other.m_num_valid);
            std::swap(m_grow_threshold, other.m_grow_threshold);
            std::swap(m_hasher, other.m_hasher);
            std::swap(m_equals, other.m_equals);
        }

    private:
        struct node : public value_type
        {
            uint8_t state;
        };

        static inline void construct_value_type(value_type* pDst, const Key& k, const Value& v)
        {
            if (BASISU_IS_BITWISE_COPYABLE(Key))
                memcpy(&pDst->first, &k, sizeof(Key));
            else
                scalar_type<Key>::construct(&pDst->first, k);
            if (BASISU_IS_BITWISE_COPYABLE(Value))
                memcpy(&pDst->second, &v, sizeof(Value));
            else
                scalar_type<Value>::construct(&pDst->second, v);
        }

        static inline void construct_value_type(value_type* pDst, const value_type* pSrc)
        {
            if ((BASISU_IS_BITWISE_COPYABLE(Key)) && (BASISU_IS_BITWISE_COPYABLE(Value)))
            {
                memcpy(pDst, pSrc, sizeof(value_type));
            }
            else
            {
                if (BASISU_IS_BITWISE_COPYABLE(Key))
                    memcpy(&pDst->first, &pSrc->first, sizeof(Key));
                else
                    scalar_type<Key>::construct(&pDst->first, pSrc->first);
                if (BASISU_IS_BITWISE_COPYABLE(Value))
                    memcpy(&pDst->second, &pSrc->second, sizeof(Value));
                else
                    scalar_type<Value>::construct(&pDst->second, pSrc->second);
            }
        }

        static inline void destruct_value_type(value_type* p)
        {
            scalar_type<Key>::destruct(&p->first);
            scalar_type<Value>::destruct(&p->second);
        }

        // Moves *pSrc to *pDst efficiently.
        // pDst should NOT be constructed on entry.
        static inline void move_node(node* pDst, node* pSrc, bool update_src_state = true)
        {
            assert(!pDst->state);
            if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key) && BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))
            {
                memcpy(pDst, pSrc, sizeof(node));
            }
            else
            {
                if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Key))
                    memcpy(&pDst->first, &pSrc->first, sizeof(Key));
                else
                {
                    scalar_type<Key>::construct(&pDst->first, pSrc->first);
                    scalar_type<Key>::destruct(&pSrc->first);
                }
                if (BASISU_IS_BITWISE_COPYABLE_OR_MOVABLE(Value))
                    memcpy(&pDst->second, &pSrc->second, sizeof(Value));
                else
                {
                    scalar_type<Value>::construct(&pDst->second, pSrc->second);
                    scalar_type<Value>::destruct(&pSrc->second);
                }
                pDst->state = cStateValid;
            }
            if (update_src_state)
                pSrc->state = cStateInvalid;
        }

        struct raw_node
        {
            inline raw_node()
            {
                node* p = reinterpret_cast<node*>(this);
                p->state = cStateInvalid;
            }

            inline ~raw_node()
            {
                node* p = reinterpret_cast<node*>(this);
                if (p->state)
                    hash_map_type::destruct_value_type(p);
            }

            inline raw_node(const raw_node& other)
            {
                node* pDst = reinterpret_cast<node*>(this);
                const node* pSrc = reinterpret_cast<const node*>(&other);
                if (pSrc->state)
                {
                    hash_map_type::construct_value_type(pDst, pSrc);
                    pDst->state = cStateValid;
                }
                else
                    pDst->state = cStateInvalid;
            }

            inline raw_node& operator= (const raw_node& rhs)
            {
                if (this == &rhs)
                    return *this;
                node* pDst = reinterpret_cast<node*>(this);
                const node* pSrc = reinterpret_cast<const node*>(&rhs);
                if (pSrc->state)
                {
                    if (pDst->state)
                    {
                        pDst->first = pSrc->first;
                        pDst->second = pSrc->second;
                    }
                    else
                    {
                        hash_map_type::construct_value_type(pDst, pSrc);
                        pDst->state = cStateValid;
                    }
                }
                else if (pDst->state)
                {
                    hash_map_type::destruct_value_type(pDst);
                    pDst->state = cStateInvalid;
                }
                return *this;
            }

            uint8_t m_bits[sizeof(node)];
        };

        typedef basisu::vector<raw_node> node_vector;

        node_vector m_values;
        uint32_t m_hash_shift;
        Hasher m_hasher;
        Equals m_equals;
        uint32_t m_num_valid;
        uint32_t m_grow_threshold;

        inline uint32_t hash_key(const Key& k) const
        {
            assert((1U << (32U - m_hash_shift)) == m_values.size());
            uint32_t hash = static_cast<uint32_t>(m_hasher(k));
            // Fibonacci hashing
            hash = (2654435769U * hash) >> m_hash_shift;
            assert(hash < m_values.size());
            return hash;
        }
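
        // Editor's note: 2654435769 == floor(2^32 / phi), phi being the golden ratio, so the
        // multiply (mod 2^32) scrambles the raw hash across the full 32-bit range and the
        // shift keeps the top log2(table_size) bits. Worked example with a 16-slot table
        // (m_hash_shift == 28): hash 1 -> 2654435769 >> 28 == 9; hash 2 -> 1013904242 >> 28 == 3.
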
        inline const node& get_node(uint32_t index) const
        {
            return *reinterpret_cast<const node*>(&m_values[index]);
        }

        inline node& get_node(uint32_t index)
        {
            return *reinterpret_cast<node*>(&m_values[index]);
        }

        inline state get_node_state(uint32_t index) const
        {
            return static_cast<state>(get_node(index).state);
        }

        inline void set_node_state(uint32_t index, bool valid)
        {
            get_node(index).state = valid;
        }

        inline void grow()
        {
            uint64_t n = m_values.size() * 3ULL; // was * 2
            if (!helpers::is_power_of_2(n))
                n = helpers::next_pow2(n);
            if (n > 0x80000000UL)
                n = 0x80000000UL;
            rehash(helpers::maximum<uint32_t>(cMinHashSize, (uint32_t)n));
        }

        inline void rehash(uint32_t new_hash_size)
        {
            assert(new_hash_size >= m_num_valid);
            assert(helpers::is_power_of_2(new_hash_size));
            if ((new_hash_size < m_num_valid) || (new_hash_size == m_values.size()))
                return;
            hash_map new_map;
            new_map.m_values.resize(new_hash_size);
            new_map.m_hash_shift = 32U - helpers::floor_log2i(new_hash_size);
            assert(new_hash_size == (1U << (32U - new_map.m_hash_shift)));
            new_map.m_grow_threshold = UINT_MAX;
            node* pNode = reinterpret_cast<node*>(m_values.begin());
            node* pNode_end = pNode + m_values.size();
            while (pNode != pNode_end)
            {
                if (pNode->state)
                {
                    new_map.move_into(pNode);
                    if (new_map.m_num_valid == m_num_valid)
                        break;
                }
                pNode++;
            }
            new_map.m_grow_threshold = (new_hash_size + 1U) >> 1U;
            m_values.clear_no_destruction();
            m_hash_shift = 32;
            swap(new_map);
        }

        inline uint32_t find_next(uint32_t index) const
        {
            index++;
            if (index >= m_values.size())
                return index;
            const node* pNode = &get_node(index);
            for (; ; )
            {
                if (pNode->state)
                    break;
                if (++index >= m_values.size())
                    break;
                pNode++;
            }
            return index;
        }

        inline uint32_t find_index(const Key& k) const
        {
            if (m_num_valid)
            {
                uint32_t index = hash_key(k);
                const node* pNode = &get_node(index);
                if (pNode->state)
                {
                    if (m_equals(pNode->first, k))
                        return index;
                    const uint32_t orig_index = index;
                    for (; ; )
                    {
                        if (!index)
                        {
                            index = m_values.size() - 1;
                            pNode = &get_node(index);
                        }
                        else
                        {
                            index--;
                            pNode--;
                        }
                        if (index == orig_index)
                            break;
                        if (!pNode->state)
                            break;
                        if (m_equals(pNode->first, k))
                            return index;
                    }
                }
            }
            return m_values.size();
        }

        inline bool insert_no_grow(insert_result& result, const Key& k, const Value& v = Value())
        {
            if (!m_values.size())
                return false;
            uint32_t index = hash_key(k);
            node* pNode = &get_node(index);
            if (pNode->state)
            {
                if (m_equals(pNode->first, k))
                {
                    result.first = iterator(*this, index);
                    result.second = false;
                    return true;
                }
                const uint32_t orig_index = index;
                for (; ; )
                {
                    if (!index)
                    {
                        index = m_values.size() - 1;
                        pNode = &get_node(index);
                    }
                    else
                    {
                        index--;
                        pNode--;
                    }
                    if (orig_index == index)
                        return false;
                    if (!pNode->state)
                        break;
                    if (m_equals(pNode->first, k))
                    {
                        result.first = iterator(*this, index);
                        result.second = false;
                        return true;
                    }
                }
            }
            if (m_num_valid >= m_grow_threshold)
                return false;
            construct_value_type(pNode, k, v);
            pNode->state = cStateValid;
            m_num_valid++;
            assert(m_num_valid <= m_values.size());
            result.first = iterator(*this, index);
            result.second = true;
            return true;
        }

        inline void move_into(node* pNode)
        {
            uint32_t index = hash_key(pNode->first);
            node* pDst_node = &get_node(index);
            if (pDst_node->state)
            {
                const uint32_t orig_index = index;
                for (; ; )
                {
                    if (!index)
                    {
                        index = m_values.size() - 1;
                        pDst_node = &get_node(index);
                    }
                    else
                    {
                        index--;
                        pDst_node--;
                    }
                    if (index == orig_index)
                    {
                        assert(false);
                        return;
                    }
                    if (!pDst_node->state)
                        break;
                }
            }
            move_node(pDst_node, pNode, false);
            m_num_valid++;
        }
    };

    template<typename Key, typename Value, typename Hasher, typename Equals>
    struct bitwise_movable< hash_map<Key, Value, Hasher, Equals> > { enum { cFlag = true }; };
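
    // Usage sketch (editor's note, not part of the original header): an open-addressing table
    // with a power-of-2 size; Value defaults to empty_type, so hash_map<Key> doubles as a set:
    //
    //   basisu::hash_map<uint32_t, float> m;
    //   m.reserve(100);
    //   m.insert(5, 1.5f);
    //   auto it = m.find(5);
    //   if (it != m.end())
    //       it->second += 1.0f;
    //   m.erase(5);
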
#if BASISU_HASHMAP_TEST
    extern void hash_map_test();
#endif

} // namespace basisu

namespace std
{
    template<typename T>
    inline void swap(basisu::vector<T>& a, basisu::vector<T>& b)
    {
        a.swap(b);
    }

    template<typename Key, typename Value, typename Hasher, typename Equals>
    inline void swap(basisu::hash_map<Key, Value, Hasher, Equals>& a, basisu::hash_map<Key, Value, Hasher, Equals>& b)
    {
        a.swap(b);
    }
} // namespace std