/*************************************************************************/
/*  pool_vector.h                                                        */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                      https://godotengine.org                          */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur.                 */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md).   */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/
#ifndef POOL_VECTOR_H
#define POOL_VECTOR_H

#include "core/os/copymem.h"
#include "core/os/memory.h"
#include "core/os/rw_lock.h"
#include "core/pool_allocator.h"
#include "core/safe_refcount.h"
#include "core/ustring.h"
struct MemoryPool {
	//avoid accessing these directly, must be public for template access
	static PoolAllocator *memory_pool;
	static uint8_t *pool_memory;
	static size_t *pool_size;

	struct Alloc {
		SafeRefCount refcount;
		uint32_t lock;
		void *mem;
		PoolAllocator::ID pool_id;
		size_t size;

		Alloc *free_list;

		Alloc() :
				lock(0),
				mem(NULL),
				pool_id(POOL_ALLOCATOR_INVALID_ID),
				size(0),
				free_list(NULL) {
		}
	};

	static Alloc *allocs;
	static Alloc *free_list;
	static uint32_t alloc_count;
	static uint32_t allocs_used;
	static Mutex *alloc_mutex;
	static size_t total_memory;
	static size_t max_memory;

	static void setup(uint32_t p_max_allocs = (1 << 16));
	static void cleanup();
};
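
// Illustrative sketch (not part of the original header): MemoryPool::setup()
// and MemoryPool::cleanup() bracket the lifetime of the Alloc table that
// PoolVector draws from. The call site below is hypothetical; in the engine
// these calls are made by core initialization and shutdown.
//
//   MemoryPool::setup();       // reserve the Alloc table (default 1 << 16 entries)
//   {
//       PoolVector<uint8_t> buffer;
//       buffer.resize(1024);   // takes an Alloc slot from MemoryPool::free_list
//   }                          // slot returned to the free list on destruction
//   MemoryPool::cleanup();     // tear the table down once no PoolVectors remain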
template <class T>
class PoolVector {
	MemoryPool::Alloc *alloc;

	void _copy_on_write() {
		if (!alloc)
			return;

		// ERR_FAIL_COND(alloc->lock>0); should not be illegal to lock this for copy on write, as it's a copy on write after all

		// Refcount should not be zero, otherwise it's a misuse of COW
		if (alloc->refcount.get() == 1)
			return; //nothing to do

		//must allocate something
		MemoryPool::alloc_mutex->lock();
		if (MemoryPool::allocs_used == MemoryPool::alloc_count) {
			MemoryPool::alloc_mutex->unlock();
			ERR_FAIL_MSG("All memory pool allocations are in use, can't COW.");
		}

		MemoryPool::Alloc *old_alloc = alloc;

		//take one from the free list
		alloc = MemoryPool::free_list;
		MemoryPool::free_list = alloc->free_list;
		//increment the used counter
		MemoryPool::allocs_used++;

		//copy the alloc data
		alloc->size = old_alloc->size;
		alloc->refcount.init();
		alloc->pool_id = POOL_ALLOCATOR_INVALID_ID;
		alloc->lock = 0;

#ifdef DEBUG_ENABLED
		MemoryPool::total_memory += alloc->size;
		if (MemoryPool::total_memory > MemoryPool::max_memory) {
			MemoryPool::max_memory = MemoryPool::total_memory;
		}
#endif

		MemoryPool::alloc_mutex->unlock();

		if (MemoryPool::memory_pool) {
			//pool allocator path not implemented; allocation happens on the heap below
		} else {
			alloc->mem = memalloc(alloc->size);
		}

		{
			Write w;
			w._ref(alloc);
			Read r;
			r._ref(old_alloc);

			int cur_elements = alloc->size / sizeof(T);
			T *dst = (T *)w.ptr();
			const T *src = (const T *)r.ptr();
			for (int i = 0; i < cur_elements; i++) {
				memnew_placement(&dst[i], T(src[i]));
			}
		}

		if (old_alloc->refcount.unref()) {
			//this should never happen but..

#ifdef DEBUG_ENABLED
			MemoryPool::alloc_mutex->lock();
			MemoryPool::total_memory -= old_alloc->size;
			MemoryPool::alloc_mutex->unlock();
#endif

			{
				Write w;
				w._ref(old_alloc);

				int cur_elements = old_alloc->size / sizeof(T);
				T *elems = (T *)w.ptr();
				for (int i = 0; i < cur_elements; i++) {
					elems[i].~T();
				}
			}

			if (MemoryPool::memory_pool) {
				//resize memory pool
				//if none, create
				//if some resize
			} else {
				memfree(old_alloc->mem);
				old_alloc->mem = NULL;
				old_alloc->size = 0;

				MemoryPool::alloc_mutex->lock();
				old_alloc->free_list = MemoryPool::free_list;
				MemoryPool::free_list = old_alloc;
				MemoryPool::allocs_used--;
				MemoryPool::alloc_mutex->unlock();
			}
		}
	}
	void _reference(const PoolVector &p_pool_vector) {
		if (alloc == p_pool_vector.alloc)
			return;

		_unreference();

		if (!p_pool_vector.alloc) {
			return;
		}

		if (p_pool_vector.alloc->refcount.ref()) {
			alloc = p_pool_vector.alloc;
		}
	}

	void _unreference() {
		if (!alloc)
			return;

		if (!alloc->refcount.unref()) {
			alloc = NULL;
			return;
		}

		//must be disposed!
		{
			int cur_elements = alloc->size / sizeof(T);

			// Don't use write() here because it could otherwise provoke COW,
			// which is not desirable here because we are destroying the last reference anyways
			Write w;
			// Reference to still prevent other threads from touching the alloc
			w._ref(alloc);

			for (int i = 0; i < cur_elements; i++) {
				w[i].~T();
			}
		}

#ifdef DEBUG_ENABLED
		MemoryPool::alloc_mutex->lock();
		MemoryPool::total_memory -= alloc->size;
		MemoryPool::alloc_mutex->unlock();
#endif

		if (MemoryPool::memory_pool) {
			//resize memory pool
			//if none, create
			//if some resize
		} else {
			memfree(alloc->mem);
			alloc->mem = NULL;
			alloc->size = 0;

			MemoryPool::alloc_mutex->lock();
			alloc->free_list = MemoryPool::free_list;
			MemoryPool::free_list = alloc;
			MemoryPool::allocs_used--;
			MemoryPool::alloc_mutex->unlock();
		}

		alloc = NULL;
	}
public:
	class Access {
		friend class PoolVector;

	protected:
		MemoryPool::Alloc *alloc;
		T *mem;

		_FORCE_INLINE_ void _ref(MemoryPool::Alloc *p_alloc) {
			alloc = p_alloc;
			if (alloc) {
				if (atomic_increment(&alloc->lock) == 1) {
					if (MemoryPool::memory_pool) {
						//lock it and get mem
					}
				}

				mem = (T *)alloc->mem;
			}
		}

		_FORCE_INLINE_ void _unref() {
			if (alloc) {
				if (atomic_decrement(&alloc->lock) == 0) {
					if (MemoryPool::memory_pool) {
						//put mem back
					}
				}

				mem = NULL;
				alloc = NULL;
			}
		}

		Access() {
			alloc = NULL;
			mem = NULL;
		}

	public:
		virtual ~Access() {
			_unref();
		}

		void release() {
			_unref();
		}
	};

	class Read : public Access {
	public:
		_FORCE_INLINE_ const T &operator[](int p_index) const { return this->mem[p_index]; }
		_FORCE_INLINE_ const T *ptr() const { return this->mem; }

		void operator=(const Read &p_read) {
			if (this->alloc == p_read.alloc)
				return;
			this->_unref();
			this->_ref(p_read.alloc);
		}

		Read(const Read &p_read) {
			this->_ref(p_read.alloc);
		}

		Read() {}
	};

	class Write : public Access {
	public:
		_FORCE_INLINE_ T &operator[](int p_index) const { return this->mem[p_index]; }
		_FORCE_INLINE_ T *ptr() const { return this->mem; }

		void operator=(const Write &p_read) {
			if (this->alloc == p_read.alloc)
				return;
			this->_unref();
			this->_ref(p_read.alloc);
		}

		Write(const Write &p_read) {
			this->_ref(p_read.alloc);
		}

		Write() {}
	};
	Read read() const {
		Read r;
		if (alloc) {
			r._ref(alloc);
		}
		return r;
	}

	Write write() {
		Write w;
		if (alloc) {
			_copy_on_write(); //make sure there is only one being accessed
			w._ref(alloc);
		}
		return w;
	}
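
	// Illustrative sketch (not part of the original header): typical use of the
	// Read/Write accessors. write() triggers _copy_on_write() when the data is
	// shared, so mutations never affect other PoolVectors referencing the same
	// alloc; the lock taken in Access::_ref() is released when the accessor goes
	// out of scope (or when release() is called).
	//
	//   PoolVector<int> values;
	//   values.resize(3);
	//   {
	//       PoolVector<int>::Write w = values.write(); // unique copy guaranteed here
	//       w[0] = 10;
	//       w[1] = 20;
	//       w[2] = 30;
	//   } // ~Access() releases the lock
	//   PoolVector<int>::Read r = values.read();
	//   int first = r[0]; // read-only access, no copy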
	template <class MC>
	void fill_with(const MC &p_mc) {
		int c = p_mc.size();
		resize(c);
		Write w = write();
		int idx = 0;
		for (const typename MC::Element *E = p_mc.front(); E; E = E->next()) {
			w[idx++] = E->get();
		}
	}

	void remove(int p_index) {
		int s = size();
		ERR_FAIL_INDEX(p_index, s);
		Write w = write();
		for (int i = p_index; i < s - 1; i++) {
			w[i] = w[i + 1];
		}
		w = Write();
		resize(s - 1);
	}

	inline int size() const;
	inline bool empty() const;
	T get(int p_index) const;
	void set(int p_index, const T &p_val);
	void push_back(const T &p_val);
	void append(const T &p_val) { push_back(p_val); }
	void append_array(const PoolVector<T> &p_arr) {
		int ds = p_arr.size();
		if (ds == 0)
			return;
		int bs = size();
		resize(bs + ds);
		Write w = write();
		Read r = p_arr.read();
		for (int i = 0; i < ds; i++)
			w[bs + i] = r[i];
	}

	PoolVector<T> subarray(int p_from, int p_to) {
		if (p_from < 0) {
			p_from = size() + p_from;
		}
		if (p_to < 0) {
			p_to = size() + p_to;
		}

		ERR_FAIL_INDEX_V(p_from, size(), PoolVector<T>());
		ERR_FAIL_INDEX_V(p_to, size(), PoolVector<T>());

		PoolVector<T> slice;
		int span = 1 + p_to - p_from;
		slice.resize(span);
		Read r = read();
		Write w = slice.write();
		for (int i = 0; i < span; ++i) {
			w[i] = r[p_from + i];
		}

		return slice;
	}
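
	// Illustrative sketch (not part of the original header): subarray() treats
	// negative indices as offsets from the end, and the range is inclusive on
	// both sides, so the last element can be addressed as -1.
	//
	//   PoolVector<int> v;                          // assume v holds {1, 2, 3, 4, 5}
	//   PoolVector<int> tail = v.subarray(2, -1);   // {3, 4, 5}
	//   PoolVector<int> mid = v.subarray(1, 3);     // {2, 3, 4}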
	Error insert(int p_pos, const T &p_val) {
		int s = size();
		ERR_FAIL_INDEX_V(p_pos, s + 1, ERR_INVALID_PARAMETER);
		resize(s + 1);
		{
			Write w = write();
			for (int i = s; i > p_pos; i--)
				w[i] = w[i - 1];
			w[p_pos] = p_val;
		}

		return OK;
	}

	String join(String delimiter) {
		String rs = "";
		int s = size();
		Read r = read();
		for (int i = 0; i < s; i++) {
			rs += r[i] + delimiter;
		}
		rs.erase(rs.length() - delimiter.length(), delimiter.length());
		return rs;
	}

	bool is_locked() const { return alloc && alloc->lock > 0; }

	inline T operator[](int p_index) const;

	Error resize(int p_size);

	void invert();

	void operator=(const PoolVector &p_pool_vector) { _reference(p_pool_vector); }
	PoolVector() { alloc = NULL; }
	PoolVector(const PoolVector &p_pool_vector) {
		alloc = NULL;
		_reference(p_pool_vector);
	}
	~PoolVector() { _unreference(); }
};
template <class T>
int PoolVector<T>::size() const {
	return alloc ? alloc->size / sizeof(T) : 0;
}

template <class T>
bool PoolVector<T>::empty() const {
	return alloc ? alloc->size == 0 : true;
}

template <class T>
T PoolVector<T>::get(int p_index) const {
	return operator[](p_index);
}

template <class T>
void PoolVector<T>::set(int p_index, const T &p_val) {
	ERR_FAIL_INDEX(p_index, size());

	Write w = write();
	w[p_index] = p_val;
}

template <class T>
void PoolVector<T>::push_back(const T &p_val) {
	resize(size() + 1);
	set(size() - 1, p_val);
}

template <class T>
T PoolVector<T>::operator[](int p_index) const {
	CRASH_BAD_INDEX(p_index, size());

	Read r = read();
	return r[p_index];
}
template <class T>
Error PoolVector<T>::resize(int p_size) {
	ERR_FAIL_COND_V_MSG(p_size < 0, ERR_INVALID_PARAMETER, "Size of PoolVector cannot be negative.");

	if (alloc == NULL) {
		if (p_size == 0)
			return OK; //nothing to do here

		//must allocate something
		MemoryPool::alloc_mutex->lock();
		if (MemoryPool::allocs_used == MemoryPool::alloc_count) {
			MemoryPool::alloc_mutex->unlock();
			ERR_FAIL_V_MSG(ERR_OUT_OF_MEMORY, "All memory pool allocations are in use.");
		}

		//take one from the free list
		alloc = MemoryPool::free_list;
		MemoryPool::free_list = alloc->free_list;
		//increment the used counter
		MemoryPool::allocs_used++;

		//cleanup the alloc
		alloc->size = 0;
		alloc->refcount.init();
		alloc->pool_id = POOL_ALLOCATOR_INVALID_ID;
		MemoryPool::alloc_mutex->unlock();
	} else {
		ERR_FAIL_COND_V_MSG(alloc->lock > 0, ERR_LOCKED, "Can't resize PoolVector if locked."); //can't resize if locked!
	}

	size_t new_size = sizeof(T) * p_size;

	if (alloc->size == new_size)
		return OK; //nothing to do

	if (p_size == 0) {
		_unreference();
		return OK;
	}

	_copy_on_write(); // make it unique

#ifdef DEBUG_ENABLED
	MemoryPool::alloc_mutex->lock();
	MemoryPool::total_memory -= alloc->size;
	MemoryPool::total_memory += new_size;
	if (MemoryPool::total_memory > MemoryPool::max_memory) {
		MemoryPool::max_memory = MemoryPool::total_memory;
	}
	MemoryPool::alloc_mutex->unlock();
#endif

	int cur_elements = alloc->size / sizeof(T);

	if (p_size > cur_elements) {
		if (MemoryPool::memory_pool) {
			//resize memory pool
			//if none, create
			//if some resize
		} else {
			if (alloc->size == 0) {
				alloc->mem = memalloc(new_size);
			} else {
				alloc->mem = memrealloc(alloc->mem, new_size);
			}
		}

		alloc->size = new_size;

		Write w = write();
		for (int i = cur_elements; i < p_size; i++) {
			memnew_placement(&w[i], T);
		}
	} else {
		{
			Write w = write();
			for (int i = p_size; i < cur_elements; i++) {
				w[i].~T();
			}
		}

		if (MemoryPool::memory_pool) {
			//resize memory pool
			//if none, create
			//if some resize
		} else {
			if (new_size == 0) {
				memfree(alloc->mem);
				alloc->mem = NULL;
				alloc->size = 0;

				MemoryPool::alloc_mutex->lock();
				alloc->free_list = MemoryPool::free_list;
				MemoryPool::free_list = alloc;
				MemoryPool::allocs_used--;
				MemoryPool::alloc_mutex->unlock();
			} else {
				alloc->mem = memrealloc(alloc->mem, new_size);
				alloc->size = new_size;
			}
		}
	}

	return OK;
}
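
// Illustrative sketch (not part of the original header): resize() default
// constructs new elements when growing and runs destructors when shrinking,
// so element count and object lifetimes stay consistent.
//
//   PoolVector<String> names;
//   names.resize(2);    // two default-constructed Strings
//   names.set(0, "a");
//   names.set(1, "b");
//   names.resize(1);    // ~String() runs for the removed element
//   names.resize(0);    // drops the last reference; the Alloc slot returns to MemoryPool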
template <class T>
void PoolVector<T>::invert() {
	T temp;
	Write w = write();
	int s = size();
	int half_s = s / 2;

	for (int i = 0; i < half_s; i++) {
		temp = w[i];
		w[i] = w[s - i - 1];
		w[s - i - 1] = temp;
	}
}

#endif // POOL_VECTOR_H