// DFGAllocator.h
  1. /*
  2. * Copyright (C) 2013 Apple Inc. All rights reserved.
  3. *
  4. * Redistribution and use in source and binary forms, with or without
  5. * modification, are permitted provided that the following conditions
  6. * are met:
  7. * 1. Redistributions of source code must retain the above copyright
  8. * notice, this list of conditions and the following disclaimer.
  9. * 2. Redistributions in binary form must reproduce the above copyright
  10. * notice, this list of conditions and the following disclaimer in the
  11. * documentation and/or other materials provided with the distribution.
  12. *
  13. * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
  14. * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  15. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  16. * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
  17. * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
  18. * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  19. * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  20. * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
  21. * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  22. * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  23. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  24. */
  25. #ifndef DFGAllocator_h
  26. #define DFGAllocator_h
  27. #include <wtf/Platform.h>
  28. #if ENABLE(DFG_JIT)
  29. #include "DFGCommon.h"
  30. #include <wtf/PageAllocationAligned.h>
  31. #include <wtf/StdLibExtras.h>
  32. namespace JSC { namespace DFG {
  33. // Custom pool allocator for exactly one type (type T). It has fast (O(1), only a few
  34. // instructions) allocator, and a similarly fast free(). Recycling works if either of
  35. // the following is true:
  36. // - T has a trivial destructor. In that case you don't have to ever call free() on
  37. // anything. You can just call freeAll() instead.
  38. // - You call free() on all T's that you allocated, and never use freeAll().
template<typename T>
class Allocator {
public:
    Allocator();
    ~Allocator(); // Calls reset(), returning all regions to the OS without running T destructors.

    void* allocate(); // Use placement new to allocate, and avoid using this method.
    void free(T*); // Call this method to delete; never use 'delete' directly.
    void freeAll(); // Only call this if T has a trivial destructor.
    void reset(); // Like freeAll(), but also returns all memory to the OS.

    unsigned indexOf(const T*); // Linear scan over regions; CRASHes if the object was not allocated here.
    static Allocator* allocatorOf(const T*); // Recovers the owning allocator via the region's back-pointer.

private:
    void* bumpAllocate(); // Fast path: carve the next cell out of the current bump region.
    void* freeListAllocate(); // Pop a recycled cell, or fall through to allocateSlow().
    void* allocateSlow(); // Allocates a fresh region from the OS and bump-allocates out of it.

    // A Region is one size()-aligned chunk of OS pages: a header (this struct)
    // at the front, followed by the payload of numberOfThingsPerRegion() T cells.
    struct Region {
        static size_t size() { return 64 * KB; }
        // Header is at least sizeof(T) so payload cells never overlap the header.
        // NOTE(review): presumably this also keeps data() suitably placed for T — confirm alignment expectations.
        static size_t headerSize() { return std::max(sizeof(Region), sizeof(T)); }
        static unsigned numberOfThingsPerRegion() { return (size() - headerSize()) / sizeof(T); }
        T* data() { return bitwise_cast<T*>(bitwise_cast<char*>(this) + headerSize()); }
        // Unsigned wrap-around makes one comparison cover both below-range and above-range pointers.
        bool isInThisRegion(const T* pointer) { return static_cast<unsigned>(pointer - data()) < numberOfThingsPerRegion(); }
        // Valid because regions are allocated aligned to their own size (see allocateSlow()).
        static Region* regionFor(const T* pointer) { return bitwise_cast<Region*>(bitwise_cast<uintptr_t>(pointer) & ~(size() - 1)); }

        PageAllocationAligned m_allocation; // Owns the underlying OS pages.
        Allocator* m_allocator; // Back-pointer used by allocatorOf().
        Region* m_next; // Singly-linked list of regions, newest first.
    };

    void freeRegionsStartingAt(Allocator::Region*);
    void startBumpingIn(Allocator::Region*);

    Region* m_regionHead; // Head of the region list; also the current bump region.
    void** m_freeListHead; // Intrusive free list threaded through freed cells.
    T* m_bumpEnd; // One past the end of the bump region's payload.
    unsigned m_bumpRemaining; // Cells still available for bump allocation; next cell is m_bumpEnd - m_bumpRemaining.
};
  72. template<typename T>
  73. inline Allocator<T>::Allocator()
  74. : m_regionHead(0)
  75. , m_freeListHead(0)
  76. , m_bumpRemaining(0)
  77. {
  78. }
template<typename T>
inline Allocator<T>::~Allocator()
{
    // Return every region to the OS. Outstanding T's are not destructed here.
    reset();
}
  84. template<typename T>
  85. ALWAYS_INLINE void* Allocator<T>::allocate()
  86. {
  87. void* result = bumpAllocate();
  88. if (LIKELY(!!result))
  89. return result;
  90. return freeListAllocate();
  91. }
  92. template<typename T>
  93. void Allocator<T>::free(T* object)
  94. {
  95. object->~T();
  96. void** cell = bitwise_cast<void**>(object);
  97. *cell = m_freeListHead;
  98. m_freeListHead = cell;
  99. }
  100. template<typename T>
  101. void Allocator<T>::freeAll()
  102. {
  103. if (!m_regionHead) {
  104. ASSERT(!m_bumpRemaining);
  105. ASSERT(!m_freeListHead);
  106. return;
  107. }
  108. // Since the caller is opting out of calling the destructor for any allocated thing,
  109. // we have two choices, plus a continuum between: we can either just delete all regions
  110. // (i.e. call reset()), or we can make all regions available for reuse. We do something
  111. // that optimizes for (a) speed of freeAll(), (b) the assumption that if the user calls
  112. // freeAll() then they will probably be calling allocate() in the near future. Namely,
  113. // we free all but one region, and make the remaining region a bump allocation region.
  114. freeRegionsStartingAt(m_regionHead->m_next);
  115. m_regionHead->m_next = 0;
  116. m_freeListHead = 0;
  117. startBumpingIn(m_regionHead);
  118. }
  119. template<typename T>
  120. void Allocator<T>::reset()
  121. {
  122. freeRegionsStartingAt(m_regionHead);
  123. m_regionHead = 0;
  124. m_freeListHead = 0;
  125. m_bumpRemaining = 0;
  126. }
  127. template<typename T>
  128. unsigned Allocator<T>::indexOf(const T* object)
  129. {
  130. unsigned baseIndex = 0;
  131. for (Region* region = m_regionHead; region; region = region->m_next) {
  132. if (region->isInThisRegion(object))
  133. return baseIndex + (object - region->data());
  134. baseIndex += Region::numberOfThingsPerRegion();
  135. }
  136. CRASH();
  137. return 0;
  138. }
template<typename T>
Allocator<T>* Allocator<T>::allocatorOf(const T* object)
{
    // Regions are aligned to their own size, so masking the object's address
    // recovers the region header, which stores its owning allocator.
    return Region::regionFor(object)->m_allocator;
}
  144. template<typename T>
  145. ALWAYS_INLINE void* Allocator<T>::bumpAllocate()
  146. {
  147. if (unsigned remaining = m_bumpRemaining) {
  148. remaining--;
  149. m_bumpRemaining = remaining;
  150. return m_bumpEnd - (remaining + 1);
  151. }
  152. return 0;
  153. }
  154. template<typename T>
  155. void* Allocator<T>::freeListAllocate()
  156. {
  157. void** result = m_freeListHead;
  158. if (UNLIKELY(!result))
  159. return allocateSlow();
  160. m_freeListHead = bitwise_cast<void**>(*result);
  161. return result;
  162. }
  163. template<typename T>
  164. void* Allocator<T>::allocateSlow()
  165. {
  166. ASSERT(!m_freeListHead);
  167. ASSERT(!m_bumpRemaining);
  168. if (logCompilationChanges())
  169. dataLog("Allocating another allocator region.\n");
  170. PageAllocationAligned allocation = PageAllocationAligned::allocate(Region::size(), Region::size(), OSAllocator::JSGCHeapPages);
  171. if (!static_cast<bool>(allocation))
  172. CRASH();
  173. Region* region = static_cast<Region*>(allocation.base());
  174. region->m_allocation = allocation;
  175. region->m_allocator = this;
  176. startBumpingIn(region);
  177. region->m_next = m_regionHead;
  178. m_regionHead = region;
  179. void* result = bumpAllocate();
  180. ASSERT(result);
  181. return result;
  182. }
  183. template<typename T>
  184. void Allocator<T>::freeRegionsStartingAt(typename Allocator<T>::Region* region)
  185. {
  186. while (region) {
  187. Region* nextRegion = region->m_next;
  188. region->m_allocation.deallocate();
  189. region = nextRegion;
  190. }
  191. }
  192. template<typename T>
  193. void Allocator<T>::startBumpingIn(typename Allocator<T>::Region* region)
  194. {
  195. m_bumpEnd = region->data() + Region::numberOfThingsPerRegion();
  196. m_bumpRemaining = Region::numberOfThingsPerRegion();
  197. }
  198. } } // namespace JSC::DFG
  199. #endif // ENABLE(DFG_JIT)
  200. #endif // DFGAllocator_h