Vector.h 39 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491
  1. /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
  2. /* This Source Code Form is subject to the terms of the Mozilla Public
  3. * License, v. 2.0. If a copy of the MPL was not distributed with this
  4. * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
  5. /* A type/length-parametrized vector class. */
  6. #ifndef mozilla_Vector_h
  7. #define mozilla_Vector_h
  8. #include "mozilla/Alignment.h"
  9. #include "mozilla/AllocPolicy.h"
  10. #include "mozilla/ArrayUtils.h" // for PointerRangeSize
  11. #include "mozilla/Assertions.h"
  12. #include "mozilla/Attributes.h"
  13. #include "mozilla/MathAlgorithms.h"
  14. #include "mozilla/MemoryReporting.h"
  15. #include "mozilla/Move.h"
  16. #include "mozilla/OperatorNewExtensions.h"
  17. #include "mozilla/ReentrancyGuard.h"
  18. #include "mozilla/TemplateLib.h"
  19. #include "mozilla/TypeTraits.h"
  20. #include <new> // for placement new
  21. /* Silence dire "bugs in previous versions of MSVC have been fixed" warnings */
  22. #ifdef _MSC_VER
  23. #pragma warning(push)
  24. #pragma warning(disable:4345)
  25. #endif
  26. namespace mozilla {
  27. template<typename T, size_t N, class AllocPolicy>
  28. class Vector;
  29. namespace detail {
  30. /*
  31. * Check that the given capacity wastes the minimal amount of space if
  32. * allocated on the heap. This means that aCapacity*sizeof(T) is as close to a
  33. * power-of-two as possible. growStorageBy() is responsible for ensuring this.
  34. */
  35. template<typename T>
  36. static bool CapacityHasExcessSpace(size_t aCapacity)
  37. {
  38. size_t size = aCapacity * sizeof(T);
  39. return RoundUpPow2(size) - size >= sizeof(T);
  40. }
  41. /*
  42. * This template class provides a default implementation for vector operations
  43. * when the element type is not known to be a POD, as judged by IsPod.
  44. */
template<typename T, size_t N, class AP, bool IsPod>
struct VectorImpl
{
  /*
   * Constructs an object in the uninitialized memory at *aDst with aArgs,
   * via placement new.
   */
  template<typename... Args>
  MOZ_NONNULL(1)
  static inline void new_(T* aDst, Args&&... aArgs)
  {
    new(KnownNotNull, aDst) T(Forward<Args>(aArgs)...);
  }

  /* Destroys constructed objects in the range [aBegin, aEnd). */
  static inline void destroy(T* aBegin, T* aEnd)
  {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      p->~T();
    }
  }

  /* Default-constructs objects in the uninitialized range [aBegin, aEnd). */
  static inline void initialize(T* aBegin, T* aEnd)
  {
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new_(p);
    }
  }

  /*
   * Copy-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   */
  template<typename U>
  static inline void copyConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new_(aDst, *p);
    }
  }

  /*
   * Move-constructs objects in the uninitialized range
   * [aDst, aDst+(aSrcEnd-aSrcStart)) from the range [aSrcStart, aSrcEnd).
   * The source objects are left in a moved-from (but still destructible)
   * state; the caller is expected to destroy them.
   */
  template<typename U>
  static inline void moveConstruct(T* aDst, U* aSrcStart, U* aSrcEnd)
  {
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new_(aDst, Move(*p));
    }
  }

  /*
   * Copy-constructs objects in the uninitialized range [aDst, aDst+aN) from
   * the same object aU.
   */
  template<typename U>
  static inline void copyConstructN(T* aDst, size_t aN, const U& aU)
  {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      new_(aDst, aU);
    }
  }

  /*
   * Grows the given buffer to have capacity aNewCap, preserving the objects
   * constructed in the range [begin, end) and updating aV. Assumes that (1)
   * aNewCap has not overflowed, and (2) multiplying aNewCap by sizeof(T) will
   * not overflow.
   *
   * Returns false (leaving aV untouched) if allocation fails.
   */
  static inline MOZ_MUST_USE bool
  growTo(Vector<T, N, AP>& aV, size_t aNewCap)
  {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_malloc<T>(aNewCap);
    if (MOZ_UNLIKELY(!newbuf)) {
      return false;
    }
    // Move each live element into the new allocation, then destroy the
    // moved-from originals before releasing the old buffer.
    T* dst = newbuf;
    T* src = aV.beginNoCheck();
    for (; src < aV.endNoCheck(); ++dst, ++src) {
      new_(dst, Move(*src));
    }
    VectorImpl::destroy(aV.beginNoCheck(), aV.endNoCheck());
    aV.free_(aV.mBegin);
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mCapacity = aNewCap;
    return true;
  }
};
  137. /*
  138. * This partial template specialization provides a default implementation for
  139. * vector operations when the element type is known to be a POD, as judged by
  140. * IsPod.
  141. */
template<typename T, size_t N, class AP>
struct VectorImpl<T, N, AP, true>
{
  /*
   * "Constructs" a POD at *aDst by plain assignment; no placement new or
   * destructor bookkeeping is needed for trivial types.
   */
  template<typename... Args>
  MOZ_NONNULL(1)
  static inline void new_(T* aDst, Args&&... aArgs)
  {
    // Explicitly construct a local object instead of using a temporary since
    // T(args...) will be treated like a C-style cast in the unary case and
    // allow unsafe conversions. Both forms should be equivalent to an
    // optimizing compiler.
    T temp(Forward<Args>(aArgs)...);
    *aDst = temp;
  }

  /* PODs have trivial destructors, so destruction is a no-op. */
  static inline void destroy(T*, T*) {}

  static inline void initialize(T* aBegin, T* aEnd)
  {
    /*
     * You would think that memset would be a big win (or even break even)
     * when we know T is a POD. But currently it's not. This is probably
     * because |append| tends to be given small ranges and memset requires
     * a function call that doesn't get inlined.
     *
     * memset(aBegin, 0, sizeof(T) * (aEnd - aBegin));
     */
    MOZ_ASSERT(aBegin <= aEnd);
    for (T* p = aBegin; p < aEnd; ++p) {
      new_(p);
    }
  }

  template<typename U>
  static inline void copyConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    /*
     * See above memset comment. Also, notice that copyConstruct is
     * currently templated (T != U), so memcpy won't work without
     * requiring T == U.
     *
     * memcpy(aDst, aSrcStart, sizeof(T) * (aSrcEnd - aSrcStart));
     */
    MOZ_ASSERT(aSrcStart <= aSrcEnd);
    for (const U* p = aSrcStart; p < aSrcEnd; ++p, ++aDst) {
      new_(aDst, *p);
    }
  }

  /* For PODs, moving is the same as copying. */
  template<typename U>
  static inline void moveConstruct(T* aDst,
                                   const U* aSrcStart, const U* aSrcEnd)
  {
    copyConstruct(aDst, aSrcStart, aSrcEnd);
  }

  static inline void copyConstructN(T* aDst, size_t aN, const T& aT)
  {
    for (T* end = aDst + aN; aDst < end; ++aDst) {
      new_(aDst, aT);
    }
  }

  /*
   * Grows the buffer to capacity aNewCap with pod_realloc, which may move
   * the bytes wholesale -- safe only because T is a POD. Returns false
   * (leaving aV untouched) on allocation failure.
   */
  static inline MOZ_MUST_USE bool
  growTo(Vector<T, N, AP>& aV, size_t aNewCap)
  {
    MOZ_ASSERT(!aV.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(aNewCap));
    T* newbuf = aV.template pod_realloc<T>(aV.mBegin, aV.mCapacity, aNewCap);
    if (MOZ_UNLIKELY(!newbuf)) {
      return false;
    }
    aV.mBegin = newbuf;
    /* aV.mLength is unchanged. */
    aV.mCapacity = aNewCap;
    return true;
  }

  /*
   * Shrinks the heap allocation to exactly aV.mLength elements. Shrinking
   * is best-effort: a failed realloc leaves the (still valid) larger buffer
   * in place, so the failure is deliberately ignored.
   */
  static inline void
  podResizeToFit(Vector<T, N, AP>& aV)
  {
    if (aV.usingInlineStorage() || aV.mLength == aV.mCapacity) {
      return;
    }
    T* newbuf = aV.template pod_realloc<T>(aV.mBegin, aV.mCapacity, aV.mLength);
    if (MOZ_UNLIKELY(!newbuf)) {
      return;
    }
    aV.mBegin = newbuf;
    aV.mCapacity = aV.mLength;
  }
};
  228. // A struct for TestVector.cpp to access private internal fields.
  229. // DO NOT DEFINE IN YOUR OWN CODE.
  230. struct VectorTesting;
  231. } // namespace detail
  232. /*
  233. * STL-like container providing a short-lived, dynamic buffer. Vector calls the
  234. * constructors/destructors of all elements stored in its internal buffer, so
  235. * non-PODs may be safely used. Additionally, Vector will store the first N
  236. * elements in-place before resorting to dynamic allocation.
  237. *
  238. * T requirements:
  239. * - default and copy constructible, assignable, destructible
  240. * - operations do not throw
  241. * MinInlineCapacity requirements:
  242. * - any value, however, MinInlineCapacity is clamped to min/max values
  243. * AllocPolicy:
  244. * - see "Allocation policies" in AllocPolicy.h (defaults to
  245. * mozilla::MallocAllocPolicy)
  246. *
  247. * Vector is not reentrant: T member functions called during Vector member
  248. * functions must not call back into the same object!
  249. */
template<typename T,
         size_t MinInlineCapacity = 0,
         class AllocPolicy = MallocAllocPolicy>
class Vector final : private AllocPolicy
{
  /* utilities */

  static const bool kElemIsPod = IsPod<T>::value;
  typedef detail::VectorImpl<T, MinInlineCapacity, AllocPolicy, kElemIsPod> Impl;
  friend struct detail::VectorImpl<T, MinInlineCapacity, AllocPolicy, kElemIsPod>;
  friend struct detail::VectorTesting;

  MOZ_MUST_USE bool growStorageBy(size_t aIncr);
  MOZ_MUST_USE bool convertToHeapStorage(size_t aNewCap);
  MOZ_MUST_USE bool maybeCheckSimulatedOOM(size_t aRequestedSize);

  /* magic constants */

  /* Upper bound on the number of bytes dedicated to in-line storage. */
  static const int kMaxInlineBytes = 1024;

  /* compute constants */

  /*
   * Consider element size to be 1 for buffer sizing if there are 0 inline
   * elements. This allows us to compile when the definition of the element
   * type is not visible here.
   *
   * Explicit specialization is only allowed at namespace scope, so in order
   * to keep everything here, we use a dummy template parameter with partial
   * specialization.
   */
  template<int M, int Dummy>
  struct ElemSize
  {
    static const size_t value = sizeof(T);
  };
  template<int Dummy>
  struct ElemSize<0, Dummy>
  {
    static const size_t value = 1;
  };

  /* MinInlineCapacity clamped so in-line storage never exceeds kMaxInlineBytes. */
  static const size_t kInlineCapacity =
    tl::Min<MinInlineCapacity, kMaxInlineBytes / ElemSize<MinInlineCapacity, 0>::value>::value;

  /* Calculate inline buffer size; avoid 0-sized array. */
  static const size_t kInlineBytes =
    tl::Max<1, kInlineCapacity * ElemSize<MinInlineCapacity, 0>::value>::value;

  /* member data */

  /*
   * Pointer to the buffer, be it inline or heap-allocated. Only [mBegin,
   * mBegin + mLength) hold valid constructed T objects. The range [mBegin +
   * mLength, mBegin + mCapacity) holds uninitialized memory. The range
   * [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory
   * previously allocated by a call to reserve().
   */
  T* mBegin;

  /* Number of elements in the vector. */
  size_t mLength;

  /* Max number of elements storable in the vector without resizing. */
  size_t mCapacity;

#ifdef DEBUG
  /* Max elements of reserved or used space in this vector. */
  size_t mReserved;
#endif

  /* Memory used for inline storage. */
  AlignedStorage<kInlineBytes> mStorage;

#ifdef DEBUG
  /* Set by ReentrancyGuard while a mutating member function is running. */
  friend class ReentrancyGuard;
  bool mEntered;
#endif

  /* private accessors */

  bool usingInlineStorage() const
  {
    return mBegin == const_cast<Vector*>(this)->inlineStorage();
  }

  T* inlineStorage()
  {
    return static_cast<T*>(mStorage.addr());
  }

  /* Like begin()/end() but without the mEntered re-entrancy assertion. */
  T* beginNoCheck() const
  {
    return mBegin;
  }

  T* endNoCheck()
  {
    return mBegin + mLength;
  }

  const T* endNoCheck() const
  {
    return mBegin + mLength;
  }

#ifdef DEBUG
  /**
   * The amount of explicitly allocated space in this vector that is immediately
   * available to be filled by appending additional elements. This value is
   * always greater than or equal to |length()| -- the vector's actual elements
   * are implicitly reserved. This value is always less than or equal to
   * |capacity()|. It may be explicitly increased using the |reserve()| method.
   */
  size_t reserved() const
  {
    MOZ_ASSERT(mLength <= mReserved);
    MOZ_ASSERT(mReserved <= mCapacity);
    return mReserved;
  }
#endif

  /* Append operations guaranteed to succeed due to pre-reserved space. */
  template<typename U> void internalAppend(U&& aU);
  template<typename U, size_t O, class BP>
  void internalAppendAll(const Vector<U, O, BP>& aU);
  void internalAppendN(const T& aT, size_t aN);
  template<typename U> void internalAppend(const U* aBegin, size_t aLength);

public:
  static const size_t sMaxInlineStorage = MinInlineCapacity;

  typedef T ElementType;

  explicit Vector(AllocPolicy = AllocPolicy());
  Vector(Vector&&); /* Move constructor. */
  Vector& operator=(Vector&&); /* Move assignment. */
  ~Vector();

  /* accessors */

  const AllocPolicy& allocPolicy() const { return *this; }

  AllocPolicy& allocPolicy() { return *this; }

  enum { InlineLength = MinInlineCapacity };

  size_t length() const { return mLength; }

  bool empty() const { return mLength == 0; }

  size_t capacity() const { return mCapacity; }

  T* begin()
  {
    MOZ_ASSERT(!mEntered);
    return mBegin;
  }

  const T* begin() const
  {
    MOZ_ASSERT(!mEntered);
    return mBegin;
  }

  T* end()
  {
    MOZ_ASSERT(!mEntered);
    return mBegin + mLength;
  }

  const T* end() const
  {
    MOZ_ASSERT(!mEntered);
    return mBegin + mLength;
  }

  T& operator[](size_t aIndex)
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(aIndex < mLength);
    return begin()[aIndex];
  }

  const T& operator[](size_t aIndex) const
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(aIndex < mLength);
    return begin()[aIndex];
  }

  T& back()
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }

  const T& back() const
  {
    MOZ_ASSERT(!mEntered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }

  /*
   * A forward-only iteration helper over [mCur, mEnd). Invalidated by any
   * mutation of the owning vector.
   */
  class Range
  {
    friend class Vector;
    T* mCur;
    T* mEnd;
    Range(T* aCur, T* aEnd)
      : mCur(aCur)
      , mEnd(aEnd)
    {
      MOZ_ASSERT(aCur <= aEnd);
    }

  public:
    bool empty() const { return mCur == mEnd; }
    size_t remain() const { return PointerRangeSize(mCur, mEnd); }
    T& front() const { MOZ_ASSERT(!empty()); return *mCur; }
    void popFront() { MOZ_ASSERT(!empty()); ++mCur; }
    T popCopyFront() { MOZ_ASSERT(!empty()); return *mCur++; }
  };

  /* Const counterpart of Range. */
  class ConstRange
  {
    friend class Vector;
    const T* mCur;
    const T* mEnd;
    ConstRange(const T* aCur, const T* aEnd)
      : mCur(aCur)
      , mEnd(aEnd)
    {
      MOZ_ASSERT(aCur <= aEnd);
    }

  public:
    bool empty() const { return mCur == mEnd; }
    size_t remain() const { return PointerRangeSize(mCur, mEnd); }
    const T& front() const { MOZ_ASSERT(!empty()); return *mCur; }
    void popFront() { MOZ_ASSERT(!empty()); ++mCur; }
    T popCopyFront() { MOZ_ASSERT(!empty()); return *mCur++; }
  };

  Range all() { return Range(begin(), end()); }
  ConstRange all() const { return ConstRange(begin(), end()); }

  /* mutators */

  /**
   * Reverse the order of the elements in the vector in place.
   */
  void reverse();

  /**
   * Given that the vector is empty, grow the internal capacity to |aRequest|,
   * keeping the length 0.
   */
  MOZ_MUST_USE bool initCapacity(size_t aRequest);

  /**
   * Given that the vector is empty, grow the internal capacity and length to
   * |aRequest| leaving the elements' memory completely uninitialized (with all
   * the associated hazards and caveats). This avoids the usual allocation-size
   * rounding that happens in resize and overhead of initialization for elements
   * that are about to be overwritten.
   */
  MOZ_MUST_USE bool initLengthUninitialized(size_t aRequest);

  /**
   * If reserve(aRequest) succeeds and |aRequest >= length()|, then appending
   * |aRequest - length()| elements, in any sequence of append/appendAll calls,
   * is guaranteed to succeed.
   *
   * A request to reserve an amount less than the current length does not affect
   * reserved space.
   */
  MOZ_MUST_USE bool reserve(size_t aRequest);

  /**
   * Destroy elements in the range [end() - aIncr, end()). Does not deallocate
   * or unreserve storage for those elements.
   */
  void shrinkBy(size_t aIncr);

  /**
   * Destroy elements in the range [aNewLength, end()). Does not deallocate
   * or unreserve storage for those elements.
   */
  void shrinkTo(size_t aNewLength);

  /** Grow the vector by aIncr elements. */
  MOZ_MUST_USE bool growBy(size_t aIncr);

  /** Call shrinkBy or growBy based on whether newSize > length(). */
  MOZ_MUST_USE bool resize(size_t aNewLength);

  /**
   * Increase the length of the vector, but don't initialize the new elements
   * -- leave them as uninitialized memory.
   */
  MOZ_MUST_USE bool growByUninitialized(size_t aIncr);
  void infallibleGrowByUninitialized(size_t aIncr);
  MOZ_MUST_USE bool resizeUninitialized(size_t aNewLength);

  /** Shorthand for shrinkBy(length()). */
  void clear();

  /** Clears and releases any heap-allocated storage. */
  void clearAndFree();

  /**
   * Calls the AllocPolicy's pod_realloc to release excess capacity. Since
   * realloc is only safe on PODs, this method fails to compile if IsPod<T>
   * is false.
   */
  void podResizeToFit();

  /**
   * If true, appending |aNeeded| elements won't reallocate elements storage.
   * This *doesn't* mean that infallibleAppend may be used! You still must
   * reserve the extra space, even if this method indicates that appends won't
   * need to reallocate elements storage.
   */
  bool canAppendWithoutRealloc(size_t aNeeded) const;

  /* Potentially fallible append operations. */

  /**
   * This can take either a T& or a T&&. Given a T&&, it moves |aU| into the
   * vector, instead of copying it. If it fails, |aU| is left unmoved. ("We are
   * not amused.")
   */
  template<typename U> MOZ_MUST_USE bool append(U&& aU);

  /**
   * Construct a T in-place as a new entry at the end of this vector.
   */
  template<typename... Args>
  MOZ_MUST_USE bool emplaceBack(Args&&... aArgs)
  {
    if (!growByUninitialized(1))
      return false;
    Impl::new_(&back(), Forward<Args>(aArgs)...);
    return true;
  }

  template<typename U, size_t O, class BP>
  MOZ_MUST_USE bool appendAll(const Vector<U, O, BP>& aU);
  MOZ_MUST_USE bool appendN(const T& aT, size_t aN);
  template<typename U> MOZ_MUST_USE bool append(const U* aBegin, const U* aEnd);
  template<typename U> MOZ_MUST_USE bool append(const U* aBegin, size_t aLength);

  /*
   * Guaranteed-infallible append operations for use upon vectors whose
   * memory has been pre-reserved. Don't use this if you haven't reserved the
   * memory!
   */
  template<typename U> void infallibleAppend(U&& aU)
  {
    internalAppend(Forward<U>(aU));
  }
  void infallibleAppendN(const T& aT, size_t aN)
  {
    internalAppendN(aT, aN);
  }
  template<typename U> void infallibleAppend(const U* aBegin, const U* aEnd)
  {
    internalAppend(aBegin, PointerRangeSize(aBegin, aEnd));
  }
  template<typename U> void infallibleAppend(const U* aBegin, size_t aLength)
  {
    internalAppend(aBegin, aLength);
  }
  template<typename... Args>
  void infallibleEmplaceBack(Args&&... aArgs)
  {
    infallibleGrowByUninitialized(1);
    Impl::new_(&back(), Forward<Args>(aArgs)...);
  }

  void popBack();

  T popCopy();

  /**
   * If elements are stored in-place, return nullptr and leave this vector
   * unmodified.
   *
   * Otherwise return this vector's elements buffer, and clear this vector as if
   * by clearAndFree(). The caller now owns the buffer and is responsible for
   * deallocating it consistent with this vector's AllocPolicy.
   *
   * N.B. Although a T*, only the range [0, length()) is constructed.
   */
  MOZ_MUST_USE T* extractRawBuffer();

  /**
   * If elements are stored in-place, allocate a new buffer, move this vector's
   * elements into it, and return that buffer.
   *
   * Otherwise return this vector's elements buffer. The caller now owns the
   * buffer and is responsible for deallocating it consistent with this vector's
   * AllocPolicy.
   *
   * This vector is cleared, as if by clearAndFree(), when this method
   * succeeds. This method fails and returns nullptr only if new elements buffer
   * allocation fails.
   *
   * N.B. Only the range [0, length()) of the returned buffer is constructed.
   * If any of these elements are uninitialized (as growByUninitialized
   * enables), behavior is undefined.
   */
  MOZ_MUST_USE T* extractOrCopyRawBuffer();

  /**
   * Transfer ownership of an array of objects into the vector. The caller
   * must have allocated the array in accordance with this vector's
   * AllocPolicy.
   *
   * N.B. This call assumes that there are no uninitialized elements in the
   * passed array.
   */
  void replaceRawBuffer(T* aP, size_t aLength);

  /**
   * Places |aVal| at position |aP|, shifting existing elements from |aP| onward
   * one position higher. On success, |aP| should not be reused because it'll
   * be a dangling pointer if reallocation of the vector storage occurred; the
   * return value should be used instead. On failure, nullptr is returned.
   *
   * Example usage:
   *
   *   if (!(p = vec.insert(p, val))) {
   *     <handle failure>
   *   }
   *   <keep working with p>
   *
   * This is inherently a linear-time operation. Be careful!
   */
  template<typename U>
  MOZ_MUST_USE T* insert(T* aP, U&& aVal);

  /**
   * Removes the element |aT|, which must fall in the bounds [begin, end),
   * shifting existing elements from |aT + 1| onward one position lower.
   */
  void erase(T* aT);

  /**
   * Removes the elements [|aBegin|, |aEnd|), which must fall in the bounds
   * [begin, end), shifting existing elements from |aEnd + 1| onward to aBegin's
   * old position.
   */
  void erase(T* aBegin, T* aEnd);

  /**
   * Measure the size of the vector's heap-allocated storage.
   */
  size_t sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const;

  /**
   * Like sizeOfExcludingThis, but also measures the size of the vector
   * object (which must be heap-allocated) itself.
   */
  size_t sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const;

  void swap(Vector& aOther);

private:
  /* Copying is not supported; use move construction/assignment instead. */
  Vector(const Vector&) = delete;
  void operator=(const Vector&) = delete;
};
/*
 * This does the re-entrancy check plus several other sanity checks: the
 * in-line/capacity relationship and the mLength <= reserved() <= mCapacity
 * invariants. reserved() only exists in DEBUG builds, as does ReentrancyGuard's
 * checking, so in release builds this expands to (nearly) nothing.
 */
#define MOZ_REENTRANCY_GUARD_ET_AL \
  ReentrancyGuard g(*this); \
  MOZ_ASSERT_IF(usingInlineStorage(), mCapacity == kInlineCapacity); \
  MOZ_ASSERT(reserved() <= mCapacity); \
  MOZ_ASSERT(mLength <= reserved()); \
  MOZ_ASSERT(mLength <= mCapacity)
  654. /* Vector Implementation */
template<typename T, size_t N, class AP>
MOZ_ALWAYS_INLINE
Vector<T, N, AP>::Vector(AP aAP)
  : AP(aAP)
  , mLength(0)
  , mCapacity(kInlineCapacity)
#ifdef DEBUG
  , mReserved(0)
  , mEntered(false)
#endif
{
  // Start out pointing at the in-line buffer; no heap allocation happens
  // until the capacity must grow beyond kInlineCapacity.
  mBegin = static_cast<T*>(mStorage.addr());
}
/*
 * Move constructor. Steals aRhs's heap buffer when it has one; otherwise
 * move-constructs element-by-element out of aRhs's in-line storage.
 */
template<typename T, size_t N, class AllocPolicy>
MOZ_ALWAYS_INLINE
Vector<T, N, AllocPolicy>::Vector(Vector&& aRhs)
  : AllocPolicy(Move(aRhs))
#ifdef DEBUG
  , mEntered(false)
#endif
{
  mLength = aRhs.mLength;
  mCapacity = aRhs.mCapacity;
#ifdef DEBUG
  mReserved = aRhs.mReserved;
#endif
  if (aRhs.usingInlineStorage()) {
    /* We can't move the buffer over in this case, so copy elements. */
    mBegin = static_cast<T*>(mStorage.addr());
    Impl::moveConstruct(mBegin, aRhs.beginNoCheck(), aRhs.endNoCheck());
    /*
     * Leave aRhs's mLength, mBegin, mCapacity, and mReserved as they are.
     * The elements in its in-line storage still need to be destroyed.
     */
  } else {
    /*
     * Take src's buffer, and turn src into an empty vector using
     * in-line storage.
     */
    mBegin = aRhs.mBegin;
    aRhs.mBegin = static_cast<T*>(aRhs.mStorage.addr());
    aRhs.mCapacity = kInlineCapacity;
    aRhs.mLength = 0;
#ifdef DEBUG
    aRhs.mReserved = 0;
#endif
  }
}
/* Move assignment: destroy current contents, then move-construct in place. */
template<typename T, size_t N, class AP>
MOZ_ALWAYS_INLINE Vector<T, N, AP>&
Vector<T, N, AP>::operator=(Vector&& aRhs)
{
  MOZ_ASSERT(this != &aRhs, "self-move assignment is prohibited");
  // Explicit destroy + placement-new delegates all the buffer-stealing
  // logic to the move constructor.
  this->~Vector();
  new(KnownNotNull, this) Vector(Move(aRhs));
  return *this;
}
template<typename T, size_t N, class AP>
MOZ_ALWAYS_INLINE
Vector<T, N, AP>::~Vector()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  // Destroy the constructed elements, then free the buffer -- but only if
  // it was heap-allocated; the in-line buffer lives inside this object.
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage()) {
    this->free_(beginNoCheck());
  }
}
  724. template<typename T, size_t N, class AP>
  725. MOZ_ALWAYS_INLINE void
  726. Vector<T, N, AP>::reverse() {
  727. MOZ_REENTRANCY_GUARD_ET_AL;
  728. T* elems = mBegin;
  729. size_t len = mLength;
  730. size_t mid = len / 2;
  731. for (size_t i = 0; i < mid; i++) {
  732. Swap(elems[i], elems[len - i - 1]);
  733. }
  734. }
  735. /*
  736. * This function will create a new heap buffer with capacity aNewCap,
  737. * move all elements in the inline buffer to this new buffer,
  738. * and fail on OOM.
  739. */
  740. template<typename T, size_t N, class AP>
  741. inline bool
  742. Vector<T, N, AP>::convertToHeapStorage(size_t aNewCap)
  743. {
  744. MOZ_ASSERT(usingInlineStorage());
  745. /* Allocate buffer. */
  746. MOZ_ASSERT(!detail::CapacityHasExcessSpace<T>(aNewCap));
  747. T* newBuf = this->template pod_malloc<T>(aNewCap);
  748. if (MOZ_UNLIKELY(!newBuf)) {
  749. return false;
  750. }
  751. /* Copy inline elements into heap buffer. */
  752. Impl::moveConstruct(newBuf, beginNoCheck(), endNoCheck());
  753. Impl::destroy(beginNoCheck(), endNoCheck());
  754. /* Switch in heap buffer. */
  755. mBegin = newBuf;
  756. /* mLength is unchanged. */
  757. mCapacity = aNewCap;
  758. return true;
  759. }
/*
 * Grow the storage so that at least aIncr more elements fit beyond the
 * current length (possibly moving inline elements to the heap). Returns
 * false on overflow or OOM, reporting through the alloc policy.
 */
template<typename T, size_t N, class AP>
MOZ_NEVER_INLINE bool
Vector<T, N, AP>::growStorageBy(size_t aIncr)
{
  MOZ_ASSERT(mLength + aIncr > mCapacity);

  /*
   * When choosing a new capacity, its size should be as close to 2**N bytes
   * as possible. 2**N-sized requests are best because they are unlikely to
   * be rounded up by the allocator. Asking for a 2**N number of elements
   * isn't as good, because if sizeof(T) is not a power-of-two that would
   * result in a non-2**N request size.
   */
  size_t newCap;

  if (aIncr == 1) {
    if (usingInlineStorage()) {
      /* This case occurs in ~70--80% of the calls to this function. */
      size_t newSize =
        tl::RoundUpPow2<(kInlineCapacity + 1) * sizeof(T)>::value;
      newCap = newSize / sizeof(T);
      goto convert;
    }

    if (mLength == 0) {
      /* This case occurs in ~0--10% of the calls to this function. */
      newCap = 1;
      goto grow;
    }

    /* This case occurs in ~15--20% of the calls to this function. */

    /*
     * Will mLength * 4 *sizeof(T) overflow? This condition limits a vector
     * to 1GB of memory on a 32-bit system, which is a reasonable limit. It
     * also ensures that
     *
     *   static_cast<char*>(end()) - static_cast<char*>(begin())
     *
     * doesn't overflow ptrdiff_t (see bug 510319).
     */
    if (MOZ_UNLIKELY(mLength & tl::MulOverflowMask<4 * sizeof(T)>::value)) {
      this->reportAllocOverflow();
      return false;
    }

    /*
     * If we reach here, the existing capacity will have a size that is already
     * as close to 2^N as sizeof(T) will allow. Just double the capacity, and
     * then there might be space for one more element.
     */
    newCap = mLength * 2;
    if (detail::CapacityHasExcessSpace<T>(newCap)) {
      newCap += 1;
    }
  } else {
    /* This case occurs in ~2% of the calls to this function. */
    size_t newMinCap = mLength + aIncr;

    /* Did mLength + aIncr overflow? Will newCap * sizeof(T) overflow? */
    if (MOZ_UNLIKELY(newMinCap < mLength ||
                     newMinCap & tl::MulOverflowMask<2 * sizeof(T)>::value))
    {
      this->reportAllocOverflow();
      return false;
    }

    /* Round up to a power-of-two request size, per the note above. */
    size_t newMinSize = newMinCap * sizeof(T);
    size_t newSize = RoundUpPow2(newMinSize);
    newCap = newSize / sizeof(T);
  }

  if (usingInlineStorage()) {
convert:
    return convertToHeapStorage(newCap);
  }

grow:
  return Impl::growTo(*this, newCap);
}
  830. template<typename T, size_t N, class AP>
  831. inline bool
  832. Vector<T, N, AP>::initCapacity(size_t aRequest)
  833. {
  834. MOZ_ASSERT(empty());
  835. MOZ_ASSERT(usingInlineStorage());
  836. if (aRequest == 0) {
  837. return true;
  838. }
  839. T* newbuf = this->template pod_malloc<T>(aRequest);
  840. if (MOZ_UNLIKELY(!newbuf)) {
  841. return false;
  842. }
  843. mBegin = newbuf;
  844. mCapacity = aRequest;
  845. #ifdef DEBUG
  846. mReserved = aRequest;
  847. #endif
  848. return true;
  849. }
  850. template<typename T, size_t N, class AP>
  851. inline bool
  852. Vector<T, N, AP>::initLengthUninitialized(size_t aRequest)
  853. {
  854. if (!initCapacity(aRequest)) {
  855. return false;
  856. }
  857. infallibleGrowByUninitialized(aRequest);
  858. return true;
  859. }
  860. template<typename T, size_t N, class AP>
  861. inline bool
  862. Vector<T, N, AP>::maybeCheckSimulatedOOM(size_t aRequestedSize)
  863. {
  864. if (aRequestedSize <= N) {
  865. return true;
  866. }
  867. #ifdef DEBUG
  868. if (aRequestedSize <= mReserved) {
  869. return true;
  870. }
  871. #endif
  872. return allocPolicy().checkSimulatedOOM();
  873. }
  874. template<typename T, size_t N, class AP>
  875. inline bool
  876. Vector<T, N, AP>::reserve(size_t aRequest)
  877. {
  878. MOZ_REENTRANCY_GUARD_ET_AL;
  879. if (aRequest > mCapacity) {
  880. if (MOZ_UNLIKELY(!growStorageBy(aRequest - mLength))) {
  881. return false;
  882. }
  883. } else if (!maybeCheckSimulatedOOM(aRequest)) {
  884. return false;
  885. }
  886. #ifdef DEBUG
  887. if (aRequest > mReserved) {
  888. mReserved = aRequest;
  889. }
  890. MOZ_ASSERT(mLength <= mReserved);
  891. MOZ_ASSERT(mReserved <= mCapacity);
  892. #endif
  893. return true;
  894. }
  895. template<typename T, size_t N, class AP>
  896. inline void
  897. Vector<T, N, AP>::shrinkBy(size_t aIncr)
  898. {
  899. MOZ_REENTRANCY_GUARD_ET_AL;
  900. MOZ_ASSERT(aIncr <= mLength);
  901. Impl::destroy(endNoCheck() - aIncr, endNoCheck());
  902. mLength -= aIncr;
  903. }
  904. template<typename T, size_t N, class AP>
  905. MOZ_ALWAYS_INLINE void
  906. Vector<T, N, AP>::shrinkTo(size_t aNewLength)
  907. {
  908. MOZ_ASSERT(aNewLength <= mLength);
  909. shrinkBy(mLength - aNewLength);
  910. }
  911. template<typename T, size_t N, class AP>
  912. MOZ_ALWAYS_INLINE bool
  913. Vector<T, N, AP>::growBy(size_t aIncr)
  914. {
  915. MOZ_REENTRANCY_GUARD_ET_AL;
  916. if (aIncr > mCapacity - mLength) {
  917. if (MOZ_UNLIKELY(!growStorageBy(aIncr))) {
  918. return false;
  919. }
  920. } else if (!maybeCheckSimulatedOOM(mLength + aIncr)) {
  921. return false;
  922. }
  923. MOZ_ASSERT(mLength + aIncr <= mCapacity);
  924. T* newend = endNoCheck() + aIncr;
  925. Impl::initialize(endNoCheck(), newend);
  926. mLength += aIncr;
  927. #ifdef DEBUG
  928. if (mLength > mReserved) {
  929. mReserved = mLength;
  930. }
  931. #endif
  932. return true;
  933. }
  934. template<typename T, size_t N, class AP>
  935. MOZ_ALWAYS_INLINE bool
  936. Vector<T, N, AP>::growByUninitialized(size_t aIncr)
  937. {
  938. MOZ_REENTRANCY_GUARD_ET_AL;
  939. if (aIncr > mCapacity - mLength) {
  940. if (MOZ_UNLIKELY(!growStorageBy(aIncr))) {
  941. return false;
  942. }
  943. } else if (!maybeCheckSimulatedOOM(mLength + aIncr)) {
  944. return false;
  945. }
  946. #ifdef DEBUG
  947. if (mLength + aIncr > mReserved) {
  948. mReserved = mLength + aIncr;
  949. }
  950. #endif
  951. infallibleGrowByUninitialized(aIncr);
  952. return true;
  953. }
template<typename T, size_t N, class AP>
MOZ_ALWAYS_INLINE void
Vector<T, N, AP>::infallibleGrowByUninitialized(size_t aIncr)
{
  /* Caller must have reserved the space; the new elements are left raw. */
  MOZ_ASSERT(mLength + aIncr <= reserved());
  mLength += aIncr;
}
  961. template<typename T, size_t N, class AP>
  962. inline bool
  963. Vector<T, N, AP>::resize(size_t aNewLength)
  964. {
  965. size_t curLength = mLength;
  966. if (aNewLength > curLength) {
  967. return growBy(aNewLength - curLength);
  968. }
  969. shrinkBy(curLength - aNewLength);
  970. return true;
  971. }
  972. template<typename T, size_t N, class AP>
  973. MOZ_ALWAYS_INLINE bool
  974. Vector<T, N, AP>::resizeUninitialized(size_t aNewLength)
  975. {
  976. size_t curLength = mLength;
  977. if (aNewLength > curLength) {
  978. return growByUninitialized(aNewLength - curLength);
  979. }
  980. shrinkBy(curLength - aNewLength);
  981. return true;
  982. }
template<typename T, size_t N, class AP>
inline void
Vector<T, N, AP>::clear()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  /* Destroy all elements but keep the current buffer and capacity. */
  Impl::destroy(beginNoCheck(), endNoCheck());
  mLength = 0;
}
  991. template<typename T, size_t N, class AP>
  992. inline void
  993. Vector<T, N, AP>::clearAndFree()
  994. {
  995. clear();
  996. if (usingInlineStorage()) {
  997. return;
  998. }
  999. this->free_(beginNoCheck());
  1000. mBegin = static_cast<T*>(mStorage.addr());
  1001. mCapacity = kInlineCapacity;
  1002. #ifdef DEBUG
  1003. mReserved = 0;
  1004. #endif
  1005. }
template<typename T, size_t N, class AP>
inline void
Vector<T, N, AP>::podResizeToFit()
{
  // This function is only defined if IsPod is true and will fail to compile
  // otherwise.
  // Delegates entirely to the implementation; presumably it releases excess
  // capacity for POD element types — see the Impl definition to confirm.
  Impl::podResizeToFit(*this);
}
  1014. template<typename T, size_t N, class AP>
  1015. inline bool
  1016. Vector<T, N, AP>::canAppendWithoutRealloc(size_t aNeeded) const
  1017. {
  1018. return mLength + aNeeded <= mCapacity;
  1019. }
template<typename T, size_t N, class AP>
template<typename U, size_t O, class BP>
MOZ_ALWAYS_INLINE void
Vector<T, N, AP>::internalAppendAll(const Vector<U, O, BP>& aOther)
{
  /* Infallible append of every element of aOther; space must be reserved. */
  internalAppend(aOther.begin(), aOther.length());
}
template<typename T, size_t N, class AP>
template<typename U>
MOZ_ALWAYS_INLINE void
Vector<T, N, AP>::internalAppend(U&& aU)
{
  /* Infallible: the caller must already have guaranteed capacity. */
  MOZ_ASSERT(mLength + 1 <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  /* Construct in place at the end, preserving aU's lvalue/rvalue-ness. */
  Impl::new_(endNoCheck(), Forward<U>(aU));
  ++mLength;
}
  1037. template<typename T, size_t N, class AP>
  1038. MOZ_ALWAYS_INLINE bool
  1039. Vector<T, N, AP>::appendN(const T& aT, size_t aNeeded)
  1040. {
  1041. MOZ_REENTRANCY_GUARD_ET_AL;
  1042. if (mLength + aNeeded > mCapacity) {
  1043. if (MOZ_UNLIKELY(!growStorageBy(aNeeded))) {
  1044. return false;
  1045. }
  1046. } else if (!maybeCheckSimulatedOOM(mLength + aNeeded)) {
  1047. return false;
  1048. }
  1049. #ifdef DEBUG
  1050. if (mLength + aNeeded > mReserved) {
  1051. mReserved = mLength + aNeeded;
  1052. }
  1053. #endif
  1054. internalAppendN(aT, aNeeded);
  1055. return true;
  1056. }
template<typename T, size_t N, class AP>
MOZ_ALWAYS_INLINE void
Vector<T, N, AP>::internalAppendN(const T& aT, size_t aNeeded)
{
  /* Infallible: the caller must already have guaranteed capacity. */
  MOZ_ASSERT(mLength + aNeeded <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstructN(endNoCheck(), aNeeded, aT);
  mLength += aNeeded;
}
template<typename T, size_t N, class AP>
template<typename U>
inline T*
Vector<T, N, AP>::insert(T* aP, U&& aVal)
{
  /*
   * Insert aVal before position aP, shifting later elements up by one.
   * Returns a pointer to the inserted element, or nullptr on OOM. An
   * append may reallocate, so the position is remembered as an index and
   * the returned pointer is recomputed from begin().
   */
  MOZ_ASSERT(begin() <= aP);
  MOZ_ASSERT(aP <= end());
  size_t pos = aP - begin();
  MOZ_ASSERT(pos <= mLength);
  size_t oldLength = mLength;
  if (pos == oldLength) {
    /* Inserting at the end is just an append. */
    if (!append(Forward<U>(aVal))) {
      return nullptr;
    }
  } else {
    /*
     * Move the last element into a new slot at the end, shift the
     * (pos, oldLength-1) range up one slot, then drop aVal into place.
     */
    T oldBack = Move(back());
    if (!append(Move(oldBack))) {
      return nullptr;
    }
    for (size_t i = oldLength - 1; i > pos; --i) {
      (*this)[i] = Move((*this)[i - 1]);
    }
    (*this)[pos] = Forward<U>(aVal);
  }
  return begin() + pos;
}
  1092. template<typename T, size_t N, class AP>
  1093. inline void
  1094. Vector<T, N, AP>::erase(T* aIt)
  1095. {
  1096. MOZ_ASSERT(begin() <= aIt);
  1097. MOZ_ASSERT(aIt < end());
  1098. while (aIt + 1 < end()) {
  1099. *aIt = Move(*(aIt + 1));
  1100. ++aIt;
  1101. }
  1102. popBack();
  1103. }
  1104. template<typename T, size_t N, class AP>
  1105. inline void
  1106. Vector<T, N, AP>::erase(T* aBegin, T* aEnd)
  1107. {
  1108. MOZ_ASSERT(begin() <= aBegin);
  1109. MOZ_ASSERT(aBegin <= aEnd);
  1110. MOZ_ASSERT(aEnd <= end());
  1111. while (aEnd < end()) {
  1112. *aBegin++ = Move(*aEnd++);
  1113. }
  1114. shrinkBy(aEnd - aBegin);
  1115. }
  1116. template<typename T, size_t N, class AP>
  1117. template<typename U>
  1118. MOZ_ALWAYS_INLINE bool
  1119. Vector<T, N, AP>::append(const U* aInsBegin, const U* aInsEnd)
  1120. {
  1121. MOZ_REENTRANCY_GUARD_ET_AL;
  1122. size_t aNeeded = PointerRangeSize(aInsBegin, aInsEnd);
  1123. if (mLength + aNeeded > mCapacity) {
  1124. if (MOZ_UNLIKELY(!growStorageBy(aNeeded))) {
  1125. return false;
  1126. }
  1127. } else if (!maybeCheckSimulatedOOM(mLength + aNeeded)) {
  1128. return false;
  1129. }
  1130. #ifdef DEBUG
  1131. if (mLength + aNeeded > mReserved) {
  1132. mReserved = mLength + aNeeded;
  1133. }
  1134. #endif
  1135. internalAppend(aInsBegin, aNeeded);
  1136. return true;
  1137. }
template<typename T, size_t N, class AP>
template<typename U>
MOZ_ALWAYS_INLINE void
Vector<T, N, AP>::internalAppend(const U* aInsBegin, size_t aInsLength)
{
  /* Infallible: the caller must already have guaranteed capacity. */
  MOZ_ASSERT(mLength + aInsLength <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstruct(endNoCheck(), aInsBegin, aInsBegin + aInsLength);
  mLength += aInsLength;
}
  1148. template<typename T, size_t N, class AP>
  1149. template<typename U>
  1150. MOZ_ALWAYS_INLINE bool
  1151. Vector<T, N, AP>::append(U&& aU)
  1152. {
  1153. MOZ_REENTRANCY_GUARD_ET_AL;
  1154. if (mLength == mCapacity) {
  1155. if (MOZ_UNLIKELY(!growStorageBy(1))) {
  1156. return false;
  1157. }
  1158. } else if (!maybeCheckSimulatedOOM(mLength + 1)) {
  1159. return false;
  1160. }
  1161. #ifdef DEBUG
  1162. if (mLength + 1 > mReserved) {
  1163. mReserved = mLength + 1;
  1164. }
  1165. #endif
  1166. internalAppend(Forward<U>(aU));
  1167. return true;
  1168. }
template<typename T, size_t N, class AP>
template<typename U, size_t O, class BP>
MOZ_ALWAYS_INLINE bool
Vector<T, N, AP>::appendAll(const Vector<U, O, BP>& aOther)
{
  /* Fallible append of every element of aOther; returns false on OOM. */
  return append(aOther.begin(), aOther.length());
}
template<typename T, size_t N, class AP>
template<class U>
MOZ_ALWAYS_INLINE bool
Vector<T, N, AP>::append(const U* aInsBegin, size_t aInsLength)
{
  /* Forward to the iterator-pair overload. */
  return append(aInsBegin, aInsBegin + aInsLength);
}
template<typename T, size_t N, class AP>
MOZ_ALWAYS_INLINE void
Vector<T, N, AP>::popBack()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  MOZ_ASSERT(!empty());
  /* Decrement first so endNoCheck() points at the element to destroy. */
  --mLength;
  endNoCheck()->~T();
}
template<typename T, size_t N, class AP>
MOZ_ALWAYS_INLINE T
Vector<T, N, AP>::popCopy()
{
  /* Copy the last element out before destroying it in place. */
  T ret = back();
  popBack();
  return ret;
}
  1200. template<typename T, size_t N, class AP>
  1201. inline T*
  1202. Vector<T, N, AP>::extractRawBuffer()
  1203. {
  1204. MOZ_REENTRANCY_GUARD_ET_AL;
  1205. if (usingInlineStorage()) {
  1206. return nullptr;
  1207. }
  1208. T* ret = mBegin;
  1209. mBegin = static_cast<T*>(mStorage.addr());
  1210. mLength = 0;
  1211. mCapacity = kInlineCapacity;
  1212. #ifdef DEBUG
  1213. mReserved = 0;
  1214. #endif
  1215. return ret;
  1216. }
template<typename T, size_t N, class AP>
inline T*
Vector<T, N, AP>::extractOrCopyRawBuffer()
{
  /*
   * Hand out the heap buffer directly when we have one; otherwise allocate
   * a fresh buffer and move the inline elements into it. Either way the
   * vector is left empty. Returns nullptr on OOM.
   */
  if (T* ret = extractRawBuffer()) {
    return ret;
  }
  /* Guard taken here, not above: extractRawBuffer() takes it itself. */
  MOZ_REENTRANCY_GUARD_ET_AL;
  T* copy = this->template pod_malloc<T>(mLength);
  if (!copy) {
    return nullptr;
  }
  Impl::moveConstruct(copy, beginNoCheck(), endNoCheck());
  Impl::destroy(beginNoCheck(), endNoCheck());
  mBegin = static_cast<T*>(mStorage.addr());
  mLength = 0;
  mCapacity = kInlineCapacity;
#ifdef DEBUG
  mReserved = 0;
#endif
  return copy;
}
template<typename T, size_t N, class AP>
inline void
Vector<T, N, AP>::replaceRawBuffer(T* aP, size_t aLength)
{
  /*
   * Discard the current contents and take ownership of the caller-supplied
   * buffer aP holding aLength constructed elements. aP must be freeable by
   * this vector's alloc policy, since it is freed here (inline case) or by
   * the destructor.
   */
  MOZ_REENTRANCY_GUARD_ET_AL;

  /* Destroy what we have. */
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage()) {
    this->free_(beginNoCheck());
  }

  /* Take in the new buffer. */
  if (aLength <= kInlineCapacity) {
    /*
     * We convert to inline storage if possible, even though aP might
     * otherwise be acceptable.  Maybe this behaviour should be
     * specifiable with an argument to this function.
     */
    mBegin = static_cast<T*>(mStorage.addr());
    mLength = aLength;
    mCapacity = kInlineCapacity;
    /* Move aP's elements inline, then destroy the originals and free aP. */
    Impl::moveConstruct(mBegin, aP, aP + aLength);
    Impl::destroy(aP, aP + aLength);
    this->free_(aP);
  } else {
    mBegin = aP;
    mLength = aLength;
    mCapacity = aLength;
  }
#ifdef DEBUG
  mReserved = aLength;
#endif
}
  1271. template<typename T, size_t N, class AP>
  1272. inline size_t
  1273. Vector<T, N, AP>::sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
  1274. {
  1275. return usingInlineStorage() ? 0 : aMallocSizeOf(beginNoCheck());
  1276. }
  1277. template<typename T, size_t N, class AP>
  1278. inline size_t
  1279. Vector<T, N, AP>::sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
  1280. {
  1281. return aMallocSizeOf(this) + sizeOfExcludingThis(aMallocSizeOf);
  1282. }
template<typename T, size_t N, class AP>
inline void
Vector<T, N, AP>::swap(Vector& aOther)
{
  static_assert(N == 0,
                "still need to implement this for N != 0");

  /*
   * With N == 0 the inline storage is always empty, so no elements ever
   * need to be moved — only the buffer pointers are exchanged. A vector
   * using inline storage must end up pointing at ITS OWN inline storage,
   * so mBegin cannot be swapped blindly.
   */
  // This only works when inline storage is always empty.
  if (!usingInlineStorage() && aOther.usingInlineStorage()) {
    aOther.mBegin = mBegin;
    mBegin = inlineStorage();
  } else if (usingInlineStorage() && !aOther.usingInlineStorage()) {
    mBegin = aOther.mBegin;
    aOther.mBegin = aOther.inlineStorage();
  } else if (!usingInlineStorage() && !aOther.usingInlineStorage()) {
    Swap(mBegin, aOther.mBegin);
  } else {
    // This case is a no-op, since we'd set both to use their inline storage.
  }

  Swap(mLength, aOther.mLength);
  Swap(mCapacity, aOther.mCapacity);
#ifdef DEBUG
  Swap(mReserved, aOther.mReserved);
#endif
}
  1307. } // namespace mozilla
  1308. #ifdef _MSC_VER
  1309. #pragma warning(pop)
  1310. #endif
  1311. #endif /* mozilla_Vector_h */