- /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
- /* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
- //
- // This file implements a garbage-cycle collector based on the paper
- //
- // Concurrent Cycle Collection in Reference Counted Systems
- // Bacon & Rajan (2001), ECOOP 2001 / Springer LNCS vol 2072
- //
- // We are not using the concurrent or acyclic cases of that paper; so
- // the green, red and orange colors are not used.
- //
- // The collector is based on tracking pointers of four colors:
- //
- // Black nodes are definitely live. If we ever determine a node is
- // black, it's OK to forget about it and drop it from our records.
- //
- // White nodes are definitely garbage cycles. Once we finish with our
- // scanning, we unlink all the white nodes and expect that by
- // unlinking them they will self-destruct (since a garbage cycle is
- // only keeping itself alive with internal links, by definition).
- //
- // Snow-white is an addition to the original algorithm. A snow-white object
- // has a reference count of zero and is just waiting for deletion.
- //
- // Grey nodes are being scanned. Nodes that turn grey will turn
- // either black if we determine that they're live, or white if we
- // determine that they're a garbage cycle. After the main collection
- // algorithm there should be no grey nodes.
- //
- // Purple nodes are *candidates* for being scanned. They are nodes we
- // haven't begun scanning yet because they're not old enough, or we're
- // still partway through the algorithm.
- //
- // XPCOM objects participating in garbage-cycle collection are obliged
- // to inform us when they ought to turn purple; that is, when their
- // refcount transitions from N+1 -> N, for nonzero N. Furthermore we
- // require that *after* an XPCOM object has informed us of turning
- // purple, they will tell us when they either transition back to being
- // black (incremented refcount) or are ultimately deleted.
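- //
- // For orientation, a minimal sketch (hypothetical class Foo, not part of
- // this file; the QueryInterface map is omitted) of how a refcounted class
- // typically opts in. The NS_IMPL_CYCLE_COLLECTING_* macros expand to
- // AddRef/Release implementations built on nsCycleCollectingAutoRefCnt,
- // which performs the N+1 -> N notification described above:
- //
- //   class Foo final : public nsISupports
- //   {
- //   public:
- //     NS_DECL_CYCLE_COLLECTING_ISUPPORTS
- //     NS_DECL_CYCLE_COLLECTION_CLASS(Foo)
- //   private:
- //     ~Foo() {}
- //     nsCOMPtr<nsISupports> mMember;  // traversed/unlinked by the participant
- //   };
- //   NS_IMPL_CYCLE_COLLECTION(Foo, mMember)
- //   NS_IMPL_CYCLE_COLLECTING_ADDREF(Foo)
- //   NS_IMPL_CYCLE_COLLECTING_RELEASE(Foo)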
- //
- // Incremental cycle collection
- //
- // Beyond the simple state machine required to implement incremental
- // collection, the CC needs to be able to compensate for things the browser
- // is doing during the collection. There are two kinds of problems. For each
- // of these, there are two cases to deal with: purple-buffered C++ objects
- // and JS objects.
- //
- // The first problem is that an object in the CC's graph can become garbage.
- // This is bad because the CC touches the objects in its graph at every
- // stage of its operation.
- //
- // All cycle collected C++ objects that die during a cycle collection
- // will end up actually getting deleted by the SnowWhiteKiller. Before
- // the SWK deletes an object, it checks if an ICC is running, and if so,
- // if the object is in the graph. If it is, the CC clears mPointer and
- // mParticipant so it does not point to the raw object any more. Because
- // objects could die any time the CC returns to the mutator, any time the CC
- // accesses a PtrInfo it must perform a null check on mParticipant to
- // ensure the object has not gone away.
- //
- // JS objects don't always run finalizers, so the CC can't remove them from
- // the graph when they die. Fortunately, JS objects can only die during a GC,
- // so if a GC is begun during an ICC, the browser synchronously finishes off
- // the ICC, which clears the entire CC graph. If the GC and CC are scheduled
- // properly, this should be rare.
- //
- // The second problem is that objects in the graph can be changed, say by
- // being addrefed or released, or by having a field updated, after the object
- // has been added to the graph. The problem is that ICC can miss a newly
- // created reference to an object, and end up unlinking an object that is
- // actually alive.
- //
- // The basic idea of the solution, from "An on-the-fly Reference Counting
- // Garbage Collector for Java" by Levanoni and Petrank, is to notice if an
- // object has had an additional reference to it created during the collection,
- // and if so, don't collect it during the current collection. This avoids having
- // to rerun the scan as in Bacon & Rajan 2001.
- //
- // For cycle collected C++ objects, we modify AddRef to place the object in
- // the purple buffer, in addition to Release. Then, in the CC, we treat any
- // objects in the purple buffer as being alive, after graph building has
- // completed. Because they are in the purple buffer, they will be suspected
- // in the next CC, so there's no danger of leaks. This is imprecise, because
- // we will treat as live an object that has been Released but not AddRefed
- // during graph building, but that's probably rare enough that the additional
- // bookkeeping overhead is not worthwhile.
- //
- // For JS objects, the cycle collector is only looking at gray objects. If a
- // gray object is touched during ICC, it will be made black by UnmarkGray.
- // Thus, if a JS object has become black during the ICC, we treat it as live.
- // Merged JS zones have to be handled specially: we scan all zone globals.
- // If any are black, we treat the zone as being black.
- //
- // Safety
- //
- // An XPCOM object is either scan-safe or scan-unsafe, purple-safe or
- // purple-unsafe.
- //
- // An nsISupports object is scan-safe if:
- //
- // - It can be QI'ed to |nsXPCOMCycleCollectionParticipant|, though
- // this operation loses ISupports identity (like nsIClassInfo).
- // - Additionally, the operation |traverse| on the resulting
- // nsXPCOMCycleCollectionParticipant does not cause *any* refcount
- // adjustment to occur (no AddRef / Release calls).
- //
- // A non-nsISupports ("native") object is scan-safe by explicitly
- // providing its nsCycleCollectionParticipant.
- //
- // An object is purple-safe if it satisfies the following properties:
- //
- // - The object is scan-safe.
- //
- // When we receive a pointer |ptr| via
- // |nsCycleCollector::suspect(ptr)|, we assume it is purple-safe. We
- // can check the scan-safety, but have no way to ensure the
- // purple-safety; objects must obey, or else the entire system falls
- // apart. Don't involve an object in this scheme if you can't
- // guarantee its purple-safety. The easiest way to ensure that an
- // object is purple-safe is to use nsCycleCollectingAutoRefCnt.
- //
- // When we have a scannable set of purple nodes ready, we begin
- // our walks. During the walks, the nodes we |traverse| should only
- // feed us more scan-safe nodes, and should not adjust the refcounts
- // of those nodes.
- //
- // We do not |AddRef| or |Release| any objects during scanning. We
- // rely on the purple-safety of the roots that call |suspect| to
- // hold, such that we will clear the pointer from the purple buffer
- // entry to the object before it is destroyed. The pointers that are
- // merely scan-safe we hold only for the duration of scanning, and
- // there should be no objects released from the scan-safe set during
- // the scan.
- //
- // We *do* call |Root| and |Unroot| on every white object, on
- // either side of the calls to |Unlink|. This keeps the set of white
- // objects alive during the unlinking.
- //
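- // A condensed sketch of the Root/Unlink/Unroot ordering described above
- // (illustrative only; the real logic, with error checks and logging, lives
- // in nsCycleCollector::CollectWhite):
- //
- //   for (PtrInfo* pi : whiteNodes) pi->mParticipant->Root(pi->mPointer);
- //   for (PtrInfo* pi : whiteNodes) pi->mParticipant->Unlink(pi->mPointer);
- //   for (PtrInfo* pi : whiteNodes) pi->mParticipant->Unroot(pi->mPointer);
- //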
- #if !defined(__MINGW32__)
- #ifdef WIN32
- #include <crtdbg.h>
- #include <errno.h>
- #endif
- #endif
- #include "base/process_util.h"
- #include "mozilla/ArrayUtils.h"
- #include "mozilla/AutoRestore.h"
- #include "mozilla/CycleCollectedJSContext.h"
- #include "mozilla/DebugOnly.h"
- #include "mozilla/HoldDropJSObjects.h"
- /* This must occur *after* base/process_util.h to avoid typedef conflicts. */
- #include "mozilla/LinkedList.h"
- #include "mozilla/MemoryReporting.h"
- #include "mozilla/Move.h"
- #include "mozilla/SegmentedVector.h"
- #include "nsCycleCollectionParticipant.h"
- #include "nsCycleCollectionNoteRootCallback.h"
- #include "nsDeque.h"
- #include "nsCycleCollector.h"
- #include "nsThreadUtils.h"
- #include "nsXULAppAPI.h"
- #include "prenv.h"
- #include "nsPrintfCString.h"
- #include "nsTArray.h"
- #include "nsIConsoleService.h"
- #include "mozilla/Attributes.h"
- #include "nsICycleCollectorListener.h"
- #include "nsIMemoryReporter.h"
- #include "nsIFile.h"
- #include "nsDumpUtils.h"
- #include "xpcpublic.h"
- #include "GeckoProfiler.h"
- #include <stdint.h>
- #include <stdio.h>
- #include "mozilla/AutoGlobalTimelineMarker.h"
- #include "mozilla/Likely.h"
- #include "mozilla/PoisonIOInterposer.h"
- #include "mozilla/ThreadLocal.h"
- using namespace mozilla;
- //#define COLLECT_TIME_DEBUG
- // Enable assertions that are useful for diagnosing errors in graph construction.
- //#define DEBUG_CC_GRAPH
- #define DEFAULT_SHUTDOWN_COLLECTIONS 5
- // One to do the freeing, then another to detect there is no more work to do.
- #define NORMAL_SHUTDOWN_COLLECTIONS 2
- // Cycle collector environment variables
- //
- // MOZ_CC_LOG_ALL: If defined, always log cycle collector heaps.
- //
- // MOZ_CC_LOG_SHUTDOWN: If defined, log cycle collector heaps at shutdown.
- //
- // MOZ_CC_LOG_THREAD: If set to "main", only automatically log main thread
- // CCs. If set to "worker", only automatically log worker CCs. If set to "all",
- // log either. The default value is "all". This must be used with either
- // MOZ_CC_LOG_ALL or MOZ_CC_LOG_SHUTDOWN for it to do anything.
- //
- // MOZ_CC_LOG_PROCESS: If set to "main", only automatically log main process
- // CCs. If set to "content", only automatically log tab CCs. If set to
- // "plugins", only automatically log plugin CCs. If set to "all", log
- // everything. The default value is "all". This must be used with either
- // MOZ_CC_LOG_ALL or MOZ_CC_LOG_SHUTDOWN for it to do anything.
- //
- // MOZ_CC_ALL_TRACES: If set to "all", any cycle collector
- // logging done will be WantAllTraces, which disables
- // various cycle collector optimizations to give a fuller picture of
- // the heap. If set to "shutdown", only shutdown logging will be WantAllTraces.
- // The default is none.
- //
- // MOZ_CC_RUN_DURING_SHUTDOWN: In non-DEBUG builds, if this is set,
- // run cycle collections at shutdown.
- //
- // MOZ_CC_LOG_DIRECTORY: The directory in which logs are placed (such as
- // logs from MOZ_CC_LOG_ALL and MOZ_CC_LOG_SHUTDOWN, or other uses
- // of nsICycleCollectorListener)
- // Various parameters of this collector can be tuned using environment
- // variables.
- struct nsCycleCollectorParams
- {
- bool mLogAll;
- bool mLogShutdown;
- bool mAllTracesAll;
- bool mAllTracesShutdown;
- bool mLogThisThread;
- nsCycleCollectorParams() :
- mLogAll(PR_GetEnv("MOZ_CC_LOG_ALL") != nullptr),
- mLogShutdown(PR_GetEnv("MOZ_CC_LOG_SHUTDOWN") != nullptr),
- mAllTracesAll(false),
- mAllTracesShutdown(false)
- {
- const char* logThreadEnv = PR_GetEnv("MOZ_CC_LOG_THREAD");
- bool threadLogging = true;
- if (logThreadEnv && !!strcmp(logThreadEnv, "all")) {
- if (NS_IsMainThread()) {
- threadLogging = !strcmp(logThreadEnv, "main");
- } else {
- threadLogging = !strcmp(logThreadEnv, "worker");
- }
- }
- const char* logProcessEnv = PR_GetEnv("MOZ_CC_LOG_PROCESS");
- bool processLogging = true;
- if (logProcessEnv && !!strcmp(logProcessEnv, "all")) {
- switch (XRE_GetProcessType()) {
- case GeckoProcessType_Default:
- processLogging = !strcmp(logProcessEnv, "main");
- break;
- case GeckoProcessType_Plugin:
- processLogging = !strcmp(logProcessEnv, "plugins");
- break;
- case GeckoProcessType_Content:
- processLogging = !strcmp(logProcessEnv, "content");
- break;
- default:
- processLogging = false;
- break;
- }
- }
- mLogThisThread = threadLogging && processLogging;
- const char* allTracesEnv = PR_GetEnv("MOZ_CC_ALL_TRACES");
- if (allTracesEnv) {
- if (!strcmp(allTracesEnv, "all")) {
- mAllTracesAll = true;
- } else if (!strcmp(allTracesEnv, "shutdown")) {
- mAllTracesShutdown = true;
- }
- }
- }
- bool LogThisCC(bool aIsShutdown)
- {
- return (mLogAll || (aIsShutdown && mLogShutdown)) && mLogThisThread;
- }
- bool AllTracesThisCC(bool aIsShutdown)
- {
- return mAllTracesAll || (aIsShutdown && mAllTracesShutdown);
- }
- };
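- // Example of how these knobs combine (hypothetical environment): with
- // MOZ_CC_LOG_SHUTDOWN set and MOZ_CC_LOG_THREAD=main, only main-thread
- // shutdown collections are logged.
- //
- //   nsCycleCollectorParams params;  // reads the environment at construction
- //   bool shouldLog = params.LogThisCC(/* aIsShutdown = */ true);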
- #ifdef COLLECT_TIME_DEBUG
- class TimeLog
- {
- public:
- TimeLog() : mLastCheckpoint(TimeStamp::Now())
- {
- }
- void
- Checkpoint(const char* aEvent)
- {
- TimeStamp now = TimeStamp::Now();
- double dur = (now - mLastCheckpoint).ToMilliseconds();
- if (dur >= 0.5) {
- printf("cc: %s took %.1fms\n", aEvent, dur);
- }
- mLastCheckpoint = now;
- }
- private:
- TimeStamp mLastCheckpoint;
- };
- #else
- class TimeLog
- {
- public:
- TimeLog()
- {
- }
- void Checkpoint(const char* aEvent)
- {
- }
- };
- #endif
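- // Usage sketch: Checkpoint() is a no-op unless COLLECT_TIME_DEBUG is
- // defined, in which case phases that took at least 0.5ms are printed.
- //
- //   TimeLog timeLog;
- //   // ... run one phase of the collection ...
- //   timeLog.Checkpoint("ScanRoots");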
- ////////////////////////////////////////////////////////////////////////
- // Base types
- ////////////////////////////////////////////////////////////////////////
- struct PtrInfo;
- class EdgePool
- {
- public:
- // EdgePool allocates arrays of void*, primarily to hold PtrInfo*.
- // However, at the end of a block, the last two pointers are a null
- // and then a void** pointing to the next block. This allows
- // EdgePool::Iterators to be a single word but still capable of crossing
- // block boundaries.
- EdgePool()
- {
- mSentinelAndBlocks[0].block = nullptr;
- mSentinelAndBlocks[1].block = nullptr;
- }
- ~EdgePool()
- {
- MOZ_ASSERT(!mSentinelAndBlocks[0].block &&
- !mSentinelAndBlocks[1].block,
- "Didn't call Clear()?");
- }
- void Clear()
- {
- EdgeBlock* b = EdgeBlocks();
- while (b) {
- EdgeBlock* next = b->Next();
- delete b;
- b = next;
- }
- mSentinelAndBlocks[0].block = nullptr;
- mSentinelAndBlocks[1].block = nullptr;
- }
- #ifdef DEBUG
- bool IsEmpty()
- {
- return !mSentinelAndBlocks[0].block &&
- !mSentinelAndBlocks[1].block;
- }
- #endif
- private:
- struct EdgeBlock;
- union PtrInfoOrBlock
- {
- // Use a union to avoid reinterpret_cast and the ensuing
- // potential aliasing bugs.
- PtrInfo* ptrInfo;
- EdgeBlock* block;
- };
- struct EdgeBlock
- {
- enum { EdgeBlockSize = 16 * 1024 };
- PtrInfoOrBlock mPointers[EdgeBlockSize];
- EdgeBlock()
- {
- mPointers[EdgeBlockSize - 2].block = nullptr; // sentinel
- mPointers[EdgeBlockSize - 1].block = nullptr; // next block pointer
- }
- EdgeBlock*& Next()
- {
- return mPointers[EdgeBlockSize - 1].block;
- }
- PtrInfoOrBlock* Start()
- {
- return &mPointers[0];
- }
- PtrInfoOrBlock* End()
- {
- return &mPointers[EdgeBlockSize - 2];
- }
- };
- // Store the null sentinel so that we can have valid iterators
- // before adding any edges and without adding any blocks.
- PtrInfoOrBlock mSentinelAndBlocks[2];
- EdgeBlock*& EdgeBlocks()
- {
- return mSentinelAndBlocks[1].block;
- }
- EdgeBlock* EdgeBlocks() const
- {
- return mSentinelAndBlocks[1].block;
- }
- public:
- class Iterator
- {
- public:
- Iterator() : mPointer(nullptr) {}
- explicit Iterator(PtrInfoOrBlock* aPointer) : mPointer(aPointer) {}
- Iterator(const Iterator& aOther) : mPointer(aOther.mPointer) {}
- Iterator& operator++()
- {
- if (!mPointer->ptrInfo) {
- // Null pointer is a sentinel for link to the next block.
- mPointer = (mPointer + 1)->block->mPointers;
- }
- ++mPointer;
- return *this;
- }
- PtrInfo* operator*() const
- {
- if (!mPointer->ptrInfo) {
- // Null pointer is a sentinel for link to the next block.
- return (mPointer + 1)->block->mPointers->ptrInfo;
- }
- return mPointer->ptrInfo;
- }
- bool operator==(const Iterator& aOther) const
- {
- return mPointer == aOther.mPointer;
- }
- bool operator!=(const Iterator& aOther) const
- {
- return mPointer != aOther.mPointer;
- }
- #ifdef DEBUG_CC_GRAPH
- bool Initialized() const
- {
- return mPointer != nullptr;
- }
- #endif
- private:
- PtrInfoOrBlock* mPointer;
- };
- class Builder;
- friend class Builder;
- class Builder
- {
- public:
- explicit Builder(EdgePool& aPool)
- : mCurrent(&aPool.mSentinelAndBlocks[0])
- , mBlockEnd(&aPool.mSentinelAndBlocks[0])
- , mNextBlockPtr(&aPool.EdgeBlocks())
- {
- }
- Iterator Mark()
- {
- return Iterator(mCurrent);
- }
- void Add(PtrInfo* aEdge)
- {
- if (mCurrent == mBlockEnd) {
- EdgeBlock* b = new EdgeBlock();
- *mNextBlockPtr = b;
- mCurrent = b->Start();
- mBlockEnd = b->End();
- mNextBlockPtr = &b->Next();
- }
- (mCurrent++)->ptrInfo = aEdge;
- }
- private:
- // mBlockEnd points to space for null sentinel
- PtrInfoOrBlock* mCurrent;
- PtrInfoOrBlock* mBlockEnd;
- EdgeBlock** mNextBlockPtr;
- };
- size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
- {
- size_t n = 0;
- EdgeBlock* b = EdgeBlocks();
- while (b) {
- n += aMallocSizeOf(b);
- b = b->Next();
- }
- return n;
- }
- };
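- // Sketch of the intended Builder/Iterator protocol (illustrative; the real
- // caller is the graph builder): mark the position before appending a node's
- // out-edges, append them, and record the marks as the node's child range.
- //
- //   EdgePool::Builder edgeBuilder(graph.mEdges);
- //   pi->SetFirstChild(edgeBuilder.Mark());
- //   edgeBuilder.Add(childPtrInfo);            // one Add() per out-edge
- //   // ...when the next node starts, its Mark() doubles as this node's
- //   // LastChild(), via PtrInfo::SetLastChild().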
- #ifdef DEBUG_CC_GRAPH
- #define CC_GRAPH_ASSERT(b) MOZ_ASSERT(b)
- #else
- #define CC_GRAPH_ASSERT(b)
- #endif
- enum NodeColor { black, white, grey };
- // This structure should be kept as small as possible; we may expect
- // hundreds of thousands of them to be allocated and touched
- // repeatedly during each cycle collection.
- struct PtrInfo
- {
- void* mPointer;
- nsCycleCollectionParticipant* mParticipant;
- uint32_t mColor : 2;
- uint32_t mInternalRefs : 30;
- uint32_t mRefCount;
- private:
- EdgePool::Iterator mFirstChild;
- static const uint32_t kInitialRefCount = UINT32_MAX - 1;
- public:
- PtrInfo(void* aPointer, nsCycleCollectionParticipant* aParticipant)
- : mPointer(aPointer),
- mParticipant(aParticipant),
- mColor(grey),
- mInternalRefs(0),
- mRefCount(kInitialRefCount),
- mFirstChild()
- {
- MOZ_ASSERT(aParticipant);
- // We initialize mRefCount to a large non-zero value so
- // that it doesn't look like a JS object to the cycle collector
- // in the case where the object dies before being traversed.
- MOZ_ASSERT(!IsGrayJS() && !IsBlackJS());
- }
- // Allow NodePool::NodeBlock's constructor to compile.
- PtrInfo()
- {
- NS_NOTREACHED("should never be called");
- }
- bool IsGrayJS() const
- {
- return mRefCount == 0;
- }
- bool IsBlackJS() const
- {
- return mRefCount == UINT32_MAX;
- }
- bool WasTraversed() const
- {
- return mRefCount != kInitialRefCount;
- }
- EdgePool::Iterator FirstChild() const
- {
- CC_GRAPH_ASSERT(mFirstChild.Initialized());
- return mFirstChild;
- }
- // this PtrInfo must be part of a NodePool
- EdgePool::Iterator LastChild() const
- {
- CC_GRAPH_ASSERT((this + 1)->mFirstChild.Initialized());
- return (this + 1)->mFirstChild;
- }
- void SetFirstChild(EdgePool::Iterator aFirstChild)
- {
- CC_GRAPH_ASSERT(aFirstChild.Initialized());
- mFirstChild = aFirstChild;
- }
- // this PtrInfo must be part of a NodePool
- void SetLastChild(EdgePool::Iterator aLastChild)
- {
- CC_GRAPH_ASSERT(aLastChild.Initialized());
- (this + 1)->mFirstChild = aLastChild;
- }
- };
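- // The child-range accessors above rely on PtrInfos being laid out
- // contiguously in NodePool blocks: node i's LastChild() is node i+1's
- // FirstChild(). The read side (as used by GraphWalker::DoWalk below) is:
- //
- //   for (EdgePool::Iterator child = pi->FirstChild(), end = pi->LastChild();
- //        child != end; ++child) {
- //     PtrInfo* childPi = *child;
- //     // ...
- //   }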
- /**
- * A structure designed to be used like a linked list of PtrInfo, except
- * it allocates many PtrInfos at a time.
- */
- class NodePool
- {
- private:
- // The -2 allows us to use |NodeBlockSize + 1| for |mEntries|, and fit
- // |mNext|, all without causing slop.
- enum { NodeBlockSize = 4 * 1024 - 2 };
- struct NodeBlock
- {
- // We create and destroy NodeBlock using moz_xmalloc/free rather than new
- // and delete to avoid calling its constructor and destructor.
- NodeBlock()
- {
- NS_NOTREACHED("should never be called");
- // Ensure NodeBlock is the right size (see the comment on NodeBlockSize
- // above).
- static_assert(
- sizeof(NodeBlock) == 81904 || // 32-bit; equals 19.996 x 4 KiB pages
- sizeof(NodeBlock) == 131048, // 64-bit; equals 31.994 x 4 KiB pages
- "ill-sized NodeBlock"
- );
- }
- ~NodeBlock()
- {
- NS_NOTREACHED("should never be called");
- }
- NodeBlock* mNext;
- PtrInfo mEntries[NodeBlockSize + 1]; // +1 to store last child of last node
- };
- public:
- NodePool()
- : mBlocks(nullptr)
- , mLast(nullptr)
- {
- }
- ~NodePool()
- {
- MOZ_ASSERT(!mBlocks, "Didn't call Clear()?");
- }
- void Clear()
- {
- NodeBlock* b = mBlocks;
- while (b) {
- NodeBlock* n = b->mNext;
- free(b);
- b = n;
- }
- mBlocks = nullptr;
- mLast = nullptr;
- }
- #ifdef DEBUG
- bool IsEmpty()
- {
- return !mBlocks && !mLast;
- }
- #endif
- class Builder;
- friend class Builder;
- class Builder
- {
- public:
- explicit Builder(NodePool& aPool)
- : mNextBlock(&aPool.mBlocks)
- , mNext(aPool.mLast)
- , mBlockEnd(nullptr)
- {
- MOZ_ASSERT(!aPool.mBlocks && !aPool.mLast, "pool not empty");
- }
- PtrInfo* Add(void* aPointer, nsCycleCollectionParticipant* aParticipant)
- {
- if (mNext == mBlockEnd) {
- NodeBlock* block = static_cast<NodeBlock*>(malloc(sizeof(NodeBlock)));
- if (!block) {
- return nullptr;
- }
- *mNextBlock = block;
- mNext = block->mEntries;
- mBlockEnd = block->mEntries + NodeBlockSize;
- block->mNext = nullptr;
- mNextBlock = &block->mNext;
- }
- return new (mozilla::KnownNotNull, mNext++) PtrInfo(aPointer, aParticipant);
- }
- private:
- NodeBlock** mNextBlock;
- PtrInfo*& mNext;
- PtrInfo* mBlockEnd;
- };
- class Enumerator;
- friend class Enumerator;
- class Enumerator
- {
- public:
- explicit Enumerator(NodePool& aPool)
- : mFirstBlock(aPool.mBlocks)
- , mCurBlock(nullptr)
- , mNext(nullptr)
- , mBlockEnd(nullptr)
- , mLast(aPool.mLast)
- {
- }
- bool IsDone() const
- {
- return mNext == mLast;
- }
- bool AtBlockEnd() const
- {
- return mNext == mBlockEnd;
- }
- PtrInfo* GetNext()
- {
- MOZ_ASSERT(!IsDone(), "calling GetNext when done");
- if (mNext == mBlockEnd) {
- NodeBlock* nextBlock = mCurBlock ? mCurBlock->mNext : mFirstBlock;
- mNext = nextBlock->mEntries;
- mBlockEnd = mNext + NodeBlockSize;
- mCurBlock = nextBlock;
- }
- return mNext++;
- }
- private:
- // mFirstBlock is a reference to allow an Enumerator to be constructed
- // for an empty graph.
- NodeBlock*& mFirstBlock;
- NodeBlock* mCurBlock;
- // mNext is the next value we want to return, unless mNext == mBlockEnd
- // NB: mLast is a reference to allow enumerating while building!
- PtrInfo* mNext;
- PtrInfo* mBlockEnd;
- PtrInfo*& mLast;
- };
- size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
- {
- // We don't measure the things pointed to by mEntries[] because those
- // pointers are non-owning.
- size_t n = 0;
- NodeBlock* b = mBlocks;
- while (b) {
- n += aMallocSizeOf(b);
- b = b->mNext;
- }
- return n;
- }
- private:
- NodeBlock* mBlocks;
- PtrInfo* mLast;
- };
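- // Usage sketch (illustrative): nodes are appended with Builder::Add while
- // the graph is built and revisited with Enumerator afterwards; because
- // Enumerator holds references into the pool, it can also be constructed
- // before building finishes.
- //
- //   NodePool::Builder nodeBuilder(graph.mNodes);
- //   PtrInfo* pi = nodeBuilder.Add(aPtr, aParticipant);  // null on OOM
- //
- //   NodePool::Enumerator etor(graph.mNodes);
- //   while (!etor.IsDone()) {
- //     PtrInfo* node = etor.GetNext();
- //     // ...
- //   }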
- // Declarations for mPtrToNodeMap.
- struct PtrToNodeEntry : public PLDHashEntryHdr
- {
- // The key is mNode->mPointer
- PtrInfo* mNode;
- };
- static bool
- PtrToNodeMatchEntry(const PLDHashEntryHdr* aEntry, const void* aKey)
- {
- const PtrToNodeEntry* n = static_cast<const PtrToNodeEntry*>(aEntry);
- return n->mNode->mPointer == aKey;
- }
- static PLDHashTableOps PtrNodeOps = {
- PLDHashTable::HashVoidPtrKeyStub,
- PtrToNodeMatchEntry,
- PLDHashTable::MoveEntryStub,
- PLDHashTable::ClearEntryStub,
- nullptr
- };
- struct WeakMapping
- {
- // map and key will be null if the corresponding objects are GC marked
- PtrInfo* mMap;
- PtrInfo* mKey;
- PtrInfo* mKeyDelegate;
- PtrInfo* mVal;
- };
- class CCGraphBuilder;
- struct CCGraph
- {
- NodePool mNodes;
- EdgePool mEdges;
- nsTArray<WeakMapping> mWeakMaps;
- uint32_t mRootCount;
- private:
- PLDHashTable mPtrToNodeMap;
- bool mOutOfMemory;
- static const uint32_t kInitialMapLength = 16384;
- public:
- CCGraph()
- : mRootCount(0)
- , mPtrToNodeMap(&PtrNodeOps, sizeof(PtrToNodeEntry), kInitialMapLength)
- , mOutOfMemory(false)
- {}
- ~CCGraph() {}
- void Init()
- {
- MOZ_ASSERT(IsEmpty(), "Failed to call CCGraph::Clear");
- }
- void Clear()
- {
- mNodes.Clear();
- mEdges.Clear();
- mWeakMaps.Clear();
- mRootCount = 0;
- mPtrToNodeMap.ClearAndPrepareForLength(kInitialMapLength);
- mOutOfMemory = false;
- }
- #ifdef DEBUG
- bool IsEmpty()
- {
- return mNodes.IsEmpty() && mEdges.IsEmpty() &&
- mWeakMaps.IsEmpty() && mRootCount == 0 &&
- mPtrToNodeMap.EntryCount() == 0;
- }
- #endif
- PtrInfo* FindNode(void* aPtr);
- PtrToNodeEntry* AddNodeToMap(void* aPtr);
- void RemoveObjectFromMap(void* aObject);
- uint32_t MapCount() const
- {
- return mPtrToNodeMap.EntryCount();
- }
- size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
- {
- size_t n = 0;
- n += mNodes.SizeOfExcludingThis(aMallocSizeOf);
- n += mEdges.SizeOfExcludingThis(aMallocSizeOf);
- // We don't measure what the WeakMappings point to, because the
- // pointers are non-owning.
- n += mWeakMaps.ShallowSizeOfExcludingThis(aMallocSizeOf);
- n += mPtrToNodeMap.ShallowSizeOfExcludingThis(aMallocSizeOf);
- return n;
- }
- private:
- PtrToNodeEntry* FindNodeEntry(void* aPtr)
- {
- return static_cast<PtrToNodeEntry*>(mPtrToNodeMap.Search(aPtr));
- }
- };
- PtrInfo*
- CCGraph::FindNode(void* aPtr)
- {
- PtrToNodeEntry* e = FindNodeEntry(aPtr);
- return e ? e->mNode : nullptr;
- }
- PtrToNodeEntry*
- CCGraph::AddNodeToMap(void* aPtr)
- {
- JS::AutoSuppressGCAnalysis suppress;
- if (mOutOfMemory) {
- return nullptr;
- }
- auto e = static_cast<PtrToNodeEntry*>(mPtrToNodeMap.Add(aPtr, fallible));
- if (!e) {
- mOutOfMemory = true;
- MOZ_ASSERT(false, "Ran out of memory while building cycle collector graph");
- return nullptr;
- }
- return e;
- }
- void
- CCGraph::RemoveObjectFromMap(void* aObj)
- {
- PtrToNodeEntry* e = FindNodeEntry(aObj);
- PtrInfo* pinfo = e ? e->mNode : nullptr;
- if (pinfo) {
- mPtrToNodeMap.RemoveEntry(e);
- pinfo->mPointer = nullptr;
- pinfo->mParticipant = nullptr;
- }
- }
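- // Sketch of the lookup-or-add pattern a graph builder can layer on top of
- // these helpers (simplified; hypothetical locals):
- //
- //   PtrToNodeEntry* e = graph.AddNodeToMap(aPtr);
- //   if (!e) {
- //     return nullptr;                              // OOM; graph is unusable
- //   }
- //   PtrInfo* pi = e->mNode;
- //   if (!pi) {
- //     pi = nodeBuilder.Add(aPtr, aParticipant);    // first time we see aPtr
- //     e->mNode = pi;
- //   }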
- static nsISupports*
- CanonicalizeXPCOMParticipant(nsISupports* aIn)
- {
- nsISupports* out = nullptr;
- aIn->QueryInterface(NS_GET_IID(nsCycleCollectionISupports),
- reinterpret_cast<void**>(&out));
- return out;
- }
- static inline void
- ToParticipant(nsISupports* aPtr, nsXPCOMCycleCollectionParticipant** aCp);
- static void
- CanonicalizeParticipant(void** aParti, nsCycleCollectionParticipant** aCp)
- {
- // If the participant is null, this is an nsISupports participant,
- // so we must QI to get the real participant.
- if (!*aCp) {
- nsISupports* nsparti = static_cast<nsISupports*>(*aParti);
- nsparti = CanonicalizeXPCOMParticipant(nsparti);
- NS_ASSERTION(nsparti,
- "Don't add objects that don't participate in collection!");
- nsXPCOMCycleCollectionParticipant* xcp;
- ToParticipant(nsparti, &xcp);
- *aParti = nsparti;
- *aCp = xcp;
- }
- }
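- // Illustrative call pattern (hypothetical locals): a suspected nsISupports
- // arrives with a null participant; canonicalization fills both out-params
- // before the pointer is added to the graph.
- //
- //   void* ptr = static_cast<void*>(someSupports);
- //   nsCycleCollectionParticipant* cp = nullptr;   // null => nsISupports case
- //   CanonicalizeParticipant(&ptr, &cp);
- //   // ptr is now the canonical nsISupports; cp is its participant.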
- struct nsPurpleBufferEntry
- {
- nsPurpleBufferEntry(void* aObject, nsCycleCollectingAutoRefCnt* aRefCnt,
- nsCycleCollectionParticipant* aParticipant)
- : mObject(aObject)
- , mRefCnt(aRefCnt)
- , mParticipant(aParticipant)
- {}
- nsPurpleBufferEntry(nsPurpleBufferEntry&& aOther)
- : mObject(nullptr)
- , mRefCnt(nullptr)
- , mParticipant(nullptr)
- {
- Swap(aOther);
- }
- void Swap(nsPurpleBufferEntry& aOther) {
- std::swap(mObject, aOther.mObject);
- std::swap(mRefCnt, aOther.mRefCnt);
- std::swap(mParticipant, aOther.mParticipant);
- }
- void Clear() {
- mRefCnt->RemoveFromPurpleBuffer();
- mRefCnt = nullptr;
- mObject = nullptr;
- mParticipant = nullptr;
- }
- ~nsPurpleBufferEntry() {
- if (mRefCnt) {
- mRefCnt->RemoveFromPurpleBuffer();
- }
- }
- void* mObject;
- nsCycleCollectingAutoRefCnt* mRefCnt;
- nsCycleCollectionParticipant* mParticipant; // nullptr for nsISupports
- };
- class nsCycleCollector;
- struct nsPurpleBuffer
- {
- private:
- uint32_t mCount;
- // Try to match the size of a jemalloc bucket, to minimize slop bytes.
- // - On 32-bit platforms sizeof(nsPurpleBufferEntry) is 12, so mEntries'
- // Segment is 16,372 bytes.
- // - On 64-bit platforms sizeof(nsPurpleBufferEntry) is 24, so mEntries'
- // Segment is 32,760 bytes.
- static const uint32_t kEntriesPerSegment = 1365;
- static const size_t kSegmentSize =
- sizeof(nsPurpleBufferEntry) * kEntriesPerSegment;
- typedef
- SegmentedVector<nsPurpleBufferEntry, kSegmentSize, InfallibleAllocPolicy>
- PurpleBufferVector;
- PurpleBufferVector mEntries;
- public:
- nsPurpleBuffer()
- : mCount(0)
- {
- static_assert(
- sizeof(PurpleBufferVector::Segment) == 16372 || // 32-bit
- sizeof(PurpleBufferVector::Segment) == 32760 || // 64-bit
- sizeof(PurpleBufferVector::Segment) == 32744, // 64-bit Windows
- "ill-sized nsPurpleBuffer::mEntries");
- }
- ~nsPurpleBuffer()
- {}
- // This method compacts mEntries.
- template<class PurpleVisitor>
- void VisitEntries(PurpleVisitor& aVisitor) {
- if (mEntries.IsEmpty()) {
- return;
- }
- uint32_t oldLength = mEntries.Length();
- uint32_t newLength = 0;
- auto revIter = mEntries.IterFromLast();
- auto iter = mEntries.Iter();
- // After iteration this points to the first empty entry.
- auto firstEmptyIter = mEntries.Iter();
- auto iterFromLastEntry = mEntries.IterFromLast();
- for (; !iter.Done(); iter.Next()) {
- nsPurpleBufferEntry& e = iter.Get();
- if (e.mObject) {
- aVisitor.Visit(*this, &e);
- }
- // Visit call above may have cleared the entry, or the entry was empty
- // already.
- if (!e.mObject) {
- // Try to find a non-empty entry from the end of the vector.
- for (; !revIter.Done(); revIter.Prev()) {
- nsPurpleBufferEntry& otherEntry = revIter.Get();
- if (&e == &otherEntry) {
- break;
- }
- if (otherEntry.mObject) {
- aVisitor.Visit(*this, &otherEntry);
- // Visit may have cleared otherEntry.
- if (otherEntry.mObject) {
- e.Swap(otherEntry);
- revIter.Prev(); // We've swapped this now empty entry.
- break;
- }
- }
- }
- }
- // Entry is non-empty even after the Visit call, ensure it is kept
- // in mEntries.
- if (e.mObject) {
- firstEmptyIter.Next();
- ++newLength;
- }
- if (&e == &revIter.Get()) {
- break;
- }
- }
- // There were some empty entries.
- if (oldLength != newLength) {
- // While visiting entries, some new ones were possibly added. This can
- // happen during CanSkip. Move all such new entries to be after other
- // entries. Note, we don't call Visit on newly added entries!
- if (&iterFromLastEntry.Get() != &mEntries.GetLast()) {
- iterFromLastEntry.Next(); // Now pointing to the first added entry.
- auto& iterForNewEntries = iterFromLastEntry;
- while (!iterForNewEntries.Done()) {
- MOZ_ASSERT(!firstEmptyIter.Done());
- MOZ_ASSERT(!firstEmptyIter.Get().mObject);
- firstEmptyIter.Get().Swap(iterForNewEntries.Get());
- firstEmptyIter.Next();
- iterForNewEntries.Next();
- ++newLength; // We keep all the new entries.
- }
- }
- mEntries.PopLastN(oldLength - newLength);
- }
- }
- void FreeBlocks() {
- mCount = 0;
- mEntries.Clear();
- }
- void SelectPointers(CCGraphBuilder& aBuilder);
- // RemoveSkippable removes entries from the purple buffer synchronously
- // (1) if aAsyncSnowWhiteFreeing is false and nsPurpleBufferEntry::mRefCnt is 0 or
- // (2) if the object's nsXPCOMCycleCollectionParticipant::CanSkip() returns true or
- // (3) if nsPurpleBufferEntry::mRefCnt->IsPurple() is false.
- // (4) If removeChildlessNodes is true, then any nodes in the purple buffer
- // that will have no children in the cycle collector graph will also be
- // removed. CanSkip() may be run on these children.
- void RemoveSkippable(nsCycleCollector* aCollector,
- bool aRemoveChildlessNodes,
- bool aAsyncSnowWhiteFreeing,
- CC_ForgetSkippableCallback aCb);
- MOZ_ALWAYS_INLINE void Put(void* aObject, nsCycleCollectionParticipant* aCp,
- nsCycleCollectingAutoRefCnt* aRefCnt) {
- nsPurpleBufferEntry entry(aObject, aRefCnt, aCp);
- Unused << mEntries.Append(Move(entry));
- MOZ_ASSERT(!entry.mRefCnt, "CC: PurpleBufferEntry:Put() Move failed!");
- ++mCount;
- }
- void Remove(nsPurpleBufferEntry* aEntry) {
- MOZ_ASSERT(mCount != 0, "must have entries");
- --mCount;
- aEntry->Clear();
- }
- uint32_t Count() const {
- return mCount;
- }
- size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
- return mEntries.SizeOfExcludingThis(aMallocSizeOf);
- }
- };
- static bool
- AddPurpleRoot(CCGraphBuilder& aBuilder, void* aRoot,
- nsCycleCollectionParticipant* aParti);
- struct SelectPointersVisitor
- {
- explicit SelectPointersVisitor(CCGraphBuilder& aBuilder)
- : mBuilder(aBuilder)
- {
- }
- void
- Visit(nsPurpleBuffer& aBuffer, nsPurpleBufferEntry* aEntry)
- {
- MOZ_ASSERT(aEntry->mObject, "Null object in purple buffer");
- MOZ_ASSERT(aEntry->mRefCnt->get() != 0,
- "SelectPointersVisitor: snow-white object in the purple buffer");
- if (!aEntry->mRefCnt->IsPurple() ||
- AddPurpleRoot(mBuilder, aEntry->mObject, aEntry->mParticipant)) {
- aBuffer.Remove(aEntry);
- }
- }
- private:
- CCGraphBuilder& mBuilder;
- };
- void
- nsPurpleBuffer::SelectPointers(CCGraphBuilder& aBuilder) {
- SelectPointersVisitor visitor(aBuilder);
- VisitEntries(visitor);
- NS_ASSERTION(mCount == 0, "AddPurpleRoot failed");
- if (mCount == 0) {
- FreeBlocks();
- }
- }
- enum ccPhase
- {
- IdlePhase,
- GraphBuildingPhase,
- ScanAndCollectWhitePhase,
- CleanupPhase
- };
- enum ccType
- {
- SliceCC, /* If a CC is in progress, continue it. Otherwise, start a new one. */
- ManualCC, /* Explicitly triggered. */
- ShutdownCC /* Shutdown CC, used for finding leaks. */
- };
- ////////////////////////////////////////////////////////////////////////
- // Top level structure for the cycle collector.
- ////////////////////////////////////////////////////////////////////////
- using js::SliceBudget;
- class JSPurpleBuffer;
- class nsCycleCollector : public nsIMemoryReporter
- {
- public:
- NS_DECL_ISUPPORTS
- NS_DECL_NSIMEMORYREPORTER
- private:
- bool mActivelyCollecting;
- bool mFreeingSnowWhite;
- // mScanInProgress should be false when we're collecting white objects.
- bool mScanInProgress;
- CycleCollectorResults mResults;
- TimeStamp mCollectionStart;
- CycleCollectedJSContext* mJSContext;
- ccPhase mIncrementalPhase;
- CCGraph mGraph;
- nsAutoPtr<CCGraphBuilder> mBuilder;
- RefPtr<nsCycleCollectorLogger> mLogger;
- #ifdef DEBUG
- void* mThread;
- #endif
- nsCycleCollectorParams mParams;
- uint32_t mWhiteNodeCount;
- CC_BeforeUnlinkCallback mBeforeUnlinkCB;
- CC_ForgetSkippableCallback mForgetSkippableCB;
- nsPurpleBuffer mPurpleBuf;
- uint32_t mUnmergedNeeded;
- uint32_t mMergedInARow;
- RefPtr<JSPurpleBuffer> mJSPurpleBuffer;
- private:
- virtual ~nsCycleCollector();
- public:
- nsCycleCollector();
- void RegisterJSContext(CycleCollectedJSContext* aJSContext);
- void ForgetJSContext();
- void SetBeforeUnlinkCallback(CC_BeforeUnlinkCallback aBeforeUnlinkCB)
- {
- CheckThreadSafety();
- mBeforeUnlinkCB = aBeforeUnlinkCB;
- }
- void SetForgetSkippableCallback(CC_ForgetSkippableCallback aForgetSkippableCB)
- {
- CheckThreadSafety();
- mForgetSkippableCB = aForgetSkippableCB;
- }
- void Suspect(void* aPtr, nsCycleCollectionParticipant* aCp,
- nsCycleCollectingAutoRefCnt* aRefCnt);
- uint32_t SuspectedCount();
- void ForgetSkippable(bool aRemoveChildlessNodes, bool aAsyncSnowWhiteFreeing);
- bool FreeSnowWhite(bool aUntilNoSWInPurpleBuffer);
- // This method assumes its argument is already canonicalized.
- void RemoveObjectFromGraph(void* aPtr);
- void PrepareForGarbageCollection();
- void FinishAnyCurrentCollection();
- bool Collect(ccType aCCType,
- SliceBudget& aBudget,
- nsICycleCollectorListener* aManualListener,
- bool aPreferShorterSlices = false);
- void Shutdown(bool aDoCollect);
- bool IsIdle() const { return mIncrementalPhase == IdlePhase; }
- void SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf,
- size_t* aObjectSize,
- size_t* aGraphSize,
- size_t* aPurpleBufferSize) const;
- JSPurpleBuffer* GetJSPurpleBuffer();
- CycleCollectedJSContext* Context() { return mJSContext; }
- private:
- void CheckThreadSafety();
- void ShutdownCollect();
- void FixGrayBits(bool aForceGC, TimeLog& aTimeLog);
- bool IsIncrementalGCInProgress();
- void FinishAnyIncrementalGCInProgress();
- bool ShouldMergeZones(ccType aCCType);
- void BeginCollection(ccType aCCType, nsICycleCollectorListener* aManualListener);
- void MarkRoots(SliceBudget& aBudget);
- void ScanRoots(bool aFullySynchGraphBuild);
- void ScanIncrementalRoots();
- void ScanWhiteNodes(bool aFullySynchGraphBuild);
- void ScanBlackNodes();
- void ScanWeakMaps();
- // returns whether anything was collected
- bool CollectWhite();
- void CleanupAfterCollection();
- };
- NS_IMPL_ISUPPORTS(nsCycleCollector, nsIMemoryReporter)
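- // Sketch of how a caller might drive an incremental slice (hypothetical
- // budget value; the real scheduling lives outside this file):
- //
- //   js::SliceBudget budget(js::TimeBudget(10));   // roughly a 10 ms slice
- //   collector->Collect(SliceCC, budget, /* aManualListener = */ nullptr);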
- /**
- * GraphWalker is templatized over a Visitor class that must provide
- * the following two methods:
- *
- * bool ShouldVisitNode(PtrInfo const *pi);
- * void VisitNode(PtrInfo *pi);
- */
- template<class Visitor>
- class GraphWalker
- {
- private:
- Visitor mVisitor;
- void DoWalk(nsDeque& aQueue);
- void CheckedPush(nsDeque& aQueue, PtrInfo* aPi)
- {
- if (!aPi) {
- MOZ_CRASH();
- }
- if (!aQueue.Push(aPi, fallible)) {
- mVisitor.Failed();
- }
- }
- public:
- void Walk(PtrInfo* aPi);
- void WalkFromRoots(CCGraph& aGraph);
- // copy-constructing the visitor should be cheap, and less
- // indirection than using a reference
- explicit GraphWalker(const Visitor aVisitor) : mVisitor(aVisitor)
- {
- }
- };
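- // Minimal visitor sketch (hypothetical ExampleVisitor). Note that in
- // addition to the two documented methods, CheckedPush above also requires a
- // Failed() member for the fallible-push path.
- //
- //   struct ExampleVisitor
- //   {
- //     bool ShouldVisitNode(PtrInfo const* aPi) { return aPi->mColor != black; }
- //     void VisitNode(PtrInfo* aPi) { aPi->mColor = black; }
- //     void Failed() { /* record that the walk was incomplete */ }
- //   };
- //   GraphWalker<ExampleVisitor>(ExampleVisitor()).WalkFromRoots(graph);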
- ////////////////////////////////////////////////////////////////////////
- // The static collector struct
- ////////////////////////////////////////////////////////////////////////
- struct CollectorData
- {
- RefPtr<nsCycleCollector> mCollector;
- CycleCollectedJSContext* mContext;
- };
- static MOZ_THREAD_LOCAL(CollectorData*) sCollectorData;
- ////////////////////////////////////////////////////////////////////////
- // Utility functions
- ////////////////////////////////////////////////////////////////////////
- static inline void
- ToParticipant(nsISupports* aPtr, nsXPCOMCycleCollectionParticipant** aCp)
- {
- // We use QI to move from an nsISupports to an
- // nsXPCOMCycleCollectionParticipant, which is a per-class singleton helper
- // object that implements traversal and unlinking logic for the nsISupports
- // in question.
- *aCp = nullptr;
- CallQueryInterface(aPtr, aCp);
- }
- template<class Visitor>
- MOZ_NEVER_INLINE void
- GraphWalker<Visitor>::Walk(PtrInfo* aPi)
- {
- nsDeque queue;
- CheckedPush(queue, aPi);
- DoWalk(queue);
- }
- template<class Visitor>
- MOZ_NEVER_INLINE void
- GraphWalker<Visitor>::WalkFromRoots(CCGraph& aGraph)
- {
- nsDeque queue;
- NodePool::Enumerator etor(aGraph.mNodes);
- for (uint32_t i = 0; i < aGraph.mRootCount; ++i) {
- CheckedPush(queue, etor.GetNext());
- }
- DoWalk(queue);
- }
- template<class Visitor>
- MOZ_NEVER_INLINE void
- GraphWalker<Visitor>::DoWalk(nsDeque& aQueue)
- {
- // Use aQueue to match the breadth-first traversal used when we
- // built the graph, for hopefully-better locality.
- while (aQueue.GetSize() > 0) {
- PtrInfo* pi = static_cast<PtrInfo*>(aQueue.PopFront());
- if (pi->WasTraversed() && mVisitor.ShouldVisitNode(pi)) {
- mVisitor.VisitNode(pi);
- for (EdgePool::Iterator child = pi->FirstChild(),
- child_end = pi->LastChild();
- child != child_end; ++child) {
- CheckedPush(aQueue, *child);
- }
- }
- }
- }
- struct CCGraphDescriber : public LinkedListElement<CCGraphDescriber>
- {
- CCGraphDescriber()
- : mAddress("0x"), mCnt(0), mType(eUnknown)
- {
- }
- enum Type
- {
- eRefCountedObject,
- eGCedObject,
- eGCMarkedObject,
- eEdge,
- eRoot,
- eGarbage,
- eUnknown
- };
- nsCString mAddress;
- nsCString mName;
- nsCString mCompartmentOrToAddress;
- uint32_t mCnt;
- Type mType;
- };
- class LogStringMessageAsync : public CancelableRunnable
- {
- public:
- explicit LogStringMessageAsync(const nsAString& aMsg) : mMsg(aMsg)
- {}
- NS_IMETHOD Run() override
- {
- nsCOMPtr<nsIConsoleService> cs =
- do_GetService(NS_CONSOLESERVICE_CONTRACTID);
- if (cs) {
- cs->LogStringMessage(mMsg.get());
- }
- return NS_OK;
- }
- private:
- nsString mMsg;
- };
- class nsCycleCollectorLogSinkToFile final : public nsICycleCollectorLogSink
- {
- public:
- NS_DECL_ISUPPORTS
- nsCycleCollectorLogSinkToFile() :
- mProcessIdentifier(base::GetCurrentProcId()),
- mGCLog("gc-edges"), mCCLog("cc-edges")
- {
- }
- NS_IMETHOD GetFilenameIdentifier(nsAString& aIdentifier) override
- {
- aIdentifier = mFilenameIdentifier;
- return NS_OK;
- }
- NS_IMETHOD SetFilenameIdentifier(const nsAString& aIdentifier) override
- {
- mFilenameIdentifier = aIdentifier;
- return NS_OK;
- }
- NS_IMETHOD GetProcessIdentifier(int32_t* aIdentifier) override
- {
- *aIdentifier = mProcessIdentifier;
- return NS_OK;
- }
- NS_IMETHOD SetProcessIdentifier(int32_t aIdentifier) override
- {
- mProcessIdentifier = aIdentifier;
- return NS_OK;
- }
- NS_IMETHOD GetGcLog(nsIFile** aPath) override
- {
- NS_IF_ADDREF(*aPath = mGCLog.mFile);
- return NS_OK;
- }
- NS_IMETHOD GetCcLog(nsIFile** aPath) override
- {
- NS_IF_ADDREF(*aPath = mCCLog.mFile);
- return NS_OK;
- }
- NS_IMETHOD Open(FILE** aGCLog, FILE** aCCLog) override
- {
- nsresult rv;
- if (mGCLog.mStream || mCCLog.mStream) {
- return NS_ERROR_UNEXPECTED;
- }
- rv = OpenLog(&mGCLog);
- NS_ENSURE_SUCCESS(rv, rv);
- *aGCLog = mGCLog.mStream;
- rv = OpenLog(&mCCLog);
- NS_ENSURE_SUCCESS(rv, rv);
- *aCCLog = mCCLog.mStream;
- return NS_OK;
- }
- NS_IMETHOD CloseGCLog() override
- {
- if (!mGCLog.mStream) {
- return NS_ERROR_UNEXPECTED;
- }
- CloseLog(&mGCLog, NS_LITERAL_STRING("Garbage"));
- return NS_OK;
- }
- NS_IMETHOD CloseCCLog() override
- {
- if (!mCCLog.mStream) {
- return NS_ERROR_UNEXPECTED;
- }
- CloseLog(&mCCLog, NS_LITERAL_STRING("Cycle"));
- return NS_OK;
- }
- private:
- ~nsCycleCollectorLogSinkToFile()
- {
- if (mGCLog.mStream) {
- MozillaUnRegisterDebugFILE(mGCLog.mStream);
- fclose(mGCLog.mStream);
- }
- if (mCCLog.mStream) {
- MozillaUnRegisterDebugFILE(mCCLog.mStream);
- fclose(mCCLog.mStream);
- }
- }
- struct FileInfo
- {
- const char* const mPrefix;
- nsCOMPtr<nsIFile> mFile;
- FILE* mStream;
- explicit FileInfo(const char* aPrefix) : mPrefix(aPrefix), mStream(nullptr) { }
- };
- /**
- * Create a new file named something like aPrefix.$PID.$IDENTIFIER.log in
- * $MOZ_CC_LOG_DIRECTORY or in the system's temp directory. No existing
- * file will be overwritten; if aPrefix.$PID.$IDENTIFIER.log exists, we'll
- * try a file named something like aPrefix.$PID.$IDENTIFIER-1.log, and so
- * on.
- */
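- // For example (hypothetical values): prefix "cc-edges", PID 1234 and an
- // empty identifier would yield cc-edges.1234.log, then cc-edges.1234-1.log
- // if that name is already taken.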
- already_AddRefed<nsIFile> CreateTempFile(const char* aPrefix)
- {
- nsPrintfCString filename("%s.%d%s%s.log",
- aPrefix,
- mProcessIdentifier,
- mFilenameIdentifier.IsEmpty() ? "" : ".",
- NS_ConvertUTF16toUTF8(mFilenameIdentifier).get());
- // Get the log directory either from $MOZ_CC_LOG_DIRECTORY or from
- // the fallback directories in OpenTempFile. We don't use an nsCOMPtr
- // here because OpenTempFile uses an in/out param and getter_AddRefs
- // wouldn't work.
- nsIFile* logFile = nullptr;
- if (char* env = PR_GetEnv("MOZ_CC_LOG_DIRECTORY")) {
- NS_NewNativeLocalFile(nsCString(env), /* followLinks = */ true,
- &logFile);
- }
- // On Android or B2G, this function will open a file named
- // aFilename under a memory-reporting-specific folder
- // (/data/local/tmp/memory-reports). Otherwise, it will open a
- // file named aFilename under "NS_OS_TEMP_DIR".
- nsresult rv = nsDumpUtils::OpenTempFile(filename, &logFile,
- NS_LITERAL_CSTRING("memory-reports"));
- if (NS_FAILED(rv)) {
- NS_IF_RELEASE(logFile);
- return nullptr;
- }
- return dont_AddRef(logFile);
- }
- nsresult OpenLog(FileInfo* aLog)
- {
- // Initially create the log in a file starting with "incomplete-".
- // We'll move the file and strip off the "incomplete-" once the dump
- // completes. (We do this because we don't want scripts which poll
- // the filesystem looking for GC/CC dumps to grab a file before we're
- // finished writing to it.)
- nsAutoCString incomplete;
- incomplete += "incomplete-";
- incomplete += aLog->mPrefix;
- MOZ_ASSERT(!aLog->mFile);
- aLog->mFile = CreateTempFile(incomplete.get());
- if (NS_WARN_IF(!aLog->mFile)) {
- return NS_ERROR_UNEXPECTED;
- }
- MOZ_ASSERT(!aLog->mStream);
- nsresult rv = aLog->mFile->OpenANSIFileDesc("w", &aLog->mStream);
- if (NS_WARN_IF(NS_FAILED(rv))) {
- return NS_ERROR_UNEXPECTED;
- }
- MozillaRegisterDebugFILE(aLog->mStream);
- return NS_OK;
- }
- nsresult CloseLog(FileInfo* aLog, const nsAString& aCollectorKind)
- {
- MOZ_ASSERT(aLog->mStream);
- MOZ_ASSERT(aLog->mFile);
- MozillaUnRegisterDebugFILE(aLog->mStream);
- fclose(aLog->mStream);
- aLog->mStream = nullptr;
- // Strip off "incomplete-".
- nsCOMPtr<nsIFile> logFileFinalDestination =
- CreateTempFile(aLog->mPrefix);
- if (NS_WARN_IF(!logFileFinalDestination)) {
- return NS_ERROR_UNEXPECTED;
- }
- nsAutoString logFileFinalDestinationName;
- logFileFinalDestination->GetLeafName(logFileFinalDestinationName);
- if (NS_WARN_IF(logFileFinalDestinationName.IsEmpty())) {
- return NS_ERROR_UNEXPECTED;
- }
- aLog->mFile->MoveTo(/* directory */ nullptr, logFileFinalDestinationName);
- // Save the file path.
- aLog->mFile = logFileFinalDestination;
- // Log to the error console.
- nsAutoString logPath;
- logFileFinalDestination->GetPath(logPath);
- nsAutoString msg = aCollectorKind +
- NS_LITERAL_STRING(" Collector log dumped to ") + logPath;
- // We don't want any JS to run between ScanRoots and CollectWhite calls,
- // and since ScanRoots calls this method, better to log the message
- // asynchronously.
- RefPtr<LogStringMessageAsync> log = new LogStringMessageAsync(msg);
- NS_DispatchToCurrentThread(log);
- return NS_OK;
- }
- int32_t mProcessIdentifier;
- nsString mFilenameIdentifier;
- FileInfo mGCLog;
- FileInfo mCCLog;
- };
- NS_IMPL_ISUPPORTS(nsCycleCollectorLogSinkToFile, nsICycleCollectorLogSink)
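- // The logger feeds graph information to two consumers: the file-based log
- // sink above (unless logging is disabled) and, when after-processing is
- // requested, an in-memory list of CCGraphDescribers that ProcessNext()
- // hands back to script one entry at a time.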
- class nsCycleCollectorLogger final : public nsICycleCollectorListener
- {
- ~nsCycleCollectorLogger()
- {
- ClearDescribers();
- }
- public:
- nsCycleCollectorLogger()
- : mLogSink(nsCycleCollector_createLogSink())
- , mWantAllTraces(false)
- , mDisableLog(false)
- , mWantAfterProcessing(false)
- , mCCLog(nullptr)
- {
- }
- NS_DECL_ISUPPORTS
- void SetAllTraces()
- {
- mWantAllTraces = true;
- }
- bool IsAllTraces()
- {
- return mWantAllTraces;
- }
- NS_IMETHOD AllTraces(nsICycleCollectorListener** aListener) override
- {
- SetAllTraces();
- NS_ADDREF(*aListener = this);
- return NS_OK;
- }
- NS_IMETHOD GetWantAllTraces(bool* aAllTraces) override
- {
- *aAllTraces = mWantAllTraces;
- return NS_OK;
- }
- NS_IMETHOD GetDisableLog(bool* aDisableLog) override
- {
- *aDisableLog = mDisableLog;
- return NS_OK;
- }
- NS_IMETHOD SetDisableLog(bool aDisableLog) override
- {
- mDisableLog = aDisableLog;
- return NS_OK;
- }
- NS_IMETHOD GetWantAfterProcessing(bool* aWantAfterProcessing) override
- {
- *aWantAfterProcessing = mWantAfterProcessing;
- return NS_OK;
- }
- NS_IMETHOD SetWantAfterProcessing(bool aWantAfterProcessing) override
- {
- mWantAfterProcessing = aWantAfterProcessing;
- return NS_OK;
- }
- NS_IMETHOD GetLogSink(nsICycleCollectorLogSink** aLogSink) override
- {
- NS_ADDREF(*aLogSink = mLogSink);
- return NS_OK;
- }
- NS_IMETHOD SetLogSink(nsICycleCollectorLogSink* aLogSink) override
- {
- if (!aLogSink) {
- return NS_ERROR_INVALID_ARG;
- }
- mLogSink = aLogSink;
- return NS_OK;
- }
- nsresult Begin()
- {
- nsresult rv;
- mCurrentAddress.AssignLiteral("0x");
- ClearDescribers();
- if (mDisableLog) {
- return NS_OK;
- }
- FILE* gcLog;
- rv = mLogSink->Open(&gcLog, &mCCLog);
- NS_ENSURE_SUCCESS(rv, rv);
- // Dump the JS heap.
- CollectorData* data = sCollectorData.get();
- if (data && data->mContext) {
- data->mContext->DumpJSHeap(gcLog);
- }
- rv = mLogSink->CloseGCLog();
- NS_ENSURE_SUCCESS(rv, rv);
- fprintf(mCCLog, "# WantAllTraces=%s\n", mWantAllTraces ? "true" : "false");
- return NS_OK;
- }
- void NoteRefCountedObject(uint64_t aAddress, uint32_t aRefCount,
- const char* aObjectDescription)
- {
- if (!mDisableLog) {
- fprintf(mCCLog, "%p [rc=%u] %s\n", (void*)aAddress, aRefCount,
- aObjectDescription);
- }
- if (mWantAfterProcessing) {
- CCGraphDescriber* d = new CCGraphDescriber();
- mDescribers.insertBack(d);
- mCurrentAddress.AssignLiteral("0x");
- mCurrentAddress.AppendInt(aAddress, 16);
- d->mType = CCGraphDescriber::eRefCountedObject;
- d->mAddress = mCurrentAddress;
- d->mCnt = aRefCount;
- d->mName.Append(aObjectDescription);
- }
- }
- void NoteGCedObject(uint64_t aAddress, bool aMarked,
- const char* aObjectDescription,
- uint64_t aCompartmentAddress)
- {
- if (!mDisableLog) {
- fprintf(mCCLog, "%p [gc%s] %s\n", (void*)aAddress,
- aMarked ? ".marked" : "", aObjectDescription);
- }
- if (mWantAfterProcessing) {
- CCGraphDescriber* d = new CCGraphDescriber();
- mDescribers.insertBack(d);
- mCurrentAddress.AssignLiteral("0x");
- mCurrentAddress.AppendInt(aAddress, 16);
- d->mType = aMarked ? CCGraphDescriber::eGCMarkedObject :
- CCGraphDescriber::eGCedObject;
- d->mAddress = mCurrentAddress;
- d->mName.Append(aObjectDescription);
- if (aCompartmentAddress) {
- d->mCompartmentOrToAddress.AssignLiteral("0x");
- d->mCompartmentOrToAddress.AppendInt(aCompartmentAddress, 16);
- } else {
- d->mCompartmentOrToAddress.SetIsVoid(true);
- }
- }
- }
- void NoteEdge(uint64_t aToAddress, const char* aEdgeName)
- {
- if (!mDisableLog) {
- fprintf(mCCLog, "> %p %s\n", (void*)aToAddress, aEdgeName);
- }
- if (mWantAfterProcessing) {
- CCGraphDescriber* d = new CCGraphDescriber();
- mDescribers.insertBack(d);
- d->mType = CCGraphDescriber::eEdge;
- d->mAddress = mCurrentAddress;
- d->mCompartmentOrToAddress.AssignLiteral("0x");
- d->mCompartmentOrToAddress.AppendInt(aToAddress, 16);
- d->mName.Append(aEdgeName);
- }
- }
- void NoteWeakMapEntry(uint64_t aMap, uint64_t aKey,
- uint64_t aKeyDelegate, uint64_t aValue)
- {
- if (!mDisableLog) {
- fprintf(mCCLog, "WeakMapEntry map=%p key=%p keyDelegate=%p value=%p\n",
- (void*)aMap, (void*)aKey, (void*)aKeyDelegate, (void*)aValue);
- }
- // We don't support after-processing for weak map entries.
- }
- void NoteIncrementalRoot(uint64_t aAddress)
- {
- if (!mDisableLog) {
- fprintf(mCCLog, "IncrementalRoot %p\n", (void*)aAddress);
- }
- // We don't support after-processing for incremental roots.
- }
- void BeginResults()
- {
- if (!mDisableLog) {
- fputs("==========\n", mCCLog);
- }
- }
- void DescribeRoot(uint64_t aAddress, uint32_t aKnownEdges)
- {
- if (!mDisableLog) {
- fprintf(mCCLog, "%p [known=%u]\n", (void*)aAddress, aKnownEdges);
- }
- if (mWantAfterProcessing) {
- CCGraphDescriber* d = new CCGraphDescriber();
- mDescribers.insertBack(d);
- d->mType = CCGraphDescriber::eRoot;
- d->mAddress.AppendInt(aAddress, 16);
- d->mCnt = aKnownEdges;
- }
- }
- void DescribeGarbage(uint64_t aAddress)
- {
- if (!mDisableLog) {
- fprintf(mCCLog, "%p [garbage]\n", (void*)aAddress);
- }
- if (mWantAfterProcessing) {
- CCGraphDescriber* d = new CCGraphDescriber();
- mDescribers.insertBack(d);
- d->mType = CCGraphDescriber::eGarbage;
- d->mAddress.AppendInt(aAddress, 16);
- }
- }
- void End()
- {
- if (!mDisableLog) {
- mCCLog = nullptr;
- Unused << NS_WARN_IF(NS_FAILED(mLogSink->CloseCCLog()));
- }
- }
- NS_IMETHOD ProcessNext(nsICycleCollectorHandler* aHandler,
- bool* aCanContinue) override
- {
- if (NS_WARN_IF(!aHandler) || NS_WARN_IF(!mWantAfterProcessing)) {
- return NS_ERROR_UNEXPECTED;
- }
- CCGraphDescriber* d = mDescribers.popFirst();
- if (d) {
- switch (d->mType) {
- case CCGraphDescriber::eRefCountedObject:
- aHandler->NoteRefCountedObject(d->mAddress,
- d->mCnt,
- d->mName);
- break;
- case CCGraphDescriber::eGCedObject:
- case CCGraphDescriber::eGCMarkedObject:
- aHandler->NoteGCedObject(d->mAddress,
- d->mType ==
- CCGraphDescriber::eGCMarkedObject,
- d->mName,
- d->mCompartmentOrToAddress);
- break;
- case CCGraphDescriber::eEdge:
- aHandler->NoteEdge(d->mAddress,
- d->mCompartmentOrToAddress,
- d->mName);
- break;
- case CCGraphDescriber::eRoot:
- aHandler->DescribeRoot(d->mAddress,
- d->mCnt);
- break;
- case CCGraphDescriber::eGarbage:
- aHandler->DescribeGarbage(d->mAddress);
- break;
- case CCGraphDescriber::eUnknown:
- NS_NOTREACHED("CCGraphDescriber::eUnknown");
- break;
- }
- delete d;
- }
- if (!(*aCanContinue = !mDescribers.isEmpty())) {
- mCurrentAddress.AssignLiteral("0x");
- }
- return NS_OK;
- }
- NS_IMETHOD AsLogger(nsCycleCollectorLogger** aRetVal) override
- {
- RefPtr<nsCycleCollectorLogger> rval = this;
- rval.forget(aRetVal);
- return NS_OK;
- }
- private:
- void ClearDescribers()
- {
- CCGraphDescriber* d;
- while ((d = mDescribers.popFirst())) {
- delete d;
- }
- }
- nsCOMPtr<nsICycleCollectorLogSink> mLogSink;
- bool mWantAllTraces;
- bool mDisableLog;
- bool mWantAfterProcessing;
- nsCString mCurrentAddress;
- mozilla::LinkedList<CCGraphDescriber> mDescribers;
- FILE* mCCLog;
- };
- NS_IMPL_ISUPPORTS(nsCycleCollectorLogger, nsICycleCollectorListener)
- nsresult
- nsCycleCollectorLoggerConstructor(nsISupports* aOuter,
- const nsIID& aIID,
- void** aInstancePtr)
- {
- if (NS_WARN_IF(aOuter)) {
- return NS_ERROR_NO_AGGREGATION;
- }
- nsISupports* logger = new nsCycleCollectorLogger();
- return logger->QueryInterface(aIID, aInstancePtr);
- }
- static bool
- GCThingIsGrayCCThing(JS::GCCellPtr thing)
- {
- return AddToCCKind(thing.kind()) &&
- JS::GCThingIsMarkedGray(thing);
- }
- static bool
- ValueIsGrayCCThing(const JS::Value& value)
- {
- return AddToCCKind(value.traceKind()) &&
- JS::GCThingIsMarkedGray(value.toGCCellPtr());
- }
- ////////////////////////////////////////////////////////////////////////
- // Bacon & Rajan's |MarkRoots| routine.
- ////////////////////////////////////////////////////////////////////////
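- // CCGraphBuilder implements both nsCycleCollectionNoteRootCallback (for
- // collecting purple-buffer and JS roots) and nsCycleCollectionTraversalCallback
- // (for recording each node's outgoing edges). Roots are added first, then
- // BuildGraph() traverses nodes incrementally under a SliceBudget.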
- class CCGraphBuilder final : public nsCycleCollectionTraversalCallback,
- public nsCycleCollectionNoteRootCallback
- {
- private:
- CCGraph& mGraph;
- CycleCollectorResults& mResults;
- NodePool::Builder mNodeBuilder;
- EdgePool::Builder mEdgeBuilder;
- MOZ_INIT_OUTSIDE_CTOR PtrInfo* mCurrPi;
- nsCycleCollectionParticipant* mJSParticipant;
- nsCycleCollectionParticipant* mJSZoneParticipant;
- nsCString mNextEdgeName;
- RefPtr<nsCycleCollectorLogger> mLogger;
- bool mMergeZones;
- nsAutoPtr<NodePool::Enumerator> mCurrNode;
- public:
- CCGraphBuilder(CCGraph& aGraph,
- CycleCollectorResults& aResults,
- CycleCollectedJSContext* aJSContext,
- nsCycleCollectorLogger* aLogger,
- bool aMergeZones);
- virtual ~CCGraphBuilder();
- bool WantAllTraces() const
- {
- return nsCycleCollectionNoteRootCallback::WantAllTraces();
- }
- bool AddPurpleRoot(void* aRoot, nsCycleCollectionParticipant* aParti);
- // This is called when all roots have been added to the graph, to prepare for BuildGraph().
- void DoneAddingRoots();
- // Do some work traversing nodes in the graph. Returns true if this graph building is finished.
- bool BuildGraph(SliceBudget& aBudget);
- private:
- PtrInfo* AddNode(void* aPtr, nsCycleCollectionParticipant* aParticipant);
- PtrInfo* AddWeakMapNode(JS::GCCellPtr aThing);
- PtrInfo* AddWeakMapNode(JSObject* aObject);
- void SetFirstChild()
- {
- mCurrPi->SetFirstChild(mEdgeBuilder.Mark());
- }
- void SetLastChild()
- {
- mCurrPi->SetLastChild(mEdgeBuilder.Mark());
- }
- public:
- // nsCycleCollectionNoteRootCallback methods.
- NS_IMETHOD_(void) NoteXPCOMRoot(nsISupports* aRoot);
- NS_IMETHOD_(void) NoteJSRoot(JSObject* aRoot);
- NS_IMETHOD_(void) NoteNativeRoot(void* aRoot,
- nsCycleCollectionParticipant* aParticipant);
- NS_IMETHOD_(void) NoteWeakMapping(JSObject* aMap, JS::GCCellPtr aKey,
- JSObject* aKdelegate, JS::GCCellPtr aVal);
- // nsCycleCollectionTraversalCallback methods.
- NS_IMETHOD_(void) DescribeRefCountedNode(nsrefcnt aRefCount,
- const char* aObjName);
- NS_IMETHOD_(void) DescribeGCedNode(bool aIsMarked, const char* aObjName,
- uint64_t aCompartmentAddress);
- NS_IMETHOD_(void) NoteXPCOMChild(nsISupports* aChild);
- NS_IMETHOD_(void) NoteJSChild(const JS::GCCellPtr& aThing);
- NS_IMETHOD_(void) NoteNativeChild(void* aChild,
- nsCycleCollectionParticipant* aParticipant);
- NS_IMETHOD_(void) NoteNextEdgeName(const char* aName);
- private:
- void NoteJSChild(JS::GCCellPtr aChild);
- NS_IMETHOD_(void) NoteRoot(void* aRoot,
- nsCycleCollectionParticipant* aParticipant)
- {
- MOZ_ASSERT(aRoot);
- MOZ_ASSERT(aParticipant);
- if (!aParticipant->CanSkipInCC(aRoot) || MOZ_UNLIKELY(WantAllTraces())) {
- AddNode(aRoot, aParticipant);
- }
- }
- NS_IMETHOD_(void) NoteChild(void* aChild, nsCycleCollectionParticipant* aCp,
- nsCString& aEdgeName)
- {
- PtrInfo* childPi = AddNode(aChild, aCp);
- if (!childPi) {
- return;
- }
- mEdgeBuilder.Add(childPi);
- if (mLogger) {
- mLogger->NoteEdge((uint64_t)aChild, aEdgeName.get());
- }
- ++childPi->mInternalRefs;
- }
- JS::Zone* MergeZone(JS::GCCellPtr aGcthing)
- {
- if (!mMergeZones) {
- return nullptr;
- }
- JS::Zone* zone = JS::GetTenuredGCThingZone(aGcthing);
- if (js::IsSystemZone(zone)) {
- return nullptr;
- }
- return zone;
- }
- };
- CCGraphBuilder::CCGraphBuilder(CCGraph& aGraph,
- CycleCollectorResults& aResults,
- CycleCollectedJSContext* aJSContext,
- nsCycleCollectorLogger* aLogger,
- bool aMergeZones)
- : mGraph(aGraph)
- , mResults(aResults)
- , mNodeBuilder(aGraph.mNodes)
- , mEdgeBuilder(aGraph.mEdges)
- , mJSParticipant(nullptr)
- , mJSZoneParticipant(nullptr)
- , mLogger(aLogger)
- , mMergeZones(aMergeZones)
- {
- if (aJSContext) {
- mJSParticipant = aJSContext->GCThingParticipant();
- mJSZoneParticipant = aJSContext->ZoneParticipant();
- }
- if (mLogger) {
- mFlags |= nsCycleCollectionTraversalCallback::WANT_DEBUG_INFO;
- if (mLogger->IsAllTraces()) {
- mFlags |= nsCycleCollectionTraversalCallback::WANT_ALL_TRACES;
- mWantAllTraces = true; // for nsCycleCollectionNoteRootCallback
- }
- }
- mMergeZones = mMergeZones && MOZ_LIKELY(!WantAllTraces());
- MOZ_ASSERT(nsCycleCollectionNoteRootCallback::WantAllTraces() ==
- nsCycleCollectionTraversalCallback::WantAllTraces());
- }
- CCGraphBuilder::~CCGraphBuilder()
- {
- }
- PtrInfo*
- CCGraphBuilder::AddNode(void* aPtr, nsCycleCollectionParticipant* aParticipant)
- {
- PtrToNodeEntry* e = mGraph.AddNodeToMap(aPtr);
- if (!e) {
- return nullptr;
- }
- PtrInfo* result;
- if (!e->mNode) {
- // New entry.
- result = mNodeBuilder.Add(aPtr, aParticipant);
- if (!result) {
- return nullptr;
- }
- e->mNode = result;
- NS_ASSERTION(result, "mNodeBuilder.Add returned null");
- } else {
- result = e->mNode;
- MOZ_ASSERT(result->mParticipant == aParticipant,
- "nsCycleCollectionParticipant shouldn't change!");
- }
- return result;
- }
- bool
- CCGraphBuilder::AddPurpleRoot(void* aRoot, nsCycleCollectionParticipant* aParti)
- {
- CanonicalizeParticipant(&aRoot, &aParti);
- if (WantAllTraces() || !aParti->CanSkipInCC(aRoot)) {
- PtrInfo* pinfo = AddNode(aRoot, aParti);
- if (!pinfo) {
- return false;
- }
- }
- return true;
- }
- void
- CCGraphBuilder::DoneAddingRoots()
- {
- // We've finished adding roots, and everything in the graph is a root.
- mGraph.mRootCount = mGraph.MapCount();
- mCurrNode = new NodePool::Enumerator(mGraph.mNodes);
- }
- MOZ_NEVER_INLINE bool
- CCGraphBuilder::BuildGraph(SliceBudget& aBudget)
- {
- const intptr_t kNumNodesBetweenTimeChecks = 1000;
- const intptr_t kStep = SliceBudget::CounterReset / kNumNodesBetweenTimeChecks;
- MOZ_ASSERT(mCurrNode);
- while (!aBudget.isOverBudget() && !mCurrNode->IsDone()) {
- PtrInfo* pi = mCurrNode->GetNext();
- if (!pi) {
- MOZ_CRASH();
- }
- mCurrPi = pi;
- // We need to call SetFirstChild() even on deleted nodes, to set their
- // firstChild(), which may be read by a prior non-deleted neighbor.
- SetFirstChild();
- if (pi->mParticipant) {
- nsresult rv = pi->mParticipant->TraverseNativeAndJS(pi->mPointer, *this);
- MOZ_RELEASE_ASSERT(!NS_FAILED(rv), "Cycle collector Traverse method failed");
- }
- if (mCurrNode->AtBlockEnd()) {
- SetLastChild();
- }
- aBudget.step(kStep);
- }
- if (!mCurrNode->IsDone()) {
- return false;
- }
- if (mGraph.mRootCount > 0) {
- SetLastChild();
- }
- mCurrNode = nullptr;
- return true;
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::NoteXPCOMRoot(nsISupports* aRoot)
- {
- aRoot = CanonicalizeXPCOMParticipant(aRoot);
- NS_ASSERTION(aRoot,
- "Don't add objects that don't participate in collection!");
- nsXPCOMCycleCollectionParticipant* cp;
- ToParticipant(aRoot, &cp);
- NoteRoot(aRoot, cp);
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::NoteJSRoot(JSObject* aRoot)
- {
- if (JS::Zone* zone = MergeZone(JS::GCCellPtr(aRoot))) {
- NoteRoot(zone, mJSZoneParticipant);
- } else {
- NoteRoot(aRoot, mJSParticipant);
- }
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::NoteNativeRoot(void* aRoot,
- nsCycleCollectionParticipant* aParticipant)
- {
- NoteRoot(aRoot, aParticipant);
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::DescribeRefCountedNode(nsrefcnt aRefCount, const char* aObjName)
- {
- MOZ_RELEASE_ASSERT(aRefCount != 0, "CCed refcounted object has zero refcount");
- MOZ_RELEASE_ASSERT(aRefCount != UINT32_MAX, "CCed refcounted object has overflowing refcount");
- mResults.mVisitedRefCounted++;
- if (mLogger) {
- mLogger->NoteRefCountedObject((uint64_t)mCurrPi->mPointer, aRefCount,
- aObjName);
- }
- mCurrPi->mRefCount = aRefCount;
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::DescribeGCedNode(bool aIsMarked, const char* aObjName,
- uint64_t aCompartmentAddress)
- {
- uint32_t refCount = aIsMarked ? UINT32_MAX : 0;
- mResults.mVisitedGCed++;
- if (mLogger) {
- mLogger->NoteGCedObject((uint64_t)mCurrPi->mPointer, aIsMarked,
- aObjName, aCompartmentAddress);
- }
- mCurrPi->mRefCount = refCount;
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::NoteXPCOMChild(nsISupports* aChild)
- {
- nsCString edgeName;
- if (WantDebugInfo()) {
- edgeName.Assign(mNextEdgeName);
- mNextEdgeName.Truncate();
- }
- if (!aChild || !(aChild = CanonicalizeXPCOMParticipant(aChild))) {
- return;
- }
- nsXPCOMCycleCollectionParticipant* cp;
- ToParticipant(aChild, &cp);
- if (cp && (!cp->CanSkipThis(aChild) || WantAllTraces())) {
- NoteChild(aChild, cp, edgeName);
- }
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::NoteNativeChild(void* aChild,
- nsCycleCollectionParticipant* aParticipant)
- {
- nsCString edgeName;
- if (WantDebugInfo()) {
- edgeName.Assign(mNextEdgeName);
- mNextEdgeName.Truncate();
- }
- if (!aChild) {
- return;
- }
- MOZ_ASSERT(aParticipant, "Need a nsCycleCollectionParticipant!");
- if (!aParticipant->CanSkipThis(aChild) || WantAllTraces()) {
- NoteChild(aChild, aParticipant, edgeName);
- }
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::NoteJSChild(const JS::GCCellPtr& aChild)
- {
- if (!aChild) {
- return;
- }
- nsCString edgeName;
- if (MOZ_UNLIKELY(WantDebugInfo())) {
- edgeName.Assign(mNextEdgeName);
- mNextEdgeName.Truncate();
- }
- if (GCThingIsGrayCCThing(aChild) || MOZ_UNLIKELY(WantAllTraces())) {
- if (JS::Zone* zone = MergeZone(aChild)) {
- NoteChild(zone, mJSZoneParticipant, edgeName);
- } else {
- NoteChild(aChild.asCell(), mJSParticipant, edgeName);
- }
- }
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::NoteNextEdgeName(const char* aName)
- {
- if (WantDebugInfo()) {
- mNextEdgeName = aName;
- }
- }
- PtrInfo*
- CCGraphBuilder::AddWeakMapNode(JS::GCCellPtr aNode)
- {
- MOZ_ASSERT(aNode, "Weak map node should be non-null.");
- if (!GCThingIsGrayCCThing(aNode) && !WantAllTraces()) {
- return nullptr;
- }
- if (JS::Zone* zone = MergeZone(aNode)) {
- return AddNode(zone, mJSZoneParticipant);
- }
- return AddNode(aNode.asCell(), mJSParticipant);
- }
- PtrInfo*
- CCGraphBuilder::AddWeakMapNode(JSObject* aObject)
- {
- return AddWeakMapNode(JS::GCCellPtr(aObject));
- }
- NS_IMETHODIMP_(void)
- CCGraphBuilder::NoteWeakMapping(JSObject* aMap, JS::GCCellPtr aKey,
- JSObject* aKdelegate, JS::GCCellPtr aVal)
- {
- // Don't try to optimize away the entry here, as we've already attempted to
- // do that in TraceWeakMapping in nsXPConnect.
- WeakMapping* mapping = mGraph.mWeakMaps.AppendElement();
- mapping->mMap = aMap ? AddWeakMapNode(aMap) : nullptr;
- mapping->mKey = aKey ? AddWeakMapNode(aKey) : nullptr;
- mapping->mKeyDelegate = aKdelegate ? AddWeakMapNode(aKdelegate) : mapping->mKey;
- mapping->mVal = aVal ? AddWeakMapNode(aVal) : nullptr;
- if (mLogger) {
- mLogger->NoteWeakMapEntry((uint64_t)aMap, aKey ? aKey.unsafeAsInteger() : 0,
- (uint64_t)aKdelegate,
- aVal ? aVal.unsafeAsInteger() : 0);
- }
- }
- static bool
- AddPurpleRoot(CCGraphBuilder& aBuilder, void* aRoot,
- nsCycleCollectionParticipant* aParti)
- {
- return aBuilder.AddPurpleRoot(aRoot, aParti);
- }
- // MayHaveChild() will be false after a Traverse if the object does
- // not have any children the CC will visit.
- class ChildFinder : public nsCycleCollectionTraversalCallback
- {
- public:
- ChildFinder() : mMayHaveChild(false)
- {
- }
- // The logic of the Note*Child functions must mirror that of their
- // respective functions in CCGraphBuilder.
- NS_IMETHOD_(void) NoteXPCOMChild(nsISupports* aChild);
- NS_IMETHOD_(void) NoteNativeChild(void* aChild,
- nsCycleCollectionParticipant* aHelper);
- NS_IMETHOD_(void) NoteJSChild(const JS::GCCellPtr& aThing);
- NS_IMETHOD_(void) DescribeRefCountedNode(nsrefcnt aRefcount,
- const char* aObjname)
- {
- }
- NS_IMETHOD_(void) DescribeGCedNode(bool aIsMarked,
- const char* aObjname,
- uint64_t aCompartmentAddress)
- {
- }
- NS_IMETHOD_(void) NoteNextEdgeName(const char* aName)
- {
- }
- bool MayHaveChild()
- {
- return mMayHaveChild;
- }
- private:
- bool mMayHaveChild;
- };
- NS_IMETHODIMP_(void)
- ChildFinder::NoteXPCOMChild(nsISupports* aChild)
- {
- if (!aChild || !(aChild = CanonicalizeXPCOMParticipant(aChild))) {
- return;
- }
- nsXPCOMCycleCollectionParticipant* cp;
- ToParticipant(aChild, &cp);
- if (cp && !cp->CanSkip(aChild, true)) {
- mMayHaveChild = true;
- }
- }
- NS_IMETHODIMP_(void)
- ChildFinder::NoteNativeChild(void* aChild,
- nsCycleCollectionParticipant* aHelper)
- {
- if (!aChild) {
- return;
- }
- MOZ_ASSERT(aHelper, "Native child must have a participant");
- if (!aHelper->CanSkip(aChild, true)) {
- mMayHaveChild = true;
- }
- }
- NS_IMETHODIMP_(void)
- ChildFinder::NoteJSChild(const JS::GCCellPtr& aChild)
- {
- if (aChild && JS::GCThingIsMarkedGray(aChild)) {
- mMayHaveChild = true;
- }
- }
- static bool
- MayHaveChild(void* aObj, nsCycleCollectionParticipant* aCp)
- {
- ChildFinder cf;
- aCp->TraverseNativeAndJS(aObj, cf);
- return cf.MayHaveChild();
- }
- // JSPurpleBuffer keeps references to GCThings which might affect the
- // next cycle collection. It is owned only by itself: during unlink its
- // self-reference is broken and the object ends up deleting itself.
- // If GC happens before CC, references to GCthings and the self reference are
- // removed.
- class JSPurpleBuffer
- {
- ~JSPurpleBuffer()
- {
- MOZ_ASSERT(mValues.IsEmpty());
- MOZ_ASSERT(mObjects.IsEmpty());
- }
- public:
- explicit JSPurpleBuffer(RefPtr<JSPurpleBuffer>& aReferenceToThis)
- : mReferenceToThis(aReferenceToThis)
- , mValues(kSegmentSize)
- , mObjects(kSegmentSize)
- {
- mReferenceToThis = this;
- mozilla::HoldJSObjects(this);
- }
- void Destroy()
- {
- mReferenceToThis = nullptr;
- mValues.Clear();
- mObjects.Clear();
- mozilla::DropJSObjects(this);
- }
- NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(JSPurpleBuffer)
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(JSPurpleBuffer)
- RefPtr<JSPurpleBuffer>& mReferenceToThis;
- // These are raw pointers instead of Heap<T> because we only need Heap<T> for
- // pointers which may point into the nursery. The purple buffer never contains
- // pointers to the nursery because nursery gcthings can never be gray and only
- // gray things can be inserted into the purple buffer.
- static const size_t kSegmentSize = 512;
- SegmentedVector<JS::Value, kSegmentSize, InfallibleAllocPolicy> mValues;
- SegmentedVector<JSObject*, kSegmentSize, InfallibleAllocPolicy> mObjects;
- };
- NS_IMPL_CYCLE_COLLECTION_CLASS(JSPurpleBuffer)
- NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(JSPurpleBuffer)
- tmp->Destroy();
- NS_IMPL_CYCLE_COLLECTION_UNLINK_END
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(JSPurpleBuffer)
- CycleCollectionNoteChild(cb, tmp, "self");
- NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
- #define NS_TRACE_SEGMENTED_ARRAY(_field, _type) \
- { \
- for (auto iter = tmp->_field.Iter(); !iter.Done(); iter.Next()) { \
- js::gc::CallTraceCallbackOnNonHeap<_type, TraceCallbacks>( \
- &iter.Get(), aCallbacks, #_field, aClosure); \
- } \
- }
- NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(JSPurpleBuffer)
- NS_TRACE_SEGMENTED_ARRAY(mValues, JS::Value)
- NS_TRACE_SEGMENTED_ARRAY(mObjects, JSObject*)
- NS_IMPL_CYCLE_COLLECTION_TRACE_END
- NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(JSPurpleBuffer, AddRef)
- NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(JSPurpleBuffer, Release)
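- // SnowWhiteKiller collects purple-buffer entries whose refcount has dropped
- // to zero ("snow-white" objects) and frees them in its destructor. Any gray
- // JS things such an object still holds are appended to the JSPurpleBuffer so
- // the next cycle collection can consider them.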
- class SnowWhiteKiller : public TraceCallbacks
- {
- struct SnowWhiteObject
- {
- void* mPointer;
- nsCycleCollectionParticipant* mParticipant;
- nsCycleCollectingAutoRefCnt* mRefCnt;
- };
- // Segments are 4 KiB on 32-bit and 8 KiB on 64-bit.
- static const size_t kSegmentSize = sizeof(void*) * 1024;
- typedef SegmentedVector<SnowWhiteObject, kSegmentSize, InfallibleAllocPolicy>
- ObjectsVector;
- public:
- explicit SnowWhiteKiller(nsCycleCollector* aCollector)
- : mCollector(aCollector)
- , mObjects(kSegmentSize)
- {
- MOZ_ASSERT(mCollector, "Calling SnowWhiteKiller after nsCC went away");
- }
- ~SnowWhiteKiller()
- {
- for (auto iter = mObjects.Iter(); !iter.Done(); iter.Next()) {
- SnowWhiteObject& o = iter.Get();
- if (!o.mRefCnt->get() && !o.mRefCnt->IsInPurpleBuffer()) {
- mCollector->RemoveObjectFromGraph(o.mPointer);
- o.mRefCnt->stabilizeForDeletion();
- {
- JS::AutoEnterCycleCollection autocc(mCollector->Context()->Context());
- o.mParticipant->Trace(o.mPointer, *this, nullptr);
- }
- o.mParticipant->DeleteCycleCollectable(o.mPointer);
- }
- }
- }
- void
- Visit(nsPurpleBuffer& aBuffer, nsPurpleBufferEntry* aEntry)
- {
- MOZ_ASSERT(aEntry->mObject, "Null object in purple buffer");
- if (!aEntry->mRefCnt->get()) {
- void* o = aEntry->mObject;
- nsCycleCollectionParticipant* cp = aEntry->mParticipant;
- CanonicalizeParticipant(&o, &cp);
- SnowWhiteObject swo = { o, cp, aEntry->mRefCnt };
- mObjects.InfallibleAppend(swo);
- aBuffer.Remove(aEntry);
- }
- }
- bool HasSnowWhiteObjects() const
- {
- return !mObjects.IsEmpty();
- }
- virtual void Trace(JS::Heap<JS::Value>* aValue, const char* aName,
- void* aClosure) const override
- {
- const JS::Value& val = aValue->unbarrieredGet();
- if (val.isGCThing() && ValueIsGrayCCThing(val)) {
- MOZ_ASSERT(!js::gc::IsInsideNursery(val.toGCThing()));
- mCollector->GetJSPurpleBuffer()->mValues.InfallibleAppend(val);
- }
- }
- virtual void Trace(JS::Heap<jsid>* aId, const char* aName,
- void* aClosure) const override
- {
- }
- void AppendJSObjectToPurpleBuffer(JSObject* obj) const
- {
- if (obj && JS::ObjectIsMarkedGray(obj)) {
- MOZ_ASSERT(JS::ObjectIsTenured(obj));
- mCollector->GetJSPurpleBuffer()->mObjects.InfallibleAppend(obj);
- }
- }
- virtual void Trace(JS::Heap<JSObject*>* aObject, const char* aName,
- void* aClosure) const override
- {
- AppendJSObjectToPurpleBuffer(aObject->unbarrieredGet());
- }
- virtual void Trace(JSObject** aObject, const char* aName,
- void* aClosure) const override
- {
- AppendJSObjectToPurpleBuffer(*aObject);
- }
- virtual void Trace(JS::TenuredHeap<JSObject*>* aObject, const char* aName,
- void* aClosure) const override
- {
- AppendJSObjectToPurpleBuffer(aObject->unbarrieredGetPtr());
- }
- virtual void Trace(JS::Heap<JSString*>* aString, const char* aName,
- void* aClosure) const override
- {
- }
- virtual void Trace(JS::Heap<JSScript*>* aScript, const char* aName,
- void* aClosure) const override
- {
- }
- virtual void Trace(JS::Heap<JSFunction*>* aFunction, const char* aName,
- void* aClosure) const override
- {
- }
- private:
- RefPtr<nsCycleCollector> mCollector;
- ObjectsVector mObjects;
- };
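- // RemoveSkippableVisitor drives ForgetSkippable: snow-white entries are freed
- // (or handed to deferred deletion when async freeing is requested), and
- // entries that the participant says can be skipped (or, optionally, that
- // cannot have any children) are dropped from the purple buffer.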
- class RemoveSkippableVisitor : public SnowWhiteKiller
- {
- public:
- RemoveSkippableVisitor(nsCycleCollector* aCollector,
- bool aRemoveChildlessNodes,
- bool aAsyncSnowWhiteFreeing,
- CC_ForgetSkippableCallback aCb)
- : SnowWhiteKiller(aCollector)
- , mRemoveChildlessNodes(aRemoveChildlessNodes)
- , mAsyncSnowWhiteFreeing(aAsyncSnowWhiteFreeing)
- , mDispatchedDeferredDeletion(false)
- , mCallback(aCb)
- {
- }
- ~RemoveSkippableVisitor()
- {
- // Note, we must call the callback before SnowWhiteKiller calls
- // DeleteCycleCollectable!
- if (mCallback) {
- mCallback();
- }
- if (HasSnowWhiteObjects()) {
- // Effectively a continuation.
- nsCycleCollector_dispatchDeferredDeletion(true);
- }
- }
- void
- Visit(nsPurpleBuffer& aBuffer, nsPurpleBufferEntry* aEntry)
- {
- MOZ_ASSERT(aEntry->mObject, "null mObject in purple buffer");
- if (!aEntry->mRefCnt->get()) {
- if (!mAsyncSnowWhiteFreeing) {
- SnowWhiteKiller::Visit(aBuffer, aEntry);
- } else if (!mDispatchedDeferredDeletion) {
- mDispatchedDeferredDeletion = true;
- nsCycleCollector_dispatchDeferredDeletion(false);
- }
- return;
- }
- void* o = aEntry->mObject;
- nsCycleCollectionParticipant* cp = aEntry->mParticipant;
- CanonicalizeParticipant(&o, &cp);
- if (aEntry->mRefCnt->IsPurple() && !cp->CanSkip(o, false) &&
- (!mRemoveChildlessNodes || MayHaveChild(o, cp))) {
- return;
- }
- aBuffer.Remove(aEntry);
- }
- private:
- bool mRemoveChildlessNodes;
- bool mAsyncSnowWhiteFreeing;
- bool mDispatchedDeferredDeletion;
- CC_ForgetSkippableCallback mCallback;
- };
- void
- nsPurpleBuffer::RemoveSkippable(nsCycleCollector* aCollector,
- bool aRemoveChildlessNodes,
- bool aAsyncSnowWhiteFreeing,
- CC_ForgetSkippableCallback aCb)
- {
- RemoveSkippableVisitor visitor(aCollector, aRemoveChildlessNodes,
- aAsyncSnowWhiteFreeing, aCb);
- VisitEntries(visitor);
- }
- bool
- nsCycleCollector::FreeSnowWhite(bool aUntilNoSWInPurpleBuffer)
- {
- CheckThreadSafety();
- if (mFreeingSnowWhite) {
- return false;
- }
- AutoRestore<bool> ar(mFreeingSnowWhite);
- mFreeingSnowWhite = true;
- bool hadSnowWhiteObjects = false;
- do {
- SnowWhiteKiller visitor(this);
- mPurpleBuf.VisitEntries(visitor);
- hadSnowWhiteObjects = hadSnowWhiteObjects ||
- visitor.HasSnowWhiteObjects();
- if (!visitor.HasSnowWhiteObjects()) {
- break;
- }
- } while (aUntilNoSWInPurpleBuffer);
- return hadSnowWhiteObjects;
- }
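- // ForgetSkippable trims the purple buffer between collections: entries that
- // cannot be part of a garbage cycle are removed so the next CC graph stays
- // small. It is skipped while snow-white objects are being freed.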
- void
- nsCycleCollector::ForgetSkippable(bool aRemoveChildlessNodes,
- bool aAsyncSnowWhiteFreeing)
- {
- CheckThreadSafety();
- // Avoid this when we're already dealing with snow-white objects.
- if (mFreeingSnowWhite) {
- return;
- }
- mozilla::Maybe<mozilla::AutoGlobalTimelineMarker> marker;
- if (NS_IsMainThread()) {
- marker.emplace("nsCycleCollector::ForgetSkippable", MarkerStackRequest::NO_STACK);
- }
- // If we remove things from the purple buffer during graph building, we may
- // lose track of an object that was mutated during graph building.
- MOZ_ASSERT(IsIdle());
- if (mJSContext) {
- mJSContext->PrepareForForgetSkippable();
- }
- MOZ_ASSERT(!mScanInProgress,
- "Don't forget skippable or free snow-white while scan is in progress.");
- mPurpleBuf.RemoveSkippable(this, aRemoveChildlessNodes,
- aAsyncSnowWhiteFreeing, mForgetSkippableCB);
- }
- MOZ_NEVER_INLINE void
- nsCycleCollector::MarkRoots(SliceBudget& aBudget)
- {
- JS::AutoAssertNoGC nogc;
- TimeLog timeLog;
- AutoRestore<bool> ar(mScanInProgress);
- MOZ_RELEASE_ASSERT(!mScanInProgress);
- mScanInProgress = true;
- MOZ_ASSERT(mIncrementalPhase == GraphBuildingPhase);
- JS::AutoEnterCycleCollection autocc(Context()->Context());
- bool doneBuilding = mBuilder->BuildGraph(aBudget);
- if (!doneBuilding) {
- timeLog.Checkpoint("MarkRoots()");
- return;
- }
- mBuilder = nullptr;
- mIncrementalPhase = ScanAndCollectWhitePhase;
- timeLog.Checkpoint("MarkRoots()");
- }
- ////////////////////////////////////////////////////////////////////////
- // Bacon & Rajan's |ScanRoots| routine.
- ////////////////////////////////////////////////////////////////////////
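- // ScanBlackVisitor is used with GraphWalker to flood black from a node,
- // decrementing the white-node count whenever a node changes from white to
- // black.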
- struct ScanBlackVisitor
- {
- ScanBlackVisitor(uint32_t& aWhiteNodeCount, bool& aFailed)
- : mWhiteNodeCount(aWhiteNodeCount), mFailed(aFailed)
- {
- }
- bool ShouldVisitNode(PtrInfo const* aPi)
- {
- return aPi->mColor != black;
- }
- MOZ_NEVER_INLINE void VisitNode(PtrInfo* aPi)
- {
- if (aPi->mColor == white) {
- --mWhiteNodeCount;
- }
- aPi->mColor = black;
- }
- void Failed()
- {
- mFailed = true;
- }
- private:
- uint32_t& mWhiteNodeCount;
- bool& mFailed;
- };
- static void
- FloodBlackNode(uint32_t& aWhiteNodeCount, bool& aFailed, PtrInfo* aPi)
- {
- GraphWalker<ScanBlackVisitor>(ScanBlackVisitor(aWhiteNodeCount,
- aFailed)).Walk(aPi);
- MOZ_ASSERT(aPi->mColor == black || !aPi->WasTraversed(),
- "FloodBlackNode should make aPi black");
- }
- // Iterate over the WeakMaps. If we mark anything while iterating
- // over the WeakMaps, we must iterate over all of the WeakMaps again.
- void
- nsCycleCollector::ScanWeakMaps()
- {
- bool anyChanged;
- bool failed = false;
- do {
- anyChanged = false;
- for (uint32_t i = 0; i < mGraph.mWeakMaps.Length(); i++) {
- WeakMapping* wm = &mGraph.mWeakMaps[i];
- // If any of these are null, the original object was marked black.
- uint32_t mColor = wm->mMap ? wm->mMap->mColor : black;
- uint32_t kColor = wm->mKey ? wm->mKey->mColor : black;
- uint32_t kdColor = wm->mKeyDelegate ? wm->mKeyDelegate->mColor : black;
- uint32_t vColor = wm->mVal ? wm->mVal->mColor : black;
- MOZ_ASSERT(mColor != grey, "Uncolored weak map");
- MOZ_ASSERT(kColor != grey, "Uncolored weak map key");
- MOZ_ASSERT(kdColor != grey, "Uncolored weak map key delegate");
- MOZ_ASSERT(vColor != grey, "Uncolored weak map value");
- if (mColor == black && kColor != black && kdColor == black) {
- FloodBlackNode(mWhiteNodeCount, failed, wm->mKey);
- anyChanged = true;
- }
- if (mColor == black && kColor == black && vColor != black) {
- FloodBlackNode(mWhiteNodeCount, failed, wm->mVal);
- anyChanged = true;
- }
- }
- } while (anyChanged);
- if (failed) {
- MOZ_ASSERT(false, "Ran out of memory in ScanWeakMaps");
- }
- }
- // Flood black from any objects in the purple buffer that are in the CC graph.
- class PurpleScanBlackVisitor
- {
- public:
- PurpleScanBlackVisitor(CCGraph& aGraph, nsCycleCollectorLogger* aLogger,
- uint32_t& aCount, bool& aFailed)
- : mGraph(aGraph), mLogger(aLogger), mCount(aCount), mFailed(aFailed)
- {
- }
- void
- Visit(nsPurpleBuffer& aBuffer, nsPurpleBufferEntry* aEntry)
- {
- MOZ_ASSERT(aEntry->mObject,
- "Entries with null mObject shouldn't be in the purple buffer.");
- MOZ_ASSERT(aEntry->mRefCnt->get() != 0,
- "Snow-white objects shouldn't be in the purple buffer.");
- void* obj = aEntry->mObject;
- if (!aEntry->mParticipant) {
- obj = CanonicalizeXPCOMParticipant(static_cast<nsISupports*>(obj));
- MOZ_ASSERT(obj, "Don't add objects that don't participate in collection!");
- }
- PtrInfo* pi = mGraph.FindNode(obj);
- if (!pi) {
- return;
- }
- MOZ_ASSERT(pi->mParticipant, "No dead objects should be in the purple buffer.");
- if (MOZ_UNLIKELY(mLogger)) {
- mLogger->NoteIncrementalRoot((uint64_t)pi->mPointer);
- }
- if (pi->mColor == black) {
- return;
- }
- FloodBlackNode(mCount, mFailed, pi);
- }
- private:
- CCGraph& mGraph;
- RefPtr<nsCycleCollectorLogger> mLogger;
- uint32_t& mCount;
- bool& mFailed;
- };
- // Objects that have been stored somewhere since the start of incremental graph building must
- // be treated as live for this cycle collection, because we may not have accurate information
- // about who holds references to them.
- void
- nsCycleCollector::ScanIncrementalRoots()
- {
- TimeLog timeLog;
- // Reference counted objects:
- // We cleared the purple buffer at the start of the current ICC, so if a
- // refcounted object is purple, it may have been AddRef'd during the current
- // ICC. (It may also have only been released.) If that is the case, we cannot
- // be sure that the set of things pointing to the object in the CC graph
- // is accurate. Therefore, for safety, we treat any purple objects as being
- // live during the current CC. We don't remove anything from the purple
- // buffer here, so these objects will be suspected and freed in the next CC
- // if they are garbage.
- bool failed = false;
- PurpleScanBlackVisitor purpleScanBlackVisitor(mGraph, mLogger,
- mWhiteNodeCount, failed);
- mPurpleBuf.VisitEntries(purpleScanBlackVisitor);
- timeLog.Checkpoint("ScanIncrementalRoots::fix purple");
- bool hasJSContext = !!mJSContext;
- nsCycleCollectionParticipant* jsParticipant =
- hasJSContext ? mJSContext->GCThingParticipant() : nullptr;
- nsCycleCollectionParticipant* zoneParticipant =
- hasJSContext ? mJSContext->ZoneParticipant() : nullptr;
- bool hasLogger = !!mLogger;
- NodePool::Enumerator etor(mGraph.mNodes);
- while (!etor.IsDone()) {
- PtrInfo* pi = etor.GetNext();
- // As an optimization, if an object has already been determined to be live,
- // don't consider it further. We can't do this if there is a listener,
- // because the listener wants to know the complete set of incremental roots.
- if (pi->mColor == black && MOZ_LIKELY(!hasLogger)) {
- continue;
- }
- // Garbage collected objects:
- // If a GCed object was added to the graph with a refcount of zero, and is
- // now marked black by the GC, it was probably gray before and was exposed
- // to active JS, so it may have been stored somewhere, so it needs to be
- // treated as live.
- if (pi->IsGrayJS() && MOZ_LIKELY(hasJSContext)) {
- // If the object is still marked gray by the GC, nothing could have gotten
- // hold of it, so it isn't an incremental root.
- if (pi->mParticipant == jsParticipant) {
- JS::GCCellPtr ptr(pi->mPointer, JS::GCThingTraceKind(pi->mPointer));
- if (GCThingIsGrayCCThing(ptr)) {
- continue;
- }
- } else if (pi->mParticipant == zoneParticipant) {
- JS::Zone* zone = static_cast<JS::Zone*>(pi->mPointer);
- if (js::ZoneGlobalsAreAllGray(zone)) {
- continue;
- }
- } else {
- MOZ_ASSERT(false, "Non-JS thing with 0 refcount? Treating as live.");
- }
- } else if (!pi->mParticipant && pi->WasTraversed()) {
- // Dead traversed refcounted objects:
- // If the object was traversed, it must have been alive at the start of
- // the CC, and thus had a positive refcount. It is dead now, so its
- // refcount must have decreased at some point during the CC. Therefore,
- // it would be in the purple buffer if it wasn't dead, so treat it as an
- // incremental root.
- //
- // This should not cause leaks because as the object died it should have
- // released anything it held onto, which will add them to the purple
- // buffer, which will cause them to be considered in the next CC.
- } else {
- continue;
- }
- // At this point, pi must be an incremental root.
- // If there's a listener, tell it about this root. We don't bother with the
- // optimization of skipping the Walk() if pi is black: it will just return
- // without doing anything and there's no need to make this case faster.
- if (MOZ_UNLIKELY(hasLogger) && pi->mPointer) {
- // Dead objects aren't logged. See bug 1031370.
- mLogger->NoteIncrementalRoot((uint64_t)pi->mPointer);
- }
- FloodBlackNode(mWhiteNodeCount, failed, pi);
- }
- timeLog.Checkpoint("ScanIncrementalRoots::fix nodes");
- if (failed) {
- NS_ASSERTION(false, "Ran out of memory in ScanIncrementalRoots");
- }
- }
- // Mark nodes white and make sure their refcounts are ok.
- // No nodes are marked black during this pass to ensure that refcount
- // checking is run on all nodes not marked black by ScanIncrementalRoots.
- void
- nsCycleCollector::ScanWhiteNodes(bool aFullySynchGraphBuild)
- {
- NodePool::Enumerator nodeEnum(mGraph.mNodes);
- while (!nodeEnum.IsDone()) {
- PtrInfo* pi = nodeEnum.GetNext();
- if (pi->mColor == black) {
- // Incremental roots can be in a nonsensical state, so don't
- // check them. This will miss checking nodes that are merely
- // reachable from incremental roots.
- MOZ_ASSERT(!aFullySynchGraphBuild,
- "In a synch CC, no nodes should be marked black early on.");
- continue;
- }
- MOZ_ASSERT(pi->mColor == grey);
- if (!pi->WasTraversed()) {
- // This node was deleted before it was traversed, so there's no reason
- // to look at it.
- MOZ_ASSERT(!pi->mParticipant, "Live nodes should all have been traversed");
- continue;
- }
- if (pi->mInternalRefs == pi->mRefCount || pi->IsGrayJS()) {
- pi->mColor = white;
- ++mWhiteNodeCount;
- continue;
- }
- if (pi->mInternalRefs > pi->mRefCount) {
- MOZ_CRASH();
- }
- // This node will get marked black in the next pass.
- }
- }
- // Any remaining grey nodes that haven't already been deleted must be alive,
- // so mark them and their children black. Any nodes that are black must have
- // already had their children marked black, so there's no need to look at them
- // again. This pass may turn some white nodes to black.
- void
- nsCycleCollector::ScanBlackNodes()
- {
- bool failed = false;
- NodePool::Enumerator nodeEnum(mGraph.mNodes);
- while (!nodeEnum.IsDone()) {
- PtrInfo* pi = nodeEnum.GetNext();
- if (pi->mColor == grey && pi->WasTraversed()) {
- FloodBlackNode(mWhiteNodeCount, failed, pi);
- }
- }
- if (failed) {
- NS_ASSERTION(false, "Ran out of memory in ScanBlackNodes");
- }
- }
- void
- nsCycleCollector::ScanRoots(bool aFullySynchGraphBuild)
- {
- JS::AutoAssertNoGC nogc;
- AutoRestore<bool> ar(mScanInProgress);
- MOZ_RELEASE_ASSERT(!mScanInProgress);
- mScanInProgress = true;
- mWhiteNodeCount = 0;
- MOZ_ASSERT(mIncrementalPhase == ScanAndCollectWhitePhase);
- JS::AutoEnterCycleCollection autocc(Context()->Context());
- if (!aFullySynchGraphBuild) {
- ScanIncrementalRoots();
- }
- TimeLog timeLog;
- ScanWhiteNodes(aFullySynchGraphBuild);
- timeLog.Checkpoint("ScanRoots::ScanWhiteNodes");
- ScanBlackNodes();
- timeLog.Checkpoint("ScanRoots::ScanBlackNodes");
- // Scanning weak maps must be done last.
- ScanWeakMaps();
- timeLog.Checkpoint("ScanRoots::ScanWeakMaps");
- if (mLogger) {
- mLogger->BeginResults();
- NodePool::Enumerator etor(mGraph.mNodes);
- while (!etor.IsDone()) {
- PtrInfo* pi = etor.GetNext();
- if (!pi->WasTraversed()) {
- continue;
- }
- switch (pi->mColor) {
- case black:
- if (!pi->IsGrayJS() && !pi->IsBlackJS() &&
- pi->mInternalRefs != pi->mRefCount) {
- mLogger->DescribeRoot((uint64_t)pi->mPointer,
- pi->mInternalRefs);
- }
- break;
- case white:
- mLogger->DescribeGarbage((uint64_t)pi->mPointer);
- break;
- case grey:
- MOZ_ASSERT(false, "All traversed objects should be black or white");
- break;
- }
- }
- mLogger->End();
- mLogger = nullptr;
- timeLog.Checkpoint("ScanRoots::listener");
- }
- }
- ////////////////////////////////////////////////////////////////////////
- // Bacon & Rajan's |CollectWhite| routine, somewhat modified.
- ////////////////////////////////////////////////////////////////////////
- bool
- nsCycleCollector::CollectWhite()
- {
- // Explanation of "somewhat modified": we have no way to collect the
- // set of whites "all at once", we have to ask each of them to drop
- // their outgoing links and assume this will cause the garbage cycle
- // to *mostly* self-destruct (except for the reference we continue
- // to hold).
- //
- // To do this "safely" we must make sure that the white nodes we're
- // operating on are stable for the duration of our operation. So we
- // make 3 sets of calls to language runtimes:
- //
- // - Root(whites), which should pin the whites in memory.
- // - Unlink(whites), which drops outgoing links on each white.
- // - Unroot(whites), which returns the whites to normal GC.
- // Segments are 4 KiB on 32-bit and 8 KiB on 64-bit.
- static const size_t kSegmentSize = sizeof(void*) * 1024;
- SegmentedVector<PtrInfo*, kSegmentSize, InfallibleAllocPolicy>
- whiteNodes(kSegmentSize);
- TimeLog timeLog;
- MOZ_ASSERT(mIncrementalPhase == ScanAndCollectWhitePhase);
- uint32_t numWhiteNodes = 0;
- uint32_t numWhiteGCed = 0;
- uint32_t numWhiteJSZones = 0;
- {
- JS::AutoAssertNoGC nogc;
- bool hasJSContext = !!mJSContext;
- nsCycleCollectionParticipant* zoneParticipant =
- hasJSContext ? mJSContext->ZoneParticipant() : nullptr;
- NodePool::Enumerator etor(mGraph.mNodes);
- while (!etor.IsDone()) {
- PtrInfo* pinfo = etor.GetNext();
- if (pinfo->mColor == white && pinfo->mParticipant) {
- if (pinfo->IsGrayJS()) {
- MOZ_ASSERT(mJSContext);
- ++numWhiteGCed;
- JS::Zone* zone;
- if (MOZ_UNLIKELY(pinfo->mParticipant == zoneParticipant)) {
- ++numWhiteJSZones;
- zone = static_cast<JS::Zone*>(pinfo->mPointer);
- } else {
- JS::GCCellPtr ptr(pinfo->mPointer, JS::GCThingTraceKind(pinfo->mPointer));
- zone = JS::GetTenuredGCThingZone(ptr);
- }
- mJSContext->AddZoneWaitingForGC(zone);
- } else {
- whiteNodes.InfallibleAppend(pinfo);
- pinfo->mParticipant->Root(pinfo->mPointer);
- ++numWhiteNodes;
- }
- }
- }
- }
- mResults.mFreedRefCounted += numWhiteNodes;
- mResults.mFreedGCed += numWhiteGCed;
- mResults.mFreedJSZones += numWhiteJSZones;
- timeLog.Checkpoint("CollectWhite::Root");
- if (mBeforeUnlinkCB) {
- mBeforeUnlinkCB();
- timeLog.Checkpoint("CollectWhite::BeforeUnlinkCB");
- }
- // Unlink() can trigger a GC, so do not touch any JS or anything
- // else not in whiteNodes after here.
- for (auto iter = whiteNodes.Iter(); !iter.Done(); iter.Next()) {
- PtrInfo* pinfo = iter.Get();
- MOZ_ASSERT(pinfo->mParticipant,
- "Unlink shouldn't see objects removed from graph.");
- pinfo->mParticipant->Unlink(pinfo->mPointer);
- #ifdef DEBUG
- if (mJSContext) {
- mJSContext->AssertNoObjectsToTrace(pinfo->mPointer);
- }
- #endif
- }
- timeLog.Checkpoint("CollectWhite::Unlink");
- JS::AutoAssertNoGC nogc;
- for (auto iter = whiteNodes.Iter(); !iter.Done(); iter.Next()) {
- PtrInfo* pinfo = iter.Get();
- MOZ_ASSERT(pinfo->mParticipant,
- "Unroot shouldn't see objects removed from graph.");
- pinfo->mParticipant->Unroot(pinfo->mPointer);
- }
- timeLog.Checkpoint("CollectWhite::Unroot");
- nsCycleCollector_dispatchDeferredDeletion(false, true);
- timeLog.Checkpoint("CollectWhite::dispatchDeferredDeletion");
- mIncrementalPhase = CleanupPhase;
- return numWhiteNodes > 0 || numWhiteGCed > 0 || numWhiteJSZones > 0;
- }
- ////////////////////////
- // Memory reporting
- ////////////////////////
- MOZ_DEFINE_MALLOC_SIZE_OF(CycleCollectorMallocSizeOf)
- NS_IMETHODIMP
- nsCycleCollector::CollectReports(nsIHandleReportCallback* aHandleReport,
- nsISupports* aData, bool aAnonymize)
- {
- size_t objectSize, graphSize, purpleBufferSize;
- SizeOfIncludingThis(CycleCollectorMallocSizeOf,
- &objectSize, &graphSize,
- &purpleBufferSize);
- if (objectSize > 0) {
- MOZ_COLLECT_REPORT(
- "explicit/cycle-collector/collector-object", KIND_HEAP, UNITS_BYTES,
- objectSize,
- "Memory used for the cycle collector object itself.");
- }
- if (graphSize > 0) {
- MOZ_COLLECT_REPORT(
- "explicit/cycle-collector/graph", KIND_HEAP, UNITS_BYTES,
- graphSize,
- "Memory used for the cycle collector's graph. This should be zero when "
- "the collector is idle.");
- }
- if (purpleBufferSize > 0) {
- MOZ_COLLECT_REPORT(
- "explicit/cycle-collector/purple-buffer", KIND_HEAP, UNITS_BYTES,
- purpleBufferSize,
- "Memory used for the cycle collector's purple buffer.");
- }
- return NS_OK;
- };
- ////////////////////////////////////////////////////////////////////////
- // Collector implementation
- ////////////////////////////////////////////////////////////////////////
- nsCycleCollector::nsCycleCollector() :
- mActivelyCollecting(false),
- mFreeingSnowWhite(false),
- mScanInProgress(false),
- mJSContext(nullptr),
- mIncrementalPhase(IdlePhase),
- #ifdef DEBUG
- mThread(NS_GetCurrentThread()),
- #endif
- mWhiteNodeCount(0),
- mBeforeUnlinkCB(nullptr),
- mForgetSkippableCB(nullptr),
- mUnmergedNeeded(0),
- mMergedInARow(0)
- {
- }
- nsCycleCollector::~nsCycleCollector()
- {
- UnregisterWeakMemoryReporter(this);
- }
- void
- nsCycleCollector::RegisterJSContext(CycleCollectedJSContext* aJSContext)
- {
- MOZ_RELEASE_ASSERT(!mJSContext, "Multiple registrations of JS context in cycle collector");
- mJSContext = aJSContext;
- if (!NS_IsMainThread()) {
- return;
- }
- // We can't register as a reporter in nsCycleCollector() because that runs
- // before the memory reporter manager is initialized. So we do it here
- // instead.
- RegisterWeakMemoryReporter(this);
- }
- void
- nsCycleCollector::ForgetJSContext()
- {
- MOZ_RELEASE_ASSERT(mJSContext, "Forgetting JS context in cycle collector before a JS context was registered");
- mJSContext = nullptr;
- }
- #ifdef DEBUG
- static bool
- HasParticipant(void* aPtr, nsCycleCollectionParticipant* aParti)
- {
- if (aParti) {
- return true;
- }
- nsXPCOMCycleCollectionParticipant* xcp;
- ToParticipant(static_cast<nsISupports*>(aPtr), &xcp);
- return xcp != nullptr;
- }
- #endif
- MOZ_ALWAYS_INLINE void
- nsCycleCollector::Suspect(void* aPtr, nsCycleCollectionParticipant* aParti,
- nsCycleCollectingAutoRefCnt* aRefCnt)
- {
- CheckThreadSafety();
- // Don't call AddRef or Release on a CCed object from within a Traverse() method.
- MOZ_ASSERT(!mScanInProgress, "Attempted to call Suspect() while a scan was in progress");
- if (MOZ_UNLIKELY(mScanInProgress)) {
- return;
- }
- MOZ_ASSERT(aPtr, "Don't suspect null pointers");
- MOZ_ASSERT(HasParticipant(aPtr, aParti),
- "Suspected nsISupports pointer must QI to nsXPCOMCycleCollectionParticipant");
- mPurpleBuf.Put(aPtr, aParti, aRefCnt);
- }
- void
- nsCycleCollector::CheckThreadSafety()
- {
- #ifdef DEBUG
- nsIThread* currentThread = NS_GetCurrentThread();
- // XXXkhuey we can be called so late in shutdown that NS_GetCurrentThread
- // returns null (after the thread manager has shut down)
- MOZ_ASSERT(mThread == currentThread || !currentThread);
- #endif
- }
- // The cycle collector uses the mark bitmap to discover what JS objects
- // were reachable only from XPConnect roots that might participate in
- // cycles. We ask the JS context whether we need to force a GC before
- // this CC. It returns true on startup (before the mark bits have been set),
- // and also when UnmarkGray has run out of stack. We also force GCs on
- // shutdown to collect cycles involving both DOM and JS.
- void
- nsCycleCollector::FixGrayBits(bool aForceGC, TimeLog& aTimeLog)
- {
- CheckThreadSafety();
- if (!mJSContext) {
- return;
- }
- if (!aForceGC) {
- mJSContext->FixWeakMappingGrayBits();
- aTimeLog.Checkpoint("FixWeakMappingGrayBits");
- bool needGC = !mJSContext->AreGCGrayBitsValid();
- if (!needGC) {
- return;
- }
- mResults.mForcedGC = true;
- }
- mJSContext->GarbageCollect(aForceGC ? JS::gcreason::SHUTDOWN_CC :
- JS::gcreason::CC_FORCED);
- aTimeLog.Checkpoint("FixGrayBits GC");
- }
- bool
- nsCycleCollector::IsIncrementalGCInProgress()
- {
- return mJSContext && JS::IsIncrementalGCInProgress(mJSContext->Context());
- }
- void
- nsCycleCollector::FinishAnyIncrementalGCInProgress()
- {
- if (IsIncrementalGCInProgress()) {
- NS_WARNING("Finishing incremental GC in progress during CC");
- JS::PrepareForIncrementalGC(mJSContext->Context());
- JS::FinishIncrementalGC(mJSContext->Context(), JS::gcreason::CC_FORCED);
- }
- }
- void
- nsCycleCollector::CleanupAfterCollection()
- {
- TimeLog timeLog;
- MOZ_ASSERT(mIncrementalPhase == CleanupPhase);
- mGraph.Clear();
- timeLog.Checkpoint("CleanupAfterCollection::mGraph.Clear()");
- #ifdef COLLECT_TIME_DEBUG
- uint32_t interval =
- (uint32_t)((TimeStamp::Now() - mCollectionStart).ToMilliseconds());
- printf("cc: total cycle collector time was %ums in %u slices\n", interval,
- mResults.mNumSlices);
- printf("cc: visited %u ref counted and %u GCed objects, freed %d ref counted and %d GCed objects",
- mResults.mVisitedRefCounted, mResults.mVisitedGCed,
- mResults.mFreedRefCounted, mResults.mFreedGCed);
- uint32_t numVisited = mResults.mVisitedRefCounted + mResults.mVisitedGCed;
- if (numVisited > 1000) {
- uint32_t numFreed = mResults.mFreedRefCounted + mResults.mFreedGCed;
- printf(" (%d%%)", 100 * numFreed / numVisited);
- }
- printf(".\ncc: \n");
- #endif
- if (mJSContext) {
- mJSContext->FinalizeDeferredThings(mResults.mAnyManual
- ? CycleCollectedJSContext::FinalizeNow
- : CycleCollectedJSContext::FinalizeIncrementally);
- mJSContext->EndCycleCollectionCallback(mResults);
- timeLog.Checkpoint("CleanupAfterCollection::EndCycleCollectionCallback()");
- }
- mIncrementalPhase = IdlePhase;
- }
- void
- nsCycleCollector::ShutdownCollect()
- {
- FinishAnyIncrementalGCInProgress();
- SliceBudget unlimitedBudget = SliceBudget::unlimited();
- uint32_t i;
- for (i = 0; i < DEFAULT_SHUTDOWN_COLLECTIONS; ++i) {
- if (!Collect(ShutdownCC, unlimitedBudget, nullptr)) {
- break;
- }
- }
- NS_WARNING_ASSERTION(i < NORMAL_SHUTDOWN_COLLECTIONS, "Extra shutdown CC");
- }
- static void
- PrintPhase(const char* aPhase)
- {
- #ifdef DEBUG_PHASES
- printf("cc: begin %s on %s\n", aPhase,
- NS_IsMainThread() ? "mainthread" : "worker");
- #endif
- }
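- // Collect() drives the incremental state machine through IdlePhase ->
- // GraphBuildingPhase -> ScanAndCollectWhitePhase -> CleanupPhase, running as
- // many slices as the budget (and aPreferShorterSlices) allows.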
- bool
- nsCycleCollector::Collect(ccType aCCType,
- SliceBudget& aBudget,
- nsICycleCollectorListener* aManualListener,
- bool aPreferShorterSlices)
- {
- CheckThreadSafety();
- // This can legitimately happen in a few cases. See bug 383651.
- if (mActivelyCollecting || mFreeingSnowWhite) {
- return false;
- }
- mActivelyCollecting = true;
- MOZ_ASSERT(!IsIncrementalGCInProgress());
- mozilla::Maybe<mozilla::AutoGlobalTimelineMarker> marker;
- if (NS_IsMainThread()) {
- marker.emplace("nsCycleCollector::Collect", MarkerStackRequest::NO_STACK);
- }
- bool startedIdle = IsIdle();
- bool collectedAny = false;
- // If the CC started idle, it will call BeginCollection, which
- // will do FreeSnowWhite, so it doesn't need to be done here.
- if (!startedIdle) {
- TimeLog timeLog;
- FreeSnowWhite(true);
- timeLog.Checkpoint("Collect::FreeSnowWhite");
- }
- if (aCCType != SliceCC) {
- mResults.mAnyManual = true;
- }
- ++mResults.mNumSlices;
- bool continueSlice = aBudget.isUnlimited() || !aPreferShorterSlices;
- do {
- switch (mIncrementalPhase) {
- case IdlePhase:
- PrintPhase("BeginCollection");
- BeginCollection(aCCType, aManualListener);
- break;
- case GraphBuildingPhase:
- PrintPhase("MarkRoots");
- MarkRoots(aBudget);
- // Only continue this slice if we're running synchronously or the
- // next phase will probably be short, to reduce the max pause for this
- // collection.
- // (There's no need to check if we've finished graph building, because
- // if we haven't, we've already exceeded our budget, and will finish
- // this slice anyway.)
- continueSlice = aBudget.isUnlimited() ||
- (mResults.mNumSlices < 3 && !aPreferShorterSlices);
- break;
- case ScanAndCollectWhitePhase:
- // We do ScanRoots and CollectWhite in a single slice to ensure
- // that we won't unlink a live object if a weak reference is
- // promoted to a strong reference after ScanRoots has finished.
- // See bug 926533.
- PrintPhase("ScanRoots");
- ScanRoots(startedIdle);
- PrintPhase("CollectWhite");
- collectedAny = CollectWhite();
- break;
- case CleanupPhase:
- PrintPhase("CleanupAfterCollection");
- CleanupAfterCollection();
- continueSlice = false;
- break;
- }
- if (continueSlice) {
- // Force SliceBudget::isOverBudget to check the time.
- aBudget.step(SliceBudget::CounterReset);
- continueSlice = !aBudget.isOverBudget();
- }
- } while (continueSlice);
- // Clear mActivelyCollecting here to ensure that a recursive call to
- // Collect() does something.
- mActivelyCollecting = false;
- if (aCCType != SliceCC && !startedIdle) {
- // We were in the middle of an incremental CC (using its own listener).
- // Somebody has forced a CC, so after having finished out the current CC,
- // run the CC again using the new listener.
- MOZ_ASSERT(IsIdle());
- if (Collect(aCCType, aBudget, aManualListener)) {
- collectedAny = true;
- }
- }
- MOZ_ASSERT_IF(aCCType != SliceCC, IsIdle());
- return collectedAny;
- }
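- // Summary of the incremental state machine driven by Collect():
- //
- //   IdlePhase -> GraphBuildingPhase -> ScanAndCollectWhitePhase
- //     -> CleanupPhase -> IdlePhase
- //
- // A single call may advance through several of these phases, limited by
- // aBudget and aPreferShorterSlices. Only MarkRoots() can be interrupted
- // partway through a phase; BeginCollection(), ScanRoots()/CollectWhite(),
- // and CleanupAfterCollection() each run to completion within the slice
- // that starts them.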
- // Any JS objects we have in the graph could die when we GC, but we
- // don't want to abandon the current CC, because the graph contains
- // information about purple roots. So we synchronously finish off
- // the current CC.
- void
- nsCycleCollector::PrepareForGarbageCollection()
- {
- if (IsIdle()) {
- MOZ_ASSERT(mGraph.IsEmpty(), "Non-empty graph when idle");
- MOZ_ASSERT(!mBuilder, "Non-null builder when idle");
- if (mJSPurpleBuffer) {
- mJSPurpleBuffer->Destroy();
- }
- return;
- }
- FinishAnyCurrentCollection();
- }
- void
- nsCycleCollector::FinishAnyCurrentCollection()
- {
- if (IsIdle()) {
- return;
- }
- SliceBudget unlimitedBudget = SliceBudget::unlimited();
- PrintPhase("FinishAnyCurrentCollection");
- // Use SliceCC because we only want to finish the CC in progress.
- Collect(SliceCC, unlimitedBudget, nullptr);
- // It is only okay for Collect() to have failed to finish the
- // current CC if we're reentering the CC at some point past
- // graph building. We need to be past the point where the CC will
- // look at JS objects so that it is safe to GC.
- MOZ_ASSERT(IsIdle() ||
- (mActivelyCollecting && mIncrementalPhase != GraphBuildingPhase),
- "Reentered CC during graph building");
- }
- // Don't merge too many times in a row, and do at least a minimum
- // number of unmerged CCs in a row.
- static const uint32_t kMinConsecutiveUnmerged = 3;
- static const uint32_t kMaxConsecutiveMerged = 3;
- bool
- nsCycleCollector::ShouldMergeZones(ccType aCCType)
- {
- if (!mJSContext) {
- return false;
- }
- MOZ_ASSERT(mUnmergedNeeded <= kMinConsecutiveUnmerged);
- MOZ_ASSERT(mMergedInARow <= kMaxConsecutiveMerged);
- if (mMergedInARow == kMaxConsecutiveMerged) {
- MOZ_ASSERT(mUnmergedNeeded == 0);
- mUnmergedNeeded = kMinConsecutiveUnmerged;
- }
- if (mUnmergedNeeded > 0) {
- mUnmergedNeeded--;
- mMergedInARow = 0;
- return false;
- }
- if (aCCType == SliceCC && mJSContext->UsefulToMergeZones()) {
- mMergedInARow++;
- return true;
- } else {
- mMergedInARow = 0;
- return false;
- }
- }
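- // With kMaxConsecutiveMerged == 3 and kMinConsecutiveUnmerged == 3, a long
- // run of slice CCs in which merging stays useful alternates three merged
- // collections with three unmerged ones. Any non-slice (manual or shutdown)
- // CC, or a slice CC where merging isn't useful, is unmerged and resets the
- // merged-in-a-row count.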
- void
- nsCycleCollector::BeginCollection(ccType aCCType,
- nsICycleCollectorListener* aManualListener)
- {
- TimeLog timeLog;
- MOZ_ASSERT(IsIdle());
- mCollectionStart = TimeStamp::Now();
- if (mJSContext) {
- mJSContext->BeginCycleCollectionCallback();
- timeLog.Checkpoint("BeginCycleCollectionCallback()");
- }
- bool isShutdown = (aCCType == ShutdownCC);
- // Set up the listener for this CC.
- MOZ_ASSERT_IF(isShutdown, !aManualListener);
- MOZ_ASSERT(!mLogger, "Forgot to clear a previous listener?");
- if (aManualListener) {
- aManualListener->AsLogger(getter_AddRefs(mLogger));
- }
- aManualListener = nullptr;
- if (!mLogger && mParams.LogThisCC(isShutdown)) {
- mLogger = new nsCycleCollectorLogger();
- if (mParams.AllTracesThisCC(isShutdown)) {
- mLogger->SetAllTraces();
- }
- }
- // On a WantAllTraces CC, force a synchronous global GC to prevent
- // hijinks from ForgetSkippable and compartmental GCs.
- bool forceGC = isShutdown || (mLogger && mLogger->IsAllTraces());
- // BeginCycleCollectionCallback() might have started an IGC, and we need
- // to finish it before we run FixGrayBits.
- FinishAnyIncrementalGCInProgress();
- timeLog.Checkpoint("Pre-FixGrayBits finish IGC");
- FixGrayBits(forceGC, timeLog);
- FreeSnowWhite(true);
- timeLog.Checkpoint("BeginCollection FreeSnowWhite");
- if (mLogger && NS_FAILED(mLogger->Begin())) {
- mLogger = nullptr;
- }
- // FreeSnowWhite could potentially have started an IGC, which we need
- // to finish before we look at any JS roots.
- FinishAnyIncrementalGCInProgress();
- timeLog.Checkpoint("Post-FreeSnowWhite finish IGC");
- // Set up the data structures for building the graph.
- JS::AutoAssertNoGC nogc;
- JS::AutoEnterCycleCollection autocc(mJSContext->Context());
- mGraph.Init();
- mResults.Init();
- mResults.mAnyManual = (aCCType != SliceCC);
- bool mergeZones = ShouldMergeZones(aCCType);
- mResults.mMergedZones = mergeZones;
- MOZ_ASSERT(!mBuilder, "Forgot to clear mBuilder");
- mBuilder = new CCGraphBuilder(mGraph, mResults, mJSContext, mLogger,
- mergeZones);
- timeLog.Checkpoint("BeginCollection prepare graph builder");
- if (mJSContext) {
- mJSContext->TraverseRoots(*mBuilder);
- timeLog.Checkpoint("mJSContext->TraverseRoots()");
- }
- AutoRestore<bool> ar(mScanInProgress);
- MOZ_RELEASE_ASSERT(!mScanInProgress);
- mScanInProgress = true;
- mPurpleBuf.SelectPointers(*mBuilder);
- timeLog.Checkpoint("SelectPointers()");
- mBuilder->DoneAddingRoots();
- mIncrementalPhase = GraphBuildingPhase;
- }
- uint32_t
- nsCycleCollector::SuspectedCount()
- {
- CheckThreadSafety();
- return mPurpleBuf.Count();
- }
- void
- nsCycleCollector::Shutdown(bool aDoCollect)
- {
- CheckThreadSafety();
- // Always delete snow white objects.
- FreeSnowWhite(true);
- if (aDoCollect) {
- ShutdownCollect();
- }
- }
- void
- nsCycleCollector::RemoveObjectFromGraph(void* aObj)
- {
- if (IsIdle()) {
- return;
- }
- mGraph.RemoveObjectFromMap(aObj);
- }
- void
- nsCycleCollector::SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf,
- size_t* aObjectSize,
- size_t* aGraphSize,
- size_t* aPurpleBufferSize) const
- {
- *aObjectSize = aMallocSizeOf(this);
- *aGraphSize = mGraph.SizeOfExcludingThis(aMallocSizeOf);
- *aPurpleBufferSize = mPurpleBuf.SizeOfExcludingThis(aMallocSizeOf);
- // These fields are deliberately not measured:
- // - mJSContext: because it's non-owning and measured by JS reporters.
- // - mParams: because it only contains scalars.
- }
- JSPurpleBuffer*
- nsCycleCollector::GetJSPurpleBuffer()
- {
- if (!mJSPurpleBuffer) {
- // The Release call here confuses the GC analysis.
- JS::AutoSuppressGCAnalysis nogc;
- // JSPurpleBuffer keeps itself alive, but we need to create it in such a way
- // that it ends up in the normal purple buffer. That happens when the
- // RefPtr goes out of scope and calls Release.
- RefPtr<JSPurpleBuffer> pb = new JSPurpleBuffer(mJSPurpleBuffer);
- }
- return mJSPurpleBuffer;
- }
- ////////////////////////////////////////////////////////////////////////
- // Module public API (exported in nsCycleCollector.h)
- // Just functions that redirect into the singleton, once it's built.
- ////////////////////////////////////////////////////////////////////////
- void
- nsCycleCollector_registerJSContext(CycleCollectedJSContext* aCx)
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- MOZ_ASSERT(data->mCollector);
- // But we shouldn't already have a context.
- MOZ_ASSERT(!data->mContext);
- data->mContext = aCx;
- data->mCollector->RegisterJSContext(aCx);
- }
- void
- nsCycleCollector_forgetJSContext()
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- // And we shouldn't have already forgotten our context.
- MOZ_ASSERT(data->mContext);
- // But it may have shut down already.
- if (data->mCollector) {
- data->mCollector->ForgetJSContext();
- data->mContext = nullptr;
- } else {
- data->mContext = nullptr;
- delete data;
- sCollectorData.set(nullptr);
- }
- }
- /* static */ CycleCollectedJSContext*
- CycleCollectedJSContext::Get()
- {
- CollectorData* data = sCollectorData.get();
- if (data) {
- return data->mContext;
- }
- return nullptr;
- }
- MOZ_NEVER_INLINE static void
- SuspectAfterShutdown(void* aPtr, nsCycleCollectionParticipant* aCp,
- nsCycleCollectingAutoRefCnt* aRefCnt,
- bool* aShouldDelete)
- {
- if (aRefCnt->get() == 0) {
- if (!aShouldDelete) {
- // The CC is shut down, so we can't be in the middle of an ICC.
- CanonicalizeParticipant(&aPtr, &aCp);
- aRefCnt->stabilizeForDeletion();
- aCp->DeleteCycleCollectable(aPtr);
- } else {
- *aShouldDelete = true;
- }
- } else {
- // Make sure we'll get called again.
- aRefCnt->RemoveFromPurpleBuffer();
- }
- }
- void
- NS_CycleCollectorSuspect3(void* aPtr, nsCycleCollectionParticipant* aCp,
- nsCycleCollectingAutoRefCnt* aRefCnt,
- bool* aShouldDelete)
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- if (MOZ_LIKELY(data->mCollector)) {
- data->mCollector->Suspect(aPtr, aCp, aRefCnt);
- return;
- }
- SuspectAfterShutdown(aPtr, aCp, aRefCnt, aShouldDelete);
- }
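- // How objects get here: the cycle-collecting refcount machinery (the
- // nsCycleCollectingAutoRefCnt helpers in nsISupportsImpl.h) is expected to
- // call NS_CycleCollectorSuspect3() whenever an object becomes eligible for
- // suspicion, so every such object either lands in the purple buffer via
- // Suspect() or, once the collector is gone, is handled by
- // SuspectAfterShutdown() above. aShouldDelete only matters on the shutdown
- // path: a caller that passes a non-null pointer is asking to perform the
- // final deletion itself instead of having it happen under this call.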
- uint32_t
- nsCycleCollector_suspectedCount()
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- if (!data->mCollector) {
- return 0;
- }
- return data->mCollector->SuspectedCount();
- }
- bool
- nsCycleCollector_init()
- {
- #ifdef DEBUG
- static bool sInitialized;
- MOZ_ASSERT(NS_IsMainThread(), "Wrong thread!");
- MOZ_ASSERT(!sInitialized, "Called twice!?");
- sInitialized = true;
- #endif
- return sCollectorData.init();
- }
- void
- nsCycleCollector_startup()
- {
- if (sCollectorData.get()) {
- MOZ_CRASH();
- }
- CollectorData* data = new CollectorData;
- data->mCollector = new nsCycleCollector();
- data->mContext = nullptr;
- sCollectorData.set(data);
- }
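- // Expected lifecycle of the module API on a collecting thread (a sketch
- // assembled from the functions in this file, not a verbatim caller):
- //
- //   nsCycleCollector_init();                  // once, main thread only
- //   nsCycleCollector_startup();               // create this thread's collector
- //   nsCycleCollector_registerJSContext(aCx);  // if the thread runs JS
- //   ... nsCycleCollector_collect() / _collectSlice() / _forgetSkippable() ...
- //   nsCycleCollector_forgetJSContext();
- //   nsCycleCollector_shutdown();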
- void
- nsCycleCollector_setBeforeUnlinkCallback(CC_BeforeUnlinkCallback aCB)
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- MOZ_ASSERT(data->mCollector);
- data->mCollector->SetBeforeUnlinkCallback(aCB);
- }
- void
- nsCycleCollector_setForgetSkippableCallback(CC_ForgetSkippableCallback aCB)
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- MOZ_ASSERT(data->mCollector);
- data->mCollector->SetForgetSkippableCallback(aCB);
- }
- void
- nsCycleCollector_forgetSkippable(bool aRemoveChildlessNodes,
- bool aAsyncSnowWhiteFreeing)
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- MOZ_ASSERT(data->mCollector);
- PROFILER_LABEL("nsCycleCollector", "forgetSkippable",
- js::ProfileEntry::Category::CC);
- TimeLog timeLog;
- data->mCollector->ForgetSkippable(aRemoveChildlessNodes,
- aAsyncSnowWhiteFreeing);
- timeLog.Checkpoint("ForgetSkippable()");
- }
- void
- nsCycleCollector_dispatchDeferredDeletion(bool aContinuation, bool aPurge)
- {
- CycleCollectedJSContext* cx = CycleCollectedJSContext::Get();
- if (cx) {
- cx->DispatchDeferredDeletion(aContinuation, aPurge);
- }
- }
- bool
- nsCycleCollector_doDeferredDeletion()
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- MOZ_ASSERT(data->mCollector);
- MOZ_ASSERT(data->mContext);
- return data->mCollector->FreeSnowWhite(false);
- }
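- // The two functions above are the two halves of deferred deletion:
- // nsCycleCollector_dispatchDeferredDeletion() asks the CycleCollectedJSContext
- // to schedule the work, and that work is expected to call back into
- // nsCycleCollector_doDeferredDeletion(), which runs FreeSnowWhite(false) and
- // returns its result so the caller can decide whether to reschedule (see the
- // aContinuation flag above). This is sketched from the code in this file;
- // the scheduling details live in CycleCollectedJSContext.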
- already_AddRefed<nsICycleCollectorLogSink>
- nsCycleCollector_createLogSink()
- {
- nsCOMPtr<nsICycleCollectorLogSink> sink = new nsCycleCollectorLogSinkToFile();
- return sink.forget();
- }
- void
- nsCycleCollector_collect(nsICycleCollectorListener* aManualListener)
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- MOZ_ASSERT(data->mCollector);
- PROFILER_LABEL("nsCycleCollector", "collect",
- js::ProfileEntry::Category::CC);
- SliceBudget unlimitedBudget = SliceBudget::unlimited();
- data->mCollector->Collect(ManualCC, unlimitedBudget, aManualListener);
- }
- void
- nsCycleCollector_collectSlice(SliceBudget& budget,
- bool aPreferShorterSlices)
- {
- CollectorData* data = sCollectorData.get();
- // We should have started the cycle collector by now.
- MOZ_ASSERT(data);
- MOZ_ASSERT(data->mCollector);
- PROFILER_LABEL("nsCycleCollector", "collectSlice",
- js::ProfileEntry::Category::CC);
- data->mCollector->Collect(SliceCC, budget, nullptr, aPreferShorterSlices);
- }
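- // Usage sketch for the two collection entry points above (an assumed caller,
- // e.g. a scheduler; kSliceBudgetMs is a hypothetical constant, not from this
- // file): an incremental CC is a series of budgeted slices, optionally
- // finished off by a manual collection, which uses an unlimited budget.
- //
- //   SliceBudget budget = SliceBudget(TimeBudget(kSliceBudgetMs));
- //   nsCycleCollector_collectSlice(budget, /* aPreferShorterSlices = */ true);
- //   ...
- //   nsCycleCollector_collect(nullptr);  // runs with SliceBudget::unlimited()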
- void
- nsCycleCollector_prepareForGarbageCollection()
- {
- CollectorData* data = sCollectorData.get();
- MOZ_ASSERT(data);
- if (!data->mCollector) {
- return;
- }
- data->mCollector->PrepareForGarbageCollection();
- }
- void
- nsCycleCollector_finishAnyCurrentCollection()
- {
- CollectorData* data = sCollectorData.get();
- MOZ_ASSERT(data);
- if (!data->mCollector) {
- return;
- }
- data->mCollector->FinishAnyCurrentCollection();
- }
- void
- nsCycleCollector_shutdown(bool aDoCollect)
- {
- CollectorData* data = sCollectorData.get();
- if (data) {
- MOZ_ASSERT(data->mCollector);
- PROFILER_LABEL("nsCycleCollector", "shutdown",
- js::ProfileEntry::Category::CC);
- data->mCollector->Shutdown(aDoCollect);
- data->mCollector = nullptr;
- if (data->mContext) {
- // Run any remaining tasks that may have been enqueued via
- // RunInStableState during the final cycle collection.
- data->mContext->ProcessStableStateQueue();
- }
- if (!data->mContext) {
- delete data;
- sCollectorData.set(nullptr);
- }
- }
- }