var-tracking.c 273 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
022210223102241022510226102271022810229102301023110232102331023410235102361023710238102391024010241102421024310244102451024610247102481024910250102511025210253102541025510256102571025810259102601026110262102631026410265102661026710268102691027010271102721027310274102751027610277102781027910280102811028210283102841028510286102871028810289102901029110292102931029410295102961029710298102991030010301103021030310304103051030610307103081030910310103111031210313103141031510316103171031810319103201032110322103231032410325103261032710328103291033010331103321033310334103351033610337103381033910340103411034210343103441034510346103471034810349103501035110352103531035410355103561035710358103591036010361103621036310364103651036610367103681036910370103711037210373103741037510376103771037810379103801038110382103831038410385103861038710388103891039010391103921039310394103951039610397103981039910400104011040210403104041040510406104071040810409104101041110412104131041410415104161041710418104191042010421104221042310424104251042610427104281042910430104311043210433
  1. /* Variable tracking routines for the GNU compiler.
  2. Copyright (C) 2002-2015 Free Software Foundation, Inc.
  3. This file is part of GCC.
  4. GCC is free software; you can redistribute it and/or modify it
  5. under the terms of the GNU General Public License as published by
  6. the Free Software Foundation; either version 3, or (at your option)
  7. any later version.
  8. GCC is distributed in the hope that it will be useful, but WITHOUT
  9. ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  10. or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
  11. License for more details.
  12. You should have received a copy of the GNU General Public License
  13. along with GCC; see the file COPYING3. If not see
  14. <http://www.gnu.org/licenses/>. */
  15. /* This file contains the variable tracking pass. It computes where
  16. variables are located (which registers or where in memory) at each position
  17. in instruction stream and emits notes describing the locations.
  18. Debug information (DWARF2 location lists) is finally generated from
  19. these notes.
  20. With this debug information, it is possible to show variables
  21. even when debugging optimized code.
  22. How does the variable tracking pass work?
  23. First, it scans RTL code for uses, stores and clobbers (register/memory
  24. references in instructions), for call insns and for stack adjustments
  25. separately for each basic block and saves them to an array of micro
  26. operations.
  27. The micro operations of one instruction are ordered so that
  28. pre-modifying stack adjustment < use < use with no var < call insn <
  29. < clobber < set < post-modifying stack adjustment
  30. Then, a forward dataflow analysis is performed to find out how locations
  31. of variables change through code and to propagate the variable locations
  32. along control flow graph.
  33. The IN set for basic block BB is computed as a union of OUT sets of BB's
  34. predecessors, the OUT set for BB is copied from the IN set for BB and
  35. is changed according to micro operations in BB.
  36. The IN and OUT sets for basic blocks consist of a current stack adjustment
  37. (used for adjusting offset of variables addressed using stack pointer),
  38. the table of structures describing the locations of parts of a variable
  39. and for each physical register a linked list for each physical register.
  40. The linked list is a list of variable parts stored in the register,
  41. i.e. it is a list of triplets (reg, decl, offset) where decl is
  42. REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for
  43. effective deleting appropriate variable parts when we set or clobber the
  44. register.
  45. There may be more than one variable part in a register. The linked lists
  46. should be pretty short so it is a good data structure here.
  47. For example in the following code, register allocator may assign same
  48. register to variables A and B, and both of them are stored in the same
  49. register in CODE:
  50. if (cond)
  51. set A;
  52. else
  53. set B;
  54. CODE;
  55. if (cond)
  56. use A;
  57. else
  58. use B;
  59. Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
  60. are emitted to appropriate positions in RTL code. Each such a note describes
  61. the location of one variable at the point in instruction stream where the
  62. note is. There is no need to emit a note for each variable before each
  63. instruction, we only emit these notes where the location of variable changes
  64. (this means that we also emit notes for changes between the OUT set of the
  65. previous block and the IN set of the current block).
  66. The notes consist of two parts:
  67. 1. the declaration (from REG_EXPR or MEM_EXPR)
  68. 2. the location of a variable - it is either a simple register/memory
  69. reference (for simple variables, for example int),
  70. or a parallel of register/memory references (for a large variables
  71. which consist of several parts, for example long long).
  72. */
  73. #include "config.h"
  74. #include "system.h"
  75. #include "coretypes.h"
  76. #include "tm.h"
  77. #include "rtl.h"
  78. #include "hash-set.h"
  79. #include "machmode.h"
  80. #include "vec.h"
  81. #include "double-int.h"
  82. #include "input.h"
  83. #include "alias.h"
  84. #include "symtab.h"
  85. #include "wide-int.h"
  86. #include "inchash.h"
  87. #include "tree.h"
  88. #include "varasm.h"
  89. #include "stor-layout.h"
  90. #include "hash-map.h"
  91. #include "hash-table.h"
  92. #include "predict.h"
  93. #include "hard-reg-set.h"
  94. #include "function.h"
  95. #include "dominance.h"
  96. #include "cfg.h"
  97. #include "cfgrtl.h"
  98. #include "cfganal.h"
  99. #include "basic-block.h"
  100. #include "tm_p.h"
  101. #include "flags.h"
  102. #include "insn-config.h"
  103. #include "reload.h"
  104. #include "sbitmap.h"
  105. #include "alloc-pool.h"
  106. #include "regs.h"
  107. #include "hashtab.h"
  108. #include "statistics.h"
  109. #include "real.h"
  110. #include "fixed-value.h"
  111. #include "expmed.h"
  112. #include "dojump.h"
  113. #include "explow.h"
  114. #include "calls.h"
  115. #include "emit-rtl.h"
  116. #include "stmt.h"
  117. #include "expr.h"
  118. #include "tree-pass.h"
  119. #include "bitmap.h"
  120. #include "tree-dfa.h"
  121. #include "tree-ssa.h"
  122. #include "cselib.h"
  123. #include "target.h"
  124. #include "params.h"
  125. #include "diagnostic.h"
  126. #include "tree-pretty-print.h"
  127. #include "recog.h"
  128. #include "rtl-iter.h"
  129. #include "fibonacci_heap.h"
/* Heap of basic blocks keyed by a long priority.  NOTE(review):
   presumably the worklist for the dataflow iteration in
   vt_find_locations — confirm against its definition.  */
typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};
/* Printable names for the micro operation types, indexed by
   enum micro_operation_type.  Keep in sync with the enum above.  */
static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};
/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment (only valid when TYPE == MO_ADJUST).  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;
/* A declaration of a variable, or an RTL value being handled like a
   declaration.  The pointee is either a tree (a decl) or an rtx (a
   VALUE); use the dv_* accessors below instead of casting directly.  */
typedef void *decl_or_value;
  202. /* Return true if a decl_or_value DV is a DECL or NULL. */
  203. static inline bool
  204. dv_is_decl_p (decl_or_value dv)
  205. {
  206. return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
  207. }
  208. /* Return true if a decl_or_value is a VALUE rtl. */
  209. static inline bool
  210. dv_is_value_p (decl_or_value dv)
  211. {
  212. return dv && !dv_is_decl_p (dv);
  213. }
  214. /* Return the decl in the decl_or_value. */
  215. static inline tree
  216. dv_as_decl (decl_or_value dv)
  217. {
  218. gcc_checking_assert (dv_is_decl_p (dv));
  219. return (tree) dv;
  220. }
  221. /* Return the value in the decl_or_value. */
  222. static inline rtx
  223. dv_as_value (decl_or_value dv)
  224. {
  225. gcc_checking_assert (dv_is_value_p (dv));
  226. return (rtx)dv;
  227. }
/* Return the opaque pointer in the decl_or_value.  Useful for
   pointer-identity comparisons that do not care whether DV holds a
   decl or a value (see variable_hasher::equal below).  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   chain is the best data structure.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
} *attrs;
/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialization status of this location.  */
  enum var_init_status init;
} *location_chain;
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;
} loc_exp_dep;
/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;
/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expand FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};
/* Structure describing one part of variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;
/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;
/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  Trailing array; the allocation pools below
     size it for either one entry or MAX_VAR_PARTS entries.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;
/* Pointer to the BB's information specific to variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as an HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)

#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  Uses a GNU statement expression so the assert fires on
   every access in checking builds.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
/* Non-checking variants: plain field accesses.  */
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)  (VAR_LOC_1PAUX (var)		      \
			       ? VAR_LOC_1PAUX (var)->backlinks	      \
			       : NULL)
#define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var)		      \
			       ? &VAR_LOC_1PAUX (var)->backlinks      \
			       : NULL)
#define VAR_LOC_FROM(var)     (VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)    (VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)  (VAR_LOC_1PAUX (var)		      \
			       ? &VAR_LOC_1PAUX (var)->deps	      \
			       : NULL)
  395. typedef unsigned int dvuid;
  396. /* Return the uid of DV. */
  397. static inline dvuid
  398. dv_uid (decl_or_value dv)
  399. {
  400. if (dv_is_value_p (dv))
  401. return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  402. else
  403. return DECL_UID (dv_as_decl (dv));
  404. }
  405. /* Compute the hash from the uid. */
  406. static inline hashval_t
  407. dv_uid2hash (dvuid uid)
  408. {
  409. return uid;
  410. }
  411. /* The hash function for a mask table in a shared_htab chain. */
  412. static inline hashval_t
  413. dv_htab_hash (decl_or_value dv)
  414. {
  415. return dv_uid2hash (dv_uid (dv));
  416. }
static void variable_htab_free (void *);

/* Variable hashtable helpers: hasher traits for hash_table keyed by
   a variable's decl_or_value.  */
struct variable_hasher
{
  typedef variable_def value_type;
  typedef void compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};
  427. /* The hash function for variable_htab, computes the hash value
  428. from the declaration of variable X. */
  429. inline hashval_t
  430. variable_hasher::hash (const value_type *v)
  431. {
  432. return dv_htab_hash (v->dv);
  433. }
  434. /* Compare the declaration of variable X with declaration Y. */
  435. inline bool
  436. variable_hasher::equal (const value_type *v, const compare_type *y)
  437. {
  438. decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);
  439. return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
  440. }
/* Free the element of VARIABLE_HTAB (its type is struct variable_def).
   Forwards to variable_htab_free, declared above.  */
inline void
variable_hasher::remove (value_type *var)
{
  variable_htab_free (var);
}
typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
} emit_note_data;
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must be first unshared before modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
} *shared_hash;
/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Variables that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;
} *variable_tracking_info;
/* Alloc pool for struct attrs_def.  */
static alloc_pool attrs_pool;

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */
static alloc_pool var_pool;

/* Alloc pool for struct variable_def with a single var_part entry.  */
static alloc_pool valvar_pool;

/* Alloc pool for struct location_chain_def.  */
static alloc_pool loc_chain_pool;

/* Alloc pool for struct shared_hash_def.  */
static alloc_pool shared_hash_pool;

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
static alloc_pool loc_exp_dep_pool;

/* Changed variables, notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
/* Pairing of a parameter's outgoing and incoming registers on targets
   with register windows.  */
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;

/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;
/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);
static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
  581. static void dump_vars (variable_table_type *);
  582. static void dump_dataflow_set (dataflow_set *);
  583. static void dump_dataflow_sets (void);
  584. static void set_dv_changed (decl_or_value, bool);
  585. static void variable_was_changed (variable, dataflow_set *);
  586. static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
  587. decl_or_value, HOST_WIDE_INT,
  588. enum var_init_status, rtx);
  589. static void set_variable_part (dataflow_set *, rtx,
  590. decl_or_value, HOST_WIDE_INT,
  591. enum var_init_status, rtx, enum insert_option);
  592. static variable_def **clobber_slot_part (dataflow_set *, rtx,
  593. variable_def **, HOST_WIDE_INT, rtx);
  594. static void clobber_variable_part (dataflow_set *, rtx,
  595. decl_or_value, HOST_WIDE_INT, rtx);
  596. static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
  597. HOST_WIDE_INT);
  598. static void delete_variable_part (dataflow_set *, rtx,
  599. decl_or_value, HOST_WIDE_INT);
  600. static void emit_notes_in_bb (basic_block, dataflow_set *);
  601. static void vt_emit_notes (void);
  602. static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
  603. static void vt_add_function_parameters (void);
  604. static bool vt_initialize (void);
  605. static void vt_finalize (void);
/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.
   ARG points to a HOST_WIDE_INT[2] array: element 0 accumulates the
   pre-modification adjustment, element 1 the post-modification one.
   Adjustments are accumulated negated (a positive srcoff decreases the
   recorded amount).  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  /* Only auto-modifications of the stack pointer are of interest.  */
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      /* Index 0 (pre) for PRE_MODIFY, index 1 (post) for POST_MODIFY.  */
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}
/* Given a SET, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int)))
	 A plain SET of the stack pointer counts entirely as a POST
	 adjustment.  Note the sign inversion: MINUS adds to *post,
	 PLUS subtracts, matching the negated accumulation convention
	 used by stack_adjust_offset_pre_post_cb.  */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }

  /* Otherwise scan the pattern for auto-inc/dec/modify of sp and
     accumulate the pre/post amounts into RES.  */
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
/* Given an INSN, calculate the amount of stack adjustment it contains
   PRE- and POST-modifying stack pointer.  *PRE and *POST are zeroed
   first, then accumulated over all SETs found in the pattern (or in
   the REG_FRAME_RELATED_EXPR note, if present).  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  /* For frame-related insns the attached note, when present, describes
     the actual effect on the stack better than the pattern itself.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  /* The block's incoming adjustment is the outgoing one of the
	     first predecessor we reach it from.  */
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  /* Accumulate the pre/post adjustments of every insn in the
	     block to compute its outgoing adjustment.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}
/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to it and offset for it.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;
  782. /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
  783. or hard_frame_pointer_rtx. */
  784. static inline rtx
  785. compute_cfa_pointer (HOST_WIDE_INT adjustment)
  786. {
  787. return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
  788. }
/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  /* True while processing the SET_DEST side of a store; REG
     replacements are suppressed on the LHS outside of MEM addresses.  */
  bool store;
  /* Mode of the enclosing MEM while its address is being processed,
     VOIDmode otherwise.  */
  machine_mode mem_mode;
  /* Negated stack adjustment of the current block, passed to
     compute_cfa_pointer when rewriting stack_pointer_rtx.  */
  HOST_WIDE_INT stack_adjust;
  /* EXPR_LIST of auto-inc/dec side effects stripped from addresses,
     to be re-added to the insn as separate SETs by adjust_insn.  */
  rtx_expr_list *side_effects;
};
/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetics to narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;  /* Intentionally shadows the parameter.  */
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    /* A register with a known cselib value in the wider mode
	       must not be narrowed, nor one for which the narrowing
	       subreg would be invalid.  */
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    /* Only the shifted operand is narrowed; restrict the walk
	       to it and skip the shift count.  */
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}
/* Transform X into narrower mode MODE from wider mode WMODE.
   Only codes accepted by use_narrower_mode_test are handled here;
   anything else is a caller bug (gcc_unreachable).  */

static rtx
use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      /* Narrow both operands recursively, then rebuild in MODE.  */
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (op1)))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      gcc_unreachable ();
    }
}
/* Helper function for adjusting used MEMs.  Callback for
   simplify_replace_fn_rtx: returns the replacement for LOC, or NULL_RTX
   to let the default recursion handle it.  DATA points to a struct
   adjust_mem_data.  Rewrites sp/fp registers into CFA-based
   expressions, strips auto-inc/dec addressing (recording the side
   effects in DATA), and delegitimizes/derefs MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      /* Recurse into the address with store=false and mem_mode set to
	 the MEM's mode, restoring both afterwards.  */
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;

      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      /* For a pre-modification the used address is base +/- size;
	 compute it into ADDR, then fall through to share the common
	 side-effect recording below.  */
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      /* Record base := base +/- size as a side effect of the insn.  */
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      /* The used address of a PRE_MODIFY is the new value.  */
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (VOIDmode,
							XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      /* Lowpart subregs of integer arithmetic can often be expressed as
	 the same arithmetic done directly in the narrower mode.  */
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
	  && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
	  && GET_MODE_PRECISION (GET_MODE (tem))
	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
  1016. /* Helper function for replacement of uses. */
  1017. static void
  1018. adjust_mem_uses (rtx *x, void *data)
  1019. {
  1020. rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  1021. if (new_x != *x)
  1022. validate_change (NULL_RTX, x, new_x, true);
  1023. }
  1024. /* Helper function for replacement of stores. */
  1025. static void
  1026. adjust_mem_stores (rtx loc, const_rtx expr, void *data)
  1027. {
  1028. if (MEM_P (loc))
  1029. {
  1030. rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
  1031. adjust_mems, data);
  1032. if (new_dest != SET_DEST (expr))
  1033. {
  1034. rtx xexpr = CONST_CAST_RTX (expr);
  1035. validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
  1036. }
  1037. }
  1038. }
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}
      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL;

  /* First rewrite the store destinations, then the uses.  */
  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    /* Re-share the three vectors from the first ASM_OPERANDS
	       if adjust_mems left this one with stale copies.  */
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  /* Re-attach the auto-inc/dec side effects stripped by adjust_mems
     as extra SETs wrapped in a PARALLEL around the pattern.  */
  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.
   Only VALUEs and DEBUG_EXPR_DECLs are valid here; other decls have
   no single rtx representation.  */

static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}
/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */

static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  /* Without debug insns no one-part tracking is done at all.  */
  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}
  1186. /* Return the variable pool to be used for a dv of type ONEPART. */
  1187. static inline alloc_pool
  1188. onepart_pool (onepart_enum_t onepart)
  1189. {
  1190. return onepart ? valvar_pool : var_pool;
  1191. }
  1192. /* Build a decl_or_value out of a decl. */
  1193. static inline decl_or_value
  1194. dv_from_decl (tree decl)
  1195. {
  1196. decl_or_value dv;
  1197. dv = decl;
  1198. gcc_checking_assert (dv_is_decl_p (dv));
  1199. return dv;
  1200. }
  1201. /* Build a decl_or_value out of a value. */
  1202. static inline decl_or_value
  1203. dv_from_value (rtx value)
  1204. {
  1205. decl_or_value dv;
  1206. dv = value;
  1207. gcc_checking_assert (dv_is_value_p (dv));
  1208. return dv;
  1209. }
/* Return a value or the decl of a debug_expr as a decl_or_value.
   X must be either a VALUE or a DEBUG_EXPR rtx.  */

static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      /* The decl's RTL must point back at this very DEBUG_EXPR.  */
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}
  1229. extern void debug_dv (decl_or_value dv);
  1230. DEBUG_FUNCTION void
  1231. debug_dv (decl_or_value dv)
  1232. {
  1233. if (dv_is_value_p (dv))
  1234. debug_rtx (dv_as_value (dv));
  1235. else
  1236. debug_generic_stmt (dv_as_decl (dv));
  1237. }
static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).
   Decrements the refcount and only releases storage when it drops to
   zero: the location chains of every part, the one-part auxiliary data
   if present, and finally the variable itself back to its pool.  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  pool_free (loc_chain_pool, node);
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  pool_free (onepart_pool (var->onepart), var);
}
  1272. /* Initialize the set (array) SET of attrs to empty lists. */
  1273. static void
  1274. init_attrs_list_set (attrs *set)
  1275. {
  1276. int i;
  1277. for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
  1278. set[i] = NULL;
  1279. }
  1280. /* Make the list *LISTP empty. */
  1281. static void
  1282. attrs_list_clear (attrs *listp)
  1283. {
  1284. attrs list, next;
  1285. for (list = *listp; list; list = next)
  1286. {
  1287. next = list->next;
  1288. pool_free (attrs_pool, list);
  1289. }
  1290. *listp = NULL;
  1291. }
  1292. /* Return true if the pair of DECL and OFFSET is the member of the LIST. */
  1293. static attrs
  1294. attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
  1295. {
  1296. for (; list; list = list->next)
  1297. if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
  1298. return list;
  1299. return NULL;
  1300. }
  1301. /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */
  1302. static void
  1303. attrs_list_insert (attrs *listp, decl_or_value dv,
  1304. HOST_WIDE_INT offset, rtx loc)
  1305. {
  1306. attrs list;
  1307. list = (attrs) pool_alloc (attrs_pool);
  1308. list->loc = loc;
  1309. list->dv = dv;
  1310. list->offset = offset;
  1311. list->next = *listp;
  1312. *listp = list;
  1313. }
/* Copy all nodes from SRC and create a list *DSTP of the copies.
   *DSTP is cleared first.  Note that nodes are prepended one by one,
   so the resulting list is in reverse order of SRC.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs n;

  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      n = (attrs) pool_alloc (attrs_pool);
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}
  1330. /* Add all nodes from SRC which are not in *DSTP to *DSTP. */
  1331. static void
  1332. attrs_list_union (attrs *dstp, attrs src)
  1333. {
  1334. for (; src; src = src->next)
  1335. {
  1336. if (!attrs_list_member (*dstp, src->dv, src->offset))
  1337. attrs_list_insert (dstp, src->dv, src->offset, src->loc);
  1338. }
  1339. }
/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP, which must be empty on entry.  All of SRC's non-onepart
   nodes are taken; SRC2 contributes only those not already present.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}
/* Shared hashtable support.  */

/* Return true if VARS is shared, i.e. referenced by more than one
   dataflow set.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}
/* Return the underlying hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}
/* Return true if VAR is shared, or maybe because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}
/* Copy variables into a new hash table.  VARS must be shared on entry;
   its refcount is decremented and a fresh table with refcount 1
   containing copies of all its variables is returned.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool);
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}
  1391. /* Increment reference counter on VARS and return it. */
  1392. static inline shared_hash
  1393. shared_hash_copy (shared_hash vars)
  1394. {
  1395. vars->refcount++;
  1396. return vars;
  1397. }
/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      pool_free (shared_hash_pool, vars);
    }
}
  1410. /* Unshare *PVARS if shared and return slot for DV. If INS is
  1411. INSERT, insert it if not already present. */
  1412. static inline variable_def **
  1413. shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
  1414. hashval_t dvhash, enum insert_option ins)
  1415. {
  1416. if (shared_hash_shared (*pvars))
  1417. *pvars = shared_hash_unshare (*pvars);
  1418. return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
  1419. }
  1420. static inline variable_def **
  1421. shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
  1422. enum insert_option ins)
  1423. {
  1424. return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
  1425. }
  1426. /* Return slot for DV, if it is already present in the hash table.
  1427. If it is not present, insert it only VARS is not shared, otherwise
  1428. return NULL. */
  1429. static inline variable_def **
  1430. shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
  1431. {
  1432. return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
  1433. shared_hash_shared (vars)
  1434. ? NO_INSERT : INSERT);
  1435. }
  1436. static inline variable_def **
  1437. shared_hash_find_slot (shared_hash vars, decl_or_value dv)
  1438. {
  1439. return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
  1440. }
  1441. /* Return slot for DV only if it is already present in the hash table. */
  1442. static inline variable_def **
  1443. shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
  1444. hashval_t dvhash)
  1445. {
  1446. return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
  1447. }
  1448. static inline variable_def **
  1449. shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
  1450. {
  1451. return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
  1452. }
  1453. /* Return variable for DV or NULL if not already present in the hash
  1454. table. */
  1455. static inline variable
  1456. shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
  1457. {
  1458. return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
  1459. }
  1460. static inline variable
  1461. shared_hash_find (shared_hash vars, decl_or_value dv)
  1462. {
  1463. return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
  1464. }
  1465. /* Return true if TVAL is better than CVAL as a canonival value. We
  1466. choose lowest-numbered VALUEs, using the RTX address as a
  1467. tie-breaker. The idea is to arrange them into a star topology,
  1468. such that all of them are at most one step away from the canonical
  1469. value, and the canonical value has backlinks to all of them, in
  1470. addition to all the actual locations. We don't enforce this
  1471. topology throughout the entire dataflow analysis, though.
  1472. */
  1473. static inline bool
  1474. canon_value_cmp (rtx tval, rtx cval)
  1475. {
  1476. return !cval
  1477. || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
  1478. }
  1479. static bool dst_can_be_shared;
/* Return a copy of a variable VAR and insert it to dataflow set SET.
   SLOT is the hash-table slot currently holding VAR; the returned
   slot (possibly recomputed if SET's tables had to be unshared) holds
   the copy.  INITIALIZED is the minimum initialization status to give
   every copied location-chain node.  */
static variable_def **
unshare_variable (dataflow_set *set, variable_def **slot, variable var,
		  enum var_init_status initialized)
{
  variable new_var;
  int i;

  /* The copy starts with a single reference; VAR gives one up.  */
  new_var = (variable) pool_alloc (onepart_pool (var->onepart));
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain node;
      location_chain *nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);

      /* Deep-copy the location chain, raising each node's init status
	 to at least INITIALIZED and dropping MEM set_src values.  */
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain new_lc;

	  new_lc = (location_chain) pool_alloc (loc_chain_pool);
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  /* SLOT may point into a table we must not modify: recompute it
     after unsharing SET's tables, or against the non-traversed table
     when traversal is under way over a different one.  */
  if (shared_hash_shared (set->vars))
    slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
  *slot = new_var;

  /* If VAR is pending in changed_variables, transfer that membership
     (and its reference) to the copy.  */
  if (var->in_changed_variables)
    {
      variable_def **cslot
	= changed_variables->find_slot_with_hash (var->dv,
						  dv_htab_hash (var->dv),
						  NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
  1552. /* Copy all variables from hash table SRC to hash table DST. */
  1553. static void
  1554. vars_copy (variable_table_type *dst, variable_table_type *src)
  1555. {
  1556. variable_iterator_type hi;
  1557. variable var;
  1558. FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
  1559. {
  1560. variable_def **dstp;
  1561. var->refcount++;
  1562. dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
  1563. INSERT);
  1564. *dstp = var;
  1565. }
  1566. }
  1567. /* Map a decl to its main debug decl. */
  1568. static inline tree
  1569. var_debug_decl (tree decl)
  1570. {
  1571. if (decl && TREE_CODE (decl) == VAR_DECL
  1572. && DECL_HAS_DEBUG_EXPR_P (decl))
  1573. {
  1574. tree debugdecl = DECL_DEBUG_EXPR (decl);
  1575. if (DECL_P (debugdecl))
  1576. decl = debugdecl;
  1577. }
  1578. return decl;
  1579. }
  1580. /* Set the register LOC to contain DV, OFFSET. */
  1581. static void
  1582. var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
  1583. decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
  1584. enum insert_option iopt)
  1585. {
  1586. attrs node;
  1587. bool decl_p = dv_is_decl_p (dv);
  1588. if (decl_p)
  1589. dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
  1590. for (node = set->regs[REGNO (loc)]; node; node = node->next)
  1591. if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
  1592. && node->offset == offset)
  1593. break;
  1594. if (!node)
  1595. attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
  1596. set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
  1597. }
  1598. /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
  1599. static void
  1600. var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
  1601. rtx set_src)
  1602. {
  1603. tree decl = REG_EXPR (loc);
  1604. HOST_WIDE_INT offset = REG_OFFSET (loc);
  1605. var_reg_decl_set (set, loc, initialized,
  1606. dv_from_decl (decl), offset, set_src, INSERT);
  1607. }
  1608. static enum var_init_status
  1609. get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
  1610. {
  1611. variable var;
  1612. int i;
  1613. enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
  1614. if (! flag_var_tracking_uninit)
  1615. return VAR_INIT_STATUS_INITIALIZED;
  1616. var = shared_hash_find (set->vars, dv);
  1617. if (var)
  1618. {
  1619. for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
  1620. {
  1621. location_chain nextp;
  1622. for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
  1623. if (rtx_equal_p (nextp->loc, loc))
  1624. {
  1625. ret_val = nextp->init;
  1626. break;
  1627. }
  1628. }
  1629. }
  1630. return ret_val;
  1631. }
/* Delete current content of register LOC in dataflow set SET and set
   the register to contain REG_EXPR (LOC), REG_OFFSET (LOC).  If
   MODIFY is true, any other live copies of the same variable part are
   also deleted from the dataflow set, otherwise the variable part is
   assumed to be copied from another location holding the same
   part.  */
static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = REG_OFFSET (loc);
  attrs node, next;
  attrs *nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv_from_decl (decl));

  /* Walk the register's attribute list, unlinking in place every
     binding other than the one for (DECL, OFFSET).  NEXTP always
     points at the link that reaches the current node, so removal is
     a single store.  */
  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (dv_as_opaque (node->dv) != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	{
	  /* Keep this entry but make it refer to LOC itself.  */
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}
/* Delete the association of register LOC in dataflow set SET with any
   variables that aren't onepart.  If CLOBBER is true, also delete any
   other live copies of the same variable part, and delete the
   association with onepart dvs too.  */
static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
  attrs *nextp = &set->regs[REGNO (loc)];
  attrs node, next;

  if (clobber)
    {
      tree decl = REG_EXPR (loc);
      HOST_WIDE_INT offset = REG_OFFSET (loc);

      decl = var_debug_decl (decl);

      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
    }

  /* Unlink (in place) and free the attrs entries being removed;
     onepart entries survive unless CLOBBER.  */
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (clobber || !dv_onepart_p (node->dv))
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  pool_free (attrs_pool, node);
	  *nextp = next;
	}
      else
	nextp = &node->next;
    }
}
  1698. /* Delete content of register with number REGNO in dataflow set SET. */
  1699. static void
  1700. var_regno_delete (dataflow_set *set, int regno)
  1701. {
  1702. attrs *reg = &set->regs[regno];
  1703. attrs node, next;
  1704. for (node = *reg; node; node = next)
  1705. {
  1706. next = node->next;
  1707. delete_variable_part (set, node->loc, node->dv, node->offset);
  1708. pool_free (attrs_pool, node);
  1709. }
  1710. *reg = NULL;
  1711. }
  1712. /* Return true if I is the negated value of a power of two. */
  1713. static bool
  1714. negative_power_of_two_p (HOST_WIDE_INT i)
  1715. {
  1716. unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
  1717. return x == (x & -x);
  1718. }
  1719. /* Strip constant offsets and alignments off of LOC. Return the base
  1720. expression. */
  1721. static rtx
  1722. vt_get_canonicalize_base (rtx loc)
  1723. {
  1724. while ((GET_CODE (loc) == PLUS
  1725. || GET_CODE (loc) == AND)
  1726. && GET_CODE (XEXP (loc, 1)) == CONST_INT
  1727. && (GET_CODE (loc) != AND
  1728. || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
  1729. loc = XEXP (loc, 0);
  1730. return loc;
  1731. }
/* This caches canonicalized addresses for VALUEs, computed using
   information in the global cselib table.  */
static hash_map<rtx, rtx> *global_get_addr_cache;

/* This caches canonicalized addresses for VALUEs, computed using
   information from the global cache and information pertaining to a
   basic block being analyzed.  */
static hash_map<rtx, rtx> *local_get_addr_cache;

/* Forward declaration: the cache-filling functions below and
   vt_canonicalize_addr are mutually recursive.  */
static rtx vt_canonicalize_addr (dataflow_set *, rtx);
/* Return the canonical address for LOC, that must be a VALUE, using a
   cached global equivalence or computing it and storing it in the
   global cache.  */
static rtx
get_addr_from_global_cache (rtx const loc)
{
  rtx x;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
  if (existed)
    return *slot;

  x = canon_rtx (get_addr (loc));

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  /* If LOC resolved to something else, canonicalize that further;
     NULL set means only global (cselib) information is used.  */
  if (x != loc)
    {
      rtx nx = vt_canonicalize_addr (NULL, x);
      if (nx != x)
	{
	  /* The table may have moved during recursion, recompute
	     SLOT.  */
	  *global_get_addr_cache->get (loc) = x = nx;
	}
    }

  return x;
}
/* Return the canonical address for LOC, that must be a VALUE, using a
   cached local equivalence or computing it and storing it in the
   local cache.  */
static rtx
get_addr_from_local_cache (dataflow_set *set, rtx const loc)
{
  rtx x;
  decl_or_value dv;
  variable var;
  location_chain l;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
  if (existed)
    return *slot;

  x = get_addr_from_global_cache (loc);

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  /* Recurse to cache local expansion of X, or if we need to search
     for a VALUE in the expansion.  */
  if (x != loc)
    {
      rtx nx = vt_canonicalize_addr (set, x);
      if (nx != x)
	{
	  /* The recursive call may have grown the hash map; refetch
	     the slot instead of reusing the stale pointer.  */
	  slot = local_get_addr_cache->get (loc);
	  *slot = x = nx;
	}
      return x;
    }

  /* LOC is its own global canonical form; see whether SET binds it to
     something with a more canonical base.  */
  dv = dv_from_rtx (x);
  var = shared_hash_find (set->vars, dv);
  if (!var)
    return x;

  /* Look for an improved equivalent expression.  */
  for (l = var->var_part[0].loc_chain; l; l = l->next)
    {
      rtx base = vt_get_canonicalize_base (l->loc);
      if (GET_CODE (base) == VALUE
	  && canon_value_cmp (base, loc))
	{
	  rtx nx = vt_canonicalize_addr (set, l->loc);
	  if (x != nx)
	    {
	      /* Same table-growth caveat as above.  */
	      slot = local_get_addr_cache->get (loc);
	      *slot = x = nx;
	    }
	  break;
	}
    }

  return x;
}
/* Canonicalize LOC using equivalences from SET in addition to those
   in the cselib static table.  It expects a VALUE-based expression,
   and it will only substitute VALUEs with other VALUEs or
   function-global equivalences, so that, if two addresses have base
   VALUEs that are locally or globally related in ways that
   memrefs_conflict_p cares about, they will both canonicalize to
   expressions that have the same base VALUE.

   The use of VALUEs as canonical base addresses enables the canonical
   RTXs to remain unchanged globally, if they resolve to a constant,
   or throughout a basic block otherwise, so that they can be cached
   and the cache needs not be invalidated when REGs, MEMs or such
   change.  */
static rtx
vt_canonicalize_addr (dataflow_set *set, rtx oloc)
{
  /* Net constant displacement peeled off the address so far; it is
     re-applied at the end.  */
  HOST_WIDE_INT ofst = 0;
  machine_mode mode = GET_MODE (oloc);
  rtx loc = oloc;
  rtx x;
  bool retry = true;

  while (retry)
    {
      /* Accumulate constant offsets into OFST, leaving LOC as the
	 bare base.  */
      while (GET_CODE (loc) == PLUS
	     && GET_CODE (XEXP (loc, 1)) == CONST_INT)
	{
	  ofst += INTVAL (XEXP (loc, 1));
	  loc = XEXP (loc, 0);
	}

      /* Alignment operations can't normally be combined, so just
	 canonicalize the base and we're done.  We'll normally have
	 only one stack alignment anyway.  */
      if (GET_CODE (loc) == AND
	  && GET_CODE (XEXP (loc, 1)) == CONST_INT
	  && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
	{
	  x = vt_canonicalize_addr (set, XEXP (loc, 0));
	  if (x != XEXP (loc, 0))
	    loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
	  retry = false;
	}

      if (GET_CODE (loc) == VALUE)
	{
	  /* Resolve the VALUE through the per-block cache when SET is
	     available, else through the global (cselib-only) one.  */
	  if (set)
	    loc = get_addr_from_local_cache (set, loc);
	  else
	    loc = get_addr_from_global_cache (loc);

	  /* Consolidate plus_constants.  */
	  while (ofst && GET_CODE (loc) == PLUS
		 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
	    {
	      ofst += INTVAL (XEXP (loc, 1));
	      loc = XEXP (loc, 0);
	    }

	  retry = false;
	}
      else
	{
	  x = canon_rtx (loc);
	  /* Keep iterating until canon_rtx reaches a fixed point,
	     unless an earlier step already decided to stop.  */
	  if (retry)
	    retry = (x != loc);
	  loc = x;
	}
    }

  /* Add OFST back in.  */
  if (ofst)
    {
      /* Don't build new RTL if we can help it.  */
      if (GET_CODE (oloc) == PLUS
	  && XEXP (oloc, 0) == loc
	  && INTVAL (XEXP (oloc, 1)) == ofst)
	return oloc;

      loc = plus_constant (mode, loc, ofst);
    }

  return loc;
}
  1894. /* Return true iff there's a true dependence between MLOC and LOC.
  1895. MADDR must be a canonicalized version of MLOC's address. */
  1896. static inline bool
  1897. vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
  1898. {
  1899. if (GET_CODE (loc) != MEM)
  1900. return false;
  1901. rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
  1902. if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
  1903. return false;
  1904. return true;
  1905. }
/* Hold parameters for the hashtab traversal function
   drop_overlapping_mem_locs, see below.  */
struct overlapping_mems
{
  /* The dataflow set being traversed.  */
  dataflow_set *set;

  /* The canonicalized MEM being clobbered, and the canonicalized
     form of its address.  */
  rtx loc, addr;
};
/* Remove all MEMs that overlap with COMS->LOC from the location list
   of a hash table entry for a value.  COMS->ADDR must be a
   canonicalized form of COMS->LOC's address, and COMS->LOC must be
   canonicalized itself.  Always returns 1 so the traversal
   continues.  */
int
drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
{
  dataflow_set *set = coms->set;
  rtx mloc = coms->loc, addr = coms->addr;
  variable var = *slot;

  if (var->onepart == ONEPART_VALUE)
    {
      location_chain loc, *locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      /* If VAR is shared, scan first and only unshare (then edit)
	 when at least one location actually has to be dropped.  */
      if (shared_var_p (var, set->vars))
	{
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (vt_canon_true_dep (set, mloc, addr, loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      /* Unlink in place and free every location that truly depends
	 on MLOC.  */
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  pool_free (loc_chain_pool, loc);
	}

      /* An emptied chain means the variable part is gone.  */
      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}
  1975. /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
  1976. static void
  1977. clobber_overlapping_mems (dataflow_set *set, rtx loc)
  1978. {
  1979. struct overlapping_mems coms;
  1980. gcc_checking_assert (GET_CODE (loc) == MEM);
  1981. coms.set = set;
  1982. coms.loc = canon_rtx (loc);
  1983. coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
  1984. set->traversed_vars = set->vars;
  1985. shared_hash_htab (set->vars)
  1986. ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
  1987. set->traversed_vars = NULL;
  1988. }
  1989. /* Set the location of DV, OFFSET as the MEM LOC. */
  1990. static void
  1991. var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
  1992. decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
  1993. enum insert_option iopt)
  1994. {
  1995. if (dv_is_decl_p (dv))
  1996. dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
  1997. set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
  1998. }
  1999. /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
  2000. SET to LOC.
  2001. Adjust the address first if it is stack pointer based. */
  2002. static void
  2003. var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
  2004. rtx set_src)
  2005. {
  2006. tree decl = MEM_EXPR (loc);
  2007. HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
  2008. var_mem_decl_set (set, loc, initialized,
  2009. dv_from_decl (decl), offset, set_src, INSERT);
  2010. }
  2011. /* Delete and set the location part of variable MEM_EXPR (LOC) in
  2012. dataflow set SET to LOC. If MODIFY is true, any other live copies
  2013. of the same variable part are also deleted from the dataflow set,
  2014. otherwise the variable part is assumed to be copied from another
  2015. location holding the same part.
  2016. Adjust the address first if it is stack pointer based. */
  2017. static void
  2018. var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
  2019. enum var_init_status initialized, rtx set_src)
  2020. {
  2021. tree decl = MEM_EXPR (loc);
  2022. HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
  2023. clobber_overlapping_mems (set, loc);
  2024. decl = var_debug_decl (decl);
  2025. if (initialized == VAR_INIT_STATUS_UNKNOWN)
  2026. initialized = get_init_value (set, loc, dv_from_decl (decl));
  2027. if (modify)
  2028. clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
  2029. var_mem_set (set, loc, initialized, set_src);
  2030. }
  2031. /* Delete the location part LOC from dataflow set SET. If CLOBBER is
  2032. true, also delete any other live copies of the same variable part.
  2033. Adjust the address first if it is stack pointer based. */
  2034. static void
  2035. var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
  2036. {
  2037. tree decl = MEM_EXPR (loc);
  2038. HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
  2039. clobber_overlapping_mems (set, loc);
  2040. decl = var_debug_decl (decl);
  2041. if (clobber)
  2042. clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
  2043. delete_variable_part (set, loc, dv_from_decl (decl), offset);
  2044. }
  2045. /* Return true if LOC should not be expanded for location expressions,
  2046. or used in them. */
  2047. static inline bool
  2048. unsuitable_loc (rtx loc)
  2049. {
  2050. switch (GET_CODE (loc))
  2051. {
  2052. case PC:
  2053. case SCRATCH:
  2054. case CC0:
  2055. case ASM_INPUT:
  2056. case ASM_OPERANDS:
  2057. return true;
  2058. default:
  2059. return false;
  2060. }
  2061. }
/* Bind VAL to LOC in SET.  If MODIFIED, detach LOC from any values
   bound to it.  */
static inline void
val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
{
  if (REG_P (loc))
    {
      if (modified)
	var_regno_delete (set, REGNO (loc));
      var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else if (MEM_P (loc))
    {
      struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;

      if (modified)
	clobber_overlapping_mems (set, loc);

      /* If the head of the location list is itself a VALUE, search
	 that value's canonical location list instead.  */
      if (l && GET_CODE (l->loc) == VALUE)
	l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;

      /* If this MEM is a global constant, we don't need it in the
	 dynamic tables.  ??? We should test this before emitting the
	 micro-op in the first place.  */
      while (l)
	if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
	  break;
	else
	  l = l->next;

      /* No cselib location shares LOC's address, so record the
	 binding in the dynamic tables.  */
      if (!l)
	var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			  dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else
    {
      /* Other kinds of equivalences are necessarily static, at least
	 so long as we do not perform substitutions while merging
	 expressions.  */
      gcc_unreachable ();
      /* Dead in practice (see the assertion above); kept as the
	 would-be handling for non-REG, non-MEM locations.  */
      set_variable_part (set, loc, dv_from_value (val), 0,
			 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
    }
}
  2103. /* Bind a value to a location it was just stored in. If MODIFIED
  2104. holds, assume the location was modified, detaching it from any
  2105. values bound to it. */
  2106. static void
  2107. val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
  2108. bool modified)
  2109. {
  2110. cselib_val *v = CSELIB_VAL_PTR (val);
  2111. gcc_assert (cselib_preserved_value_p (v));
  2112. if (dump_file)
  2113. {
  2114. fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
  2115. print_inline_rtx (dump_file, loc, 0);
  2116. fprintf (dump_file, " evaluates to ");
  2117. print_inline_rtx (dump_file, val, 0);
  2118. if (v->locs)
  2119. {
  2120. struct elt_loc_list *l;
  2121. for (l = v->locs; l; l = l->next)
  2122. {
  2123. fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
  2124. print_inline_rtx (dump_file, l->loc, 0);
  2125. }
  2126. }
  2127. fprintf (dump_file, "\n");
  2128. }
  2129. gcc_checking_assert (!unsuitable_loc (loc));
  2130. val_bind (set, val, loc, modified);
  2131. }
  2132. /* Clear (canonical address) slots that reference X. */
  2133. bool
  2134. local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
  2135. {
  2136. if (vt_get_canonicalize_base (*slot) == x)
  2137. *slot = NULL;
  2138. return true;
  2139. }
/* Reset this node, detaching all its equivalences.  The most
   canonical remaining VALUE on DV's chain inherits the links before
   DV's own variable part is clobbered.  */
static void
val_reset (dataflow_set *set, decl_or_value dv)
{
  variable var = shared_hash_find (set->vars, dv);
  location_chain node;
  rtx cval;

  if (!var || !var->n_var_parts)
    return;

  gcc_assert (var->n_var_parts == 1);

  if (var->onepart == ONEPART_VALUE)
    {
      rtx x = dv_as_value (dv);

      /* Relationships in the global cache don't change, so reset the
	 local cache entry only.  */
      rtx *slot = local_get_addr_cache->get (x);
      if (slot)
	{
	  /* If the value resolved back to itself, odds are that other
	     values may have cached it too.  These entries now refer
	     to the old X, so detach them too.  Entries that used the
	     old X but resolved to something else remain ok as long as
	     that something else isn't also reset.  */
	  if (*slot == x)
	    local_get_addr_cache
	      ->traverse<rtx, local_get_addr_clear_given_value> (x);
	  *slot = NULL;
	}
    }

  /* Choose the most canonical of the VALUEs equivalent to DV
     (see canon_value_cmp).  */
  cval = NULL;
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE
	&& canon_value_cmp (node->loc, cval))
      cval = node->loc;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE && cval != node->loc)
      {
	/* Redirect the equivalence link to the new canonical
	   value, or simply remove it if it would point at
	   itself.  */
	if (cval)
	  set_variable_part (set, cval, dv_from_value (node->loc),
			     0, node->init, node->set_src, NO_INSERT);
	delete_variable_part (set, dv_as_value (dv),
			      dv_from_value (node->loc), 0);
      }

  if (cval)
    {
      decl_or_value cdv = dv_from_value (cval);

      /* Keep the remaining values connected, accumulating links
	 in the canonical value.  */
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (node->loc == cval)
	    continue;
	  else if (GET_CODE (node->loc) == REG)
	    var_reg_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else if (GET_CODE (node->loc) == MEM)
	    var_mem_decl_set (set, node->loc, node->init, cdv, 0,
			      node->set_src, NO_INSERT);
	  else
	    set_variable_part (set, node->loc, cdv, 0,
			       node->init, node->set_src, NO_INSERT);
	}
    }

  /* We remove this last, to make sure that the canonical value is not
     removed to the point of requiring reinsertion.  */
  if (cval)
    delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);

  clobber_variable_part (set, NULL, dv, 0, NULL);
}
/* Find the values in a given location and map the val to another
   value, if it is unique, or add the location as one holding the
   value.  */
static void
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
{
  decl_or_value dv = dv_from_value (val);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn)
	fprintf (dump_file, "%i: ", INSN_UID (insn));
      else
	fprintf (dump_file, "head: ");
      print_inline_rtx (dump_file, val, 0);
      fputs (" is at ", dump_file);
      print_inline_rtx (dump_file, loc, 0);
      fputc ('\n', dump_file);
    }

  /* Detach VAL's existing equivalences before recording new ones.  */
  val_reset (set, dv);

  gcc_checking_assert (!unsuitable_loc (loc));

  if (REG_P (loc))
    {
      attrs node, found = NULL;

      /* Record two-way equivalence links with every same-mode VALUE
	 already bound to this register.  */
      for (node = set->regs[REGNO (loc)]; node; node = node->next)
	if (dv_is_value_p (node->dv)
	    && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
	  {
	    found = node;

	    /* Map incoming equivalences.  ??? Wouldn't it be nice if
	       we just started sharing the location lists?  Maybe a
	       circular list ending at the value itself or some
	       such.  */
	    set_variable_part (set, dv_as_value (node->dv),
			       dv_from_value (val), node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	    set_variable_part (set, val, node->dv, node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	  }

      /* If we didn't find any equivalence, we need to remember that
	 this value is held in the named register.  */
      if (found)
	return;
    }
  /* ??? Attempt to find and merge equivalent MEMs or other
     expressions too.  */

  val_bind (set, val, loc, false);
}
  2260. /* Initialize dataflow set SET to be empty.
  2261. VARS_SIZE is the initial size of hash table VARS. */
  2262. static void
  2263. dataflow_set_init (dataflow_set *set)
  2264. {
  2265. init_attrs_list_set (set->regs);
  2266. set->vars = shared_hash_copy (empty_shared_hash);
  2267. set->stack_adjust = 0;
  2268. set->traversed_vars = NULL;
  2269. }
  2270. /* Delete the contents of dataflow set SET. */
  2271. static void
  2272. dataflow_set_clear (dataflow_set *set)
  2273. {
  2274. int i;
  2275. for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
  2276. attrs_list_clear (&set->regs[i]);
  2277. shared_hash_destroy (set->vars);
  2278. set->vars = shared_hash_copy (empty_shared_hash);
  2279. }
  2280. /* Copy the contents of dataflow set SRC to DST. */
  2281. static void
  2282. dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
  2283. {
  2284. int i;
  2285. for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
  2286. attrs_list_copy (&dst->regs[i], src->regs[i]);
  2287. shared_hash_destroy (dst->vars);
  2288. dst->vars = shared_hash_copy (src->vars);
  2289. dst->stack_adjust = src->stack_adjust;
  2290. }
/* Information for merging lists of locations for a given offset of variable.
   One record per location node; used by variable_union to compute the
   "sorted" union of two location chains.  */
struct variable_union_info
{
  /* Node of the location chain.  */
  location_chain lc;

  /* The sum of positions in the input chains.  Lower sums sort
     earlier, so locations near the front of both chains stay near
     the front of the merged chain.  */
  int pos;

  /* The position in the chain of DST dataflow set.  Acts as a
     tie-breaker when two records have equal POS.  */
  int pos_dst;
};

/* Buffer for location list sorting and its allocated size.  Grown on
   demand in variable_union and reused across calls.  */
static struct variable_union_info *vui_vec;
static int vui_allocated;
  2305. /* Compare function for qsort, order the structures by POS element. */
  2306. static int
  2307. variable_union_info_cmp_pos (const void *n1, const void *n2)
  2308. {
  2309. const struct variable_union_info *const i1 =
  2310. (const struct variable_union_info *) n1;
  2311. const struct variable_union_info *const i2 =
  2312. ( const struct variable_union_info *) n2;
  2313. if (i1->pos != i2->pos)
  2314. return i1->pos - i2->pos;
  2315. return (i1->pos_dst - i2->pos_dst);
  2316. }
/* Compute union of location parts of variable *SLOT and the same variable
   from hash table DATA.  Compute "sorted" union of the location chains
   for common offsets, i.e. the locations of a variable part are sorted by
   a priority where the priority is the sum of the positions in the 2 chains
   (if a location is only in one list the position in the second list is
   defined to be larger than the length of the chains).
   When we are updating the location parts the newest location is in the
   beginning of the chain, so when we do the described "sorted" union
   we keep the newest locations in the beginning.  */

static int
variable_union (variable src, dataflow_set *set)
{
  variable dst;
  variable_def **dstp;
  int i, j, k;

  dstp = shared_hash_find_slot (set->vars, src->dv);
  if (!dstp || !*dstp)
    {
      /* SET has no entry for this variable yet: share SRC's entry
	 outright rather than copying it.  */
      src->refcount++;

      dst_can_be_shared = false;
      if (!dstp)
	dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);

      *dstp = src;

      /* Continue traversing the hash table.  */
      return 1;
    }
  else
    dst = *dstp;

  gcc_assert (src->n_var_parts);
  gcc_checking_assert (src->onepart == dst->onepart);

  /* We can combine one-part variables very efficiently, because their
     entries are in canonical order.  */
  if (src->onepart)
    {
      location_chain *nodep, dnode, snode;

      gcc_assert (src->n_var_parts == 1
		  && dst->n_var_parts == 1);

      snode = src->var_part[0].loc_chain;
      gcc_assert (snode);

    restart_onepart_unshared:
      nodep = &dst->var_part[0].loc_chain;
      dnode = *nodep;
      gcc_assert (dnode);

      /* Merge the two loc_cmp-sorted chains in a single pass,
	 splicing SRC-only nodes into DST's chain.  */
      while (snode)
	{
	  /* A NULL dnode compares as "greater", so trailing SRC nodes
	     get appended.  */
	  int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
	  if (r > 0)
	    {
	      location_chain nnode;

	      /* Unshare DST before the first modification, then redo
		 the merge on the private copy.  */
	      if (shared_var_p (dst, set->vars))
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_INITIALIZED);
		  dst = *dstp;
		  goto restart_onepart_unshared;
		}

	      *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool);
	      nnode->loc = snode->loc;
	      nnode->init = snode->init;
	      /* set_src from a MEM is not meaningful across blocks.  */
	      if (!snode->set_src || MEM_P (snode->set_src))
		nnode->set_src = NULL;
	      else
		nnode->set_src = snode->set_src;
	      nnode->next = dnode;
	      dnode = nnode;
	    }
	  else if (r == 0)
	    gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));

	  if (r >= 0)
	    snode = snode->next;

	  nodep = &dnode->next;
	  dnode = *nodep;
	}

      return 1;
    }

  gcc_checking_assert (!src->onepart);

  /* Count the number of location parts, result is K.  */
  for (i = 0, j = 0, k = 0;
       i < src->n_var_parts && j < dst->n_var_parts; k++)
    {
      if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  i++;
	  j++;
	}
      else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	i++;
      else
	j++;
    }
  k += src->n_var_parts - i;
  k += dst->n_var_parts - j;

  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
     thus there are at most MAX_VAR_PARTS different offsets.  */
  gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);

  if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
    {
      dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
      dst = *dstp;
    }

  i = src->n_var_parts - 1;
  j = dst->n_var_parts - 1;
  dst->n_var_parts = k;

  /* Fill DST's parts from the highest offset down; since k >= j at
     all times, dst->var_part[j] is never overwritten before it has
     been consumed.  */
  for (k--; k >= 0; k--)
    {
      location_chain node, node2;

      if (i >= 0 && j >= 0
	  && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  /* Compute the "sorted" union of the chains, i.e. the locations which
	     are in both chains go first, they are sorted by the sum of
	     positions in the chains.  */
	  int dst_l, src_l;
	  int ii, jj, n;
	  struct variable_union_info *vui;

	  /* If DST is shared compare the location chains.
	     If they are different we will modify the chain in DST with
	     high probability so make a copy of DST.  */
	  if (shared_var_p (dst, set->vars))
	    {
	      for (node = src->var_part[i].loc_chain,
		   node2 = dst->var_part[j].loc_chain; node && node2;
		   node = node->next, node2 = node2->next)
		{
		  if (!((REG_P (node2->loc)
			 && REG_P (node->loc)
			 && REGNO (node2->loc) == REGNO (node->loc))
			|| rtx_equal_p (node2->loc, node->loc)))
		    {
		      /* Keep the better initialization status even on
			 the shared copy before bailing out.  */
		      if (node2->init < node->init)
			node2->init = node->init;
		      break;
		    }
		}
	      /* Chains differ (or have different lengths): unshare.  */
	      if (node || node2)
		{
		  dstp = unshare_variable (set, dstp, dst,
					   VAR_INIT_STATUS_UNKNOWN);
		  dst = (variable)*dstp;
		}
	    }

	  src_l = 0;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    src_l++;
	  dst_l = 0;
	  for (node = dst->var_part[j].loc_chain; node; node = node->next)
	    dst_l++;

	  if (dst_l == 1)
	    {
	      /* The most common case, much simpler, no qsort is needed.  */
	      location_chain dstnode = dst->var_part[j].loc_chain;
	      dst->var_part[k].loc_chain = dstnode;
	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	      node2 = dstnode;
	      /* Append each SRC location that doesn't match DST's
		 single location.  */
	      for (node = src->var_part[i].loc_chain; node; node = node->next)
		if (!((REG_P (dstnode->loc)
		       && REG_P (node->loc)
		       && REGNO (dstnode->loc) == REGNO (node->loc))
		      || rtx_equal_p (dstnode->loc, node->loc)))
		  {
		    location_chain new_node;

		    /* Copy the location from SRC.  */
		    new_node = (location_chain) pool_alloc (loc_chain_pool);
		    new_node->loc = node->loc;
		    new_node->init = node->init;
		    if (!node->set_src || MEM_P (node->set_src))
		      new_node->set_src = NULL;
		    else
		      new_node->set_src = node->set_src;
		    node2->next = new_node;
		    node2 = new_node;
		  }
	      node2->next = NULL;
	    }
	  else
	    {
	      /* Grow the scratch array geometrically as needed.  */
	      if (src_l + dst_l > vui_allocated)
		{
		  vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
		  vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
					vui_allocated);
		}
	      vui = vui_vec;

	      /* Fill in the locations from DST.  */
	      for (node = dst->var_part[j].loc_chain, jj = 0; node;
		   node = node->next, jj++)
		{
		  vui[jj].lc = node;
		  vui[jj].pos_dst = jj;

		  /* Pos plus value larger than a sum of 2 valid positions.  */
		  vui[jj].pos = jj + src_l + dst_l;
		}

	      /* Fill in the locations from SRC.  */
	      n = dst_l;
	      for (node = src->var_part[i].loc_chain, ii = 0; node;
		   node = node->next, ii++)
		{
		  /* Find location from NODE.  */
		  for (jj = 0; jj < dst_l; jj++)
		    {
		      if ((REG_P (vui[jj].lc->loc)
			   && REG_P (node->loc)
			   && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
			  || rtx_equal_p (vui[jj].lc->loc, node->loc))
			{
			  /* Present in both chains: priority is the
			     sum of the two positions.  */
			  vui[jj].pos = jj + ii;
			  break;
			}
		    }
		  if (jj >= dst_l)	/* The location has not been found.  */
		    {
		      location_chain new_node;

		      /* Copy the location from SRC.  */
		      new_node = (location_chain) pool_alloc (loc_chain_pool);
		      new_node->loc = node->loc;
		      new_node->init = node->init;
		      if (!node->set_src || MEM_P (node->set_src))
			new_node->set_src = NULL;
		      else
			new_node->set_src = node->set_src;
		      vui[n].lc = new_node;
		      vui[n].pos_dst = src_l + dst_l;
		      vui[n].pos = ii + src_l + dst_l;
		      n++;
		    }
		}

	      if (dst_l == 2)
		{
		  /* Special case still very common case.  For dst_l == 2
		     all entries dst_l ... n-1 are sorted, with for i >= dst_l
		     vui[i].pos == i + src_l + dst_l.  */
		  if (vui[0].pos > vui[1].pos)
		    {
		      /* Order should be 1, 0, 2... */
		      dst->var_part[k].loc_chain = vui[1].lc;
		      vui[1].lc->next = vui[0].lc;
		      if (n >= 3)
			{
			  vui[0].lc->next = vui[2].lc;
			  vui[n - 1].lc->next = NULL;
			}
		      else
			vui[0].lc->next = NULL;
		      ii = 3;
		    }
		  else
		    {
		      dst->var_part[k].loc_chain = vui[0].lc;
		      if (n >= 3 && vui[2].pos < vui[1].pos)
			{
			  /* Order should be 0, 2, 1, 3... */
			  vui[0].lc->next = vui[2].lc;
			  vui[2].lc->next = vui[1].lc;
			  if (n >= 4)
			    {
			      vui[1].lc->next = vui[3].lc;
			      vui[n - 1].lc->next = NULL;
			    }
			  else
			    vui[1].lc->next = NULL;
			  ii = 4;
			}
		      else
			{
			  /* Order should be 0, 1, 2... */
			  ii = 1;
			  vui[n - 1].lc->next = NULL;
			}
		    }
		  /* The tail ii..n-1 is already in order; just link it.  */
		  for (; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		}
	      else
		{
		  qsort (vui, n, sizeof (struct variable_union_info),
			 variable_union_info_cmp_pos);

		  /* Reconnect the nodes in sorted order.  */
		  for (ii = 1; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		  vui[n - 1].lc->next = NULL;
		  dst->var_part[k].loc_chain = vui[0].lc;
		}

	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	    }
	  i--;
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	       || i < 0)
	{
	  /* Offset present only in DST: move the part into place.  */
	  dst->var_part[k] = dst->var_part[j];
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
	       || j < 0)
	{
	  location_chain *nextp;

	  /* Copy the chain from SRC.  */
	  nextp = &dst->var_part[k].loc_chain;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    {
	      location_chain new_lc;

	      new_lc = (location_chain) pool_alloc (loc_chain_pool);
	      new_lc->next = NULL;
	      new_lc->init = node->init;
	      if (!node->set_src || MEM_P (node->set_src))
		new_lc->set_src = NULL;
	      else
		new_lc->set_src = node->set_src;
	      new_lc->loc = node->loc;

	      *nextp = new_lc;
	      nextp = &new_lc->next;
	    }

	  VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
	  i--;
	}
      /* Any cached cur_loc is stale after merging.  */
      dst->var_part[k].cur_loc = NULL;
    }

  if (flag_var_tracking_uninit)
    /* Propagate the stronger initialization status from SRC to
       matching DST locations.  */
    for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
      {
	location_chain node, node2;
	for (node = src->var_part[i].loc_chain; node; node = node->next)
	  for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
	    if (rtx_equal_p (node->loc, node2->loc))
	      {
		if (node->init > node2->init)
		  node2->init = node->init;
	      }
      }

  /* Continue traversing the hash table.  */
  return 1;
}
  2652. /* Compute union of dataflow sets SRC and DST and store it to DST. */
  2653. static void
  2654. dataflow_set_union (dataflow_set *dst, dataflow_set *src)
  2655. {
  2656. int i;
  2657. for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
  2658. attrs_list_union (&dst->regs[i], src->regs[i]);
  2659. if (dst->vars == empty_shared_hash)
  2660. {
  2661. shared_hash_destroy (dst->vars);
  2662. dst->vars = shared_hash_copy (src->vars);
  2663. }
  2664. else
  2665. {
  2666. variable_iterator_type hi;
  2667. variable var;
  2668. FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
  2669. var, variable, hi)
  2670. variable_union (var, dst);
  2671. }
  2672. }
/* Whether the value is currently being expanded.  Reuses the rtx
   'used' flag bit on VALUE and DEBUG_EXPR rtxes.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  Reuses the rtx
   'return_val' flag bit.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  Reuses the
   rtx 'frame_related' flag bit on VALUEs.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)

/* Whether cur_loc in the decl needs to be (re)computed.  Reuses the
   tree's TREE_VISITED bit.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
  2685. /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
  2686. user DECLs, this means they're in changed_variables. Values and
  2687. debug exprs may be left with this flag set if no user variable
  2688. requires them to be evaluated. */
  2689. static inline void
  2690. set_dv_changed (decl_or_value dv, bool newv)
  2691. {
  2692. switch (dv_onepart_p (dv))
  2693. {
  2694. case ONEPART_VALUE:
  2695. if (newv)
  2696. NO_LOC_P (dv_as_value (dv)) = false;
  2697. VALUE_CHANGED (dv_as_value (dv)) = newv;
  2698. break;
  2699. case ONEPART_DEXPR:
  2700. if (newv)
  2701. NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
  2702. /* Fall through... */
  2703. default:
  2704. DECL_CHANGED (dv_as_decl (dv)) = newv;
  2705. break;
  2706. }
  2707. }
  2708. /* Return true if DV needs to have its cur_loc recomputed. */
  2709. static inline bool
  2710. dv_changed_p (decl_or_value dv)
  2711. {
  2712. return (dv_is_value_p (dv)
  2713. ? VALUE_CHANGED (dv_as_value (dv))
  2714. : DECL_CHANGED (dv_as_decl (dv)));
  2715. }
/* Return a location list node whose loc is rtx_equal to LOC, in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  VARS must
   be in star-canonical form.  */

static location_chain
find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
{
  location_chain node;
  enum rtx_code loc_code;

  if (!var)
    return NULL;

  gcc_checking_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  gcc_checking_assert (loc != dv_as_opaque (var->dv));

  loc_code = GET_CODE (loc);
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    {
      decl_or_value dv;
      variable rvar;

      /* A node with a different code can't be rtx_equal to LOC, but a
	 VALUE node may still lead to a match through recursion.  */
      if (GET_CODE (node->loc) != loc_code)
	{
	  if (GET_CODE (node->loc) != VALUE)
	    continue;
	}
      else if (loc == node->loc)
	return node;
      else if (loc_code != VALUE)
	{
	  if (rtx_equal_p (loc, node->loc))
	    return node;
	  continue;
	}

      /* Since we're in star-canonical form, we don't need to visit
	 non-canonical nodes: one-part variables and non-canonical
	 values would only point back to the canonical node.  */
      if (dv_is_value_p (var->dv)
	  && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
	{
	  /* Skip all subsequent VALUEs.  */
	  while (node->next && GET_CODE (node->next->loc) == VALUE)
	    {
	      node = node->next;
	      gcc_checking_assert (!canon_value_cmp (node->loc,
						     dv_as_value (var->dv)));
	      if (loc == node->loc)
		return node;
	    }
	  continue;
	}

      /* In star-canonical form, a single canonical VALUE heads the
	 chain; recurse into its own location list.  */
      gcc_checking_assert (node == var->var_part[0].loc_chain);
      gcc_checking_assert (!node->next);

      dv = dv_from_value (node->loc);
      rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
      return find_loc_in_1pdv (loc, rvar, vars);
    }

  /* ??? Gotta look in cselib_val locations too.  */

  return NULL;
}
/* Hash table iteration argument passed to variable_merge.  Bundles
   the three dataflow sets involved in an intersection-style merge.  */
struct dfset_merge
{
  /* The set in which the merge is to be inserted.  */
  dataflow_set *dst;

  /* The set that we're iterating in.  */
  dataflow_set *cur;

  /* The set that may contain the other dv we are to merge with.  */
  dataflow_set *src;

  /* Number of onepart dvs in src.  */
  int src_onepart_cnt;
};
  2787. /* Insert LOC in *DNODE, if it's not there yet. The list must be in
  2788. loc_cmp order, and it is maintained as such. */
  2789. static void
  2790. insert_into_intersection (location_chain *nodep, rtx loc,
  2791. enum var_init_status status)
  2792. {
  2793. location_chain node;
  2794. int r;
  2795. for (node = *nodep; node; nodep = &node->next, node = *nodep)
  2796. if ((r = loc_cmp (node->loc, loc)) == 0)
  2797. {
  2798. node->init = MIN (node->init, status);
  2799. return;
  2800. }
  2801. else if (r > 0)
  2802. break;
  2803. node = (location_chain) pool_alloc (loc_chain_pool);
  2804. node->loc = loc;
  2805. node->set_src = NULL;
  2806. node->init = status;
  2807. node->next = *nodep;
  2808. *nodep = node;
  2809. }
/* Insert in DEST the intersection of the locations present in both
   S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
   variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
   DSM->dst.  */

static void
intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
		      location_chain s1node, variable s2var)
{
  dataflow_set *s1set = dsm->cur;
  dataflow_set *s2set = dsm->src;
  location_chain found;

  if (s2var)
    {
      location_chain s2node;

      gcc_checking_assert (s2var->onepart);

      if (s2var->n_var_parts)
	{
	  s2node = s2var->var_part[0].loc_chain;

	  /* Fast path: walk both sorted chains in lock-step while
	     they hold identical locations, skipping VAL itself.  */
	  for (; s1node && s2node;
	       s1node = s1node->next, s2node = s2node->next)
	    if (s1node->loc != s2node->loc)
	      break;
	    else if (s1node->loc == val)
	      continue;
	    else
	      insert_into_intersection (dest, s1node->loc,
					MIN (s1node->init, s2node->init));
	}
    }

  /* Slow path for the rest of S1's chain: search S2VAR (and the
     values it references) for each location individually.  */
  for (; s1node; s1node = s1node->next)
    {
      if (s1node->loc == val)
	continue;

      if ((found = find_loc_in_1pdv (s1node->loc, s2var,
				     shared_hash_htab (s2set->vars))))
	{
	  insert_into_intersection (dest, s1node->loc,
				    MIN (s1node->init, found->init));
	  continue;
	}

      if (GET_CODE (s1node->loc) == VALUE
	  && !VALUE_RECURSED_INTO (s1node->loc))
	{
	  /* Recurse into this value's own location chain, using
	     VALUE_RECURSED_INTO to guard against cycles.  */
	  decl_or_value dv = dv_from_value (s1node->loc);
	  variable svar = shared_hash_find (s1set->vars, dv);
	  if (svar)
	    {
	      if (svar->n_var_parts == 1)
		{
		  VALUE_RECURSED_INTO (s1node->loc) = true;
		  intersect_loc_chains (val, dest, dsm,
					svar->var_part[0].loc_chain,
					s2var);
		  VALUE_RECURSED_INTO (s1node->loc) = false;
		}
	    }
	}

      /* ??? gotta look in cselib_val locations too.  */

      /* ??? if the location is equivalent to any location in src,
	 searched recursively

	   add to dst the values needed to represent the equivalence

     telling whether locations S is equivalent to another dv's
     location list:

       for each location D in the list
	 if S and D satisfy rtx_equal_p, then it is present

	 else if D is a value, recurse without cycles

	 else if S and D have the same CODE and MODE
	   for each operand oS and the corresponding oD
	     if oS and oD are not equivalent, then S an D are not equivalent
	     else if they are RTX vectors
	       if any vector oS element is not equivalent to its respective oD,
		 then S and D are not equivalent
	 */
    }
}
  2885. /* Return -1 if X should be before Y in a location list for a 1-part
  2886. variable, 1 if Y should be before X, and 0 if they're equivalent
  2887. and should not appear in the list. */
  2888. static int
  2889. loc_cmp (rtx x, rtx y)
  2890. {
  2891. int i, j, r;
  2892. RTX_CODE code = GET_CODE (x);
  2893. const char *fmt;
  2894. if (x == y)
  2895. return 0;
  2896. if (REG_P (x))
  2897. {
  2898. if (!REG_P (y))
  2899. return -1;
  2900. gcc_assert (GET_MODE (x) == GET_MODE (y));
  2901. if (REGNO (x) == REGNO (y))
  2902. return 0;
  2903. else if (REGNO (x) < REGNO (y))
  2904. return -1;
  2905. else
  2906. return 1;
  2907. }
  2908. if (REG_P (y))
  2909. return 1;
  2910. if (MEM_P (x))
  2911. {
  2912. if (!MEM_P (y))
  2913. return -1;
  2914. gcc_assert (GET_MODE (x) == GET_MODE (y));
  2915. return loc_cmp (XEXP (x, 0), XEXP (y, 0));
  2916. }
  2917. if (MEM_P (y))
  2918. return 1;
  2919. if (GET_CODE (x) == VALUE)
  2920. {
  2921. if (GET_CODE (y) != VALUE)
  2922. return -1;
  2923. /* Don't assert the modes are the same, that is true only
  2924. when not recursing. (subreg:QI (value:SI 1:1) 0)
  2925. and (subreg:QI (value:DI 2:2) 0) can be compared,
  2926. even when the modes are different. */
  2927. if (canon_value_cmp (x, y))
  2928. return -1;
  2929. else
  2930. return 1;
  2931. }
  2932. if (GET_CODE (y) == VALUE)
  2933. return 1;
  2934. /* Entry value is the least preferable kind of expression. */
  2935. if (GET_CODE (x) == ENTRY_VALUE)
  2936. {
  2937. if (GET_CODE (y) != ENTRY_VALUE)
  2938. return 1;
  2939. gcc_assert (GET_MODE (x) == GET_MODE (y));
  2940. return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
  2941. }
  2942. if (GET_CODE (y) == ENTRY_VALUE)
  2943. return -1;
  2944. if (GET_CODE (x) == GET_CODE (y))
  2945. /* Compare operands below. */;
  2946. else if (GET_CODE (x) < GET_CODE (y))
  2947. return -1;
  2948. else
  2949. return 1;
  2950. gcc_assert (GET_MODE (x) == GET_MODE (y));
  2951. if (GET_CODE (x) == DEBUG_EXPR)
  2952. {
  2953. if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
  2954. < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
  2955. return -1;
  2956. gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
  2957. > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
  2958. return 1;
  2959. }
  2960. fmt = GET_RTX_FORMAT (code);
  2961. for (i = 0; i < GET_RTX_LENGTH (code); i++)
  2962. switch (fmt[i])
  2963. {
  2964. case 'w':
  2965. if (XWINT (x, i) == XWINT (y, i))
  2966. break;
  2967. else if (XWINT (x, i) < XWINT (y, i))
  2968. return -1;
  2969. else
  2970. return 1;
  2971. case 'n':
  2972. case 'i':
  2973. if (XINT (x, i) == XINT (y, i))
  2974. break;
  2975. else if (XINT (x, i) < XINT (y, i))
  2976. return -1;
  2977. else
  2978. return 1;
  2979. case 'V':
  2980. case 'E':
  2981. /* Compare the vector length first. */
  2982. if (XVECLEN (x, i) == XVECLEN (y, i))
  2983. /* Compare the vectors elements. */;
  2984. else if (XVECLEN (x, i) < XVECLEN (y, i))
  2985. return -1;
  2986. else
  2987. return 1;
  2988. for (j = 0; j < XVECLEN (x, i); j++)
  2989. if ((r = loc_cmp (XVECEXP (x, i, j),
  2990. XVECEXP (y, i, j))))
  2991. return r;
  2992. break;
  2993. case 'e':
  2994. if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
  2995. return r;
  2996. break;
  2997. case 'S':
  2998. case 's':
  2999. if (XSTR (x, i) == XSTR (y, i))
  3000. break;
  3001. if (!XSTR (x, i))
  3002. return -1;
  3003. if (!XSTR (y, i))
  3004. return 1;
  3005. if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
  3006. break;
  3007. else if (r < 0)
  3008. return -1;
  3009. else
  3010. return 1;
  3011. case 'u':
  3012. /* These are just backpointers, so they don't matter. */
  3013. break;
  3014. case '0':
  3015. case 't':
  3016. break;
  3017. /* It is believed that rtx's at this level will never
  3018. contain anything but integers and other rtx's,
  3019. except for within LABEL_REFs and SYMBOL_REFs. */
  3020. default:
  3021. gcc_unreachable ();
  3022. }
  3023. if (CONST_WIDE_INT_P (x))
  3024. {
  3025. /* Compare the vector length first. */
  3026. if (CONST_WIDE_INT_NUNITS (x) >= CONST_WIDE_INT_NUNITS (y))
  3027. return 1;
  3028. else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
  3029. return -1;
  3030. /* Compare the vectors elements. */;
  3031. for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
  3032. {
  3033. if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
  3034. return -1;
  3035. if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
  3036. return 1;
  3037. }
  3038. }
  3039. return 0;
  3040. }
  3041. #if ENABLE_CHECKING
  3042. /* Check the order of entries in one-part variables. */
  3043. int
  3044. canonicalize_loc_order_check (variable_def **slot,
  3045. dataflow_set *data ATTRIBUTE_UNUSED)
  3046. {
  3047. variable var = *slot;
  3048. location_chain node, next;
  3049. #ifdef ENABLE_RTL_CHECKING
  3050. int i;
  3051. for (i = 0; i < var->n_var_parts; i++)
  3052. gcc_assert (var->var_part[0].cur_loc == NULL);
  3053. gcc_assert (!var->in_changed_variables);
  3054. #endif
  3055. if (!var->onepart)
  3056. return 1;
  3057. gcc_assert (var->n_var_parts == 1);
  3058. node = var->var_part[0].loc_chain;
  3059. gcc_assert (node);
  3060. while ((next = node->next))
  3061. {
  3062. gcc_assert (loc_cmp (node->loc, next->loc) < 0);
  3063. node = next;
  3064. }
  3065. return 1;
  3066. }
  3067. #endif
/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
   more likely to be chosen as canonical for an equivalence set.
   Ensure less likely values can reach more likely neighbors, making
   the connections bidirectional.  */

int
canonicalize_values_mark (variable_def **slot, dataflow_set *set)
{
  variable var = *slot;
  decl_or_value dv = var->dv;
  rtx val;
  location_chain node;

  /* Only VALUE dvs participate in equivalence canonicalization.  */
  if (!dv_is_value_p (dv))
    return 1;

  gcc_checking_assert (var->n_var_parts == 1);

  val = dv_as_value (dv);

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE)
      {
	if (canon_value_cmp (node->loc, val))
	  /* The neighbor is more canonical: mark VAL so the later
	     star pass pushes VAL's locations toward it.  */
	  VALUE_RECURSED_INTO (val) = true;
	else
	  {
	    /* VAL is the more canonical one: make the connection
	       bidirectional by recording VAL in the neighbor's chain,
	       and mark the neighbor instead.  */
	    decl_or_value odv = dv_from_value (node->loc);
	    variable_def **oslot;
	    oslot = shared_hash_find_slot_noinsert (set->vars, odv);

	    set_slot_part (set, val, oslot, odv, 0,
			   node->init, NULL_RTX);

	    VALUE_RECURSED_INTO (node->loc) = true;
	  }
      }

  return 1;
}
  3100. /* Remove redundant entries from equivalence lists in onepart
  3101. variables, canonicalizing equivalence sets into star shapes. */
  3102. int
  3103. canonicalize_values_star (variable_def **slot, dataflow_set *set)
  3104. {
  3105. variable var = *slot;
  3106. decl_or_value dv = var->dv;
  3107. location_chain node;
  3108. decl_or_value cdv;
  3109. rtx val, cval;
  3110. variable_def **cslot;
  3111. bool has_value;
  3112. bool has_marks;
  3113. if (!var->onepart)
  3114. return 1;
  3115. gcc_checking_assert (var->n_var_parts == 1);
  3116. if (dv_is_value_p (dv))
  3117. {
  3118. cval = dv_as_value (dv);
  3119. if (!VALUE_RECURSED_INTO (cval))
  3120. return 1;
  3121. VALUE_RECURSED_INTO (cval) = false;
  3122. }
  3123. else
  3124. cval = NULL_RTX;
  3125. restart:
  3126. val = cval;
  3127. has_value = false;
  3128. has_marks = false;
  3129. gcc_assert (var->n_var_parts == 1);
  3130. for (node = var->var_part[0].loc_chain; node; node = node->next)
  3131. if (GET_CODE (node->loc) == VALUE)
  3132. {
  3133. has_value = true;
  3134. if (VALUE_RECURSED_INTO (node->loc))
  3135. has_marks = true;
  3136. if (canon_value_cmp (node->loc, cval))
  3137. cval = node->loc;
  3138. }
  3139. if (!has_value)
  3140. return 1;
  3141. if (cval == val)
  3142. {
  3143. if (!has_marks || dv_is_decl_p (dv))
  3144. return 1;
  3145. /* Keep it marked so that we revisit it, either after visiting a
  3146. child node, or after visiting a new parent that might be
  3147. found out. */
  3148. VALUE_RECURSED_INTO (val) = true;
  3149. for (node = var->var_part[0].loc_chain; node; node = node->next)
  3150. if (GET_CODE (node->loc) == VALUE
  3151. && VALUE_RECURSED_INTO (node->loc))
  3152. {
  3153. cval = node->loc;
  3154. restart_with_cval:
  3155. VALUE_RECURSED_INTO (cval) = false;
  3156. dv = dv_from_value (cval);
  3157. slot = shared_hash_find_slot_noinsert (set->vars, dv);
  3158. if (!slot)
  3159. {
  3160. gcc_assert (dv_is_decl_p (var->dv));
  3161. /* The canonical value was reset and dropped.
  3162. Remove it. */
  3163. clobber_variable_part (set, NULL, var->dv, 0, NULL);
  3164. return 1;
  3165. }
  3166. var = *slot;
  3167. gcc_assert (dv_is_value_p (var->dv));
  3168. if (var->n_var_parts == 0)
  3169. return 1;
  3170. gcc_assert (var->n_var_parts == 1);
  3171. goto restart;
  3172. }
  3173. VALUE_RECURSED_INTO (val) = false;
  3174. return 1;
  3175. }
  3176. /* Push values to the canonical one. */
  3177. cdv = dv_from_value (cval);
  3178. cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
  3179. for (node = var->var_part[0].loc_chain; node; node = node->next)
  3180. if (node->loc != cval)
  3181. {
  3182. cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
  3183. node->init, NULL_RTX);
  3184. if (GET_CODE (node->loc) == VALUE)
  3185. {
  3186. decl_or_value ndv = dv_from_value (node->loc);
  3187. set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
  3188. NO_INSERT);
  3189. if (canon_value_cmp (node->loc, val))
  3190. {
  3191. /* If it could have been a local minimum, it's not any more,
  3192. since it's now neighbor to cval, so it may have to push
  3193. to it. Conversely, if it wouldn't have prevailed over
  3194. val, then whatever mark it has is fine: if it was to
  3195. push, it will now push to a more canonical node, but if
  3196. it wasn't, then it has already pushed any values it might
  3197. have to. */
  3198. VALUE_RECURSED_INTO (node->loc) = true;
  3199. /* Make sure we visit node->loc by ensuring we cval is
  3200. visited too. */
  3201. VALUE_RECURSED_INTO (cval) = true;
  3202. }
  3203. else if (!VALUE_RECURSED_INTO (node->loc))
  3204. /* If we have no need to "recurse" into this node, it's
  3205. already "canonicalized", so drop the link to the old
  3206. parent. */
  3207. clobber_variable_part (set, cval, ndv, 0, NULL);
  3208. }
  3209. else if (GET_CODE (node->loc) == REG)
  3210. {
  3211. attrs list = set->regs[REGNO (node->loc)], *listp;
  3212. /* Change an existing attribute referring to dv so that it
  3213. refers to cdv, removing any duplicate this might
  3214. introduce, and checking that no previous duplicates
  3215. existed, all in a single pass. */
  3216. while (list)
  3217. {
  3218. if (list->offset == 0
  3219. && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
  3220. || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
  3221. break;
  3222. list = list->next;
  3223. }
  3224. gcc_assert (list);
  3225. if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
  3226. {
  3227. list->dv = cdv;
  3228. for (listp = &list->next; (list = *listp); listp = &list->next)
  3229. {
  3230. if (list->offset)
  3231. continue;
  3232. if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
  3233. {
  3234. *listp = list->next;
  3235. pool_free (attrs_pool, list);
  3236. list = *listp;
  3237. break;
  3238. }
  3239. gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
  3240. }
  3241. }
  3242. else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
  3243. {
  3244. for (listp = &list->next; (list = *listp); listp = &list->next)
  3245. {
  3246. if (list->offset)
  3247. continue;
  3248. if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
  3249. {
  3250. *listp = list->next;
  3251. pool_free (attrs_pool, list);
  3252. list = *listp;
  3253. break;
  3254. }
  3255. gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
  3256. }
  3257. }
  3258. else
  3259. gcc_unreachable ();
  3260. #if ENABLE_CHECKING
  3261. while (list)
  3262. {
  3263. if (list->offset == 0
  3264. && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
  3265. || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
  3266. gcc_unreachable ();
  3267. list = list->next;
  3268. }
  3269. #endif
  3270. }
  3271. }
  3272. if (val)
  3273. set_slot_part (set, val, cslot, cdv, 0,
  3274. VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
  3275. slot = clobber_slot_part (set, cval, slot, 0, NULL);
  3276. /* Variable may have been unshared. */
  3277. var = *slot;
  3278. gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
  3279. && var->var_part[0].loc_chain->next == NULL);
  3280. if (VALUE_RECURSED_INTO (cval))
  3281. goto restart_with_cval;
  3282. return 1;
  3283. }
/* Bind one-part variables to the canonical value in an equivalence
   set.  Not doing this causes dataflow convergence failure in rare
   circumstances, see PR42873.  Unfortunately we can't do this
   efficiently as part of canonicalize_values_star, since we may not
   have determined or even seen the canonical value of a set when we
   get to a variable that references another member of the set.

   SLOT is the hash slot of the variable being visited; SET is the
   dataflow set it belongs to.  Always returns 1 so the hash-table
   traversal continues over all entries.  */

int
canonicalize_vars_star (variable_def **slot, dataflow_set *set)
{
  variable var = *slot;
  decl_or_value dv = var->dv;
  location_chain node;
  rtx cval;
  decl_or_value cdv;
  variable_def **cslot;
  variable cvar;
  location_chain cnode;

  /* Only one-part DECL-keyed variables are of interest here; VALUEs
     themselves are handled by canonicalize_values_star.  */
  if (!var->onepart || var->onepart == ONEPART_VALUE)
    return 1;

  gcc_assert (var->n_var_parts == 1);

  node = var->var_part[0].loc_chain;

  /* Nothing to do unless the sole location is a VALUE.  */
  if (GET_CODE (node->loc) != VALUE)
    return 1;

  gcc_assert (!node->next);
  cval = node->loc;

  /* Push values to the canonical one.  */
  cdv = dv_from_value (cval);
  cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
  if (!cslot)
    return 1;
  cvar = *cslot;
  gcc_assert (cvar->n_var_parts == 1);

  cnode = cvar->var_part[0].loc_chain;

  /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
     that are not more canonical than it.  */
  if (GET_CODE (cnode->loc) != VALUE
      || !canon_value_cmp (cnode->loc, cval))
    return 1;

  /* CVAL was found to be non-canonical.  Change the variable to point
     to the canonical VALUE.  */
  gcc_assert (!cnode->next);
  cval = cnode->loc;

  /* Rebind VAR to the canonical value, then drop the stale link held
     under the old (non-canonical) value.  */
  slot = set_slot_part (set, cval, slot, dv, 0,
			node->init, node->set_src);
  clobber_slot_part (set, cval, slot, 0, node->set_src);

  return 1;
}
/* Combine variable or value in *S1SLOT (in DSM->cur) with the
   corresponding entry in DSM->src.  Multi-part variables are combined
   with variable_union, whereas onepart dvs are combined with
   intersection.

   S1VAR is the entry from DSM->cur being merged; the result lands in
   DSM->dst.  Always returns 1 so the enclosing traversal visits every
   entry.  */

static int
variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
{
  dataflow_set *dst = dsm->dst;
  variable_def **dstslot;
  variable s2var, dvar = NULL;
  decl_or_value dv = s1var->dv;
  onepart_enum_t onepart = s1var->onepart;
  rtx val;
  hashval_t dvhash;
  location_chain node, *nodep;

  /* If the incoming onepart variable has an empty location list, then
     the intersection will be just as empty.  For other variables,
     it's always union.  */
  gcc_checking_assert (s1var->n_var_parts
		       && s1var->var_part[0].loc_chain);

  if (!onepart)
    return variable_union (s1var, dst);

  gcc_checking_assert (s1var->n_var_parts == 1);

  dvhash = dv_htab_hash (dv);
  if (dv_is_value_p (dv))
    val = dv_as_value (dv);
  else
    val = NULL;

  /* Intersection with an absent entry is empty; record that DST can't
     be shared with its sources and we're done.  */
  s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
  if (!s2var)
    {
      dst_can_be_shared = false;
      return 1;
    }

  dsm->src_onepart_cnt--;
  gcc_assert (s2var->var_part[0].loc_chain
	      && s2var->onepart == onepart
	      && s2var->n_var_parts == 1);

  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
  if (dstslot)
    {
      dvar = *dstslot;
      gcc_assert (dvar->refcount == 1
		  && dvar->onepart == onepart
		  && dvar->n_var_parts == 1);
      nodep = &dvar->var_part[0].loc_chain;
    }
  else
    {
      /* No destination entry yet: build the intersection into a local
	 chain first.  */
      nodep = &node;
      node = NULL;
    }

  if (!dstslot && !onepart_variable_different_p (s1var, s2var))
    {
      /* Both inputs agree, so share S2VAR directly in DST.  */
      dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
						 dvhash, INSERT);
      *dstslot = dvar = s2var;
      dvar->refcount++;
    }
  else
    {
      dst_can_be_shared = false;

      intersect_loc_chains (val, nodep, dsm,
			    s1var->var_part[0].loc_chain, s2var);

      if (!dstslot)
	{
	  if (node)
	    {
	      /* The intersection is non-empty: allocate a fresh
		 variable for it and insert it into DST.  */
	      dvar = (variable) pool_alloc (onepart_pool (onepart));
	      dvar->dv = dv;
	      dvar->refcount = 1;
	      dvar->n_var_parts = 1;
	      dvar->onepart = onepart;
	      dvar->in_changed_variables = false;
	      dvar->var_part[0].loc_chain = node;
	      dvar->var_part[0].cur_loc = NULL;
	      if (onepart)
		VAR_LOC_1PAUX (dvar) = NULL;
	      else
		VAR_PART_OFFSET (dvar, 0) = 0;

	      dstslot
		= shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
						   INSERT);
	      gcc_assert (!*dstslot);
	      *dstslot = dvar;
	    }
	  else
	    return 1;
	}
    }

  /* Walk the (register-first) location chain, wiring registers into
     DST's per-register attribute lists or redirecting them to the
     VALUE already holding the register.  */
  nodep = &dvar->var_part[0].loc_chain;
  while ((node = *nodep))
    {
      location_chain *nextp = &node->next;

      if (GET_CODE (node->loc) == REG)
	{
	  attrs list;

	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
		&& dv_is_value_p (list->dv))
	      break;

	  if (!list)
	    attrs_list_insert (&dst->regs[REGNO (node->loc)],
			       dv, 0, node->loc);
	  /* If this value became canonical for another value that had
	     this register, we want to leave it alone.  */
	  else if (dv_as_value (list->dv) != val)
	    {
	      dstslot = set_slot_part (dst, dv_as_value (list->dv),
				       dstslot, dv, 0,
				       node->init, NULL_RTX);
	      dstslot = delete_slot_part (dst, node->loc, dstslot, 0);

	      /* Since nextp points into the removed node, we can't
		 use it.  The pointer to the next node moved to nodep.
		 However, if the variable we're walking is unshared
		 during our walk, we'll keep walking the location list
		 of the previously-shared variable, in which case the
		 node won't have been removed, and we'll want to skip
		 it.  That's why we test *nodep here.  */
	      if (*nodep != node)
		nextp = nodep;
	    }
	}
      else
	/* Canonicalization puts registers first, so we don't have to
	   walk it all.  */
	break;
      nodep = nextp;
    }

  /* The variable may have been unshared above; resync DVAR.  */
  if (dvar != *dstslot)
    dvar = *dstslot;
  nodep = &dvar->var_part[0].loc_chain;

  if (val)
    {
      /* Mark all referenced nodes for canonicalization, and make sure
	 we have mutual equivalence links.  */
      VALUE_RECURSED_INTO (val) = true;
      for (node = *nodep; node; node = node->next)
	if (GET_CODE (node->loc) == VALUE)
	  {
	    VALUE_RECURSED_INTO (node->loc) = true;
	    set_variable_part (dst, val, dv_from_value (node->loc), 0,
			       node->init, NULL, INSERT);
	  }

      dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
      gcc_assert (*dstslot == dvar);
      canonicalize_values_star (dstslot, dst);
      gcc_checking_assert (dstslot
			   == shared_hash_find_slot_noinsert_1 (dst->vars,
								dv, dvhash));
      dvar = *dstslot;
    }
  else
    {
      bool has_value = false, has_other = false;

      /* If we have one value and anything else, we're going to
	 canonicalize this, so make sure all values have an entry in
	 the table and are marked for canonicalization.  */
      for (node = *nodep; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    {
	      /* If this was marked during register canonicalization,
		 we know we have to canonicalize values.  */
	      if (has_value)
		has_other = true;
	      has_value = true;
	      if (has_other)
		break;
	    }
	  else
	    {
	      has_other = true;
	      if (has_value)
		break;
	    }
	}

      if (has_value && has_other)
	{
	  /* Give every referenced VALUE a (possibly empty) table
	     entry so canonicalize_values_star can link them up.  */
	  for (node = *nodep; node; node = node->next)
	    {
	      if (GET_CODE (node->loc) == VALUE)
		{
		  decl_or_value dv = dv_from_value (node->loc);
		  variable_def **slot = NULL;

		  if (shared_hash_shared (dst->vars))
		    slot = shared_hash_find_slot_noinsert (dst->vars, dv);
		  if (!slot)
		    slot = shared_hash_find_slot_unshare (&dst->vars, dv,
							  INSERT);
		  if (!*slot)
		    {
		      variable var = (variable) pool_alloc (onepart_pool
							    (ONEPART_VALUE));
		      var->dv = dv;
		      var->refcount = 1;
		      var->n_var_parts = 1;
		      var->onepart = ONEPART_VALUE;
		      var->in_changed_variables = false;
		      var->var_part[0].loc_chain = NULL;
		      var->var_part[0].cur_loc = NULL;
		      VAR_LOC_1PAUX (var) = NULL;
		      *slot = var;
		    }

		  VALUE_RECURSED_INTO (node->loc) = true;
		}
	    }

	  dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
	  gcc_assert (*dstslot == dvar);
	  canonicalize_values_star (dstslot, dst);
	  gcc_checking_assert (dstslot
			       == shared_hash_find_slot_noinsert_1 (dst->vars,
								    dv, dvhash));
	  dvar = *dstslot;
	}
    }

  /* If the result is identical to one of the inputs, share that input
     instead of keeping a private copy.  */
  if (!onepart_variable_different_p (dvar, s2var))
    {
      variable_htab_free (dvar);
      *dstslot = dvar = s2var;
      dvar->refcount++;
    }
  else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
    {
      variable_htab_free (dvar);
      *dstslot = dvar = s1var;
      dvar->refcount++;
      dst_can_be_shared = false;
    }
  else
    dst_can_be_shared = false;

  return 1;
}
  3564. /* Copy s2slot (in DSM->src) to DSM->dst if the variable is a
  3565. multi-part variable. Unions of multi-part variables and
  3566. intersections of one-part ones will be handled in
  3567. variable_merge_over_cur(). */
  3568. static int
  3569. variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
  3570. {
  3571. dataflow_set *dst = dsm->dst;
  3572. decl_or_value dv = s2var->dv;
  3573. if (!s2var->onepart)
  3574. {
  3575. variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
  3576. *dstp = s2var;
  3577. s2var->refcount++;
  3578. return 1;
  3579. }
  3580. dsm->src_onepart_cnt++;
  3581. return 1;
  3582. }
/* Combine dataflow set information from SRC2 into DST.  The original
   contents of DST act as the first merge input: they are moved into a
   local copy (CUR/SRC1), DST is reinitialized, and the merge of SRC1
   and SRC2 is built into it.  */

static void
dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
{
  dataflow_set cur = *dst;
  dataflow_set *src1 = &cur;
  struct dfset_merge dsm;
  int i;
  size_t src1_elems, src2_elems;
  variable_iterator_type hi;
  variable var;

  src1_elems = shared_hash_htab (src1->vars)->elements ();
  src2_elems = shared_hash_htab (src2->vars)->elements ();
  dataflow_set_init (dst);
  dst->stack_adjust = cur.stack_adjust;
  /* Replace DST's (freshly initialized) hash with one sized for the
     larger of the two inputs, to avoid rehashing during the merge.  */
  shared_hash_destroy (dst->vars);
  dst->vars = (shared_hash) pool_alloc (shared_hash_pool);
  dst->vars->refcount = 1;
  dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);

  dsm.dst = dst;
  dsm.src = src2;
  dsm.cur = src1;
  dsm.src_onepart_cnt = 0;

  /* First copy multi-part variables from SRC2, counting its one-part
     entries; then merge SRC1's entries against SRC2's.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
			       var, variable, hi)
    variable_merge_over_src (var, &dsm);
  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
			       var, variable, hi)
    variable_merge_over_cur (var, &dsm);

  /* One-part entries present in SRC2 but never matched in SRC1 were
     dropped, so the result differs from both inputs.  */
  if (dsm.src_onepart_cnt)
    dst_can_be_shared = false;

  dataflow_set_destroy (src1);
}
/* Mark register equivalences.  For each hard register with more than
   one attribute, pick per machine mode the most canonical VALUE bound
   to the register, link every other one-part dv for that register to
   it, and then run canonicalize_values_star over the affected
   entries.  */

static void
dataflow_set_equiv_regs (dataflow_set *set)
{
  int i;
  attrs list, *listp;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      rtx canon[NUM_MACHINE_MODES];

      /* If the list is empty or one entry, no need to canonicalize
	 anything.  */
      if (set->regs[i] == NULL || set->regs[i]->next == NULL)
	continue;

      memset (canon, 0, sizeof (canon));

      /* Pass 1: find, per mode, the most canonical VALUE bound to
	 this register at offset 0.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_is_value_p (list->dv))
	  {
	    rtx val = dv_as_value (list->dv);
	    rtx *cvalp = &canon[(int)GET_MODE (val)];
	    rtx cval = *cvalp;

	    if (canon_value_cmp (val, cval))
	      *cvalp = val;
	  }

      /* Pass 2: record equivalences from each one-part dv to the
	 canonical VALUE of its mode, marking both ends with
	 VALUE_RECURSED_INTO so pass 3 knows what to canonicalize.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_onepart_p (list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];

	    if (!cval)
	      continue;

	    if (dv_is_value_p (list->dv))
	      {
		rtx val = dv_as_value (list->dv);

		if (val == cval)
		  continue;

		VALUE_RECURSED_INTO (val) = true;
		set_variable_part (set, val, dv_from_value (cval), 0,
				   VAR_INIT_STATUS_INITIALIZED,
				   NULL, NO_INSERT);
	      }

	    VALUE_RECURSED_INTO (cval) = true;
	    set_variable_part (set, cval, list->dv, 0,
			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
	  }

      /* Pass 3: canonicalize each marked entry.  Canonicalization may
	 unlink LIST from the attr chain; when *LISTP no longer points
	 at LIST, restart advancement from LISTP (via LIST = NULL and
	 the conditional step in the for-increment).  */
      for (listp = &set->regs[i]; (list = *listp);
	   listp = list ? &list->next : listp)
	if (list->offset == 0 && dv_onepart_p (list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];
	    variable_def **slot;

	    if (!cval)
	      continue;

	    if (dv_is_value_p (list->dv))
	      {
		rtx val = dv_as_value (list->dv);
		if (!VALUE_RECURSED_INTO (val))
		  continue;
	      }

	    slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
	    canonicalize_values_star (slot, set);
	    if (*listp != list)
	      list = NULL;
	  }
    }
}
  3683. /* Remove any redundant values in the location list of VAR, which must
  3684. be unshared and 1-part. */
  3685. static void
  3686. remove_duplicate_values (variable var)
  3687. {
  3688. location_chain node, *nodep;
  3689. gcc_assert (var->onepart);
  3690. gcc_assert (var->n_var_parts == 1);
  3691. gcc_assert (var->refcount == 1);
  3692. for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
  3693. {
  3694. if (GET_CODE (node->loc) == VALUE)
  3695. {
  3696. if (VALUE_RECURSED_INTO (node->loc))
  3697. {
  3698. /* Remove duplicate value node. */
  3699. *nodep = node->next;
  3700. pool_free (loc_chain_pool, node);
  3701. continue;
  3702. }
  3703. else
  3704. VALUE_RECURSED_INTO (node->loc) = true;
  3705. }
  3706. nodep = &node->next;
  3707. }
  3708. for (node = var->var_part[0].loc_chain; node; node = node->next)
  3709. if (GET_CODE (node->loc) == VALUE)
  3710. {
  3711. gcc_assert (VALUE_RECURSED_INTO (node->loc));
  3712. VALUE_RECURSED_INTO (node->loc) = false;
  3713. }
  3714. }
/* Hash table iteration argument passed to variable_post_merge
   callbacks (variable_post_merge_new_vals and
   variable_post_merge_perm_vals).  */
struct dfset_post_merge
{
  /* The new input set for the current block.  */
  dataflow_set *set;
  /* Pointer to the permanent input set for the current block, or
     NULL.  The callbacks may allocate and initialize it lazily.  */
  dataflow_set **permp;
};
/* Create values for incoming expressions associated with one-part
   variables that don't have value numbers for them.

   SLOT is the hash slot of the variable being visited; DFPM carries
   the dataflow set and the (lazily created) permanent set.  Always
   returns 1 so the traversal continues.  */

int
variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
{
  dataflow_set *set = dfpm->set;
  variable var = *slot;
  location_chain node;

  if (!var->onepart || !var->n_var_parts)
    return 1;

  gcc_assert (var->n_var_parts == 1);

  if (dv_is_decl_p (var->dv))
    {
      bool check_dupes = false;

    restart:
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
	  else if (GET_CODE (node->loc) == REG)
	    {
	      attrs att, *attp, *curp = NULL;

	      /* We're about to modify the location list in place;
		 unshare the variable first if needed, then rescan
		 from the top.  */
	      if (var->refcount != 1)
		{
		  slot = unshare_variable (set, slot, var,
					   VAR_INIT_STATUS_INITIALIZED);
		  var = *slot;
		  goto restart;
		}

	      /* Look for a VALUE already bound to this register in
		 the right mode; remember (in CURP) the attr entry
		 that refers to our own decl along the way.  */
	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
		   attp = &att->next)
		if (att->offset == 0
		    && GET_MODE (att->loc) == GET_MODE (node->loc))
		  {
		    if (dv_is_value_p (att->dv))
		      {
			rtx cval = dv_as_value (att->dv);
			node->loc = cval;
			check_dupes = true;
			break;
		      }
		    else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
		      curp = attp;
		  }

	      /* If the scan above broke out early, CURP may not have
		 been found yet; locate the decl's own attr entry.  */
	      if (!curp)
		{
		  curp = attp;
		  while (*curp)
		    if ((*curp)->offset == 0
			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
			&& dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
		      break;
		    else
		      curp = &(*curp)->next;
		  gcc_assert (*curp);
		}

	      /* No VALUE holds this register yet: reuse one from the
		 permanent set, or create a fresh cselib value.  */
	      if (!att)
		{
		  decl_or_value cdv;
		  rtx cval;

		  if (!*dfpm->permp)
		    {
		      *dfpm->permp = XNEW (dataflow_set);
		      dataflow_set_init (*dfpm->permp);
		    }

		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
		       att; att = att->next)
		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
		      {
			gcc_assert (att->offset == 0
				    && dv_is_value_p (att->dv));
			val_reset (set, att->dv);
			break;
		      }

		  if (att)
		    {
		      cdv = att->dv;
		      cval = dv_as_value (cdv);
		    }
		  else
		    {
		      /* Create a unique value to hold this register,
			 that ought to be found and reused in
			 subsequent rounds.  */
		      cselib_val *v;
		      gcc_assert (!cselib_lookup (node->loc,
						  GET_MODE (node->loc), 0,
						  VOIDmode));
		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
					 VOIDmode);
		      cselib_preserve_value (v);
		      cselib_invalidate_rtx (node->loc);
		      cval = v->val_rtx;
		      cdv = dv_from_value (cval);
		      if (dump_file)
			fprintf (dump_file,
				 "Created new value %u:%u for reg %i\n",
				 v->uid, v->hash, REGNO (node->loc));
		    }

		  var_reg_decl_set (*dfpm->permp, node->loc,
				    VAR_INIT_STATUS_INITIALIZED,
				    cdv, 0, NULL, INSERT);

		  node->loc = cval;
		  check_dupes = true;
		}

	      /* Remove attribute referring to the decl, which now
		 uses the value for the register, already existing or
		 to be added when we bring perm in.  */
	      att = *curp;
	      *curp = att->next;
	      pool_free (attrs_pool, att);
	    }
	}

      /* Rewriting REGs to VALUEs above may have introduced duplicate
	 VALUE entries; drop them.  */
      if (check_dupes)
	remove_duplicate_values (var);
    }

  return 1;
}
/* Reset values in the permanent set that are not associated with the
   chosen expression.

   PSLOT is the hash slot of a VALUE in the permanent set (its sole
   location must be a REG); DFPM carries the dataflow set being
   adjusted.  Always returns 1 so the traversal continues.  */

int
variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
{
  dataflow_set *set = dfpm->set;
  variable pvar = *pslot, var;
  location_chain pnode;
  decl_or_value dv;
  attrs att;

  gcc_assert (dv_is_value_p (pvar->dv)
	      && pvar->n_var_parts == 1);
  pnode = pvar->var_part[0].loc_chain;
  gcc_assert (pnode
	      && !pnode->next
	      && REG_P (pnode->loc));

  dv = pvar->dv;

  var = shared_hash_find (set->vars, dv);
  if (var)
    {
      /* Although variable_post_merge_new_vals may have made decls
	 non-star-canonical, values that pre-existed in canonical form
	 remain canonical, and newly-created values reference a single
	 REG, so they are canonical as well.  Since VAR has the
	 location list for a VALUE, using find_loc_in_1pdv for it is
	 fine, since VALUEs don't map back to DECLs.  */
      if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
	return 1;
      val_reset (set, dv);
    }

  /* Find any VALUE currently bound to the same register (same mode,
     offset 0) in the dataflow set.  */
  for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
    if (att->offset == 0
	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
	&& dv_is_value_p (att->dv))
      break;

  /* If there is a value associated with this register already, create
     an equivalence.  */
  if (att && dv_as_value (att->dv) != dv_as_value (dv))
    {
      rtx cval = dv_as_value (att->dv);
      set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
      set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
			 NULL, INSERT);
    }
  else if (!att)
    {
      /* The register is unclaimed: bind it to this VALUE and merge
	 the permanent entry into the set.  */
      attrs_list_insert (&set->regs[REGNO (pnode->loc)],
			 dv, 0, pnode->loc);
      variable_union (pvar, set);
    }

  return 1;
}
/* Post-merge fixups for SET: create values for incoming expressions
   of one-part decls, fold in the permanent set *PERMP (allocating it
   lazily through the callbacks), and re-canonicalize all VALUEs and
   one-part variables.  */

static void
dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
{
  struct dfset_post_merge dfpm;

  dfpm.set = set;
  dfpm.permp = permp;

  shared_hash_htab (set->vars)
    ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
  /* *PERMP may have been created by the traversal above.  */
  if (*permp)
    shared_hash_htab ((*permp)->vars)
      ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
  shared_hash_htab (set->vars)
    ->traverse <dataflow_set *, canonicalize_values_star> (set);
  shared_hash_htab (set->vars)
    ->traverse <dataflow_set *, canonicalize_vars_star> (set);
}
/* Return a node whose loc is a MEM that refers to EXPR in the
   location list of a one-part variable or value VAL, or in that of
   any values recursively mentioned in the location lists.  VARS is
   the hash table to look VAL up in.  Returns NULL when no such MEM is
   found.  Uses VALUE_RECURSED_INTO as a cycle guard, restoring it
   before returning.  */

static location_chain
find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
{
  location_chain node;
  decl_or_value dv;
  variable var;
  location_chain where = NULL;

  if (!val)
    return NULL;

  gcc_assert (GET_CODE (val) == VALUE
	      && !VALUE_RECURSED_INTO (val));

  dv = dv_from_value (val);
  var = vars->find_with_hash (dv, dv_htab_hash (dv));

  if (!var)
    return NULL;

  gcc_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  /* Mark VAL so the recursive calls below don't revisit it.  */
  VALUE_RECURSED_INTO (val) = true;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (MEM_P (node->loc)
	&& MEM_EXPR (node->loc) == expr
	&& INT_MEM_OFFSET (node->loc) == 0)
      {
	where = node;
	break;
      }
    else if (GET_CODE (node->loc) == VALUE
	     && !VALUE_RECURSED_INTO (node->loc)
	     && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
      break;

  VALUE_RECURSED_INTO (val) = false;

  return where;
}
  3949. /* Return TRUE if the value of MEM may vary across a call. */
  3950. static bool
  3951. mem_dies_at_call (rtx mem)
  3952. {
  3953. tree expr = MEM_EXPR (mem);
  3954. tree decl;
  3955. if (!expr)
  3956. return true;
  3957. decl = get_base_address (expr);
  3958. if (!decl)
  3959. return true;
  3960. if (!DECL_P (decl))
  3961. return true;
  3962. return (may_be_aliased (decl)
  3963. || (!TREE_READONLY (decl) && is_global_var (decl)));
  3964. }
/* Remove all MEMs from the location list of a hash table entry for a
   one-part variable, except those whose MEM attributes map back to
   the variable itself, directly or within a VALUE.

   SLOT is the hash slot being visited; SET is the enclosing dataflow
   set.  Always returns 1 so the traversal continues.  */

int
dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
{
  variable var = *slot;

  if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
    {
      tree decl = dv_as_decl (var->dv);
      location_chain loc, *locp;
      bool changed = false;

      if (!var->n_var_parts)
	return 1;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  /* Before unsharing, check whether there's anything to do at
	     all; if not, leave the shared variable untouched.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    {
	      /* We want to remove dying MEMs that don't refer to DECL.  */
	      if (GET_CODE (loc->loc) == MEM
		  && (MEM_EXPR (loc->loc) != decl
		      || INT_MEM_OFFSET (loc->loc) != 0)
		  && !mem_dies_at_call (loc->loc))
		break;
	      /* We want to move here MEMs that do refer to DECL.  */
	      else if (GET_CODE (loc->loc) == VALUE
		       && find_mem_expr_in_1pdv (decl, loc->loc,
						 shared_hash_htab (set->vars)))
		break;
	    }

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  rtx old_loc = loc->loc;
	  if (GET_CODE (old_loc) == VALUE)
	    {
	      location_chain mem_node
		= find_mem_expr_in_1pdv (decl, loc->loc,
					 shared_hash_htab (set->vars));

	      /* ??? This picks up only one out of multiple MEMs that
		 refer to the same variable.  Do we ever need to be
		 concerned about dealing with more than one, or, given
		 that they should all map to the same variable
		 location, their addresses will have been merged and
		 they will be regarded as equivalent?  */
	      if (mem_node)
		{
		  loc->loc = mem_node->loc;
		  loc->set_src = mem_node->set_src;
		  loc->init = MIN (loc->init, mem_node->init);
		}
	    }

	  /* Keep non-MEMs, MEMs that refer to DECL, and MEMs that
	     survive the call.  */
	  if (GET_CODE (loc->loc) != MEM
	      || (MEM_EXPR (loc->loc) == decl
		  && INT_MEM_OFFSET (loc->loc) == 0)
	      || !mem_dies_at_call (loc->loc))
	    {
	      if (old_loc != loc->loc && emit_notes)
		{
		  /* The VALUE was replaced by a MEM above; if it was
		     the current location, a new note is needed.  */
		  if (old_loc == var->var_part[0].cur_loc)
		    {
		      changed = true;
		      var->var_part[0].cur_loc = NULL;
		    }
		}
	      locp = &loc->next;
	      continue;
	    }

	  if (emit_notes)
	    {
	      if (old_loc == var->var_part[0].cur_loc)
		{
		  changed = true;
		  var->var_part[0].cur_loc = NULL;
		}
	    }
	  *locp = loc->next;
	  pool_free (loc_chain_pool, loc);
	}

      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}
/* Remove all MEMs from the location list of a hash table entry for a
   value.  SLOT is the hash slot being visited; SET is the enclosing
   dataflow set.  Always returns 1 so the traversal continues.  */

int
dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
{
  variable var = *slot;

  if (var->onepart == ONEPART_VALUE)
    {
      location_chain loc, *locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  /* Only unshare if there's actually a dying MEM to drop.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (GET_CODE (loc->loc) == MEM
		&& mem_dies_at_call (loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (GET_CODE (loc->loc) != MEM
	      || !mem_dies_at_call (loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  pool_free (loc_chain_pool, loc);
	}

      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}
/* Remove all variable-location information about call-clobbered
   registers, as well as associations between MEMs and VALUEs, from
   SET at a call insn.  */

static void
dataflow_set_clear_at_call (dataflow_set *set)
{
  unsigned int r;
  hard_reg_set_iterator hrsi;

  /* Drop every register the callee may clobber.  */
  EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
    var_regno_delete (set, r);

  if (MAY_HAVE_DEBUG_INSNS)
    {
      /* First salvage MEMs that map back to their own one-part
	 variables, then drop the remaining dying MEMs from VALUEs.  */
      set->traversed_vars = set->vars;
      shared_hash_htab (set->vars)
	->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
      set->traversed_vars = set->vars;
      shared_hash_htab (set->vars)
	->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
      set->traversed_vars = NULL;
    }
}
  4141. static bool
  4142. variable_part_different_p (variable_part *vp1, variable_part *vp2)
  4143. {
  4144. location_chain lc1, lc2;
  4145. for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
  4146. {
  4147. for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
  4148. {
  4149. if (REG_P (lc1->loc) && REG_P (lc2->loc))
  4150. {
  4151. if (REGNO (lc1->loc) == REGNO (lc2->loc))
  4152. break;
  4153. }
  4154. if (rtx_equal_p (lc1->loc, lc2->loc))
  4155. break;
  4156. }
  4157. if (!lc2)
  4158. return true;
  4159. }
  4160. return false;
  4161. }
  4162. /* Return true if one-part variables VAR1 and VAR2 are different.
  4163. They must be in canonical order. */
  4164. static bool
  4165. onepart_variable_different_p (variable var1, variable var2)
  4166. {
  4167. location_chain lc1, lc2;
  4168. if (var1 == var2)
  4169. return false;
  4170. gcc_assert (var1->n_var_parts == 1
  4171. && var2->n_var_parts == 1);
  4172. lc1 = var1->var_part[0].loc_chain;
  4173. lc2 = var2->var_part[0].loc_chain;
  4174. gcc_assert (lc1 && lc2);
  4175. while (lc1 && lc2)
  4176. {
  4177. if (loc_cmp (lc1->loc, lc2->loc))
  4178. return true;
  4179. lc1 = lc1->next;
  4180. lc2 = lc2->next;
  4181. }
  4182. return lc1 != lc2;
  4183. }
  4184. /* Return true if variables VAR1 and VAR2 are different. */
  4185. static bool
  4186. variable_different_p (variable var1, variable var2)
  4187. {
  4188. int i;
  4189. if (var1 == var2)
  4190. return false;
  4191. if (var1->onepart != var2->onepart)
  4192. return true;
  4193. if (var1->n_var_parts != var2->n_var_parts)
  4194. return true;
  4195. if (var1->onepart && var1->n_var_parts)
  4196. {
  4197. gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
  4198. && var1->n_var_parts == 1);
  4199. /* One-part values have locations in a canonical order. */
  4200. return onepart_variable_different_p (var1, var2);
  4201. }
  4202. for (i = 0; i < var1->n_var_parts; i++)
  4203. {
  4204. if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
  4205. return true;
  4206. if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
  4207. return true;
  4208. if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
  4209. return true;
  4210. }
  4211. return false;
  4212. }
/* Return true if dataflow sets OLD_SET and NEW_SET differ.  */

static bool
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
{
  variable_iterator_type hi;
  variable var1;

  /* Sets sharing the same variable table are trivially equal.  */
  if (old_set->vars == new_set->vars)
    return false;

  /* Different element counts mean a difference without any lookup.  */
  if (shared_hash_htab (old_set->vars)->elements ()
      != shared_hash_htab (new_set->vars)->elements ())
    return true;

  /* Every variable of the old set must exist, unchanged, in the new
     set.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
			       var1, variable, hi)
    {
      variable_table_type *htab = shared_hash_htab (new_set->vars);
      variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
      if (!var2)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "dataflow difference found: removal of:\n");
	      dump_var (var1);
	    }
	  return true;
	}

      if (variable_different_p (var1, var2))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "dataflow difference found: "
		       "old and new follow:\n");
	      dump_var (var1);
	      dump_var (var2);
	    }
	  return true;
	}
    }

  /* No need to traverse the second hashtab, if both have the same number
     of elements and the second one had all entries found in the first one,
     then it can't have any extra entries.  */
  return false;
}
  4255. /* Free the contents of dataflow set SET. */
  4256. static void
  4257. dataflow_set_destroy (dataflow_set *set)
  4258. {
  4259. int i;
  4260. for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
  4261. attrs_list_clear (&set->regs[i]);
  4262. shared_hash_destroy (set->vars);
  4263. set->vars = NULL;
  4264. }
  4265. /* Return true if RTL X contains a SYMBOL_REF. */
  4266. static bool
  4267. contains_symbol_ref (rtx x)
  4268. {
  4269. const char *fmt;
  4270. RTX_CODE code;
  4271. int i;
  4272. if (!x)
  4273. return false;
  4274. code = GET_CODE (x);
  4275. if (code == SYMBOL_REF)
  4276. return true;
  4277. fmt = GET_RTX_FORMAT (code);
  4278. for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
  4279. {
  4280. if (fmt[i] == 'e')
  4281. {
  4282. if (contains_symbol_ref (XEXP (x, i)))
  4283. return true;
  4284. }
  4285. else if (fmt[i] == 'E')
  4286. {
  4287. int j;
  4288. for (j = 0; j < XVECLEN (x, i); j++)
  4289. if (contains_symbol_ref (XVECEXP (x, i, j)))
  4290. return true;
  4291. }
  4292. }
  4293. return false;
  4294. }
/* Shall EXPR be tracked?  NEED_RTL requires EXPR to have a name and
   an RTL location assigned.  Returns nonzero (and clears the
   DECL_CHANGED flags) if EXPR should be tracked.  */

static bool
track_expr_p (tree expr, bool need_rtl)
{
  rtx decl_rtl;
  tree realdecl;

  /* Debug temporaries are tracked whenever they have an RTL location.  */
  if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
    return DECL_RTL_SET_P (expr);

  /* If EXPR is not a parameter or a variable do not track it.  */
  if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
    return 0;

  /* It also must have a name...  */
  if (!DECL_NAME (expr) && need_rtl)
    return 0;

  /* ... and a RTL assigned to it.  */
  decl_rtl = DECL_RTL_IF_SET (expr);
  if (!decl_rtl && need_rtl)
    return 0;

  /* If this expression is really a debug alias of some other declaration, we
     don't need to track this expression if the ultimate declaration is
     ignored.  */
  realdecl = expr;
  if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
    {
      realdecl = DECL_DEBUG_EXPR (realdecl);
      if (!DECL_P (realdecl))
	{
	  /* The debug expression may be a component reference or a
	     dereference of an address; dig out the underlying decl and
	     make sure the referenced piece is small and exactly
	     determined.  */
	  if (handled_component_p (realdecl)
	      || (TREE_CODE (realdecl) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
	    {
	      HOST_WIDE_INT bitsize, bitpos, maxsize;
	      tree innerdecl
		= get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
					   &maxsize);
	      if (!DECL_P (innerdecl)
		  || DECL_IGNORED_P (innerdecl)
		  /* Do not track declarations for parts of tracked parameters
		     since we want to track them as a whole instead.  */
		  || (TREE_CODE (innerdecl) == PARM_DECL
		      && DECL_MODE (innerdecl) != BLKmode
		      && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
		  || TREE_STATIC (innerdecl)
		  || bitsize <= 0
		  || bitpos + bitsize > 256
		  || bitsize != maxsize)
		return 0;
	      else
		realdecl = expr;
	    }
	  else
	    return 0;
	}
    }

  /* Do not track EXPR if REALDECL should be ignored for debugging
     purposes.  */
  if (DECL_IGNORED_P (realdecl))
    return 0;

  /* Do not track global variables until we are able to emit correct location
     list for them.  */
  if (TREE_STATIC (realdecl))
    return 0;

  /* When the EXPR is a DECL for alias of some variable (see example)
     the TREE_STATIC flag is not used.  Disable tracking all DECLs whose
     DECL_RTL contains SYMBOL_REF.

     Example:
     extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
     char **_dl_argv;
  */
  if (decl_rtl && MEM_P (decl_rtl)
      && contains_symbol_ref (XEXP (decl_rtl, 0)))
    return 0;

  /* If RTX is a memory it should not be very large (because it would be
     an array or struct).  */
  if (decl_rtl && MEM_P (decl_rtl))
    {
      /* Do not track structures and arrays.  */
      if (GET_MODE (decl_rtl) == BLKmode
	  || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
	return 0;
      if (MEM_SIZE_KNOWN_P (decl_rtl)
	  && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
	return 0;
    }

  /* The decision is "track": reset the changed flags on both the
     expression and its ultimate declaration.  */
  DECL_CHANGED (expr) = 0;
  DECL_CHANGED (realdecl) = 0;
  return 1;
}
  4383. /* Determine whether a given LOC refers to the same variable part as
  4384. EXPR+OFFSET. */
  4385. static bool
  4386. same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
  4387. {
  4388. tree expr2;
  4389. HOST_WIDE_INT offset2;
  4390. if (! DECL_P (expr))
  4391. return false;
  4392. if (REG_P (loc))
  4393. {
  4394. expr2 = REG_EXPR (loc);
  4395. offset2 = REG_OFFSET (loc);
  4396. }
  4397. else if (MEM_P (loc))
  4398. {
  4399. expr2 = MEM_EXPR (loc);
  4400. offset2 = INT_MEM_OFFSET (loc);
  4401. }
  4402. else
  4403. return false;
  4404. if (! expr2 || ! DECL_P (expr2))
  4405. return false;
  4406. expr = var_debug_decl (expr);
  4407. expr2 = var_debug_decl (expr2);
  4408. return (expr == expr2 && offset == offset2);
  4409. }
/* LOC is a REG or MEM that we would like to track if possible.
   If EXPR is null, we don't know what expression LOC refers to,
   otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
   LOC is an lvalue register.

   Return true if EXPR is nonnull and if LOC, or some lowpart of it,
   is something we can track.  When returning true, store the mode of
   the lowpart we can track in *MODE_OUT (if nonnull) and its offset
   from EXPR in *OFFSET_OUT (if nonnull).  */

static bool
track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
	     machine_mode *mode_out, HOST_WIDE_INT *offset_out)
{
  machine_mode mode;

  if (expr == NULL || !track_expr_p (expr, true))
    return false;

  /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
     whole subreg, but only the old inner part is really relevant.  */
  mode = GET_MODE (loc);
  if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
    {
      machine_mode pseudo_mode;

      pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
      /* Narrow to the original pseudo's mode and adjust the offset to
	 point at its lowpart within LOC.  */
      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
	{
	  offset += byte_lowpart_offset (pseudo_mode, mode);
	  mode = pseudo_mode;
	}
    }

  /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
     Do the same if we are storing to a register and EXPR occupies
     the whole of register LOC; in that case, the whole of EXPR is
     being changed.  We exclude complex modes from the second case
     because the real and imaginary parts are represented as separate
     pseudo registers, even if the whole complex value fits into one
     hard register.  */
  if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
       || (store_reg_p
	   && !COMPLEX_MODE_P (DECL_MODE (expr))
	   && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
      && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
    {
      mode = DECL_MODE (expr);
      offset = 0;
    }

  /* Offsets outside [0, MAX_VAR_PARTS) cannot be represented as
     variable parts.  */
  if (offset < 0 || offset >= MAX_VAR_PARTS)
    return false;

  if (mode_out)
    *mode_out = mode;
  if (offset_out)
    *offset_out = offset;
  return true;
}
/* Return the MODE lowpart of LOC, or null if LOC is not something we
   want to track.  When returning nonnull, make sure that the attributes
   on the returned value are updated.  */

static rtx
var_lowpart (machine_mode mode, rtx loc)
{
  unsigned int offset, reg_offset, regno;

  /* Already in the requested mode: nothing to do.  */
  if (GET_MODE (loc) == mode)
    return loc;

  /* Only registers and memory references can be narrowed here.  */
  if (!REG_P (loc) && !MEM_P (loc))
    return NULL;

  offset = byte_lowpart_offset (mode, GET_MODE (loc));

  /* For memory, adjusting the address keeps the MEM attributes
     consistent (the _nv variant does not validate the new address).  */
  if (MEM_P (loc))
    return adjust_address_nv (loc, mode, offset);

  /* For a register, compute the register number holding the lowpart
     and rebuild the REG with updated attributes.  */
  reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
  regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
					     reg_offset, mode);
  return gen_rtx_REG_offset (loc, mode, regno, offset);
}
/* Carry information about uses and stores while walking rtx.
   Passed as the opaque context to the add_uses / add_stores
   callbacks.  */

struct count_use_info
{
  /* The insn where the RTX is.  */
  rtx_insn *insn;

  /* The basic block where insn is.  */
  basic_block bb;

  /* The array of n_sets sets in the insn, as determined by cselib.  */
  struct cselib_set *sets;
  int n_sets;

  /* True if we're counting stores, false otherwise.  */
  bool store_p;
};
  4494. /* Find a VALUE corresponding to X. */
  4495. static inline cselib_val *
  4496. find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
  4497. {
  4498. int i;
  4499. if (cui->sets)
  4500. {
  4501. /* This is called after uses are set up and before stores are
  4502. processed by cselib, so it's safe to look up srcs, but not
  4503. dsts. So we look up expressions that appear in srcs or in
  4504. dest expressions, but we search the sets array for dests of
  4505. stores. */
  4506. if (cui->store_p)
  4507. {
  4508. /* Some targets represent memset and memcpy patterns
  4509. by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
  4510. (set (mem:BLK ...) (const_int ...)) or
  4511. (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
  4512. in that case, otherwise we end up with mode mismatches. */
  4513. if (mode == BLKmode && MEM_P (x))
  4514. return NULL;
  4515. for (i = 0; i < cui->n_sets; i++)
  4516. if (cui->sets[i].dest == x)
  4517. return cui->sets[i].src_elt;
  4518. }
  4519. else
  4520. return cselib_lookup (x, mode, 0, VOIDmode);
  4521. }
  4522. return NULL;
  4523. }
  4524. /* Replace all registers and addresses in an expression with VALUE
  4525. expressions that map back to them, unless the expression is a
  4526. register. If no mapping is or can be performed, returns NULL. */
  4527. static rtx
  4528. replace_expr_with_values (rtx loc)
  4529. {
  4530. if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
  4531. return NULL;
  4532. else if (MEM_P (loc))
  4533. {
  4534. cselib_val *addr = cselib_lookup (XEXP (loc, 0),
  4535. get_address_mode (loc), 0,
  4536. GET_MODE (loc));
  4537. if (addr)
  4538. return replace_equiv_address_nv (loc, addr->val_rtx);
  4539. else
  4540. return NULL;
  4541. }
  4542. else
  4543. return cselib_subst_to_values (loc, VOIDmode);
  4544. }
/* Return true if X contains a DEBUG_EXPR.  */

static bool
rtx_debug_expr_p (const_rtx x)
{
  subrtx_iterator::array_type array;

  /* Walk every sub-rtx of X (including X itself) looking for a
     DEBUG_EXPR leaf.  */
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (GET_CODE (*iter) == DEBUG_EXPR)
      return true;
  return false;
}
/* Determine what kind of micro operation to choose for a USE.  Return
   MO_CLOBBER if no micro operation is to be generated.  On success,
   if MODEP is nonnull, it receives the mode of the tracked lowpart
   (via track_loc_p) or of LOC itself for the MO_VAL_* cases.  */

static enum micro_operation_type
use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
{
  tree expr;

  /* When cselib set information is available, prefer the value-based
     micro operations (MO_VAL_*).  */
  if (cui && cui->sets)
    {
      if (GET_CODE (loc) == VAR_LOCATION)
	{
	  if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
	    {
	      rtx ploc = PAT_VAR_LOCATION_LOC (loc);
	      if (! VAR_LOC_UNKNOWN_P (ploc))
		{
		  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
						   VOIDmode);

		  /* ??? flag_float_store and volatile mems are never
		     given values, but we could in theory use them for
		     locations.  */
		  gcc_assert (val || 1);
		}
	      return MO_VAL_LOC;
	    }
	  else
	    return MO_CLOBBER;
	}

      if (REG_P (loc) || MEM_P (loc))
	{
	  if (modep)
	    *modep = GET_MODE (loc);
	  if (cui->store_p)
	    {
	      /* A store is value-tracked if it is a register, or a MEM
		 whose value and address are both known to cselib.  */
	      if (REG_P (loc)
		  || (find_use_val (loc, GET_MODE (loc), cui)
		      && cselib_lookup (XEXP (loc, 0),
					get_address_mode (loc), 0,
					GET_MODE (loc))))
		return MO_VAL_SET;
	    }
	  else
	    {
	      cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);

	      /* Already-preserved values need no MO_VAL_USE; fall
		 through to the plain-location classification below.  */
	      if (val && !cselib_preserved_value_p (val))
		return MO_VAL_USE;
	    }
	}
    }

  if (REG_P (loc))
    {
      gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);

      /* Never track the register serving as the CFA base.  */
      if (loc == cfa_base_rtx)
	return MO_CLOBBER;

      expr = REG_EXPR (loc);

      if (!expr)
	return MO_USE_NO_VAR;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, REG_OFFSET (loc),
			    false, modep, NULL))
	return MO_USE;
      else
	return MO_USE_NO_VAR;
    }
  else if (MEM_P (loc))
    {
      expr = MEM_EXPR (loc);

      if (!expr)
	return MO_CLOBBER;
      else if (target_for_debug_bind (var_debug_decl (expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
			    false, modep, NULL)
	       /* Multi-part variables shouldn't refer to one-part
		  variable names such as VALUEs (never happens) or
		  DEBUG_EXPRs (only happens in the presence of debug
		  insns).  */
	       && (!MAY_HAVE_DEBUG_INSNS
		   || !rtx_debug_expr_p (XEXP (loc, 0))))
	return MO_USE;
      else
	return MO_CLOBBER;
    }

  return MO_CLOBBER;
}
  4640. /* Log to OUT information about micro-operation MOPT involving X in
  4641. INSN of BB. */
  4642. static inline void
  4643. log_op_type (rtx x, basic_block bb, rtx_insn *insn,
  4644. enum micro_operation_type mopt, FILE *out)
  4645. {
  4646. fprintf (out, "bb %i op %i insn %i %s ",
  4647. bb->index, VTI (bb)->mos.length (),
  4648. INSN_UID (insn), micro_operation_type_name[mopt]);
  4649. print_inline_rtx (out, x, 2);
  4650. fputc ('\n', out);
  4651. }
/* The four flags below are stored in otherwise-unused rtx flag bits
   of the CONCAT micro-operation payloads built by add_uses and
   add_stores.  RTL_FLAG_CHECK1 enforces that they are only applied
   to CONCATs.  */

/* Tell whether the CONCAT used to hold a VALUE and its location
   needs value resolution, i.e., an attempt of mapping the location
   back to other incoming values.  */
#define VAL_NEEDS_RESOLUTION(x) \
  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)

/* Whether the location in the CONCAT is a tracked expression, that
   should also be handled like a MO_USE.  */
#define VAL_HOLDS_TRACK_EXPR(x) \
  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)

/* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
#define VAL_EXPR_IS_COPIED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)

/* Whether the location in the CONCAT should be handled like a
   MO_CLOBBER as well.  */
#define VAL_EXPR_IS_CLOBBERED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)

/* All preserved VALUEs.  */
static vec<rtx> preserved_values;
/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */

static void
preserve_value (cselib_val *val)
{
  /* Mark the value as preserved in cselib, then record its rtx so the
     note-emission phase can enumerate all preserved VALUEs.  */
  cselib_preserve_value (val);
  preserved_values.safe_push (val->val_rtx);
}
/* Helper function for MO_VAL_LOC handling.  Return true if X contains
   any sub-rtx that is unsuitable for CONST use and has not been
   replaced by a VALUE.  */

static bool
non_suitable_const (const_rtx x)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    {
      const_rtx x = *iter;
      switch (GET_CODE (x))
	{
	/* Codes whose runtime value is not a fixed constant.  */
	case REG:
	case DEBUG_EXPR:
	case PC:
	case SCRATCH:
	case CC0:
	case ASM_INPUT:
	case ASM_OPERANDS:
	  return true;
	case MEM:
	  /* Only read-only memory can be treated as constant.  */
	  if (!MEM_READONLY_P (x))
	    return true;
	  break;
	default:
	  break;
	}
    }
  return false;
}
/* Add uses (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  CUI carries the insn, basic block, and cselib set
   information.  Builds the CONCAT payloads for the value-based micro
   operations (MO_VAL_LOC / MO_VAL_USE) and preserves the involved
   cselib VALUEs as needed.  */

static void
add_uses (rtx loc, struct count_use_info *cui)
{
  machine_mode mode = VOIDmode;
  enum micro_operation_type type = use_type (loc, cui, &mode);

  if (type != MO_CLOBBER)
    {
      basic_block bb = cui->bb;
      micro_operation mo;

      mo.type = type;
      mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
      mo.insn = cui->insn;

      if (type == MO_VAL_LOC)
	{
	  rtx oloc = loc;
	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
	  cselib_val *val;

	  gcc_assert (cui->sets);

	  /* For a MEM location with a non-trivial address, preserve
	     the address VALUE so it stays available.  */
	  if (MEM_P (vloc)
	      && !REG_P (XEXP (vloc, 0))
	      && !MEM_P (XEXP (vloc, 0)))
	    {
	      rtx mloc = vloc;
	      machine_mode address_mode = get_address_mode (mloc);
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  if (CONSTANT_P (vloc)
	      && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
	    /* For constants don't look up any value.  */;
	  else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
		   && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
	    {
	      machine_mode mode2;
	      enum micro_operation_type type2;
	      rtx nloc = NULL;
	      bool resolvable = REG_P (vloc) || MEM_P (vloc);

	      if (resolvable)
		nloc = replace_expr_with_values (vloc);

	      /* Rewrite the VAR_LOCATION to use the VALUE-based
		 location, on a shallow copy so the insn itself is not
		 modified.  */
	      if (nloc)
		{
		  oloc = shallow_copy_rtx (oloc);
		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
		}

	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);

	      type2 = use_type (vloc, 0, &mode2);

	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
			  || type2 == MO_CLOBBER);

	      if (type2 == MO_CLOBBER
		  && !cselib_preserved_value_p (val))
		{
		  VAL_NEEDS_RESOLUTION (oloc) = resolvable;
		  preserve_value (val);
		}
	    }
	  else if (!VAR_LOC_UNKNOWN_P (vloc))
	    {
	      /* No usable value: note the location as unknown.  */
	      oloc = shallow_copy_rtx (oloc);
	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
	    }

	  mo.u.loc = oloc;
	}
      else if (type == MO_VAL_USE)
	{
	  machine_mode mode2 = VOIDmode;
	  enum micro_operation_type type2;
	  cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
	  rtx vloc, oloc = loc, nloc;

	  gcc_assert (cui->sets);

	  /* As above: preserve the VALUE of a non-trivial MEM
	     address.  */
	  if (MEM_P (oloc)
	      && !REG_P (XEXP (oloc, 0))
	      && !MEM_P (XEXP (oloc, 0)))
	    {
	      rtx mloc = oloc;
	      machine_mode address_mode = get_address_mode (mloc);
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  type2 = use_type (loc, 0, &mode2);

	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
		      || type2 == MO_CLOBBER);

	  if (type2 == MO_USE)
	    vloc = var_lowpart (mode2, loc);
	  else
	    vloc = oloc;

	  /* The loc of a MO_VAL_USE may have two forms:

	     (concat val src): val is at src, a value-based
	     representation.

	     (concat (concat val use) src): same as above, with use as
	     the MO_USE tracked value, if it differs from src.

	  */

	  gcc_checking_assert (REG_P (loc) || MEM_P (loc));
	  nloc = replace_expr_with_values (loc);
	  if (!nloc)
	    nloc = oloc;

	  if (vloc != nloc)
	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
	  else
	    oloc = val->val_rtx;

	  mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);

	  if (type2 == MO_USE)
	    VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
	  if (!cselib_preserved_value_p (val))
	    {
	      VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
	      preserve_value (val);
	    }
	}
      else
	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
      VTI (bb)->mos.safe_push (mo);
    }
}
/* Helper function for finding all uses of REG/MEM in X in insn INSN.  */

static void
add_uses_1 (rtx *x, void *cui)
{
  subrtx_var_iterator::array_type array;

  /* Visit *X and every sub-rtx of it, recording each trackable use.  */
  FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
    add_uses (*iter, (struct count_use_info *) cui);
}
/* This is the value used during expansion of locations.  We want it
   to be unbounded, so that variables expanded deep in a recursion
   nest are fully evaluated, so that their values are cached
   correctly.  We avoid recursion cycles through other means, and we
   don't unshare RTL, so excess complexity is not a problem.  */
#define EXPR_DEPTH (INT_MAX)

/* We use this to keep too-complex expressions from being emitted as
   location notes, and then to debug information.  Users can trade
   compile time for ridiculously complex expressions, although they're
   seldom useful, and they may often have to be discarded as not
   representable anyway.  */
#define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
/* Attempt to reverse the EXPR operation in the debug info and record
   it in the cselib table.  Say for reg1 = reg2 + 6 even when reg2 is
   no longer live we can express its value as VAL - 6.  EXPR is the
   SET that produced VAL; INSN is the instruction it came from.  */

static void
reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
{
  rtx src, arg, ret;
  cselib_val *v;
  struct elt_loc_list *l;
  enum rtx_code code;
  int count;

  if (GET_CODE (expr) != SET)
    return;

  /* Only register destinations matching VAL's mode are invertible.  */
  if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
    return;

  /* Only a fixed set of operations can be reversed, and only when
     their first operand is a register (or memory, for extensions).  */
  src = SET_SRC (expr);
  switch (GET_CODE (src))
    {
    case PLUS:
    case MINUS:
    case XOR:
    case NOT:
    case NEG:
      if (!REG_P (XEXP (src, 0)))
	return;
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
	return;
      break;
    default:
      return;
    }

  if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
    return;

  /* The reversed expression is only useful for a preserved value of
     the overwritten operand.  */
  v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
  if (!v || !cselib_preserved_value_p (v))
    return;

  /* Use canonical V to avoid creating multiple redundant expressions
     for different VALUES equivalent to V.  */
  v = canonical_cselib_val (v);

  /* Adding a reverse op isn't useful if V already has an always valid
     location.  Ignore ENTRY_VALUE, while it is always constant, we should
     prefer non-ENTRY_VALUE locations whenever possible.  */
  for (l = v->locs, count = 0; l; l = l->next, count++)
    if (CONSTANT_P (l->loc)
	&& (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
      return;
    /* Avoid creating too large locs lists.  */
    else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
      return;

  /* Build the inverse expression in terms of VAL.  */
  switch (GET_CODE (src))
    {
    case NOT:
    case NEG:
      /* NOT and NEG are their own inverses.  */
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
      break;
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* An extension is undone by taking the lowpart.  */
      ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
      break;
    case XOR:
      code = XOR;
      goto binary;
    case PLUS:
      code = MINUS;
      goto binary;
    case MINUS:
      code = PLUS;
      goto binary;
    binary:
      if (GET_MODE (v->val_rtx) != GET_MODE (val))
	return;
      /* The second operand must be (or expand to) a constant for the
	 reverse expression to stay valid.  */
      arg = XEXP (src, 1);
      if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	{
	  arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
	  if (arg == NULL_RTX)
	    return;
	  if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
	    return;
	}
      ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
      if (ret == val)
	/* Ensure ret isn't VALUE itself (which can happen e.g. for
	   (plus (reg1) (reg2)) when reg2 is known to be 0), as that
	   breaks a lot of routines during var-tracking.  */
	ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
      break;
    default:
      gcc_unreachable ();
    }

  cselib_add_permanent_equiv (v, ret, insn);
}
  4948. /* Add stores (register and memory references) LOC which will be tracked
  4949. to VTI (bb)->mos. EXPR is the RTL expression containing the store.
  4950. CUIP->insn is instruction which the LOC is part of. */
  4951. static void
  4952. add_stores (rtx loc, const_rtx expr, void *cuip)
  4953. {
  4954. machine_mode mode = VOIDmode, mode2;
  4955. struct count_use_info *cui = (struct count_use_info *)cuip;
  4956. basic_block bb = cui->bb;
  4957. micro_operation mo;
  4958. rtx oloc = loc, nloc, src = NULL;
  4959. enum micro_operation_type type = use_type (loc, cui, &mode);
  4960. bool track_p = false;
  4961. cselib_val *v;
  4962. bool resolve, preserve;
  4963. if (type == MO_CLOBBER)
  4964. return;
  4965. mode2 = mode;
  4966. if (REG_P (loc))
  4967. {
  4968. gcc_assert (loc != cfa_base_rtx);
  4969. if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
  4970. || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
  4971. || GET_CODE (expr) == CLOBBER)
  4972. {
  4973. mo.type = MO_CLOBBER;
  4974. mo.u.loc = loc;
  4975. if (GET_CODE (expr) == SET
  4976. && SET_DEST (expr) == loc
  4977. && !unsuitable_loc (SET_SRC (expr))
  4978. && find_use_val (loc, mode, cui))
  4979. {
  4980. gcc_checking_assert (type == MO_VAL_SET);
  4981. mo.u.loc = gen_rtx_SET (VOIDmode, loc, SET_SRC (expr));
  4982. }
  4983. }
  4984. else
  4985. {
  4986. if (GET_CODE (expr) == SET
  4987. && SET_DEST (expr) == loc
  4988. && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
  4989. src = var_lowpart (mode2, SET_SRC (expr));
  4990. loc = var_lowpart (mode2, loc);
  4991. if (src == NULL)
  4992. {
  4993. mo.type = MO_SET;
  4994. mo.u.loc = loc;
  4995. }
  4996. else
  4997. {
  4998. rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
  4999. if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
  5000. {
  5001. /* If this is an instruction copying (part of) a parameter
  5002. passed by invisible reference to its register location,
  5003. pretend it's a SET so that the initial memory location
  5004. is discarded, as the parameter register can be reused
  5005. for other purposes and we do not track locations based
  5006. on generic registers. */
  5007. if (MEM_P (src)
  5008. && REG_EXPR (loc)
  5009. && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
  5010. && DECL_MODE (REG_EXPR (loc)) != BLKmode
  5011. && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
  5012. && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
  5013. != arg_pointer_rtx)
  5014. mo.type = MO_SET;
  5015. else
  5016. mo.type = MO_COPY;
  5017. }
  5018. else
  5019. mo.type = MO_SET;
  5020. mo.u.loc = xexpr;
  5021. }
  5022. }
  5023. mo.insn = cui->insn;
  5024. }
  5025. else if (MEM_P (loc)
  5026. && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
  5027. || cui->sets))
  5028. {
  5029. if (MEM_P (loc) && type == MO_VAL_SET
  5030. && !REG_P (XEXP (loc, 0))
  5031. && !MEM_P (XEXP (loc, 0)))
  5032. {
  5033. rtx mloc = loc;
  5034. machine_mode address_mode = get_address_mode (mloc);
  5035. cselib_val *val = cselib_lookup (XEXP (mloc, 0),
  5036. address_mode, 0,
  5037. GET_MODE (mloc));
  5038. if (val && !cselib_preserved_value_p (val))
  5039. preserve_value (val);
  5040. }
  5041. if (GET_CODE (expr) == CLOBBER || !track_p)
  5042. {
  5043. mo.type = MO_CLOBBER;
  5044. mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
  5045. }
  5046. else
  5047. {
  5048. if (GET_CODE (expr) == SET
  5049. && SET_DEST (expr) == loc
  5050. && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
  5051. src = var_lowpart (mode2, SET_SRC (expr));
  5052. loc = var_lowpart (mode2, loc);
  5053. if (src == NULL)
  5054. {
  5055. mo.type = MO_SET;
  5056. mo.u.loc = loc;
  5057. }
  5058. else
  5059. {
  5060. rtx xexpr = gen_rtx_SET (VOIDmode, loc, src);
  5061. if (same_variable_part_p (SET_SRC (xexpr),
  5062. MEM_EXPR (loc),
  5063. INT_MEM_OFFSET (loc)))
  5064. mo.type = MO_COPY;
  5065. else
  5066. mo.type = MO_SET;
  5067. mo.u.loc = xexpr;
  5068. }
  5069. }
  5070. mo.insn = cui->insn;
  5071. }
  5072. else
  5073. return;
  5074. if (type != MO_VAL_SET)
  5075. goto log_and_return;
  5076. v = find_use_val (oloc, mode, cui);
  5077. if (!v)
  5078. goto log_and_return;
  5079. resolve = preserve = !cselib_preserved_value_p (v);
  5080. /* We cannot track values for multiple-part variables, so we track only
  5081. locations for tracked parameters passed either by invisible reference
  5082. or directly in multiple locations. */
  5083. if (track_p
  5084. && REG_P (loc)
  5085. && REG_EXPR (loc)
  5086. && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
  5087. && DECL_MODE (REG_EXPR (loc)) != BLKmode
  5088. && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
  5089. && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
  5090. && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
  5091. || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
  5092. && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
  5093. {
  5094. /* Although we don't use the value here, it could be used later by the
  5095. mere virtue of its existence as the operand of the reverse operation
  5096. that gave rise to it (typically extension/truncation). Make sure it
  5097. is preserved as required by vt_expand_var_loc_chain. */
  5098. if (preserve)
  5099. preserve_value (v);
  5100. goto log_and_return;
  5101. }
  5102. if (loc == stack_pointer_rtx
  5103. && hard_frame_pointer_adjustment != -1
  5104. && preserve)
  5105. cselib_set_value_sp_based (v);
  5106. nloc = replace_expr_with_values (oloc);
  5107. if (nloc)
  5108. oloc = nloc;
  5109. if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
  5110. {
  5111. cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
  5112. if (oval == v)
  5113. return;
  5114. gcc_assert (REG_P (oloc) || MEM_P (oloc));
  5115. if (oval && !cselib_preserved_value_p (oval))
  5116. {
  5117. micro_operation moa;
  5118. preserve_value (oval);
  5119. moa.type = MO_VAL_USE;
  5120. moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
  5121. VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
  5122. moa.insn = cui->insn;
  5123. if (dump_file && (dump_flags & TDF_DETAILS))
  5124. log_op_type (moa.u.loc, cui->bb, cui->insn,
  5125. moa.type, dump_file);
  5126. VTI (bb)->mos.safe_push (moa);
  5127. }
  5128. resolve = false;
  5129. }
  5130. else if (resolve && GET_CODE (mo.u.loc) == SET)
  5131. {
  5132. if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
  5133. nloc = replace_expr_with_values (SET_SRC (expr));
  5134. else
  5135. nloc = NULL_RTX;
  5136. /* Avoid the mode mismatch between oexpr and expr. */
  5137. if (!nloc && mode != mode2)
  5138. {
  5139. nloc = SET_SRC (expr);
  5140. gcc_assert (oloc == SET_DEST (expr));
  5141. }
  5142. if (nloc && nloc != SET_SRC (mo.u.loc))
  5143. oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc);
  5144. else
  5145. {
  5146. if (oloc == SET_DEST (mo.u.loc))
  5147. /* No point in duplicating. */
  5148. oloc = mo.u.loc;
  5149. if (!REG_P (SET_SRC (mo.u.loc)))
  5150. resolve = false;
  5151. }
  5152. }
  5153. else if (!resolve)
  5154. {
  5155. if (GET_CODE (mo.u.loc) == SET
  5156. && oloc == SET_DEST (mo.u.loc))
  5157. /* No point in duplicating. */
  5158. oloc = mo.u.loc;
  5159. }
  5160. else
  5161. resolve = false;
  5162. loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
  5163. if (mo.u.loc != oloc)
  5164. loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
  5165. /* The loc of a MO_VAL_SET may have various forms:
  5166. (concat val dst): dst now holds val
  5167. (concat val (set dst src)): dst now holds val, copied from src
  5168. (concat (concat val dstv) dst): dst now holds val; dstv is dst
  5169. after replacing mems and non-top-level regs with values.
  5170. (concat (concat val dstv) (set dst src)): dst now holds val,
  5171. copied from src. dstv is a value-based representation of dst, if
  5172. it differs from dst. If resolution is needed, src is a REG, and
  5173. its mode is the same as that of val.
  5174. (concat (concat val (set dstv srcv)) (set dst src)): src
  5175. copied to dst, holding val. dstv and srcv are value-based
  5176. representations of dst and src, respectively.
  5177. */
  5178. if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
  5179. reverse_op (v->val_rtx, expr, cui->insn);
  5180. mo.u.loc = loc;
  5181. if (track_p)
  5182. VAL_HOLDS_TRACK_EXPR (loc) = 1;
  5183. if (preserve)
  5184. {
  5185. VAL_NEEDS_RESOLUTION (loc) = resolve;
  5186. preserve_value (v);
  5187. }
  5188. if (mo.type == MO_CLOBBER)
  5189. VAL_EXPR_IS_CLOBBERED (loc) = 1;
  5190. if (mo.type == MO_COPY)
  5191. VAL_EXPR_IS_COPIED (loc) = 1;
  5192. mo.type = MO_VAL_SET;
  5193. log_and_return:
  5194. if (dump_file && (dump_flags & TDF_DETAILS))
  5195. log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
  5196. VTI (bb)->mos.safe_push (mo);
  5197. }
/* Arguments to the call.  Built up (in reverse, then reversed) by
   prepare_call_arguments as an EXPR_LIST and consumed — attached to the
   MO_CALL micro operation and reset to NULL_RTX — by add_with_sets.  */
static rtx call_arguments;
/* Compute call_arguments for the CALL_INSN INSN in basic block BB:
   record, as CONCATs pushed onto the call_arguments EXPR_LIST, the
   values of the argument registers/stack slots found in
   CALL_INSN_FUNCTION_USAGE, plus (when determinable) the call target
   and, for OBJ_TYPE_REF virtual calls, the clobbered vtable slot.  */
static void
prepare_call_arguments (basic_block bb, rtx_insn *insn)
{
  rtx link, x, call;
  rtx prev, cur, next;
  rtx this_arg = NULL_RTX;
  tree type = NULL_TREE, t, fndecl = NULL_TREE;
  tree obj_type_ref = NULL_TREE;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;

  memset (&args_so_far_v, 0, sizeof (args_so_far_v));
  args_so_far = pack_cumulative_args (&args_so_far_v);
  call = get_call_rtx_from (insn);
  if (call)
    {
      /* Identify the callee decl: first from a SYMBOL_REF target,
	 otherwise from the MEM_EXPR of the call address.  */
      if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
	{
	  rtx symbol = XEXP (XEXP (call, 0), 0);
	  if (SYMBOL_REF_DECL (symbol))
	    fndecl = SYMBOL_REF_DECL (symbol);
	}
      if (fndecl == NULL_TREE)
	fndecl = MEM_EXPR (XEXP (call, 0));
      if (fndecl
	  && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
	fndecl = NULL_TREE;
      if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	type = TREE_TYPE (fndecl);
      if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
	{
	  /* Not a real decl; remember an OBJ_TYPE_REF (virtual call)
	     if that is what the "decl" expression wraps.  */
	  if (TREE_CODE (fndecl) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
	    obj_type_ref = TREE_OPERAND (fndecl, 0);
	  fndecl = NULL_TREE;
	}
      if (type)
	{
	  /* The prototype is only interesting if some argument is a
	     REFERENCE_TYPE to an integral type (see the reference-arg
	     read-back below) or this is a virtual call.  */
	  for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
	       t = TREE_CHAIN (t))
	    if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
		&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
	      break;
	  if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
	    type = NULL;
	  else
	    {
	      int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
	      link = CALL_INSN_FUNCTION_USAGE (insn);
#ifndef PCC_STATIC_STRUCT_RETURN
	      if (aggregate_value_p (TREE_TYPE (type), type)
		  && targetm.calls.struct_value_rtx (type, 0) == 0)
		{
		  /* Account for the hidden struct-return pointer
		     argument before scanning the real arguments.  */
		  tree struct_addr = build_pointer_type (TREE_TYPE (type));
		  machine_mode mode = TYPE_MODE (struct_addr);
		  rtx reg;
		  INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
					nargs + 1);
		  reg = targetm.calls.function_arg (args_so_far, mode,
						    struct_addr, true);
		  targetm.calls.function_arg_advance (args_so_far, mode,
						      struct_addr, true);
		  if (reg == NULL_RTX)
		    {
		      /* Passed on the stack: skip its USE entry.  */
		      for (; link; link = XEXP (link, 1))
			if (GET_CODE (XEXP (link, 0)) == USE
			    && MEM_P (XEXP (XEXP (link, 0), 0)))
			  {
			    link = XEXP (link, 1);
			    break;
			  }
		    }
		}
	      else
#endif
		INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
				      nargs);
	      if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
		{
		  /* Locate the `this' argument for the virtual call,
		     either in its register or in a stack USE.  */
		  machine_mode mode;
		  t = TYPE_ARG_TYPES (type);
		  mode = TYPE_MODE (TREE_VALUE (t));
		  this_arg = targetm.calls.function_arg (args_so_far, mode,
							 TREE_VALUE (t), true);
		  if (this_arg && !REG_P (this_arg))
		    this_arg = NULL_RTX;
		  else if (this_arg == NULL_RTX)
		    {
		      for (; link; link = XEXP (link, 1))
			if (GET_CODE (XEXP (link, 0)) == USE
			    && MEM_P (XEXP (XEXP (link, 0), 0)))
			  {
			    this_arg = XEXP (XEXP (link, 0), 0);
			    break;
			  }
		    }
		}
	    }
	}
    }
  t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
  /* Walk the argument USEs; T tracks the corresponding prototype
     entry in parallel when TYPE is known.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    if (GET_CODE (XEXP (link, 0)) == USE)
      {
	rtx item = NULL_RTX;
	x = XEXP (XEXP (link, 0), 0);
	if (GET_MODE (link) == VOIDmode
	    || GET_MODE (link) == BLKmode
	    || (GET_MODE (link) != GET_MODE (x)
		&& ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
		     && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
		    || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
			&& GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
	  /* Can't do anything for these, if the original type mode
	     isn't known or can't be converted.  */;
	else if (REG_P (x))
	  {
	    /* Prefer a preserved cselib value for the register; if
	       none, try wider integer modes up to BITS_PER_WORD.  */
	    cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	    if (val && cselib_preserved_value_p (val))
	      item = val->val_rtx;
	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
		     || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
	      {
		machine_mode mode = GET_MODE (x);

		while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
		       && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
		  {
		    rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);

		    if (reg == NULL_RTX || !REG_P (reg))
		      continue;
		    val = cselib_lookup (reg, mode, 0, VOIDmode);
		    if (val && cselib_preserved_value_p (val))
		      {
			item = val->val_rtx;
			break;
		      }
		  }
	      }
	  }
	else if (MEM_P (x))
	  {
	    rtx mem = x;
	    cselib_val *val;

	    if (!frame_pointer_needed)
	      {
		/* Rewrite sp-based addresses to account for the
		   stack adjustment at the end of BB.  */
		struct adjust_mem_data amd;
		amd.mem_mode = VOIDmode;
		amd.stack_adjust = -VTI (bb)->out.stack_adjust;
		amd.side_effects = NULL;
		amd.store = true;
		mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
					       &amd);
		gcc_assert (amd.side_effects == NULL_RTX);
	      }
	    val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
	    if (val && cselib_preserved_value_p (val))
	      item = val->val_rtx;
	    else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
		     && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
	      {
		/* For non-integer stack argument see also if they weren't
		   initialized by integers.  */
		machine_mode imode = int_mode_for_mode (GET_MODE (mem));
		if (imode != GET_MODE (mem) && imode != BLKmode)
		  {
		    val = cselib_lookup (adjust_address_nv (mem, imode, 0),
					 imode, 0, VOIDmode);
		    if (val && cselib_preserved_value_p (val))
		      item = lowpart_subreg (GET_MODE (x), val->val_rtx,
					     imode);
		  }
	      }
	  }
	if (item)
	  {
	    /* Record (concat argloc value) in the link's mode.  */
	    rtx x2 = x;
	    if (GET_MODE (item) != GET_MODE (link))
	      item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
	    if (GET_MODE (x2) != GET_MODE (link))
	      x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
	    item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
	    call_arguments
	      = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
	  }
	if (t && t != void_list_node)
	  {
	    tree argtype = TREE_VALUE (t);
	    machine_mode mode = TYPE_MODE (argtype);
	    rtx reg;
	    if (pass_by_reference (&args_so_far_v, mode, argtype, true))
	      {
		argtype = build_pointer_type (argtype);
		mode = TYPE_MODE (argtype);
	      }
	    reg = targetm.calls.function_arg (args_so_far, mode,
					      argtype, true);
	    /* For an integral REFERENCE_TYPE argument passed in the
	       expected register, also record the pointed-to value.  */
	    if (TREE_CODE (argtype) == REFERENCE_TYPE
		&& INTEGRAL_TYPE_P (TREE_TYPE (argtype))
		&& reg
		&& REG_P (reg)
		&& GET_MODE (reg) == mode
		&& (GET_MODE_CLASS (mode) == MODE_INT
		    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
		&& REG_P (x)
		&& REGNO (x) == REGNO (reg)
		&& GET_MODE (x) == mode
		&& item)
	      {
		machine_mode indmode
		  = TYPE_MODE (TREE_TYPE (argtype));
		rtx mem = gen_rtx_MEM (indmode, x);
		cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
		if (val && cselib_preserved_value_p (val))
		  {
		    item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
		    call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
							call_arguments);
		  }
		else
		  {
		    struct elt_loc_list *l;
		    tree initial;

		    /* Try harder, when passing address of a constant
		       pool integer it can be easily read back.  */
		    item = XEXP (item, 1);
		    if (GET_CODE (item) == SUBREG)
		      item = SUBREG_REG (item);
		    gcc_assert (GET_CODE (item) == VALUE);
		    val = CSELIB_VAL_PTR (item);
		    for (l = val->locs; l; l = l->next)
		      if (GET_CODE (l->loc) == SYMBOL_REF
			  && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
			  && SYMBOL_REF_DECL (l->loc)
			  && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
			{
			  initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
			  if (tree_fits_shwi_p (initial))
			    {
			      item = GEN_INT (tree_to_shwi (initial));
			      item = gen_rtx_CONCAT (indmode, mem, item);
			      call_arguments
				= gen_rtx_EXPR_LIST (VOIDmode, item,
						     call_arguments);
			    }
			  break;
			}
		  }
	      }
	    targetm.calls.function_arg_advance (args_so_far, mode,
						argtype, true);
	    t = TREE_CHAIN (t);
	  }
      }

  /* Add debug arguments.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_HAS_DEBUG_ARGS_P (fndecl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
      if (debug_args)
	{
	  unsigned int ix;
	  tree param;
	  /* debug_args pairs up (param, temp decl); record a
	     DEBUG_PARAMETER_REF bound to the temp's RTL.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
	    {
	      rtx item;
	      tree dtemp = (**debug_args)[ix + 1];
	      machine_mode mode = DECL_MODE (dtemp);
	      item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
	      item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
	      call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
						  call_arguments);
	    }
	}
    }

  /* Reverse call_arguments chain.  */
  prev = NULL_RTX;
  for (cur = call_arguments; cur; cur = next)
    {
      next = XEXP (cur, 1);
      XEXP (cur, 1) = prev;
      prev = cur;
    }
  call_arguments = prev;

  /* Record the call target, paired with pc_rtx, unless it is a plain
     SYMBOL_REF (already identified above).  */
  x = get_call_rtx_from (insn);
  if (x)
    {
      x = XEXP (XEXP (x, 0), 0);
      if (GET_CODE (x) == SYMBOL_REF)
	/* Don't record anything.  */;
      else if (CONSTANT_P (x))
	{
	  x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
			      pc_rtx, x);
	  call_arguments
	    = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
	}
      else
	{
	  cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	  if (val && cselib_preserved_value_p (val))
	    {
	      x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
	      call_arguments
		= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
	    }
	}
    }
  if (this_arg)
    {
      /* For a virtual call, record the vtable slot being invoked:
	 MEM (this_arg) is the vtable pointer; offset it by the
	 OBJ_TYPE_REF token and pair with (clobber pc).  */
      machine_mode mode
	= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
      rtx clobbered = gen_rtx_MEM (mode, this_arg);
      HOST_WIDE_INT token
	= tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
      if (token)
	clobbered = plus_constant (mode, clobbered,
				   token * GET_MODE_SIZE (mode));
      clobbered = gen_rtx_MEM (mode, clobbered);
      x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
      call_arguments
	= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
    }
}
/* Callback for cselib_record_sets_hook, that records as micro
   operations uses and stores in an insn after cselib_record_sets has
   analyzed the sets in an insn, but before it modifies the stored
   values in the internal tables, unless cselib_record_sets doesn't
   call it directly (perhaps because we're not doing cselib in the
   first place, in which case sets and n_sets will be 0).  */

static void
add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  int n1, n2;
  struct count_use_info cui;
  micro_operation *mos;

  cselib_hook_called = true;

  /* Context handed to the add_uses_1/add_stores callbacks below.  */
  cui.insn = insn;
  cui.bb = bb;
  cui.sets = sets;
  cui.n_sets = n_sets;

  n1 = VTI (bb)->mos.length ();
  cui.store_p = false;
  note_uses (&PATTERN (insn), add_uses_1, &cui);
  n2 = VTI (bb)->mos.length () - 1;
  mos = VTI (bb)->mos.address ();

  /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
     MO_VAL_LOC last.  Each pass below partitions mos[n1..n2] in place
     by advancing N1 past in-place entries, retreating N2 past entries
     already at the back, and swapping one out-of-place pair.  Note
     that N1 deliberately carries over from the first pass into the
     second, so only the tail beyond the MO_USE partition point is
     re-partitioned for MO_VAL_LOC.  */
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type == MO_USE)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_USE)
	n2--;
      if (n1 < n2)
	{
	  micro_operation sw;

	  sw = mos[n1];
	  mos[n1] = mos[n2];
	  mos[n2] = sw;
	}
    }

  n2 = VTI (bb)->mos.length () - 1;
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
	n1++;
      while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
	n2--;
      if (n1 < n2)
	{
	  micro_operation sw;

	  sw = mos[n1];
	  mos[n1] = mos[n2];
	  mos[n2] = sw;
	}
    }

  if (CALL_P (insn))
    {
      micro_operation mo;

      /* Attach the argument list built by prepare_call_arguments and
	 consume it, so the next call starts from NULL.  */
      mo.type = MO_CALL;
      mo.insn = insn;
      mo.u.loc = call_arguments;
      call_arguments = NULL_RTX;

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
      VTI (bb)->mos.safe_push (mo);
    }

  n1 = VTI (bb)->mos.length ();
  /* This will record NEXT_INSN (insn), such that we can
     insert notes before it without worrying about any
     notes that MO_USEs might emit after the insn.  */
  cui.store_p = true;
  note_stores (PATTERN (insn), add_stores, &cui);
  n2 = VTI (bb)->mos.length () - 1;
  mos = VTI (bb)->mos.address ();

  /* Order the MO_VAL_USEs first (note_stores does nothing
     on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
     insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET.
     Same two-pass in-place partition scheme as above, again with N1
     carried over between the passes.  */
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type == MO_VAL_USE)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_VAL_USE)
	n2--;
      if (n1 < n2)
	{
	  micro_operation sw;

	  sw = mos[n1];
	  mos[n1] = mos[n2];
	  mos[n2] = sw;
	}
    }

  n2 = VTI (bb)->mos.length () - 1;
  while (n1 < n2)
    {
      while (n1 < n2 && mos[n1].type == MO_CLOBBER)
	n1++;
      while (n1 < n2 && mos[n2].type != MO_CLOBBER)
	n2--;
      if (n1 < n2)
	{
	  micro_operation sw;

	  sw = mos[n1];
	  mos[n1] = mos[n2];
	  mos[n2] = sw;
	}
    }
}
  5631. static enum var_init_status
  5632. find_src_status (dataflow_set *in, rtx src)
  5633. {
  5634. tree decl = NULL_TREE;
  5635. enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
  5636. if (! flag_var_tracking_uninit)
  5637. status = VAR_INIT_STATUS_INITIALIZED;
  5638. if (src && REG_P (src))
  5639. decl = var_debug_decl (REG_EXPR (src));
  5640. else if (src && MEM_P (src))
  5641. decl = var_debug_decl (MEM_EXPR (src));
  5642. if (src && decl)
  5643. status = get_init_value (in, src, dv_from_decl (decl));
  5644. return status;
  5645. }
  5646. /* SRC is the source of an assignment. Use SET to try to find what
  5647. was ultimately assigned to SRC. Return that value if known,
  5648. otherwise return SRC itself. */
  5649. static rtx
  5650. find_src_set_src (dataflow_set *set, rtx src)
  5651. {
  5652. tree decl = NULL_TREE; /* The variable being copied around. */
  5653. rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
  5654. variable var;
  5655. location_chain nextp;
  5656. int i;
  5657. bool found;
  5658. if (src && REG_P (src))
  5659. decl = var_debug_decl (REG_EXPR (src));
  5660. else if (src && MEM_P (src))
  5661. decl = var_debug_decl (MEM_EXPR (src));
  5662. if (src && decl)
  5663. {
  5664. decl_or_value dv = dv_from_decl (decl);
  5665. var = shared_hash_find (set->vars, dv);
  5666. if (var)
  5667. {
  5668. found = false;
  5669. for (i = 0; i < var->n_var_parts && !found; i++)
  5670. for (nextp = var->var_part[i].loc_chain; nextp && !found;
  5671. nextp = nextp->next)
  5672. if (rtx_equal_p (nextp->loc, src))
  5673. {
  5674. set_src = nextp->set_src;
  5675. found = true;
  5676. }
  5677. }
  5678. }
  5679. return set_src;
  5680. }
/* Compute the changes of variable locations in the basic block BB.
   Replays BB's micro operations on top of the IN set to recompute the
   OUT set; returns true iff OUT changed from its previous value.  */

static bool
compute_bb_dataflow (basic_block bb)
{
  unsigned int i;
  micro_operation *mo;
  bool changed;
  dataflow_set old_out;
  dataflow_set *in = &VTI (bb)->in;
  dataflow_set *out = &VTI (bb)->out;

  /* Save the previous OUT so we can detect whether this pass changed
     anything, then restart OUT from IN.  */
  dataflow_set_init (&old_out);
  dataflow_set_copy (&old_out, out);
  dataflow_set_copy (out, in);

  if (MAY_HAVE_DEBUG_INSNS)
    local_get_addr_cache = new hash_map<rtx, rtx>;

  FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
    {
      rtx_insn *insn = mo->insn;

      switch (mo->type)
	{
	  case MO_CALL:
	    dataflow_set_clear_at_call (out);
	    break;

	  case MO_USE:
	    {
	      rtx loc = mo->u.loc;

	      if (REG_P (loc))
		var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
	      else if (MEM_P (loc))
		var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
	    }
	    break;

	  case MO_VAL_LOC:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc;
	      tree var;

	      /* loc is either (concat val var-location) or a bare
		 var-location pattern.  */
	      if (GET_CODE (loc) == CONCAT)
		{
		  val = XEXP (loc, 0);
		  vloc = XEXP (loc, 1);
		}
	      else
		{
		  val = NULL_RTX;
		  vloc = loc;
		}

	      var = PAT_VAR_LOCATION_DECL (vloc);

	      /* Bind the decl to the new location, dropping whatever
		 it was bound to before.  */
	      clobber_variable_part (out, NULL_RTX,
				     dv_from_decl (var), 0, NULL_RTX);
	      if (val)
		{
		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
		  set_variable_part (out, val, dv_from_decl (var), 0,
				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				     INSERT);
		}
	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
		set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
				   dv_from_decl (var), 0,
				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				   INSERT);
	    }
	    break;

	  case MO_VAL_USE:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;

	      /* loc is (concat val use-loc) or
		 (concat (concat val use-loc) use-loc-as-written).  */
	      vloc = uloc = XEXP (loc, 1);
	      val = XEXP (loc, 0);

	      if (GET_CODE (val) == CONCAT)
		{
		  uloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (VAL_NEEDS_RESOLUTION (loc))
		val_resolve (out, val, vloc, insn);
	      else
		val_store (out, val, uloc, insn, false);

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  if (GET_CODE (uloc) == REG)
		    var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		  else if (GET_CODE (uloc) == MEM)
		    var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		}
	    }
	    break;

	  case MO_VAL_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;
	      rtx dstv, srcv;

	      /* Decode the forms documented above add_stores:
		 VAL is the value, ULOC the dst (or SET) as written,
		 DSTV/SRCV the value-based dst/src when present.  */
	      vloc = loc;
	      uloc = XEXP (vloc, 1);
	      val = XEXP (vloc, 0);
	      vloc = uloc;

	      if (GET_CODE (uloc) == SET)
		{
		  dstv = SET_DEST (uloc);
		  srcv = SET_SRC (uloc);
		}
	      else
		{
		  dstv = uloc;
		  srcv = NULL;
		}

	      if (GET_CODE (val) == CONCAT)
		{
		  dstv = vloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (GET_CODE (vloc) == SET)
		{
		  srcv = SET_SRC (vloc);

		  gcc_assert (val != srcv);
		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));

		  dstv = vloc = SET_DEST (vloc);

		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (out, val, srcv, insn);
		}
	      else if (VAL_NEEDS_RESOLUTION (loc))
		{
		  gcc_assert (GET_CODE (uloc) == SET
			      && GET_CODE (SET_SRC (uloc)) == REG);
		  val_resolve (out, val, SET_SRC (uloc), insn);
		}

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  if (VAL_EXPR_IS_CLOBBERED (loc))
		    {
		      if (REG_P (uloc))
			var_reg_delete (out, uloc, true);
		      else if (MEM_P (uloc))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
			  var_mem_delete (out, dstv, true);
			}
		    }
		  else
		    {
		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
		      rtx src = NULL, dst = uloc;
		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;

		      if (GET_CODE (uloc) == SET)
			{
			  src = SET_SRC (uloc);
			  dst = SET_DEST (uloc);
			}

		      if (copied_p)
			{
			  /* For a copy, propagate the source's init
			     status and its original set_src.  */
			  if (flag_var_tracking_uninit)
			    {
			      status = find_src_status (in, src);

			      if (status == VAR_INIT_STATUS_UNKNOWN)
				status = find_src_status (out, src);
			    }

			  src = find_src_set_src (in, src);
			}

		      if (REG_P (dst))
			var_reg_delete_and_set (out, dst, !copied_p,
						status, srcv);
		      else if (MEM_P (dst))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
			  var_mem_delete_and_set (out, dstv, !copied_p,
						  status, srcv);
			}
		    }
		}
	      else if (REG_P (uloc))
		var_regno_delete (out, REGNO (uloc));
	      else if (MEM_P (uloc))
		{
		  gcc_checking_assert (GET_CODE (vloc) == MEM);
		  gcc_checking_assert (dstv == vloc);
		  if (dstv != vloc)
		    clobber_overlapping_mems (out, vloc);
		}

	      val_store (out, val, dstv, insn, true);
	    }
	    break;

	  case MO_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      if (REG_P (loc))
		var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
					set_src);
	      else if (MEM_P (loc))
		var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
					set_src);
	    }
	    break;

	  case MO_COPY:
	    {
	      rtx loc = mo->u.loc;
	      enum var_init_status src_status;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      if (! flag_var_tracking_uninit)
		src_status = VAR_INIT_STATUS_INITIALIZED;
	      else
		{
		  src_status = find_src_status (in, set_src);

		  if (src_status == VAR_INIT_STATUS_UNKNOWN)
		    src_status = find_src_status (out, set_src);
		}

	      set_src = find_src_set_src (in, set_src);

	      if (REG_P (loc))
		var_reg_delete_and_set (out, loc, false, src_status, set_src);
	      else if (MEM_P (loc))
		var_mem_delete_and_set (out, loc, false, src_status, set_src);
	    }
	    break;

	  case MO_USE_NO_VAR:
	    {
	      rtx loc = mo->u.loc;

	      if (REG_P (loc))
		var_reg_delete (out, loc, false);
	      else if (MEM_P (loc))
		var_mem_delete (out, loc, false);
	    }
	    break;

	  case MO_CLOBBER:
	    {
	      rtx loc = mo->u.loc;

	      if (REG_P (loc))
		var_reg_delete (out, loc, true);
	      else if (MEM_P (loc))
		var_mem_delete (out, loc, true);
	    }
	    break;

	  case MO_ADJUST:
	    out->stack_adjust += mo->u.adjust;
	    break;
	}
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      delete local_get_addr_cache;
      local_get_addr_cache = NULL;

      /* Canonicalize the resulting OUT set so comparisons against the
	 previous OUT are meaningful.  */
      dataflow_set_equiv_regs (out);
      shared_hash_htab (out->vars)
	->traverse <dataflow_set *, canonicalize_values_mark> (out);
      shared_hash_htab (out->vars)
	->traverse <dataflow_set *, canonicalize_values_star> (out);
#if ENABLE_CHECKING
      shared_hash_htab (out->vars)
	->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
#endif
    }
  changed = dataflow_set_different (&old_out, out);
  dataflow_set_destroy (&old_out);
  return changed;
}
/* Find the locations of variables in the whole function.

   Runs a worklist-driven dataflow fixpoint over the CFG: each block's
   IN set is recomputed from its predecessors' OUT sets, then
   compute_bb_dataflow propagates IN to OUT.  Returns true on success,
   false if the hash tables grew past PARAM_MAX_VARTRACK_SIZE.  */

static bool
vt_find_locations (void)
{
  /* WORKLIST holds blocks for the current round, PENDING for the next;
     both are priority heaps keyed on reverse completion order.  */
  bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
  bb_heap_t *pending = new bb_heap_t (LONG_MIN);
  bb_heap_t *fibheap_swap = NULL;
  /* IN_WORKLIST/IN_PENDING mirror heap membership by block index.  */
  sbitmap visited, in_worklist, in_pending, sbitmap_swap;
  basic_block bb;
  edge e;
  int *bb_order;
  int *rc_order;
  int i;
  int htabsz = 0;	/* Combined size of all live IN/OUT hash tables.  */
  int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
  bool success = true;

  timevar_push (TV_VAR_TRACKING_DATAFLOW);
  /* Compute reverse completion order of depth first search of the CFG
     so that the data-flow runs faster.  */
  rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  pre_and_rev_post_order_compute (NULL, rc_order, false);
  for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
    bb_order[rc_order[i]] = i;
  free (rc_order);

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
  in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (in_worklist);

  /* Seed the first round with every basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    pending->insert (bb_order[bb->index], bb);
  bitmap_ones (in_pending);

  while (success && !pending->empty ())
    {
      /* Promote PENDING to be this round's WORKLIST, swapping the
	 membership bitmaps along with the heaps.  */
      fibheap_swap = pending;
      pending = worklist;
      worklist = fibheap_swap;
      sbitmap_swap = in_pending;
      in_pending = in_worklist;
      in_worklist = sbitmap_swap;

      bitmap_clear (visited);

      while (!worklist->empty ())
	{
	  bb = worklist->extract_min ();
	  bitmap_clear_bit (in_worklist, bb->index);
	  gcc_assert (!bitmap_bit_p (visited, bb->index));
	  if (!bitmap_bit_p (visited, bb->index))
	    {
	      bool changed;
	      edge_iterator ei;
	      int oldinsz, oldoutsz;

	      bitmap_set_bit (visited, bb->index);

	      /* Subtract the old IN/OUT table sizes from the running
		 total; the new sizes are added back after
		 compute_bb_dataflow below.  */
	      if (VTI (bb)->in.vars)
		{
		  htabsz
		    -= shared_hash_htab (VTI (bb)->in.vars)->size ()
		       + shared_hash_htab (VTI (bb)->out.vars)->size ();
		  oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
		  oldoutsz
		    = shared_hash_htab (VTI (bb)->out.vars)->elements ();
		}
	      else
		oldinsz = oldoutsz = 0;

	      if (MAY_HAVE_DEBUG_INSNS)
		{
		  dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
		  bool first = true, adjust = false;

		  /* Calculate the IN set as the intersection of
		     predecessor OUT sets.  */

		  dataflow_set_clear (in);
		  dst_can_be_shared = true;

		  FOR_EACH_EDGE (e, ei, bb->preds)
		    if (!VTI (e->src)->flooded)
		      /* A not-yet-flooded predecessor must come later
			 in the iteration order (a back edge).  */
		      gcc_assert (bb_order[bb->index]
				  <= bb_order[e->src->index]);
		    else if (first)
		      {
			dataflow_set_copy (in, &VTI (e->src)->out);
			first_out = &VTI (e->src)->out;
			first = false;
		      }
		    else
		      {
			dataflow_set_merge (in, &VTI (e->src)->out);
			adjust = true;
		      }

		  if (adjust)
		    {
		      dataflow_post_merge_adjust (in, &VTI (bb)->permp);
#if ENABLE_CHECKING
		      /* Merge and merge_adjust should keep entries in
			 canonical order.  */
		      shared_hash_htab (in->vars)
			->traverse <dataflow_set *,
				    canonicalize_loc_order_check> (in);
#endif
		      if (dst_can_be_shared)
			{
			  /* The merge left IN equal to the first
			     predecessor's OUT, so share its table
			     instead of keeping a private copy.  */
			  shared_hash_destroy (in->vars);
			  in->vars = shared_hash_copy (first_out->vars);
			}
		    }

		  VTI (bb)->flooded = true;
		}
	      else
		{
		  /* Calculate the IN set as union of predecessor OUT sets.  */
		  dataflow_set_clear (&VTI (bb)->in);
		  FOR_EACH_EDGE (e, ei, bb->preds)
		    dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
		}

	      changed = compute_bb_dataflow (bb);
	      htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
			+ shared_hash_htab (VTI (bb)->out.vars)->size ();

	      /* Give up if the tables exceed the user-settable limit;
		 the caller may retry without debug-insn tracking.  */
	      if (htabmax && htabsz > htabmax)
		{
		  if (MAY_HAVE_DEBUG_INSNS)
		    inform (DECL_SOURCE_LOCATION (cfun->decl),
			    "variable tracking size limit exceeded with "
			    "-fvar-tracking-assignments, retrying without");
		  else
		    inform (DECL_SOURCE_LOCATION (cfun->decl),
			    "variable tracking size limit exceeded");
		  success = false;
		  break;
		}

	      if (changed)
		{
		  /* OUT changed: successors must recompute their IN,
		     either later this round or in the next one.  */
		  FOR_EACH_EDGE (e, ei, bb->succs)
		    {
		      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
			continue;

		      if (bitmap_bit_p (visited, e->dest->index))
			{
			  if (!bitmap_bit_p (in_pending, e->dest->index))
			    {
			      /* Send E->DEST to next round.  */
			      bitmap_set_bit (in_pending, e->dest->index);
			      pending->insert (bb_order[e->dest->index],
					       e->dest);
			    }
			}
		      else if (!bitmap_bit_p (in_worklist, e->dest->index))
			{
			  /* Add E->DEST to current round.  */
			  bitmap_set_bit (in_worklist, e->dest->index);
			  worklist->insert (bb_order[e->dest->index],
					    e->dest);
			}
		    }
		}

	      if (dump_file)
		fprintf (dump_file,
			 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
			 bb->index,
			 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
			 oldinsz,
			 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
			 oldoutsz,
			 (int)worklist->nodes (), (int)pending->nodes (),
			 htabsz);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "BB %i IN:\n", bb->index);
		  dump_dataflow_set (&VTI (bb)->in);
		  fprintf (dump_file, "BB %i OUT:\n", bb->index);
		  dump_dataflow_set (&VTI (bb)->out);
		}
	    }
	}
    }

  if (success && MAY_HAVE_DEBUG_INSNS)
    FOR_EACH_BB_FN (bb, cfun)
      gcc_assert (VTI (bb)->flooded);

  free (bb_order);
  delete worklist;
  delete pending;
  sbitmap_free (visited);
  sbitmap_free (in_worklist);
  sbitmap_free (in_pending);

  timevar_pop (TV_VAR_TRACKING_DATAFLOW);
  return success;
}
  6134. /* Print the content of the LIST to dump file. */
  6135. static void
  6136. dump_attrs_list (attrs list)
  6137. {
  6138. for (; list; list = list->next)
  6139. {
  6140. if (dv_is_decl_p (list->dv))
  6141. print_mem_expr (dump_file, dv_as_decl (list->dv));
  6142. else
  6143. print_rtl_single (dump_file, dv_as_value (list->dv));
  6144. fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
  6145. }
  6146. fprintf (dump_file, "\n");
  6147. }
  6148. /* Print the information about variable *SLOT to dump file. */
  6149. int
  6150. dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
  6151. {
  6152. variable var = *slot;
  6153. dump_var (var);
  6154. /* Continue traversing the hash table. */
  6155. return 1;
  6156. }
/* Print the information about variable VAR to dump file.  */

static void
dump_var (variable var)
{
  int i;
  location_chain node;

  if (dv_is_decl_p (var->dv))
    {
      /* The variable is a declaration: print its name, or a
	 UID-derived placeholder when it is anonymous.  */
      const_tree decl = dv_as_decl (var->dv);

      if (DECL_NAME (decl))
	{
	  fprintf (dump_file, " name: %s",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));
	  if (dump_flags & TDF_UID)
	    fprintf (dump_file, "D.%u", DECL_UID (decl));
	}
      else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
	fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
      else
	fprintf (dump_file, " name: D.%u", DECL_UID (decl));
      fprintf (dump_file, "\n");
    }
  else
    {
      /* Otherwise the variable is a VALUE: print its rtl.  */
      fputc (' ', dump_file);
      print_rtl_single (dump_file, dv_as_value (var->dv));
    }

  /* Print each variable part with its chain of locations.  One-part
     variables always report offset 0.  */
  for (i = 0; i < var->n_var_parts; i++)
    {
      fprintf (dump_file, " offset %ld\n",
	       (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  fprintf (dump_file, " ");
	  if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
	    fprintf (dump_file, "[uninit]");
	  print_rtl_single (dump_file, node->loc);
	}
    }
}
  6197. /* Print the information about variables from hash table VARS to dump file. */
  6198. static void
  6199. dump_vars (variable_table_type *vars)
  6200. {
  6201. if (vars->elements () > 0)
  6202. {
  6203. fprintf (dump_file, "Variables:\n");
  6204. vars->traverse <void *, dump_var_tracking_slot> (NULL);
  6205. }
  6206. }
  6207. /* Print the dataflow set SET to dump file. */
  6208. static void
  6209. dump_dataflow_set (dataflow_set *set)
  6210. {
  6211. int i;
  6212. fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
  6213. set->stack_adjust);
  6214. for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
  6215. {
  6216. if (set->regs[i])
  6217. {
  6218. fprintf (dump_file, "Reg %d:", i);
  6219. dump_attrs_list (set->regs[i]);
  6220. }
  6221. }
  6222. dump_vars (shared_hash_htab (set->vars));
  6223. fprintf (dump_file, "\n");
  6224. }
  6225. /* Print the IN and OUT sets for each basic block to dump file. */
  6226. static void
  6227. dump_dataflow_sets (void)
  6228. {
  6229. basic_block bb;
  6230. FOR_EACH_BB_FN (bb, cfun)
  6231. {
  6232. fprintf (dump_file, "\nBasic block %d:\n", bb->index);
  6233. fprintf (dump_file, "IN:\n");
  6234. dump_dataflow_set (&VTI (bb)->in);
  6235. fprintf (dump_file, "OUT:\n");
  6236. dump_dataflow_set (&VTI (bb)->out);
  6237. }
  6238. }
/* Return the variable for DV in dropped_values, inserting one if
   requested with INSERT.  Returns NULL if DV is absent and INSERT is
   not requested.  */

static inline variable
variable_from_dropped (decl_or_value dv, enum insert_option insert)
{
  variable_def **slot;
  variable empty_var;
  onepart_enum_t onepart;

  slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);

  /* With NO_INSERT the slot may not exist at all.  */
  if (!slot)
    return NULL;

  /* Already present: return the existing entry.  */
  if (*slot)
    return *slot;

  gcc_checking_assert (insert == INSERT);

  /* Only values and debug exprs are expected in dropped_values.  */
  onepart = dv_onepart_p (dv);
  gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);

  /* Build an empty variable (no location parts) for DV and store it
     in the freshly inserted slot.  */
  empty_var = (variable) pool_alloc (onepart_pool (onepart));
  empty_var->dv = dv;
  empty_var->refcount = 1;
  empty_var->n_var_parts = 0;
  empty_var->onepart = onepart;
  empty_var->in_changed_variables = false;
  empty_var->var_part[0].loc_chain = NULL;
  empty_var->var_part[0].cur_loc = NULL;
  VAR_LOC_1PAUX (empty_var) = NULL;
  set_dv_changed (dv, true);

  *slot = empty_var;

  return empty_var;
}
  6268. /* Recover the one-part aux from dropped_values. */
  6269. static struct onepart_aux *
  6270. recover_dropped_1paux (variable var)
  6271. {
  6272. variable dvar;
  6273. gcc_checking_assert (var->onepart);
  6274. if (VAR_LOC_1PAUX (var))
  6275. return VAR_LOC_1PAUX (var);
  6276. if (var->onepart == ONEPART_VDECL)
  6277. return NULL;
  6278. dvar = variable_from_dropped (var->dv, NO_INSERT);
  6279. if (!dvar)
  6280. return NULL;
  6281. VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
  6282. VAR_LOC_1PAUX (dvar) = NULL;
  6283. return VAR_LOC_1PAUX (var);
  6284. }
/* Add variable VAR to the hash table of changed variables and
   if it has no locations delete it from SET's hash table.  */

static void
variable_was_changed (variable var, dataflow_set *set)
{
  hashval_t hash = dv_htab_hash (var->dv);

  if (emit_notes)
    {
      variable_def **slot;

      /* Remember this decl or VALUE has been added to changed_variables.  */
      set_dv_changed (var->dv, true);

      slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);

      if (*slot)
	{
	  /* VAR was already queued: retire the old entry, preserving
	     its one-part auxiliary data.  */
	  variable old_var = *slot;
	  gcc_assert (old_var->in_changed_variables);
	  old_var->in_changed_variables = false;
	  if (var != old_var && var->onepart)
	    {
	      /* Restore the auxiliary info from an empty variable
		 previously created for changed_variables, so it is
		 not lost.  */
	      gcc_checking_assert (!VAR_LOC_1PAUX (var));
	      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
	      VAR_LOC_1PAUX (old_var) = NULL;
	    }
	  variable_htab_free (*slot);
	}

      if (set && var->n_var_parts == 0)
	{
	  /* VAR lost all its locations: queue an empty variable in
	     changed_variables instead of VAR itself, sharing it with
	     dropped_values for values and debug exprs.  */
	  onepart_enum_t onepart = var->onepart;
	  variable empty_var = NULL;
	  variable_def **dslot = NULL;

	  if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
	    {
	      dslot = dropped_values->find_slot_with_hash (var->dv,
							   dv_htab_hash (var->dv),
							   INSERT);
	      empty_var = *dslot;

	      if (empty_var)
		{
		  gcc_checking_assert (!empty_var->in_changed_variables);
		  /* At most one of VAR and EMPTY_VAR may carry the
		     auxiliary data; make sure VAR ends up with it.  */
		  if (!VAR_LOC_1PAUX (var))
		    {
		      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
		      VAR_LOC_1PAUX (empty_var) = NULL;
		    }
		  else
		    gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
		}
	    }

	  if (!empty_var)
	    {
	      empty_var = (variable) pool_alloc (onepart_pool (onepart));
	      empty_var->dv = var->dv;
	      empty_var->refcount = 1;
	      empty_var->n_var_parts = 0;
	      empty_var->onepart = onepart;
	      if (dslot)
		{
		  /* dropped_values keeps its own reference.  */
		  empty_var->refcount++;
		  *dslot = empty_var;
		}
	    }
	  else
	    empty_var->refcount++;
	  empty_var->in_changed_variables = true;
	  *slot = empty_var;
	  if (onepart)
	    {
	      /* Hand VAR's auxiliary data over to the queued empty
		 variable so it survives VAR's removal below.  */
	      empty_var->var_part[0].loc_chain = NULL;
	      empty_var->var_part[0].cur_loc = NULL;
	      VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
	      VAR_LOC_1PAUX (var) = NULL;
	    }
	  goto drop_var;
	}
      else
	{
	  /* VAR still has locations: queue VAR itself.  */
	  if (var->onepart && !VAR_LOC_1PAUX (var))
	    recover_dropped_1paux (var);
	  var->refcount++;
	  var->in_changed_variables = true;
	  *slot = var;
	}
    }
  else
    {
      gcc_assert (set);
      if (var->n_var_parts == 0)
	{
	  variable_def **slot;

	drop_var:
	  /* Remove the now-empty variable from SET's hash table,
	     unsharing the table first when it is shared.  */
	  slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
	  if (slot)
	    {
	      if (shared_hash_shared (set->vars))
		slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
						      NO_INSERT);
	      shared_hash_htab (set->vars)->clear_slot (slot);
	    }
	}
    }
}
  6389. /* Look for the index in VAR->var_part corresponding to OFFSET.
  6390. Return -1 if not found. If INSERTION_POINT is non-NULL, the
  6391. referenced int will be set to the index that the part has or should
  6392. have, if it should be inserted. */
  6393. static inline int
  6394. find_variable_location_part (variable var, HOST_WIDE_INT offset,
  6395. int *insertion_point)
  6396. {
  6397. int pos, low, high;
  6398. if (var->onepart)
  6399. {
  6400. if (offset != 0)
  6401. return -1;
  6402. if (insertion_point)
  6403. *insertion_point = 0;
  6404. return var->n_var_parts - 1;
  6405. }
  6406. /* Find the location part. */
  6407. low = 0;
  6408. high = var->n_var_parts;
  6409. while (low != high)
  6410. {
  6411. pos = (low + high) / 2;
  6412. if (VAR_PART_OFFSET (var, pos) < offset)
  6413. low = pos + 1;
  6414. else
  6415. high = pos;
  6416. }
  6417. pos = low;
  6418. if (insertion_point)
  6419. *insertion_point = pos;
  6420. if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
  6421. return pos;
  6422. return -1;
  6423. }
/* Set the part of variable's location in the dataflow set SET.  The
   variable is in slot SLOT of SET's hash table (NULL *SLOT means it
   must be created), identified by DV with part offset OFFSET; LOC is
   the new location, INITIALIZED its initialization status and SET_SRC
   the source rtx of the assignment, if known.  Returns the (possibly
   relocated after unsharing) slot.  */

static variable_def **
set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
	       decl_or_value dv, HOST_WIDE_INT offset,
	       enum var_init_status initialized, rtx set_src)
{
  int pos;
  location_chain node, next;
  location_chain *nextp;
  variable var;
  onepart_enum_t onepart;

  var = *slot;

  if (var)
    onepart = var->onepart;
  else
    onepart = dv_onepart_p (dv);

  /* One-part variables only ever have part offset 0, and DV itself
     must never appear as its own location.  */
  gcc_checking_assert (offset == 0 || !onepart);
  gcc_checking_assert (loc != dv_as_opaque (dv));

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  if (!var)
    {
      /* Create new variable information.  */
      var = (variable) pool_alloc (onepart_pool (onepart));
      var->dv = dv;
      var->refcount = 1;
      var->n_var_parts = 1;
      var->onepart = onepart;
      var->in_changed_variables = false;
      if (var->onepart)
	VAR_LOC_1PAUX (var) = NULL;
      else
	VAR_PART_OFFSET (var, 0) = offset;
      var->var_part[0].loc_chain = NULL;
      var->var_part[0].cur_loc = NULL;
      *slot = var;
      pos = 0;
      nextp = &var->var_part[0].loc_chain;
    }
  else if (onepart)
    {
      /* One-part variables keep their location chain in canonical
	 order.  Scan for the insertion point: after the scan, R == 0
	 means LOC is already in the chain (nothing to insert),
	 R > 0 means LOC goes right before *NEXTP, and C counts the
	 nodes preceding the insertion point.  */
      int r = -1, c = 0;

      gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));

      pos = 0;

      if (GET_CODE (loc) == VALUE)
	{
	  /* VALUEs sort before REGs and MEMs, ordered among
	     themselves by canon_value_cmp.  */
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (GET_CODE (node->loc) == VALUE)
	      {
		if (node->loc == loc)
		  {
		    r = 0;
		    break;
		  }
		if (canon_value_cmp (node->loc, loc))
		  c++;
		else
		  {
		    r = 1;
		    break;
		  }
	      }
	    else if (REG_P (node->loc) || MEM_P (node->loc))
	      c++;
	    else
	      {
		r = 1;
		break;
	      }
	}
      else if (REG_P (loc))
	{
	  /* REGs sort after VALUEs, ordered by register number.  */
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (REG_P (node->loc))
	      {
		if (REGNO (node->loc) < REGNO (loc))
		  c++;
		else
		  {
		    if (REGNO (node->loc) == REGNO (loc))
		      r = 0;
		    else
		      r = 1;
		    break;
		  }
	      }
	    else
	      {
		r = 1;
		break;
	      }
	}
      else if (MEM_P (loc))
	{
	  /* MEMs sort after REGs, ordered by loc_cmp on their
	     addresses.  */
	  for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	       nextp = &node->next)
	    if (REG_P (node->loc))
	      c++;
	    else if (MEM_P (node->loc))
	      {
		if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
		  break;
		else
		  c++;
	      }
	    else
	      {
		r = 1;
		break;
	      }
	}
      else
	/* Anything else sorts last, ordered by loc_cmp.  */
	for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
	     nextp = &node->next)
	  if ((r = loc_cmp (node->loc, loc)) >= 0)
	    break;
	  else
	    c++;

      /* LOC is already recorded; nothing to do.  */
      if (r == 0)
	return slot;

      if (shared_var_p (var, set->vars))
	{
	  /* Unsharing replaced the chain; re-walk the C nodes that
	     precede the insertion point to recompute NEXTP.  */
	  slot = unshare_variable (set, slot, var, initialized);
	  var = *slot;
	  for (nextp = &var->var_part[0].loc_chain; c;
	       nextp = &(*nextp)->next)
	    c--;
	  gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
	}
    }
  else
    {
      int inspos = 0;

      gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));

      pos = find_variable_location_part (var, offset, &inspos);

      if (pos >= 0)
	{
	  node = var->var_part[pos].loc_chain;

	  if (node
	      && ((REG_P (node->loc) && REG_P (loc)
		   && REGNO (node->loc) == REGNO (loc))
		  || rtx_equal_p (node->loc, loc)))
	    {
	      /* LOC is in the beginning of the chain so we have nothing
		 to do.  */
	      if (node->init < initialized)
		node->init = initialized;
	      if (set_src != NULL)
		node->set_src = set_src;

	      return slot;
	    }
	  else
	    {
	      /* We have to make a copy of a shared variable.  */
	      if (shared_var_p (var, set->vars))
		{
		  slot = unshare_variable (set, slot, var, initialized);
		  var = *slot;
		}
	    }
	}
      else
	{
	  /* We have not found the location part, new one will be created.  */

	  /* We have to make a copy of the shared variable.  */
	  if (shared_var_p (var, set->vars))
	    {
	      slot = unshare_variable (set, slot, var, initialized);
	      var = *slot;
	    }

	  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
	     thus there are at most MAX_VAR_PARTS different offsets.  */
	  gcc_assert (var->n_var_parts < MAX_VAR_PARTS
		      && (!var->n_var_parts || !onepart));

	  /* We have to move the elements of array starting at index
	     inspos to the next position.  */
	  for (pos = var->n_var_parts; pos > inspos; pos--)
	    var->var_part[pos] = var->var_part[pos - 1];

	  var->n_var_parts++;
	  gcc_checking_assert (!onepart);
	  VAR_PART_OFFSET (var, pos) = offset;
	  var->var_part[pos].loc_chain = NULL;
	  var->var_part[pos].cur_loc = NULL;
	}

      /* Delete the location from the list.  */
      nextp = &var->var_part[pos].loc_chain;
      for (node = var->var_part[pos].loc_chain; node; node = next)
	{
	  next = node->next;
	  if ((REG_P (node->loc) && REG_P (loc)
	       && REGNO (node->loc) == REGNO (loc))
	      || rtx_equal_p (node->loc, loc))
	    {
	      /* Save these values, to assign to the new node, before
		 deleting this one.  */
	      if (node->init > initialized)
		initialized = node->init;
	      if (node->set_src != NULL && set_src == NULL)
		set_src = node->set_src;
	      if (var->var_part[pos].cur_loc == node->loc)
		var->var_part[pos].cur_loc = NULL;
	      pool_free (loc_chain_pool, node);
	      *nextp = next;
	      break;
	    }
	  else
	    nextp = &node->next;
	}

      nextp = &var->var_part[pos].loc_chain;
    }

  /* Add the location to the beginning.  */
  node = (location_chain) pool_alloc (loc_chain_pool);
  node->loc = loc;
  node->init = initialized;
  node->set_src = set_src;
  node->next = *nextp;
  *nextp = node;

  /* If no location was emitted do so.  */
  if (var->var_part[pos].cur_loc == NULL)
    variable_was_changed (var, set);

  return slot;
}
  6647. /* Set the part of variable's location in the dataflow set SET. The
  6648. variable part is specified by variable's declaration in DV and
  6649. offset OFFSET and the part's location by LOC. IOPT should be
  6650. NO_INSERT if the variable is known to be in SET already and the
  6651. variable hash table must not be resized, and INSERT otherwise. */
  6652. static void
  6653. set_variable_part (dataflow_set *set, rtx loc,
  6654. decl_or_value dv, HOST_WIDE_INT offset,
  6655. enum var_init_status initialized, rtx set_src,
  6656. enum insert_option iopt)
  6657. {
  6658. variable_def **slot;
  6659. if (iopt == NO_INSERT)
  6660. slot = shared_hash_find_slot_noinsert (set->vars, dv);
  6661. else
  6662. {
  6663. slot = shared_hash_find_slot (set->vars, dv);
  6664. if (!slot)
  6665. slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
  6666. }
  6667. set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
  6668. }
/* Remove all recorded register locations for the given variable part
   from dataflow set SET, except for those that are identical to loc.
   The variable part is specified by variable's declaration or value
   DV and offset OFFSET.  */

static variable_def **
clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
		   HOST_WIDE_INT offset, rtx set_src)
{
  variable var = *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain node, next;

      /* Remove the register locations from the dataflow set.  */
      next = var->var_part[pos].loc_chain;
      for (node = next; node; node = next)
	{
	  next = node->next;
	  /* Keep LOC itself, and with -fvar-tracking-uninit also keep
	     locations recorded from the same (non-MEM) SET_SRC.  */
	  if (node->loc != loc
	      && (!flag_var_tracking_uninit
		  || !set_src
		  || MEM_P (set_src)
		  || !rtx_equal_p (set_src, node->set_src)))
	    {
	      if (REG_P (node->loc))
		{
		  attrs anode, anext;
		  attrs *anextp;

		  /* Remove the variable part from the register's
		     list, but preserve any other variable parts
		     that might be regarded as live in that same
		     register.  */
		  anextp = &set->regs[REGNO (node->loc)];
		  for (anode = *anextp; anode; anode = anext)
		    {
		      anext = anode->next;
		      if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
			  && anode->offset == offset)
			{
			  pool_free (attrs_pool, anode);
			  *anextp = anext;
			}
		      else
			anextp = &anode->next;
		    }
		}

	      /* Drop this location from the variable itself; the
		 slot may move if the variable gets unshared.  */
	      slot = delete_slot_part (set, node->loc, slot, offset);
	    }
	}
    }

  return slot;
}
  6721. /* Remove all recorded register locations for the given variable part
  6722. from dataflow set SET, except for those that are identical to loc.
  6723. The variable part is specified by variable's declaration or value
  6724. DV and offset OFFSET. */
  6725. static void
  6726. clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
  6727. HOST_WIDE_INT offset, rtx set_src)
  6728. {
  6729. variable_def **slot;
  6730. if (!dv_as_opaque (dv)
  6731. || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
  6732. return;
  6733. slot = shared_hash_find_slot_noinsert (set->vars, dv);
  6734. if (!slot)
  6735. return;
  6736. clobber_slot_part (set, loc, slot, offset, set_src);
  6737. }
/* Delete the part of variable's location from dataflow set SET.  The
   variable part is specified by its SET->vars slot SLOT and offset
   OFFSET and the part's location by LOC.  */

static variable_def **
delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
		  HOST_WIDE_INT offset)
{
  variable var = *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain node, next;
      location_chain *nextp;
      bool changed;
      rtx cur_loc;

      if (shared_var_p (var, set->vars))
	{
	  /* If the variable contains the location part we have to
	     make a copy of the variable.  */
	  for (node = var->var_part[pos].loc_chain; node;
	       node = node->next)
	    {
	      if ((REG_P (node->loc) && REG_P (loc)
		   && REGNO (node->loc) == REGNO (loc))
		  || rtx_equal_p (node->loc, loc))
		{
		  slot = unshare_variable (set, slot, var,
					   VAR_INIT_STATUS_UNKNOWN);
		  var = *slot;
		  break;
		}
	    }
	}

      /* For one-part variables with auxiliary data, the relevant
	 last-used location is kept in VAR_LOC_FROM rather than
	 cur_loc.  */
      if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[pos].cur_loc;

      /* Delete the location part.  */
      changed = false;
      nextp = &var->var_part[pos].loc_chain;
      for (node = *nextp; node; node = next)
	{
	  next = node->next;
	  if ((REG_P (node->loc) && REG_P (loc)
	       && REGNO (node->loc) == REGNO (loc))
	      || rtx_equal_p (node->loc, loc))
	    {
	      /* If we have deleted the location which was last emitted
		 we have to emit new location so add the variable to set
		 of changed variables.  */
	      if (cur_loc == node->loc)
		{
		  changed = true;
		  var->var_part[pos].cur_loc = NULL;
		  if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
		    VAR_LOC_FROM (var) = NULL;
		}
	      pool_free (loc_chain_pool, node);
	      *nextp = next;
	      break;
	    }
	  else
	    nextp = &node->next;
	}

      if (var->var_part[pos].loc_chain == NULL)
	{
	  /* The chain is now empty: drop this part entirely and
	     shift the remaining parts down.  */
	  changed = true;
	  var->n_var_parts--;
	  while (pos < var->n_var_parts)
	    {
	      var->var_part[pos] = var->var_part[pos + 1];
	      pos++;
	    }
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return slot;
}
  6817. /* Delete the part of variable's location from dataflow set SET. The
  6818. variable part is specified by variable's declaration or value DV
  6819. and offset OFFSET and the part's location by LOC. */
  6820. static void
  6821. delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
  6822. HOST_WIDE_INT offset)
  6823. {
  6824. variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
  6825. if (!slot)
  6826. return;
  6827. delete_slot_part (set, loc, slot, offset);
  6828. }
/* Structure for passing some other parameters to function
   vt_expand_loc_callback.  */
struct expand_loc_callback_data
{
  /* The variables and values active at this point.  */
  variable_table_type *vars;

  /* Stack of values and debug_exprs under expansion, and their
     children.  */
  auto_vec<rtx, 4> expanding;

  /* Stack of values and debug_exprs whose expansion hit recursion
     cycles.  They will have VALUE_RECURSED_INTO marked when added to
     this list.  This flag will be cleared if any of its dependencies
     resolves to a valid location.  So, if the flag remains set at the
     end of the search, we know no valid location for this one can
     possibly exist.  */
  auto_vec<rtx, 4> pending;

  /* The maximum depth among the sub-expressions under expansion.
     Zero indicates no expansion so far.  */
  expand_depth depth;
};
/* Allocate the one-part auxiliary data structure for VAR, with enough
   room for COUNT dependencies.  */

static void
loc_exp_dep_alloc (variable var, int count)
{
  size_t allocsize;

  gcc_checking_assert (var->onepart);

  /* We can be called with COUNT == 0 to allocate the data structure
     without any dependencies, e.g. for the backlinks only.  However,
     if we are specifying a COUNT, then the dependency list must have
     been emptied before.  It would be possible to adjust pointers or
     force it empty here, but this is better done at an earlier point
     in the algorithm, so we instead leave an assertion to catch
     errors.  */
  gcc_checking_assert (!count
		       || VAR_LOC_DEP_VEC (var) == NULL
		       || VAR_LOC_DEP_VEC (var)->is_empty ());

  /* Existing allocation already has room for COUNT dependencies.  */
  if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
    return;

  /* The dependency vector is embedded at the tail of the onepart_aux
     structure; size the allocation accordingly.  */
  allocsize = offsetof (struct onepart_aux, deps)
	      + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);

  if (VAR_LOC_1PAUX (var))
    {
      VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
					VAR_LOC_1PAUX (var), allocsize);
      /* If the reallocation moves the onepaux structure, the
	 back-pointer to BACKLINKS in the first list member will still
	 point to its old location.  Adjust it.  */
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
    }
  else
    {
      /* First allocation: initialize all the auxiliary fields.  */
      VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
      *VAR_LOC_DEP_LSTP (var) = NULL;
      VAR_LOC_FROM (var) = NULL;
      VAR_LOC_DEPTH (var).complexity = 0;
      VAR_LOC_DEPTH (var).entryvals = 0;
    }
  VAR_LOC_DEP_VEC (var)->embedded_init (count);
}
  6890. /* Remove all entries from the vector of active dependencies of VAR,
  6891. removing them from the back-links lists too. */
  6892. static void
  6893. loc_exp_dep_clear (variable var)
  6894. {
  6895. while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
  6896. {
  6897. loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
  6898. if (led->next)
  6899. led->next->pprev = led->pprev;
  6900. if (led->pprev)
  6901. *led->pprev = led->next;
  6902. VAR_LOC_DEP_VEC (var)->pop ();
  6903. }
  6904. }
/* Insert an active dependency from VAR on X to the vector of
   dependencies, and add the corresponding back-link to X's list of
   back-links in VARS.  */

static void
loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
{
  decl_or_value dv;
  variable xvar;
  loc_exp_dep *led;

  dv = dv_from_rtx (x);

  /* ??? Build a vector of variables parallel to EXPANDING, to avoid
     an additional look up?  */
  xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
  if (!xvar)
    {
      /* Not in the active set; fall back to dropped_values.  */
      xvar = variable_from_dropped (dv, NO_INSERT);
      gcc_checking_assert (xvar);
    }

  /* No point in adding the same backlink more than once.  This may
     arise if say the same value appears in two complex expressions in
     the same loc_list, or even more than once in a single
     expression.  */
  if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
    return;

  /* NOT_ONEPART variables carry no embedded dependency vector, so
     their dependency records come from a separate pool.  */
  if (var->onepart == NOT_ONEPART)
    led = (loc_exp_dep *) pool_alloc (loc_exp_dep_pool);
  else
    {
      loc_exp_dep empty;
      memset (&empty, 0, sizeof (empty));
      VAR_LOC_DEP_VEC (var)->quick_push (empty);
      led = &VAR_LOC_DEP_VEC (var)->last ();
    }
  led->dv = var->dv;
  led->value = x;

  /* Link LED at the head of XVAR's back-links list; the alloc call
     with COUNT == 0 merely ensures XVAR has its aux structure.  */
  loc_exp_dep_alloc (xvar, 0);
  led->pprev = VAR_LOC_DEP_LSTP (xvar);
  led->next = *led->pprev;
  if (led->next)
    led->next->pprev = &led->next;
  *led->pprev = led;
}
  6947. /* Create active dependencies of VAR on COUNT values starting at
  6948. VALUE, and corresponding back-links to the entries in VARS. Return
  6949. true if we found any pending-recursion results. */
  6950. static bool
  6951. loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
  6952. variable_table_type *vars)
  6953. {
  6954. bool pending_recursion = false;
  6955. gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
  6956. || VAR_LOC_DEP_VEC (var)->is_empty ());
  6957. /* Set up all dependencies from last_child (as set up at the end of
  6958. the loop above) to the end. */
  6959. loc_exp_dep_alloc (var, count);
  6960. while (count--)
  6961. {
  6962. rtx x = *value++;
  6963. if (!pending_recursion)
  6964. pending_recursion = !result && VALUE_RECURSED_INTO (x);
  6965. loc_exp_insert_dep (var, x, vars);
  6966. }
  6967. return pending_recursion;
  6968. }
/* Notify the back-links of IVAR that are pending recursion that we
   have found a non-NIL value for it, so they are cleared for another
   attempt to compute a current location.  */

static void
notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
{
  loc_exp_dep *led, *next;

  for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
    {
      decl_or_value dv = led->dv;
      variable var;

      /* Remember the successor now: LED is unlinked below.  */
      next = led->next;

      if (dv_is_value_p (dv))
	{
	  rtx value = dv_as_value (dv);

	  /* If we have already resolved it, leave it alone.  */
	  if (!VALUE_RECURSED_INTO (value))
	    continue;

	  /* Check that VALUE_RECURSED_INTO, true from the test above,
	     implies NO_LOC_P.  */
	  gcc_checking_assert (NO_LOC_P (value));

	  /* We won't notify variables that are being expanded,
	     because their dependency list is cleared before
	     recursing.  */
	  NO_LOC_P (value) = false;
	  VALUE_RECURSED_INTO (value) = false;

	  gcc_checking_assert (dv_changed_p (dv));
	}
      else
	{
	  /* A DEBUG_EXPR dependent: only worth notifying while it is
	     still marked changed.  */
	  gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
	  if (!dv_changed_p (dv))
	    continue;
	}

      var = vars->find_with_hash (dv, dv_htab_hash (dv));

      if (!var)
	var = variable_from_dropped (dv, NO_INSERT);

      /* Propagate the notification transitively to the dependent's
	 own back-links.  */
      if (var)
	notify_dependents_of_resolved_value (var, vars);

      /* Unlink LED from IVAR's back-link list; it has served its
	 purpose and must not be visited again.  */
      if (next)
	next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = next;
      led->next = NULL;
      led->pprev = NULL;
    }
}
  7016. static rtx vt_expand_loc_callback (rtx x, bitmap regs,
  7017. int max_depth, void *data);
  7018. /* Return the combined depth, when one sub-expression evaluated to
  7019. BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
  7020. static inline expand_depth
  7021. update_depth (expand_depth saved_depth, expand_depth best_depth)
  7022. {
  7023. /* If we didn't find anything, stick with what we had. */
  7024. if (!best_depth.complexity)
  7025. return saved_depth;
  7026. /* If we found hadn't found anything, use the depth of the current
  7027. expression. Do NOT add one extra level, we want to compute the
  7028. maximum depth among sub-expressions. We'll increment it later,
  7029. if appropriate. */
  7030. if (!saved_depth.complexity)
  7031. return best_depth;
  7032. /* Combine the entryval count so that regardless of which one we
  7033. return, the entryval count is accurate. */
  7034. best_depth.entryvals = saved_depth.entryvals
  7035. = best_depth.entryvals + saved_depth.entryvals;
  7036. if (saved_depth.complexity < best_depth.complexity)
  7037. return best_depth;
  7038. else
  7039. return saved_depth;
  7040. }
/* Expand VAR to a location RTX, updating its cur_loc.  Use REGS and
   DATA for cselib expand callback.  If PENDRECP is given, indicate in
   it whether any sub-expression couldn't be fully evaluated because
   it is pending recursion resolution.  */

static inline rtx
vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
{
  struct expand_loc_callback_data *elcd
    = (struct expand_loc_callback_data *) data;
  location_chain loc, next;
  rtx result = NULL;
  int first_child, result_first_child, last_child;
  bool pending_recursion;
  rtx loc_from = NULL;
  struct elt_loc_list *cloc = NULL;
  expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
  int wanted_entryvals, found_entryvals = 0;

  /* Clear all backlinks pointing at this, so that we're not notified
     while we're active.  */
  loc_exp_dep_clear (var);

 retry:
  if (var->onepart == ONEPART_VALUE)
    {
      /* For a VALUE, also consider the cselib location list as
	 expansion candidates, after VAR's own loc_chain.  */
      cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));

      gcc_checking_assert (cselib_preserved_value_p (val));

      cloc = val->locs;
    }

  /* Children pushed onto ELCD->expanding during each attempt live in
     the range [result_first_child, last_child); FIRST_CHILD marks the
     state to restore before returning or retrying.  */
  first_child = result_first_child = last_child
    = elcd->expanding.length ();

  wanted_entryvals = found_entryvals;

  /* Attempt to expand each available location in turn.  */
  for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
       loc || cloc; loc = next)
    {
      result_first_child = last_child;

      if (!loc)
	{
	  /* Out of loc_chain entries; take from the cselib list.  */
	  loc_from = cloc->loc;
	  next = loc;
	  cloc = cloc->next;
	  if (unsuitable_loc (loc_from))
	    continue;
	}
      else
	{
	  loc_from = loc->loc;
	  next = loc->next;
	}

      gcc_checking_assert (!unsuitable_loc (loc_from));

      elcd->depth.complexity = elcd->depth.entryvals = 0;
      result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
					   vt_expand_loc_callback, data);
      last_child = elcd->expanding.length ();

      if (result)
	{
	  depth = elcd->depth;

	  gcc_checking_assert (depth.complexity
			       || result_first_child == last_child);

	  /* Account for this expression itself when it combined more
	     than one sub-expression.  */
	  if (last_child - result_first_child != 1)
	    {
	      if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
		depth.entryvals++;
	      depth.complexity++;
	    }

	  if (depth.complexity <= EXPR_USE_DEPTH)
	    {
	      /* Accept an expansion only if it does not use more
		 ENTRY_VALUEs than we are currently willing to; record
		 the best (smallest) count seen otherwise.  */
	      if (depth.entryvals <= wanted_entryvals)
		break;
	      else if (!found_entryvals || depth.entryvals < found_entryvals)
		found_entryvals = depth.entryvals;
	    }

	  result = NULL;
	}

      /* Set it up in case we leave the loop.  */
      depth.complexity = depth.entryvals = 0;
      loc_from = NULL;
      result_first_child = first_child;
    }

  if (!loc_from && wanted_entryvals < found_entryvals)
    {
      /* We found entries with ENTRY_VALUEs and skipped them.  Since
	 we could not find any expansions without ENTRY_VALUEs, but we
	 found at least one with them, go back and get an entry with
	 the minimum number ENTRY_VALUE count that we found.  We could
	 avoid looping, but since each sub-loc is already resolved,
	 the re-expansion should be trivial.  ??? Should we record all
	 attempted locs as dependencies, so that we retry the
	 expansion should any of them change, in the hope it can give
	 us a new entry without an ENTRY_VALUE?  */
      elcd->expanding.truncate (first_child);
      goto retry;
    }

  /* Register all encountered dependencies as active.  */
  pending_recursion = loc_exp_dep_set
    (var, result, elcd->expanding.address () + result_first_child,
     last_child - result_first_child, elcd->vars);

  elcd->expanding.truncate (first_child);

  /* Record where the expansion came from.  */
  gcc_checking_assert (!result || !pending_recursion);
  VAR_LOC_FROM (var) = loc_from;
  VAR_LOC_DEPTH (var) = depth;

  gcc_checking_assert (!depth.complexity == !result);

  elcd->depth = update_depth (saved_depth, depth);

  /* Indicate whether any of the dependencies are pending recursion
     resolution.  */
  if (pendrecp)
    *pendrecp = pending_recursion;

  /* Only commit cur_loc when the expansion is final, i.e. when no
     caller will retry after recursion resolves.  */
  if (!pendrecp || !pending_recursion)
    var->var_part[0].cur_loc = result;

  return result;
}
/* Callback for cselib_expand_value, that looks for expressions
   holding the value in the var-tracking hash tables.  Return X for
   standard processing, anything else is to be used as-is.  */

static rtx
vt_expand_loc_callback (rtx x, bitmap regs,
			int max_depth ATTRIBUTE_UNUSED,
			void *data)
{
  struct expand_loc_callback_data *elcd
    = (struct expand_loc_callback_data *) data;
  decl_or_value dv;
  variable var;
  rtx result, subreg;
  bool pending_recursion = false;
  bool from_empty = false;

  switch (GET_CODE (x))
    {
    case SUBREG:
      /* Expand the inner expression first, then rebuild the SUBREG
	 around the expansion.  */
      subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
					   EXPR_DEPTH,
					   vt_expand_loc_callback, data);

      if (!subreg)
	return NULL;

      result = simplify_gen_subreg (GET_MODE (x), subreg,
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x));

      /* Invalid SUBREGs are ok in debug info.  ??? We could try
	 alternate expansions for the VALUE as well.  */
      if (!result)
	result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));

      return result;

    case DEBUG_EXPR:
    case VALUE:
      dv = dv_from_rtx (x);
      break;

    default:
      /* Anything else gets cselib's standard processing.  */
      return x;
    }

  elcd->expanding.safe_push (x);

  /* Check that VALUE_RECURSED_INTO implies NO_LOC_P.  */
  gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));

  if (NO_LOC_P (x))
    {
      gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
      return NULL;
    }

  var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));

  if (!var)
    {
      from_empty = true;
      var = variable_from_dropped (dv, INSERT);
    }

  gcc_checking_assert (var);

  if (!dv_changed_p (dv))
    {
      /* Unchanged: reuse the previously computed location and its
	 recorded depth.  */
      gcc_checking_assert (!NO_LOC_P (x));
      gcc_checking_assert (var->var_part[0].cur_loc);
      gcc_checking_assert (VAR_LOC_1PAUX (var));
      gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);

      elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);

      return var->var_part[0].cur_loc;
    }

  VALUE_RECURSED_INTO (x) = true;
  /* This is tentative, but it makes some tests simpler.  */
  NO_LOC_P (x) = true;

  gcc_checking_assert (var->n_var_parts == 1 || from_empty);

  result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);

  if (pending_recursion)
    {
      /* Leave the tentative NULL in place; finalized later by
	 resolve_expansions_pending_recursion unless a notification
	 arrives first.  */
      gcc_checking_assert (!result);
      elcd->pending.safe_push (x);
    }
  else
    {
      NO_LOC_P (x) = !result;
      VALUE_RECURSED_INTO (x) = false;
      set_dv_changed (dv, false);

      if (result)
	notify_dependents_of_resolved_value (var, elcd->vars);
    }

  return result;
}
  7234. /* While expanding variables, we may encounter recursion cycles
  7235. because of mutual (possibly indirect) dependencies between two
  7236. particular variables (or values), say A and B. If we're trying to
  7237. expand A when we get to B, which in turn attempts to expand A, if
  7238. we can't find any other expansion for B, we'll add B to this
  7239. pending-recursion stack, and tentatively return NULL for its
  7240. location. This tentative value will be used for any other
  7241. occurrences of B, unless A gets some other location, in which case
  7242. it will notify B that it is worth another try at computing a
  7243. location for it, and it will use the location computed for A then.
  7244. At the end of the expansion, the tentative NULL locations become
  7245. final for all members of PENDING that didn't get a notification.
  7246. This function performs this finalization of NULL locations. */
  7247. static void
  7248. resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
  7249. {
  7250. while (!pending->is_empty ())
  7251. {
  7252. rtx x = pending->pop ();
  7253. decl_or_value dv;
  7254. if (!VALUE_RECURSED_INTO (x))
  7255. continue;
  7256. gcc_checking_assert (NO_LOC_P (x));
  7257. VALUE_RECURSED_INTO (x) = false;
  7258. dv = dv_from_rtx (x);
  7259. gcc_checking_assert (dv_changed_p (dv));
  7260. set_dv_changed (dv, false);
  7261. }
  7262. }
/* Initialize expand_loc_callback_data D with variable hash table V.
   It must be a macro because of alloca (vec stack).  Only VARS and
   DEPTH are set explicitly; the EXPANDING and PENDING vec members are
   presumably default-initialized with D itself -- confirm against the
   struct definition.  */
#define INIT_ELCD(d, v)						\
  do								\
    {								\
      (d).vars = (v);						\
      (d).depth.complexity = (d).depth.entryvals = 0;		\
    }								\
  while (0)
/* Finalize expand_loc_callback_data D, resolved to location L.
   Finalizes tentative NULLs for values still pending recursion,
   releases D's vecs, and lets the target delegitimize a MEM result
   (e.g. to undo PIC address mangling) before it goes into a note.  */
#define FINI_ELCD(d, l)						\
  do								\
    {								\
      resolve_expansions_pending_recursion (&(d).pending);	\
      (d).pending.release ();					\
      (d).expanding.release ();					\
								\
      if ((l) && MEM_P (l))					\
	(l) = targetm.delegitimize_address (l);			\
    }								\
  while (0)
  7284. /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
  7285. equivalences in VARS, updating their CUR_LOCs in the process. */
  7286. static rtx
  7287. vt_expand_loc (rtx loc, variable_table_type *vars)
  7288. {
  7289. struct expand_loc_callback_data data;
  7290. rtx result;
  7291. if (!MAY_HAVE_DEBUG_INSNS)
  7292. return loc;
  7293. INIT_ELCD (data, vars);
  7294. result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
  7295. vt_expand_loc_callback, &data);
  7296. FINI_ELCD (data, result);
  7297. return result;
  7298. }
  7299. /* Expand the one-part VARiable to a location, using the equivalences
  7300. in VARS, updating their CUR_LOCs in the process. */
  7301. static rtx
  7302. vt_expand_1pvar (variable var, variable_table_type *vars)
  7303. {
  7304. struct expand_loc_callback_data data;
  7305. rtx loc;
  7306. gcc_checking_assert (var->onepart && var->n_var_parts == 1);
  7307. if (!dv_changed_p (var->dv))
  7308. return var->var_part[0].cur_loc;
  7309. INIT_ELCD (data, vars);
  7310. loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
  7311. gcc_checking_assert (data.expanding.is_empty ());
  7312. FINI_ELCD (data, loc);
  7313. return loc;
  7314. }
/* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP.  DATA contains
   additional parameters: WHERE specifies whether the note shall be emitted
   before or after instruction INSN.  */

int
emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
{
  variable var = *varp;
  rtx_insn *insn = data->insn;
  enum emit_note_where where = data->where;
  variable_table_type *vars = data->vars;
  rtx_note *note;
  rtx note_vl;
  int i, j, n_var_parts;
  bool complete;
  enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
  HOST_WIDE_INT last_limit;
  tree type_size_unit;
  HOST_WIDE_INT offsets[MAX_VAR_PARTS];
  rtx loc[MAX_VAR_PARTS];
  tree decl;
  location_chain lc;

  /* Only user variables reach this point; VALUEs and DEBUG_EXPRs were
     filtered out by process_changed_values.  */
  gcc_checking_assert (var->onepart == NOT_ONEPART
		       || var->onepart == ONEPART_VDECL);

  decl = dv_as_decl (var->dv);

  complete = true;
  last_limit = 0;
  n_var_parts = 0;
  /* For multi-part variables, lazily fill in any missing cur_loc from
     the head of the part's location chain.  */
  if (!var->onepart)
    for (i = 0; i < var->n_var_parts; i++)
      if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
	var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
  /* Collect a location for each part, tracking coverage in
     LAST_LIMIT, so we can tell whether the location is complete.  */
  for (i = 0; i < var->n_var_parts; i++)
    {
      machine_mode mode, wider_mode;
      rtx loc2;
      HOST_WIDE_INT offset;

      if (i == 0 && var->onepart)
	{
	  gcc_checking_assert (var->n_var_parts == 1);
	  offset = 0;
	  initialized = VAR_INIT_STATUS_INITIALIZED;
	  loc2 = vt_expand_1pvar (var, vars);
	}
      else
	{
	  /* A gap before this part means the variable is not fully
	     covered; overlap means this part was already merged.  */
	  if (last_limit < VAR_PART_OFFSET (var, i))
	    {
	      complete = false;
	      break;
	    }
	  else if (last_limit > VAR_PART_OFFSET (var, i))
	    continue;
	  offset = VAR_PART_OFFSET (var, i);
	  loc2 = var->var_part[i].cur_loc;
	  if (loc2 && GET_CODE (loc2) == MEM
	      && GET_CODE (XEXP (loc2, 0)) == VALUE)
	    {
	      /* A MEM whose address is still a VALUE needs expanding;
		 if that succeeds, record a dependency on the address
		 value so we are notified when it changes.  */
	      rtx depval = XEXP (loc2, 0);

	      loc2 = vt_expand_loc (loc2, vars);

	      if (loc2)
		loc_exp_insert_dep (var, depval, vars);
	    }
	  if (!loc2)
	    {
	      complete = false;
	      continue;
	    }
	  gcc_checking_assert (GET_CODE (loc2) != VALUE);
	  /* Recover the initialization status recorded for the
	     current location on the part's chain.  */
	  for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
	    if (var->var_part[i].cur_loc == lc->loc)
	      {
		initialized = lc->init;
		break;
	      }
	  gcc_assert (lc);
	}

      offsets[n_var_parts] = offset;
      if (!loc2)
	{
	  complete = false;
	  continue;
	}
      loc[n_var_parts] = loc2;
      mode = GET_MODE (var->var_part[i].cur_loc);
      if (mode == VOIDmode && var->onepart)
	mode = DECL_MODE (decl);
      last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);

      /* Attempt to merge adjacent registers or memory.  */
      wider_mode = GET_MODE_WIDER_MODE (mode);
      for (j = i + 1; j < var->n_var_parts; j++)
	if (last_limit <= VAR_PART_OFFSET (var, j))
	  break;
      if (j < var->n_var_parts
	  && wider_mode != VOIDmode
	  && var->var_part[j].cur_loc
	  && mode == GET_MODE (var->var_part[j].cur_loc)
	  && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
	  && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
	  && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
	  && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
	{
	  rtx new_loc = NULL;

	  /* Two consecutive hard registers that together form a
	     register of WIDER_MODE can be merged into one REG.  */
	  if (REG_P (loc[n_var_parts])
	      && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
	      == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
	      && end_hard_regno (mode, REGNO (loc[n_var_parts]))
	      == REGNO (loc2))
	    {
	      if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
		new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
					   mode, 0);
	      else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
		new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
	      if (new_loc)
		{
		  if (!REG_P (new_loc)
		      || REGNO (new_loc) != REGNO (loc[n_var_parts]))
		    new_loc = NULL;
		  else
		    REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
		}
	    }
	  /* Two adjacent MEMs (base+offset directly following the
	     first MEM) can be widened into a single MEM.  */
	  else if (MEM_P (loc[n_var_parts])
		   && GET_CODE (XEXP (loc2, 0)) == PLUS
		   && REG_P (XEXP (XEXP (loc2, 0), 0))
		   && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
	    {
	      if ((REG_P (XEXP (loc[n_var_parts], 0))
		   && rtx_equal_p (XEXP (loc[n_var_parts], 0),
				   XEXP (XEXP (loc2, 0), 0))
		   && INTVAL (XEXP (XEXP (loc2, 0), 1))
		   == GET_MODE_SIZE (mode))
		  || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
		      && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
		      && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
				      XEXP (XEXP (loc2, 0), 0))
		      && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
		      + GET_MODE_SIZE (mode)
		      == INTVAL (XEXP (XEXP (loc2, 0), 1))))
		new_loc = adjust_address_nv (loc[n_var_parts],
					     wider_mode, 0);
	    }

	  if (new_loc)
	    {
	      loc[n_var_parts] = new_loc;
	      mode = wider_mode;
	      last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
	      i = j;
	    }
	}
      ++n_var_parts;
    }
  /* The location is incomplete if the collected parts do not cover
     the whole declared size of the variable.  */
  type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
  if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
    complete = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  /* Build the VAR_LOCATION pattern: NULL location when incomplete, a
     single location (possibly wrapped in an EXPR_LIST carrying its
     offset) for one part, or a PARALLEL of offset lists otherwise.  */
  note_vl = NULL_RTX;
  if (!complete)
    note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
  else if (n_var_parts == 1)
    {
      rtx expr_list;

      if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
	expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
      else
	expr_list = loc[0];

      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
    }
  else if (n_var_parts)
    {
      rtx parallel;

      for (i = 0; i < n_var_parts; i++)
	loc[i]
	  = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));

      parallel = gen_rtx_PARALLEL (VOIDmode,
				   gen_rtvec_v (n_var_parts, loc));
      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
				      parallel, initialized);
    }

  if (where != EMIT_NOTE_BEFORE_INSN)
    {
      note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
      if (where == EMIT_NOTE_AFTER_CALL_INSN)
	NOTE_DURING_CALL_P (note) = true;
    }
  else
    {
      /* Make sure that the call related notes come first.  */
      while (NEXT_INSN (insn)
	     && NOTE_P (insn)
	     && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
		  && NOTE_DURING_CALL_P (insn))
		 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
	insn = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
	       && NOTE_DURING_CALL_P (insn))
	      || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
	note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
      else
	note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
    }
  NOTE_VAR_LOCATION (note) = note_vl;

  /* The variable's pending change has now been emitted; take it out
     of changed_variables.  */
  set_dv_changed (var->dv, false);
  gcc_assert (var->in_changed_variables);
  var->in_changed_variables = false;
  changed_variables->clear_slot (varp);

  /* Continue traversing the hash table.  */
  return 1;
}
  7526. /* While traversing changed_variables, push onto DATA (a stack of RTX
  7527. values) entries that aren't user variables. */
  7528. int
  7529. var_track_values_to_stack (variable_def **slot,
  7530. vec<rtx, va_heap> *changed_values_stack)
  7531. {
  7532. variable var = *slot;
  7533. if (var->onepart == ONEPART_VALUE)
  7534. changed_values_stack->safe_push (dv_as_value (var->dv));
  7535. else if (var->onepart == ONEPART_DEXPR)
  7536. changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
  7537. return 1;
  7538. }
  7539. /* Remove from changed_variables the entry whose DV corresponds to
  7540. value or debug_expr VAL. */
  7541. static void
  7542. remove_value_from_changed_variables (rtx val)
  7543. {
  7544. decl_or_value dv = dv_from_rtx (val);
  7545. variable_def **slot;
  7546. variable var;
  7547. slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
  7548. NO_INSERT);
  7549. var = *slot;
  7550. var->in_changed_variables = false;
  7551. changed_variables->clear_slot (slot);
  7552. }
/* If VAL (a value or debug_expr) has backlinks to variables actively
   dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
   changed, adding to CHANGED_VALUES_STACK any dependencies that may
   have dependencies of their own to notify.  */

static void
notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
				    vec<rtx, va_heap> *changed_values_stack)
{
  variable_def **slot;
  variable var;
  loc_exp_dep *led;
  decl_or_value dv = dv_from_rtx (val);

  /* Look for VAL's entry in changed_variables first, then the live
     table, then the dropped-values table.  */
  slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
						 NO_INSERT);
  if (!slot)
    slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
  if (!slot)
    slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
						NO_INSERT);
  var = *slot;

  while ((led = VAR_LOC_DEP_LST (var)))
    {
      decl_or_value ldv = led->dv;
      variable ivar;

      /* Deactivate and remove the backlink, as it was "used up".  It
	 makes no sense to attempt to notify the same entity again:
	 either it will be recomputed and re-register an active
	 dependency, or it will still have the changed mark.  */
      if (led->next)
	led->next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = led->next;
      led->next = NULL;
      led->pprev = NULL;

      /* Already marked changed: it will be (or has been) visited.  */
      if (dv_changed_p (ldv))
	continue;

      switch (dv_onepart_p (ldv))
	{
	case ONEPART_VALUE:
	case ONEPART_DEXPR:
	  /* These may have dependents of their own: push them so the
	     notification propagates transitively.  */
	  set_dv_changed (ldv, true);
	  changed_values_stack->safe_push (dv_as_rtx (ldv));
	  break;

	case ONEPART_VDECL:
	  ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
	  gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
	  variable_was_changed (ivar, NULL);
	  break;

	case NOT_ONEPART:
	  /* NOT_ONEPART deps were pool-allocated (see
	     loc_exp_insert_dep), so release LED back to the pool.  */
	  pool_free (loc_exp_dep_pool, led);
	  ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
	  if (ivar)
	    {
	      int i = ivar->n_var_parts;
	      while (i--)
		{
		  rtx loc = ivar->var_part[i].cur_loc;

		  /* Only a part whose MEM address is VAL actually
		     depends on it.  */
		  if (loc && GET_CODE (loc) == MEM
		      && XEXP (loc, 0) == val)
		    {
		      variable_was_changed (ivar, NULL);
		      break;
		    }
		}
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Take out of changed_variables any entries that don't refer to use
   variables.  Back-propagate change notifications from values and
   debug_exprs to their active dependencies in HTAB or in
   CHANGED_VARIABLES.  */

static void
process_changed_values (variable_table_type *htab)
{
  int i, n;
  rtx val;
  auto_vec<rtx, 20> changed_values_stack;

  /* Move values from changed_variables to changed_values_stack.  */
  changed_variables
    ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
      (&changed_values_stack);

  /* Back-propagate change notifications in values while popping
     them from the stack.  */
  for (n = i = changed_values_stack.length ();
       i > 0; i = changed_values_stack.length ())
    {
      val = changed_values_stack.pop ();
      notify_dependents_of_changed_value (val, htab, &changed_values_stack);

      /* This condition will hold when visiting each of the entries
	 originally in changed_variables.  We can't remove them
	 earlier because this could drop the backlinks before we got a
	 chance to use them.  While I == N we are popping one of the N
	 original entries; transitively pushed entries make the length
	 exceed N and are not in changed_variables.  */
      if (i == n)
	{
	  remove_value_from_changed_variables (val);
	  n--;
	}
    }
}
  7656. /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
  7657. CHANGED_VARIABLES and delete this chain. WHERE specifies whether
  7658. the notes shall be emitted before of after instruction INSN. */
  7659. static void
  7660. emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
  7661. shared_hash vars)
  7662. {
  7663. emit_note_data data;
  7664. variable_table_type *htab = shared_hash_htab (vars);
  7665. if (!changed_variables->elements ())
  7666. return;
  7667. if (MAY_HAVE_DEBUG_INSNS)
  7668. process_changed_values (htab);
  7669. data.insn = insn;
  7670. data.where = where;
  7671. data.vars = htab;
  7672. changed_variables
  7673. ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
  7674. }
/* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
   same variable in hash table DATA or is not there at all.  */

int
emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
{
  variable old_var, new_var;

  old_var = *slot;
  new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));

  if (!new_var)
    {
      /* Variable has disappeared.  Queue an empty variable so a note
	 resetting its location gets emitted.  */
      variable empty_var = NULL;

      if (old_var->onepart == ONEPART_VALUE
	  || old_var->onepart == ONEPART_DEXPR)
	{
	  /* Reuse the entry from the dropped-values table, if any, so
	     back-links recorded there stay live.  */
	  empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
	  if (empty_var)
	    {
	      gcc_checking_assert (!empty_var->in_changed_variables);
	      if (!VAR_LOC_1PAUX (old_var))
		{
		  VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
		  VAR_LOC_1PAUX (empty_var) = NULL;
		}
	      else
		gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
	    }
	}

      if (!empty_var)
	{
	  empty_var = (variable) pool_alloc (onepart_pool (old_var->onepart));
	  empty_var->dv = old_var->dv;
	  empty_var->refcount = 0;
	  empty_var->n_var_parts = 0;
	  empty_var->onepart = old_var->onepart;
	  empty_var->in_changed_variables = false;
	}

      if (empty_var->onepart)
	{
	  /* Propagate the auxiliary data to (ultimately)
	     changed_variables.  */
	  empty_var->var_part[0].loc_chain = NULL;
	  empty_var->var_part[0].cur_loc = NULL;
	  VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
	  VAR_LOC_1PAUX (old_var) = NULL;
	}
      variable_was_changed (empty_var, NULL);
      /* Continue traversing the hash table.  */
      return 1;
    }

  /* Update cur_loc and one-part auxiliary data, before new_var goes
     through variable_was_changed.  */
  if (old_var != new_var && new_var->onepart)
    {
      gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
      VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
      VAR_LOC_1PAUX (old_var) = NULL;
      new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
    }

  if (variable_different_p (old_var, new_var))
    variable_was_changed (new_var, NULL);

  /* Continue traversing the hash table.  */
  return 1;
}
  7739. /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
  7740. table DATA. */
  7741. int
  7742. emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
  7743. {
  7744. variable old_var, new_var;
  7745. new_var = *slot;
  7746. old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
  7747. if (!old_var)
  7748. {
  7749. int i;
  7750. for (i = 0; i < new_var->n_var_parts; i++)
  7751. new_var->var_part[i].cur_loc = NULL;
  7752. variable_was_changed (new_var, NULL);
  7753. }
  7754. /* Continue traversing the hash table. */
  7755. return 1;
  7756. }
  7757. /* Emit notes before INSN for differences between dataflow sets OLD_SET and
  7758. NEW_SET. */
  7759. static void
  7760. emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
  7761. dataflow_set *new_set)
  7762. {
  7763. shared_hash_htab (old_set->vars)
  7764. ->traverse <variable_table_type *, emit_notes_for_differences_1>
  7765. (shared_hash_htab (new_set->vars));
  7766. shared_hash_htab (new_set->vars)
  7767. ->traverse <variable_table_type *, emit_notes_for_differences_2>
  7768. (shared_hash_htab (old_set->vars));
  7769. emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
  7770. }
  7771. /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
  7772. static rtx_insn *
  7773. next_non_note_insn_var_location (rtx_insn *insn)
  7774. {
  7775. while (insn)
  7776. {
  7777. insn = NEXT_INSN (insn);
  7778. if (insn == 0
  7779. || !NOTE_P (insn)
  7780. || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
  7781. break;
  7782. }
  7783. return insn;
  7784. }
/* Emit the notes for changes of location parts in the basic block BB.
   SET is scratch storage: it is reset to the block's IN set and then
   updated insn by insn as the recorded micro-operations are replayed,
   with notes emitted for each resulting change.  */

static void
emit_notes_in_bb (basic_block bb, dataflow_set *set)
{
  unsigned int i;
  micro_operation *mo;

  /* Start from the dataflow state at the beginning of the block.  */
  dataflow_set_clear (set);
  dataflow_set_copy (set, &VTI (bb)->in);

  FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
    {
      rtx_insn *insn = mo->insn;
      /* Notes emitted "before the next insn" must skip over any
	 var-location notes already emitted after INSN.  */
      rtx_insn *next_insn = next_non_note_insn_var_location (insn);

      switch (mo->type)
	{
	  case MO_CALL:
	    /* A call clobbers call-clobbered locations; report the
	       fallout right after the call insn.  */
	    dataflow_set_clear_at_call (set);
	    emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
	    {
	      /* Expand the recorded argument locations and attach the
		 surviving list to a NOTE_INSN_CALL_ARG_LOCATION.  */
	      rtx arguments = mo->u.loc, *p = &arguments;
	      rtx_note *note;
	      while (*p)
		{
		  XEXP (XEXP (*p, 0), 1)
		    = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
				     shared_hash_htab (set->vars));
		  /* If expansion is successful, keep it in the list.  */
		  if (XEXP (XEXP (*p, 0), 1))
		    p = &XEXP (*p, 1);
		  /* Otherwise, if the following item is data_value for it,
		     drop it too.  */
		  else if (XEXP (*p, 1)
			   && REG_P (XEXP (XEXP (*p, 0), 0))
			   && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
			   && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
					   0))
			   && REGNO (XEXP (XEXP (*p, 0), 0))
			      == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
						    0), 0)))
		    *p = XEXP (XEXP (*p, 1), 1);
		  /* Just drop this item.  */
		  else
		    *p = XEXP (*p, 1);
		}
	      note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
	      NOTE_VAR_LOCATION (note) = arguments;
	    }
	    break;

	  case MO_USE:
	    {
	      rtx loc = mo->u.loc;

	      /* A plain use: (re)bind the register or memory location,
		 marked uninitialized since this is only a use.  */
	      if (REG_P (loc))
		var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
	      else
		var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);

	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
	    }
	    break;

	  case MO_VAL_LOC:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc;
	      tree var;

	      /* A CONCAT pairs a cselib VALUE with the VAR_LOCATION
		 pattern; otherwise LOC is the pattern alone.  */
	      if (GET_CODE (loc) == CONCAT)
		{
		  val = XEXP (loc, 0);
		  vloc = XEXP (loc, 1);
		}
	      else
		{
		  val = NULL_RTX;
		  vloc = loc;
		}

	      var = PAT_VAR_LOCATION_DECL (vloc);

	      /* The debug binding supersedes any previous location for
		 the decl.  */
	      clobber_variable_part (set, NULL_RTX,
				     dv_from_decl (var), 0, NULL_RTX);
	      if (val)
		{
		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
		  set_variable_part (set, val, dv_from_decl (var), 0,
				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				     INSERT);
		}
	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
		set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
				   dv_from_decl (var), 0,
				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				   INSERT);

	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
	    }
	    break;

	  case MO_VAL_USE:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;

	      vloc = uloc = XEXP (loc, 1);
	      val = XEXP (loc, 0);

	      /* A nested CONCAT carries the underlying use location
		 separately from the value's location.  */
	      if (GET_CODE (val) == CONCAT)
		{
		  uloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (VAL_NEEDS_RESOLUTION (loc))
		val_resolve (set, val, vloc, insn);
	      else
		val_store (set, val, uloc, insn, false);

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  if (GET_CODE (uloc) == REG)
		    var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		  else if (GET_CODE (uloc) == MEM)
		    var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		}

	      emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
	    }
	    break;

	  case MO_VAL_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;
	      rtx dstv, srcv;

	      /* Unpack the (VALUE . SET-or-location) structure built
		 when the micro-operations were recorded.  */
	      vloc = loc;
	      uloc = XEXP (vloc, 1);
	      val = XEXP (vloc, 0);
	      vloc = uloc;

	      if (GET_CODE (uloc) == SET)
		{
		  dstv = SET_DEST (uloc);
		  srcv = SET_SRC (uloc);
		}
	      else
		{
		  dstv = uloc;
		  srcv = NULL;
		}

	      if (GET_CODE (val) == CONCAT)
		{
		  dstv = vloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (GET_CODE (vloc) == SET)
		{
		  srcv = SET_SRC (vloc);

		  gcc_assert (val != srcv);
		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));

		  dstv = vloc = SET_DEST (vloc);

		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (set, val, srcv, insn);
		}
	      else if (VAL_NEEDS_RESOLUTION (loc))
		{
		  gcc_assert (GET_CODE (uloc) == SET
			      && GET_CODE (SET_SRC (uloc)) == REG);
		  val_resolve (set, val, SET_SRC (uloc), insn);
		}

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  if (VAL_EXPR_IS_CLOBBERED (loc))
		    {
		      /* The tracked expression is being clobbered: drop
			 the binding entirely.  */
		      if (REG_P (uloc))
			var_reg_delete (set, uloc, true);
		      else if (MEM_P (uloc))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
			  var_mem_delete (set, dstv, true);
			}
		    }
		  else
		    {
		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
		      rtx src = NULL, dst = uloc;
		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;

		      if (GET_CODE (uloc) == SET)
			{
			  src = SET_SRC (uloc);
			  dst = SET_DEST (uloc);
			}

		      if (copied_p)
			{
			  /* For a copy, inherit the source's init status
			     and track where the value came from.  */
			  status = find_src_status (set, src);

			  src = find_src_set_src (set, src);
			}

		      if (REG_P (dst))
			var_reg_delete_and_set (set, dst, !copied_p,
						status, srcv);
		      else if (MEM_P (dst))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
			  var_mem_delete_and_set (set, dstv, !copied_p,
						  status, srcv);
			}
		    }
		}
	      else if (REG_P (uloc))
		var_regno_delete (set, REGNO (uloc));
	      else if (MEM_P (uloc))
		{
		  gcc_checking_assert (GET_CODE (vloc) == MEM);
		  gcc_checking_assert (vloc == dstv);
		  /* NOTE(review): dead under the assert just above —
		     presumably kept as belt-and-braces for non-checking
		     builds.  */
		  if (vloc != dstv)
		    clobber_overlapping_mems (set, vloc);
		}

	      val_store (set, val, dstv, insn, true);

	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				      set->vars);
	    }
	    break;

	  case MO_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      /* An unconditional set: old bindings of the destination
		 are replaced (modify = true).  */
	      if (REG_P (loc))
		var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
					set_src);
	      else
		var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
					set_src);

	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				      set->vars);
	    }
	    break;

	  case MO_COPY:
	    {
	      rtx loc = mo->u.loc;
	      enum var_init_status src_status;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      /* A copy keeps existing bindings (modify = false) and
		 propagates the source's initialization status.  */
	      src_status = find_src_status (set, set_src);
	      set_src = find_src_set_src (set, set_src);

	      if (REG_P (loc))
		var_reg_delete_and_set (set, loc, false, src_status, set_src);
	      else
		var_mem_delete_and_set (set, loc, false, src_status, set_src);

	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				      set->vars);
	    }
	    break;

	  case MO_USE_NO_VAR:
	    {
	      rtx loc = mo->u.loc;

	      /* Use of a location not tied to a tracked variable: drop
		 it, but not its other bindings (clobber = false).  */
	      if (REG_P (loc))
		var_reg_delete (set, loc, false);
	      else
		var_mem_delete (set, loc, false);

	      emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
	    }
	    break;

	  case MO_CLOBBER:
	    {
	      rtx loc = mo->u.loc;

	      /* Full clobber: delete every binding of the location.  */
	      if (REG_P (loc))
		var_reg_delete (set, loc, true);
	      else
		var_mem_delete (set, loc, true);

	      emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
				      set->vars);
	    }
	    break;

	  case MO_ADJUST:
	    /* Track stack pointer adjustments so sp-relative MEMs keep
	       their meaning.  */
	    set->stack_adjust += mo->u.adjust;
	    break;
	}
    }
}
  8062. /* Emit notes for the whole function. */
  8063. static void
  8064. vt_emit_notes (void)
  8065. {
  8066. basic_block bb;
  8067. dataflow_set cur;
  8068. gcc_assert (!changed_variables->elements ());
  8069. /* Free memory occupied by the out hash tables, as they aren't used
  8070. anymore. */
  8071. FOR_EACH_BB_FN (bb, cfun)
  8072. dataflow_set_clear (&VTI (bb)->out);
  8073. /* Enable emitting notes by functions (mainly by set_variable_part and
  8074. delete_variable_part). */
  8075. emit_notes = true;
  8076. if (MAY_HAVE_DEBUG_INSNS)
  8077. {
  8078. dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
  8079. loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
  8080. sizeof (loc_exp_dep), 64);
  8081. }
  8082. dataflow_set_init (&cur);
  8083. FOR_EACH_BB_FN (bb, cfun)
  8084. {
  8085. /* Emit the notes for changes of variable locations between two
  8086. subsequent basic blocks. */
  8087. emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
  8088. if (MAY_HAVE_DEBUG_INSNS)
  8089. local_get_addr_cache = new hash_map<rtx, rtx>;
  8090. /* Emit the notes for the changes in the basic block itself. */
  8091. emit_notes_in_bb (bb, &cur);
  8092. if (MAY_HAVE_DEBUG_INSNS)
  8093. delete local_get_addr_cache;
  8094. local_get_addr_cache = NULL;
  8095. /* Free memory occupied by the in hash table, we won't need it
  8096. again. */
  8097. dataflow_set_clear (&VTI (bb)->in);
  8098. }
  8099. #ifdef ENABLE_CHECKING
  8100. shared_hash_htab (cur.vars)
  8101. ->traverse <variable_table_type *, emit_notes_for_differences_1>
  8102. (shared_hash_htab (empty_shared_hash));
  8103. #endif
  8104. dataflow_set_destroy (&cur);
  8105. if (MAY_HAVE_DEBUG_INSNS)
  8106. delete dropped_values;
  8107. dropped_values = NULL;
  8108. emit_notes = false;
  8109. }
  8110. /* If there is a declaration and offset associated with register/memory RTL
  8111. assign declaration to *DECLP and offset to *OFFSETP, and return true. */
  8112. static bool
  8113. vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
  8114. {
  8115. if (REG_P (rtl))
  8116. {
  8117. if (REG_ATTRS (rtl))
  8118. {
  8119. *declp = REG_EXPR (rtl);
  8120. *offsetp = REG_OFFSET (rtl);
  8121. return true;
  8122. }
  8123. }
  8124. else if (GET_CODE (rtl) == PARALLEL)
  8125. {
  8126. tree decl = NULL_TREE;
  8127. HOST_WIDE_INT offset = MAX_VAR_PARTS;
  8128. int len = XVECLEN (rtl, 0), i;
  8129. for (i = 0; i < len; i++)
  8130. {
  8131. rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
  8132. if (!REG_P (reg) || !REG_ATTRS (reg))
  8133. break;
  8134. if (!decl)
  8135. decl = REG_EXPR (reg);
  8136. if (REG_EXPR (reg) != decl)
  8137. break;
  8138. if (REG_OFFSET (reg) < offset)
  8139. offset = REG_OFFSET (reg);
  8140. }
  8141. if (i == len)
  8142. {
  8143. *declp = decl;
  8144. *offsetp = offset;
  8145. return true;
  8146. }
  8147. }
  8148. else if (MEM_P (rtl))
  8149. {
  8150. if (MEM_ATTRS (rtl))
  8151. {
  8152. *declp = MEM_EXPR (rtl);
  8153. *offsetp = INT_MEM_OFFSET (rtl);
  8154. return true;
  8155. }
  8156. }
  8157. return false;
  8158. }
/* Record the value for the ENTRY_VALUE of RTL as a global equivalence
   of VAL.  */

static void
record_entry_value (cselib_val *val, rtx rtl)
{
  /* Wrap RTL in an ENTRY_VALUE expression of the same mode.  */
  rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));

  ENTRY_VALUE_EXP (ev) = rtl;

  /* Make the equivalence permanent so it survives cselib table
     resets; anchored at the function start (get_insns).  */
  cselib_add_permanent_equiv (val, ev, get_insns ());
}
/* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK,
   so the debugger can see where the parameter lives on entry to the
   function.  Bails out silently whenever a usable location cannot be
   determined.  */

static void
vt_add_function_parameter (tree parm)
{
  rtx decl_rtl = DECL_RTL_IF_SET (parm);
  rtx incoming = DECL_INCOMING_RTL (parm);
  tree decl;
  machine_mode mode;
  HOST_WIDE_INT offset;
  dataflow_set *out;
  decl_or_value dv;

  if (TREE_CODE (parm) != PARM_DECL)
    return;

  if (!decl_rtl || !incoming)
    return;

  /* BLKmode locations cannot be tracked as single parts.  */
  if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
    return;

  /* If there is a DRAP register or a pseudo in internal_arg_pointer,
     rewrite the incoming location of parameters passed on the stack
     into MEMs based on the argument pointer, so that incoming doesn't
     depend on a pseudo.  */
  if (MEM_P (incoming)
      && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0)
		 == crtl->args.internal_arg_pointer
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    {
      HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
      if (GET_CODE (XEXP (incoming, 0)) == PLUS)
	off += INTVAL (XEXP (XEXP (incoming, 0), 1));
      incoming
	= replace_equiv_address_nv (incoming,
				    plus_constant (Pmode,
						   arg_pointer_rtx, off));
    }

#ifdef HAVE_window_save
  /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
     If the target machine has an explicit window save instruction, the
     actual entry value is the corresponding OUTGOING_REGNO instead.
     Each incoming/outgoing pair is also recorded in windowed_parm_regs
     for later use.  */
  if (HAVE_window_save && !crtl->uses_only_leaf_regs)
    {
      if (REG_P (incoming)
	  && HARD_REGISTER_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	{
	  parm_reg_t p;
	  p.incoming = incoming;
	  incoming
	    = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				  OUTGOING_REGNO (REGNO (incoming)), 0);
	  p.outgoing = incoming;
	  vec_safe_push (windowed_parm_regs, p);
	}
      else if (GET_CODE (incoming) == PARALLEL)
	{
	  /* Rebuild the PARALLEL with every element translated to its
	     outgoing register.  */
	  rtx outgoing
	    = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
	  int i;

	  for (i = 0; i < XVECLEN (incoming, 0); i++)
	    {
	      rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
	      parm_reg_t p;
	      p.incoming = reg;
	      reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
					OUTGOING_REGNO (REGNO (reg)), 0);
	      p.outgoing = reg;
	      XVECEXP (outgoing, 0, i)
		= gen_rtx_EXPR_LIST (VOIDmode, reg,
				     XEXP (XVECEXP (incoming, 0, i), 1));
	      vec_safe_push (windowed_parm_regs, p);
	    }

	  incoming = outgoing;
	}
      else if (MEM_P (incoming)
	       && REG_P (XEXP (incoming, 0))
	       && HARD_REGISTER_P (XEXP (incoming, 0)))
	{
	  /* Stack slot addressed through a windowed register: rewrite
	     the address register as well.  */
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      parm_reg_t p;
	      p.incoming = reg;
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      p.outgoing = reg;
	      vec_safe_push (windowed_parm_regs, p);
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	}
    }
#endif

  if (!vt_get_decl_and_offset (incoming, &decl, &offset))
    {
      if (MEM_P (incoming))
	{
	  /* This means argument is passed by invisible reference.  */
	  offset = 0;
	  decl = parm;
	}
      else
	{
	  /* Fall back to DECL_RTL, adjusting for any lowpart mode
	     difference between the two RTLs.  */
	  if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
	    return;
	  offset += byte_lowpart_offset (GET_MODE (incoming),
					 GET_MODE (decl_rtl));
	}
    }

  if (!decl)
    return;

  if (parm != decl)
    {
      /* If that DECL_RTL wasn't a pseudo that got spilled to
	 memory, bail out.  Otherwise, the spill slot sharing code
	 will force the memory to reference spill_slot_decl (%sfp),
	 so we don't match above.  That's ok, the pseudo must have
	 referenced the entire parameter, so just reset OFFSET.  */
      if (decl != get_spill_slot_decl (false))
	return;
      offset = 0;
    }

  if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
    return;

  out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;

  dv = dv_from_decl (parm);

  if (target_for_debug_bind (parm)
      /* We can't deal with these right now, because this kind of
	 variable is single-part.  ??? We could handle parallels
	 that describe multiple locations for the same single
	 value, but ATM we don't.  */
      && GET_CODE (incoming) != PARALLEL)
    {
      cselib_val *val;
      rtx lowpart;

      /* ??? We shouldn't ever hit this, but it may happen because
	 arguments passed by invisible reference aren't dealt with
	 above: incoming-rtl will have Pmode rather than the
	 expected mode for the type.  */
      if (offset)
	return;

      lowpart = var_lowpart (mode, incoming);
      if (!lowpart)
	return;

      /* Bind the parameter to a cselib VALUE and track the VALUE from
	 here on, rather than the decl directly.  */
      val = cselib_lookup_from_insn (lowpart, mode, true,
				     VOIDmode, get_insns ());

      /* ??? Float-typed values in memory are not handled by
	 cselib.  */
      if (val)
	{
	  preserve_value (val);
	  set_variable_part (out, val->val_rtx, dv, offset,
			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
	  dv = dv_from_value (val->val_rtx);
	}

      if (MEM_P (incoming))
	{
	  /* Canonicalize the stack slot address to its VALUE too.  */
	  val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
					 VOIDmode, get_insns ());
	  if (val)
	    {
	      preserve_value (val);
	      incoming = replace_equiv_address_nv (incoming, val->val_rtx);
	    }
	}
    }

  if (REG_P (incoming))
    {
      incoming = var_lowpart (mode, incoming);
      gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
      attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
			 incoming);
      set_variable_part (out, incoming, dv, offset,
			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
      if (dv_is_value_p (dv))
	{
	  record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
	  /* For reference parameters to integral types, also record
	     an entry value for the pointed-to object.  */
	  if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
	      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
	    {
	      machine_mode indmode
		= TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
	      rtx mem = gen_rtx_MEM (indmode, incoming);
	      cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
							 VOIDmode,
							 get_insns ());
	      if (val)
		{
		  preserve_value (val);
		  record_entry_value (val, mem);
		  set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
				     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
		}
	    }
	}
    }
  else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
    {
      /* A multi-part parameter spread over several registers: record
	 each piece at its own offset.  */
      int i;

      for (i = 0; i < XVECLEN (incoming, 0); i++)
	{
	  rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
	  offset = REG_OFFSET (reg);
	  gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
	  attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
	  set_variable_part (out, reg, dv, offset,
			     VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
	}
    }
  else if (MEM_P (incoming))
    {
      incoming = var_lowpart (mode, incoming);
      set_variable_part (out, incoming, dv, offset,
			 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
    }
}
  8382. /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */
  8383. static void
  8384. vt_add_function_parameters (void)
  8385. {
  8386. tree parm;
  8387. for (parm = DECL_ARGUMENTS (current_function_decl);
  8388. parm; parm = DECL_CHAIN (parm))
  8389. if (!POINTER_BOUNDS_P (parm))
  8390. vt_add_function_parameter (parm);
  8391. if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
  8392. {
  8393. tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
  8394. if (TREE_CODE (vexpr) == INDIRECT_REF)
  8395. vexpr = TREE_OPERAND (vexpr, 0);
  8396. if (TREE_CODE (vexpr) == PARM_DECL
  8397. && DECL_ARTIFICIAL (vexpr)
  8398. && !DECL_IGNORED_P (vexpr)
  8399. && DECL_NAMELESS (vexpr))
  8400. vt_add_function_parameter (vexpr);
  8401. }
  8402. }
/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
   has been eliminated.  */

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

  /* The CFA base register and its offset from the CFA depend on
     whether the target defines FRAME_POINTER_CFA_OFFSET.  */
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
  cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_base_rtx = arg_pointer_rtx;
  cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
  /* Refuse the hard frame pointer or any non-fixed register as CFA
     base — they can change during the function.  */
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  /* Tell alias analysis that cfa_base_rtx should share
     find_base_term value with stack pointer or hard frame pointer.  */
  if (!frame_pointer_needed)
    vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
  else if (!crtl->stack_realign_tried)
    vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);

  /* Create and preserve a cselib VALUE for the CFA base so it
     survives cselib_reset_table.  */
  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
				 VOIDmode, get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
}
/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations.  Returns false (and
   the pass is abandoned) if stack adjustments cannot be reconstructed
   consistently.  */

static bool
vt_initialize (void)
{
  basic_block bb;
  HOST_WIDE_INT fp_cfa_offset = -1;

  alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));

  /* Allocation pools for the core data structures of the pass.  */
  attrs_pool = create_alloc_pool ("attrs_def pool",
				  sizeof (struct attrs_def), 1024);
  var_pool = create_alloc_pool ("variable_def pool",
				sizeof (struct variable_def)
				/* variable_def ends with a flexible
				   array of MAX_VAR_PARTS parts, one of
				   which is already in sizeof.  */
				+ (MAX_VAR_PARTS - 1)
				* sizeof (((variable)NULL)->var_part[0]), 64);
  loc_chain_pool = create_alloc_pool ("location_chain_def pool",
				      sizeof (struct location_chain_def),
				      1024);
  shared_hash_pool = create_alloc_pool ("shared_hash_def pool",
					sizeof (struct shared_hash_def), 256);
  empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab = new variable_table_type (1);
  changed_variables = new variable_table_type (10);

  /* Init the IN and OUT sets.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (&VTI (bb)->in);
      dataflow_set_init (&VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      valvar_pool = create_alloc_pool ("small variable_def pool",
				      sizeof (struct variable_def), 256);
      preserved_values.create (256);
      global_get_addr_cache = new hash_map<rtx, rtx>;
    }
  else
    {
      scratch_regs = NULL;
      valvar_pool = NULL;
      global_get_addr_cache = NULL;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      rtx reg, expr;
      int ofst;
      cselib_val *val;

      /* Create a preserved VALUE for the CFA base register and
	 permanently equate it with the stack pointer adjusted by the
	 incoming-frame offset, so sp-relative addresses can be
	 canonicalized against it.  */
#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      ofst -= INCOMING_FRAME_SP_OFFSET;

      val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
				     VOIDmode, get_insns ());
      preserve_value (val);
      if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
	cselib_preserve_cfa_base_value (val, REGNO (reg));
      expr = plus_constant (GET_MODE (stack_pointer_rtx),
			    stack_pointer_rtx, -ofst);
      cselib_add_permanent_equiv (val, expr, get_insns ());

      if (ofst)
	{
	  /* Also record the inverse equivalence for the stack pointer
	     itself.  */
	  val = cselib_lookup_from_insn (stack_pointer_rtx,
					 GET_MODE (stack_pointer_rtx), 1,
					 VOIDmode, get_insns ());
	  preserve_value (val);
	  expr = plus_constant (GET_MODE (reg), reg, ofst);
	  cselib_add_permanent_equiv (val, expr, get_insns ());
	}
    }

  /* In order to factor out the adjustments made to the stack pointer or to
     the hard frame pointer and thus be able to use DW_OP_fbreg operations
     instead of individual location lists, we're going to rewrite MEMs based
     on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
     or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
     resp. arg_pointer_rtx.  We can do this either when there is no frame
     pointer in the function and stack adjustments are consistent for all
     basic blocks or when there is a frame pointer and no stack realignment.
     But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
     has been eliminated.  */
  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
	return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == stack_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

      /* Frame pointer case: remember the fp-to-CFA offset so the CFA
	 base can be initialized once the frame pointer is set up (see
	 the fp_setter_insn check in the main loop below).  */
#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    {
	      fp_cfa_offset -= INTVAL (XEXP (elim, 1));
	      elim = XEXP (elim, 0);
	    }
	  if (elim != hard_frame_pointer_rtx)
	    fp_cfa_offset = -1;
	}
      else
	fp_cfa_offset = -1;
    }

  /* If the stack is realigned and a DRAP register is used, we're going to
     rewrite MEMs based on it representing incoming locations of parameters
     passed on the stack into MEMs based on the argument pointer.  Although
     we aren't going to rewrite other MEMs, we still need to initialize the
     virtual CFA pointer in order to ensure that the argument pointer will
     be seen as a constant throughout the function.

     ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined.  */
  else if (stack_realign_drap)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == hard_frame_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }

  hard_frame_pointer_adjustment = -1;

  vt_add_function_parameters ();

  /* Main scan: walk each extended basic block (maximal fallthru chain
     of single-predecessor blocks), recording micro-operations for
     every insn and feeding insns through cselib.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      HOST_WIDE_INT pre, post = 0;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
	{
	  cselib_record_sets_hook = add_with_sets;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "first value: %i\n",
		     cselib_get_next_uid ());
	}

      /* Extend the region across trivial fallthru successors so
	 cselib state can be shared within it.  */
      first_bb = bb;
      for (;;)
	{
	  edge e;
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || ! single_pred_p (bb->next_bb))
	    break;
	  e = find_edge (bb, bb->next_bb);
	  if (! e || (e->flags & EDGE_FALLTHRU) == 0)
	    break;
	  bb = bb->next_bb;
	}
      last_bb = bb;

      /* Add the micro-operations to the vector.  */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
	{
	  HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
	  VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
	  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		{
		  if (!frame_pointer_needed)
		    {
		      /* Record stack adjustments occurring before the
			 insn as MO_ADJUST micro-operations.  */
		      insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		      if (pre)
			{
			  micro_operation mo;
			  mo.type = MO_ADJUST;
			  mo.u.adjust = pre;
			  mo.insn = insn;
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    log_op_type (PATTERN (insn), bb, insn,
					 MO_ADJUST, dump_file);
			  VTI (bb)->mos.safe_push (mo);
			  VTI (bb)->out.stack_adjust += pre;
			}
		    }

		  cselib_hook_called = false;
		  adjust_insn (bb, insn);
		  if (MAY_HAVE_DEBUG_INSNS)
		    {
		      if (CALL_P (insn))
			prepare_call_arguments (bb, insn);
		      cselib_process_insn (insn);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  print_rtl_single (dump_file, insn);
			  dump_cselib_table (dump_file);
			}
		    }
		  /* If cselib never invoked our hook (or cselib isn't
		     running), record the insn's sets directly.  */
		  if (!cselib_hook_called)
		    add_with_sets (insn, 0, 0);
		  /* Undo the temporary in-place changes adjust_insn
		     made via validate_change.  */
		  cancel_changes (0);

		  if (!frame_pointer_needed && post)
		    {
		      micro_operation mo;
		      mo.type = MO_ADJUST;
		      mo.u.adjust = post;
		      mo.insn = insn;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			log_op_type (PATTERN (insn), bb, insn,
				     MO_ADJUST, dump_file);
		      VTI (bb)->mos.safe_push (mo);
		      VTI (bb)->out.stack_adjust += post;
		    }

		  /* Once the insn that establishes the frame pointer is
		     seen, the CFA base can finally be initialized.  */
		  if (fp_cfa_offset != -1
		      && hard_frame_pointer_adjustment == -1
		      && fp_setter_insn (insn))
		    {
		      vt_init_cfa_base ();
		      hard_frame_pointer_adjustment = fp_cfa_offset;
		      /* Disassociate sp from fp now.  */
		      if (MAY_HAVE_DEBUG_INSNS)
			{
			  cselib_val *v;
			  cselib_invalidate_rtx (stack_pointer_rtx);
			  v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
					     VOIDmode);
			  if (v && !cselib_preserved_value_p (v))
			    {
			      cselib_set_value_sp_based (v);
			      preserve_value (v);
			    }
			}
		    }
		}
	    }
	  /* The computed stack adjustment must agree with what
	     vt_stack_adjustments determined earlier.  */
	  gcc_assert (offset == VTI (bb)->out.stack_adjust);
	}

      bb = last_bb;

      if (MAY_HAVE_DEBUG_INSNS)
	{
	  /* Flush cselib state between extended basic blocks, keeping
	     only explicitly preserved values.  */
	  cselib_preserve_only_values ();
	  cselib_reset_table (cselib_get_next_uid ());
	  cselib_record_sets_hook = NULL;
	}
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
  cfa_base_rtx = NULL_RTX;
  return true;
}
/* This is *not* reset after each function.  It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number.  */

static int debug_label_num = 1;
/* Get rid of all debug insns from the insn stream.  Debug bind insns
   for named, RTL-less labels are converted in place into
   NOTE_INSN_DELETED_DEBUG_LABEL notes (so the label name survives for
   debug info); all other debug insns are simply deleted.  */

static void
delete_debug_insns (void)
{
  basic_block bb;
  rtx_insn *insn, *next;

  if (!MAY_HAVE_DEBUG_INSNS)
    return;

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_SAFE (bb, insn, next)
	if (DEBUG_INSN_P (insn))
	  {
	    tree decl = INSN_VAR_LOCATION_DECL (insn);
	    if (TREE_CODE (decl) == LABEL_DECL
		&& DECL_NAME (decl)
		&& !DECL_RTL_SET_P (decl))
	      {
		/* Mutate the insn in place into a deleted-debug-label
		   note rather than removing it, keeping the label's
		   name and a fresh global label number.  */
		PUT_CODE (insn, NOTE);
		NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
		NOTE_DELETED_LABEL_NAME (insn)
		  = IDENTIFIER_POINTER (DECL_NAME (decl));
		SET_DECL_RTL (decl, insn);
		CODE_LABEL_NUMBER (insn) = debug_label_num++;
	      }
	    else
	      delete_insn (insn);
	  }
    }
}
  8744. /* Run a fast, BB-local only version of var tracking, to take care of
  8745. information that we don't do global analysis on, such that not all
  8746. information is lost. If SKIPPED holds, we're skipping the global
  8747. pass entirely, so we should try to use information it would have
  8748. handled as well.. */
  8749. static void
  8750. vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
  8751. {
  8752. /* ??? Just skip it all for now. */
  8753. delete_debug_insns ();
  8754. }
/* Free the data structures needed for variable tracking.  */

static void
vt_finalize (void)
{
  basic_block bb;

  /* Micro-operation vectors exist only for the real basic blocks.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      VTI (bb)->mos.release ();
    }

  /* Dataflow sets, however, also live on the entry/exit blocks, so
     walk ALL blocks here.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      dataflow_set_destroy (&VTI (bb)->in);
      dataflow_set_destroy (&VTI (bb)->out);
      if (VTI (bb)->permp)
	{
	  dataflow_set_destroy (VTI (bb)->permp);
	  XDELETE (VTI (bb)->permp);
	}
    }
  free_aux_for_blocks ();

  /* Tear down the shared hash tables and allocation pools.  */
  delete empty_shared_hash->htab;
  empty_shared_hash->htab = NULL;
  delete changed_variables;
  changed_variables = NULL;
  free_alloc_pool (attrs_pool);
  free_alloc_pool (var_pool);
  free_alloc_pool (loc_chain_pool);
  free_alloc_pool (shared_hash_pool);

  /* The cselib-related state below is only torn down when debug insns
     may exist; presumably it is only set up under the same condition
     in vt_initialize -- confirm against that function.  */
  if (MAY_HAVE_DEBUG_INSNS)
    {
      if (global_get_addr_cache)
	delete global_get_addr_cache;
      global_get_addr_cache = NULL;
      if (loc_exp_dep_pool)
	free_alloc_pool (loc_exp_dep_pool);
      loc_exp_dep_pool = NULL;
      free_alloc_pool (valvar_pool);
      preserved_values.release ();
      cselib_finish ();
      BITMAP_FREE (scratch_regs);
      scratch_regs = NULL;
    }

#ifdef HAVE_window_save
  vec_free (windowed_parm_regs);
#endif

  if (vui_vec)
    XDELETEVEC (vui_vec);
  vui_vec = NULL;
  vui_allocated = 0;
}
  8805. /* The entry point to variable tracking pass. */
  8806. static inline unsigned int
  8807. variable_tracking_main_1 (void)
  8808. {
  8809. bool success;
  8810. if (flag_var_tracking_assignments < 0
  8811. /* Var-tracking right now assumes the IR doesn't contain
  8812. any pseudos at this point. */
  8813. || targetm.no_register_allocation)
  8814. {
  8815. delete_debug_insns ();
  8816. return 0;
  8817. }
  8818. if (n_basic_blocks_for_fn (cfun) > 500 &&
  8819. n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
  8820. {
  8821. vt_debug_insns_local (true);
  8822. return 0;
  8823. }
  8824. mark_dfs_back_edges ();
  8825. if (!vt_initialize ())
  8826. {
  8827. vt_finalize ();
  8828. vt_debug_insns_local (true);
  8829. return 0;
  8830. }
  8831. success = vt_find_locations ();
  8832. if (!success && flag_var_tracking_assignments > 0)
  8833. {
  8834. vt_finalize ();
  8835. delete_debug_insns ();
  8836. /* This is later restored by our caller. */
  8837. flag_var_tracking_assignments = 0;
  8838. success = vt_initialize ();
  8839. gcc_assert (success);
  8840. success = vt_find_locations ();
  8841. }
  8842. if (!success)
  8843. {
  8844. vt_finalize ();
  8845. vt_debug_insns_local (false);
  8846. return 0;
  8847. }
  8848. if (dump_file && (dump_flags & TDF_DETAILS))
  8849. {
  8850. dump_dataflow_sets ();
  8851. dump_reg_info (dump_file);
  8852. dump_flow_info (dump_file, dump_flags);
  8853. }
  8854. timevar_push (TV_VAR_TRACKING_EMIT);
  8855. vt_emit_notes ();
  8856. timevar_pop (TV_VAR_TRACKING_EMIT);
  8857. vt_finalize ();
  8858. vt_debug_insns_local (false);
  8859. return 0;
  8860. }
  8861. unsigned int
  8862. variable_tracking_main (void)
  8863. {
  8864. unsigned int ret;
  8865. int save = flag_var_tracking_assignments;
  8866. ret = variable_tracking_main_1 ();
  8867. flag_var_tracking_assignments = save;
  8868. return ret;
  8869. }
namespace {

/* Pass metadata for the "vartrack" RTL pass.  */
const pass_data pass_data_variable_tracking =
{
  RTL_PASS, /* type */
  "vartrack", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_VAR_TRACKING, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The variable tracking pass object.  It is gated on var-tracking
   being enabled and the target not requesting that vartrack be
   delayed (targetm.delay_vartrack).  */
class pass_variable_tracking : public rtl_opt_pass
{
public:
  pass_variable_tracking (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_variable_tracking, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (flag_var_tracking && !targetm.delay_vartrack);
    }

  virtual unsigned int execute (function *)
    {
      return variable_tracking_main ();
    }

}; // class pass_variable_tracking

} // anon namespace
  8900. rtl_opt_pass *
  8901. make_pass_variable_tracking (gcc::context *ctxt)
  8902. {
  8903. return new pass_variable_tracking (ctxt);
  8904. }