- /* Tree inlining.
- Copyright (C) 2001-2015 Free Software Foundation, Inc.
- Contributed by Alexandre Oliva <aoliva@redhat.com>
- This file is part of GCC.
- GCC is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 3, or (at your option)
- any later version.
- GCC is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
- You should have received a copy of the GNU General Public License
- along with GCC; see the file COPYING3. If not see
- <http://www.gnu.org/licenses/>. */
- #include "config.h"
- #include "system.h"
- #include "coretypes.h"
- #include "tm.h"
- #include "diagnostic-core.h"
- #include "hash-set.h"
- #include "machmode.h"
- #include "vec.h"
- #include "double-int.h"
- #include "input.h"
- #include "alias.h"
- #include "symtab.h"
- #include "wide-int.h"
- #include "inchash.h"
- #include "tree.h"
- #include "fold-const.h"
- #include "stor-layout.h"
- #include "calls.h"
- #include "tree-inline.h"
- #include "flags.h"
- #include "params.h"
- #include "insn-config.h"
- #include "hashtab.h"
- #include "langhooks.h"
- #include "predict.h"
- #include "hard-reg-set.h"
- #include "function.h"
- #include "dominance.h"
- #include "cfg.h"
- #include "cfganal.h"
- #include "basic-block.h"
- #include "tree-iterator.h"
- #include "intl.h"
- #include "tree-ssa-alias.h"
- #include "internal-fn.h"
- #include "gimple-fold.h"
- #include "tree-eh.h"
- #include "gimple-expr.h"
- #include "is-a.h"
- #include "gimple.h"
- #include "gimplify.h"
- #include "gimple-iterator.h"
- #include "gimplify-me.h"
- #include "gimple-walk.h"
- #include "gimple-ssa.h"
- #include "tree-cfg.h"
- #include "tree-phinodes.h"
- #include "ssa-iterators.h"
- #include "stringpool.h"
- #include "tree-ssanames.h"
- #include "tree-into-ssa.h"
- #include "rtl.h"
- #include "statistics.h"
- #include "real.h"
- #include "fixed-value.h"
- #include "expmed.h"
- #include "dojump.h"
- #include "explow.h"
- #include "emit-rtl.h"
- #include "varasm.h"
- #include "stmt.h"
- #include "expr.h"
- #include "tree-dfa.h"
- #include "tree-ssa.h"
- #include "tree-pretty-print.h"
- #include "except.h"
- #include "debug.h"
- #include "hash-map.h"
- #include "plugin-api.h"
- #include "ipa-ref.h"
- #include "cgraph.h"
- #include "alloc-pool.h"
- #include "symbol-summary.h"
- #include "ipa-prop.h"
- #include "value-prof.h"
- #include "tree-pass.h"
- #include "target.h"
- #include "cfgloop.h"
- #include "builtins.h"
- #include "tree-chkp.h"
- #include "rtl.h" /* FIXME: For asm_str_count. */
- /* I'm not really happy about this, but we need to handle gimple and
- non-gimple trees. */
- /* Inlining, Cloning, Versioning, Parallelization
- Inlining: a function body is duplicated, but the PARM_DECLs are
- remapped into VAR_DECLs, and non-void RETURN_EXPRs become
- MODIFY_EXPRs that store to a dedicated returned-value variable.
- The duplicated eh_region info of the copy will later be appended
- to the info for the caller; the eh_region info in copied throwing
- statements and RESX statements is adjusted accordingly.
- Cloning: (only in C++) We have one body for a con/de/structor, and
- multiple function decls, each with a unique parameter list.
- Duplicate the body, using the given splay tree; some parameters
- will become constants (like 0 or 1).
- Versioning: a function body is duplicated and the result is a new
- function, rather than being inserted into blocks of an existing
- function as with inlining. Some parameters will become constants.
- Parallelization: a region of a function is duplicated resulting in
- a new function. Variables may be replaced with complex expressions
- to enable shared variable semantics.
- All of these will simultaneously look up any callgraph edges. If
- we're going to inline the duplicated function body, and the given
- function has some cloned callgraph nodes (one for each place this
- function will be inlined), those callgraph edges will be duplicated.
- If we're cloning the body, those callgraph edges will be
- updated to point into the new body. (Note that the original
- callgraph node and edge list will not be altered.)
- See the CALL_EXPR handling case in copy_tree_body_r (). */
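- /* Illustrative sketch (added for exposition; the temporaries shown are
-    hypothetical names, not taken from any dump): inlining
-      int sq (int i) { return i * i; }
-    at a call site "r = sq (a);" conceptually produces
-      int i.0;                  PARM_DECL remapped to a VAR_DECL
-      int retval.1;             dedicated returned-value variable
-      i.0 = a;
-      retval.1 = i.0 * i.0;     the RETURN_EXPR became a MODIFY_EXPR
-      r = retval.1;
-    with the callee's eh_region info appended to the caller's as described
-    above.  */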
- /* To Do:
- o In order to make inlining-on-trees work, we pessimized
- function-local static constants. In particular, they are now
- always output, even when not addressed. Fix this by treating
- function-local static constants just like global static
- constants; the back-end already knows not to output them if they
- are not needed.
- o Provide heuristics to clamp inlining of recursive template
- calls? */
- /* Weights that estimate_num_insns uses to estimate the size of the
- produced code. */
- eni_weights eni_size_weights;
- /* Weights that estimate_num_insns uses to estimate the time necessary
- to execute the produced code. */
- eni_weights eni_time_weights;
- /* Prototypes. */
- static tree declare_return_variable (copy_body_data *, tree, tree, tree,
- basic_block);
- static void remap_block (tree *, copy_body_data *);
- static void copy_bind_expr (tree *, int *, copy_body_data *);
- static void declare_inline_vars (tree, tree);
- static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
- static void prepend_lexical_block (tree current_block, tree new_block);
- static tree copy_decl_to_var (tree, copy_body_data *);
- static tree copy_result_decl_to_var (tree, copy_body_data *);
- static tree copy_decl_maybe_to_var (tree, copy_body_data *);
- static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
- static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
- static void insert_init_stmt (copy_body_data *, basic_block, gimple);
- /* Insert a tree->tree mapping for ID. Although the name suggests
- that the trees should be variables, it is used for more than that. */
- void
- insert_decl_map (copy_body_data *id, tree key, tree value)
- {
- id->decl_map->put (key, value);
- /* Always insert an identity map as well. If we see this same new
- node again, we won't want to duplicate it a second time. */
- if (key != value)
- id->decl_map->put (value, value);
- }
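- /* For example (hypothetical decls, for exposition only): mapping the
-    source PARM_DECL 'n' to its copy 'n.2' also records the identity
-    entry 'n.2' -> 'n.2', so that if the new node is encountered again
-    it maps to itself instead of being copied a second time.  */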
- /* Insert a tree->tree mapping for ID. This is only used for
- variables. */
- static void
- insert_debug_decl_map (copy_body_data *id, tree key, tree value)
- {
- if (!gimple_in_ssa_p (id->src_cfun))
- return;
- if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
- return;
- if (!target_for_debug_bind (key))
- return;
- gcc_assert (TREE_CODE (key) == PARM_DECL);
- gcc_assert (TREE_CODE (value) == VAR_DECL);
- if (!id->debug_map)
- id->debug_map = new hash_map<tree, tree>;
- id->debug_map->put (key, value);
- }
- /* If nonzero, we're remapping the contents of inlined debug
- statements. If negative, an error has occurred, such as a
- reference to a variable that isn't available in the inlined
- context. */
- static int processing_debug_stmt = 0;
- /* Construct new SSA name for old NAME. ID is the inline context. */
- static tree
- remap_ssa_name (tree name, copy_body_data *id)
- {
- tree new_tree, var;
- tree *n;
- gcc_assert (TREE_CODE (name) == SSA_NAME);
- n = id->decl_map->get (name);
- if (n)
- return unshare_expr (*n);
- if (processing_debug_stmt)
- {
- if (SSA_NAME_IS_DEFAULT_DEF (name)
- && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
- && id->entry_bb == NULL
- && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
- {
- tree vexpr = make_node (DEBUG_EXPR_DECL);
- gimple def_temp;
- gimple_stmt_iterator gsi;
- tree val = SSA_NAME_VAR (name);
- n = id->decl_map->get (val);
- if (n != NULL)
- val = *n;
- if (TREE_CODE (val) != PARM_DECL)
- {
- processing_debug_stmt = -1;
- return name;
- }
- def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
- DECL_ARTIFICIAL (vexpr) = 1;
- TREE_TYPE (vexpr) = TREE_TYPE (name);
- DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
- gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
- gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
- return vexpr;
- }
- processing_debug_stmt = -1;
- return name;
- }
- /* Remap anonymous SSA names or SSA names of anonymous decls. */
- var = SSA_NAME_VAR (name);
- if (!var
- || (!SSA_NAME_IS_DEFAULT_DEF (name)
- && TREE_CODE (var) == VAR_DECL
- && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
- && DECL_ARTIFICIAL (var)
- && DECL_IGNORED_P (var)
- && !DECL_NAME (var)))
- {
- struct ptr_info_def *pi;
- new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
- if (!var && SSA_NAME_IDENTIFIER (name))
- SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
- insert_decl_map (id, name, new_tree);
- SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
- = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
- /* At least IPA points-to info can be directly transferred. */
- if (id->src_cfun->gimple_df
- && id->src_cfun->gimple_df->ipa_pta
- && (pi = SSA_NAME_PTR_INFO (name))
- && !pi->pt.anything)
- {
- struct ptr_info_def *new_pi = get_ptr_info (new_tree);
- new_pi->pt = pi->pt;
- }
- return new_tree;
- }
- /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
- in copy_bb. */
- new_tree = remap_decl (var, id);
- /* We might have substituted a constant or another SSA_NAME for
- the variable.
- Replace the SSA name representing the RESULT_DECL with the variable
- during inlining: this saves us from needing to introduce a PHI node
- in the case where the return value is only partly initialized. */
- if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
- && (!SSA_NAME_VAR (name)
- || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
- || !id->transform_return_to_modify))
- {
- struct ptr_info_def *pi;
- new_tree = make_ssa_name (new_tree);
- insert_decl_map (id, name, new_tree);
- SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
- = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
- /* At least IPA points-to info can be directly transferred. */
- if (id->src_cfun->gimple_df
- && id->src_cfun->gimple_df->ipa_pta
- && (pi = SSA_NAME_PTR_INFO (name))
- && !pi->pt.anything)
- {
- struct ptr_info_def *new_pi = get_ptr_info (new_tree);
- new_pi->pt = pi->pt;
- }
- if (SSA_NAME_IS_DEFAULT_DEF (name))
- {
- /* By inlining a function with an uninitialized variable, we might
- extend its lifetime (the variable might get reused). This causes
- an ICE if we end up extending the lifetime of an SSA name across
- an abnormal edge, and it also increases register pressure.
- We simply initialize all uninitialized vars to 0, except when we
- are inlining into the very first BB. We could avoid this for all
- BBs that are not inside strongly connected regions of the CFG,
- but that is expensive to test. */
- if (id->entry_bb
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
- && (!SSA_NAME_VAR (name)
- || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
- && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
- 0)->dest
- || EDGE_COUNT (id->entry_bb->preds) != 1))
- {
- gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
- gimple init_stmt;
- tree zero = build_zero_cst (TREE_TYPE (new_tree));
- init_stmt = gimple_build_assign (new_tree, zero);
- gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
- SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
- }
- else
- {
- SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
- set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
- }
- }
- }
- else
- insert_decl_map (id, name, new_tree);
- return new_tree;
- }
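- /* Illustrative sketch (hypothetical code, for exposition only) of the
-    zero-initialization above: if the callee reads an uninitialized local,
-      int g (void) { int u; return u; }
-    the default-definition SSA name for 'u' has no real defining statement;
-    when the body is copied into a block reached by abnormal control flow,
-    the new name is instead given an explicit "u = 0" assignment so its
-    lifetime does not silently stretch across abnormal edges.  */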
- /* Remap DECL during the copying of the BLOCK tree for the function. */
- tree
- remap_decl (tree decl, copy_body_data *id)
- {
- tree *n;
- /* We only remap local variables in the current function. */
- /* See if we have remapped this declaration. */
- n = id->decl_map->get (decl);
- if (!n && processing_debug_stmt)
- {
- processing_debug_stmt = -1;
- return decl;
- }
- /* If we didn't already have an equivalent for this declaration,
- create one now. */
- if (!n)
- {
- /* Make a copy of the variable or label. */
- tree t = id->copy_decl (decl, id);
- /* Remember it, so that if we encounter this local entity again
- we can reuse this copy. Do this early because remap_type may
- need this decl for TYPE_STUB_DECL. */
- insert_decl_map (id, decl, t);
- if (!DECL_P (t))
- return t;
- /* Remap types, if necessary. */
- TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
- if (TREE_CODE (t) == TYPE_DECL)
- DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
- /* Remap sizes as necessary. */
- walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
- walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
- /* If fields, do likewise for offset and qualifier. */
- if (TREE_CODE (t) == FIELD_DECL)
- {
- walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
- if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
- walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
- }
- return t;
- }
- if (id->do_not_unshare)
- return *n;
- else
- return unshare_expr (*n);
- }
- static tree
- remap_type_1 (tree type, copy_body_data *id)
- {
- tree new_tree, t;
- /* We do need a copy. Build and register it now. If this is a pointer or
- reference type, remap the designated type and make a new pointer or
- reference type. */
- if (TREE_CODE (type) == POINTER_TYPE)
- {
- new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
- TYPE_MODE (type),
- TYPE_REF_CAN_ALIAS_ALL (type));
- if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
- new_tree = build_type_attribute_qual_variant (new_tree,
- TYPE_ATTRIBUTES (type),
- TYPE_QUALS (type));
- insert_decl_map (id, type, new_tree);
- return new_tree;
- }
- else if (TREE_CODE (type) == REFERENCE_TYPE)
- {
- new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
- TYPE_MODE (type),
- TYPE_REF_CAN_ALIAS_ALL (type));
- if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
- new_tree = build_type_attribute_qual_variant (new_tree,
- TYPE_ATTRIBUTES (type),
- TYPE_QUALS (type));
- insert_decl_map (id, type, new_tree);
- return new_tree;
- }
- else
- new_tree = copy_node (type);
- insert_decl_map (id, type, new_tree);
- /* This is a new type, not a copy of an old type. Need to reassociate
- variants. We can handle everything except the main variant lazily. */
- t = TYPE_MAIN_VARIANT (type);
- if (type != t)
- {
- t = remap_type (t, id);
- TYPE_MAIN_VARIANT (new_tree) = t;
- TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
- TYPE_NEXT_VARIANT (t) = new_tree;
- }
- else
- {
- TYPE_MAIN_VARIANT (new_tree) = new_tree;
- TYPE_NEXT_VARIANT (new_tree) = NULL;
- }
- if (TYPE_STUB_DECL (type))
- TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
- /* Lazily create pointer and reference types. */
- TYPE_POINTER_TO (new_tree) = NULL;
- TYPE_REFERENCE_TO (new_tree) = NULL;
- /* Copy all types that may contain references to local variables; be sure to
- preserve sharing between the type and its main variant when possible. */
- switch (TREE_CODE (new_tree))
- {
- case INTEGER_TYPE:
- case REAL_TYPE:
- case FIXED_POINT_TYPE:
- case ENUMERAL_TYPE:
- case BOOLEAN_TYPE:
- if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
- {
- gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
- gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
- TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
- TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
- }
- else
- {
- t = TYPE_MIN_VALUE (new_tree);
- if (t && TREE_CODE (t) != INTEGER_CST)
- walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
- t = TYPE_MAX_VALUE (new_tree);
- if (t && TREE_CODE (t) != INTEGER_CST)
- walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
- }
- return new_tree;
- case FUNCTION_TYPE:
- if (TYPE_MAIN_VARIANT (new_tree) != new_tree
- && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
- TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
- else
- TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
- if (TYPE_MAIN_VARIANT (new_tree) != new_tree
- && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
- TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
- else
- walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
- return new_tree;
- case ARRAY_TYPE:
- if (TYPE_MAIN_VARIANT (new_tree) != new_tree
- && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
- TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
- else
- TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
- if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
- {
- gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
- TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
- }
- else
- TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
- break;
- case RECORD_TYPE:
- case UNION_TYPE:
- case QUAL_UNION_TYPE:
- if (TYPE_MAIN_VARIANT (type) != type
- && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
- TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
- else
- {
- tree f, nf = NULL;
- for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
- {
- t = remap_decl (f, id);
- DECL_CONTEXT (t) = new_tree;
- DECL_CHAIN (t) = nf;
- nf = t;
- }
- TYPE_FIELDS (new_tree) = nreverse (nf);
- }
- break;
- case OFFSET_TYPE:
- default:
- /* This type shouldn't have been considered variably sized. */
- gcc_unreachable ();
- }
- /* All variants of the type share the same size, so use the already remapped data. */
- if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
- {
- gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
- gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
- TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
- TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
- }
- else
- {
- walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
- walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
- }
- return new_tree;
- }
- tree
- remap_type (tree type, copy_body_data *id)
- {
- tree *node;
- tree tmp;
- if (type == NULL)
- return type;
- /* See if we have remapped this type. */
- node = id->decl_map->get (type);
- if (node)
- return *node;
- /* The type only needs remapping if it's variably modified. */
- if (! variably_modified_type_p (type, id->src_fn))
- {
- insert_decl_map (id, type, type);
- return type;
- }
- id->remapping_type_depth++;
- tmp = remap_type_1 (type, id);
- id->remapping_type_depth--;
- return tmp;
- }
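- /* Illustrative example (not from the sources): a variably modified type
-    is one whose layout depends on a value living in the function, e.g.
-      void f (int n) { char buf[n]; ... }
-    The array type of 'buf' references the PARM_DECL 'n'; when 'f' is
-    inlined, 'n' is remapped, so the type must be remapped too.  Ordinary
-    types such as 'int' are simply mapped to themselves above.  */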
- /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
- static bool
- can_be_nonlocal (tree decl, copy_body_data *id)
- {
- /* We cannot duplicate function decls. */
- if (TREE_CODE (decl) == FUNCTION_DECL)
- return true;
- /* Local static vars must be non-local or we get multiple declaration
- problems. */
- if (TREE_CODE (decl) == VAR_DECL
- && !auto_var_in_fn_p (decl, id->src_fn))
- return true;
- return false;
- }
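- /* Illustrative example (not from the sources): given
-      int counter (void) { static int n; return ++n; }
-    the VAR_DECL 'n' is not an auto variable of the function, so it is
-    treated as nonlocal here; duplicating it for every inlined copy
-    would wrongly create several distinct counters.  */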
- static tree
- remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
- copy_body_data *id)
- {
- tree old_var;
- tree new_decls = NULL_TREE;
- /* Remap its variables. */
- for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
- {
- tree new_var;
- if (can_be_nonlocal (old_var, id))
- {
- /* We need to add this variable to the local decls as otherwise
- nothing else will do so. */
- if (TREE_CODE (old_var) == VAR_DECL
- && ! DECL_EXTERNAL (old_var))
- add_local_decl (cfun, old_var);
- if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
- && !DECL_IGNORED_P (old_var)
- && nonlocalized_list)
- vec_safe_push (*nonlocalized_list, old_var);
- continue;
- }
- /* Remap the variable. */
- new_var = remap_decl (old_var, id);
- /* If we didn't remap this variable, we can't mess with its
- TREE_CHAIN. If we remapped this variable to the return slot, it's
- already declared somewhere else, so don't declare it here. */
- if (new_var == id->retvar)
- ;
- else if (!new_var)
- {
- if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
- && !DECL_IGNORED_P (old_var)
- && nonlocalized_list)
- vec_safe_push (*nonlocalized_list, old_var);
- }
- else
- {
- gcc_assert (DECL_P (new_var));
- DECL_CHAIN (new_var) = new_decls;
- new_decls = new_var;
-
- /* Also copy value-expressions. */
- if (TREE_CODE (new_var) == VAR_DECL
- && DECL_HAS_VALUE_EXPR_P (new_var))
- {
- tree tem = DECL_VALUE_EXPR (new_var);
- bool old_regimplify = id->regimplify;
- id->remapping_type_depth++;
- walk_tree (&tem, copy_tree_body_r, id, NULL);
- id->remapping_type_depth--;
- id->regimplify = old_regimplify;
- SET_DECL_VALUE_EXPR (new_var, tem);
- }
- }
- }
- return nreverse (new_decls);
- }
- /* Copy the BLOCK to contain remapped versions of the variables
- therein. And hook the new block into the block-tree. */
- static void
- remap_block (tree *block, copy_body_data *id)
- {
- tree old_block;
- tree new_block;
- /* Make the new block. */
- old_block = *block;
- new_block = make_node (BLOCK);
- TREE_USED (new_block) = TREE_USED (old_block);
- BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
- BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
- BLOCK_NONLOCALIZED_VARS (new_block)
- = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
- *block = new_block;
- /* Remap its variables. */
- BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
- &BLOCK_NONLOCALIZED_VARS (new_block),
- id);
- if (id->transform_lang_insert_block)
- id->transform_lang_insert_block (new_block);
- /* Remember the remapped block. */
- insert_decl_map (id, old_block, new_block);
- }
- /* Copy the whole block tree and root it in id->block. */
- static tree
- remap_blocks (tree block, copy_body_data *id)
- {
- tree t;
- tree new_tree = block;
- if (!block)
- return NULL;
- remap_block (&new_tree, id);
- gcc_assert (new_tree != block);
- for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
- prepend_lexical_block (new_tree, remap_blocks (t, id));
- /* Blocks are in arbitrary order, but make things slightly prettier by
- not swapping their order when producing a copy. */
- BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
- return new_tree;
- }
- /* Remap the block tree rooted at BLOCK to nothing. */
- static void
- remap_blocks_to_null (tree block, copy_body_data *id)
- {
- tree t;
- insert_decl_map (id, block, NULL_TREE);
- for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
- remap_blocks_to_null (t, id);
- }
- static void
- copy_statement_list (tree *tp)
- {
- tree_stmt_iterator oi, ni;
- tree new_tree;
- new_tree = alloc_stmt_list ();
- ni = tsi_start (new_tree);
- oi = tsi_start (*tp);
- TREE_TYPE (new_tree) = TREE_TYPE (*tp);
- *tp = new_tree;
- for (; !tsi_end_p (oi); tsi_next (&oi))
- {
- tree stmt = tsi_stmt (oi);
- if (TREE_CODE (stmt) == STATEMENT_LIST)
- /* This copy is not redundant; tsi_link_after will smash this
- STATEMENT_LIST into the end of the one we're building, and we
- don't want to do that with the original. */
- copy_statement_list (&stmt);
- tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
- }
- }
- static void
- copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
- {
- tree block = BIND_EXPR_BLOCK (*tp);
- /* Copy (and replace) the statement. */
- copy_tree_r (tp, walk_subtrees, NULL);
- if (block)
- {
- remap_block (&block, id);
- BIND_EXPR_BLOCK (*tp) = block;
- }
- if (BIND_EXPR_VARS (*tp))
- /* This will remap a lot of the same decls again, but this should be
- harmless. */
- BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
- }
- /* Create a new gimple_seq by remapping all the statements in BODY
- using the inlining information in ID. */
- static gimple_seq
- remap_gimple_seq (gimple_seq body, copy_body_data *id)
- {
- gimple_stmt_iterator si;
- gimple_seq new_body = NULL;
- for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
- {
- gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
- gimple_seq_add_seq (&new_body, new_stmts);
- }
- return new_body;
- }
- /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
- block using the mapping information in ID. */
- static gimple
- copy_gimple_bind (gbind *stmt, copy_body_data *id)
- {
- gimple new_bind;
- tree new_block, new_vars;
- gimple_seq body, new_body;
- /* Copy the statement. Note that we purposely don't use copy_stmt
- here because we need to remap statements as we copy. */
- body = gimple_bind_body (stmt);
- new_body = remap_gimple_seq (body, id);
- new_block = gimple_bind_block (stmt);
- if (new_block)
- remap_block (&new_block, id);
- /* This will remap a lot of the same decls again, but this should be
- harmless. */
- new_vars = gimple_bind_vars (stmt);
- if (new_vars)
- new_vars = remap_decls (new_vars, NULL, id);
- new_bind = gimple_build_bind (new_vars, new_body, new_block);
- return new_bind;
- }
- /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
- static bool
- is_parm (tree decl)
- {
- if (TREE_CODE (decl) == SSA_NAME)
- {
- decl = SSA_NAME_VAR (decl);
- if (!decl)
- return false;
- }
- return (TREE_CODE (decl) == PARM_DECL);
- }
- /* Remap the dependence CLIQUE from the source to the destination function
- as specified in ID. */
- static unsigned short
- remap_dependence_clique (copy_body_data *id, unsigned short clique)
- {
- if (clique == 0)
- return 0;
- if (!id->dependence_map)
- id->dependence_map
- = new hash_map<unsigned short, unsigned short, dependence_hasher>;
- bool existed;
- unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
- if (!existed)
- newc = ++cfun->last_clique;
- return newc;
- }
- /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
- 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
- WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
- recursing into the child nodes of *TP. */
- static tree
- remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
- {
- struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
- copy_body_data *id = (copy_body_data *) wi_p->info;
- tree fn = id->src_fn;
- if (TREE_CODE (*tp) == SSA_NAME)
- {
- *tp = remap_ssa_name (*tp, id);
- *walk_subtrees = 0;
- return NULL;
- }
- else if (auto_var_in_fn_p (*tp, fn))
- {
- /* Local variables and labels need to be replaced by equivalent
- variables. We don't want to copy static variables; there's
- only one of those, no matter how many times we inline the
- containing function. Similarly for globals from an outer
- function. */
- tree new_decl;
- /* Remap the declaration. */
- new_decl = remap_decl (*tp, id);
- gcc_assert (new_decl);
- /* Replace this variable with the copy. */
- STRIP_TYPE_NOPS (new_decl);
- /* ??? The C++ frontend uses a void * zero pointer to initialize
- any other type. This confuses the middle-end type verification.
- As cloned bodies do not go through gimplification again, the fixup
- there doesn't trigger. */
- if (TREE_CODE (new_decl) == INTEGER_CST
- && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
- new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
- *tp = new_decl;
- *walk_subtrees = 0;
- }
- else if (TREE_CODE (*tp) == STATEMENT_LIST)
- gcc_unreachable ();
- else if (TREE_CODE (*tp) == SAVE_EXPR)
- gcc_unreachable ();
- else if (TREE_CODE (*tp) == LABEL_DECL
- && (!DECL_CONTEXT (*tp)
- || decl_function_context (*tp) == id->src_fn))
- /* These may need to be remapped for EH handling. */
- *tp = remap_decl (*tp, id);
- else if (TREE_CODE (*tp) == FIELD_DECL)
- {
- /* If the enclosing record type is variably_modified_type_p, the field
- has already been remapped. Otherwise, it need not be. */
- tree *n = id->decl_map->get (*tp);
- if (n)
- *tp = *n;
- *walk_subtrees = 0;
- }
- else if (TYPE_P (*tp))
- /* Types may need remapping as well. */
- *tp = remap_type (*tp, id);
- else if (CONSTANT_CLASS_P (*tp))
- {
- /* If this is a constant, we have to copy the node iff the type
- will be remapped. copy_tree_r will not copy a constant. */
- tree new_type = remap_type (TREE_TYPE (*tp), id);
- if (new_type == TREE_TYPE (*tp))
- *walk_subtrees = 0;
- else if (TREE_CODE (*tp) == INTEGER_CST)
- *tp = wide_int_to_tree (new_type, *tp);
- else
- {
- *tp = copy_node (*tp);
- TREE_TYPE (*tp) = new_type;
- }
- }
- else
- {
- /* Otherwise, just copy the node. Note that copy_tree_r already
- knows not to copy VAR_DECLs, etc., so this is safe. */
- if (TREE_CODE (*tp) == MEM_REF)
- {
- /* We need to re-canonicalize MEM_REFs from inline substitutions
- that can happen when a pointer argument is an ADDR_EXPR.
- Recurse here manually to allow that. */
- tree ptr = TREE_OPERAND (*tp, 0);
- tree type = remap_type (TREE_TYPE (*tp), id);
- tree old = *tp;
- walk_tree (&ptr, remap_gimple_op_r, data, NULL);
- *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
- TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
- TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
- TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
- if (MR_DEPENDENCE_CLIQUE (old) != 0)
- {
- MR_DEPENDENCE_CLIQUE (*tp)
- = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
- MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
- }
- /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
- remapped a parameter as the property might be valid only
- for the parameter itself. */
- if (TREE_THIS_NOTRAP (old)
- && (!is_parm (TREE_OPERAND (old, 0))
- || (!id->transform_parameter && is_parm (ptr))))
- TREE_THIS_NOTRAP (*tp) = 1;
- *walk_subtrees = 0;
- return NULL;
- }
- /* Here is the "usual case". Copy this tree node, and then
- tweak some special cases. */
- copy_tree_r (tp, walk_subtrees, NULL);
- if (TREE_CODE (*tp) != OMP_CLAUSE)
- TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
- if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
- {
- /* The copied TARGET_EXPR has never been expanded, even if the
- original node was expanded already. */
- TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
- TREE_OPERAND (*tp, 3) = NULL_TREE;
- }
- else if (TREE_CODE (*tp) == ADDR_EXPR)
- {
- /* Variable substitution need not be simple. In particular,
- consider the MEM_REF substitution above. Make sure that
- TREE_CONSTANT and friends are up-to-date. */
- int invariant = is_gimple_min_invariant (*tp);
- walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
- recompute_tree_invariant_for_addr_expr (*tp);
- /* If this used to be invariant, but is not any longer,
- then regimplification is probably needed. */
- if (invariant && !is_gimple_min_invariant (*tp))
- id->regimplify = true;
- *walk_subtrees = 0;
- }
- }
- /* Update the TREE_BLOCK for the cloned expr. */
- if (EXPR_P (*tp))
- {
- tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
- tree old_block = TREE_BLOCK (*tp);
- if (old_block)
- {
- tree *n;
- n = id->decl_map->get (TREE_BLOCK (*tp));
- if (n)
- new_block = *n;
- }
- TREE_SET_BLOCK (*tp, new_block);
- }
- /* Keep iterating. */
- return NULL_TREE;
- }
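- /* Illustrative sketch (hypothetical code, for exposition only) of the
-    MEM_REF re-canonicalization above: inlining
-      void set (int *p) { *p = 1; }
-    at a call "set (&x);" initially yields a dereference of the substituted
-    ADDR_EXPR '&x'; recursing into the remapped pointer operand and
-    rebuilding the MEM_REF with fold_build2 lets it fold back to a plain
-    store to 'x' rather than '*&x'.  */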
- /* Called from copy_body_id via walk_tree. DATA is really a
- `copy_body_data *'. */
- tree
- copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
- {
- copy_body_data *id = (copy_body_data *) data;
- tree fn = id->src_fn;
- tree new_block;
- /* Begin by recognizing trees that we'll completely rewrite for the
- inlining context. Our output for these trees is completely
- different from our input (e.g. RETURN_EXPR is deleted, and morphs
- into an edge). Further down, we'll handle trees that get
- duplicated and/or tweaked. */
- /* When requested, RETURN_EXPRs should be transformed to just the
- contained MODIFY_EXPR. The branch semantics of the return will
- be handled elsewhere by manipulating the CFG rather than a statement. */
- if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
- {
- tree assignment = TREE_OPERAND (*tp, 0);
- /* If we're returning something, just turn that into an
- assignment to the equivalent of the original RESULT_DECL.
- If the "assignment" is just the result decl, the result
- decl has already been set (e.g. a recent "foo (&result_decl,
- ...)"); just toss the entire RETURN_EXPR. */
- if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
- {
- /* Replace the RETURN_EXPR with (a copy of) the
- MODIFY_EXPR hanging underneath. */
- *tp = copy_node (assignment);
- }
- else /* Else the RETURN_EXPR returns no value. */
- {
- *tp = NULL;
- return (tree) (void *)1;
- }
- }
- else if (TREE_CODE (*tp) == SSA_NAME)
- {
- *tp = remap_ssa_name (*tp, id);
- *walk_subtrees = 0;
- return NULL;
- }
- /* Local variables and labels need to be replaced by equivalent
- variables. We don't want to copy static variables; there's only
- one of those, no matter how many times we inline the containing
- function. Similarly for globals from an outer function. */
- else if (auto_var_in_fn_p (*tp, fn))
- {
- tree new_decl;
- /* Remap the declaration. */
- new_decl = remap_decl (*tp, id);
- gcc_assert (new_decl);
- /* Replace this variable with the copy. */
- STRIP_TYPE_NOPS (new_decl);
- *tp = new_decl;
- *walk_subtrees = 0;
- }
- else if (TREE_CODE (*tp) == STATEMENT_LIST)
- copy_statement_list (tp);
- else if (TREE_CODE (*tp) == SAVE_EXPR
- || TREE_CODE (*tp) == TARGET_EXPR)
- remap_save_expr (tp, id->decl_map, walk_subtrees);
- else if (TREE_CODE (*tp) == LABEL_DECL
- && (! DECL_CONTEXT (*tp)
- || decl_function_context (*tp) == id->src_fn))
- /* These may need to be remapped for EH handling. */
- *tp = remap_decl (*tp, id);
- else if (TREE_CODE (*tp) == BIND_EXPR)
- copy_bind_expr (tp, walk_subtrees, id);
- /* Types may need remapping as well. */
- else if (TYPE_P (*tp))
- *tp = remap_type (*tp, id);
- /* If this is a constant, we have to copy the node iff the type will be
- remapped. copy_tree_r will not copy a constant. */
- else if (CONSTANT_CLASS_P (*tp))
- {
- tree new_type = remap_type (TREE_TYPE (*tp), id);
- if (new_type == TREE_TYPE (*tp))
- *walk_subtrees = 0;
- else if (TREE_CODE (*tp) == INTEGER_CST)
- *tp = wide_int_to_tree (new_type, *tp);
- else
- {
- *tp = copy_node (*tp);
- TREE_TYPE (*tp) = new_type;
- }
- }
- /* Otherwise, just copy the node. Note that copy_tree_r already
- knows not to copy VAR_DECLs, etc., so this is safe. */
- else
- {
- /* Here we handle trees that are not completely rewritten.
- First we detect some inlining-induced bogosities for
- discarding. */
- if (TREE_CODE (*tp) == MODIFY_EXPR
- && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
- && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
- {
- /* Some assignments VAR = VAR; don't generate any rtl code
- and thus don't count as variable modification. Avoid
- keeping bogosities like 0 = 0. */
- tree decl = TREE_OPERAND (*tp, 0), value;
- tree *n;
- n = id->decl_map->get (decl);
- if (n)
- {
- value = *n;
- STRIP_TYPE_NOPS (value);
- if (TREE_CONSTANT (value) || TREE_READONLY (value))
- {
- *tp = build_empty_stmt (EXPR_LOCATION (*tp));
- return copy_tree_body_r (tp, walk_subtrees, data);
- }
- }
- }
- else if (TREE_CODE (*tp) == INDIRECT_REF)
- {
- /* Get rid of *& from inline substitutions that can happen when a
- pointer argument is an ADDR_EXPR. */
- tree decl = TREE_OPERAND (*tp, 0);
- tree *n = id->decl_map->get (decl);
- if (n)
- {
- /* If we happen to get an ADDR_EXPR in n->value, strip
- it manually here as we'll eventually get ADDR_EXPRs
- which lie about their types pointed to. In this case
- build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
- but we absolutely rely on that. As fold_indirect_ref
- does other useful transformations, try that first, though. */
- tree type = TREE_TYPE (*tp);
- tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
- tree old = *tp;
- *tp = gimple_fold_indirect_ref (ptr);
- if (! *tp)
- {
- if (TREE_CODE (ptr) == ADDR_EXPR)
- {
- *tp
- = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
- /* ??? We should either assert here or build
- a VIEW_CONVERT_EXPR instead of blindly leaking
- incompatible types to our IL. */
- if (! *tp)
- *tp = TREE_OPERAND (ptr, 0);
- }
- else
- {
- *tp = build1 (INDIRECT_REF, type, ptr);
- TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
- TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
- TREE_READONLY (*tp) = TREE_READONLY (old);
- /* We cannot propagate the TREE_THIS_NOTRAP flag if we
- have remapped a parameter as the property might be
- valid only for the parameter itself. */
- if (TREE_THIS_NOTRAP (old)
- && (!is_parm (TREE_OPERAND (old, 0))
- || (!id->transform_parameter && is_parm (ptr))))
- TREE_THIS_NOTRAP (*tp) = 1;
- }
- }
- *walk_subtrees = 0;
- return NULL;
- }
- }
- else if (TREE_CODE (*tp) == MEM_REF)
- {
- /* We need to re-canonicalize MEM_REFs from inline substitutions
- that can happen when a pointer argument is an ADDR_EXPR.
- Recurse here manually to allow that. */
- tree ptr = TREE_OPERAND (*tp, 0);
- tree type = remap_type (TREE_TYPE (*tp), id);
- tree old = *tp;
- walk_tree (&ptr, copy_tree_body_r, data, NULL);
- *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
- TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
- TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
- TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
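- /* Dependence cliques are numbered per function, so translate the clique
- into the destination function; the base id within the clique is kept. */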
- if (MR_DEPENDENCE_CLIQUE (old) != 0)
- {
- MR_DEPENDENCE_CLIQUE (*tp)
- = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
- MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
- }
- /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
- remapped a parameter as the property might be valid only
- for the parameter itself. */
- if (TREE_THIS_NOTRAP (old)
- && (!is_parm (TREE_OPERAND (old, 0))
- || (!id->transform_parameter && is_parm (ptr))))
- TREE_THIS_NOTRAP (*tp) = 1;
- *walk_subtrees = 0;
- return NULL;
- }
- /* Here is the "usual case". Copy this tree node, and then
- tweak some special cases. */
- copy_tree_r (tp, walk_subtrees, NULL);
- /* If EXPR has block defined, map it to newly constructed block.
- When inlining we want EXPRs without block appear in the block
- of function call if we are not remapping a type. */
- if (EXPR_P (*tp))
- {
- new_block = id->remapping_type_depth == 0 ? id->block : NULL;
- if (TREE_BLOCK (*tp))
- {
- tree *n;
- n = id->decl_map->get (TREE_BLOCK (*tp));
- if (n)
- new_block = *n;
- }
- TREE_SET_BLOCK (*tp, new_block);
- }
- if (TREE_CODE (*tp) != OMP_CLAUSE)
- TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
- /* The copied TARGET_EXPR has never been expanded, even if the
- original node was expanded already. */
- if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
- {
- TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
- TREE_OPERAND (*tp, 3) = NULL_TREE;
- }
- /* Variable substitution need not be simple. In particular, the
- INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
- and friends are up-to-date. */
- else if (TREE_CODE (*tp) == ADDR_EXPR)
- {
- int invariant = is_gimple_min_invariant (*tp);
- walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
- /* Handle the case where we substituted an INDIRECT_REF
- into the operand of the ADDR_EXPR. */
- if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
- *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
- else
- recompute_tree_invariant_for_addr_expr (*tp);
- /* If this used to be invariant, but is not any longer,
- then regimplification is probably needed. */
- if (invariant && !is_gimple_min_invariant (*tp))
- id->regimplify = true;
- *walk_subtrees = 0;
- }
- }
- /* Keep iterating. */
- return NULL_TREE;
- }
- /* Helper for remap_gimple_stmt. Given an EH region number for the
- source function, map that to the duplicate EH region number in
- the destination function. */
- static int
- remap_eh_region_nr (int old_nr, copy_body_data *id)
- {
- eh_region old_r, new_r;
- old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
- new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
- return new_r->index;
- }
- /* Similar, but operate on INTEGER_CSTs. */
- static tree
- remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
- {
- int old_nr, new_nr;
- old_nr = tree_to_shwi (old_t_nr);
- new_nr = remap_eh_region_nr (old_nr, id);
- return build_int_cst (integer_type_node, new_nr);
- }
- /* Helper for copy_bb. Remap statement STMT using the inlining
- information in ID. Return the new statement copy. */
- static gimple_seq
- remap_gimple_stmt (gimple stmt, copy_body_data *id)
- {
- gimple copy = NULL;
- struct walk_stmt_info wi;
- bool skip_first = false;
- gimple_seq stmts = NULL;
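- /* Debug statements are dropped entirely when the destination function
- does not use -fvar-tracking-assignments. */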
- if (is_gimple_debug (stmt)
- && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
- return stmts;
- /* Begin by recognizing trees that we'll completely rewrite for the
- inlining context. Our output for these trees is completely
- different from our input (e.g. RETURN_EXPR is deleted, and morphs
- into an edge). Further down, we'll handle trees that get
- duplicated and/or tweaked. */
- /* When requested, GIMPLE_RETURNs should be transformed to just the
- contained GIMPLE_ASSIGN. The branch semantics of the return will
- be handled elsewhere by manipulating the CFG rather than the
- statement. */
- if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
- {
- tree retval = gimple_return_retval (as_a <greturn *> (stmt));
- tree retbnd = gimple_return_retbnd (stmt);
- tree bndslot = id->retbnd;
- if (retbnd && bndslot)
- {
- gimple bndcopy = gimple_build_assign (bndslot, retbnd);
- memset (&wi, 0, sizeof (wi));
- wi.info = id;
- walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
- gimple_seq_add_stmt (&stmts, bndcopy);
- }
- /* If we're returning something, just turn that into an
- assignment into the equivalent of the original RESULT_DECL.
- If RETVAL is just the result decl, the result decl has
- already been set (e.g. a recent "foo (&result_decl, ...)");
- just toss the entire GIMPLE_RETURN. */
- if (retval
- && (TREE_CODE (retval) != RESULT_DECL
- && (TREE_CODE (retval) != SSA_NAME
- || ! SSA_NAME_VAR (retval)
- || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
- {
- copy = gimple_build_assign (id->do_not_unshare
- ? id->retvar : unshare_expr (id->retvar),
- retval);
- /* id->retvar is already substituted. Skip it on later remapping. */
- skip_first = true;
- /* We need to copy bounds if we return a structure with pointers
- into an instrumented function. */
- if (chkp_function_instrumented_p (id->dst_fn)
- && !bndslot
- && !BOUNDED_P (id->retvar)
- && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
- id->assign_stmts.safe_push (copy);
- }
- else
- return stmts;
- }
- else if (gimple_has_substatements (stmt))
- {
- gimple_seq s1, s2;
- /* When cloning bodies from the C++ front end, we will be handed bodies
- in High GIMPLE form. Handle here all the High GIMPLE statements that
- have embedded statements. */
- switch (gimple_code (stmt))
- {
- case GIMPLE_BIND:
- copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
- break;
- case GIMPLE_CATCH:
- {
- gcatch *catch_stmt = as_a <gcatch *> (stmt);
- s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
- copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
- }
- break;
- case GIMPLE_EH_FILTER:
- s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
- copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
- break;
- case GIMPLE_TRY:
- s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
- s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
- copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
- break;
- case GIMPLE_WITH_CLEANUP_EXPR:
- s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
- copy = gimple_build_wce (s1);
- break;
- case GIMPLE_OMP_PARALLEL:
- {
- gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
- s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
- copy = gimple_build_omp_parallel
- (s1,
- gimple_omp_parallel_clauses (omp_par_stmt),
- gimple_omp_parallel_child_fn (omp_par_stmt),
- gimple_omp_parallel_data_arg (omp_par_stmt));
- }
- break;
- case GIMPLE_OMP_TASK:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_task
- (s1,
- gimple_omp_task_clauses (stmt),
- gimple_omp_task_child_fn (stmt),
- gimple_omp_task_data_arg (stmt),
- gimple_omp_task_copy_fn (stmt),
- gimple_omp_task_arg_size (stmt),
- gimple_omp_task_arg_align (stmt));
- break;
- case GIMPLE_OMP_FOR:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
- copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
- gimple_omp_for_clauses (stmt),
- gimple_omp_for_collapse (stmt), s2);
- {
- size_t i;
- for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
- {
- gimple_omp_for_set_index (copy, i,
- gimple_omp_for_index (stmt, i));
- gimple_omp_for_set_initial (copy, i,
- gimple_omp_for_initial (stmt, i));
- gimple_omp_for_set_final (copy, i,
- gimple_omp_for_final (stmt, i));
- gimple_omp_for_set_incr (copy, i,
- gimple_omp_for_incr (stmt, i));
- gimple_omp_for_set_cond (copy, i,
- gimple_omp_for_cond (stmt, i));
- }
- }
- break;
- case GIMPLE_OMP_MASTER:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_master (s1);
- break;
- case GIMPLE_OMP_TASKGROUP:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_taskgroup (s1);
- break;
- case GIMPLE_OMP_ORDERED:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_ordered (s1);
- break;
- case GIMPLE_OMP_SECTION:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_section (s1);
- break;
- case GIMPLE_OMP_SECTIONS:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_sections
- (s1, gimple_omp_sections_clauses (stmt));
- break;
- case GIMPLE_OMP_SINGLE:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_single
- (s1, gimple_omp_single_clauses (stmt));
- break;
- case GIMPLE_OMP_TARGET:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_target
- (s1, gimple_omp_target_kind (stmt),
- gimple_omp_target_clauses (stmt));
- break;
- case GIMPLE_OMP_TEAMS:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_teams
- (s1, gimple_omp_teams_clauses (stmt));
- break;
- case GIMPLE_OMP_CRITICAL:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_critical (s1,
- gimple_omp_critical_name (
- as_a <gomp_critical *> (stmt)));
- break;
- case GIMPLE_TRANSACTION:
- {
- gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
- gtransaction *new_trans_stmt;
- s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
- id);
- copy = new_trans_stmt
- = gimple_build_transaction (
- s1,
- gimple_transaction_label (old_trans_stmt));
- gimple_transaction_set_subcode (
- new_trans_stmt,
- gimple_transaction_subcode (old_trans_stmt));
- }
- break;
- default:
- gcc_unreachable ();
- }
- }
- else
- {
- if (gimple_assign_copy_p (stmt)
- && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
- && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
- {
- /* Here we handle statements that are not completely rewritten.
- First we detect some inlining-induced bogosities for
- discarding. */
- /* Some assignments VAR = VAR; don't generate any rtl code
- and thus don't count as variable modification. Avoid
- keeping bogosities like 0 = 0. */
- tree decl = gimple_assign_lhs (stmt), value;
- tree *n;
- n = id->decl_map->get (decl);
- if (n)
- {
- value = *n;
- STRIP_TYPE_NOPS (value);
- if (TREE_CONSTANT (value) || TREE_READONLY (value))
- return NULL;
- }
- }
- /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
- in a block that we aren't copying during tree_function_versioning,
- just drop the clobber stmt. */
- if (id->blocks_to_copy && gimple_clobber_p (stmt))
- {
- tree lhs = gimple_assign_lhs (stmt);
- if (TREE_CODE (lhs) == MEM_REF
- && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
- {
- gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
- if (gimple_bb (def_stmt)
- && !bitmap_bit_p (id->blocks_to_copy,
- gimple_bb (def_stmt)->index))
- return NULL;
- }
- }
- if (gimple_debug_bind_p (stmt))
- {
- gdebug *copy
- = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
- gimple_debug_bind_get_value (stmt),
- stmt);
- id->debug_stmts.safe_push (copy);
- gimple_seq_add_stmt (&stmts, copy);
- return stmts;
- }
- if (gimple_debug_source_bind_p (stmt))
- {
- gdebug *copy = gimple_build_debug_source_bind
- (gimple_debug_source_bind_get_var (stmt),
- gimple_debug_source_bind_get_value (stmt),
- stmt);
- id->debug_stmts.safe_push (copy);
- gimple_seq_add_stmt (&stmts, copy);
- return stmts;
- }
- /* Create a new deep copy of the statement. */
- copy = gimple_copy (stmt);
- /* Clear flags that need revisiting. */
- if (gcall *call_stmt = dyn_cast <gcall *> (copy))
- {
- if (gimple_call_tail_p (call_stmt))
- gimple_call_set_tail (call_stmt, false);
- if (gimple_call_from_thunk_p (call_stmt))
- gimple_call_set_from_thunk (call_stmt, false);
- }
- /* Remap the region numbers for __builtin_eh_{pointer,filter},
- RESX and EH_DISPATCH. */
- if (id->eh_map)
- switch (gimple_code (copy))
- {
- case GIMPLE_CALL:
- {
- tree r, fndecl = gimple_call_fndecl (copy);
- if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_EH_COPY_VALUES:
- r = gimple_call_arg (copy, 1);
- r = remap_eh_region_tree_nr (r, id);
- gimple_call_set_arg (copy, 1, r);
- /* FALLTHRU */
- case BUILT_IN_EH_POINTER:
- case BUILT_IN_EH_FILTER:
- r = gimple_call_arg (copy, 0);
- r = remap_eh_region_tree_nr (r, id);
- gimple_call_set_arg (copy, 0, r);
- break;
- default:
- break;
- }
- /* Reset alias info if we didn't apply measures to
- keep it valid over inlining by setting DECL_PT_UID. */
- if (!id->src_cfun->gimple_df
- || !id->src_cfun->gimple_df->ipa_pta)
- gimple_call_reset_alias_info (as_a <gcall *> (copy));
- }
- break;
- case GIMPLE_RESX:
- {
- gresx *resx_stmt = as_a <gresx *> (copy);
- int r = gimple_resx_region (resx_stmt);
- r = remap_eh_region_nr (r, id);
- gimple_resx_set_region (resx_stmt, r);
- }
- break;
- case GIMPLE_EH_DISPATCH:
- {
- geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
- int r = gimple_eh_dispatch_region (eh_dispatch);
- r = remap_eh_region_nr (r, id);
- gimple_eh_dispatch_set_region (eh_dispatch, r);
- }
- break;
- default:
- break;
- }
- }
- /* If STMT has a block defined, map it to the newly constructed
- block. */
- if (gimple_block (copy))
- {
- tree *n;
- n = id->decl_map->get (gimple_block (copy));
- gcc_assert (n);
- gimple_set_block (copy, *n);
- }
- if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
- {
- gimple_seq_add_stmt (&stmts, copy);
- return stmts;
- }
- /* Remap all the operands in COPY. */
- memset (&wi, 0, sizeof (wi));
- wi.info = id;
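- /* If SKIP_FIRST is set, the LHS (id->retvar) was substituted already
- when the assignment was built above, so remap only the RHS operand. */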
- if (skip_first)
- walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
- else
- walk_gimple_op (copy, remap_gimple_op_r, &wi);
- /* Clear the copied virtual operands. We are not remapping them here
- but are going to recreate them from scratch. */
- if (gimple_has_mem_ops (copy))
- {
- gimple_set_vdef (copy, NULL_TREE);
- gimple_set_vuse (copy, NULL_TREE);
- }
- gimple_seq_add_stmt (&stmts, copy);
- return stmts;
- }
- /* Copy basic block, scale profile accordingly. Edges will be taken care of
- later */
- static basic_block
- copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
- gcov_type count_scale)
- {
- gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
- basic_block copy_basic_block;
- tree decl;
- gcov_type freq;
- basic_block prev;
- /* Search for previous copied basic block. */
- prev = bb->prev_bb;
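- /* Blocks that were not copied have no aux pointer set; skip over them. */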
- while (!prev->aux)
- prev = prev->prev_bb;
- /* create_basic_block() will append every new block to
- basic_block_info automatically. */
- copy_basic_block = create_basic_block (NULL, (void *) 0,
- (basic_block) prev->aux);
- copy_basic_block->count = apply_scale (bb->count, count_scale);
- /* We are going to rebuild frequencies from scratch. These values
- are of only minor importance for driving canonicalize_loop_headers. */
- freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
- /* We recompute frequencies after inlining, so this is quite safe. */
- if (freq > BB_FREQ_MAX)
- freq = BB_FREQ_MAX;
- copy_basic_block->frequency = freq;
- copy_gsi = gsi_start_bb (copy_basic_block);
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- {
- gimple_seq stmts;
- gimple stmt = gsi_stmt (gsi);
- gimple orig_stmt = stmt;
- gimple_stmt_iterator stmts_gsi;
- bool stmt_added = false;
- id->regimplify = false;
- stmts = remap_gimple_stmt (stmt, id);
- if (gimple_seq_empty_p (stmts))
- continue;
- seq_gsi = copy_gsi;
- for (stmts_gsi = gsi_start (stmts);
- !gsi_end_p (stmts_gsi); )
- {
- stmt = gsi_stmt (stmts_gsi);
- /* Advance iterator now before stmt is moved to seq_gsi. */
- gsi_next (&stmts_gsi);
- if (gimple_nop_p (stmt))
- continue;
- gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
- orig_stmt);
- /* With return slot optimization we can end up with
- non-gimple (foo *)&this->m, fix that here. */
- if (is_gimple_assign (stmt)
- && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
- && !is_gimple_val (gimple_assign_rhs1 (stmt)))
- {
- tree new_rhs;
- new_rhs = force_gimple_operand_gsi (&seq_gsi,
- gimple_assign_rhs1 (stmt),
- true, NULL, false,
- GSI_CONTINUE_LINKING);
- gimple_assign_set_rhs1 (stmt, new_rhs);
- id->regimplify = false;
- }
- gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
- if (id->regimplify)
- gimple_regimplify_operands (stmt, &seq_gsi);
- stmt_added = true;
- }
- if (!stmt_added)
- continue;
- /* If copy_basic_block was empty at the start of this iteration,
- call gsi_start_bb again to get at the newly added statements. */
- if (gsi_end_p (copy_gsi))
- copy_gsi = gsi_start_bb (copy_basic_block);
- else
- gsi_next (&copy_gsi);
- /* Process the new statement. The call to gimple_regimplify_operands
- possibly turned the statement into multiple statements, we
- need to process all of them. */
- do
- {
- tree fn;
- gcall *call_stmt;
- stmt = gsi_stmt (copy_gsi);
- call_stmt = dyn_cast <gcall *> (stmt);
- if (call_stmt
- && gimple_call_va_arg_pack_p (call_stmt)
- && id->call_stmt)
- {
- /* __builtin_va_arg_pack () should be replaced by
- all arguments corresponding to ... in the caller. */
- tree p;
- gcall *new_call;
- vec<tree> argarray;
- size_t nargs = gimple_call_num_args (id->call_stmt);
- size_t n, i, nargs_to_copy;
- bool remove_bounds = false;
- for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
- nargs--;
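- /* NARGS is now the number of arguments passed through '...', i.e. the
- call's actual arguments minus the formally declared parameters. */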
- /* Bounds should be removed from the arg pack in case
- we handle a non-instrumented call in an instrumented
- function. */
- nargs_to_copy = nargs;
- if (gimple_call_with_bounds_p (id->call_stmt)
- && !gimple_call_with_bounds_p (stmt))
- {
- for (i = gimple_call_num_args (id->call_stmt) - nargs;
- i < gimple_call_num_args (id->call_stmt);
- i++)
- if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
- nargs_to_copy--;
- remove_bounds = true;
- }
- /* Create the new array of arguments. */
- n = nargs_to_copy + gimple_call_num_args (call_stmt);
- argarray.create (n);
- argarray.safe_grow_cleared (n);
- /* Copy all the arguments before '...' */
- memcpy (argarray.address (),
- gimple_call_arg_ptr (call_stmt, 0),
- gimple_call_num_args (call_stmt) * sizeof (tree));
- if (remove_bounds)
- {
- /* Append the rest of arguments removing bounds. */
- unsigned cur = gimple_call_num_args (call_stmt);
- for (i = gimple_call_num_args (id->call_stmt) - nargs;
- i < gimple_call_num_args (id->call_stmt);
- i++)
- if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
- argarray[cur++] = gimple_call_arg (id->call_stmt, i);
- gcc_assert (cur == n);
- }
- else
- {
- /* Append the arguments passed in '...' */
- memcpy (argarray.address () + gimple_call_num_args (call_stmt),
- gimple_call_arg_ptr (id->call_stmt, 0)
- + (gimple_call_num_args (id->call_stmt) - nargs),
- nargs * sizeof (tree));
- }
- new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
- argarray);
- argarray.release ();
- /* Copy all GIMPLE_CALL flags, location and block, except
- GF_CALL_VA_ARG_PACK. */
- gimple_call_copy_flags (new_call, call_stmt);
- gimple_call_set_va_arg_pack (new_call, false);
- gimple_set_location (new_call, gimple_location (stmt));
- gimple_set_block (new_call, gimple_block (stmt));
- gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
- gsi_replace (&copy_gsi, new_call, false);
- stmt = new_call;
- }
- else if (call_stmt
- && id->call_stmt
- && (decl = gimple_call_fndecl (stmt))
- && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
- {
- /* __builtin_va_arg_pack_len () should be replaced by
- the number of anonymous arguments. */
- size_t nargs = gimple_call_num_args (id->call_stmt), i;
- tree count, p;
- gimple new_stmt;
- for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
- nargs--;
- /* For instrumented calls we should ignore bounds. */
- for (i = gimple_call_num_args (id->call_stmt) - nargs;
- i < gimple_call_num_args (id->call_stmt);
- i++)
- if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
- nargs--;
- count = build_int_cst (integer_type_node, nargs);
- new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
- gsi_replace (&copy_gsi, new_stmt, false);
- stmt = new_stmt;
- }
- else if (call_stmt
- && id->call_stmt
- && gimple_call_internal_p (stmt)
- && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
- {
- /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
- gsi_remove (&copy_gsi, false);
- continue;
- }
- /* Statements produced by inlining can be unfolded, especially
- when we constant propagated some operands. We can't fold
- them right now for two reasons:
- 1) folding requires SSA_NAME_DEF_STMTs to be correct
- 2) we can't change function calls to builtins.
- So we just mark the statement for later folding. We mark
- all new statements, instead of just the statements that have
- changed by some nontrivial substitution, so even statements made
- foldable indirectly are updated. If this turns out to be
- expensive, copy_body can be told to watch for nontrivial
- changes. */
- if (id->statements_to_fold)
- id->statements_to_fold->add (stmt);
- /* We're duplicating a CALL_EXPR. Find any corresponding
- callgraph edges and update or duplicate them. */
- if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
- {
- struct cgraph_edge *edge;
- switch (id->transform_call_graph_edges)
- {
- case CB_CGE_DUPLICATE:
- edge = id->src_node->get_edge (orig_stmt);
- if (edge)
- {
- int edge_freq = edge->frequency;
- int new_freq;
- struct cgraph_edge *old_edge = edge;
- edge = edge->clone (id->dst_node, call_stmt,
- gimple_uid (stmt),
- REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
- true);
- /* We could also just rescale the frequency, but
- doing so would introduce roundoff errors and make
- the verifier unhappy. */
- new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
- copy_basic_block);
- /* Speculative calls consist of two edges - direct and indirect.
- Duplicate the whole thing and distribute frequencies accordingly. */
- if (edge->speculative)
- {
- struct cgraph_edge *direct, *indirect;
- struct ipa_ref *ref;
- gcc_assert (!edge->indirect_unknown_callee);
- old_edge->speculative_call_info (direct, indirect, ref);
- indirect = indirect->clone (id->dst_node, call_stmt,
- gimple_uid (stmt),
- REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
- true);
- if (old_edge->frequency + indirect->frequency)
- {
- edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
- (old_edge->frequency + indirect->frequency)),
- CGRAPH_FREQ_MAX);
- indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
- (old_edge->frequency + indirect->frequency)),
- CGRAPH_FREQ_MAX);
- }
- id->dst_node->clone_reference (ref, stmt);
- }
- else
- {
- edge->frequency = new_freq;
- if (dump_file
- && profile_status_for_fn (cfun) != PROFILE_ABSENT
- && (edge_freq > edge->frequency + 10
- || edge_freq < edge->frequency - 10))
- {
- fprintf (dump_file, "Edge frequency estimated by "
- "cgraph %i diverge from inliner's estimate %i\n",
- edge_freq,
- edge->frequency);
- fprintf (dump_file,
- "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
- bb->index,
- bb->frequency,
- copy_basic_block->frequency);
- }
- }
- }
- break;
- case CB_CGE_MOVE_CLONES:
- id->dst_node->set_call_stmt_including_clones (orig_stmt,
- call_stmt);
- edge = id->dst_node->get_edge (stmt);
- break;
- case CB_CGE_MOVE:
- edge = id->dst_node->get_edge (orig_stmt);
- if (edge)
- edge->set_call_stmt (call_stmt);
- break;
- default:
- gcc_unreachable ();
- }
- /* Constant propagation on arguments done during inlining
- may create a new direct call. Produce an edge for it. */
- if ((!edge
- || (edge->indirect_inlining_edge
- && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
- && id->dst_node->definition
- && (fn = gimple_call_fndecl (stmt)) != NULL)
- {
- struct cgraph_node *dest = cgraph_node::get (fn);
- /* We have a missing edge in the callgraph. This can happen
- when previous inlining turned an indirect call into a
- direct call by constant propagating arguments, or when we are
- producing a dead clone (for further cloning). In all
- other cases we hit a bug (incorrect node sharing is the
- most common reason for missing edges). */
- gcc_assert (!dest->definition
- || dest->address_taken
- || !id->src_node->definition
- || !id->dst_node->definition);
- if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
- id->dst_node->create_edge_including_clones
- (dest, orig_stmt, call_stmt, bb->count,
- compute_call_stmt_bb_frequency (id->dst_node->decl,
- copy_basic_block),
- CIF_ORIGINALLY_INDIRECT_CALL);
- else
- id->dst_node->create_edge (dest, call_stmt,
- bb->count,
- compute_call_stmt_bb_frequency
- (id->dst_node->decl,
- copy_basic_block))->inline_failed
- = CIF_ORIGINALLY_INDIRECT_CALL;
- if (dump_file)
- {
- fprintf (dump_file, "Created new direct edge to %s\n",
- dest->name ());
- }
- }
- notice_special_calls (as_a <gcall *> (stmt));
- }
- maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
- id->eh_map, id->eh_lp_nr);
- if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
- {
- ssa_op_iter i;
- tree def;
- FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
- if (TREE_CODE (def) == SSA_NAME)
- SSA_NAME_DEF_STMT (def) = stmt;
- }
- gsi_next (&copy_gsi);
- }
- while (!gsi_end_p (copy_gsi));
- copy_gsi = gsi_last_bb (copy_basic_block);
- }
- return copy_basic_block;
- }
- /* Inserting a Single Entry Multiple Exit region in SSA form into code in
- SSA form is quite easy, since the dominator relationship for the old basic
- blocks does not change.
- There is, however, an exception: inlining might change the dominator
- relation across EH edges from basic blocks within the inlined function
- that lead to landing pads in the function we inline into.
- The function fills in the PHI_RESULTs of such PHI nodes if they refer
- to gimple regs. Otherwise, the function marks the PHI_RESULT of such
- PHI nodes for renaming. For non-gimple regs, renaming is safe: the
- EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
- set, and this means that there will be no overlapping live ranges
- for the underlying symbol.
- This might change in the future if we allow redirecting of EH edges and
- we might then want to change the way we build the CFG pre-inlining to
- include all the possible edges. */
- static void
- update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
- bool can_throw, bool nonlocal_goto)
- {
- edge e;
- edge_iterator ei;
- FOR_EACH_EDGE (e, ei, bb->succs)
- if (!e->dest->aux
- || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
- {
- gphi *phi;
- gphi_iterator si;
- if (!nonlocal_goto)
- gcc_assert (e->flags & EDGE_EH);
- if (!can_throw)
- gcc_assert (!(e->flags & EDGE_EH));
- for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
- {
- edge re;
- phi = si.phi ();
- /* For abnormal goto/call edges the receiver can be the
- ENTRY_BLOCK. Do not assert this cannot happen. */
- gcc_assert ((e->flags & EDGE_EH)
- || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
- re = find_edge (ret_bb, e->dest);
- gcc_checking_assert (re);
- gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
- == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
- SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
- USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
- }
- }
- }
- /* Copy edges from BB into its copy constructed earlier, scale profile
- accordingly. Edges will be taken care of later. Assume aux
- pointers to point to the copies of each BB. Return true if any
- debug stmts are left after a statement that must end the basic block. */
- static bool
- copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
- basic_block abnormal_goto_dest)
- {
- basic_block new_bb = (basic_block) bb->aux;
- edge_iterator ei;
- edge old_edge;
- gimple_stmt_iterator si;
- int flags;
- bool need_debug_cleanup = false;
- /* Use the indices from the original blocks to create edges for the
- new ones. */
- FOR_EACH_EDGE (old_edge, ei, bb->succs)
- if (!(old_edge->flags & EDGE_EH))
- {
- edge new_edge;
- flags = old_edge->flags;
- /* Return edges do get a FALLTHRU flag when they get inlined. */
- if (old_edge->dest->index == EXIT_BLOCK
- && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
- && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
- flags |= EDGE_FALLTHRU;
- new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
- new_edge->count = apply_scale (old_edge->count, count_scale);
- new_edge->probability = old_edge->probability;
- }
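- /* The entry and exit blocks contain no statements, so there is nothing
- below to split or to grow EH/abnormal edges from. */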
- if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
- return false;
- for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
- {
- gimple copy_stmt;
- bool can_throw, nonlocal_goto;
- copy_stmt = gsi_stmt (si);
- if (!is_gimple_debug (copy_stmt))
- update_stmt (copy_stmt);
- /* Do this before the possible split_block. */
- gsi_next (&si);
- /* If this tree could throw an exception, there are two
- cases where we need to add abnormal edge(s): the
- tree wasn't in a region and there is a "current
- region" in the caller; or the original tree had
- EH edges. In both cases split the block after the tree,
- and add abnormal edge(s) as needed; we need both
- those from the callee and the caller.
- We check whether the copy can throw, because the const
- propagation can change an INDIRECT_REF which throws
- into a COMPONENT_REF which doesn't. If the copy
- can throw, the original could also throw. */
- can_throw = stmt_can_throw_internal (copy_stmt);
- nonlocal_goto
- = (stmt_can_make_abnormal_goto (copy_stmt)
- && !computed_goto_p (copy_stmt));
- if (can_throw || nonlocal_goto)
- {
- if (!gsi_end_p (si))
- {
- while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
- gsi_next (&si);
- if (gsi_end_p (si))
- need_debug_cleanup = true;
- }
- if (!gsi_end_p (si))
- /* Note that bb's predecessor edges aren't necessarily
- right at this point; split_block doesn't care. */
- {
- edge e = split_block (new_bb, copy_stmt);
- new_bb = e->dest;
- new_bb->aux = e->src->aux;
- si = gsi_start_bb (new_bb);
- }
- }
- if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
- make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
- else if (can_throw)
- make_eh_edges (copy_stmt);
- /* If the call we inline cannot make abnormal goto do not add
- additional abnormal edges but only retain those already present
- in the original function body. */
- if (abnormal_goto_dest == NULL)
- nonlocal_goto = false;
- if (nonlocal_goto)
- {
- basic_block copy_stmt_bb = gimple_bb (copy_stmt);
- if (get_abnormal_succ_dispatcher (copy_stmt_bb))
- nonlocal_goto = false;
- /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
- in OpenMP regions which aren't allowed to be left abnormally.
- So, no need to add abnormal edge in that case. */
- else if (is_gimple_call (copy_stmt)
- && gimple_call_internal_p (copy_stmt)
- && (gimple_call_internal_fn (copy_stmt)
- == IFN_ABNORMAL_DISPATCHER)
- && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
- nonlocal_goto = false;
- else
- make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
- }
- if ((can_throw || nonlocal_goto)
- && gimple_in_ssa_p (cfun))
- update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
- can_throw, nonlocal_goto);
- }
- return need_debug_cleanup;
- }
- /* Copy the PHIs. All blocks and edges are copied, some blocks
- were possibly split and new outgoing EH edges inserted.
- BB points to the block of the original function and AUX pointers link
- the original and newly copied blocks. */
- static void
- copy_phis_for_bb (basic_block bb, copy_body_data *id)
- {
- basic_block const new_bb = (basic_block) bb->aux;
- edge_iterator ei;
- gphi *phi;
- gphi_iterator si;
- edge new_edge;
- bool inserted = false;
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
- {
- tree res, new_res;
- gphi *new_phi;
- phi = si.phi ();
- res = PHI_RESULT (phi);
- new_res = res;
- if (!virtual_operand_p (res))
- {
- walk_tree (&new_res, copy_tree_body_r, id, NULL);
- new_phi = create_phi_node (new_res, new_bb);
- FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
- {
- edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
- tree arg;
- tree new_arg;
- edge_iterator ei2;
- location_t locus;
- /* When doing partial cloning, we allow PHIs on the entry block
- as long as all the arguments are the same. Find any input
- edge to see which argument to copy. */
- if (!old_edge)
- FOR_EACH_EDGE (old_edge, ei2, bb->preds)
- if (!old_edge->src->aux)
- break;
- arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
- new_arg = arg;
- walk_tree (&new_arg, copy_tree_body_r, id, NULL);
- gcc_assert (new_arg);
- /* With return slot optimization we can end up with
- non-gimple (foo *)&this->m, fix that here. */
- if (TREE_CODE (new_arg) != SSA_NAME
- && TREE_CODE (new_arg) != FUNCTION_DECL
- && !is_gimple_val (new_arg))
- {
- gimple_seq stmts = NULL;
- new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
- gsi_insert_seq_on_edge (new_edge, stmts);
- inserted = true;
- }
- locus = gimple_phi_arg_location_from_edge (phi, old_edge);
- if (LOCATION_BLOCK (locus))
- {
- tree *n;
- n = id->decl_map->get (LOCATION_BLOCK (locus));
- gcc_assert (n);
- if (*n)
- locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
- else
- locus = LOCATION_LOCUS (locus);
- }
- else
- locus = LOCATION_LOCUS (locus);
- add_phi_arg (new_phi, new_arg, new_edge, locus);
- }
- }
- }
- /* Commit the delayed edge insertions. */
- if (inserted)
- FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
- gsi_commit_one_edge_insert (new_edge, NULL);
- }
- /* Wrapper for remap_decl so it can be used as a callback. */
- static tree
- remap_decl_1 (tree decl, void *data)
- {
- return remap_decl (decl, (copy_body_data *) data);
- }
- /* Build struct function and associated datastructures for the new clone
- NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
- cfun to the function of new_fndecl (and current_function_decl too). */
- static void
- initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
- {
- struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
- gcov_type count_scale;
- if (!DECL_ARGUMENTS (new_fndecl))
- DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
- if (!DECL_RESULT (new_fndecl))
- DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
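- /* Derive the profile count scale from the requested entry COUNT relative
- to the source function's entry count; REG_BR_PROB_BASE is the identity
- scale used when the source has no profile counts. */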
- if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
- count_scale
- = GCOV_COMPUTE_SCALE (count,
- ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
- else
- count_scale = REG_BR_PROB_BASE;
- /* Register specific tree functions. */
- gimple_register_cfg_hooks ();
- /* Get clean struct function. */
- push_struct_function (new_fndecl);
- /* We will rebuild these, so just sanity check that they are empty. */
- gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
- gcc_assert (cfun->local_decls == NULL);
- gcc_assert (cfun->cfg == NULL);
- gcc_assert (cfun->decl == new_fndecl);
- /* Copy items we preserve during cloning. */
- cfun->static_chain_decl = src_cfun->static_chain_decl;
- cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
- cfun->function_end_locus = src_cfun->function_end_locus;
- cfun->curr_properties = src_cfun->curr_properties;
- cfun->last_verified = src_cfun->last_verified;
- cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
- cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
- cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
- cfun->stdarg = src_cfun->stdarg;
- cfun->after_inlining = src_cfun->after_inlining;
- cfun->can_throw_non_call_exceptions
- = src_cfun->can_throw_non_call_exceptions;
- cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
- cfun->returns_struct = src_cfun->returns_struct;
- cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
- init_empty_tree_cfg ();
- profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
- ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
- (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
- REG_BR_PROB_BASE);
- ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
- = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
- EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
- (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
- REG_BR_PROB_BASE);
- EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
- EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
- if (src_cfun->eh)
- init_eh_for_function ();
- if (src_cfun->gimple_df)
- {
- init_tree_ssa (cfun);
- cfun->gimple_df->in_ssa_p = true;
- init_ssa_operands (cfun);
- }
- }
- /* Helper function for copy_cfg_body. Move debug stmts from the end
- of NEW_BB to the beginning of successor basic blocks when needed. If the
- successor has multiple predecessors, reset them, otherwise keep
- their value. */
- static void
- maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
- {
- edge e;
- edge_iterator ei;
- gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
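- /* Only act when the last real statement can throw or make an abnormal
- goto and is followed by trailing debug statements. */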
- if (gsi_end_p (si)
- || gsi_one_before_end_p (si)
- || !(stmt_can_throw_internal (gsi_stmt (si))
- || stmt_can_make_abnormal_goto (gsi_stmt (si))))
- return;
- FOR_EACH_EDGE (e, ei, new_bb->succs)
- {
- gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
- gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
- while (is_gimple_debug (gsi_stmt (ssi)))
- {
- gimple stmt = gsi_stmt (ssi);
- gdebug *new_stmt;
- tree var;
- tree value;
- /* For the last edge move the debug stmts instead of copying
- them. */
- if (ei_one_before_end_p (ei))
- {
- si = ssi;
- gsi_prev (&ssi);
- if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
- gimple_debug_bind_reset_value (stmt);
- gsi_remove (&si, false);
- gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
- continue;
- }
- if (gimple_debug_bind_p (stmt))
- {
- var = gimple_debug_bind_get_var (stmt);
- if (single_pred_p (e->dest))
- {
- value = gimple_debug_bind_get_value (stmt);
- value = unshare_expr (value);
- }
- else
- value = NULL_TREE;
- new_stmt = gimple_build_debug_bind (var, value, stmt);
- }
- else if (gimple_debug_source_bind_p (stmt))
- {
- var = gimple_debug_source_bind_get_var (stmt);
- value = gimple_debug_source_bind_get_value (stmt);
- new_stmt = gimple_build_debug_source_bind (var, value, stmt);
- }
- else
- gcc_unreachable ();
- gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
- id->debug_stmts.safe_push (new_stmt);
- gsi_prev (&ssi);
- }
- }
- }
- /* Make a copy of the sub-loops of SRC_PARENT and place them
- as siblings of DEST_PARENT. */
- static void
- copy_loops (copy_body_data *id,
- struct loop *dest_parent, struct loop *src_parent)
- {
- struct loop *src_loop = src_parent->inner;
- while (src_loop)
- {
- if (!id->blocks_to_copy
- || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
- {
- struct loop *dest_loop = alloc_loop ();
- /* Assign the new loop its header and latch and associate
- those with the new loop. */
- dest_loop->header = (basic_block)src_loop->header->aux;
- dest_loop->header->loop_father = dest_loop;
- if (src_loop->latch != NULL)
- {
- dest_loop->latch = (basic_block)src_loop->latch->aux;
- dest_loop->latch->loop_father = dest_loop;
- }
- /* Copy loop meta-data. */
- copy_loop_info (src_loop, dest_loop);
- /* Finally place it into the loop array and the loop tree. */
- place_new_loop (cfun, dest_loop);
- flow_loop_tree_node_add (dest_parent, dest_loop);
- dest_loop->safelen = src_loop->safelen;
- dest_loop->dont_vectorize = src_loop->dont_vectorize;
- if (src_loop->force_vectorize)
- {
- dest_loop->force_vectorize = true;
- cfun->has_force_vectorize_loops = true;
- }
- if (src_loop->simduid)
- {
- dest_loop->simduid = remap_decl (src_loop->simduid, id);
- cfun->has_simduid_loops = true;
- }
- /* Recurse. */
- copy_loops (id, dest_loop, src_loop);
- }
- src_loop = src_loop->next;
- }
- }
- /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
- void
- redirect_all_calls (copy_body_data * id, basic_block bb)
- {
- gimple_stmt_iterator si;
- gimple last = last_stmt (bb);
- for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
- {
- gimple stmt = gsi_stmt (si);
- if (is_gimple_call (stmt))
- {
- struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
- if (edge)
- {
- edge->redirect_call_stmt_to_callee ();
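- /* If redirecting the last call statement removed its EH information,
- purge the now-dead EH edges from this block. */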
- if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
- gimple_purge_dead_eh_edges (bb);
- }
- }
- }
- }
- /* Convert estimated frequencies into counts for NODE, scaling COUNT
- with each bb's frequency. Used when NODE has a 0-weight entry
- but we are about to inline it into a non-zero count call bb.
- See the comments for handle_missing_profiles() in predict.c for
- when this can happen for COMDATs. */
- void
- freqs_to_counts (struct cgraph_node *node, gcov_type count)
- {
- basic_block bb;
- edge_iterator ei;
- edge e;
- struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
- FOR_ALL_BB_FN(bb, fn)
- {
- bb->count = apply_scale (count,
- GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
- FOR_EACH_EDGE (e, ei, bb->succs)
- e->count = apply_probability (e->src->count, e->probability);
- }
- }
- /* Make a copy of the body of FN so that it can be inserted inline in
- another function. Walks FN via CFG, returns new fndecl. */
- static tree
- copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
- basic_block entry_block_map, basic_block exit_block_map,
- basic_block new_entry)
- {
- tree callee_fndecl = id->src_fn;
- /* Original cfun for the callee, doesn't change. */
- struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
- struct function *cfun_to_copy;
- basic_block bb;
- tree new_fndecl = NULL;
- bool need_debug_cleanup = false;
- gcov_type count_scale;
- int last;
- int incoming_frequency = 0;
- gcov_type incoming_count = 0;
- /* This can happen for COMDAT routines that end up with 0 counts
- despite being called (see the comments for handle_missing_profiles()
- in predict.c as to why). Apply counts to the blocks in the callee
- before inlining, using the guessed edge frequencies, so that we don't
- end up with a 0-count inline body which can confuse downstream
- optimizations such as function splitting. */
- if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
- {
- /* Apply the larger of the call bb count and the total incoming
- call edge count to the callee. */
- gcov_type in_count = 0;
- struct cgraph_edge *in_edge;
- for (in_edge = id->src_node->callers; in_edge;
- in_edge = in_edge->next_caller)
- in_count += in_edge->count;
- freqs_to_counts (id->src_node, count > in_count ? count : in_count);
- }
- if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
- count_scale
- = GCOV_COMPUTE_SCALE (count,
- ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
- else
- count_scale = REG_BR_PROB_BASE;
- /* Register specific tree functions. */
- gimple_register_cfg_hooks ();
- /* If we are inlining just a region of the function, make sure to connect
- new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
- part of loop, we must compute frequency and probability of
- ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
- probabilities of edges incoming from nonduplicated region. */
- if (new_entry)
- {
- edge e;
- edge_iterator ei;
- FOR_EACH_EDGE (e, ei, new_entry->preds)
- if (!e->src->aux)
- {
- incoming_frequency += EDGE_FREQUENCY (e);
- incoming_count += e->count;
- }
- incoming_count = apply_scale (incoming_count, count_scale);
- incoming_frequency
- = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
- ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
- ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
- }
- /* Must have a CFG here at this point. */
- gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
- (DECL_STRUCT_FUNCTION (callee_fndecl)));
- cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
- ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
- EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
- entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
- exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
- /* Duplicate any exception-handling regions. */
- if (cfun->eh)
- id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
- remap_decl_1, id);
- /* Use aux pointers to map the original blocks to copy. */
- FOR_EACH_BB_FN (bb, cfun_to_copy)
- if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
- {
- basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
- bb->aux = new_bb;
- new_bb->aux = bb;
- new_bb->loop_father = entry_block_map->loop_father;
- }
- last = last_basic_block_for_fn (cfun);
- /* Now that we've duplicated the blocks, duplicate their edges. */
- basic_block abnormal_goto_dest = NULL;
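- /* If the call being inlined can itself make an abnormal goto and is the
- last statement of its block, abnormal edges from the copied body must
- target the caller's abnormal-goto dispatcher block. */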
- if (id->call_stmt
- && stmt_can_make_abnormal_goto (id->call_stmt))
- {
- gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
- bb = gimple_bb (id->call_stmt);
- gsi_next (&gsi);
- if (gsi_end_p (gsi))
- abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
- }
- FOR_ALL_BB_FN (bb, cfun_to_copy)
- if (!id->blocks_to_copy
- || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
- need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
- abnormal_goto_dest);
- if (new_entry)
- {
- edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
- e->probability = REG_BR_PROB_BASE;
- e->count = incoming_count;
- }
- /* Duplicate the loop tree, if available and wanted. */
- if (loops_for_fn (src_cfun) != NULL
- && current_loops != NULL)
- {
- copy_loops (id, entry_block_map->loop_father,
- get_loop (src_cfun, 0));
- /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
- loops_state_set (LOOPS_NEED_FIXUP);
- }
- /* If the loop tree in the source function needed fixup, mark the
- destination loop tree for fixup, too. */
- if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
- loops_state_set (LOOPS_NEED_FIXUP);
- if (gimple_in_ssa_p (cfun))
- FOR_ALL_BB_FN (bb, cfun_to_copy)
- if (!id->blocks_to_copy
- || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
- copy_phis_for_bb (bb, id);
- FOR_ALL_BB_FN (bb, cfun_to_copy)
- if (bb->aux)
- {
- if (need_debug_cleanup
- && bb->index != ENTRY_BLOCK
- && bb->index != EXIT_BLOCK)
- maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
- /* Update call edge destinations. This cannot be done before loop
- info is updated, because we may split basic blocks. */
- if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
- && bb->index != ENTRY_BLOCK
- && bb->index != EXIT_BLOCK)
- redirect_all_calls (id, (basic_block)bb->aux);
- ((basic_block)bb->aux)->aux = NULL;
- bb->aux = NULL;
- }
- /* Zero out AUX fields of blocks newly created during EH edge
- insertion. */
- for (; last < last_basic_block_for_fn (cfun); last++)
- {
- if (need_debug_cleanup)
- maybe_move_debug_stmts_to_successors (id,
- BASIC_BLOCK_FOR_FN (cfun, last));
- BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
- /* Update call edge destinations. This cannot be done before loop
- info is updated, because we may split basic blocks. */
- if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
- redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
- }
- entry_block_map->aux = NULL;
- exit_block_map->aux = NULL;
- if (id->eh_map)
- {
- delete id->eh_map;
- id->eh_map = NULL;
- }
- if (id->dependence_map)
- {
- delete id->dependence_map;
- id->dependence_map = NULL;
- }
- return new_fndecl;
- }
- /* Copy the debug STMT using ID. We deal with these statements in a
- special way: if any variable in their VALUE expression wasn't
- remapped yet, we won't remap it, because that would get decl uids
- out of sync, causing codegen differences between -g and -g0. If
- this arises, we drop the VALUE expression altogether. */
- static void
- copy_debug_stmt (gdebug *stmt, copy_body_data *id)
- {
- tree t, *n;
- struct walk_stmt_info wi;
- if (gimple_block (stmt))
- {
- n = id->decl_map->get (gimple_block (stmt));
- gimple_set_block (stmt, n ? *n : id->block);
- }
- /* Remap all the operands in COPY. */
- memset (&wi, 0, sizeof (wi));
- wi.info = id;
- processing_debug_stmt = 1;
- if (gimple_debug_source_bind_p (stmt))
- t = gimple_debug_source_bind_get_var (stmt);
- else
- t = gimple_debug_bind_get_var (stmt);
- if (TREE_CODE (t) == PARM_DECL && id->debug_map
- && (n = id->debug_map->get (t)))
- {
- gcc_assert (TREE_CODE (*n) == VAR_DECL);
- t = *n;
- }
- else if (TREE_CODE (t) == VAR_DECL
- && !is_global_var (t)
- && !id->decl_map->get (t))
- /* T is a non-localized variable. */;
- else
- walk_tree (&t, remap_gimple_op_r, &wi, NULL);
- if (gimple_debug_bind_p (stmt))
- {
- gimple_debug_bind_set_var (stmt, t);
- if (gimple_debug_bind_has_value_p (stmt))
- walk_tree (gimple_debug_bind_get_value_ptr (stmt),
- remap_gimple_op_r, &wi, NULL);
- /* Punt if any decl couldn't be remapped. */
- if (processing_debug_stmt < 0)
- gimple_debug_bind_reset_value (stmt);
- }
- else if (gimple_debug_source_bind_p (stmt))
- {
- gimple_debug_source_bind_set_var (stmt, t);
- walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
- remap_gimple_op_r, &wi, NULL);
- /* When inlining and source bind refers to one of the optimized
- away parameters, change the source bind into a normal debug bind
- referring to the corresponding DEBUG_EXPR_DECL that should have
- been bound before the call stmt. */
- t = gimple_debug_source_bind_get_value (stmt);
- if (t != NULL_TREE
- && TREE_CODE (t) == PARM_DECL
- && id->call_stmt)
- {
- vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
- unsigned int i;
- if (debug_args != NULL)
- {
- for (i = 0; i < vec_safe_length (*debug_args); i += 2)
- if ((**debug_args)[i] == DECL_ORIGIN (t)
- && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
- {
- t = (**debug_args)[i + 1];
- stmt->subcode = GIMPLE_DEBUG_BIND;
- gimple_debug_bind_set_value (stmt, t);
- break;
- }
- }
- }
- }
- processing_debug_stmt = 0;
- update_stmt (stmt);
- }
- /* Process deferred debug stmts. In order to give values better odds
- of being successfully remapped, we delay the processing of debug
- stmts until all other stmts that might require remapping are
- processed. */
- static void
- copy_debug_stmts (copy_body_data *id)
- {
- size_t i;
- gdebug *stmt;
- if (!id->debug_stmts.exists ())
- return;
- FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
- copy_debug_stmt (stmt, id);
- id->debug_stmts.release ();
- }
- /* Make a copy of the body of SRC_FN so that it can be inserted inline in
- another function. */
- static tree
- copy_tree_body (copy_body_data *id)
- {
- tree fndecl = id->src_fn;
- tree body = DECL_SAVED_TREE (fndecl);
- walk_tree (&body, copy_tree_body_r, id, NULL);
- return body;
- }
- /* Make a copy of the body of FN so that it can be inserted inline in
- another function. */
- static tree
- copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
- basic_block entry_block_map, basic_block exit_block_map,
- basic_block new_entry)
- {
- tree fndecl = id->src_fn;
- tree body;
- /* If this body has a CFG, walk CFG and copy. */
- gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
- body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
- new_entry);
- copy_debug_stmts (id);
- return body;
- }
- /* Return true if VALUE is an ADDR_EXPR of an automatic variable
- defined in function FN, or of a data member thereof. */
- static bool
- self_inlining_addr_expr (tree value, tree fn)
- {
- tree var;
- if (TREE_CODE (value) != ADDR_EXPR)
- return false;
- var = get_base_address (TREE_OPERAND (value, 0));
- return var && auto_var_in_fn_p (var, fn);
- }
- /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
- lexical block and line number information from base_stmt, if given,
- or from the last stmt of the block otherwise. */
- static gimple
- insert_init_debug_bind (copy_body_data *id,
- basic_block bb, tree var, tree value,
- gimple base_stmt)
- {
- gimple note;
- gimple_stmt_iterator gsi;
- tree tracked_var;
- if (!gimple_in_ssa_p (id->src_cfun))
- return NULL;
- if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
- return NULL;
- tracked_var = target_for_debug_bind (var);
- if (!tracked_var)
- return NULL;
- if (bb)
- {
- gsi = gsi_last_bb (bb);
- if (!base_stmt && !gsi_end_p (gsi))
- base_stmt = gsi_stmt (gsi);
- }
- note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
- if (bb)
- {
- if (!gsi_end_p (gsi))
- gsi_insert_after (&gsi, note, GSI_SAME_STMT);
- else
- gsi_insert_before (&gsi, note, GSI_SAME_STMT);
- }
- return note;
- }
- static void
- insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
- {
- /* If VAR represents a zero-sized variable, it's possible that the
- assignment statement may result in no gimple statements. */
- if (init_stmt)
- {
- gimple_stmt_iterator si = gsi_last_bb (bb);
- /* We can end up with init statements that store to a non-register
- from a rhs with a conversion. Handle that here by forcing the
- rhs into a temporary. gimple_regimplify_operands is not
- prepared to do this for us. */
- if (!is_gimple_debug (init_stmt)
- && !is_gimple_reg (gimple_assign_lhs (init_stmt))
- && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
- && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
- {
- tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
- gimple_expr_type (init_stmt),
- gimple_assign_rhs1 (init_stmt));
- rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
- GSI_NEW_STMT);
- gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
- gimple_assign_set_rhs1 (init_stmt, rhs);
- }
- gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
- gimple_regimplify_operands (init_stmt, &si);
- if (!is_gimple_debug (init_stmt))
- {
- tree def = gimple_assign_lhs (init_stmt);
- insert_init_debug_bind (id, bb, def, def, init_stmt);
- }
- }
- }
- /* Initialize parameter P with VALUE. If needed, produce init statement
- at the end of BB. When BB is NULL, we return init statement to be
- output later. */
- static gimple
- setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
- basic_block bb, tree *vars)
- {
- gimple init_stmt = NULL;
- tree var;
- tree rhs = value;
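- /* DEF is the SSA default definition of the parameter in the source
- function, if we are in SSA form; NULL otherwise. */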
- tree def = (gimple_in_ssa_p (cfun)
- ? ssa_default_def (id->src_cfun, p) : NULL);
- if (value
- && value != error_mark_node
- && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
- {
- /* If we can match up types by promotion/demotion do so. */
- if (fold_convertible_p (TREE_TYPE (p), value))
- rhs = fold_convert (TREE_TYPE (p), value);
- else
- {
- /* ??? For valid programs we should not end up here.
- Still if we end up with truly mismatched types here, fall back
- to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
- GIMPLE to the following passes. */
- if (!is_gimple_reg_type (TREE_TYPE (value))
- || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
- rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
- else
- rhs = build_zero_cst (TREE_TYPE (p));
- }
- }
- /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
- here since the type of this decl must be visible to the calling
- function. */
- var = copy_decl_to_var (p, id);
- /* Declare this new variable. */
- DECL_CHAIN (var) = *vars;
- *vars = var;
- /* Make gimplifier happy about this variable. */
- DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
- /* If the parameter is never assigned to and has no SSA_NAMEs created,
- we would not need to create a new variable here at all, if it
- weren't for debug info. Still, we can just use the argument
- value. */
- if (TREE_READONLY (p)
- && !TREE_ADDRESSABLE (p)
- && value && !TREE_SIDE_EFFECTS (value)
- && !def)
- {
- /* We may produce non-gimple trees by adding NOPs or introduce
- invalid sharing when the operand is not really constant.
- It is not a big deal to prohibit constant propagation here as
- we will constant propagate in the DOM1 pass anyway. */
- if (is_gimple_min_invariant (value)
- && useless_type_conversion_p (TREE_TYPE (p),
- TREE_TYPE (value))
- /* We have to be very careful about ADDR_EXPR. Make sure
- the base variable isn't a local variable of the inlined
- function, e.g., when doing recursive inlining, direct or
- mutually-recursive or whatever, which is why we don't
- just test whether fn == current_function_decl. */
- && ! self_inlining_addr_expr (value, fn))
- {
- insert_decl_map (id, p, value);
- insert_debug_decl_map (id, p, var);
- return insert_init_debug_bind (id, bb, var, value, NULL);
- }
- }
- /* Register the VAR_DECL as the equivalent for the PARM_DECL;
- that way, when the PARM_DECL is encountered, it will be
- automatically replaced by the VAR_DECL. */
- insert_decl_map (id, p, var);
- /* Even if P was TREE_READONLY, the new VAR should not be.
- In the original code, we would have constructed a
- temporary, and then the function body would have never
- changed the value of P. However, now, we will be
- constructing VAR directly. The constructor body may
- change its value multiple times as it is being
- constructed. Therefore, it must not be TREE_READONLY;
- the back-end assumes that TREE_READONLY variable is
- assigned to only once. */
- if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
- TREE_READONLY (var) = 0;
- /* If there is no setup required and we are in SSA, take the easy route
- replacing all SSA names representing the function parameter by the
- SSA name passed to the function.
- We need to construct a map for the variable anyway, as it might be
- used in different SSA names when the parameter is set in the function.
- Do the replacement at -O0 for const arguments replaced by a constant;
- this is important for builtin_constant_p and other constructs requiring
- a constant argument to be visible in the inlined function body. */
- if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
- && (optimize
- || (TREE_READONLY (p)
- && is_gimple_min_invariant (rhs)))
- && (TREE_CODE (rhs) == SSA_NAME
- || is_gimple_min_invariant (rhs))
- && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
- {
- insert_decl_map (id, def, rhs);
- return insert_init_debug_bind (id, bb, var, rhs, NULL);
- }
- /* If the value of argument is never used, don't care about initializing
- it. */
- if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
- {
- gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
- return insert_init_debug_bind (id, bb, var, rhs, NULL);
- }
- /* Initialize this VAR_DECL from the equivalent argument. Convert
- the argument to the proper type in case it was promoted. */
- if (value)
- {
- if (rhs == error_mark_node)
- {
- insert_decl_map (id, p, var);
- return insert_init_debug_bind (id, bb, var, rhs, NULL);
- }
- STRIP_USELESS_TYPE_CONVERSION (rhs);
- /* If we are in SSA form, properly remap the default definition,
- or assign to a dummy SSA name if the parameter is unused and
- we are not optimizing. */
- if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
- {
- if (def)
- {
- def = remap_ssa_name (def, id);
- init_stmt = gimple_build_assign (def, rhs);
- SSA_NAME_IS_DEFAULT_DEF (def) = 0;
- set_ssa_default_def (cfun, var, NULL);
- }
- else if (!optimize)
- {
- def = make_ssa_name (var);
- init_stmt = gimple_build_assign (def, rhs);
- }
- }
- else
- init_stmt = gimple_build_assign (var, rhs);
- if (bb && init_stmt)
- insert_init_stmt (id, bb, init_stmt);
- }
- return init_stmt;
- }
- /* Generate code to initialize the parameters of the function at the
- top of the stack in ID from the GIMPLE_CALL STMT. */
- static void
- initialize_inlined_parameters (copy_body_data *id, gimple stmt,
- tree fn, basic_block bb)
- {
- tree parms;
- size_t i;
- tree p;
- tree vars = NULL_TREE;
- tree static_chain = gimple_call_chain (stmt);
- /* Figure out what the parameters are. */
- parms = DECL_ARGUMENTS (fn);
- /* Loop through the parameter declarations, replacing each with an
- equivalent VAR_DECL, appropriately initialized. */
- for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
- {
- tree val;
- val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
- setup_one_parameter (id, p, val, fn, bb, &vars);
- }
- /* After remapping parameters remap their types. This has to be done
- in a second loop over all parameters to appropriately remap
- variable sized arrays when the size is specified in a
- parameter following the array. */
- for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
- {
- tree *varp = id->decl_map->get (p);
- if (varp
- && TREE_CODE (*varp) == VAR_DECL)
- {
- tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
- ? ssa_default_def (id->src_cfun, p) : NULL);
- tree var = *varp;
- TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
- /* Also remap the default definition if it was remapped
- to the default definition of the parameter replacement
- by the parameter setup. */
- if (def)
- {
- tree *defp = id->decl_map->get (def);
- if (defp
- && TREE_CODE (*defp) == SSA_NAME
- && SSA_NAME_VAR (*defp) == var)
- TREE_TYPE (*defp) = TREE_TYPE (var);
- }
- }
- }
- /* Initialize the static chain. */
- p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
- gcc_assert (fn != current_function_decl);
- if (p)
- {
- /* No static chain? Seems like a bug in tree-nested.c. */
- gcc_assert (static_chain);
- setup_one_parameter (id, p, static_chain, fn, bb, &vars);
- }
- declare_inline_vars (id->block, vars);
- }
- /* Declare a return variable to replace the RESULT_DECL for the
- function we are calling. An appropriate DECL_STMT is returned.
- The USE_STMT is filled to contain a use of the declaration to
- indicate the return value of the function.
- RETURN_SLOT, if non-null, is the place in which to store the result. It
- is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
- was the LHS of the MODIFY_EXPR to which this call is the RHS.
- RETURN_BOUNDS holds a destination for returned bounds.
- The return value is a (possibly null) value that holds the result
- as seen by the caller. */
- static tree
- declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
- tree return_bounds, basic_block entry_bb)
- {
- tree callee = id->src_fn;
- tree result = DECL_RESULT (callee);
- tree callee_type = TREE_TYPE (result);
- tree caller_type;
- tree var, use;
- /* Handle type-mismatches in the function declaration return type
- vs. the call expression. */
- if (modify_dest)
- caller_type = TREE_TYPE (modify_dest);
- else
- caller_type = TREE_TYPE (TREE_TYPE (callee));
- /* We don't need to do anything for functions that don't return anything. */
- if (VOID_TYPE_P (callee_type))
- return NULL_TREE;
- /* If there was a return slot, then the return value is the
- dereferenced address of that object. */
- if (return_slot)
- {
- /* The front end shouldn't have used both return_slot and
- a modify expression. */
- gcc_assert (!modify_dest);
- if (DECL_BY_REFERENCE (result))
- {
- tree return_slot_addr = build_fold_addr_expr (return_slot);
- STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
- /* We are going to construct *&return_slot and we can't do that
- for variables believed to be not addressable.
- FIXME: This check can possibly match, because values returned
- via return slot optimization are not believed to have their
- address taken by alias analysis. */
- gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
- var = return_slot_addr;
- }
- else
- {
- var = return_slot;
- gcc_assert (TREE_CODE (var) != SSA_NAME);
- if (TREE_ADDRESSABLE (result))
- mark_addressable (var);
- }
- if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
- || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
- && !DECL_GIMPLE_REG_P (result)
- && DECL_P (var))
- DECL_GIMPLE_REG_P (var) = 0;
- use = NULL;
- goto done;
- }
- /* All types requiring non-trivial constructors should have been handled. */
- gcc_assert (!TREE_ADDRESSABLE (callee_type));
- /* Attempt to avoid creating a new temporary variable. */
- if (modify_dest
- && TREE_CODE (modify_dest) != SSA_NAME)
- {
- bool use_it = false;
- /* We can't use MODIFY_DEST if there's type promotion involved. */
- if (!useless_type_conversion_p (callee_type, caller_type))
- use_it = false;
- /* ??? If we're assigning to a variable sized type, then we must
- reuse the destination variable, because we've no good way to
- create variable sized temporaries at this point. */
- else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
- use_it = true;
- /* If the callee cannot possibly modify MODIFY_DEST, then we can
- reuse it as the result of the call directly. Don't do this if
- it would promote MODIFY_DEST to addressable. */
- else if (TREE_ADDRESSABLE (result))
- use_it = false;
- else
- {
- tree base_m = get_base_address (modify_dest);
- /* If the base isn't a decl, then it's a pointer, and we don't
- know where that's going to go. */
- if (!DECL_P (base_m))
- use_it = false;
- else if (is_global_var (base_m))
- use_it = false;
- else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
- || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
- && !DECL_GIMPLE_REG_P (result)
- && DECL_GIMPLE_REG_P (base_m))
- use_it = false;
- else if (!TREE_ADDRESSABLE (base_m))
- use_it = true;
- }
- if (use_it)
- {
- var = modify_dest;
- use = NULL;
- goto done;
- }
- }
- gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
- var = copy_result_decl_to_var (result, id);
- DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
- /* Do not have the rest of GCC warn about this variable as it should
- not be visible to the user. */
- TREE_NO_WARNING (var) = 1;
- declare_inline_vars (id->block, var);
- /* Build the use expr. If the return type of the function was
- promoted, convert it back to the expected type. */
- use = var;
- if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
- {
- /* If we can match up types by promotion/demotion do so. */
- if (fold_convertible_p (caller_type, var))
- use = fold_convert (caller_type, var);
- else
- {
- /* ??? For valid programs we should not end up here.
- Still if we end up with truly mismatched types here, fall back
- to using a MEM_REF to not leak invalid GIMPLE to the following
- passes. */
- /* Prevent var from being written into SSA form. */
- if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
- || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
- DECL_GIMPLE_REG_P (var) = false;
- else if (is_gimple_reg_type (TREE_TYPE (var)))
- TREE_ADDRESSABLE (var) = true;
- use = fold_build2 (MEM_REF, caller_type,
- build_fold_addr_expr (var),
- build_int_cst (ptr_type_node, 0));
- }
- }
- STRIP_USELESS_TYPE_CONVERSION (use);
- if (DECL_BY_REFERENCE (result))
- {
- TREE_ADDRESSABLE (var) = 1;
- var = build_fold_addr_expr (var);
- }
- done:
- /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
- way, when the RESULT_DECL is encountered, it will be
- automatically replaced by the VAR_DECL.
- When returning by reference, ensure that RESULT_DECL remaps to
- gimple_val. */
- if (DECL_BY_REFERENCE (result)
- && !is_gimple_val (var))
- {
- tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
- insert_decl_map (id, result, temp);
- /* When RESULT_DECL is in SSA form, we need to remap and initialize
- its default_def SSA_NAME. */
- if (gimple_in_ssa_p (id->src_cfun)
- && is_gimple_reg (result))
- {
- temp = make_ssa_name (temp);
- insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
- }
- insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
- }
- else
- insert_decl_map (id, result, var);
- /* Remember this so we can ignore it in remap_decls. */
- id->retvar = var;
- /* If returned bounds are used, then make var for them. */
- if (return_bounds)
- {
- tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
- DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
- TREE_NO_WARNING (bndtemp) = 1;
- declare_inline_vars (id->block, bndtemp);
- id->retbnd = bndtemp;
- insert_init_stmt (id, entry_bb,
- gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
- }
- return use;
- }
- /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
- to a local label. */
- static tree
- has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
- {
- tree node = *nodep;
- tree fn = (tree) fnp;
- if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
- return node;
- if (TYPE_P (node))
- *walk_subtrees = 0;
- return NULL_TREE;
- }
- /* Determine if the function can be copied. If so return NULL. If
- not return a string describing the reason for failure. */
- const char *
- copy_forbidden (struct function *fun, tree fndecl)
- {
- const char *reason = fun->cannot_be_copied_reason;
- tree decl;
- unsigned ix;
- /* Only examine the function once. */
- if (fun->cannot_be_copied_set)
- return reason;
- /* We cannot copy a function that receives a non-local goto
- because we cannot remap the destination label used in the
- function that is performing the non-local goto. */
- /* ??? Actually, this should be possible, if we work at it.
- No doubt there's just a handful of places that simply
- assume it doesn't happen and don't substitute properly. */
- if (fun->has_nonlocal_label)
- {
- reason = G_("function %q+F can never be copied "
- "because it receives a non-local goto");
- goto fail;
- }
- FOR_EACH_LOCAL_DECL (fun, ix, decl)
- if (TREE_CODE (decl) == VAR_DECL
- && TREE_STATIC (decl)
- && !DECL_EXTERNAL (decl)
- && DECL_INITIAL (decl)
- && walk_tree_without_duplicates (&DECL_INITIAL (decl),
- has_label_address_in_static_1,
- fndecl))
- {
- reason = G_("function %q+F can never be copied because it saves "
- "address of local label in a static variable");
- goto fail;
- }
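- /* Cache the (possibly NULL) reason so that the function is scanned
- only once. */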
- fail:
- fun->cannot_be_copied_reason = reason;
- fun->cannot_be_copied_set = true;
- return reason;
- }
- static const char *inline_forbidden_reason;
- /* A callback for walk_gimple_seq to handle statements. Returns non-null
- iff a function cannot be inlined. Also sets the reason why. */
- static tree
- inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
- struct walk_stmt_info *wip)
- {
- tree fn = (tree) wip->info;
- tree t;
- gimple stmt = gsi_stmt (*gsi);
- switch (gimple_code (stmt))
- {
- case GIMPLE_CALL:
- /* Refuse to inline an alloca call unless the user explicitly forced it, as
- this may drastically change the program's memory overhead when the
- function using alloca is called in a loop. In the GCC present in
- SPEC2000, inlining into schedule_block caused it to require 2GB of
- RAM instead of 256MB. Don't do so for alloca calls emitted for
- VLA objects, as those can't cause unbounded growth (they're always
- wrapped inside stack_save/stack_restore regions). */
- if (gimple_alloca_call_p (stmt)
- && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
- && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
- {
- inline_forbidden_reason
- = G_("function %q+F can never be inlined because it uses "
- "alloca (override using the always_inline attribute)");
- *handled_ops_p = true;
- return fn;
- }
- t = gimple_call_fndecl (stmt);
- if (t == NULL_TREE)
- break;
- /* We cannot inline functions that call setjmp. */
- if (setjmp_call_p (t))
- {
- inline_forbidden_reason
- = G_("function %q+F can never be inlined because it uses setjmp");
- *handled_ops_p = true;
- return t;
- }
- if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
- switch (DECL_FUNCTION_CODE (t))
- {
- /* We cannot inline functions that take a variable number of
- arguments. */
- case BUILT_IN_VA_START:
- case BUILT_IN_NEXT_ARG:
- case BUILT_IN_VA_END:
- inline_forbidden_reason
- = G_("function %q+F can never be inlined because it "
- "uses variable argument lists");
- *handled_ops_p = true;
- return t;
- case BUILT_IN_LONGJMP:
- /* We can't inline functions that call __builtin_longjmp at
- all. The non-local goto machinery really requires the
- destination be in a different function. If we allow the
- function calling __builtin_longjmp to be inlined into the
- function calling __builtin_setjmp, Things will Go Awry. */
- inline_forbidden_reason
- = G_("function %q+F can never be inlined because "
- "it uses setjmp-longjmp exception handling");
- *handled_ops_p = true;
- return t;
- case BUILT_IN_NONLOCAL_GOTO:
- /* Similarly. */
- inline_forbidden_reason
- = G_("function %q+F can never be inlined because "
- "it uses non-local goto");
- *handled_ops_p = true;
- return t;
- case BUILT_IN_RETURN:
- case BUILT_IN_APPLY_ARGS:
- /* If a __builtin_apply_args caller would be inlined,
- it would be saving arguments of the function it has
- been inlined into. Similarly, __builtin_return would
- return from the function the call has been inlined into. */
- inline_forbidden_reason
- = G_("function %q+F can never be inlined because "
- "it uses __builtin_return or __builtin_apply_args");
- *handled_ops_p = true;
- return t;
- default:
- break;
- }
- break;
- case GIMPLE_GOTO:
- t = gimple_goto_dest (stmt);
- /* We will not inline a function which uses computed goto. The
- addresses of its local labels, which may be tucked into
- global storage, are of course not constant across
- instantiations, which causes unexpected behavior. */
- if (TREE_CODE (t) != LABEL_DECL)
- {
- inline_forbidden_reason
- = G_("function %q+F can never be inlined "
- "because it contains a computed goto");
- *handled_ops_p = true;
- return t;
- }
- break;
- default:
- break;
- }
- *handled_ops_p = false;
- return NULL_TREE;
- }
- /* Return true if FNDECL is a function that cannot be inlined into
- another one. */
- static bool
- inline_forbidden_p (tree fndecl)
- {
- struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
- struct walk_stmt_info wi;
- basic_block bb;
- bool forbidden_p = false;
- /* First check for shared reasons not to copy the code. */
- inline_forbidden_reason = copy_forbidden (fun, fndecl);
- if (inline_forbidden_reason != NULL)
- return true;
- /* Next, walk the statements of the function looking for
- constructs we can't handle, or that are non-optimal for inlining. */
- hash_set<tree> visited_nodes;
- memset (&wi, 0, sizeof (wi));
- wi.info = (void *) fndecl;
- wi.pset = &visited_nodes;
- FOR_EACH_BB_FN (bb, fun)
- {
- gimple ret;
- gimple_seq seq = bb_seq (bb);
- ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
- forbidden_p = (ret != NULL);
- if (forbidden_p)
- break;
- }
- return forbidden_p;
- }
- /* Return false if the function FNDECL cannot be inlined on account of its
- attributes, true otherwise. */
- static bool
- function_attribute_inlinable_p (const_tree fndecl)
- {
- if (targetm.attribute_table)
- {
- const_tree a;
- for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
- {
- const_tree name = TREE_PURPOSE (a);
- int i;
- for (i = 0; targetm.attribute_table[i].name != NULL; i++)
- if (is_attribute_p (targetm.attribute_table[i].name, name))
- return targetm.function_attribute_inlinable_p (fndecl);
- }
- }
- return true;
- }
- /* Returns nonzero if FN is a function that does not have any
- fundamental inline blocking properties. */
- bool
- tree_inlinable_function_p (tree fn)
- {
- bool inlinable = true;
- bool do_warning;
- tree always_inline;
- /* If we've already decided this function shouldn't be inlined,
- there's no need to check again. */
- if (DECL_UNINLINABLE (fn))
- return false;
- /* We only warn for functions declared `inline' by the user. */
- do_warning = (warn_inline
- && DECL_DECLARED_INLINE_P (fn)
- && !DECL_NO_INLINE_WARNING_P (fn)
- && !DECL_IN_SYSTEM_HEADER (fn));
- always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
- if (flag_no_inline
- && always_inline == NULL)
- {
- if (do_warning)
- warning (OPT_Winline, "function %q+F can never be inlined because it "
- "is suppressed using -fno-inline", fn);
- inlinable = false;
- }
- else if (!function_attribute_inlinable_p (fn))
- {
- if (do_warning)
- warning (OPT_Winline, "function %q+F can never be inlined because it "
- "uses attributes conflicting with inlining", fn);
- inlinable = false;
- }
- else if (inline_forbidden_p (fn))
- {
- /* See if we should warn about uninlinable functions. Previously,
- some of these warnings would be issued while trying to expand
- the function inline, but that would cause multiple warnings
- about functions that would for example call alloca. But since
- this a property of the function, just one warning is enough.
- As a bonus we can now give more details about the reason why a
- function is not inlinable. */
- if (always_inline)
- error (inline_forbidden_reason, fn);
- else if (do_warning)
- warning (OPT_Winline, inline_forbidden_reason, fn);
- inlinable = false;
- }
- /* Squirrel away the result so that we don't have to check again. */
- DECL_UNINLINABLE (fn) = !inlinable;
- return inlinable;
- }
- /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
- word size, take a possible memcpy call into account, and return the
- cost based on whether we optimize for size or speed according to SPEED_P. */
- int
- estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
- {
- HOST_WIDE_INT size;
- gcc_assert (!VOID_TYPE_P (type));
- if (TREE_CODE (type) == VECTOR_TYPE)
- {
- machine_mode inner = TYPE_MODE (TREE_TYPE (type));
- machine_mode simd
- = targetm.vectorize.preferred_simd_mode (inner);
- int simd_mode_size = GET_MODE_SIZE (simd);
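- /* Number of moves of the preferred SIMD width needed to copy the
- whole vector type, rounded up. */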
- return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
- / simd_mode_size);
- }
- size = int_size_in_bytes (type);
- if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
- /* Cost of a memcpy call, 3 arguments and the call. */
- return 4;
- else
- return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
- }
- /* Returns cost of operation CODE, according to WEIGHTS */
- static int
- estimate_operator_cost (enum tree_code code, eni_weights *weights,
- tree op1 ATTRIBUTE_UNUSED, tree op2)
- {
- switch (code)
- {
- /* These are "free" conversions, or their presumed cost
- is folded into other operations. */
- case RANGE_EXPR:
- CASE_CONVERT:
- case COMPLEX_EXPR:
- case PAREN_EXPR:
- case VIEW_CONVERT_EXPR:
- return 0;
- /* Assign cost of 1 to usual operations.
- ??? We may consider mapping RTL costs to this. */
- case COND_EXPR:
- case VEC_COND_EXPR:
- case VEC_PERM_EXPR:
- case PLUS_EXPR:
- case POINTER_PLUS_EXPR:
- case MINUS_EXPR:
- case MULT_EXPR:
- case MULT_HIGHPART_EXPR:
- case FMA_EXPR:
- case ADDR_SPACE_CONVERT_EXPR:
- case FIXED_CONVERT_EXPR:
- case FIX_TRUNC_EXPR:
- case NEGATE_EXPR:
- case FLOAT_EXPR:
- case MIN_EXPR:
- case MAX_EXPR:
- case ABS_EXPR:
- case LSHIFT_EXPR:
- case RSHIFT_EXPR:
- case LROTATE_EXPR:
- case RROTATE_EXPR:
- case BIT_IOR_EXPR:
- case BIT_XOR_EXPR:
- case BIT_AND_EXPR:
- case BIT_NOT_EXPR:
- case TRUTH_ANDIF_EXPR:
- case TRUTH_ORIF_EXPR:
- case TRUTH_AND_EXPR:
- case TRUTH_OR_EXPR:
- case TRUTH_XOR_EXPR:
- case TRUTH_NOT_EXPR:
- case LT_EXPR:
- case LE_EXPR:
- case GT_EXPR:
- case GE_EXPR:
- case EQ_EXPR:
- case NE_EXPR:
- case ORDERED_EXPR:
- case UNORDERED_EXPR:
- case UNLT_EXPR:
- case UNLE_EXPR:
- case UNGT_EXPR:
- case UNGE_EXPR:
- case UNEQ_EXPR:
- case LTGT_EXPR:
- case CONJ_EXPR:
- case PREDECREMENT_EXPR:
- case PREINCREMENT_EXPR:
- case POSTDECREMENT_EXPR:
- case POSTINCREMENT_EXPR:
- case REALIGN_LOAD_EXPR:
- case REDUC_MAX_EXPR:
- case REDUC_MIN_EXPR:
- case REDUC_PLUS_EXPR:
- case WIDEN_SUM_EXPR:
- case WIDEN_MULT_EXPR:
- case DOT_PROD_EXPR:
- case SAD_EXPR:
- case WIDEN_MULT_PLUS_EXPR:
- case WIDEN_MULT_MINUS_EXPR:
- case WIDEN_LSHIFT_EXPR:
- case VEC_WIDEN_MULT_HI_EXPR:
- case VEC_WIDEN_MULT_LO_EXPR:
- case VEC_WIDEN_MULT_EVEN_EXPR:
- case VEC_WIDEN_MULT_ODD_EXPR:
- case VEC_UNPACK_HI_EXPR:
- case VEC_UNPACK_LO_EXPR:
- case VEC_UNPACK_FLOAT_HI_EXPR:
- case VEC_UNPACK_FLOAT_LO_EXPR:
- case VEC_PACK_TRUNC_EXPR:
- case VEC_PACK_SAT_EXPR:
- case VEC_PACK_FIX_TRUNC_EXPR:
- case VEC_WIDEN_LSHIFT_HI_EXPR:
- case VEC_WIDEN_LSHIFT_LO_EXPR:
- return 1;
- /* A few special cases of expensive operations. This is useful
- to avoid inlining functions having too many of these. */
- case TRUNC_DIV_EXPR:
- case CEIL_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case ROUND_DIV_EXPR:
- case EXACT_DIV_EXPR:
- case TRUNC_MOD_EXPR:
- case CEIL_MOD_EXPR:
- case FLOOR_MOD_EXPR:
- case ROUND_MOD_EXPR:
- case RDIV_EXPR:
- if (TREE_CODE (op2) != INTEGER_CST)
- return weights->div_mod_cost;
- return 1;
- default:
- /* We expect a copy assignment with no operator. */
- gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
- return 0;
- }
- }
- /* Estimate number of instructions that will be created by expanding
- the statements in the statement sequence STMTS.
- WEIGHTS contains weights attributed to various constructs. */
- static int
- estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
- {
- int cost;
- gimple_stmt_iterator gsi;
- cost = 0;
- for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
- cost += estimate_num_insns (gsi_stmt (gsi), weights);
- return cost;
- }
- /* Estimate number of instructions that will be created by expanding STMT.
- WEIGHTS contains weights attributed to various constructs. */
- int
- estimate_num_insns (gimple stmt, eni_weights *weights)
- {
- unsigned cost, i;
- enum gimple_code code = gimple_code (stmt);
- tree lhs;
- tree rhs;
- switch (code)
- {
- case GIMPLE_ASSIGN:
- /* Try to estimate the cost of assignments. We have two cases to
- deal with:
- 1) Simple assignments to registers;
- 2) Stores to things that must live in memory. This includes
- "normal" stores to scalars, but also assignments of large
- structures, or constructors of big arrays;
- Let us look at these two cases, assuming we have "a = b + C":
- <GIMPLE_ASSIGN <var_decl "a">
- <plus_expr <var_decl "b"> <constant C>>
- If "a" is a GIMPLE register, the assignment to it is free on almost
- any target, because "a" usually ends up in a real register. Hence
- the only cost of this expression comes from the PLUS_EXPR, and we
- can ignore the GIMPLE_ASSIGN.
- If "a" is not a GIMPLE register, the assignment to "a" will most
- likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
- of moving something into "a", which we compute using the function
- estimate_move_cost. */
- if (gimple_clobber_p (stmt))
- return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
- lhs = gimple_assign_lhs (stmt);
- rhs = gimple_assign_rhs1 (stmt);
- cost = 0;
- /* Account for the cost of moving to / from memory. */
- if (gimple_store_p (stmt))
- cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
- if (gimple_assign_load_p (stmt))
- cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
- cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
- gimple_assign_rhs1 (stmt),
- get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
- == GIMPLE_BINARY_RHS
- ? gimple_assign_rhs2 (stmt) : NULL);
- break;
- case GIMPLE_COND:
- cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
- gimple_op (stmt, 0),
- gimple_op (stmt, 1));
- break;
- case GIMPLE_SWITCH:
- {
- gswitch *switch_stmt = as_a <gswitch *> (stmt);
- /* Take into account cost of the switch + guess 2 conditional jumps for
- each case label.
- TODO: once the switch expansion logic is sufficiently separated, we can
- do a better job of estimating the cost of the switch. */
- if (weights->time_based)
- cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
- else
- cost = gimple_switch_num_labels (switch_stmt) * 2;
- }
- break;
- case GIMPLE_CALL:
- {
- tree decl;
- if (gimple_call_internal_p (stmt))
- return 0;
- else if ((decl = gimple_call_fndecl (stmt))
- && DECL_BUILT_IN (decl))
- {
- /* Do not special case builtins where we see the body.
- This just confuses the inliner. */
- struct cgraph_node *node;
- if (!(node = cgraph_node::get (decl))
- || node->definition)
- ;
- /* For builtins that are likely expanded to nothing or
- inlined, do not account for operand costs. */
- else if (is_simple_builtin (decl))
- return 0;
- else if (is_inexpensive_builtin (decl))
- return weights->target_builtin_call_cost;
- else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
- {
- /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
- specialize the cheap expansion we do here.
- ??? This asks for a more general solution. */
- switch (DECL_FUNCTION_CODE (decl))
- {
- case BUILT_IN_POW:
- case BUILT_IN_POWF:
- case BUILT_IN_POWL:
- if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
- && REAL_VALUES_EQUAL
- (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
- return estimate_operator_cost
- (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
- gimple_call_arg (stmt, 0));
- break;
- default:
- break;
- }
- }
- }
- cost = decl ? weights->call_cost : weights->indirect_call_cost;
- if (gimple_call_lhs (stmt))
- cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
- weights->time_based);
- for (i = 0; i < gimple_call_num_args (stmt); i++)
- {
- tree arg = gimple_call_arg (stmt, i);
- cost += estimate_move_cost (TREE_TYPE (arg),
- weights->time_based);
- }
- break;
- }
- case GIMPLE_RETURN:
- return weights->return_cost;
- case GIMPLE_GOTO:
- case GIMPLE_LABEL:
- case GIMPLE_NOP:
- case GIMPLE_PHI:
- case GIMPLE_PREDICT:
- case GIMPLE_DEBUG:
- return 0;
- case GIMPLE_ASM:
- {
- int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
- /* 1000 means infinity. This avoids overflows later
- with very long asm statements. */
- if (count > 1000)
- count = 1000;
- return count;
- }
- case GIMPLE_RESX:
- /* This is either going to be an external function call with one
- argument, or two register copy statements plus a goto. */
- return 2;
- case GIMPLE_EH_DISPATCH:
- /* ??? This is going to turn into a switch statement. Ideally
- we'd have a look at the eh region and estimate the number of
- edges involved. */
- return 10;
- case GIMPLE_BIND:
- return estimate_num_insns_seq (
- gimple_bind_body (as_a <gbind *> (stmt)),
- weights);
- case GIMPLE_EH_FILTER:
- return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
- case GIMPLE_CATCH:
- return estimate_num_insns_seq (gimple_catch_handler (
- as_a <gcatch *> (stmt)),
- weights);
- case GIMPLE_TRY:
- return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
- + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
- /* OMP directives are generally very expensive. */
- case GIMPLE_OMP_RETURN:
- case GIMPLE_OMP_SECTIONS_SWITCH:
- case GIMPLE_OMP_ATOMIC_STORE:
- case GIMPLE_OMP_CONTINUE:
- /* ...except these, which are cheap. */
- return 0;
- case GIMPLE_OMP_ATOMIC_LOAD:
- return weights->omp_cost;
- case GIMPLE_OMP_FOR:
- return (weights->omp_cost
- + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
- + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
- case GIMPLE_OMP_PARALLEL:
- case GIMPLE_OMP_TASK:
- case GIMPLE_OMP_CRITICAL:
- case GIMPLE_OMP_MASTER:
- case GIMPLE_OMP_TASKGROUP:
- case GIMPLE_OMP_ORDERED:
- case GIMPLE_OMP_SECTION:
- case GIMPLE_OMP_SECTIONS:
- case GIMPLE_OMP_SINGLE:
- case GIMPLE_OMP_TARGET:
- case GIMPLE_OMP_TEAMS:
- return (weights->omp_cost
- + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
- case GIMPLE_TRANSACTION:
- return (weights->tm_cost
- + estimate_num_insns_seq (gimple_transaction_body (
- as_a <gtransaction *> (stmt)),
- weights));
- default:
- gcc_unreachable ();
- }
- return cost;
- }
- /* Estimate number of instructions that will be created by expanding
- function FNDECL. WEIGHTS contains weights attributed to various
- constructs. */
- int
- estimate_num_insns_fn (tree fndecl, eni_weights *weights)
- {
- struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
- gimple_stmt_iterator bsi;
- basic_block bb;
- int n = 0;
- gcc_assert (my_function && my_function->cfg);
- FOR_EACH_BB_FN (bb, my_function)
- {
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- n += estimate_num_insns (gsi_stmt (bsi), weights);
- }
- return n;
- }
- /* Initializes weights used by estimate_num_insns. */
- void
- init_inline_once (void)
- {
- eni_size_weights.call_cost = 1;
- eni_size_weights.indirect_call_cost = 3;
- eni_size_weights.target_builtin_call_cost = 1;
- eni_size_weights.div_mod_cost = 1;
- eni_size_weights.omp_cost = 40;
- eni_size_weights.tm_cost = 10;
- eni_size_weights.time_based = false;
- eni_size_weights.return_cost = 1;
- /* Estimating time for call is difficult, since we have no idea what the
- called function does. In the current uses of eni_time_weights,
- underestimating the cost does less harm than overestimating it, so
- we choose a rather small value here. */
- eni_time_weights.call_cost = 10;
- eni_time_weights.indirect_call_cost = 15;
- eni_time_weights.target_builtin_call_cost = 1;
- eni_time_weights.div_mod_cost = 10;
- eni_time_weights.omp_cost = 40;
- eni_time_weights.tm_cost = 40;
- eni_time_weights.time_based = true;
- eni_time_weights.return_cost = 2;
- }
- /* Estimate the number of instructions in a gimple_seq. */
- int
- count_insns_seq (gimple_seq seq, eni_weights *weights)
- {
- gimple_stmt_iterator gsi;
- int n = 0;
- for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
- n += estimate_num_insns (gsi_stmt (gsi), weights);
- return n;
- }
- /* Install new lexical TREE_BLOCK underneath 'current_block'. */
- static void
- prepend_lexical_block (tree current_block, tree new_block)
- {
- BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
- BLOCK_SUBBLOCKS (current_block) = new_block;
- BLOCK_SUPERCONTEXT (new_block) = current_block;
- }
- /* Add local variables from CALLEE to CALLER. */
- static inline void
- add_local_variables (struct function *callee, struct function *caller,
- copy_body_data *id)
- {
- tree var;
- unsigned ix;
- FOR_EACH_LOCAL_DECL (callee, ix, var)
- if (!can_be_nonlocal (var, id))
- {
- tree new_var = remap_decl (var, id);
- /* Remap debug-expressions. */
- if (TREE_CODE (new_var) == VAR_DECL
- && DECL_HAS_DEBUG_EXPR_P (var)
- && new_var != var)
- {
- tree tem = DECL_DEBUG_EXPR (var);
- bool old_regimplify = id->regimplify;
- id->remapping_type_depth++;
- walk_tree (&tem, copy_tree_body_r, id, NULL);
- id->remapping_type_depth--;
- id->regimplify = old_regimplify;
- SET_DECL_DEBUG_EXPR (new_var, tem);
- DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
- }
- add_local_decl (caller, new_var);
- }
- }
- /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
- static bool
- expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
- {
- tree use_retvar;
- tree fn;
- hash_map<tree, tree> *dst;
- hash_map<tree, tree> *st = NULL;
- tree return_slot;
- tree modify_dest;
- tree return_bounds = NULL;
- location_t saved_location;
- struct cgraph_edge *cg_edge;
- cgraph_inline_failed_t reason;
- basic_block return_block;
- edge e;
- gimple_stmt_iterator gsi, stmt_gsi;
- bool successfully_inlined = FALSE;
- bool purge_dead_abnormal_edges;
- gcall *call_stmt;
- unsigned int i;
- /* Set input_location here so we get the right instantiation context
- if we call instantiate_decl from inlinable_function_p. */
- /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
- saved_location = input_location;
- input_location = gimple_location (stmt);
- /* From here on, we're only interested in CALL_EXPRs. */
- call_stmt = dyn_cast <gcall *> (stmt);
- if (!call_stmt)
- goto egress;
- cg_edge = id->dst_node->get_edge (stmt);
- gcc_checking_assert (cg_edge);
- /* First, see if we can figure out what function is being called.
- If we cannot, then there is no hope of inlining the function. */
- if (cg_edge->indirect_unknown_callee)
- goto egress;
- fn = cg_edge->callee->decl;
- gcc_checking_assert (fn);
- /* If FN is a declaration of a function in a nested scope that was
- globally declared inline, we don't set its DECL_INITIAL.
- However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
- C++ front-end uses it for cdtors to refer to their internal
- declarations, which are not real functions. Fortunately those
- don't have trees to be saved, so we can tell by checking their
- gimple_body. */
- if (!DECL_INITIAL (fn)
- && DECL_ABSTRACT_ORIGIN (fn)
- && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
- fn = DECL_ABSTRACT_ORIGIN (fn);
- /* Don't try to inline functions that are not well-suited to inlining. */
- if (cg_edge->inline_failed)
- {
- reason = cg_edge->inline_failed;
- /* If this call was originally indirect, we do not want to emit any
- inlining related warnings or sorry messages because there are no
- guarantees regarding those. */
- if (cg_edge->indirect_inlining_edge)
- goto egress;
- if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
- /* For extern inline functions that get redefined we always
- silently ignore the always_inline flag. Better behaviour would
- be to keep both bodies and use the extern inline body
- for inlining, but we can't do that because frontends overwrite
- the body. */
- && !cg_edge->callee->local.redefined_extern_inline
- /* During early inline pass, report only when optimization is
- not turned on. */
- && (symtab->global_info_ready
- || !optimize
- || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
- /* PR 20090218-1_0.c. Body can be provided by another module. */
- && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
- {
- error ("inlining failed in call to always_inline %q+F: %s", fn,
- cgraph_inline_failed_string (reason));
- error ("called from here");
- }
- else if (warn_inline
- && DECL_DECLARED_INLINE_P (fn)
- && !DECL_NO_INLINE_WARNING_P (fn)
- && !DECL_IN_SYSTEM_HEADER (fn)
- && reason != CIF_UNSPECIFIED
- && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
- /* Do not warn about not inlined recursive calls. */
- && !cg_edge->recursive_p ()
- /* Avoid warnings during early inline pass. */
- && symtab->global_info_ready)
- {
- warning (OPT_Winline, "inlining failed in call to %q+F: %s",
- fn, _(cgraph_inline_failed_string (reason)));
- warning (OPT_Winline, "called from here");
- }
- goto egress;
- }
- fn = cg_edge->callee->decl;
- cg_edge->callee->get_untransformed_body ();
- #ifdef ENABLE_CHECKING
- if (cg_edge->callee->decl != id->dst_node->decl)
- cg_edge->callee->verify ();
- #endif
- /* We will be inlining this callee. */
- id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
- id->assign_stmts.create (0);
- /* Update the callers EH personality. */
- if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
- DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
- = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
- /* Split the block holding the GIMPLE_CALL. */
- e = split_block (bb, stmt);
- bb = e->src;
- return_block = e->dest;
- remove_edge (e);
- /* split_block splits after the statement; work around this by
- moving the call into the second block manually. Not pretty,
- but seems easier than doing the CFG manipulation by hand
- when the GIMPLE_CALL is in the last statement of BB. */
- stmt_gsi = gsi_last_bb (bb);
- gsi_remove (&stmt_gsi, false);
- /* If the GIMPLE_CALL was in the last statement of BB, it may have
- been the source of abnormal edges. In this case, schedule
- the removal of dead abnormal edges. */
- gsi = gsi_start_bb (return_block);
- if (gsi_end_p (gsi))
- {
- gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
- purge_dead_abnormal_edges = true;
- }
- else
- {
- gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
- purge_dead_abnormal_edges = false;
- }
- stmt_gsi = gsi_start_bb (return_block);
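- /* STMT_GSI now points at the call statement, which was moved to the
- start of RETURN_BLOCK above. */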
- /* Build a block containing code to initialize the arguments, the
- actual inline expansion of the body, and a label for the return
- statements within the function to jump to. The type of the
- statement expression is the return type of the function call.
- ??? If the call does not have an associated block then we will
- remap all callee blocks to NULL, effectively dropping most of
- its debug information. This should only happen for calls to
- artificial decls inserted by the compiler itself. We need to
- either link the inlined blocks into the caller block tree or
- not refer to them in any way to not break GC for locations. */
- if (gimple_block (stmt))
- {
- id->block = make_node (BLOCK);
- BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
- BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
- prepend_lexical_block (gimple_block (stmt), id->block);
- }
- /* Local declarations will be replaced by their equivalents in this
- map. */
- st = id->decl_map;
- id->decl_map = new hash_map<tree, tree>;
- dst = id->debug_map;
- id->debug_map = NULL;
- /* Record the function we are about to inline. */
- id->src_fn = fn;
- id->src_node = cg_edge->callee;
- id->src_cfun = DECL_STRUCT_FUNCTION (fn);
- id->call_stmt = stmt;
- gcc_assert (!id->src_cfun->after_inlining);
- id->entry_bb = bb;
- if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
- {
- gimple_stmt_iterator si = gsi_last_bb (bb);
- gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
- NOT_TAKEN),
- GSI_NEW_STMT);
- }
- initialize_inlined_parameters (id, stmt, fn, bb);
- if (DECL_INITIAL (fn))
- {
- if (gimple_block (stmt))
- {
- tree *var;
- prepend_lexical_block (id->block,
- remap_blocks (DECL_INITIAL (fn), id));
- gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
- && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
- == NULL_TREE));
- /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
- otherwise for DWARF the DW_TAG_formal_parameter entries will not be
- children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
- under it. The parameters can then be evaluated in the debugger,
- but don't show in backtraces. */
- for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
- if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
- {
- tree v = *var;
- *var = TREE_CHAIN (v);
- TREE_CHAIN (v) = BLOCK_VARS (id->block);
- BLOCK_VARS (id->block) = v;
- }
- else
- var = &TREE_CHAIN (*var);
- }
- else
- remap_blocks_to_null (DECL_INITIAL (fn), id);
- }
- /* Return statements in the function body will be replaced by jumps
- to the RET_LABEL. */
- gcc_assert (DECL_INITIAL (fn));
- gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
- /* Find the LHS to which the result of this call is assigned. */
- return_slot = NULL;
- if (gimple_call_lhs (stmt))
- {
- modify_dest = gimple_call_lhs (stmt);
- /* Remember where to copy returned bounds. */
- if (gimple_call_with_bounds_p (stmt)
- && TREE_CODE (modify_dest) == SSA_NAME)
- {
- gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
- if (retbnd)
- {
- return_bounds = gimple_call_lhs (retbnd);
- /* If returned bounds are not used then just
- remove unused call. */
- if (!return_bounds)
- {
- gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
- gsi_remove (&iter, true);
- }
- }
- }
- /* The function which we are inlining might not return a value,
- in which case we should issue a warning that the function
- does not return a value. In that case the optimizers will
- see that the variable to which the value is assigned was not
- initialized. We do not want to issue a warning about that
- uninitialized variable. */
- if (DECL_P (modify_dest))
- TREE_NO_WARNING (modify_dest) = 1;
- if (gimple_call_return_slot_opt_p (call_stmt))
- {
- return_slot = modify_dest;
- modify_dest = NULL;
- }
- }
- else
- modify_dest = NULL;
- /* If we are inlining a call to the C++ operator new, we don't want
- to use type based alias analysis on the return value. Otherwise
- we may get confused if the compiler sees that the inlined new
- function returns a pointer which was just deleted. See bug
- 33407. */
- if (DECL_IS_OPERATOR_NEW (fn))
- {
- return_slot = NULL;
- modify_dest = NULL;
- }
- /* Declare the return variable for the function. */
- use_retvar = declare_return_variable (id, return_slot, modify_dest,
- return_bounds, bb);
- /* Add local vars in this inlined callee to caller. */
- add_local_variables (id->src_cfun, cfun, id);
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, "Inlining ");
- print_generic_expr (dump_file, id->src_fn, 0);
- fprintf (dump_file, " to ");
- print_generic_expr (dump_file, id->dst_fn, 0);
- fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
- }
- /* This is it. Duplicate the callee body. Assume callee is
- pre-gimplified. Note that we must not alter the caller
- function in any way before this point, as this CALL_EXPR may be
- a self-referential call; if we're calling ourselves, we need to
- duplicate our body before altering anything. */
- copy_body (id, cg_edge->callee->count,
- GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
- bb, return_block, NULL);
- /* Reset the escaped solution. */
- if (cfun->gimple_df)
- pt_solution_reset (&cfun->gimple_df->escaped);
- /* Clean up. */
- if (id->debug_map)
- {
- delete id->debug_map;
- id->debug_map = dst;
- }
- delete id->decl_map;
- id->decl_map = st;
- /* Unlink the call's virtual operands before replacing it. */
- unlink_stmt_vdef (stmt);
- if (gimple_vdef (stmt)
- && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
- release_ssa_name (gimple_vdef (stmt));
- /* If the inlined function returns a result that we care about,
- substitute the GIMPLE_CALL with an assignment of the return
- variable to the LHS of the call. That is, if STMT was
- 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
- if (use_retvar && gimple_call_lhs (stmt))
- {
- gimple old_stmt = stmt;
- stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
- gsi_replace (&stmt_gsi, stmt, false);
- maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
- /* Copy bounds if we copy structure with bounds. */
- if (chkp_function_instrumented_p (id->dst_fn)
- && !BOUNDED_P (use_retvar)
- && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
- id->assign_stmts.safe_push (stmt);
- }
- else
- {
- /* Handle the case of inlining a function with no return
- statement, which causes the return value to become undefined. */
- if (gimple_call_lhs (stmt)
- && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
- {
- tree name = gimple_call_lhs (stmt);
- tree var = SSA_NAME_VAR (name);
- tree def = ssa_default_def (cfun, var);
- if (def)
- {
- /* If the variable is used undefined, make this name
- undefined via a move. */
- stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
- gsi_replace (&stmt_gsi, stmt, true);
- }
- else
- {
- /* Otherwise make this variable undefined. */
- gsi_remove (&stmt_gsi, true);
- set_ssa_default_def (cfun, var, name);
- SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
- }
- }
- else
- gsi_remove (&stmt_gsi, true);
- }
- /* Put returned bounds into the correct place if required. */
- if (return_bounds)
- {
- gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
- gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
- gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
- unlink_stmt_vdef (old_stmt);
- gsi_replace (&bnd_gsi, new_stmt, false);
- maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
- cgraph_update_edges_for_call_stmt (old_stmt,
- gimple_call_fndecl (old_stmt),
- new_stmt);
- }
- if (purge_dead_abnormal_edges)
- {
- gimple_purge_dead_eh_edges (return_block);
- gimple_purge_dead_abnormal_call_edges (return_block);
- }
- /* If the value of the new expression is ignored, that's OK. We
- don't warn about this for CALL_EXPRs, so we shouldn't warn about
- the equivalent inlined version either. */
- if (is_gimple_assign (stmt))
- {
- gcc_assert (gimple_assign_single_p (stmt)
- || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
- TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
- }
- /* Copy bounds for all generated assigns that need it. */
- for (i = 0; i < id->assign_stmts.length (); i++)
- chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
- id->assign_stmts.release ();
- /* Output the inlining info for this abstract function, since it has been
- inlined. If we don't do this now, we can lose the information about the
- variables in the function when the blocks get blown away as soon as we
- remove the cgraph node. */
- if (gimple_block (stmt))
- (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
- /* Update callgraph if needed. */
- cg_edge->callee->remove ();
- id->block = NULL_TREE;
- successfully_inlined = TRUE;
- egress:
- input_location = saved_location;
- return successfully_inlined;
- }
- /* Expand call statements reachable from STMT_P.
- We can only have CALL_EXPRs as the "toplevel" tree code or nested
- in a MODIFY_EXPR. */
- static bool
- gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
- {
- gimple_stmt_iterator gsi;
- bool inlined = false;
- for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
- {
- gimple stmt = gsi_stmt (gsi);
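- /* Advance the iterator before any inlining: expand_call_inline may
- split BB at STMT, invalidating iterators that point at it. */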
- gsi_prev (&gsi);
- if (is_gimple_call (stmt)
- && !gimple_call_internal_p (stmt))
- inlined |= expand_call_inline (bb, stmt, id);
- }
- return inlined;
- }
- /* Walk all basic blocks created after FIRST and try to fold every statement
- in the STATEMENTS pointer set. */
- static void
- fold_marked_statements (int first, hash_set<gimple> *statements)
- {
- for (; first < n_basic_blocks_for_fn (cfun); first++)
- if (BASIC_BLOCK_FOR_FN (cfun, first))
- {
- gimple_stmt_iterator gsi;
- for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
- !gsi_end_p (gsi);
- gsi_next (&gsi))
- if (statements->contains (gsi_stmt (gsi)))
- {
- gimple old_stmt = gsi_stmt (gsi);
- tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
- if (old_decl && DECL_BUILT_IN (old_decl))
- {
- /* Folding builtins can create multiple instructions,
- we need to look at all of them. */
- gimple_stmt_iterator i2 = gsi;
- gsi_prev (&i2);
- if (fold_stmt (&gsi))
- {
- gimple new_stmt;
- /* If a builtin at the end of a bb folded into nothing,
- the following loop won't work. */
- if (gsi_end_p (gsi))
- {
- cgraph_update_edges_for_call_stmt (old_stmt,
- old_decl, NULL);
- break;
- }
- if (gsi_end_p (i2))
- i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
- else
- gsi_next (&i2);
- while (1)
- {
- new_stmt = gsi_stmt (i2);
- update_stmt (new_stmt);
- cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
- new_stmt);
- if (new_stmt == gsi_stmt (gsi))
- {
- /* It is okay to check only for the very last
- of these statements. If it is a throwing
- statement, nothing will change. If it isn't,
- this can remove EH edges. The only problematic
- case would be if some intermediate stmts could
- throw but not the last one; that would mean
- we'd have to split the block, which we can't
- do here and would lose anyway. And as builtins
- probably never throw, this all
- is moot anyway. */
- if (maybe_clean_or_replace_eh_stmt (old_stmt,
- new_stmt))
- gimple_purge_dead_eh_edges (
- BASIC_BLOCK_FOR_FN (cfun, first));
- break;
- }
- gsi_next (&i2);
- }
- }
- }
- else if (fold_stmt (&gsi))
- {
- /* Re-read the statement from GSI as fold_stmt() may
- have changed it. */
- gimple new_stmt = gsi_stmt (gsi);
- update_stmt (new_stmt);
- if (is_gimple_call (old_stmt)
- || is_gimple_call (new_stmt))
- cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
- new_stmt);
- if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
- gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
- first));
- }
- }
- }
- }
- /* Expand calls to inline functions in the body of FN. */
- unsigned int
- optimize_inline_calls (tree fn)
- {
- copy_body_data id;
- basic_block bb;
- int last = n_basic_blocks_for_fn (cfun);
- bool inlined_p = false;
- /* Clear out ID. */
- memset (&id, 0, sizeof (id));
- id.src_node = id.dst_node = cgraph_node::get (fn);
- gcc_assert (id.dst_node->definition);
- id.dst_fn = fn;
- /* Or any functions that aren't finished yet. */
- if (current_function_decl)
- id.dst_fn = current_function_decl;
- id.copy_decl = copy_decl_maybe_to_var;
- id.transform_call_graph_edges = CB_CGE_DUPLICATE;
- id.transform_new_cfg = false;
- id.transform_return_to_modify = true;
- id.transform_parameter = true;
- id.transform_lang_insert_block = NULL;
- id.statements_to_fold = new hash_set<gimple>;
- push_gimplify_context ();
- /* We make no attempts to keep dominance info up-to-date. */
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
- /* Register specific gimple functions. */
- gimple_register_cfg_hooks ();
- /* Reach the trees by walking over the CFG, and note the
- enclosing basic-blocks in the call edges. */
- /* We walk the blocks going forward, because inlined function bodies
- will split id->current_basic_block, and the new blocks will
- follow it; we'll trudge through them, processing their CALL_EXPRs
- along the way. */
- FOR_EACH_BB_FN (bb, cfun)
- inlined_p |= gimple_expand_calls_inline (bb, &id);
- pop_gimplify_context (NULL);
- #ifdef ENABLE_CHECKING
- {
- struct cgraph_edge *e;
- id.dst_node->verify ();
- /* Double check that we inlined everything we are supposed to inline. */
- for (e = id.dst_node->callees; e; e = e->next_callee)
- gcc_assert (e->inline_failed);
- }
- #endif
- /* Fold queued statements. */
- fold_marked_statements (last, id.statements_to_fold);
- delete id.statements_to_fold;
- gcc_assert (!id.debug_stmts.exists ());
- /* If we didn't inline into the function there is nothing to do. */
- if (!inlined_p)
- return 0;
- /* Renumber the lexical scoping (non-code) blocks consecutively. */
- number_blocks (fn);
- delete_unreachable_blocks_update_callgraph (&id);
- #ifdef ENABLE_CHECKING
- id.dst_node->verify ();
- #endif
- /* It would be nice to check SSA/CFG/statement consistency here, but it is
- not possible yet - the IPA passes might make various functions not
- throw and they don't care to proactively update local EH info. This is
- done later in the fixup_cfg pass, which also executes the verification. */
- return (TODO_update_ssa
- | TODO_cleanup_cfg
- | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
- | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
- | (profile_status_for_fn (cfun) != PROFILE_ABSENT
- ? TODO_rebuild_frequencies : 0));
- }
- /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
- tree
- copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
- {
- enum tree_code code = TREE_CODE (*tp);
- enum tree_code_class cl = TREE_CODE_CLASS (code);
- /* We make copies of most nodes. */
- if (IS_EXPR_CODE_CLASS (cl)
- || code == TREE_LIST
- || code == TREE_VEC
- || code == TYPE_DECL
- || code == OMP_CLAUSE)
- {
- /* Because the chain gets clobbered when we make a copy, we save it
- here. */
- tree chain = NULL_TREE, new_tree;
- if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
- chain = TREE_CHAIN (*tp);
- /* Copy the node. */
- new_tree = copy_node (*tp);
- *tp = new_tree;
- /* Now, restore the chain, if appropriate. That will cause
- walk_tree to walk into the chain as well. */
- if (code == PARM_DECL
- || code == TREE_LIST
- || code == OMP_CLAUSE)
- TREE_CHAIN (*tp) = chain;
- /* For now, we don't update BLOCKs when we make copies. So, we
- have to nullify all BIND_EXPRs. */
- if (TREE_CODE (*tp) == BIND_EXPR)
- BIND_EXPR_BLOCK (*tp) = NULL_TREE;
- }
- else if (code == CONSTRUCTOR)
- {
- /* CONSTRUCTOR nodes need special handling because
- we need to duplicate the vector of elements. */
- tree new_tree;
- new_tree = copy_node (*tp);
- CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
- *tp = new_tree;
- }
- else if (code == STATEMENT_LIST)
- /* We used to just abort on STATEMENT_LIST, but we can run into them
- with statement-expressions (c++/40975). */
- copy_statement_list (tp);
- else if (TREE_CODE_CLASS (code) == tcc_type)
- *walk_subtrees = 0;
- else if (TREE_CODE_CLASS (code) == tcc_declaration)
- *walk_subtrees = 0;
- else if (TREE_CODE_CLASS (code) == tcc_constant)
- *walk_subtrees = 0;
- return NULL_TREE;
- }
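- /* Illustrative sketch, not part of the original file: the save-the-chain
-    idiom copy_tree_r uses, shown on a simplified node type.  The struct and
-    helpers below are hypothetical stand-ins for GCC's tree nodes; only the
-    pattern (a shallow copy clobbers the chain, so save it first and restore
-    it only for codes whose chain should be walked) mirrors the code above.  */
- #include <stdlib.h>
- #include <string.h>
- struct toy_node
- {
-   int code;                /* stands in for TREE_CODE  */
-   struct toy_node *chain;  /* stands in for TREE_CHAIN */
-   int payload;
- };
- static struct toy_node *
- toy_copy_node (const struct toy_node *n)
- {
-   /* Shallow copy; like copy_node, the copy's chain is cleared.  */
-   struct toy_node *c = (struct toy_node *) malloc (sizeof (*c));
-   memcpy (c, n, sizeof (*c));
-   c->chain = NULL;
-   return c;
- }
- static struct toy_node *
- toy_copy_preserving_chain (struct toy_node *n, int walk_chain)
- {
-   /* Save the chain before the copy clobbers it ...  */
-   struct toy_node *chain = n->chain;
-   struct toy_node *copy = toy_copy_node (n);
-   /* ... and restore it only when the caller wants the walker to keep going
-      down the chain, as copy_tree_r does for PARM_DECL, TREE_LIST and
-      OMP_CLAUSE.  */
-   if (walk_chain)
-     copy->chain = chain;
-   return copy;
- }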
- /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
- information indicating to what new SAVE_EXPR this one should be mapped,
- use that one. Otherwise, create a new node and enter it in ST. FN is
- the function into which the copy will be placed. */
- static void
- remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
- {
- tree *n;
- tree t;
- /* See if we already encountered this SAVE_EXPR. */
- n = st->get (*tp);
- /* If we didn't already remap this SAVE_EXPR, do so now. */
- if (!n)
- {
- t = copy_node (*tp);
- /* Remember this SAVE_EXPR. */
- st->put (*tp, t);
- /* Make sure we don't remap an already-remapped SAVE_EXPR. */
- st->put (t, t);
- }
- else
- {
- /* We've already walked into this SAVE_EXPR; don't do it again. */
- *walk_subtrees = 0;
- t = *n;
- }
- /* Replace this SAVE_EXPR with the copy. */
- *tp = t;
- }
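- /* Illustrative sketch, not part of the original file: the double-entry
-    trick remap_save_expr relies on.  Besides mapping OLD -> COPY, the copy is
-    also entered as mapping to itself, so that if the walker later meets the
-    copy it is not copied a second time.  The fixed-size association list
-    below is a hypothetical stand-in for GCC's hash_map.  */
- #include <stddef.h>
- struct toy_assoc { void *key; void *val; };
- static struct toy_assoc toy_map[64];
- static size_t toy_map_len;
- static void *
- toy_map_get (void *key)
- {
-   for (size_t i = 0; i < toy_map_len; i++)
-     if (toy_map[i].key == key)
-       return toy_map[i].val;
-   return NULL;
- }
- static void
- toy_map_put (void *key, void *val)
- {
-   toy_map[toy_map_len].key = key;
-   toy_map[toy_map_len].val = val;
-   toy_map_len++;
- }
- static void *
- toy_remap_once (void *node, void *(*copy_fn) (void *))
- {
-   void *n = toy_map_get (node);
-   if (n)
-     return n;                   /* already remapped: reuse the copy  */
-   void *t = copy_fn (node);
-   toy_map_put (node, t);        /* remember OLD -> COPY              */
-   toy_map_put (t, t);           /* and make the copy map to itself   */
-   return t;
- }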
- /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
- label, copies the declaration and enters it into the decl map in DATA (which
- is really a 'copy_body_data *').  */
- static tree
- mark_local_labels_stmt (gimple_stmt_iterator *gsip,
- bool *handled_ops_p ATTRIBUTE_UNUSED,
- struct walk_stmt_info *wi)
- {
- copy_body_data *id = (copy_body_data *) wi->info;
- glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
- if (stmt)
- {
- tree decl = gimple_label_label (stmt);
- /* Copy the decl and remember the copy. */
- insert_decl_map (id, decl, id->copy_decl (decl, id));
- }
- return NULL_TREE;
- }
- /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
- Using the decl map in the copy_body_data carried in WI->INFO, remaps all
- local declarations to their replacements in gimple operands.  */
- static tree
- replace_locals_op (tree *tp, int *walk_subtrees, void *data)
- {
- struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
- copy_body_data *id = (copy_body_data *) wi->info;
- hash_map<tree, tree> *st = id->decl_map;
- tree *n;
- tree expr = *tp;
- /* Only a local declaration (variable or label). */
- if ((TREE_CODE (expr) == VAR_DECL
- && !TREE_STATIC (expr))
- || TREE_CODE (expr) == LABEL_DECL)
- {
- /* Lookup the declaration. */
- n = st->get (expr);
- /* If it's there, remap it. */
- if (n)
- *tp = *n;
- *walk_subtrees = 0;
- }
- else if (TREE_CODE (expr) == STATEMENT_LIST
- || TREE_CODE (expr) == BIND_EXPR
- || TREE_CODE (expr) == SAVE_EXPR)
- gcc_unreachable ();
- else if (TREE_CODE (expr) == TARGET_EXPR)
- {
- /* Don't mess with a TARGET_EXPR that hasn't been expanded.
- It's OK for this to happen if it was part of a subtree that
- isn't immediately expanded, such as operand 2 of another
- TARGET_EXPR. */
- if (!TREE_OPERAND (expr, 1))
- {
- TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
- TREE_OPERAND (expr, 3) = NULL_TREE;
- }
- }
- /* Keep iterating. */
- return NULL_TREE;
- }
- /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
- Using the decl map in the copy_body_data carried in WI->INFO, remaps all
- local declarations to their replacements in gimple statements.  */
- static tree
- replace_locals_stmt (gimple_stmt_iterator *gsip,
- bool *handled_ops_p ATTRIBUTE_UNUSED,
- struct walk_stmt_info *wi)
- {
- copy_body_data *id = (copy_body_data *) wi->info;
- gimple gs = gsi_stmt (*gsip);
- if (gbind *stmt = dyn_cast <gbind *> (gs))
- {
- tree block = gimple_bind_block (stmt);
- if (block)
- {
- remap_block (&block, id);
- gimple_bind_set_block (stmt, block);
- }
- /* This will remap a lot of the same decls again, but this should be
- harmless. */
- if (gimple_bind_vars (stmt))
- gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
- NULL, id));
- }
- /* Keep iterating. */
- return NULL_TREE;
- }
- /* Copies everything in SEQ and replaces variables and labels local to
- current_function_decl. */
- gimple_seq
- copy_gimple_seq_and_replace_locals (gimple_seq seq)
- {
- copy_body_data id;
- struct walk_stmt_info wi;
- gimple_seq copy;
- /* There's nothing to do for an empty (NULL) sequence.  */
- if (seq == NULL)
- return seq;
- /* Set up ID. */
- memset (&id, 0, sizeof (id));
- id.src_fn = current_function_decl;
- id.dst_fn = current_function_decl;
- id.decl_map = new hash_map<tree, tree>;
- id.debug_map = NULL;
- id.copy_decl = copy_decl_no_change;
- id.transform_call_graph_edges = CB_CGE_DUPLICATE;
- id.transform_new_cfg = false;
- id.transform_return_to_modify = false;
- id.transform_parameter = false;
- id.transform_lang_insert_block = NULL;
- /* Walk the tree once to find local labels. */
- memset (&wi, 0, sizeof (wi));
- hash_set<tree> visited;
- wi.info = &id;
- wi.pset = &visited;
- walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
- copy = gimple_seq_copy (seq);
- /* Walk the copy, remapping decls. */
- memset (&wi, 0, sizeof (wi));
- wi.info = &id;
- walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
- /* Clean up. */
- delete id.decl_map;
- if (id.debug_map)
- delete id.debug_map;
- if (id.dependence_map)
- {
- delete id.dependence_map;
- id.dependence_map = NULL;
- }
- return copy;
- }
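- /* Illustrative sketch, not part of the original file: the two-pass shape of
-    copy_gimple_seq_and_replace_locals.  Pass one walks the original sequence
-    and records a fresh copy for every locally defined label; pass two copies
-    the sequence and rewrites references through that map.  The "statements"
-    below are a hypothetical array of label defs and gotos, not GIMPLE, and
-    label numbers are assumed to be smaller than TOY_NLABELS.  */
- enum toy_kind { TOY_DEF_LABEL, TOY_GOTO_LABEL };
- struct toy_stmt { enum toy_kind kind; int label; };
- #define TOY_NLABELS 8
- static void
- toy_copy_and_replace (const struct toy_stmt *seq, struct toy_stmt *copy,
-                       int n, int *next_label)
- {
-   int label_copy[TOY_NLABELS];
-   for (int i = 0; i < TOY_NLABELS; i++)
-     label_copy[i] = -1;              /* -1: not a local label, keep as is */
-   /* Pass 1: give every locally defined label a fresh identity.  */
-   for (int i = 0; i < n; i++)
-     if (seq[i].kind == TOY_DEF_LABEL)
-       label_copy[seq[i].label] = (*next_label)++;
-   /* Pass 2: copy the sequence; defs of and gotos to a local label are
-      redirected to its copy, everything else is left untouched.  */
-   for (int i = 0; i < n; i++)
-     {
-       copy[i] = seq[i];
-       if (label_copy[seq[i].label] != -1)
-         copy[i].label = label_copy[seq[i].label];
-     }
- }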
- /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
- static tree
- debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
- {
- if (*tp == data)
- return (tree) data;
- else
- return NULL;
- }
- DEBUG_FUNCTION bool
- debug_find_tree (tree top, tree search)
- {
- return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
- }
- /* Declare the variables created by the inliner.  Add all the variables in
- VARS to the function's local decls and, if BLOCK is non-NULL, to its
- BLOCK_VARS.  */
- static void
- declare_inline_vars (tree block, tree vars)
- {
- tree t;
- for (t = vars; t; t = DECL_CHAIN (t))
- {
- DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
- gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
- add_local_decl (cfun, t);
- }
- if (block)
- BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
- }
- /* Finish copying a DECL.  DECL originally lived in ID->src_fn and its copy,
- COPY, will live in ID->dst_fn.  Fix up COPY's debug flags, abstract origin,
- RTL and DECL_CONTEXT accordingly, and return it.  */
- static tree
- copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
- {
- /* Don't generate debug information for the copy if we wouldn't have
- generated it for the original either.  */
- DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
- DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
- /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
- declaration inspired this copy. */
- DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
- /* The new variable/label has no RTL, yet. */
- if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
- && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
- SET_DECL_RTL (copy, 0);
- /* These args would always appear unused, if not for this. */
- TREE_USED (copy) = 1;
- /* Set the context for the new declaration. */
- if (!DECL_CONTEXT (decl))
- /* Globals stay global. */
- ;
- else if (DECL_CONTEXT (decl) != id->src_fn)
- /* Things that weren't in the scope of the function we're inlining
- from aren't in the scope we're inlining to, either. */
- ;
- else if (TREE_STATIC (decl))
- /* Function-scoped static variables should stay in the original
- function. */
- ;
- else
- /* Ordinary automatic local variables are now in the scope of the
- new function. */
- DECL_CONTEXT (copy) = id->dst_fn;
- return copy;
- }
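- /* Illustrative sketch, not part of the original file: the context ladder at
-    the end of copy_decl_for_dup_finish, reduced to a plain helper.  The
-    struct is a hypothetical stand-in; the point is the order of the tests:
-    globals and out-of-scope decls keep their context, function-scoped
-    statics stay with the source function, and only ordinary automatics are
-    moved into the destination function.  */
- struct toy_decl
- {
-   void *context;   /* enclosing function, or NULL for globals */
-   int is_static;
- };
- static void
- toy_move_context (struct toy_decl *copy, const struct toy_decl *orig,
-                   void *src_fn, void *dst_fn)
- {
-   if (!orig->context)
-     ;                            /* globals stay global                 */
-   else if (orig->context != src_fn)
-     ;                            /* not in the function being copied    */
-   else if (orig->is_static)
-     ;                            /* function-scoped statics stay put    */
-   else
-     copy->context = dst_fn;      /* ordinary automatics move to dst_fn  */
- }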
- static tree
- copy_decl_to_var (tree decl, copy_body_data *id)
- {
- tree copy, type;
- gcc_assert (TREE_CODE (decl) == PARM_DECL
- || TREE_CODE (decl) == RESULT_DECL);
- type = TREE_TYPE (decl);
- copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
- VAR_DECL, DECL_NAME (decl), type);
- if (DECL_PT_UID_SET_P (decl))
- SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
- TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
- TREE_READONLY (copy) = TREE_READONLY (decl);
- TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
- DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
- return copy_decl_for_dup_finish (id, decl, copy);
- }
- /* Like copy_decl_to_var, but create a return slot object instead of a
- pointer variable for return by invisible reference. */
- static tree
- copy_result_decl_to_var (tree decl, copy_body_data *id)
- {
- tree copy, type;
- gcc_assert (TREE_CODE (decl) == PARM_DECL
- || TREE_CODE (decl) == RESULT_DECL);
- type = TREE_TYPE (decl);
- if (DECL_BY_REFERENCE (decl))
- type = TREE_TYPE (type);
- copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
- VAR_DECL, DECL_NAME (decl), type);
- if (DECL_PT_UID_SET_P (decl))
- SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
- TREE_READONLY (copy) = TREE_READONLY (decl);
- TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
- if (!DECL_BY_REFERENCE (decl))
- {
- TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
- DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
- }
- return copy_decl_for_dup_finish (id, decl, copy);
- }
- tree
- copy_decl_no_change (tree decl, copy_body_data *id)
- {
- tree copy;
- copy = copy_node (decl);
- /* The COPY is not abstract; it will be generated in DST_FN. */
- DECL_ABSTRACT_P (copy) = false;
- lang_hooks.dup_lang_specific_decl (copy);
- /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
- been taken; it's for internal bookkeeping in expand_goto_internal. */
- if (TREE_CODE (copy) == LABEL_DECL)
- {
- TREE_ADDRESSABLE (copy) = 0;
- LABEL_DECL_UID (copy) = -1;
- }
- return copy_decl_for_dup_finish (id, decl, copy);
- }
- static tree
- copy_decl_maybe_to_var (tree decl, copy_body_data *id)
- {
- if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
- return copy_decl_to_var (decl, id);
- else
- return copy_decl_no_change (decl, id);
- }
- /* Return a copy of the function's argument tree. */
- static tree
- copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
- bitmap args_to_skip, tree *vars)
- {
- tree arg, *parg;
- tree new_parm = NULL;
- int i = 0;
- parg = &new_parm;
- for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
- if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
- {
- tree new_tree = remap_decl (arg, id);
- if (TREE_CODE (new_tree) != PARM_DECL)
- new_tree = id->copy_decl (arg, id);
- lang_hooks.dup_lang_specific_decl (new_tree);
- *parg = new_tree;
- parg = &DECL_CHAIN (new_tree);
- }
- else if (!id->decl_map->get (arg))
- {
- /* Make an equivalent VAR_DECL.  If the argument was used as a
- temporary variable later in the function, its uses will be
- replaced by this local variable.  */
- tree var = copy_decl_to_var (arg, id);
- insert_decl_map (id, arg, var);
- /* Declare this new variable. */
- DECL_CHAIN (var) = *vars;
- *vars = var;
- }
- return new_parm;
- }
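- /* Illustrative sketch, not part of the original file: the tail-pointer
-    idiom copy_arguments_for_versioning uses to rebuild the PARM_DECL chain
-    while dropping the entries named in ARGS_TO_SKIP.  The list type and the
-    bitmask are simplified stand-ins; only the *parg = new_tree; parg =
-    &DECL_CHAIN (new_tree) shape mirrors the code above.  */
- #include <stdlib.h>
- struct toy_parm { int value; struct toy_parm *next; };
- static struct toy_parm *
- toy_copy_skipping (const struct toy_parm *orig, unsigned long skip_mask)
- {
-   struct toy_parm *head = NULL;
-   struct toy_parm **tail = &head;   /* always the next link to fill in */
-   int i = 0;
-   for (const struct toy_parm *p = orig; p; p = p->next, i++)
-     {
-       if (skip_mask & (1ul << i))
-         continue;                   /* skipped parameter: no node in copy */
-       struct toy_parm *c = (struct toy_parm *) malloc (sizeof (*c));
-       c->value = p->value;
-       c->next = NULL;
-       *tail = c;                    /* append without a head special case */
-       tail = &c->next;
-     }
-   return head;
- }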
- /* Return a copy of the function's static chain. */
- static tree
- copy_static_chain (tree static_chain, copy_body_data * id)
- {
- tree *chain_copy, *pvar;
- chain_copy = &static_chain;
- for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
- {
- tree new_tree = remap_decl (*pvar, id);
- lang_hooks.dup_lang_specific_decl (new_tree);
- DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
- *pvar = new_tree;
- }
- return static_chain;
- }
- /* Return true if the function is allowed to be versioned.
- This is a guard for the versioning functionality. */
- bool
- tree_versionable_function_p (tree fndecl)
- {
- return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
- && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
- }
- /* Delete all unreachable basic blocks and update the callgraph.
- Doing so is somewhat nontrivial because we need to update all clones and
- remove inline functions that become unreachable.  */
- static bool
- delete_unreachable_blocks_update_callgraph (copy_body_data *id)
- {
- bool changed = false;
- basic_block b, next_bb;
- find_unreachable_blocks ();
- /* Delete all unreachable basic blocks. */
- for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
- != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
- {
- next_bb = b->next_bb;
- if (!(b->flags & BB_REACHABLE))
- {
- gimple_stmt_iterator bsi;
- for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
- {
- struct cgraph_edge *e;
- struct cgraph_node *node;
- id->dst_node->remove_stmt_references (gsi_stmt (bsi));
- if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
- && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
- {
- if (!e->inline_failed)
- e->callee->remove_symbol_and_inline_clones (id->dst_node);
- else
- e->remove ();
- }
- if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
- && id->dst_node->clones)
- for (node = id->dst_node->clones; node != id->dst_node;)
- {
- node->remove_stmt_references (gsi_stmt (bsi));
- if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
- && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
- {
- if (!e->inline_failed)
- e->callee->remove_symbol_and_inline_clones (id->dst_node);
- else
- e->remove ();
- }
- if (node->clones)
- node = node->clones;
- else if (node->next_sibling_clone)
- node = node->next_sibling_clone;
- else
- {
- while (node != id->dst_node && !node->next_sibling_clone)
- node = node->clone_of;
- if (node != id->dst_node)
- node = node->next_sibling_clone;
- }
- }
- }
- delete_basic_block (b);
- changed = true;
- }
- }
- return changed;
- }
- /* Update clone info after duplication. */
- static void
- update_clone_info (copy_body_data * id)
- {
- struct cgraph_node *node;
- if (!id->dst_node->clones)
- return;
- for (node = id->dst_node->clones; node != id->dst_node;)
- {
- /* First update replace maps to match the new body. */
- if (node->clone.tree_map)
- {
- unsigned int i;
- for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
- {
- struct ipa_replace_map *replace_info;
- replace_info = (*node->clone.tree_map)[i];
- walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
- walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
- }
- }
- if (node->clones)
- node = node->clones;
- else if (node->next_sibling_clone)
- node = node->next_sibling_clone;
- else
- {
- while (node != id->dst_node && !node->next_sibling_clone)
- node = node->clone_of;
- if (node != id->dst_node)
- node = node->next_sibling_clone;
- }
- }
- }
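- /* Illustrative sketch, not part of the original file: the stackless preorder
-    walk that update_clone_info and delete_unreachable_blocks_update_callgraph
-    use to visit every clone of DST_NODE.  The node type is a hypothetical
-    stand-in with the same child / sibling / parent links as cgraph_node's
-    clones, next_sibling_clone and clone_of fields.  */
- struct toy_clone
- {
-   struct toy_clone *clones;             /* first child  */
-   struct toy_clone *next_sibling_clone; /* next sibling */
-   struct toy_clone *clone_of;           /* parent       */
- };
- static void
- toy_visit_all_clones (struct toy_clone *root,
-                       void (*visit) (struct toy_clone *))
- {
-   if (!root->clones)
-     return;
-   for (struct toy_clone *node = root->clones; node != root;)
-     {
-       visit (node);
-       if (node->clones)                      /* descend first ...          */
-         node = node->clones;
-       else if (node->next_sibling_clone)     /* ... then move sideways ... */
-         node = node->next_sibling_clone;
-       else
-         {
-           /* ... otherwise climb until a sibling exists or we are back at
-              the root, which ends the walk.  */
-           while (node != root && !node->next_sibling_clone)
-             node = node->clone_of;
-           if (node != root)
-             node = node->next_sibling_clone;
-         }
-     }
- }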
- /* Create a copy of a function's tree.
- OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
- of the original function and the new copied function
- respectively. In case we want to replace a DECL
- tree with another tree while duplicating the function's
- body, TREE_MAP represents the mapping between these
- trees. If UPDATE_CLONES is set, the call_stmt fields
- of edges of clones of the function will be updated.
- If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
- from the new version.
- If SKIP_RETURN is true, the new version will return void.
- If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
- If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
- */
- void
- tree_function_versioning (tree old_decl, tree new_decl,
- vec<ipa_replace_map *, va_gc> *tree_map,
- bool update_clones, bitmap args_to_skip,
- bool skip_return, bitmap blocks_to_copy,
- basic_block new_entry)
- {
- struct cgraph_node *old_version_node;
- struct cgraph_node *new_version_node;
- copy_body_data id;
- tree p;
- unsigned i;
- struct ipa_replace_map *replace_info;
- basic_block old_entry_block, bb;
- auto_vec<gimple, 10> init_stmts;
- tree vars = NULL_TREE;
- gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
- && TREE_CODE (new_decl) == FUNCTION_DECL);
- DECL_POSSIBLY_INLINED (old_decl) = 1;
- old_version_node = cgraph_node::get (old_decl);
- gcc_checking_assert (old_version_node);
- new_version_node = cgraph_node::get (new_decl);
- gcc_checking_assert (new_version_node);
- /* Copy over debug args. */
- if (DECL_HAS_DEBUG_ARGS_P (old_decl))
- {
- vec<tree, va_gc> **new_debug_args, **old_debug_args;
- gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
- DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
- old_debug_args = decl_debug_args_lookup (old_decl);
- if (old_debug_args)
- {
- new_debug_args = decl_debug_args_insert (new_decl);
- *new_debug_args = vec_safe_copy (*old_debug_args);
- }
- }
- /* Output the inlining info for this abstract function, since it has been
- inlined. If we don't do this now, we can lose the information about the
- variables in the function when the blocks get blown away as soon as we
- remove the cgraph node. */
- (*debug_hooks->outlining_inline_function) (old_decl);
- DECL_ARTIFICIAL (new_decl) = 1;
- DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
- if (DECL_ORIGIN (old_decl) == old_decl)
- old_version_node->used_as_abstract_origin = true;
- DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
- /* Prepare the data structures for the tree copy. */
- memset (&id, 0, sizeof (id));
- /* Collect statements that will need folding after the copy.  */
- id.statements_to_fold = new hash_set<gimple>;
- id.decl_map = new hash_map<tree, tree>;
- id.debug_map = NULL;
- id.src_fn = old_decl;
- id.dst_fn = new_decl;
- id.src_node = old_version_node;
- id.dst_node = new_version_node;
- id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
- id.blocks_to_copy = blocks_to_copy;
- id.copy_decl = copy_decl_no_change;
- id.transform_call_graph_edges
- = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
- id.transform_new_cfg = true;
- id.transform_return_to_modify = false;
- id.transform_parameter = false;
- id.transform_lang_insert_block = NULL;
- old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
- (DECL_STRUCT_FUNCTION (old_decl));
- DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
- DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
- initialize_cfun (new_decl, old_decl,
- old_entry_block->count);
- if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
- DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
- = id.src_cfun->gimple_df->ipa_pta;
- /* Copy the function's static chain. */
- p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
- if (p)
- DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
- copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
- &id);
- /* If there's a tree_map, prepare for substitution. */
- if (tree_map)
- for (i = 0; i < tree_map->length (); i++)
- {
- gimple init;
- replace_info = (*tree_map)[i];
- if (replace_info->replace_p)
- {
- if (!replace_info->old_tree)
- {
- int i = replace_info->parm_num;
- tree parm;
- tree req_type;
- for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
- i --;
- replace_info->old_tree = parm;
- req_type = TREE_TYPE (parm);
- if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
- {
- if (fold_convertible_p (req_type, replace_info->new_tree))
- replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
- else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
- replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
- else
- {
- if (dump_file)
- {
- fprintf (dump_file, " const ");
- print_generic_expr (dump_file, replace_info->new_tree, 0);
- fprintf (dump_file, " can't be converted to param ");
- print_generic_expr (dump_file, parm, 0);
- fprintf (dump_file, "\n");
- }
- replace_info->old_tree = NULL;
- }
- }
- }
- else
- gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
- if (replace_info->old_tree)
- {
- init = setup_one_parameter (&id, replace_info->old_tree,
- replace_info->new_tree, id.src_fn,
- NULL,
- &vars);
- if (init)
- init_stmts.safe_push (init);
- }
- }
- }
- /* Copy the function's arguments. */
- if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
- DECL_ARGUMENTS (new_decl) =
- copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
- args_to_skip, &vars);
- DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
- BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
- declare_inline_vars (DECL_INITIAL (new_decl), vars);
- if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
- /* Add local vars. */
- add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
- if (DECL_RESULT (old_decl) == NULL_TREE)
- ;
- else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
- {
- DECL_RESULT (new_decl)
- = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
- RESULT_DECL, NULL_TREE, void_type_node);
- DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
- cfun->returns_struct = 0;
- cfun->returns_pcc_struct = 0;
- }
- else
- {
- tree old_name;
- DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
- lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
- if (gimple_in_ssa_p (id.src_cfun)
- && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
- && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
- {
- tree new_name = make_ssa_name (DECL_RESULT (new_decl));
- insert_decl_map (&id, old_name, new_name);
- SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
- set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
- }
- }
- /* Set up the destination function's loop tree.  */
- if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
- {
- cfun->curr_properties &= ~PROP_loops;
- loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
- cfun->curr_properties |= PROP_loops;
- }
- /* Copy the function's body.  */
- copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
- ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
- new_entry);
- /* Renumber the lexical scoping (non-code) blocks consecutively. */
- number_blocks (new_decl);
- /* We want to create the BB unconditionally, so that the addition of
- debug stmts doesn't affect BB count, which may in the end cause
- codegen differences. */
- bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
- while (init_stmts.length ())
- insert_init_stmt (&id, bb, init_stmts.pop ());
- update_clone_info (&id);
- /* Remap the nonlocal_goto_save_area, if any. */
- if (cfun->nonlocal_goto_save_area)
- {
- struct walk_stmt_info wi;
- memset (&wi, 0, sizeof (wi));
- wi.info = &id;
- walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
- }
- /* Clean up. */
- delete id.decl_map;
- if (id.debug_map)
- delete id.debug_map;
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
- fold_marked_statements (0, id.statements_to_fold);
- delete id.statements_to_fold;
- fold_cond_expr_cond ();
- delete_unreachable_blocks_update_callgraph (&id);
- if (id.dst_node->definition)
- cgraph_edge::rebuild_references ();
- if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
- {
- calculate_dominance_info (CDI_DOMINATORS);
- fix_loop_structure (NULL);
- }
- update_ssa (TODO_update_ssa);
- /* After partial cloning we need to rescale frequencies, so they are
- within proper range in the cloned function. */
- if (new_entry)
- {
- struct cgraph_edge *e;
- rebuild_frequencies ();
- new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
- for (e = new_version_node->callees; e; e = e->next_callee)
- {
- basic_block bb = gimple_bb (e->call_stmt);
- e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
- bb);
- e->count = bb->count;
- }
- for (e = new_version_node->indirect_calls; e; e = e->next_callee)
- {
- basic_block bb = gimple_bb (e->call_stmt);
- e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
- bb);
- e->count = bb->count;
- }
- }
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
- gcc_assert (!id.debug_stmts.exists ());
- pop_cfun ();
- return;
- }
- /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
- the callee and return the inlined body on success.  */
- tree
- maybe_inline_call_in_expr (tree exp)
- {
- tree fn = get_callee_fndecl (exp);
- /* We can only try to inline "const" functions. */
- if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
- {
- call_expr_arg_iterator iter;
- copy_body_data id;
- tree param, arg, t;
- hash_map<tree, tree> decl_map;
- /* Remap the parameters. */
- for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
- param;
- param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
- decl_map.put (param, arg);
- memset (&id, 0, sizeof (id));
- id.src_fn = fn;
- id.dst_fn = current_function_decl;
- id.src_cfun = DECL_STRUCT_FUNCTION (fn);
- id.decl_map = &decl_map;
- id.copy_decl = copy_decl_no_change;
- id.transform_call_graph_edges = CB_CGE_DUPLICATE;
- id.transform_new_cfg = false;
- id.transform_return_to_modify = true;
- id.transform_parameter = true;
- id.transform_lang_insert_block = NULL;
- /* Make sure not to unshare trees behind the front-end's back
- since front-end specific mechanisms may rely on sharing. */
- id.regimplify = false;
- id.do_not_unshare = true;
- /* We're not inside any EH region. */
- id.eh_lp_nr = 0;
- t = copy_tree_body (&id);
- /* We can only return something suitable for use in a GENERIC
- expression tree. */
- if (TREE_CODE (t) == MODIFY_EXPR)
- return TREE_OPERAND (t, 1);
- }
- return NULL_TREE;
- }
- /* Duplicate a type, fields and all. */
- tree
- build_duplicate_type (tree type)
- {
- struct copy_body_data id;
- memset (&id, 0, sizeof (id));
- id.src_fn = current_function_decl;
- id.dst_fn = current_function_decl;
- id.src_cfun = cfun;
- id.decl_map = new hash_map<tree, tree>;
- id.debug_map = NULL;
- id.copy_decl = copy_decl_no_change;
- type = remap_type_1 (type, &id);
- delete id.decl_map;
- if (id.debug_map)
- delete id.debug_map;
- TYPE_CANONICAL (type) = type;
- return type;
- }
- /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
- parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
- evaluation. */
- tree
- copy_fn (tree fn, tree& parms, tree& result)
- {
- copy_body_data id;
- tree param;
- hash_map<tree, tree> decl_map;
- tree *p = &parms;
- *p = NULL_TREE;
- memset (&id, 0, sizeof (id));
- id.src_fn = fn;
- id.dst_fn = current_function_decl;
- id.src_cfun = DECL_STRUCT_FUNCTION (fn);
- id.decl_map = &decl_map;
- id.copy_decl = copy_decl_no_change;
- id.transform_call_graph_edges = CB_CGE_DUPLICATE;
- id.transform_new_cfg = false;
- id.transform_return_to_modify = false;
- id.transform_parameter = true;
- id.transform_lang_insert_block = NULL;
- /* Make sure not to unshare trees behind the front-end's back
- since front-end specific mechanisms may rely on sharing. */
- id.regimplify = false;
- id.do_not_unshare = true;
- /* We're not inside any EH region. */
- id.eh_lp_nr = 0;
- /* Remap the parameters and result and return them to the caller. */
- for (param = DECL_ARGUMENTS (fn);
- param;
- param = DECL_CHAIN (param))
- {
- *p = remap_decl (param, &id);
- p = &DECL_CHAIN (*p);
- }
- if (DECL_RESULT (fn))
- result = remap_decl (DECL_RESULT (fn), &id);
- else
- result = NULL_TREE;
- return copy_tree_body (&id);
- }