ipa-prop.c

/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;

  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements walked so far when analyzing this function.  */
  unsigned int aa_walked;
};

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return the index of the formal whose tree is PTREE in the function whose
   parameters are described by DESCRIPTORS.  Return -1 if it is not found.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return the index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
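
/* For illustration (a hypothetical sketch, not code from this file): given a
   definition such as

     int sum (int a, int b) { ... }

   DECL_ARGUMENTS yields the chain a -> b, linked via DECL_CHAIN, so the loop
   above fills in descriptors[0].decl == a and descriptors[1].decl == b.  */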
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
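
/* For illustration, output of the function above for a call with two
   arguments might look as follows (hypothetical values, layout following the
   format strings above):

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
	 Unknown alignment
       param 1: CONST: 4
	 Unknown alignment  */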
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be an unknown jump function, i.e. one conveying no information
   about the actual argument.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
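
/* For illustration, how the setters above relate to code in a caller whose
   first formal parameter is A (a hypothetical sketch; the actual construction
   of jump functions happens later in this file):

     bar (a);       simple pass-through of formal 0, operation NOP_EXPR
     bar (a + 4);   arithmetic pass-through, operation PLUS_EXPR, operand 4
     bar (&a->f);   ancestor of formal 0 with the byte offset of field f
     bar (7);       constant jump function with value 7  */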
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type produced by the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
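
/* For illustration, a constructor laid out according to the three sections
   described above (a hypothetical example):

     struct B : public A
     {
       B () : A ()    // 1) ancestor constructors run first
       {              // 2) VMT pointers of this object and its ancestors
		      //    have been stored, now denoting type B
	 x = f ();    // 3) only now does user code run; it may call virtual
       }              //    functions but no ancestor ctors or dtors
       int x;
     };  */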
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.
     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
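
/* Note on the wrapper above: for a pointer SSA name ARG, the built
   MEM_REF (ARG, 0) describes the dereference *ARG at offset zero, so the
   vdef walk tracks writes to the virtual table pointer of the pointed-to
   object itself rather than to the pointer.  */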
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
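
/* For illustration (a hypothetical dominator tree): if bb6 is immediately
   dominated by bb4, bb4 by bb2 and bb2 by the entry block, a query for bb6
   first inspects bb4; if bb4 holds no valid status for the parameter, bb2 is
   inspected next, and NULL is returned once the chain is exhausted.  */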
/* Get the AA status structure for the given BB and parameter with INDEX.
   Allocate the structures and/or initialize the result with a dominating
   description as necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value that is not modified in this function before reaching the statement
   STMT.  FBI holds information gathered about the function so far, which does
   not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided the parameter
   has not been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
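
/* For illustration: in the GIMPLE sequence

     a.0_2 = a;
     bar (a.0_2);

   where A is an addressable parameter, calling load_from_unmodified_param on
   the first statement returns A's index, provided the walk of virtual
   definitions proves A has not been written to before that statement.  */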
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							  gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but the
   former can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */
      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
958. /* Just like the previous function, but without the func_body_info pointer,
959. for users outside of this file. */
  960. bool
  961. ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
  962. tree op, int *index_p, HOST_WIDE_INT *offset_p,
  963. bool *by_ref_p)
  964. {
  965. return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
  966. offset_p, NULL, by_ref_p);
  967. }
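/* For illustration (a hypothetical example):

     struct S { int a, b; };
     int get_b (struct S *s) { return s->b; }

   the load of s->b satisfies ipa_load_from_parm_agg with *INDEX_P set to 0,
   *OFFSET_P set to the bit offset of field b and *BY_REF_P set to true,
   provided nothing may modify *s before the load.  */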
  968. /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
  969. of an assignment statement STMT, try to determine whether we are actually
  970. handling any of the following cases and construct an appropriate jump
  971. function into JFUNC if so:
  972. 1) The passed value is loaded from a formal parameter which is not a gimple
  973. register (most probably because it is addressable, the value has to be
  974. scalar) and we can guarantee the value has not changed. This case can
  975. therefore be described by a simple pass-through jump function. For example:
  976. foo (int a)
  977. {
  978. int a.0;
  979. a.0_2 = a;
  980. bar (a.0_2);
  981. 2) The passed value can be described by a simple arithmetic pass-through
  982. jump function. E.g.
  983. foo (int a)
  984. {
  985. int D.2064;
  986. D.2064_4 = a.1(D) + 4;
  987. bar (D.2064_4);
  988. This case can also occur in combination of the previous one, e.g.:
  989. foo (int a, int z)
  990. {
  991. int a.0;
  992. int D.2064;
  993. a.0_3 = a;
  994. D.2064_4 = a.0_3 + 4;
  995. foo (D.2064_4);
  996. 3) The passed value is an address of an object within another one (which
  997. also passed by reference). Such situations are described by an ancestor
  998. jump function and describe situations such as:
  999. B::foo() (struct B * const this)
  1000. {
  1001. struct A * D.1845;
  1002. D.1845_2 = &this_1(D)->D.1748;
  1003. A::bar (D.1845_2);
1004. INFO is the structure describing individual parameters at different
1005. stages of IPA optimizations. FBI contains the information that is
1006. only needed for intraprocedural analysis. */
  1007. static void
  1008. compute_complex_assign_jump_func (struct func_body_info *fbi,
  1009. struct ipa_node_params *info,
  1010. struct ipa_jump_func *jfunc,
  1011. gcall *call, gimple stmt, tree name,
  1012. tree param_type)
  1013. {
  1014. HOST_WIDE_INT offset, size, max_size;
  1015. tree op1, tc_ssa, base, ssa;
  1016. int index;
  1017. op1 = gimple_assign_rhs1 (stmt);
  1018. if (TREE_CODE (op1) == SSA_NAME)
  1019. {
  1020. if (SSA_NAME_IS_DEFAULT_DEF (op1))
  1021. index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
  1022. else
  1023. index = load_from_unmodified_param (fbi, info->descriptors,
  1024. SSA_NAME_DEF_STMT (op1));
  1025. tc_ssa = op1;
  1026. }
  1027. else
  1028. {
  1029. index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  1030. tc_ssa = gimple_assign_lhs (stmt);
  1031. }
  1032. if (index >= 0)
  1033. {
  1034. tree op2 = gimple_assign_rhs2 (stmt);
  1035. if (op2)
  1036. {
  1037. if (!is_gimple_ip_invariant (op2)
  1038. || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
  1039. && !useless_type_conversion_p (TREE_TYPE (name),
  1040. TREE_TYPE (op1))))
  1041. return;
  1042. ipa_set_jf_arith_pass_through (jfunc, index, op2,
  1043. gimple_assign_rhs_code (stmt));
  1044. }
  1045. else if (gimple_assign_single_p (stmt))
  1046. {
  1047. bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
  1048. ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
  1049. }
  1050. return;
  1051. }
  1052. if (TREE_CODE (op1) != ADDR_EXPR)
  1053. return;
  1054. op1 = TREE_OPERAND (op1, 0);
  1055. if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
  1056. return;
  1057. base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  1058. if (TREE_CODE (base) != MEM_REF
  1059. /* If this is a varying address, punt. */
  1060. || max_size == -1
  1061. || max_size != size)
  1062. return;
  1063. offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  1064. ssa = TREE_OPERAND (base, 0);
  1065. if (TREE_CODE (ssa) != SSA_NAME
  1066. || !SSA_NAME_IS_DEFAULT_DEF (ssa)
  1067. || offset < 0)
  1068. return;
  1069. /* Dynamic types are changed in constructors and destructors. */
  1070. index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  1071. if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
  1072. ipa_set_ancestor_jf (jfunc, offset, index,
  1073. parm_ref_data_pass_through_p (fbi, index, call, ssa));
  1074. }
  1075. /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
  1076. it looks like:
  1077. iftmp.1_3 = &obj_2(D)->D.1762;
  1078. The base of the MEM_REF must be a default definition SSA NAME of a
1079. parameter. Return NULL_TREE if it looks otherwise. In case of success, the
  1080. whole MEM_REF expression is returned and the offset calculated from any
  1081. handled components and the MEM_REF itself is stored into *OFFSET. The whole
  1082. RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
  1083. static tree
  1084. get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
  1085. {
  1086. HOST_WIDE_INT size, max_size;
  1087. tree expr, parm, obj;
  1088. if (!gimple_assign_single_p (assign))
  1089. return NULL_TREE;
  1090. expr = gimple_assign_rhs1 (assign);
  1091. if (TREE_CODE (expr) != ADDR_EXPR)
  1092. return NULL_TREE;
  1093. expr = TREE_OPERAND (expr, 0);
  1094. obj = expr;
  1095. expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
  1096. if (TREE_CODE (expr) != MEM_REF
  1097. /* If this is a varying address, punt. */
  1098. || max_size == -1
  1099. || max_size != size
  1100. || *offset < 0)
  1101. return NULL_TREE;
  1102. parm = TREE_OPERAND (expr, 0);
  1103. if (TREE_CODE (parm) != SSA_NAME
  1104. || !SSA_NAME_IS_DEFAULT_DEF (parm)
  1105. || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
  1106. return NULL_TREE;
  1107. *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  1108. *obj_p = obj;
  1109. return expr;
  1110. }
  1111. /* Given that an actual argument is an SSA_NAME that is a result of a phi
  1112. statement PHI, try to find out whether NAME is in fact a
  1113. multiple-inheritance typecast from a descendant into an ancestor of a formal
  1114. parameter and thus can be described by an ancestor jump function and if so,
  1115. write the appropriate function into JFUNC.
  1116. Essentially we want to match the following pattern:
  1117. if (obj_2(D) != 0B)
  1118. goto <bb 3>;
  1119. else
  1120. goto <bb 4>;
  1121. <bb 3>:
  1122. iftmp.1_3 = &obj_2(D)->D.1762;
  1123. <bb 4>:
  1124. # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
  1125. D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
  1126. return D.1879_6; */
  1127. static void
  1128. compute_complex_ancestor_jump_func (struct func_body_info *fbi,
  1129. struct ipa_node_params *info,
  1130. struct ipa_jump_func *jfunc,
  1131. gcall *call, gphi *phi)
  1132. {
  1133. HOST_WIDE_INT offset;
  1134. gimple assign, cond;
  1135. basic_block phi_bb, assign_bb, cond_bb;
  1136. tree tmp, parm, expr, obj;
  1137. int index, i;
  1138. if (gimple_phi_num_args (phi) != 2)
  1139. return;
  1140. if (integer_zerop (PHI_ARG_DEF (phi, 1)))
  1141. tmp = PHI_ARG_DEF (phi, 0);
  1142. else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
  1143. tmp = PHI_ARG_DEF (phi, 1);
  1144. else
  1145. return;
  1146. if (TREE_CODE (tmp) != SSA_NAME
  1147. || SSA_NAME_IS_DEFAULT_DEF (tmp)
  1148. || !POINTER_TYPE_P (TREE_TYPE (tmp))
  1149. || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
  1150. return;
  1151. assign = SSA_NAME_DEF_STMT (tmp);
  1152. assign_bb = gimple_bb (assign);
  1153. if (!single_pred_p (assign_bb))
  1154. return;
  1155. expr = get_ancestor_addr_info (assign, &obj, &offset);
  1156. if (!expr)
  1157. return;
  1158. parm = TREE_OPERAND (expr, 0);
  1159. index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  1160. if (index < 0)
  1161. return;
  1162. cond_bb = single_pred (assign_bb);
  1163. cond = last_stmt (cond_bb);
  1164. if (!cond
  1165. || gimple_code (cond) != GIMPLE_COND
  1166. || gimple_cond_code (cond) != NE_EXPR
  1167. || gimple_cond_lhs (cond) != parm
  1168. || !integer_zerop (gimple_cond_rhs (cond)))
  1169. return;
  1170. phi_bb = gimple_bb (phi);
  1171. for (i = 0; i < 2; i++)
  1172. {
  1173. basic_block pred = EDGE_PRED (phi_bb, i)->src;
  1174. if (pred != assign_bb && pred != cond_bb)
  1175. return;
  1176. }
  1177. ipa_set_ancestor_jf (jfunc, offset, index,
  1178. parm_ref_data_pass_through_p (fbi, index, call, parm));
  1179. }
  1180. /* Inspect the given TYPE and return true iff it has the same structure (the
  1181. same number of fields of the same types) as a C++ member pointer. If
  1182. METHOD_PTR and DELTA are non-NULL, store the trees representing the
  1183. corresponding fields there. */
  1184. static bool
  1185. type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
  1186. {
  1187. tree fld;
  1188. if (TREE_CODE (type) != RECORD_TYPE)
  1189. return false;
  1190. fld = TYPE_FIELDS (type);
  1191. if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
  1192. || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
  1193. || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
  1194. return false;
  1195. if (method_ptr)
  1196. *method_ptr = fld;
  1197. fld = DECL_CHAIN (fld);
  1198. if (!fld || INTEGRAL_TYPE_P (fld)
  1199. || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
  1200. return false;
  1201. if (delta)
  1202. *delta = fld;
  1203. if (DECL_CHAIN (fld))
  1204. return false;
  1205. return true;
  1206. }
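/* For reference, a record matched by this predicate corresponds roughly to
   the common representation of a C++ pointer to member function (a sketch,
   field names and types assumed):

     struct ptrmemfunc
     {
       void (*__pfn) (void);
       long __delta;
     };

   where __pfn holds the method pointer (or a vtable index) and __delta the
   adjustment of the this pointer.  */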
  1207. /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
  1208. return the rhs of its defining statement. Otherwise return RHS as it
  1209. is. */
  1210. static inline tree
  1211. get_ssa_def_if_simple_copy (tree rhs)
  1212. {
  1213. while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
  1214. {
  1215. gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
  1216. if (gimple_assign_single_p (def_stmt))
  1217. rhs = gimple_assign_rhs1 (def_stmt);
  1218. else
  1219. break;
  1220. }
  1221. return rhs;
  1222. }
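/* E.g. for the chain (a sketch)

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) walks back through both copies and
   returns a_1(D), the default definition of the parameter.  */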
1223. /* Simple linked list, describing known contents of an aggregate before the
1224. call. */
  1225. struct ipa_known_agg_contents_list
  1226. {
  1227. /* Offset and size of the described part of the aggregate. */
  1228. HOST_WIDE_INT offset, size;
  1229. /* Known constant value or NULL if the contents is known to be unknown. */
  1230. tree constant;
  1231. /* Pointer to the next structure in the list. */
  1232. struct ipa_known_agg_contents_list *next;
  1233. };
1234. /* Find the proper place in the linked list of ipa_known_agg_contents_list
1235. structures where to put a new one with the given LHS_OFFSET and LHS_SIZE.
1236. Return NULL if there is a partial overlap with an existing element; if such
1237. an element is already there, set *ALREADY_THERE to true. */
  1238. static struct ipa_known_agg_contents_list **
  1239. get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
  1240. HOST_WIDE_INT lhs_offset,
  1241. HOST_WIDE_INT lhs_size,
  1242. bool *already_there)
  1243. {
  1244. struct ipa_known_agg_contents_list **p = list;
  1245. while (*p && (*p)->offset < lhs_offset)
  1246. {
  1247. if ((*p)->offset + (*p)->size > lhs_offset)
  1248. return NULL;
  1249. p = &(*p)->next;
  1250. }
  1251. if (*p && (*p)->offset < lhs_offset + lhs_size)
  1252. {
  1253. if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
  1254. /* We already know this value is subsequently overwritten with
  1255. something else. */
  1256. *already_there = true;
  1257. else
  1258. /* Otherwise this is a partial overlap which we cannot
  1259. represent. */
  1260. return NULL;
  1261. }
  1262. return p;
  1263. }
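/* For instance (a sketch): with entries describing bits 0..31 and 64..95
   already in the list, a request for offset 32 and size 32 returns the link
   between them, a request for offset 0 and size 32 sets *ALREADY_THERE, and
   a request for offset 16 and size 32 is a partial overlap and yields
   NULL.  */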
  1264. /* Build aggregate jump function from LIST, assuming there are exactly
1265. CONST_COUNT constant entries there and that the offset of the passed argument
  1266. is ARG_OFFSET and store it into JFUNC. */
  1267. static void
  1268. build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
  1269. int const_count, HOST_WIDE_INT arg_offset,
  1270. struct ipa_jump_func *jfunc)
  1271. {
  1272. vec_alloc (jfunc->agg.items, const_count);
  1273. while (list)
  1274. {
  1275. if (list->constant)
  1276. {
  1277. struct ipa_agg_jf_item item;
  1278. item.offset = list->offset - arg_offset;
  1279. gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
  1280. item.value = unshare_expr_without_location (list->constant);
  1281. jfunc->agg.items->quick_push (item);
  1282. }
  1283. list = list->next;
  1284. }
  1285. }
  1286. /* Traverse statements from CALL backwards, scanning whether an aggregate given
  1287. in ARG is filled in with constant values. ARG can either be an aggregate
  1288. expression or a pointer to an aggregate. ARG_TYPE is the type of the
  1289. aggregate. JFUNC is the jump function into which the constants are
  1290. subsequently stored. */
  1291. static void
  1292. determine_locally_known_aggregate_parts (gcall *call, tree arg,
  1293. tree arg_type,
  1294. struct ipa_jump_func *jfunc)
  1295. {
  1296. struct ipa_known_agg_contents_list *list = NULL;
  1297. int item_count = 0, const_count = 0;
  1298. HOST_WIDE_INT arg_offset, arg_size;
  1299. gimple_stmt_iterator gsi;
  1300. tree arg_base;
  1301. bool check_ref, by_ref;
  1302. ao_ref r;
  1303. /* The function operates in three stages. First, we prepare check_ref, r,
  1304. arg_base and arg_offset based on what is actually passed as an actual
  1305. argument. */
  1306. if (POINTER_TYPE_P (arg_type))
  1307. {
  1308. by_ref = true;
  1309. if (TREE_CODE (arg) == SSA_NAME)
  1310. {
  1311. tree type_size;
  1312. if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
  1313. return;
  1314. check_ref = true;
  1315. arg_base = arg;
  1316. arg_offset = 0;
  1317. type_size = TYPE_SIZE (TREE_TYPE (arg_type));
  1318. arg_size = tree_to_uhwi (type_size);
  1319. ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
  1320. }
  1321. else if (TREE_CODE (arg) == ADDR_EXPR)
  1322. {
  1323. HOST_WIDE_INT arg_max_size;
  1324. arg = TREE_OPERAND (arg, 0);
  1325. arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
  1326. &arg_max_size);
  1327. if (arg_max_size == -1
  1328. || arg_max_size != arg_size
  1329. || arg_offset < 0)
  1330. return;
  1331. if (DECL_P (arg_base))
  1332. {
  1333. check_ref = false;
  1334. ao_ref_init (&r, arg_base);
  1335. }
  1336. else
  1337. return;
  1338. }
  1339. else
  1340. return;
  1341. }
  1342. else
  1343. {
  1344. HOST_WIDE_INT arg_max_size;
  1345. gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
  1346. by_ref = false;
  1347. check_ref = false;
  1348. arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
  1349. &arg_max_size);
  1350. if (arg_max_size == -1
  1351. || arg_max_size != arg_size
  1352. || arg_offset < 0)
  1353. return;
  1354. ao_ref_init (&r, arg);
  1355. }
1356. /* Second stage walks back the BB, looks at individual statements and as long
1357. as it is confident of how the statements affect contents of the
1358. aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1359. structures describing it. */
  1360. gsi = gsi_for_stmt (call);
  1361. gsi_prev (&gsi);
  1362. for (; !gsi_end_p (gsi); gsi_prev (&gsi))
  1363. {
  1364. struct ipa_known_agg_contents_list *n, **p;
  1365. gimple stmt = gsi_stmt (gsi);
  1366. HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
  1367. tree lhs, rhs, lhs_base;
  1368. if (!stmt_may_clobber_ref_p_1 (stmt, &r))
  1369. continue;
  1370. if (!gimple_assign_single_p (stmt))
  1371. break;
  1372. lhs = gimple_assign_lhs (stmt);
  1373. rhs = gimple_assign_rhs1 (stmt);
  1374. if (!is_gimple_reg_type (TREE_TYPE (rhs))
  1375. || TREE_CODE (lhs) == BIT_FIELD_REF
  1376. || contains_bitfld_component_ref_p (lhs))
  1377. break;
  1378. lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
  1379. &lhs_max_size);
  1380. if (lhs_max_size == -1
  1381. || lhs_max_size != lhs_size)
  1382. break;
  1383. if (check_ref)
  1384. {
  1385. if (TREE_CODE (lhs_base) != MEM_REF
  1386. || TREE_OPERAND (lhs_base, 0) != arg_base
  1387. || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
  1388. break;
  1389. }
  1390. else if (lhs_base != arg_base)
  1391. {
  1392. if (DECL_P (lhs_base))
  1393. continue;
  1394. else
  1395. break;
  1396. }
  1397. bool already_there = false;
  1398. p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
  1399. &already_there);
  1400. if (!p)
  1401. break;
  1402. if (already_there)
  1403. continue;
  1404. rhs = get_ssa_def_if_simple_copy (rhs);
  1405. n = XALLOCA (struct ipa_known_agg_contents_list);
  1406. n->size = lhs_size;
  1407. n->offset = lhs_offset;
  1408. if (is_gimple_ip_invariant (rhs))
  1409. {
  1410. n->constant = rhs;
  1411. const_count++;
  1412. }
  1413. else
  1414. n->constant = NULL_TREE;
  1415. n->next = *p;
  1416. *p = n;
  1417. item_count++;
  1418. if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
  1419. || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
  1420. break;
  1421. }
  1422. /* Third stage just goes over the list and creates an appropriate vector of
1423. ipa_agg_jf_item structures out of it, of course only if there are
  1424. any known constants to begin with. */
  1425. if (const_count)
  1426. {
  1427. jfunc->agg.by_ref = by_ref;
  1428. build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
  1429. }
  1430. }
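/* As an illustration (a hypothetical caller):

     struct S s;
     s.a = 1;
     s.b = 2;
     consume (&s);

   walking backwards from the call records the two constant stores, and the
   resulting aggregate jump function describes both of them, as long as no
   intervening statement may clobber s.  */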
  1431. static tree
  1432. ipa_get_callee_param_type (struct cgraph_edge *e, int i)
  1433. {
  1434. int n;
  1435. tree type = (e->callee
  1436. ? TREE_TYPE (e->callee->decl)
  1437. : gimple_call_fntype (e->call_stmt));
  1438. tree t = TYPE_ARG_TYPES (type);
  1439. for (n = 0; n < i; n++)
  1440. {
  1441. if (!t)
  1442. break;
  1443. t = TREE_CHAIN (t);
  1444. }
  1445. if (t)
  1446. return TREE_VALUE (t);
  1447. if (!e->callee)
  1448. return NULL;
  1449. t = DECL_ARGUMENTS (e->callee->decl);
  1450. for (n = 0; n < i; n++)
  1451. {
  1452. if (!t)
  1453. return NULL;
  1454. t = TREE_CHAIN (t);
  1455. }
  1456. if (t)
  1457. return TREE_TYPE (t);
  1458. return NULL;
  1459. }
1460. /* Compute jump functions for all arguments of callsite CS and insert the
  1461. information in the jump_functions array in the ipa_edge_args corresponding
  1462. to this callsite. */
  1463. static void
  1464. ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
  1465. struct cgraph_edge *cs)
  1466. {
  1467. struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  1468. struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  1469. gcall *call = cs->call_stmt;
  1470. int n, arg_num = gimple_call_num_args (call);
  1471. bool useful_context = false;
  1472. if (arg_num == 0 || args->jump_functions)
  1473. return;
  1474. vec_safe_grow_cleared (args->jump_functions, arg_num);
  1475. if (flag_devirtualize)
  1476. vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
  1477. if (gimple_call_internal_p (call))
  1478. return;
  1479. if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
  1480. return;
  1481. for (n = 0; n < arg_num; n++)
  1482. {
  1483. struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
  1484. tree arg = gimple_call_arg (call, n);
  1485. tree param_type = ipa_get_callee_param_type (cs, n);
  1486. if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
  1487. {
  1488. tree instance;
  1489. struct ipa_polymorphic_call_context context (cs->caller->decl,
  1490. arg, cs->call_stmt,
  1491. &instance);
  1492. context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
  1493. *ipa_get_ith_polymorhic_call_context (args, n) = context;
  1494. if (!context.useless_p ())
  1495. useful_context = true;
  1496. }
1497. if (POINTER_TYPE_P (TREE_TYPE (arg)))
  1498. {
  1499. unsigned HOST_WIDE_INT hwi_bitpos;
  1500. unsigned align;
  1501. if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
  1502. && align % BITS_PER_UNIT == 0
  1503. && hwi_bitpos % BITS_PER_UNIT == 0)
  1504. {
  1505. jfunc->alignment.known = true;
  1506. jfunc->alignment.align = align / BITS_PER_UNIT;
  1507. jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
  1508. }
  1509. else
  1510. gcc_assert (!jfunc->alignment.known);
  1511. }
  1512. else
  1513. gcc_assert (!jfunc->alignment.known);
  1514. if (is_gimple_ip_invariant (arg))
  1515. ipa_set_jf_constant (jfunc, arg, cs);
  1516. else if (!is_gimple_reg_type (TREE_TYPE (arg))
  1517. && TREE_CODE (arg) == PARM_DECL)
  1518. {
  1519. int index = ipa_get_param_decl_index (info, arg);
1520. gcc_assert (index >= 0);
  1521. /* Aggregate passed by value, check for pass-through, otherwise we
  1522. will attempt to fill in aggregate contents later in this
1523. for loop. */
  1524. if (parm_preserved_before_stmt_p (fbi, index, call, arg))
  1525. {
  1526. ipa_set_jf_simple_pass_through (jfunc, index, false);
  1527. continue;
  1528. }
  1529. }
  1530. else if (TREE_CODE (arg) == SSA_NAME)
  1531. {
  1532. if (SSA_NAME_IS_DEFAULT_DEF (arg))
  1533. {
  1534. int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
  1535. if (index >= 0)
  1536. {
  1537. bool agg_p;
  1538. agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
  1539. ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
  1540. }
  1541. }
  1542. else
  1543. {
  1544. gimple stmt = SSA_NAME_DEF_STMT (arg);
  1545. if (is_gimple_assign (stmt))
  1546. compute_complex_assign_jump_func (fbi, info, jfunc,
  1547. call, stmt, arg, param_type);
  1548. else if (gimple_code (stmt) == GIMPLE_PHI)
  1549. compute_complex_ancestor_jump_func (fbi, info, jfunc,
  1550. call,
  1551. as_a <gphi *> (stmt));
  1552. }
  1553. }
1554. /* If ARG is a pointer, we cannot use its type to determine the type of the
1555. aggregate passed (because type conversions are ignored in gimple). Usually
1556. we can safely get the type from the function declaration, but for K&R
1557. prototypes or variadic functions we can try our luck with the type of the
1558. pointer passed. TODO: Since we look for actual initialization of the memory
1559. object, we might do better to work out the type from the stores we find. */
  1560. if (!param_type)
  1561. param_type = TREE_TYPE (arg);
  1562. if ((jfunc->type != IPA_JF_PASS_THROUGH
  1563. || !ipa_get_jf_pass_through_agg_preserved (jfunc))
  1564. && (jfunc->type != IPA_JF_ANCESTOR
  1565. || !ipa_get_jf_ancestor_agg_preserved (jfunc))
  1566. && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
  1567. || POINTER_TYPE_P (param_type)))
  1568. determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
  1569. }
  1570. if (!useful_context)
  1571. vec_free (args->polymorphic_call_contexts);
  1572. }
  1573. /* Compute jump functions for all edges - both direct and indirect - outgoing
  1574. from BB. */
  1575. static void
  1576. ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
  1577. {
  1578. struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  1579. int i;
  1580. struct cgraph_edge *cs;
  1581. FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
  1582. {
  1583. struct cgraph_node *callee = cs->callee;
  1584. if (callee)
  1585. {
  1586. callee->ultimate_alias_target ();
  1587. /* We do not need to bother analyzing calls to unknown functions
  1588. unless they may become known during lto/whopr. */
  1589. if (!callee->definition && !flag_lto)
  1590. continue;
  1591. }
  1592. ipa_compute_jump_functions_for_edge (fbi, cs);
  1593. }
  1594. }
  1595. /* If STMT looks like a statement loading a value from a member pointer formal
  1596. parameter, return that parameter and store the offset of the field to
  1597. *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
  1598. might be clobbered). If USE_DELTA, then we look for a use of the delta
  1599. field rather than the pfn. */
  1600. static tree
  1601. ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
  1602. HOST_WIDE_INT *offset_p)
  1603. {
  1604. tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
  1605. if (!gimple_assign_single_p (stmt))
  1606. return NULL_TREE;
  1607. rhs = gimple_assign_rhs1 (stmt);
  1608. if (TREE_CODE (rhs) == COMPONENT_REF)
  1609. {
  1610. ref_field = TREE_OPERAND (rhs, 1);
  1611. rhs = TREE_OPERAND (rhs, 0);
  1612. }
  1613. else
  1614. ref_field = NULL_TREE;
  1615. if (TREE_CODE (rhs) != MEM_REF)
  1616. return NULL_TREE;
  1617. rec = TREE_OPERAND (rhs, 0);
  1618. if (TREE_CODE (rec) != ADDR_EXPR)
  1619. return NULL_TREE;
  1620. rec = TREE_OPERAND (rec, 0);
  1621. if (TREE_CODE (rec) != PARM_DECL
  1622. || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
  1623. return NULL_TREE;
  1624. ref_offset = TREE_OPERAND (rhs, 1);
  1625. if (use_delta)
  1626. fld = delta_field;
  1627. else
  1628. fld = ptr_field;
  1629. if (offset_p)
  1630. *offset_p = int_bit_position (fld);
  1631. if (ref_field)
  1632. {
  1633. if (integer_nonzerop (ref_offset))
  1634. return NULL_TREE;
  1635. return ref_field == fld ? rec : NULL_TREE;
  1636. }
  1637. else
  1638. return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
  1639. : NULL_TREE;
  1640. }
  1641. /* Returns true iff T is an SSA_NAME defined by a statement. */
  1642. static bool
  1643. ipa_is_ssa_with_stmt_def (tree t)
  1644. {
  1645. if (TREE_CODE (t) == SSA_NAME
  1646. && !SSA_NAME_IS_DEFAULT_DEF (t))
  1647. return true;
  1648. else
  1649. return false;
  1650. }
  1651. /* Find the indirect call graph edge corresponding to STMT and mark it as a
  1652. call to a parameter number PARAM_INDEX. NODE is the caller. Return the
  1653. indirect call graph edge. */
  1654. static struct cgraph_edge *
  1655. ipa_note_param_call (struct cgraph_node *node, int param_index,
  1656. gcall *stmt)
  1657. {
  1658. struct cgraph_edge *cs;
  1659. cs = node->get_edge (stmt);
  1660. cs->indirect_info->param_index = param_index;
  1661. cs->indirect_info->agg_contents = 0;
  1662. cs->indirect_info->member_ptr = 0;
  1663. return cs;
  1664. }
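/* A minimal example of a call that receives such a note (a sketch):

     void dispatch (void (*cb) (void))
     {
       cb ();
     }

   Here the indirect edge for the call to cb is marked with
   param_index 0.  */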
1665. /* Analyze the CALL and examine uses of formal parameters of the caller
1666. FBI->node (described by FBI->info, which also holds intermediate
1667. information about each formal parameter). Currently the function checks
  1668. whether the call calls a pointer that is a formal parameter and if so, the
  1669. parameter is marked with the called flag and an indirect call graph edge
  1670. describing the call is created. This is very simple for ordinary pointers
  1671. represented in SSA but not-so-nice when it comes to member pointers. The
  1672. ugly part of this function does nothing more than trying to match the
  1673. pattern of such a call. An example of such a pattern is the gimple dump
  1674. below, the call is on the last line:
  1675. <bb 2>:
  1676. f$__delta_5 = f.__delta;
  1677. f$__pfn_24 = f.__pfn;
  1678. or
  1679. <bb 2>:
  1680. f$__delta_5 = MEM[(struct *)&f];
  1681. f$__pfn_24 = MEM[(struct *)&f + 4B];
  1682. and a few lines below:
  1683. <bb 5>
  1684. D.2496_3 = (int) f$__pfn_24;
  1685. D.2497_4 = D.2496_3 & 1;
  1686. if (D.2497_4 != 0)
  1687. goto <bb 3>;
  1688. else
  1689. goto <bb 4>;
  1690. <bb 6>:
  1691. D.2500_7 = (unsigned int) f$__delta_5;
  1692. D.2501_8 = &S + D.2500_7;
  1693. D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
  1694. D.2503_10 = *D.2502_9;
  1695. D.2504_12 = f$__pfn_24 + -1;
  1696. D.2505_13 = (unsigned int) D.2504_12;
  1697. D.2506_14 = D.2503_10 + D.2505_13;
  1698. D.2507_15 = *D.2506_14;
  1699. iftmp.11_16 = (String:: *) D.2507_15;
  1700. <bb 7>:
  1701. # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
  1702. D.2500_19 = (unsigned int) f$__delta_5;
  1703. D.2508_20 = &S + D.2500_19;
  1704. D.2493_21 = iftmp.11_1 (D.2508_20, 4);
  1705. Such patterns are results of simple calls to a member pointer:
  1706. int doprinting (int (MyString::* f)(int) const)
  1707. {
  1708. MyString S ("somestring");
  1709. return (S.*f)(4);
  1710. }
  1711. Moreover, the function also looks for called pointers loaded from aggregates
  1712. passed by value or reference. */
  1713. static void
  1714. ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
  1715. tree target)
  1716. {
  1717. struct ipa_node_params *info = fbi->info;
  1718. HOST_WIDE_INT offset;
  1719. bool by_ref;
  1720. if (SSA_NAME_IS_DEFAULT_DEF (target))
  1721. {
  1722. tree var = SSA_NAME_VAR (target);
  1723. int index = ipa_get_param_decl_index (info, var);
  1724. if (index >= 0)
  1725. ipa_note_param_call (fbi->node, index, call);
  1726. return;
  1727. }
  1728. int index;
  1729. gimple def = SSA_NAME_DEF_STMT (target);
  1730. if (gimple_assign_single_p (def)
  1731. && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
  1732. gimple_assign_rhs1 (def), &index, &offset,
  1733. NULL, &by_ref))
  1734. {
  1735. struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  1736. cs->indirect_info->offset = offset;
  1737. cs->indirect_info->agg_contents = 1;
  1738. cs->indirect_info->by_ref = by_ref;
  1739. return;
  1740. }
  1741. /* Now we need to try to match the complex pattern of calling a member
  1742. pointer. */
  1743. if (gimple_code (def) != GIMPLE_PHI
  1744. || gimple_phi_num_args (def) != 2
  1745. || !POINTER_TYPE_P (TREE_TYPE (target))
  1746. || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
  1747. return;
  1748. /* First, we need to check whether one of these is a load from a member
  1749. pointer that is a parameter to this function. */
  1750. tree n1 = PHI_ARG_DEF (def, 0);
  1751. tree n2 = PHI_ARG_DEF (def, 1);
  1752. if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
  1753. return;
  1754. gimple d1 = SSA_NAME_DEF_STMT (n1);
  1755. gimple d2 = SSA_NAME_DEF_STMT (n2);
  1756. tree rec;
  1757. basic_block bb, virt_bb;
  1758. basic_block join = gimple_bb (def);
  1759. if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
  1760. {
  1761. if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
  1762. return;
  1763. bb = EDGE_PRED (join, 0)->src;
  1764. virt_bb = gimple_bb (d2);
  1765. }
  1766. else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
  1767. {
  1768. bb = EDGE_PRED (join, 1)->src;
  1769. virt_bb = gimple_bb (d1);
  1770. }
  1771. else
  1772. return;
  1773. /* Second, we need to check that the basic blocks are laid out in the way
  1774. corresponding to the pattern. */
  1775. if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
  1776. || single_pred (virt_bb) != bb
  1777. || single_succ (virt_bb) != join)
  1778. return;
  1779. /* Third, let's see that the branching is done depending on the least
  1780. significant bit of the pfn. */
  1781. gimple branch = last_stmt (bb);
  1782. if (!branch || gimple_code (branch) != GIMPLE_COND)
  1783. return;
  1784. if ((gimple_cond_code (branch) != NE_EXPR
  1785. && gimple_cond_code (branch) != EQ_EXPR)
  1786. || !integer_zerop (gimple_cond_rhs (branch)))
  1787. return;
  1788. tree cond = gimple_cond_lhs (branch);
  1789. if (!ipa_is_ssa_with_stmt_def (cond))
  1790. return;
  1791. def = SSA_NAME_DEF_STMT (cond);
  1792. if (!is_gimple_assign (def)
  1793. || gimple_assign_rhs_code (def) != BIT_AND_EXPR
  1794. || !integer_onep (gimple_assign_rhs2 (def)))
  1795. return;
  1796. cond = gimple_assign_rhs1 (def);
  1797. if (!ipa_is_ssa_with_stmt_def (cond))
  1798. return;
  1799. def = SSA_NAME_DEF_STMT (cond);
  1800. if (is_gimple_assign (def)
  1801. && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
  1802. {
  1803. cond = gimple_assign_rhs1 (def);
  1804. if (!ipa_is_ssa_with_stmt_def (cond))
  1805. return;
  1806. def = SSA_NAME_DEF_STMT (cond);
  1807. }
  1808. tree rec2;
  1809. rec2 = ipa_get_stmt_member_ptr_load_param (def,
  1810. (TARGET_PTRMEMFUNC_VBIT_LOCATION
  1811. == ptrmemfunc_vbit_in_delta),
  1812. NULL);
  1813. if (rec != rec2)
  1814. return;
  1815. index = ipa_get_param_decl_index (info, rec);
  1816. if (index >= 0
  1817. && parm_preserved_before_stmt_p (fbi, index, call, rec))
  1818. {
  1819. struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  1820. cs->indirect_info->offset = offset;
  1821. cs->indirect_info->agg_contents = 1;
  1822. cs->indirect_info->member_ptr = 1;
  1823. }
  1824. return;
  1825. }
  1826. /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
  1827. object referenced in the expression is a formal parameter of the caller
  1828. FBI->node (described by FBI->info), create a call note for the
  1829. statement. */
  1830. static void
  1831. ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
  1832. gcall *call, tree target)
  1833. {
  1834. tree obj = OBJ_TYPE_REF_OBJECT (target);
  1835. int index;
  1836. HOST_WIDE_INT anc_offset;
  1837. if (!flag_devirtualize)
  1838. return;
  1839. if (TREE_CODE (obj) != SSA_NAME)
  1840. return;
  1841. struct ipa_node_params *info = fbi->info;
  1842. if (SSA_NAME_IS_DEFAULT_DEF (obj))
  1843. {
  1844. struct ipa_jump_func jfunc;
  1845. if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
  1846. return;
  1847. anc_offset = 0;
  1848. index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
  1849. gcc_assert (index >= 0);
  1850. if (detect_type_change_ssa (obj, obj_type_ref_class (target),
  1851. call, &jfunc))
  1852. return;
  1853. }
  1854. else
  1855. {
  1856. struct ipa_jump_func jfunc;
  1857. gimple stmt = SSA_NAME_DEF_STMT (obj);
  1858. tree expr;
  1859. expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
  1860. if (!expr)
  1861. return;
  1862. index = ipa_get_param_decl_index (info,
  1863. SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
  1864. gcc_assert (index >= 0);
  1865. if (detect_type_change (obj, expr, obj_type_ref_class (target),
  1866. call, &jfunc, anc_offset))
  1867. return;
  1868. }
  1869. struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  1870. struct cgraph_indirect_call_info *ii = cs->indirect_info;
  1871. ii->offset = anc_offset;
  1872. ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  1873. ii->otr_type = obj_type_ref_class (target);
  1874. ii->polymorphic = 1;
  1875. }
1876. /* Analyze a call statement CALL to find whether and how it utilizes formal
1877. parameters of the caller (described by FBI->info, which also holds
1878. intermediate information about each formal parameter). */
  1879. static void
  1880. ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
  1881. {
  1882. tree target = gimple_call_fn (call);
  1883. if (!target
  1884. || (TREE_CODE (target) != SSA_NAME
  1885. && !virtual_method_call_p (target)))
  1886. return;
  1887. struct cgraph_edge *cs = fbi->node->get_edge (call);
  1888. /* If we previously turned the call into a direct call, there is
  1889. no need to analyze. */
  1890. if (cs && !cs->indirect_unknown_callee)
  1891. return;
  1892. if (cs->indirect_info->polymorphic && flag_devirtualize)
  1893. {
  1894. tree instance;
  1895. tree target = gimple_call_fn (call);
  1896. ipa_polymorphic_call_context context (current_function_decl,
  1897. target, call, &instance);
  1898. gcc_checking_assert (cs->indirect_info->otr_type
  1899. == obj_type_ref_class (target));
  1900. gcc_checking_assert (cs->indirect_info->otr_token
  1901. == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
  1902. cs->indirect_info->vptr_changed
  1903. = !context.get_dynamic_type (instance,
  1904. OBJ_TYPE_REF_OBJECT (target),
  1905. obj_type_ref_class (target), call);
  1906. cs->indirect_info->context = context;
  1907. }
  1908. if (TREE_CODE (target) == SSA_NAME)
  1909. ipa_analyze_indirect_call_uses (fbi, call, target);
  1910. else if (virtual_method_call_p (target))
  1911. ipa_analyze_virtual_call_uses (fbi, call, target);
  1912. }
1913. /* Analyze the call statement STMT with respect to formal parameters
1914. (described in FBI->info) of the caller given by FBI->node. Currently it
1915. only checks whether formal parameters are called. */
  1916. static void
  1917. ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
  1918. {
  1919. if (is_gimple_call (stmt))
  1920. ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
  1921. }
1922. /* Callback of walk_stmt_load_store_addr_ops for the visit_load, visit_store
1923. and visit_addr hooks. If OP is a parameter declaration, mark it as used in
1924. the info structure passed in DATA. */
  1925. static bool
  1926. visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
  1927. {
  1928. struct ipa_node_params *info = (struct ipa_node_params *) data;
  1929. op = get_base_address (op);
  1930. if (op
  1931. && TREE_CODE (op) == PARM_DECL)
  1932. {
  1933. int index = ipa_get_param_decl_index (info, op);
  1934. gcc_assert (index >= 0);
  1935. ipa_set_param_used (info, index, true);
  1936. }
  1937. return false;
  1938. }
  1939. /* Scan the statements in BB and inspect the uses of formal parameters. Store
  1940. the findings in various structures of the associated ipa_node_params
  1941. structure, such as parameter flags, notes etc. FBI holds various data about
  1942. the function being analyzed. */
  1943. static void
  1944. ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
  1945. {
  1946. gimple_stmt_iterator gsi;
  1947. for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
  1948. {
  1949. gimple stmt = gsi_stmt (gsi);
  1950. if (is_gimple_debug (stmt))
  1951. continue;
  1952. ipa_analyze_stmt_uses (fbi, stmt);
  1953. walk_stmt_load_store_addr_ops (stmt, fbi->info,
  1954. visit_ref_for_mod_analysis,
  1955. visit_ref_for_mod_analysis,
  1956. visit_ref_for_mod_analysis);
  1957. }
  1958. for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
  1959. walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
  1960. visit_ref_for_mod_analysis,
  1961. visit_ref_for_mod_analysis,
  1962. visit_ref_for_mod_analysis);
  1963. }
  1964. /* Calculate controlled uses of parameters of NODE. */
  1965. static void
  1966. ipa_analyze_controlled_uses (struct cgraph_node *node)
  1967. {
  1968. struct ipa_node_params *info = IPA_NODE_REF (node);
  1969. for (int i = 0; i < ipa_get_param_count (info); i++)
  1970. {
  1971. tree parm = ipa_get_param (info, i);
  1972. int controlled_uses = 0;
  1973. /* For SSA regs see if parameter is used. For non-SSA we compute
  1974. the flag during modification analysis. */
  1975. if (is_gimple_reg (parm))
  1976. {
  1977. tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
  1978. parm);
  1979. if (ddef && !has_zero_uses (ddef))
  1980. {
  1981. imm_use_iterator imm_iter;
  1982. use_operand_p use_p;
  1983. ipa_set_param_used (info, i, true);
  1984. FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
  1985. if (!is_gimple_call (USE_STMT (use_p)))
  1986. {
  1987. if (!is_gimple_debug (USE_STMT (use_p)))
  1988. {
  1989. controlled_uses = IPA_UNDESCRIBED_USE;
  1990. break;
  1991. }
  1992. }
  1993. else
  1994. controlled_uses++;
  1995. }
  1996. else
  1997. controlled_uses = 0;
  1998. }
  1999. else
  2000. controlled_uses = IPA_UNDESCRIBED_USE;
  2001. ipa_set_controlled_uses (info, i, controlled_uses);
  2002. }
  2003. }
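/* For example (a sketch):

     void wrap (int x)
     {
       use (x);
       use (x);
     }

   Both uses of x are call arguments, so x gets two controlled uses.  Any
   other non-debug use, e.g. returning x, would instead set the count to
   IPA_UNDESCRIBED_USE.  */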
2004. /* Release the vectors held in BI. */
  2005. static void
  2006. free_ipa_bb_info (struct ipa_bb_info *bi)
  2007. {
  2008. bi->cg_edges.release ();
  2009. bi->param_aa_statuses.release ();
  2010. }
  2011. /* Dominator walker driving the analysis. */
  2012. class analysis_dom_walker : public dom_walker
  2013. {
  2014. public:
  2015. analysis_dom_walker (struct func_body_info *fbi)
  2016. : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
  2017. virtual void before_dom_children (basic_block);
  2018. private:
  2019. struct func_body_info *m_fbi;
  2020. };
  2021. void
  2022. analysis_dom_walker::before_dom_children (basic_block bb)
  2023. {
  2024. ipa_analyze_params_uses_in_bb (m_fbi, bb);
  2025. ipa_compute_jump_functions_for_bb (m_fbi, bb);
  2026. }
2027. /* Initialize the array describing properties of formal parameters
  2028. of NODE, analyze their uses and compute jump functions associated
  2029. with actual arguments of calls from within NODE. */
  2030. void
  2031. ipa_analyze_node (struct cgraph_node *node)
  2032. {
  2033. struct func_body_info fbi;
  2034. struct ipa_node_params *info;
  2035. ipa_check_create_node_params ();
  2036. ipa_check_create_edge_args ();
  2037. info = IPA_NODE_REF (node);
  2038. if (info->analysis_done)
  2039. return;
  2040. info->analysis_done = 1;
  2041. if (ipa_func_spec_opts_forbid_analysis_p (node))
  2042. {
  2043. for (int i = 0; i < ipa_get_param_count (info); i++)
  2044. {
  2045. ipa_set_param_used (info, i, true);
  2046. ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
  2047. }
  2048. return;
  2049. }
  2050. struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  2051. push_cfun (func);
  2052. calculate_dominance_info (CDI_DOMINATORS);
  2053. ipa_initialize_node_params (node);
  2054. ipa_analyze_controlled_uses (node);
  2055. fbi.node = node;
  2056. fbi.info = IPA_NODE_REF (node);
  2057. fbi.bb_infos = vNULL;
  2058. fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  2059. fbi.param_count = ipa_get_param_count (info);
  2060. fbi.aa_walked = 0;
  2061. for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
  2062. {
  2063. ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
  2064. bi->cg_edges.safe_push (cs);
  2065. }
  2066. for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
  2067. {
  2068. ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
  2069. bi->cg_edges.safe_push (cs);
  2070. }
  2071. analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  2072. int i;
  2073. struct ipa_bb_info *bi;
  2074. FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
  2075. free_ipa_bb_info (bi);
  2076. fbi.bb_infos.release ();
  2077. free_dominance_info (CDI_DOMINATORS);
  2078. pop_cfun ();
  2079. }
  2080. /* Update the jump functions associated with call graph edge E when the call
  2081. graph edge CS is being inlined, assuming that E->caller is already (possibly
  2082. indirectly) inlined into CS->callee and that E has not been inlined. */
  2083. static void
  2084. update_jump_functions_after_inlining (struct cgraph_edge *cs,
  2085. struct cgraph_edge *e)
  2086. {
  2087. struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  2088. struct ipa_edge_args *args = IPA_EDGE_REF (e);
  2089. int count = ipa_get_cs_argument_count (args);
  2090. int i;
  2091. for (i = 0; i < count; i++)
  2092. {
  2093. struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
  2094. struct ipa_polymorphic_call_context *dst_ctx
  2095. = ipa_get_ith_polymorhic_call_context (args, i);
  2096. if (dst->type == IPA_JF_ANCESTOR)
  2097. {
  2098. struct ipa_jump_func *src;
  2099. int dst_fid = dst->value.ancestor.formal_id;
  2100. struct ipa_polymorphic_call_context *src_ctx
  2101. = ipa_get_ith_polymorhic_call_context (top, dst_fid);
  2102. /* Variable number of arguments can cause havoc if we try to access
  2103. one that does not exist in the inlined edge. So make sure we
  2104. don't. */
  2105. if (dst_fid >= ipa_get_cs_argument_count (top))
  2106. {
  2107. ipa_set_jf_unknown (dst);
  2108. continue;
  2109. }
  2110. src = ipa_get_ith_jump_func (top, dst_fid);
  2111. if (src_ctx && !src_ctx->useless_p ())
  2112. {
  2113. struct ipa_polymorphic_call_context ctx = *src_ctx;
  2114. /* TODO: Make type preserved safe WRT contexts. */
  2115. if (!ipa_get_jf_ancestor_type_preserved (dst))
  2116. ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
  2117. ctx.offset_by (dst->value.ancestor.offset);
  2118. if (!ctx.useless_p ())
  2119. {
  2120. vec_safe_grow_cleared (args->polymorphic_call_contexts,
  2121. count);
  2122. dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
  2123. }
  2124. dst_ctx->combine_with (ctx);
  2125. }
  2126. if (src->agg.items
  2127. && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
  2128. {
  2129. struct ipa_agg_jf_item *item;
  2130. int j;
  2131. /* Currently we do not produce clobber aggregate jump functions,
  2132. replace with merging when we do. */
  2133. gcc_assert (!dst->agg.items);
  2134. dst->agg.items = vec_safe_copy (src->agg.items);
  2135. dst->agg.by_ref = src->agg.by_ref;
  2136. FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
  2137. item->offset -= dst->value.ancestor.offset;
  2138. }
  2139. if (src->type == IPA_JF_PASS_THROUGH
  2140. && src->value.pass_through.operation == NOP_EXPR)
  2141. {
  2142. dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
  2143. dst->value.ancestor.agg_preserved &=
  2144. src->value.pass_through.agg_preserved;
  2145. }
  2146. else if (src->type == IPA_JF_ANCESTOR)
  2147. {
  2148. dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
  2149. dst->value.ancestor.offset += src->value.ancestor.offset;
  2150. dst->value.ancestor.agg_preserved &=
  2151. src->value.ancestor.agg_preserved;
  2152. }
  2153. else
  2154. ipa_set_jf_unknown (dst);
  2155. }
  2156. else if (dst->type == IPA_JF_PASS_THROUGH)
  2157. {
  2158. struct ipa_jump_func *src;
  2159. /* We must check range due to calls with variable number of arguments
  2160. and we cannot combine jump functions with operations. */
  2161. if (dst->value.pass_through.operation == NOP_EXPR
  2162. && (dst->value.pass_through.formal_id
  2163. < ipa_get_cs_argument_count (top)))
  2164. {
  2165. int dst_fid = dst->value.pass_through.formal_id;
  2166. src = ipa_get_ith_jump_func (top, dst_fid);
  2167. bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
  2168. struct ipa_polymorphic_call_context *src_ctx
  2169. = ipa_get_ith_polymorhic_call_context (top, dst_fid);
  2170. if (src_ctx && !src_ctx->useless_p ())
  2171. {
  2172. struct ipa_polymorphic_call_context ctx = *src_ctx;
  2173. /* TODO: Make type preserved safe WRT contexts. */
  2174. if (!ipa_get_jf_pass_through_type_preserved (dst))
  2175. ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
  2176. if (!ctx.useless_p ())
  2177. {
  2178. if (!dst_ctx)
  2179. {
  2180. vec_safe_grow_cleared (args->polymorphic_call_contexts,
  2181. count);
  2182. dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
  2183. }
  2184. dst_ctx->combine_with (ctx);
  2185. }
  2186. }
  2187. switch (src->type)
  2188. {
  2189. case IPA_JF_UNKNOWN:
  2190. ipa_set_jf_unknown (dst);
  2191. break;
  2192. case IPA_JF_CONST:
  2193. ipa_set_jf_cst_copy (dst, src);
  2194. break;
  2195. case IPA_JF_PASS_THROUGH:
  2196. {
  2197. int formal_id = ipa_get_jf_pass_through_formal_id (src);
  2198. enum tree_code operation;
  2199. operation = ipa_get_jf_pass_through_operation (src);
  2200. if (operation == NOP_EXPR)
  2201. {
  2202. bool agg_p;
  2203. agg_p = dst_agg_p
  2204. && ipa_get_jf_pass_through_agg_preserved (src);
  2205. ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
  2206. }
  2207. else
  2208. {
  2209. tree operand = ipa_get_jf_pass_through_operand (src);
  2210. ipa_set_jf_arith_pass_through (dst, formal_id, operand,
  2211. operation);
  2212. }
  2213. break;
  2214. }
  2215. case IPA_JF_ANCESTOR:
  2216. {
  2217. bool agg_p;
  2218. agg_p = dst_agg_p
  2219. && ipa_get_jf_ancestor_agg_preserved (src);
  2220. ipa_set_ancestor_jf (dst,
  2221. ipa_get_jf_ancestor_offset (src),
  2222. ipa_get_jf_ancestor_formal_id (src),
  2223. agg_p);
  2224. break;
  2225. }
  2226. default:
  2227. gcc_unreachable ();
  2228. }
  2229. if (src->agg.items
  2230. && (dst_agg_p || !src->agg.by_ref))
  2231. {
  2232. /* Currently we do not produce clobber aggregate jump
  2233. functions, replace with merging when we do. */
  2234. gcc_assert (!dst->agg.items);
  2235. dst->agg.by_ref = src->agg.by_ref;
  2236. dst->agg.items = vec_safe_copy (src->agg.items);
  2237. }
  2238. }
  2239. else
  2240. ipa_set_jf_unknown (dst);
  2241. }
  2242. }
  2243. }
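/* To illustrate the composition (a sketch): if b was called as b (i) with a
   simple pass-through of a's parameter i, and b in turn calls c (j) with a
   simple pass-through of its own parameter j, then after inlining b into a
   the jump function of the edge to c becomes a simple pass-through of a's
   parameter i.  */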
  2244. /* If TARGET is an addr_expr of a function declaration, make it the
2245. (speculative, if SPECULATIVE is set) destination of an indirect edge IE
2246. and return the edge. Otherwise, return NULL. */
  2247. struct cgraph_edge *
  2248. ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
  2249. bool speculative)
  2250. {
  2251. struct cgraph_node *callee;
  2252. struct inline_edge_summary *es = inline_edge_summary (ie);
  2253. bool unreachable = false;
  2254. if (TREE_CODE (target) == ADDR_EXPR)
  2255. target = TREE_OPERAND (target, 0);
  2256. if (TREE_CODE (target) != FUNCTION_DECL)
  2257. {
  2258. target = canonicalize_constructor_val (target, NULL);
  2259. if (!target || TREE_CODE (target) != FUNCTION_DECL)
  2260. {
  2261. /* Member pointer call that goes through a VMT lookup. */
  2262. if (ie->indirect_info->member_ptr
  2263. /* Or if target is not an invariant expression and we do not
2264. know whether it will evaluate to a function at runtime.
  2265. This can happen when folding through &VAR, where &VAR
  2266. is IP invariant, but VAR itself is not.
  2267. TODO: Revisit this when GCC 5 is branched. It seems that
  2268. member_ptr check is not needed and that we may try to fold
  2269. the expression and see if VAR is readonly. */
  2270. || !is_gimple_ip_invariant (target))
  2271. {
  2272. if (dump_enabled_p ())
  2273. {
  2274. location_t loc = gimple_location_safe (ie->call_stmt);
  2275. dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
  2276. "discovered direct call non-invariant "
  2277. "%s/%i\n",
  2278. ie->caller->name (), ie->caller->order);
  2279. }
  2280. return NULL;
  2281. }
  2282. if (dump_enabled_p ())
  2283. {
  2284. location_t loc = gimple_location_safe (ie->call_stmt);
  2285. dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
  2286. "discovered direct call to non-function in %s/%i, "
  2287. "making it __builtin_unreachable\n",
  2288. ie->caller->name (), ie->caller->order);
  2289. }
  2290. target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  2291. callee = cgraph_node::get_create (target);
  2292. unreachable = true;
  2293. }
  2294. else
  2295. callee = cgraph_node::get (target);
  2296. }
  2297. else
  2298. callee = cgraph_node::get (target);
2299. /* Because may-edges are not explicitly represented and the vtable may be
2300. external, we may create the first reference to the object in the unit. */
  2301. if (!callee || callee->global.inlined_to)
  2302. {
2303. /* We had better make sure we can refer to it.
  2304. In the case of static functions we are out of luck, since we already
  2305. removed its body. In the case of public functions we may or may
  2306. not introduce the reference. */
  2307. if (!canonicalize_constructor_val (target, NULL)
  2308. || !TREE_PUBLIC (target))
  2309. {
  2310. if (dump_file)
  2311. fprintf (dump_file, "ipa-prop: Discovered call to a known target "
  2312. "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
  2313. xstrdup_for_dump (ie->caller->name ()),
  2314. ie->caller->order,
  2315. xstrdup_for_dump (ie->callee->name ()),
  2316. ie->callee->order);
  2317. return NULL;
  2318. }
  2319. callee = cgraph_node::get_create (target);
  2320. }
2321. /* If the edge is already speculative, just note whether the new target agrees. */
  2322. if (speculative && ie->speculative)
  2323. {
  2324. struct cgraph_edge *e2;
  2325. struct ipa_ref *ref;
  2326. ie->speculative_call_info (e2, ie, ref);
  2327. if (e2->callee->ultimate_alias_target ()
  2328. != callee->ultimate_alias_target ())
  2329. {
  2330. if (dump_file)
  2331. fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
  2332. "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
  2333. xstrdup_for_dump (ie->caller->name ()),
  2334. ie->caller->order,
  2335. xstrdup_for_dump (callee->name ()),
  2336. callee->order,
  2337. xstrdup_for_dump (e2->callee->name ()),
  2338. e2->callee->order);
  2339. }
  2340. else
  2341. {
  2342. if (dump_file)
  2343. fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
  2344. "(%s/%i -> %s/%i) this agree with previous speculation.\n",
  2345. xstrdup_for_dump (ie->caller->name ()),
  2346. ie->caller->order,
  2347. xstrdup_for_dump (callee->name ()),
  2348. callee->order);
  2349. }
  2350. return NULL;
  2351. }
  2352. if (!dbg_cnt (devirt))
  2353. return NULL;
  2354. ipa_check_create_node_params ();
2355. /* We cannot make edges to inline clones. It is a bug if someone removed
2356. the cgraph node too early. */
  2357. gcc_assert (!callee->global.inlined_to);
  2358. if (dump_file && !unreachable)
  2359. {
  2360. fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
  2361. "(%s/%i -> %s/%i), for stmt ",
  2362. ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
  2363. speculative ? "speculative" : "known",
  2364. xstrdup_for_dump (ie->caller->name ()),
  2365. ie->caller->order,
  2366. xstrdup_for_dump (callee->name ()),
  2367. callee->order);
  2368. if (ie->call_stmt)
  2369. print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
  2370. else
  2371. fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
  2372. }
  2373. if (dump_enabled_p ())
  2374. {
  2375. location_t loc = gimple_location_safe (ie->call_stmt);
  2376. dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
  2377. "converting indirect call in %s to direct call to %s\n",
  2378. ie->caller->name (), callee->name ());
  2379. }
  2380. if (!speculative)
  2381. {
  2382. struct cgraph_edge *orig = ie;
  2383. ie = ie->make_direct (callee);
2384. /* If we resolved a speculative edge, the cost is already up to date
2385. for the direct call (adjusted by inline_edge_duplication_hook). */
  2386. if (ie == orig)
  2387. {
  2388. es = inline_edge_summary (ie);
  2389. es->call_stmt_size -= (eni_size_weights.indirect_call_cost
  2390. - eni_size_weights.call_cost);
  2391. es->call_stmt_time -= (eni_time_weights.indirect_call_cost
  2392. - eni_time_weights.call_cost);
  2393. }
  2394. }
  2395. else
  2396. {
  2397. if (!callee->can_be_discarded_p ())
  2398. {
  2399. cgraph_node *alias;
  2400. alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
  2401. if (alias)
  2402. callee = alias;
  2403. }
  2404. /* make_speculative will update ie's cost to direct call cost. */
  2405. ie = ie->make_speculative
  2406. (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
  2407. }
  2408. return ie;
  2409. }
2410. /* Retrieve a value from the aggregate jump function AGG for the given OFFSET,
2411. or return NULL if there is none. BY_REF specifies whether the value has to
  2412. be passed by reference or by value. */
tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and, if it hits zero, remove the reference to the symbol the jump
   function refers to from the caller of the edge specified in the rdesc.
   Return false if either the symbol or the reference could not be found,
   otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
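  /* Redirect to __builtin_unreachable: such a call site either has no
     possible target or the types are inconsistent, so it cannot be reached
     in a well-defined execution and the rest of the compiler may treat the
     call as never executed.  */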
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a
   virtual call based on a formal parameter which is described by jump
   function JFUNC and if it can be determined, make it direct and return the
   direct edge.  Otherwise, return NULL.  CTX describes the polymorphic
   context that the parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *> targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
	      (ie->indirect_info->otr_type,
	       ie->indirect_info->otr_token,
	       ctx, &final);
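  /* If the list of possible targets is complete (FINAL) and contains at most
     one entry, we can devirtualize with certainty; an empty list means the
     call site is never reached with a valid object.  */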
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	    (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff new edges were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of
	 arguments.  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);
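      /* If the edge is already speculative, remember the current speculative
	 target so that we can recognize below when the newly discovered
	 direct edge merely confirms that speculation.  */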
      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}

      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
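/* For instance, if the caller parameter had C == 2 described uses (one of
   them being the argument of the call being inlined) and the callee
   parameter had D == 3 uses, inlining replaces the single call-argument use
   with the three callee uses, giving 2 + 3 - 1 == 4.  */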
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				      == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added
   to *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}

/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
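	      /* SRC_RDESC is the master description attached to the edge
		 being duplicated; chain the copy into its list of
		 duplicates so that later duplications can find it.  */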
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
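      /* Copy the chain element by element; note that the copied chain ends
	 up in reverse order.  */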
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_cst_values_pool);
      free_alloc_pool (ipcp_poly_ctx_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      if (ipa_refdesc_pool)
	free_alloc_pool (ipa_refdesc_pool);
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_cst_values_pool)
    free_alloc_pool (ipcp_cst_values_pool);
  if (ipcp_poly_ctx_values_pool)
    free_alloc_pool (ipcp_poly_ctx_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print the ipa_tree_map data structure of function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, " ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}
/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan
   in ADJUSTMENTS.  It also sets base fields of individual adjustment
   structures to reflect the actual parameters being modified, which are
   determined by the base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.).  The exception is METHOD_TYPEs,
     which must have a THIS argument; when we are asked to remove it, we
     need to build a new FUNCTION_TYPE instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
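	      /* MISALIGN & -MISALIGN isolates the lowest set bit, i.e. the
		 largest power of two known to divide the offset, which is
		 all the alignment we can still guarantee.  */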
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true,
					       GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true,
					       GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
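	  /* DEBUG_ARGS holds (origin decl, debug decl) pairs, hence the
	     iteration step of two.  */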
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
/* If the expression *EXPR should be replaced by a reduction of a parameter,
   do so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and the new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments
   in INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME: Create nonlocal value too.  */
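      /* Compose the two offsets: a copy stage contributes no offset of its
	 own, otherwise the offsets of the two stages accumulate.  */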
      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;

  fprintf (f, " Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
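  /* The by_ref flag is streamed only when at least one aggregate item
     follows; ipa_read_jump_function reads it back under the same
     condition.  */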
  3938. if (count)
  3939. {
  3940. bp = bitpack_create (ob->main_stream);
  3941. bp_pack_value (&bp, jump_func->agg.by_ref, 1);
  3942. streamer_write_bitpack (&bp);
  3943. }
  3944. FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
  3945. {
  3946. streamer_write_uhwi (ob, item->offset);
  3947. stream_write_tree (ob, item->value, true);
  3948. }
  3949. bp = bitpack_create (ob->main_stream);
  3950. bp_pack_value (&bp, jump_func->alignment.known, 1);
  3951. streamer_write_bitpack (&bp);
  3952. if (jump_func->alignment.known)
  3953. {
  3954. streamer_write_uhwi (ob, jump_func->alignment.align);
  3955. streamer_write_uhwi (ob, jump_func->alignment.misalign);
  3956. }
  3957. }
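
/* The record emitted above is therefore, in stream order:
     uhwi      jump function type
     ...       type-specific payload (see the switch above)
     uhwi      number of aggregate items
     bitpack   agg.by_ref (only if the item count is nonzero)
     per item: uhwi offset, tree value
     bitpack   alignment.known
     uhwi align, uhwi misalign (only if the alignment is known)
   ipa_read_jump_function below must consume it in exactly this order.  */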

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
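
/* Note that the five flags above are bit-packed in a fixed order and that
   the offset is streamed only when agg_contents or polymorphic is set; in
   all other cases the writer asserts it is zero and the reader below
   restores it as zero implicitly.  */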

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Encode the argument count and whether polymorphic call contexts
	 follow each jump function in a single number: the count in the
	 upper bits, the presence flag in the lowest bit.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
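
/* Per node, the stream thus contains: the symtab encoder reference, the
   formal parameter count, one move cost per parameter, a bitpack of
   per-parameter "used" flags, one controlled-uses count per parameter, and
   then one argument block per outgoing edge (direct callees first, then
   indirect calls, the latter followed by their indirect edge info).  */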

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* The lowest bit of the streamed value says whether polymorphic call
	 contexts follow each jump function; the remaining bits are the
	 argument count.  */
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	    }
	}
      /* Unlike for direct callees, the indirect edge info is present even
	 when the call has no arguments.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}

/* Write jump functions for all functions in the current partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
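
/* The resulting section is therefore a node count, followed by one record
   per function with analyzed parameters (as laid out by
   ipa_write_node_info), terminated with a zero byte.  */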

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

/* After merging units, argument counts may no longer match and declaration
   merging may have rendered parameter lists obsolete, so make sure the node
   and edge summaries exist again.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
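
/* Write the aggregate replacement values and the parameter alignment
   information recorded for NODE to OB.  */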
void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}

/* Stream in the aggregate value replacement chain and parameter alignment
   information for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}

/* Write all aggregate replacements for nodes in the current partition.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
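
/* As with the jump function section, the output is a node count followed by
   one record per function with a gimple body, terminated by a zero byte.  */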

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  /* Build a map from original parameter indices to the indices the
     parameters have in the clone: skipped parameters map to -1, the rest
     shift down by the number of parameters skipped before them.  */
  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
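
/* For example, with four original parameters of which parameter 1 is
   skipped, the map is {0, -1, 1, 2}: a replacement recorded for parameter 2
   is re-indexed to parameter 1 of the clone, while parameters 0 and 3 move
   to indices 0 and 2 respectively.  */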

/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
				     &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, " const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
}

/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, " Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, " But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
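
/* Thus pointer alignment information is only ever strengthened here: when
   the recorded alignment does not improve on what is already known about
   the SSA default definition, the existing info is left untouched.  */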

/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}