tree-ssa-sccvn.c

/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "ipa-ref.h"
#include "plugin-api.h"
#include "cgraph.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
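
/* As a rough illustration of the optimistic SCC iteration (a sketch,
   not a transcription of any particular testcase), consider the cycle
   formed by a loop counter:

       i_1 = PHI <0 (preheader), i_3 (latch)>
       i_3 = i_1 + 0;

   i_1 and i_3 form an SCC in the SSA graph.  Optimistic value
   numbering first gives i_1 the value of its only non-VN_TOP
   argument, 0, which lets i_3 = i_1 + 0 simplify to 0 as well; the
   next iteration confirms the assumption and the whole cycle
   collapses to the constant 0.  Roughly speaking, the iteration uses
   the optimistic tables, repeats (clearing them each round) until no
   value number changes, and the final results are then recorded
   against the valid tables.  */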

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;

/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}

/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            vn_valueize (TREE_OPERAND
                                           (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          vn_valueize (gimple_assign_rhs1 (def_stmt)),
                          vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
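
/* Illustrative note: if NAME's value number is x_2 and x_2 is defined
   by x_2 = y_3 + 1, the representative expression built above is
   (roughly) "VAL (y_3) + 1", i.e. the RHS of the defining statement
   with each operand replaced by its current value number, which may
   itself be a constant.  */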

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;
                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;
              case tcc_constant:
                return VN_CONSTANT;
              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
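
/* A few concrete examples of the classification above (illustrative
   only): "x_1 = y_2 + z_3" and "x_1 = -y_2" are VN_NARY, a PHI node
   is VN_PHI, "x_1 = a.b" and calls are VN_REFERENCE, "x_1 = 5" is
   VN_CONSTANT, and "x_1 = &a[i_2]" is VN_REFERENCE because the
   address is not a minimal invariant.  */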

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            hstate.add_int (off);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  hstate.add_int (TREE_CODE (op));
                  inchash::add_expr (op, hstate);
                }
            }
          else
            vn_reference_op_compute_hash (vro, hstate);
        }
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
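
/* Note on the offset handling above (an illustrative sketch): runs of
   operands with known constant offsets are summed and only the sum is
   hashed, and an ADDR_EXPR of a decl behind a MEM_REF is hashed as the
   decl itself.  So, for example, a.b.c and MEM[&a + 8] hash the same
   when .b.c happens to live at byte offset 8, which lets
   vn_reference_eq below treat such differently spelled references as
   candidates for the same value number.  */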

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}
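
/* Illustrative example: with a layout where a.b[2] lives at byte
   offset 12, the operand vectors for a.b[2] and for MEM[&a + 12]
   differ in shape but accumulate the same constant offset, and the
   trailing ADDR_EXPR <&a> is compared as the decl A itself, so the
   two references (given matching access types) are considered
   equivalent by the function above.  */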

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
            temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    offset_int off
                      = (wi::to_offset (this_offset)
                         + wi::lrshift (wi::to_offset (bit_offset),
                                        LOG2_BITS_PER_UNIT));
                    if (wi::fits_shwi_p (off)
                        /* Prohibit value-numbering zero offset components
                           of addresses the same before the pass folding
                           __builtin_object_size had a chance to run
                           (checking cfun->after_inlining does the
                           trick here).  */
                        && (TREE_CODE (orig) != ADDR_EXPR
                            || off != 0
                            || cfun->after_inlining))
                      temp.off = off.to_shwi ();
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              offset_int off = ((wi::to_offset (temp.op0)
                                 - wi::to_offset (temp.op1))
                                * wi::to_offset (temp.op2));
              if (wi::fits_shwi_p (off))
                temp.off = off.to_shwi();
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          break;
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
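
/* As a rough example of the decomposition above: a load from a.b
   (with A a VAR_DECL and B a field at a constant offset) yields an
   operand vector of approximately

     { COMPONENT_REF <b>, MEM_REF <offset 0>, ADDR_EXPR <&a> }

   i.e. the access path from the outermost reference down to the base,
   with the decl canonicalized to MEM[&a] so that it matches the same
   memory accessed through a pointer whose value is &a.  */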

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
        size = -1;
      else
        size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_to_shwi (op->op1);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */
            if (op->op1
                || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
              max_size = -1;
            else
              {
                offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!tree_fits_shwi_p (op->op0)
              || !tree_fits_shwi_p (op->op1)
              || !tree_fits_shwi_p (op->op2))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
              hindex -= tree_to_shwi (op->op1);
              hindex *= tree_to_shwi (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
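
/* Illustrative example (assuming 8-bit units and a 32-bit int type):
   for an int load whose operands describe MEM[&a + 4], the ao_ref
   built above gets base A, offset 32 (in bits) and size and max_size
   of 32, with the base alias set taken from the MEM_REF operand.  */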

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
        mem_op->off = tree_to_shwi (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
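
/* For example (schematically): if the address operand is &a.b with
   field B at byte offset 8 and the preceding MEM_REF carries offset 4,
   the pair is rewritten to the address &a with a MEM_REF offset of 12,
   so the reference no longer depends on how the address was spelled.  */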

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
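
/* For example (schematically): with ptr_1 = base_2 + 16 and operands
   describing MEM[ptr_1 + 4], the code above rewrites them to
   MEM[base_2 + 20] and then recurses in case base_2 is itself defined
   by another address computation.  */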
  1114. /* Optimize the reference REF to a constant if possible or return
  1115. NULL_TREE if not. */
  1116. tree
  1117. fully_constant_vn_reference_p (vn_reference_t ref)
  1118. {
  1119. vec<vn_reference_op_s> operands = ref->operands;
  1120. vn_reference_op_t op;
  1121. /* Try to simplify the translated expression if it is
  1122. a call to a builtin function with at most two arguments. */
  1123. op = &operands[0];
  1124. if (op->opcode == CALL_EXPR
  1125. && TREE_CODE (op->op0) == ADDR_EXPR
  1126. && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
  1127. && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
  1128. && operands.length () >= 2
  1129. && operands.length () <= 3)
  1130. {
  1131. vn_reference_op_t arg0, arg1 = NULL;
  1132. bool anyconst = false;
  1133. arg0 = &operands[1];
  1134. if (operands.length () > 2)
  1135. arg1 = &operands[2];
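/* Only attempt the fold if at least one argument is a constant or an
   invariant address; otherwise building the call tree is unlikely to
   simplify to anything useful.  */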
  1136. if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
  1137. || (arg0->opcode == ADDR_EXPR
  1138. && is_gimple_min_invariant (arg0->op0)))
  1139. anyconst = true;
  1140. if (arg1
  1141. && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
  1142. || (arg1->opcode == ADDR_EXPR
  1143. && is_gimple_min_invariant (arg1->op0))))
  1144. anyconst = true;
  1145. if (anyconst)
  1146. {
  1147. tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
  1148. arg1 ? 2 : 1,
  1149. arg0->op0,
  1150. arg1 ? arg1->op0 : NULL);
  1151. if (folded
  1152. && TREE_CODE (folded) == NOP_EXPR)
  1153. folded = TREE_OPERAND (folded, 0);
  1154. if (folded
  1155. && is_gimple_min_invariant (folded))
  1156. return folded;
  1157. }
  1158. }
  1159. /* Simplify reads from constants or constant initializers. */
  1160. else if (BITS_PER_UNIT == 8
  1161. && is_gimple_reg_type (ref->type)
  1162. && (!INTEGRAL_TYPE_P (ref->type)
  1163. || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
  1164. {
  1165. HOST_WIDE_INT off = 0;
  1166. HOST_WIDE_INT size;
  1167. if (INTEGRAL_TYPE_P (ref->type))
  1168. size = TYPE_PRECISION (ref->type);
  1169. else
  1170. size = tree_to_shwi (TYPE_SIZE (ref->type));
  1171. if (size % BITS_PER_UNIT != 0
  1172. || size > MAX_BITSIZE_MODE_ANY_MODE)
  1173. return NULL_TREE;
  1174. size /= BITS_PER_UNIT;
  1175. unsigned i;
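/* Accumulate the constant offsets of the handled components until the
   base MEM_REF is reached; a component with unknown offset makes the
   reference non-constant.  */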
  1176. for (i = 0; i < operands.length (); ++i)
  1177. {
  1178. if (operands[i].off == -1)
  1179. return NULL_TREE;
  1180. off += operands[i].off;
  1181. if (operands[i].opcode == MEM_REF)
  1182. {
  1183. ++i;
  1184. break;
  1185. }
  1186. }
  1187. vn_reference_op_t base = &operands[--i];
  1188. tree ctor = error_mark_node;
  1189. tree decl = NULL_TREE;
  1190. if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
  1191. ctor = base->op0;
  1192. else if (base->opcode == MEM_REF
  1193. && base[1].opcode == ADDR_EXPR
  1194. && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
  1195. || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
  1196. {
  1197. decl = TREE_OPERAND (base[1].op0, 0);
  1198. ctor = ctor_for_folding (decl);
  1199. }
  1200. if (ctor == NULL_TREE)
  1201. return build_zero_cst (ref->type);
  1202. else if (ctor != error_mark_node)
  1203. {
  1204. if (decl)
  1205. {
  1206. tree res = fold_ctor_reference (ref->type, ctor,
  1207. off * BITS_PER_UNIT,
  1208. size * BITS_PER_UNIT, decl);
  1209. if (res)
  1210. {
  1211. STRIP_USELESS_TYPE_CONVERSION (res);
  1212. if (is_gimple_min_invariant (res))
  1213. return res;
  1214. }
  1215. }
  1216. else
  1217. {
  1218. unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
  1219. if (native_encode_expr (ctor, buf, size, off) > 0)
  1220. return native_interpret_expr (ref->type, buf, size);
  1221. }
  1222. }
  1223. }
  1224. return NULL_TREE;
  1225. }
  1226. /* Transform any SSA_NAME's in a vector of vn_reference_op_s
  1227. structures into their value numbers. This is done in-place, and
  1228. the vector passed in is returned. *VALUEIZED_ANYTHING will specify
  1229. whether any operands were valueized. */
  1230. static vec<vn_reference_op_s>
  1231. valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
  1232. {
  1233. vn_reference_op_t vro;
  1234. unsigned int i;
  1235. *valueized_anything = false;
  1236. FOR_EACH_VEC_ELT (orig, i, vro)
  1237. {
  1238. if (vro->opcode == SSA_NAME
  1239. || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
  1240. {
  1241. tree tem = SSA_VAL (vro->op0);
  1242. if (tem != vro->op0)
  1243. {
  1244. *valueized_anything = true;
  1245. vro->op0 = tem;
  1246. }
  1247. /* If it transforms from an SSA_NAME to a constant, update
  1248. the opcode. */
  1249. if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
  1250. vro->opcode = TREE_CODE (vro->op0);
  1251. }
  1252. if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
  1253. {
  1254. tree tem = SSA_VAL (vro->op1);
  1255. if (tem != vro->op1)
  1256. {
  1257. *valueized_anything = true;
  1258. vro->op1 = tem;
  1259. }
  1260. }
  1261. if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
  1262. {
  1263. tree tem = SSA_VAL (vro->op2);
  1264. if (tem != vro->op2)
  1265. {
  1266. *valueized_anything = true;
  1267. vro->op2 = tem;
  1268. }
  1269. }
  1270. /* If it transforms from an SSA_NAME to an address, fold with
  1271. a preceding indirect reference. */
  1272. if (i > 0
  1273. && vro->op0
  1274. && TREE_CODE (vro->op0) == ADDR_EXPR
  1275. && orig[i - 1].opcode == MEM_REF)
  1276. vn_reference_fold_indirect (&orig, &i);
  1277. else if (i > 0
  1278. && vro->opcode == SSA_NAME
  1279. && orig[i - 1].opcode == MEM_REF)
  1280. vn_reference_maybe_forwprop_address (&orig, &i);
  1281. /* If it transforms a non-constant ARRAY_REF into a constant
  1282. one, adjust the constant offset. */
  1283. else if (vro->opcode == ARRAY_REF
  1284. && vro->off == -1
  1285. && TREE_CODE (vro->op0) == INTEGER_CST
  1286. && TREE_CODE (vro->op1) == INTEGER_CST
  1287. && TREE_CODE (vro->op2) == INTEGER_CST)
  1288. {
  1289. offset_int off = ((wi::to_offset (vro->op0)
  1290. - wi::to_offset (vro->op1))
  1291. * wi::to_offset (vro->op2));
  1292. if (wi::fits_shwi_p (off))
  1293. vro->off = off.to_shwi ();
  1294. }
  1295. }
  1296. return orig;
  1297. }
  1298. static vec<vn_reference_op_s>
  1299. valueize_refs (vec<vn_reference_op_s> orig)
  1300. {
  1301. bool tem;
  1302. return valueize_refs_1 (orig, &tem);
  1303. }
  1304. static vec<vn_reference_op_s> shared_lookup_references;
  1305. /* Create a vector of vn_reference_op_s structures from REF, a
  1306. REFERENCE_CLASS_P tree. The vector is shared among all callers of
  1307. this function. *VALUEIZED_ANYTHING will specify whether any
  1308. operands were valueized. */
  1309. static vec<vn_reference_op_s>
  1310. valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
  1311. {
  1312. if (!ref)
  1313. return vNULL;
  1314. shared_lookup_references.truncate (0);
  1315. copy_reference_ops_from_ref (ref, &shared_lookup_references);
  1316. shared_lookup_references = valueize_refs_1 (shared_lookup_references,
  1317. valueized_anything);
  1318. return shared_lookup_references;
  1319. }
  1320. /* Create a vector of vn_reference_op_s structures from CALL, a
  1321. call statement. The vector is shared among all callers of
  1322. this function. */
  1323. static vec<vn_reference_op_s>
  1324. valueize_shared_reference_ops_from_call (gcall *call)
  1325. {
  1326. if (!call)
  1327. return vNULL;
  1328. shared_lookup_references.truncate (0);
  1329. copy_reference_ops_from_call (call, &shared_lookup_references);
  1330. shared_lookup_references = valueize_refs (shared_lookup_references);
  1331. return shared_lookup_references;
  1332. }
  1333. /* Lookup a SCCVN reference operation VR in the current hash table.
  1334. Returns the resulting value number if it exists in the hash table,
  1335. NULL_TREE otherwise. VNRESULT will be filled in with the actual
  1336. vn_reference_t stored in the hashtable if something is found. */
  1337. static tree
  1338. vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
  1339. {
  1340. vn_reference_s **slot;
  1341. hashval_t hash;
  1342. hash = vr->hashcode;
  1343. slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  1344. if (!slot && current_info == optimistic_info)
  1345. slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  1346. if (slot)
  1347. {
  1348. if (vnresult)
  1349. *vnresult = (vn_reference_t)*slot;
  1350. return ((vn_reference_t)*slot)->result;
  1351. }
  1352. return NULL_TREE;
  1353. }
  1354. static tree *last_vuse_ptr;
  1355. static vn_lookup_kind vn_walk_kind;
  1356. static vn_lookup_kind default_vn_walk_kind;
  1357. /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
  1358. with the current VUSE and performs the expression lookup. */
  1359. static void *
  1360. vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
  1361. unsigned int cnt, void *vr_)
  1362. {
  1363. vn_reference_t vr = (vn_reference_t)vr_;
  1364. vn_reference_s **slot;
  1365. hashval_t hash;
  1366. /* This bounds the stmt walks we perform on reference lookups
  1367. to O(1) instead of O(N) where N is the number of dominating
  1368. stores. */
  1369. if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
  1370. return (void *)-1;
  1371. if (last_vuse_ptr)
  1372. *last_vuse_ptr = vuse;
  1373. /* Fixup vuse and hash. */
  1374. if (vr->vuse)
  1375. vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  1376. vr->vuse = vuse_ssa_val (vuse);
  1377. if (vr->vuse)
  1378. vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
  1379. hash = vr->hashcode;
  1380. slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  1381. if (!slot && current_info == optimistic_info)
  1382. slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  1383. if (slot)
  1384. return *slot;
  1385. return NULL;
  1386. }
  1387. /* Lookup an existing or insert a new vn_reference entry into the
  1388. value table for the VUSE, SET, TYPE, OPERANDS reference which
  1389. has the value VALUE which is either a constant or an SSA name. */
  1390. static vn_reference_t
  1391. vn_reference_lookup_or_insert_for_pieces (tree vuse,
  1392. alias_set_type set,
  1393. tree type,
  1394. vec<vn_reference_op_s,
  1395. va_heap> operands,
  1396. tree value)
  1397. {
  1398. vn_reference_s vr1;
  1399. vn_reference_t result;
  1400. unsigned value_id;
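/* Probe the tables first and only insert a new entry, with a copy of
   OPERANDS, if the lookup does not find an existing one.  */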
  1401. vr1.vuse = vuse;
  1402. vr1.operands = operands;
  1403. vr1.type = type;
  1404. vr1.set = set;
  1405. vr1.hashcode = vn_reference_compute_hash (&vr1);
  1406. if (vn_reference_lookup_1 (&vr1, &result))
  1407. return result;
  1408. if (TREE_CODE (value) == SSA_NAME)
  1409. value_id = VN_INFO (value)->value_id;
  1410. else
  1411. value_id = get_or_alloc_constant_value_id (value);
  1412. return vn_reference_insert_pieces (vuse, set, type,
  1413. operands.copy (), value, value_id);
  1414. }
  1415. /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
  1416. from the statement defining VUSE and if not successful tries to
1417. translate *REF and VR_ through an aggregate copy at the definition
  1418. of VUSE. */
  1419. static void *
  1420. vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
  1421. bool disambiguate_only)
  1422. {
  1423. vn_reference_t vr = (vn_reference_t)vr_;
  1424. gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  1425. tree base;
  1426. HOST_WIDE_INT offset, maxsize;
  1427. static vec<vn_reference_op_s>
  1428. lhs_ops = vNULL;
  1429. ao_ref lhs_ref;
  1430. bool lhs_ref_ok = false;
1431. /* First try to disambiguate after value-replacing in the definition's LHS. */
  1432. if (is_gimple_assign (def_stmt))
  1433. {
  1434. vec<vn_reference_op_s> tem;
  1435. tree lhs = gimple_assign_lhs (def_stmt);
  1436. bool valueized_anything = false;
  1437. /* Avoid re-allocation overhead. */
  1438. lhs_ops.truncate (0);
  1439. copy_reference_ops_from_ref (lhs, &lhs_ops);
  1440. tem = lhs_ops;
  1441. lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
  1442. gcc_assert (lhs_ops == tem);
  1443. if (valueized_anything)
  1444. {
  1445. lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
  1446. get_alias_set (lhs),
  1447. TREE_TYPE (lhs), lhs_ops);
  1448. if (lhs_ref_ok
  1449. && !refs_may_alias_p_1 (ref, &lhs_ref, true))
  1450. return NULL;
  1451. }
  1452. else
  1453. {
  1454. ao_ref_init (&lhs_ref, lhs);
  1455. lhs_ref_ok = true;
  1456. }
  1457. }
  1458. else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
  1459. && gimple_call_num_args (def_stmt) <= 4)
  1460. {
1461. /* For builtin calls valueize their arguments and call the
  1462. alias oracle again. Valueization may improve points-to
  1463. info of pointers and constify size and position arguments.
  1464. Originally this was motivated by PR61034 which has
  1465. conditional calls to free falsely clobbering ref because
  1466. of imprecise points-to info of the argument. */
  1467. tree oldargs[4];
  1468. bool valueized_anything = false;
  1469. for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
  1470. {
  1471. oldargs[i] = gimple_call_arg (def_stmt, i);
  1472. if (TREE_CODE (oldargs[i]) == SSA_NAME
  1473. && VN_INFO (oldargs[i])->valnum != oldargs[i])
  1474. {
  1475. gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
  1476. valueized_anything = true;
  1477. }
  1478. }
  1479. if (valueized_anything)
  1480. {
  1481. bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
  1482. ref);
  1483. for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
  1484. gimple_call_set_arg (def_stmt, i, oldargs[i]);
  1485. if (!res)
  1486. return NULL;
  1487. }
  1488. }
  1489. if (disambiguate_only)
  1490. return (void *)-1;
  1491. base = ao_ref_base (ref);
  1492. offset = ref->offset;
  1493. maxsize = ref->max_size;
  1494. /* If we cannot constrain the size of the reference we cannot
  1495. test if anything kills it. */
  1496. if (maxsize == -1)
  1497. return (void *)-1;
  1498. /* We can't deduce anything useful from clobbers. */
  1499. if (gimple_clobber_p (def_stmt))
  1500. return (void *)-1;
  1501. /* def_stmt may-defs *ref. See if we can derive a value for *ref
  1502. from that definition.
  1503. 1) Memset. */
  1504. if (is_gimple_reg_type (vr->type)
  1505. && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
  1506. && integer_zerop (gimple_call_arg (def_stmt, 1))
  1507. && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
  1508. && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
  1509. {
  1510. tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
  1511. tree base2;
  1512. HOST_WIDE_INT offset2, size2, maxsize2;
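/* The memset has to store to a region with known extent that fully
   covers the looked-up reference; the division check below guards
   against overflow when converting the byte length to bits.  */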
  1513. base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
  1514. size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
  1515. if ((unsigned HOST_WIDE_INT)size2 / 8
  1516. == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
  1517. && maxsize2 != -1
  1518. && operand_equal_p (base, base2, 0)
  1519. && offset2 <= offset
  1520. && offset2 + size2 >= offset + maxsize)
  1521. {
  1522. tree val = build_zero_cst (vr->type);
  1523. return vn_reference_lookup_or_insert_for_pieces
  1524. (vuse, vr->set, vr->type, vr->operands, val);
  1525. }
  1526. }
  1527. /* 2) Assignment from an empty CONSTRUCTOR. */
  1528. else if (is_gimple_reg_type (vr->type)
  1529. && gimple_assign_single_p (def_stmt)
  1530. && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
  1531. && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
  1532. {
  1533. tree base2;
  1534. HOST_WIDE_INT offset2, size2, maxsize2;
  1535. base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
  1536. &offset2, &size2, &maxsize2);
  1537. if (maxsize2 != -1
  1538. && operand_equal_p (base, base2, 0)
  1539. && offset2 <= offset
  1540. && offset2 + size2 >= offset + maxsize)
  1541. {
  1542. tree val = build_zero_cst (vr->type);
  1543. return vn_reference_lookup_or_insert_for_pieces
  1544. (vuse, vr->set, vr->type, vr->operands, val);
  1545. }
  1546. }
1547. /* 3) Assignment from a constant. We can use fold's native encode/interpret
  1548. routines to extract the assigned bits. */
  1549. else if (vn_walk_kind == VN_WALKREWRITE
  1550. && CHAR_BIT == 8 && BITS_PER_UNIT == 8
  1551. && ref->size == maxsize
  1552. && maxsize % BITS_PER_UNIT == 0
  1553. && offset % BITS_PER_UNIT == 0
  1554. && is_gimple_reg_type (vr->type)
  1555. && gimple_assign_single_p (def_stmt)
  1556. && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
  1557. {
  1558. tree base2;
  1559. HOST_WIDE_INT offset2, size2, maxsize2;
  1560. base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
  1561. &offset2, &size2, &maxsize2);
  1562. if (maxsize2 != -1
  1563. && maxsize2 == size2
  1564. && size2 % BITS_PER_UNIT == 0
  1565. && offset2 % BITS_PER_UNIT == 0
  1566. && operand_equal_p (base, base2, 0)
  1567. && offset2 <= offset
  1568. && offset2 + size2 >= offset + maxsize)
  1569. {
  1570. /* We support up to 512-bit values (for V8DFmode). */
  1571. unsigned char buffer[64];
  1572. int len;
  1573. len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
  1574. buffer, sizeof (buffer));
  1575. if (len > 0)
  1576. {
  1577. tree val = native_interpret_expr (vr->type,
  1578. buffer
  1579. + ((offset - offset2)
  1580. / BITS_PER_UNIT),
  1581. ref->size / BITS_PER_UNIT);
  1582. if (val)
  1583. return vn_reference_lookup_or_insert_for_pieces
  1584. (vuse, vr->set, vr->type, vr->operands, val);
  1585. }
  1586. }
  1587. }
1588. /* 4) Assignment from an SSA name whose definition we may be able
  1589. to access pieces from. */
  1590. else if (ref->size == maxsize
  1591. && is_gimple_reg_type (vr->type)
  1592. && gimple_assign_single_p (def_stmt)
  1593. && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
  1594. {
  1595. tree rhs1 = gimple_assign_rhs1 (def_stmt);
  1596. gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
  1597. if (is_gimple_assign (def_stmt2)
  1598. && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
  1599. || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
  1600. && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
  1601. {
  1602. tree base2;
  1603. HOST_WIDE_INT offset2, size2, maxsize2, off;
  1604. base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
  1605. &offset2, &size2, &maxsize2);
  1606. off = offset - offset2;
  1607. if (maxsize2 != -1
  1608. && maxsize2 == size2
  1609. && operand_equal_p (base, base2, 0)
  1610. && offset2 <= offset
  1611. && offset2 + size2 >= offset + maxsize)
  1612. {
  1613. tree val = NULL_TREE;
  1614. HOST_WIDE_INT elsz
  1615. = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
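/* ELSZ is the size of one piece of the RHS value.  For a COMPLEX_EXPR
   the real part lives at offset 0 and the imaginary part at ELSZ; for a
   vector CONSTRUCTOR pick the scalar element at the matching index.  */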
  1616. if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
  1617. {
  1618. if (off == 0)
  1619. val = gimple_assign_rhs1 (def_stmt2);
  1620. else if (off == elsz)
  1621. val = gimple_assign_rhs2 (def_stmt2);
  1622. }
  1623. else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
  1624. && off % elsz == 0)
  1625. {
  1626. tree ctor = gimple_assign_rhs1 (def_stmt2);
  1627. unsigned i = off / elsz;
  1628. if (i < CONSTRUCTOR_NELTS (ctor))
  1629. {
  1630. constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
  1631. if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
  1632. {
  1633. if (TREE_CODE (TREE_TYPE (elt->value))
  1634. != VECTOR_TYPE)
  1635. val = elt->value;
  1636. }
  1637. }
  1638. }
  1639. if (val)
  1640. return vn_reference_lookup_or_insert_for_pieces
  1641. (vuse, vr->set, vr->type, vr->operands, val);
  1642. }
  1643. }
  1644. }
  1645. /* 5) For aggregate copies translate the reference through them if
  1646. the copy kills ref. */
  1647. else if (vn_walk_kind == VN_WALKREWRITE
  1648. && gimple_assign_single_p (def_stmt)
  1649. && (DECL_P (gimple_assign_rhs1 (def_stmt))
  1650. || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
  1651. || handled_component_p (gimple_assign_rhs1 (def_stmt))))
  1652. {
  1653. tree base2;
  1654. HOST_WIDE_INT offset2, size2, maxsize2;
  1655. int i, j;
  1656. auto_vec<vn_reference_op_s> rhs;
  1657. vn_reference_op_t vro;
  1658. ao_ref r;
  1659. if (!lhs_ref_ok)
  1660. return (void *)-1;
  1661. /* See if the assignment kills REF. */
  1662. base2 = ao_ref_base (&lhs_ref);
  1663. offset2 = lhs_ref.offset;
  1664. size2 = lhs_ref.size;
  1665. maxsize2 = lhs_ref.max_size;
  1666. if (maxsize2 == -1
  1667. || (base != base2 && !operand_equal_p (base, base2, 0))
  1668. || offset2 > offset
  1669. || offset2 + size2 < offset + maxsize)
  1670. return (void *)-1;
  1671. /* Find the common base of ref and the lhs. lhs_ops already
  1672. contains valueized operands for the lhs. */
  1673. i = vr->operands.length () - 1;
  1674. j = lhs_ops.length () - 1;
  1675. while (j >= 0 && i >= 0
  1676. && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
  1677. {
  1678. i--;
  1679. j--;
  1680. }
  1681. /* ??? The innermost op should always be a MEM_REF and we already
  1682. checked that the assignment to the lhs kills vr. Thus for
  1683. aggregate copies using char[] types the vn_reference_op_eq
  1684. may fail when comparing types for compatibility. But we really
  1685. don't care here - further lookups with the rewritten operands
  1686. will simply fail if we messed up types too badly. */
  1687. HOST_WIDE_INT extra_off = 0;
  1688. if (j == 0 && i >= 0
  1689. && lhs_ops[0].opcode == MEM_REF
  1690. && lhs_ops[0].off != -1)
  1691. {
  1692. if (lhs_ops[0].off == vr->operands[i].off)
  1693. i--, j--;
  1694. else if (vr->operands[i].opcode == MEM_REF
  1695. && vr->operands[i].off != -1)
  1696. {
  1697. extra_off = vr->operands[i].off - lhs_ops[0].off;
  1698. i--, j--;
  1699. }
  1700. }
  1701. /* i now points to the first additional op.
  1702. ??? LHS may not be completely contained in VR, one or more
  1703. VIEW_CONVERT_EXPRs could be in its way. We could at least
  1704. try handling outermost VIEW_CONVERT_EXPRs. */
  1705. if (j != -1)
  1706. return (void *)-1;
  1707. /* Now re-write REF to be based on the rhs of the assignment. */
  1708. copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
  1709. /* Apply an extra offset to the inner MEM_REF of the RHS. */
  1710. if (extra_off != 0)
  1711. {
  1712. if (rhs.length () < 2
  1713. || rhs[0].opcode != MEM_REF
  1714. || rhs[0].off == -1)
  1715. return (void *)-1;
  1716. rhs[0].off += extra_off;
  1717. rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
  1718. build_int_cst (TREE_TYPE (rhs[0].op0),
  1719. extra_off));
  1720. }
  1721. /* We need to pre-pend vr->operands[0..i] to rhs. */
  1722. vec<vn_reference_op_s> old = vr->operands;
  1723. if (i + 1 + rhs.length () > vr->operands.length ())
  1724. {
  1725. vr->operands.safe_grow (i + 1 + rhs.length ());
  1726. if (old == shared_lookup_references)
  1727. shared_lookup_references = vr->operands;
  1728. }
  1729. else
  1730. vr->operands.truncate (i + 1 + rhs.length ());
  1731. FOR_EACH_VEC_ELT (rhs, j, vro)
  1732. vr->operands[i + 1 + j] = *vro;
  1733. vr->operands = valueize_refs (vr->operands);
  1734. if (old == shared_lookup_references)
  1735. shared_lookup_references = vr->operands;
  1736. vr->hashcode = vn_reference_compute_hash (vr);
  1737. /* Try folding the new reference to a constant. */
  1738. tree val = fully_constant_vn_reference_p (vr);
  1739. if (val)
  1740. return vn_reference_lookup_or_insert_for_pieces
  1741. (vuse, vr->set, vr->type, vr->operands, val);
  1742. /* Adjust *ref from the new operands. */
  1743. if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
  1744. return (void *)-1;
  1745. /* This can happen with bitfields. */
  1746. if (ref->size != r.size)
  1747. return (void *)-1;
  1748. *ref = r;
  1749. /* Do not update last seen VUSE after translating. */
  1750. last_vuse_ptr = NULL;
  1751. /* Keep looking for the adjusted *REF / VR pair. */
  1752. return NULL;
  1753. }
  1754. /* 6) For memcpy copies translate the reference through them if
  1755. the copy kills ref. */
  1756. else if (vn_walk_kind == VN_WALKREWRITE
  1757. && is_gimple_reg_type (vr->type)
  1758. /* ??? Handle BCOPY as well. */
  1759. && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
  1760. || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
  1761. || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
  1762. && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
  1763. || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
  1764. && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
  1765. || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
  1766. && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
  1767. {
  1768. tree lhs, rhs;
  1769. ao_ref r;
  1770. HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
  1771. vn_reference_op_s op;
  1772. HOST_WIDE_INT at;
  1773. /* Only handle non-variable, addressable refs. */
  1774. if (ref->size != maxsize
  1775. || offset % BITS_PER_UNIT != 0
  1776. || ref->size % BITS_PER_UNIT != 0)
  1777. return (void *)-1;
  1778. /* Extract a pointer base and an offset for the destination. */
  1779. lhs = gimple_call_arg (def_stmt, 0);
  1780. lhs_offset = 0;
  1781. if (TREE_CODE (lhs) == SSA_NAME)
  1782. lhs = SSA_VAL (lhs);
  1783. if (TREE_CODE (lhs) == ADDR_EXPR)
  1784. {
  1785. tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
  1786. &lhs_offset);
  1787. if (!tem)
  1788. return (void *)-1;
  1789. if (TREE_CODE (tem) == MEM_REF
  1790. && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
  1791. {
  1792. lhs = TREE_OPERAND (tem, 0);
  1793. lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
  1794. }
  1795. else if (DECL_P (tem))
  1796. lhs = build_fold_addr_expr (tem);
  1797. else
  1798. return (void *)-1;
  1799. }
  1800. if (TREE_CODE (lhs) != SSA_NAME
  1801. && TREE_CODE (lhs) != ADDR_EXPR)
  1802. return (void *)-1;
  1803. /* Extract a pointer base and an offset for the source. */
  1804. rhs = gimple_call_arg (def_stmt, 1);
  1805. rhs_offset = 0;
  1806. if (TREE_CODE (rhs) == SSA_NAME)
  1807. rhs = SSA_VAL (rhs);
  1808. if (TREE_CODE (rhs) == ADDR_EXPR)
  1809. {
  1810. tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
  1811. &rhs_offset);
  1812. if (!tem)
  1813. return (void *)-1;
  1814. if (TREE_CODE (tem) == MEM_REF
  1815. && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
  1816. {
  1817. rhs = TREE_OPERAND (tem, 0);
  1818. rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
  1819. }
  1820. else if (DECL_P (tem))
  1821. rhs = build_fold_addr_expr (tem);
  1822. else
  1823. return (void *)-1;
  1824. }
  1825. if (TREE_CODE (rhs) != SSA_NAME
  1826. && TREE_CODE (rhs) != ADDR_EXPR)
  1827. return (void *)-1;
  1828. copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
  1829. /* The bases of the destination and the references have to agree. */
  1830. if ((TREE_CODE (base) != MEM_REF
  1831. && !DECL_P (base))
  1832. || (TREE_CODE (base) == MEM_REF
  1833. && (TREE_OPERAND (base, 0) != lhs
  1834. || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
  1835. || (DECL_P (base)
  1836. && (TREE_CODE (lhs) != ADDR_EXPR
  1837. || TREE_OPERAND (lhs, 0) != base)))
  1838. return (void *)-1;
  1839. /* And the access has to be contained within the memcpy destination. */
  1840. at = offset / BITS_PER_UNIT;
  1841. if (TREE_CODE (base) == MEM_REF)
  1842. at += tree_to_uhwi (TREE_OPERAND (base, 1));
  1843. if (lhs_offset > at
  1844. || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
  1845. return (void *)-1;
  1846. /* Make room for 2 operands in the new reference. */
  1847. if (vr->operands.length () < 2)
  1848. {
  1849. vec<vn_reference_op_s> old = vr->operands;
  1850. vr->operands.safe_grow_cleared (2);
  1851. if (old == shared_lookup_references
  1852. && vr->operands != old)
  1853. shared_lookup_references = vr->operands;
  1854. }
  1855. else
  1856. vr->operands.truncate (2);
  1857. /* The looked-through reference is a simple MEM_REF. */
  1858. memset (&op, 0, sizeof (op));
  1859. op.type = vr->type;
  1860. op.opcode = MEM_REF;
  1861. op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
  1862. op.off = at - lhs_offset + rhs_offset;
  1863. vr->operands[0] = op;
  1864. op.type = TREE_TYPE (rhs);
  1865. op.opcode = TREE_CODE (rhs);
  1866. op.op0 = rhs;
  1867. op.off = -1;
  1868. vr->operands[1] = op;
  1869. vr->hashcode = vn_reference_compute_hash (vr);
  1870. /* Adjust *ref from the new operands. */
  1871. if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
  1872. return (void *)-1;
  1873. /* This can happen with bitfields. */
  1874. if (ref->size != r.size)
  1875. return (void *)-1;
  1876. *ref = r;
  1877. /* Do not update last seen VUSE after translating. */
  1878. last_vuse_ptr = NULL;
  1879. /* Keep looking for the adjusted *REF / VR pair. */
  1880. return NULL;
  1881. }
  1882. /* Bail out and stop walking. */
  1883. return (void *)-1;
  1884. }
1885. /* Lookup a reference operation by its parts, in the current hash table.
  1886. Returns the resulting value number if it exists in the hash table,
  1887. NULL_TREE otherwise. VNRESULT will be filled in with the actual
  1888. vn_reference_t stored in the hashtable if something is found. */
  1889. tree
  1890. vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
  1891. vec<vn_reference_op_s> operands,
  1892. vn_reference_t *vnresult, vn_lookup_kind kind)
  1893. {
  1894. struct vn_reference_s vr1;
  1895. vn_reference_t tmp;
  1896. tree cst;
  1897. if (!vnresult)
  1898. vnresult = &tmp;
  1899. *vnresult = NULL;
  1900. vr1.vuse = vuse_ssa_val (vuse);
  1901. shared_lookup_references.truncate (0);
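/* Work on a copy of OPERANDS in the shared lookup vector so that the
   valueization below does not modify the caller's operands.  */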
  1902. shared_lookup_references.safe_grow (operands.length ());
  1903. memcpy (shared_lookup_references.address (),
  1904. operands.address (),
  1905. sizeof (vn_reference_op_s)
  1906. * operands.length ());
  1907. vr1.operands = operands = shared_lookup_references
  1908. = valueize_refs (shared_lookup_references);
  1909. vr1.type = type;
  1910. vr1.set = set;
  1911. vr1.hashcode = vn_reference_compute_hash (&vr1);
  1912. if ((cst = fully_constant_vn_reference_p (&vr1)))
  1913. return cst;
  1914. vn_reference_lookup_1 (&vr1, vnresult);
  1915. if (!*vnresult
  1916. && kind != VN_NOWALK
  1917. && vr1.vuse)
  1918. {
  1919. ao_ref r;
  1920. vn_walk_kind = kind;
  1921. if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
  1922. *vnresult =
  1923. (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
  1924. vn_reference_lookup_2,
  1925. vn_reference_lookup_3,
  1926. vuse_ssa_val, &vr1);
  1927. gcc_checking_assert (vr1.operands == shared_lookup_references);
  1928. }
  1929. if (*vnresult)
  1930. return (*vnresult)->result;
  1931. return NULL_TREE;
  1932. }
  1933. /* Lookup OP in the current hash table, and return the resulting value
  1934. number if it exists in the hash table. Return NULL_TREE if it does
  1935. not exist in the hash table or if the result field of the structure
1936. was NULL. VNRESULT will be filled in with the vn_reference_t
  1937. stored in the hashtable if one exists. */
  1938. tree
  1939. vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
  1940. vn_reference_t *vnresult)
  1941. {
  1942. vec<vn_reference_op_s> operands;
  1943. struct vn_reference_s vr1;
  1944. tree cst;
1945. bool valueized_anything;
  1946. if (vnresult)
  1947. *vnresult = NULL;
  1948. vr1.vuse = vuse_ssa_val (vuse);
  1949. vr1.operands = operands
1950. = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  1951. vr1.type = TREE_TYPE (op);
  1952. vr1.set = get_alias_set (op);
  1953. vr1.hashcode = vn_reference_compute_hash (&vr1);
  1954. if ((cst = fully_constant_vn_reference_p (&vr1)))
  1955. return cst;
  1956. if (kind != VN_NOWALK
  1957. && vr1.vuse)
  1958. {
  1959. vn_reference_t wvnresult;
  1960. ao_ref r;
  1961. /* Make sure to use a valueized reference if we valueized anything.
  1962. Otherwise preserve the full reference for advanced TBAA. */
1963. if (!valueized_anything
  1964. || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
  1965. vr1.operands))
  1966. ao_ref_init (&r, op);
  1967. vn_walk_kind = kind;
  1968. wvnresult =
  1969. (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
  1970. vn_reference_lookup_2,
  1971. vn_reference_lookup_3,
  1972. vuse_ssa_val, &vr1);
  1973. gcc_checking_assert (vr1.operands == shared_lookup_references);
  1974. if (wvnresult)
  1975. {
  1976. if (vnresult)
  1977. *vnresult = wvnresult;
  1978. return wvnresult->result;
  1979. }
  1980. return NULL_TREE;
  1981. }
  1982. return vn_reference_lookup_1 (&vr1, vnresult);
  1983. }
  1984. /* Lookup CALL in the current hash table and return the entry in
  1985. *VNRESULT if found. Populates *VR for the hashtable lookup. */
  1986. void
  1987. vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
  1988. vn_reference_t vr)
  1989. {
  1990. if (vnresult)
  1991. *vnresult = NULL;
  1992. tree vuse = gimple_vuse (call);
  1993. vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  1994. vr->operands = valueize_shared_reference_ops_from_call (call);
  1995. vr->type = gimple_expr_type (call);
  1996. vr->set = 0;
  1997. vr->hashcode = vn_reference_compute_hash (vr);
  1998. vn_reference_lookup_1 (vr, vnresult);
  1999. }
  2000. /* Insert OP into the current hash table with a value number of
  2001. RESULT, and return the resulting reference structure we created. */
  2002. static vn_reference_t
  2003. vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
  2004. {
  2005. vn_reference_s **slot;
  2006. vn_reference_t vr1;
  2007. bool tem;
  2008. vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  2009. if (TREE_CODE (result) == SSA_NAME)
  2010. vr1->value_id = VN_INFO (result)->value_id;
  2011. else
  2012. vr1->value_id = get_or_alloc_constant_value_id (result);
  2013. vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  2014. vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  2015. vr1->type = TREE_TYPE (op);
  2016. vr1->set = get_alias_set (op);
  2017. vr1->hashcode = vn_reference_compute_hash (vr1);
  2018. vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  2019. vr1->result_vdef = vdef;
  2020. slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
  2021. INSERT);
2022. /* Because we look up stores using vuses, and value number failures
  2023. using the vdefs (see visit_reference_op_store for how and why),
  2024. it's possible that on failure we may try to insert an already
  2025. inserted store. This is not wrong, there is no ssa name for a
  2026. store that we could use as a differentiator anyway. Thus, unlike
  2027. the other lookup functions, you cannot gcc_assert (!*slot)
  2028. here. */
  2029. /* But free the old slot in case of a collision. */
  2030. if (*slot)
  2031. free_reference (*slot);
  2032. *slot = vr1;
  2033. return vr1;
  2034. }
2035. /* Insert a reference by its pieces into the current hash table with
  2036. a value number of RESULT. Return the resulting reference
  2037. structure we created. */
  2038. vn_reference_t
  2039. vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
  2040. vec<vn_reference_op_s> operands,
  2041. tree result, unsigned int value_id)
  2042. {
  2043. vn_reference_s **slot;
  2044. vn_reference_t vr1;
  2045. vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  2046. vr1->value_id = value_id;
  2047. vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  2048. vr1->operands = valueize_refs (operands);
  2049. vr1->type = type;
  2050. vr1->set = set;
  2051. vr1->hashcode = vn_reference_compute_hash (vr1);
  2052. if (result && TREE_CODE (result) == SSA_NAME)
  2053. result = SSA_VAL (result);
  2054. vr1->result = result;
  2055. slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
  2056. INSERT);
  2057. /* At this point we should have all the things inserted that we have
  2058. seen before, and we should never try inserting something that
  2059. already exists. */
  2060. gcc_assert (!*slot);
  2061. if (*slot)
  2062. free_reference (*slot);
  2063. *slot = vr1;
  2064. return vr1;
  2065. }
2066. /* Compute and return the hash value for nary operation VNO1. */
  2067. static hashval_t
  2068. vn_nary_op_compute_hash (const vn_nary_op_t vno1)
  2069. {
  2070. inchash::hash hstate;
  2071. unsigned i;
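/* Canonicalize before hashing: replace SSA operands by their value
   numbers and put the operands of commutative operations into a
   canonical order so that equivalent operations hash the same.  */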
  2072. for (i = 0; i < vno1->length; ++i)
  2073. if (TREE_CODE (vno1->op[i]) == SSA_NAME)
  2074. vno1->op[i] = SSA_VAL (vno1->op[i]);
  2075. if (vno1->length == 2
  2076. && commutative_tree_code (vno1->opcode)
  2077. && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
  2078. {
  2079. tree temp = vno1->op[0];
  2080. vno1->op[0] = vno1->op[1];
  2081. vno1->op[1] = temp;
  2082. }
  2083. hstate.add_int (vno1->opcode);
  2084. for (i = 0; i < vno1->length; ++i)
  2085. inchash::add_expr (vno1->op[i], hstate);
  2086. return hstate.end ();
  2087. }
  2088. /* Compare nary operations VNO1 and VNO2 and return true if they are
  2089. equivalent. */
  2090. bool
  2091. vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
  2092. {
  2093. unsigned i;
  2094. if (vno1->hashcode != vno2->hashcode)
  2095. return false;
  2096. if (vno1->length != vno2->length)
  2097. return false;
  2098. if (vno1->opcode != vno2->opcode
  2099. || !types_compatible_p (vno1->type, vno2->type))
  2100. return false;
  2101. for (i = 0; i < vno1->length; ++i)
  2102. if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
  2103. return false;
  2104. return true;
  2105. }
  2106. /* Initialize VNO from the pieces provided. */
  2107. static void
  2108. init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
  2109. enum tree_code code, tree type, tree *ops)
  2110. {
  2111. vno->opcode = code;
  2112. vno->length = length;
  2113. vno->type = type;
  2114. memcpy (&vno->op[0], ops, sizeof (tree) * length);
  2115. }
  2116. /* Initialize VNO from OP. */
  2117. static void
  2118. init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
  2119. {
  2120. unsigned i;
  2121. vno->opcode = TREE_CODE (op);
  2122. vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  2123. vno->type = TREE_TYPE (op);
  2124. for (i = 0; i < vno->length; ++i)
  2125. vno->op[i] = TREE_OPERAND (op, i);
  2126. }
  2127. /* Return the number of operands for a vn_nary ops structure from STMT. */
  2128. static unsigned int
  2129. vn_nary_length_from_stmt (gimple stmt)
  2130. {
  2131. switch (gimple_assign_rhs_code (stmt))
  2132. {
  2133. case REALPART_EXPR:
  2134. case IMAGPART_EXPR:
  2135. case VIEW_CONVERT_EXPR:
  2136. return 1;
  2137. case BIT_FIELD_REF:
  2138. return 3;
  2139. case CONSTRUCTOR:
  2140. return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
  2141. default:
  2142. return gimple_num_ops (stmt) - 1;
  2143. }
  2144. }
  2145. /* Initialize VNO from STMT. */
  2146. static void
  2147. init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
  2148. {
  2149. unsigned i;
  2150. vno->opcode = gimple_assign_rhs_code (stmt);
  2151. vno->type = gimple_expr_type (stmt);
  2152. switch (vno->opcode)
  2153. {
  2154. case REALPART_EXPR:
  2155. case IMAGPART_EXPR:
  2156. case VIEW_CONVERT_EXPR:
  2157. vno->length = 1;
  2158. vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
  2159. break;
  2160. case BIT_FIELD_REF:
  2161. vno->length = 3;
  2162. vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
  2163. vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
  2164. vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
  2165. break;
  2166. case CONSTRUCTOR:
  2167. vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
  2168. for (i = 0; i < vno->length; ++i)
  2169. vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
  2170. break;
  2171. default:
  2172. gcc_checking_assert (!gimple_assign_single_p (stmt));
  2173. vno->length = gimple_num_ops (stmt) - 1;
  2174. for (i = 0; i < vno->length; ++i)
  2175. vno->op[i] = gimple_op (stmt, i + 1);
  2176. }
  2177. }
  2178. /* Compute the hashcode for VNO and look for it in the hash table;
  2179. return the resulting value number if it exists in the hash table.
  2180. Return NULL_TREE if it does not exist in the hash table or if the
  2181. result field of the operation is NULL. VNRESULT will contain the
  2182. vn_nary_op_t from the hashtable if it exists. */
  2183. static tree
  2184. vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
  2185. {
  2186. vn_nary_op_s **slot;
  2187. if (vnresult)
  2188. *vnresult = NULL;
  2189. vno->hashcode = vn_nary_op_compute_hash (vno);
  2190. slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
  2191. NO_INSERT);
  2192. if (!slot && current_info == optimistic_info)
  2193. slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
  2194. NO_INSERT);
  2195. if (!slot)
  2196. return NULL_TREE;
  2197. if (vnresult)
  2198. *vnresult = *slot;
  2199. return (*slot)->result;
  2200. }
2201. /* Lookup an n-ary operation by its pieces and return the resulting value
  2202. number if it exists in the hash table. Return NULL_TREE if it does
  2203. not exist in the hash table or if the result field of the operation
  2204. is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
  2205. if it exists. */
  2206. tree
  2207. vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
  2208. tree type, tree *ops, vn_nary_op_t *vnresult)
  2209. {
  2210. vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
  2211. sizeof_vn_nary_op (length));
  2212. init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  2213. return vn_nary_op_lookup_1 (vno1, vnresult);
  2214. }
  2215. /* Lookup OP in the current hash table, and return the resulting value
  2216. number if it exists in the hash table. Return NULL_TREE if it does
  2217. not exist in the hash table or if the result field of the operation
  2218. is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
  2219. if it exists. */
  2220. tree
  2221. vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
  2222. {
  2223. vn_nary_op_t vno1
  2224. = XALLOCAVAR (struct vn_nary_op_s,
  2225. sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  2226. init_vn_nary_op_from_op (vno1, op);
  2227. return vn_nary_op_lookup_1 (vno1, vnresult);
  2228. }
  2229. /* Lookup the rhs of STMT in the current hash table, and return the resulting
  2230. value number if it exists in the hash table. Return NULL_TREE if
  2231. it does not exist in the hash table. VNRESULT will contain the
  2232. vn_nary_op_t from the hashtable if it exists. */
  2233. tree
  2234. vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
  2235. {
  2236. vn_nary_op_t vno1
  2237. = XALLOCAVAR (struct vn_nary_op_s,
  2238. sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  2239. init_vn_nary_op_from_stmt (vno1, stmt);
  2240. return vn_nary_op_lookup_1 (vno1, vnresult);
  2241. }
  2242. /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
  2243. static vn_nary_op_t
  2244. alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
  2245. {
  2246. return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
  2247. }
  2248. /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
  2249. obstack. */
  2250. static vn_nary_op_t
  2251. alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
  2252. {
  2253. vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
  2254. &current_info->nary_obstack);
  2255. vno1->value_id = value_id;
  2256. vno1->length = length;
  2257. vno1->result = result;
  2258. return vno1;
  2259. }
  2260. /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
  2261. VNO->HASHCODE first. */
  2262. static vn_nary_op_t
  2263. vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
  2264. bool compute_hash)
  2265. {
  2266. vn_nary_op_s **slot;
  2267. if (compute_hash)
  2268. vno->hashcode = vn_nary_op_compute_hash (vno);
  2269. slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  2270. gcc_assert (!*slot);
  2271. *slot = vno;
  2272. return vno;
  2273. }
2274. /* Insert an n-ary operation into the current hash table using its
  2275. pieces. Return the vn_nary_op_t structure we created and put in
  2276. the hashtable. */
  2277. vn_nary_op_t
  2278. vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
  2279. tree type, tree *ops,
  2280. tree result, unsigned int value_id)
  2281. {
  2282. vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  2283. init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  2284. return vn_nary_op_insert_into (vno1, current_info->nary, true);
  2285. }
  2286. /* Insert OP into the current hash table with a value number of
  2287. RESULT. Return the vn_nary_op_t structure we created and put in
  2288. the hashtable. */
  2289. vn_nary_op_t
  2290. vn_nary_op_insert (tree op, tree result)
  2291. {
  2292. unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  2293. vn_nary_op_t vno1;
  2294. vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  2295. init_vn_nary_op_from_op (vno1, op);
  2296. return vn_nary_op_insert_into (vno1, current_info->nary, true);
  2297. }
  2298. /* Insert the rhs of STMT into the current hash table with a value number of
  2299. RESULT. */
  2300. vn_nary_op_t
  2301. vn_nary_op_insert_stmt (gimple stmt, tree result)
  2302. {
  2303. vn_nary_op_t vno1
  2304. = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
  2305. result, VN_INFO (result)->value_id);
  2306. init_vn_nary_op_from_stmt (vno1, stmt);
  2307. return vn_nary_op_insert_into (vno1, current_info->nary, true);
  2308. }
  2309. /* Compute a hashcode for PHI operation VP1 and return it. */
  2310. static inline hashval_t
  2311. vn_phi_compute_hash (vn_phi_t vp1)
  2312. {
  2313. inchash::hash hstate (vp1->block->index);
  2314. int i;
  2315. tree phi1op;
  2316. tree type;
  2317. /* If all PHI arguments are constants we need to distinguish
  2318. the PHI node via its type. */
  2319. type = vp1->type;
  2320. hstate.merge_hash (vn_hash_type (type));
  2321. FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
  2322. {
  2323. if (phi1op == VN_TOP)
  2324. continue;
  2325. inchash::add_expr (phi1op, hstate);
  2326. }
  2327. return hstate.end ();
  2328. }
  2329. /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
  2330. static int
  2331. vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
  2332. {
  2333. if (vp1->hashcode != vp2->hashcode)
  2334. return false;
  2335. if (vp1->block == vp2->block)
  2336. {
  2337. int i;
  2338. tree phi1op;
  2339. /* If the PHI nodes do not have compatible types
  2340. they are not the same. */
  2341. if (!types_compatible_p (vp1->type, vp2->type))
  2342. return false;
2343. /* Any phi in the same block will have its arguments in the
  2344. same edge order, because of how we store phi nodes. */
  2345. FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
  2346. {
  2347. tree phi2op = vp2->phiargs[i];
  2348. if (phi1op == VN_TOP || phi2op == VN_TOP)
  2349. continue;
  2350. if (!expressions_equal_p (phi1op, phi2op))
  2351. return false;
  2352. }
  2353. return true;
  2354. }
  2355. return false;
  2356. }
  2357. static vec<tree> shared_lookup_phiargs;
  2358. /* Lookup PHI in the current hash table, and return the resulting
  2359. value number if it exists in the hash table. Return NULL_TREE if
  2360. it does not exist in the hash table. */
  2361. static tree
  2362. vn_phi_lookup (gimple phi)
  2363. {
  2364. vn_phi_s **slot;
  2365. struct vn_phi_s vp1;
  2366. unsigned i;
  2367. shared_lookup_phiargs.truncate (0);
  2368. /* Canonicalize the SSA_NAME's to their value number. */
  2369. for (i = 0; i < gimple_phi_num_args (phi); i++)
  2370. {
  2371. tree def = PHI_ARG_DEF (phi, i);
  2372. def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
  2373. shared_lookup_phiargs.safe_push (def);
  2374. }
  2375. vp1.type = TREE_TYPE (gimple_phi_result (phi));
  2376. vp1.phiargs = shared_lookup_phiargs;
  2377. vp1.block = gimple_bb (phi);
  2378. vp1.hashcode = vn_phi_compute_hash (&vp1);
  2379. slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
  2380. NO_INSERT);
  2381. if (!slot && current_info == optimistic_info)
  2382. slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
  2383. NO_INSERT);
  2384. if (!slot)
  2385. return NULL_TREE;
  2386. return (*slot)->result;
  2387. }
  2388. /* Insert PHI into the current hash table with a value number of
  2389. RESULT. */
  2390. static vn_phi_t
  2391. vn_phi_insert (gimple phi, tree result)
  2392. {
  2393. vn_phi_s **slot;
  2394. vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  2395. unsigned i;
  2396. vec<tree> args = vNULL;
  2397. /* Canonicalize the SSA_NAME's to their value number. */
  2398. for (i = 0; i < gimple_phi_num_args (phi); i++)
  2399. {
  2400. tree def = PHI_ARG_DEF (phi, i);
  2401. def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
  2402. args.safe_push (def);
  2403. }
  2404. vp1->value_id = VN_INFO (result)->value_id;
  2405. vp1->type = TREE_TYPE (gimple_phi_result (phi));
  2406. vp1->phiargs = args;
  2407. vp1->block = gimple_bb (phi);
  2408. vp1->result = result;
  2409. vp1->hashcode = vn_phi_compute_hash (vp1);
  2410. slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  2411. /* Because we iterate over phi operations more than once, it's
2412. possible the slot might already exist here, hence no assert.  */
  2413. *slot = vp1;
  2414. return vp1;
  2415. }
  2416. /* Print set of components in strongly connected component SCC to OUT. */
  2417. static void
  2418. print_scc (FILE *out, vec<tree> scc)
  2419. {
  2420. tree var;
  2421. unsigned int i;
  2422. fprintf (out, "SCC consists of:");
  2423. FOR_EACH_VEC_ELT (scc, i, var)
  2424. {
  2425. fprintf (out, " ");
  2426. print_generic_expr (out, var, 0);
  2427. }
  2428. fprintf (out, "\n");
  2429. }
  2430. /* Set the value number of FROM to TO, return true if it has changed
  2431. as a result. */
  2432. static inline bool
  2433. set_ssa_val_to (tree from, tree to)
  2434. {
  2435. tree currval = SSA_VAL (from);
  2436. HOST_WIDE_INT toff, coff;
2437. /* The only things we allow as value numbers are ssa_names
  2438. and invariants. So assert that here. We don't allow VN_TOP
  2439. as visiting a stmt should produce a value-number other than
  2440. that.
  2441. ??? Still VN_TOP can happen for unreachable code, so force
  2442. it to varying in that case. Not all code is prepared to
  2443. get VN_TOP on valueization. */
  2444. if (to == VN_TOP)
  2445. {
  2446. if (dump_file && (dump_flags & TDF_DETAILS))
  2447. fprintf (dump_file, "Forcing value number to varying on "
  2448. "receiving VN_TOP\n");
  2449. to = from;
  2450. }
  2451. gcc_assert (to != NULL_TREE
  2452. && ((TREE_CODE (to) == SSA_NAME
  2453. && (to == from || SSA_VAL (to) == to))
  2454. || is_gimple_min_invariant (to)));
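/* If FROM already has itself as its value number it is VARYING; do not
   move it to anything else.  Also avoid recording SSA names that occur
   in abnormal PHIs as values, they must not be propagated.  */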
  2455. if (from != to)
  2456. {
  2457. if (currval == from)
  2458. {
  2459. if (dump_file && (dump_flags & TDF_DETAILS))
  2460. {
  2461. fprintf (dump_file, "Not changing value number of ");
  2462. print_generic_expr (dump_file, from, 0);
  2463. fprintf (dump_file, " from VARYING to ");
  2464. print_generic_expr (dump_file, to, 0);
  2465. fprintf (dump_file, "\n");
  2466. }
  2467. return false;
  2468. }
  2469. else if (TREE_CODE (to) == SSA_NAME
  2470. && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
  2471. to = from;
  2472. }
  2473. if (dump_file && (dump_flags & TDF_DETAILS))
  2474. {
  2475. fprintf (dump_file, "Setting value number of ");
  2476. print_generic_expr (dump_file, from, 0);
  2477. fprintf (dump_file, " to ");
  2478. print_generic_expr (dump_file, to, 0);
  2479. }
  2480. if (currval != to
  2481. && !operand_equal_p (currval, to, 0)
  2482. /* ??? For addresses involving volatile objects or types operand_equal_p
  2483. does not reliably detect ADDR_EXPRs as equal. We know we are only
  2484. getting invariant gimple addresses here, so can use
  2485. get_addr_base_and_unit_offset to do this comparison. */
  2486. && !(TREE_CODE (currval) == ADDR_EXPR
  2487. && TREE_CODE (to) == ADDR_EXPR
  2488. && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
  2489. == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
  2490. && coff == toff))
  2491. {
  2492. VN_INFO (from)->valnum = to;
  2493. if (dump_file && (dump_flags & TDF_DETAILS))
  2494. fprintf (dump_file, " (changed)\n");
  2495. return true;
  2496. }
  2497. if (dump_file && (dump_flags & TDF_DETAILS))
  2498. fprintf (dump_file, "\n");
  2499. return false;
  2500. }
  2501. /* Mark as processed all the definitions in the defining stmt of USE, or
  2502. the USE itself. */
  2503. static void
  2504. mark_use_processed (tree use)
  2505. {
  2506. ssa_op_iter iter;
  2507. def_operand_p defp;
  2508. gimple stmt = SSA_NAME_DEF_STMT (use);
  2509. if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
  2510. {
  2511. VN_INFO (use)->use_processed = true;
  2512. return;
  2513. }
  2514. FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
  2515. {
  2516. tree def = DEF_FROM_PTR (defp);
  2517. VN_INFO (def)->use_processed = true;
  2518. }
  2519. }
2520. /* Set the value number of all definitions in STMT to themselves.
  2521. Return true if a value number changed. */
  2522. static bool
  2523. defs_to_varying (gimple stmt)
  2524. {
  2525. bool changed = false;
  2526. ssa_op_iter iter;
  2527. def_operand_p defp;
  2528. FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
  2529. {
  2530. tree def = DEF_FROM_PTR (defp);
  2531. changed |= set_ssa_val_to (def, def);
  2532. }
  2533. return changed;
  2534. }
  2535. static bool expr_has_constants (tree expr);
  2536. /* Visit a copy between LHS and RHS, return true if the value number
  2537. changed. */
  2538. static bool
  2539. visit_copy (tree lhs, tree rhs)
  2540. {
  2541. /* The copy may have a more interesting constant filled expression
  2542. (we don't, since we know our RHS is just an SSA name). */
  2543. VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
  2544. VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
  2545. /* And finally valueize. */
  2546. rhs = SSA_VAL (rhs);
  2547. return set_ssa_val_to (lhs, rhs);
  2548. }
  2549. /* Visit a nary operator RHS, value number it, and return true if the
  2550. value number of LHS has changed as a result. */
  2551. static bool
  2552. visit_nary_op (tree lhs, gimple stmt)
  2553. {
  2554. bool changed = false;
  2555. tree result = vn_nary_op_lookup_stmt (stmt, NULL);
  2556. if (result)
  2557. changed = set_ssa_val_to (lhs, result);
  2558. else
  2559. {
  2560. changed = set_ssa_val_to (lhs, lhs);
  2561. vn_nary_op_insert_stmt (stmt, lhs);
  2562. }
  2563. return changed;
  2564. }
  2565. /* Visit a call STMT storing into LHS. Return true if the value number
  2566. of the LHS has changed as a result. */
  2567. static bool
  2568. visit_reference_op_call (tree lhs, gcall *stmt)
  2569. {
  2570. bool changed = false;
  2571. struct vn_reference_s vr1;
  2572. vn_reference_t vnresult = NULL;
  2573. tree vdef = gimple_vdef (stmt);
  2574. /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
  2575. if (lhs && TREE_CODE (lhs) != SSA_NAME)
  2576. lhs = NULL_TREE;
  2577. vn_reference_lookup_call (stmt, &vnresult, &vr1);
  2578. if (vnresult)
  2579. {
  2580. if (vnresult->result_vdef && vdef)
  2581. changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
  2582. if (!vnresult->result && lhs)
  2583. vnresult->result = lhs;
  2584. if (vnresult->result && lhs)
  2585. {
  2586. changed |= set_ssa_val_to (lhs, vnresult->result);
  2587. if (VN_INFO (vnresult->result)->has_constants)
  2588. VN_INFO (lhs)->has_constants = true;
  2589. }
  2590. }
  2591. else
  2592. {
  2593. vn_reference_t vr2;
  2594. vn_reference_s **slot;
  2595. if (vdef)
  2596. changed |= set_ssa_val_to (vdef, vdef);
  2597. if (lhs)
  2598. changed |= set_ssa_val_to (lhs, lhs);
  2599. vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
  2600. vr2->vuse = vr1.vuse;
  2601. /* As we are not walking the virtual operand chain we know the
  2602. shared_lookup_references are still original so we can re-use
  2603. them here. */
  2604. vr2->operands = vr1.operands.copy ();
  2605. vr2->type = vr1.type;
  2606. vr2->set = vr1.set;
  2607. vr2->hashcode = vr1.hashcode;
  2608. vr2->result = lhs;
  2609. vr2->result_vdef = vdef;
  2610. slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
  2611. INSERT);
  2612. gcc_assert (!*slot);
  2613. *slot = vr2;
  2614. }
  2615. return changed;
  2616. }
  2617. /* Visit a load from a reference operator RHS, part of STMT, value number it,
  2618. and return true if the value number of the LHS has changed as a result. */
  2619. static bool
  2620. visit_reference_op_load (tree lhs, tree op, gimple stmt)
  2621. {
  2622. bool changed = false;
  2623. tree last_vuse;
  2624. tree result;
  2625. last_vuse = gimple_vuse (stmt);
  2626. last_vuse_ptr = &last_vuse;
  2627. result = vn_reference_lookup (op, gimple_vuse (stmt),
  2628. default_vn_walk_kind, NULL);
  2629. last_vuse_ptr = NULL;
  2630. /* We handle type-punning through unions by value-numbering based
  2631. on offset and size of the access. Be prepared to handle a
  2632. type-mismatch here via creating a VIEW_CONVERT_EXPR. */
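/* Illustrative example (not from the original comments): if a union member
   was stored through an int i_1 and is now read back as a float, the
   lookup returns i_1 and the load is value numbered as if it were
     VIEW_CONVERT_EXPR<float>(i_1)
   rather than giving up because of the type mismatch. */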
  2633. if (result
  2634. && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
  2635. {
  2636. /* We will be setting the value number of lhs to the value number
  2637. of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
  2638. So first simplify and lookup this expression to see if it
  2639. is already available. */
  2640. tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
  2641. if ((CONVERT_EXPR_P (val)
  2642. || TREE_CODE (val) == VIEW_CONVERT_EXPR)
  2643. && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
  2644. {
  2645. tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
  2646. if ((CONVERT_EXPR_P (tem)
  2647. || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
  2648. && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
  2649. TREE_TYPE (val), tem)))
  2650. val = tem;
  2651. }
  2652. result = val;
  2653. if (!is_gimple_min_invariant (val)
  2654. && TREE_CODE (val) != SSA_NAME)
  2655. result = vn_nary_op_lookup (val, NULL);
  2656. /* If the expression is not yet available, value-number lhs to
  2657. a new SSA_NAME we create. */
  2658. if (!result)
  2659. {
  2660. result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
  2661. "vntemp");
  2662. /* Initialize value-number information properly. */
  2663. VN_INFO_GET (result)->valnum = result;
  2664. VN_INFO (result)->value_id = get_next_value_id ();
  2665. VN_INFO (result)->expr = val;
  2666. VN_INFO (result)->has_constants = expr_has_constants (val);
  2667. VN_INFO (result)->needs_insertion = true;
  2668. /* As all "inserted" statements are singleton SCCs, insert
  2669. to the valid table. This is strictly needed to
  2670. avoid re-generating new value SSA_NAMEs for the same
  2671. expression during SCC iteration over and over (the
  2672. optimistic table gets cleared after each iteration).
  2673. We do not need to insert into the optimistic table, as
  2674. lookups there will fall back to the valid table. */
  2675. if (current_info == optimistic_info)
  2676. {
  2677. current_info = valid_info;
  2678. vn_nary_op_insert (val, result);
  2679. current_info = optimistic_info;
  2680. }
  2681. else
  2682. vn_nary_op_insert (val, result);
  2683. if (dump_file && (dump_flags & TDF_DETAILS))
  2684. {
  2685. fprintf (dump_file, "Inserting name ");
  2686. print_generic_expr (dump_file, result, 0);
  2687. fprintf (dump_file, " for expression ");
  2688. print_generic_expr (dump_file, val, 0);
  2689. fprintf (dump_file, "\n");
  2690. }
  2691. }
  2692. }
  2693. if (result)
  2694. {
  2695. changed = set_ssa_val_to (lhs, result);
  2696. if (TREE_CODE (result) == SSA_NAME
  2697. && VN_INFO (result)->has_constants)
  2698. {
  2699. VN_INFO (lhs)->expr = VN_INFO (result)->expr;
  2700. VN_INFO (lhs)->has_constants = true;
  2701. }
  2702. }
  2703. else
  2704. {
  2705. changed = set_ssa_val_to (lhs, lhs);
  2706. vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
  2707. }
  2708. return changed;
  2709. }
  2710. /* Visit a store to a reference operator LHS, part of STMT, value number it,
  2711. and return true if the value number of the LHS has changed as a result. */
  2712. static bool
  2713. visit_reference_op_store (tree lhs, tree op, gimple stmt)
  2714. {
  2715. bool changed = false;
  2716. vn_reference_t vnresult = NULL;
  2717. tree result, assign;
  2718. bool resultsame = false;
  2719. tree vuse = gimple_vuse (stmt);
  2720. tree vdef = gimple_vdef (stmt);
  2721. if (TREE_CODE (op) == SSA_NAME)
  2722. op = SSA_VAL (op);
/* First we want to look up, using the *vuses* from the store, whether
   the last store to this location with the same address had the same
   value.
   The vuses represent the memory state before the store.  If the
   memory state, address, and value of the store are the same as those
   of the last store to this location, then this store will produce the
   same memory state as that store.
  2730. In this case the vdef versions for this store are value numbered to those
  2731. vuse versions, since they represent the same memory state after
  2732. this store.
  2733. Otherwise, the vdefs for the store are used when inserting into
  2734. the table, since the store generates a new memory state. */
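/* A minimal sketch of the above, with hypothetical SSA names:
     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;
   If looking up "a" with vuse .MEM_2 already yields x_1's value, the
   store is redundant and .MEM_3 is value numbered to SSA_VAL (.MEM_2);
   otherwise .MEM_3 gets itself as value and the store is recorded. */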
  2735. result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
  2736. if (result)
  2737. {
  2738. if (TREE_CODE (result) == SSA_NAME)
  2739. result = SSA_VAL (result);
  2740. resultsame = expressions_equal_p (result, op);
  2741. }
  2742. if ((!result || !resultsame)
  2743. /* Only perform the following when being called from PRE
  2744. which embeds tail merging. */
  2745. && default_vn_walk_kind == VN_WALK)
  2746. {
  2747. assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
  2748. vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
  2749. if (vnresult)
  2750. {
  2751. VN_INFO (vdef)->use_processed = true;
  2752. return set_ssa_val_to (vdef, vnresult->result_vdef);
  2753. }
  2754. }
  2755. if (!result || !resultsame)
  2756. {
  2757. if (dump_file && (dump_flags & TDF_DETAILS))
  2758. {
  2759. fprintf (dump_file, "No store match\n");
  2760. fprintf (dump_file, "Value numbering store ");
  2761. print_generic_expr (dump_file, lhs, 0);
  2762. fprintf (dump_file, " to ");
  2763. print_generic_expr (dump_file, op, 0);
  2764. fprintf (dump_file, "\n");
  2765. }
  2766. /* Have to set value numbers before insert, since insert is
  2767. going to valueize the references in-place. */
  2768. if (vdef)
  2769. {
  2770. changed |= set_ssa_val_to (vdef, vdef);
  2771. }
  2772. /* Do not insert structure copies into the tables. */
  2773. if (is_gimple_min_invariant (op)
  2774. || is_gimple_reg (op))
  2775. vn_reference_insert (lhs, op, vdef, NULL);
  2776. /* Only perform the following when being called from PRE
  2777. which embeds tail merging. */
  2778. if (default_vn_walk_kind == VN_WALK)
  2779. {
  2780. assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
  2781. vn_reference_insert (assign, lhs, vuse, vdef);
  2782. }
  2783. }
  2784. else
  2785. {
  2786. /* We had a match, so value number the vdef to have the value
  2787. number of the vuse it came from. */
  2788. if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Store matched earlier value, "
	 "value numbering store vdefs to matching vuses.\n");
  2791. changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
  2792. }
  2793. return changed;
  2794. }
  2795. /* Visit and value number PHI, return true if the value number
  2796. changed. */
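/* An illustrative case (hypothetical names): for
     x_3 = PHI <x_1(2), x_2(3)>
   where both x_1 and x_2 value number to the constant 5, x_3 is value
   numbered to 5 as well.  Arguments that are still VN_TOP are skipped
   because TOP is equivalent to everything, and only arguments coming in
   over executable edges are considered. */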
  2797. static bool
  2798. visit_phi (gimple phi)
  2799. {
  2800. bool changed = false;
  2801. tree result;
  2802. tree sameval = VN_TOP;
  2803. bool allsame = true;
/* TODO: We could check for this in init_scc_vn, and replace this
   with a gcc_assert. */
  2806. if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
  2807. return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
  2808. /* See if all non-TOP arguments have the same value. TOP is
  2809. equivalent to everything, so we can ignore it. */
  2810. edge_iterator ei;
  2811. edge e;
  2812. FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
  2813. if (e->flags & EDGE_EXECUTABLE)
  2814. {
  2815. tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
  2816. if (TREE_CODE (def) == SSA_NAME)
  2817. def = SSA_VAL (def);
  2818. if (def == VN_TOP)
  2819. continue;
  2820. if (sameval == VN_TOP)
  2821. {
  2822. sameval = def;
  2823. }
  2824. else
  2825. {
  2826. if (!expressions_equal_p (def, sameval))
  2827. {
  2828. allsame = false;
  2829. break;
  2830. }
  2831. }
  2832. }
  2833. /* If all value numbered to the same value, the phi node has that
  2834. value. */
  2835. if (allsame)
  2836. return set_ssa_val_to (PHI_RESULT (phi), sameval);
  2837. /* Otherwise, see if it is equivalent to a phi node in this block. */
  2838. result = vn_phi_lookup (phi);
  2839. if (result)
  2840. changed = set_ssa_val_to (PHI_RESULT (phi), result);
  2841. else
  2842. {
  2843. vn_phi_insert (phi, PHI_RESULT (phi));
  2844. VN_INFO (PHI_RESULT (phi))->has_constants = false;
  2845. VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
  2846. changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
  2847. }
  2848. return changed;
  2849. }
/* Return true if EXPR contains constants. */
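/* For example, "x_1 + 4" counts as having constants while "x_1 + y_2"
   does not; constants buried inside reference trees are deliberately
   ignored. */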
  2851. static bool
  2852. expr_has_constants (tree expr)
  2853. {
  2854. switch (TREE_CODE_CLASS (TREE_CODE (expr)))
  2855. {
  2856. case tcc_unary:
  2857. return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
  2858. case tcc_binary:
  2859. return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
  2860. || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
  2861. /* Constants inside reference ops are rarely interesting, but
  2862. it can take a lot of looking to find them. */
  2863. case tcc_reference:
  2864. case tcc_declaration:
  2865. return false;
  2866. default:
  2867. return is_gimple_min_invariant (expr);
  2868. }
  2869. return false;
  2870. }
  2871. /* Return true if STMT contains constants. */
  2872. static bool
  2873. stmt_has_constants (gimple stmt)
  2874. {
  2875. tree tem;
  2876. if (gimple_code (stmt) != GIMPLE_ASSIGN)
  2877. return false;
  2878. switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
  2879. {
  2880. case GIMPLE_TERNARY_RHS:
  2881. tem = gimple_assign_rhs3 (stmt);
  2882. if (TREE_CODE (tem) == SSA_NAME)
  2883. tem = SSA_VAL (tem);
  2884. if (is_gimple_min_invariant (tem))
  2885. return true;
  2886. /* Fallthru. */
  2887. case GIMPLE_BINARY_RHS:
  2888. tem = gimple_assign_rhs2 (stmt);
  2889. if (TREE_CODE (tem) == SSA_NAME)
  2890. tem = SSA_VAL (tem);
  2891. if (is_gimple_min_invariant (tem))
  2892. return true;
  2893. /* Fallthru. */
  2894. case GIMPLE_SINGLE_RHS:
  2895. /* Constants inside reference ops are rarely interesting, but
  2896. it can take a lot of looking to find them. */
  2897. case GIMPLE_UNARY_RHS:
  2898. tem = gimple_assign_rhs1 (stmt);
  2899. if (TREE_CODE (tem) == SSA_NAME)
  2900. tem = SSA_VAL (tem);
  2901. return is_gimple_min_invariant (tem);
  2902. default:
  2903. gcc_unreachable ();
  2904. }
  2905. return false;
  2906. }
  2907. /* Simplify the binary expression RHS, and return the result if
  2908. simplified. */
  2909. static tree
  2910. simplify_binary_expression (gimple stmt)
  2911. {
  2912. tree result = NULL_TREE;
  2913. tree op0 = gimple_assign_rhs1 (stmt);
  2914. tree op1 = gimple_assign_rhs2 (stmt);
  2915. enum tree_code code = gimple_assign_rhs_code (stmt);
  2916. /* This will not catch every single case we could combine, but will
  2917. catch those with constants. The goal here is to simultaneously
  2918. combine constants between expressions, but avoid infinite
  2919. expansion of expressions during simplification. */
  2920. op0 = vn_valueize (op0);
  2921. if (TREE_CODE (op0) == SSA_NAME
  2922. && (VN_INFO (op0)->has_constants
  2923. || TREE_CODE_CLASS (code) == tcc_comparison
  2924. || code == COMPLEX_EXPR))
  2925. op0 = vn_get_expr_for (op0);
  2926. op1 = vn_valueize (op1);
  2927. if (TREE_CODE (op1) == SSA_NAME
  2928. && (VN_INFO (op1)->has_constants
  2929. || code == COMPLEX_EXPR))
  2930. op1 = vn_get_expr_for (op1);
/* A pointer plus a constant can be represented as an invariant address.
   Do so to allow further propagation, see also tree forwprop. */
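/* A hypothetical example: for "q_2 = p_1 p+ 4" where p_1 valueizes to
   "&a", the whole RHS can be expressed as the invariant address of "a"
   plus 4 bytes, which later simplifications can propagate like any
   other constant. */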
  2933. if (code == POINTER_PLUS_EXPR
  2934. && tree_fits_uhwi_p (op1)
  2935. && TREE_CODE (op0) == ADDR_EXPR
  2936. && is_gimple_min_invariant (op0))
  2937. return build_invariant_address (TREE_TYPE (op0),
  2938. TREE_OPERAND (op0, 0),
  2939. tree_to_uhwi (op1));
  2940. /* Avoid folding if nothing changed. */
  2941. if (op0 == gimple_assign_rhs1 (stmt)
  2942. && op1 == gimple_assign_rhs2 (stmt))
  2943. return NULL_TREE;
  2944. fold_defer_overflow_warnings ();
  2945. result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
  2946. if (result)
  2947. STRIP_USELESS_TYPE_CONVERSION (result);
  2948. fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
  2949. stmt, 0);
/* Make sure the result is not a complex expression consisting
   of operators of operators (e.g. (a + b) + (a + c)).
   Otherwise we will end up with unbounded expressions if
   fold does anything at all. */
  2954. if (result && valid_gimple_rhs_p (result))
  2955. return result;
  2956. return NULL_TREE;
  2957. }
  2958. /* Simplify the unary expression RHS, and return the result if
  2959. simplified. */
  2960. static tree
  2961. simplify_unary_expression (gassign *stmt)
  2962. {
  2963. tree result = NULL_TREE;
  2964. tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
  2965. enum tree_code code = gimple_assign_rhs_code (stmt);
  2966. /* We handle some tcc_reference codes here that are all
  2967. GIMPLE_ASSIGN_SINGLE codes. */
  2968. if (code == REALPART_EXPR
  2969. || code == IMAGPART_EXPR
  2970. || code == VIEW_CONVERT_EXPR
  2971. || code == BIT_FIELD_REF)
  2972. op0 = TREE_OPERAND (op0, 0);
  2973. orig_op0 = op0;
  2974. op0 = vn_valueize (op0);
  2975. if (TREE_CODE (op0) == SSA_NAME)
  2976. {
  2977. if (VN_INFO (op0)->has_constants)
  2978. op0 = vn_get_expr_for (op0);
  2979. else if (CONVERT_EXPR_CODE_P (code)
  2980. || code == REALPART_EXPR
  2981. || code == IMAGPART_EXPR
  2982. || code == VIEW_CONVERT_EXPR
  2983. || code == BIT_FIELD_REF)
  2984. {
  2985. /* We want to do tree-combining on conversion-like expressions.
  2986. Make sure we feed only SSA_NAMEs or constants to fold though. */
  2987. tree tem = vn_get_expr_for (op0);
  2988. if (UNARY_CLASS_P (tem)
  2989. || BINARY_CLASS_P (tem)
  2990. || TREE_CODE (tem) == VIEW_CONVERT_EXPR
  2991. || TREE_CODE (tem) == SSA_NAME
  2992. || TREE_CODE (tem) == CONSTRUCTOR
  2993. || is_gimple_min_invariant (tem))
  2994. op0 = tem;
  2995. }
  2996. }
  2997. /* Avoid folding if nothing changed, but remember the expression. */
  2998. if (op0 == orig_op0)
  2999. return NULL_TREE;
  3000. if (code == BIT_FIELD_REF)
  3001. {
  3002. tree rhs = gimple_assign_rhs1 (stmt);
  3003. result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
  3004. op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
  3005. }
  3006. else
  3007. result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
  3008. if (result)
  3009. {
  3010. STRIP_USELESS_TYPE_CONVERSION (result);
  3011. if (valid_gimple_rhs_p (result))
  3012. return result;
  3013. }
  3014. return NULL_TREE;
  3015. }
  3016. /* Try to simplify RHS using equivalences and constant folding. */
  3017. static tree
  3018. try_to_simplify (gassign *stmt)
  3019. {
  3020. enum tree_code code = gimple_assign_rhs_code (stmt);
  3021. tree tem;
/* For stores we can end up simplifying an SSA_NAME rhs.  Just return
   in this case; there is no point in doing extra work. */
  3024. if (code == SSA_NAME)
  3025. return NULL_TREE;
  3026. /* First try constant folding based on our current lattice. */
  3027. tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  3028. if (tem
  3029. && (TREE_CODE (tem) == SSA_NAME
  3030. || is_gimple_min_invariant (tem)))
  3031. return tem;
  3032. /* If that didn't work try combining multiple statements. */
  3033. switch (TREE_CODE_CLASS (code))
  3034. {
  3035. case tcc_reference:
  3036. /* Fallthrough for some unary codes that can operate on registers. */
  3037. if (!(code == REALPART_EXPR
  3038. || code == IMAGPART_EXPR
  3039. || code == VIEW_CONVERT_EXPR
  3040. || code == BIT_FIELD_REF))
  3041. break;
  3042. /* We could do a little more with unary ops, if they expand
  3043. into binary ops, but it's debatable whether it is worth it. */
  3044. case tcc_unary:
  3045. return simplify_unary_expression (stmt);
  3046. case tcc_comparison:
  3047. case tcc_binary:
  3048. return simplify_binary_expression (stmt);
  3049. default:
  3050. break;
  3051. }
  3052. return NULL_TREE;
  3053. }
  3054. /* Visit and value number USE, return true if the value number
  3055. changed. */
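/* In outline: default definitions get themselves as value, PHIs go to
   visit_phi, statements with volatile operands become varying, and
   assignments and calls are first simplified / constant folded against
   the current lattice before being dispatched to the nary, reference
   or call handlers. */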
  3056. static bool
  3057. visit_use (tree use)
  3058. {
  3059. bool changed = false;
  3060. gimple stmt = SSA_NAME_DEF_STMT (use);
  3061. mark_use_processed (use);
  3062. gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  3063. if (dump_file && (dump_flags & TDF_DETAILS)
  3064. && !SSA_NAME_IS_DEFAULT_DEF (use))
  3065. {
  3066. fprintf (dump_file, "Value numbering ");
  3067. print_generic_expr (dump_file, use, 0);
  3068. fprintf (dump_file, " stmt = ");
  3069. print_gimple_stmt (dump_file, stmt, 0, 0);
  3070. }
  3071. /* Handle uninitialized uses. */
  3072. if (SSA_NAME_IS_DEFAULT_DEF (use))
  3073. changed = set_ssa_val_to (use, use);
  3074. else
  3075. {
  3076. if (gimple_code (stmt) == GIMPLE_PHI)
  3077. changed = visit_phi (stmt);
  3078. else if (gimple_has_volatile_ops (stmt))
  3079. changed = defs_to_varying (stmt);
  3080. else if (is_gimple_assign (stmt))
  3081. {
  3082. enum tree_code code = gimple_assign_rhs_code (stmt);
  3083. tree lhs = gimple_assign_lhs (stmt);
  3084. tree rhs1 = gimple_assign_rhs1 (stmt);
  3085. tree simplified;
  3086. /* Shortcut for copies. Simplifying copies is pointless,
  3087. since we copy the expression and value they represent. */
  3088. if (code == SSA_NAME
  3089. && TREE_CODE (lhs) == SSA_NAME)
  3090. {
  3091. changed = visit_copy (lhs, rhs1);
  3092. goto done;
  3093. }
  3094. simplified = try_to_simplify (as_a <gassign *> (stmt));
  3095. if (simplified)
  3096. {
  3097. if (dump_file && (dump_flags & TDF_DETAILS))
  3098. {
  3099. fprintf (dump_file, "RHS ");
  3100. print_gimple_expr (dump_file, stmt, 0, 0);
  3101. fprintf (dump_file, " simplified to ");
  3102. print_generic_expr (dump_file, simplified, 0);
  3103. if (TREE_CODE (lhs) == SSA_NAME)
  3104. fprintf (dump_file, " has constants %d\n",
  3105. expr_has_constants (simplified));
  3106. else
  3107. fprintf (dump_file, "\n");
  3108. }
  3109. }
  3110. /* Setting value numbers to constants will occasionally
  3111. screw up phi congruence because constants are not
  3112. uniquely associated with a single ssa name that can be
  3113. looked up. */
  3114. if (simplified
  3115. && is_gimple_min_invariant (simplified)
  3116. && TREE_CODE (lhs) == SSA_NAME)
  3117. {
  3118. VN_INFO (lhs)->expr = simplified;
  3119. VN_INFO (lhs)->has_constants = true;
  3120. changed = set_ssa_val_to (lhs, simplified);
  3121. goto done;
  3122. }
  3123. else if (simplified
  3124. && TREE_CODE (simplified) == SSA_NAME
  3125. && TREE_CODE (lhs) == SSA_NAME)
  3126. {
  3127. changed = visit_copy (lhs, simplified);
  3128. goto done;
  3129. }
  3130. else if (simplified)
  3131. {
  3132. if (TREE_CODE (lhs) == SSA_NAME)
  3133. {
  3134. VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
/* We have to unshare the expression or else
   valueizing may change the IL stream. */
  3137. VN_INFO (lhs)->expr = unshare_expr (simplified);
  3138. }
  3139. }
  3140. else if (stmt_has_constants (stmt)
  3141. && TREE_CODE (lhs) == SSA_NAME)
  3142. VN_INFO (lhs)->has_constants = true;
  3143. else if (TREE_CODE (lhs) == SSA_NAME)
  3144. {
  3145. /* We reset expr and constantness here because we may
  3146. have been value numbering optimistically, and
  3147. iterating. They may become non-constant in this case,
  3148. even if they were optimistically constant. */
  3149. VN_INFO (lhs)->has_constants = false;
  3150. VN_INFO (lhs)->expr = NULL_TREE;
  3151. }
  3152. if ((TREE_CODE (lhs) == SSA_NAME
  3153. /* We can substitute SSA_NAMEs that are live over
  3154. abnormal edges with their constant value. */
  3155. && !(gimple_assign_copy_p (stmt)
  3156. && is_gimple_min_invariant (rhs1))
  3157. && !(simplified
  3158. && is_gimple_min_invariant (simplified))
  3159. && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
  3160. /* Stores or copies from SSA_NAMEs that are live over
  3161. abnormal edges are a problem. */
  3162. || (code == SSA_NAME
  3163. && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
  3164. changed = defs_to_varying (stmt);
  3165. else if (REFERENCE_CLASS_P (lhs)
  3166. || DECL_P (lhs))
  3167. changed = visit_reference_op_store (lhs, rhs1, stmt);
  3168. else if (TREE_CODE (lhs) == SSA_NAME)
  3169. {
  3170. if ((gimple_assign_copy_p (stmt)
  3171. && is_gimple_min_invariant (rhs1))
  3172. || (simplified
  3173. && is_gimple_min_invariant (simplified)))
  3174. {
  3175. VN_INFO (lhs)->has_constants = true;
  3176. if (simplified)
  3177. changed = set_ssa_val_to (lhs, simplified);
  3178. else
  3179. changed = set_ssa_val_to (lhs, rhs1);
  3180. }
  3181. else
  3182. {
  3183. /* First try to lookup the simplified expression. */
  3184. if (simplified)
  3185. {
  3186. enum gimple_rhs_class rhs_class;
  3187. rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
  3188. if ((rhs_class == GIMPLE_UNARY_RHS
  3189. || rhs_class == GIMPLE_BINARY_RHS
  3190. || rhs_class == GIMPLE_TERNARY_RHS)
  3191. && valid_gimple_rhs_p (simplified))
  3192. {
  3193. tree result = vn_nary_op_lookup (simplified, NULL);
  3194. if (result)
  3195. {
  3196. changed = set_ssa_val_to (lhs, result);
  3197. goto done;
  3198. }
  3199. }
  3200. }
  3201. /* Otherwise visit the original statement. */
  3202. switch (vn_get_stmt_kind (stmt))
  3203. {
  3204. case VN_NARY:
  3205. changed = visit_nary_op (lhs, stmt);
  3206. break;
  3207. case VN_REFERENCE:
  3208. changed = visit_reference_op_load (lhs, rhs1, stmt);
  3209. break;
  3210. default:
  3211. changed = defs_to_varying (stmt);
  3212. break;
  3213. }
  3214. }
  3215. }
  3216. else
  3217. changed = defs_to_varying (stmt);
  3218. }
  3219. else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
  3220. {
  3221. tree lhs = gimple_call_lhs (stmt);
  3222. if (lhs && TREE_CODE (lhs) == SSA_NAME)
  3223. {
  3224. /* Try constant folding based on our current lattice. */
  3225. tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
  3226. vn_valueize);
  3227. if (simplified)
  3228. {
  3229. if (dump_file && (dump_flags & TDF_DETAILS))
  3230. {
  3231. fprintf (dump_file, "call ");
  3232. print_gimple_expr (dump_file, stmt, 0, 0);
  3233. fprintf (dump_file, " simplified to ");
  3234. print_generic_expr (dump_file, simplified, 0);
  3235. if (TREE_CODE (lhs) == SSA_NAME)
  3236. fprintf (dump_file, " has constants %d\n",
  3237. expr_has_constants (simplified));
  3238. else
  3239. fprintf (dump_file, "\n");
  3240. }
  3241. }
  3242. /* Setting value numbers to constants will occasionally
  3243. screw up phi congruence because constants are not
  3244. uniquely associated with a single ssa name that can be
  3245. looked up. */
  3246. if (simplified
  3247. && is_gimple_min_invariant (simplified))
  3248. {
  3249. VN_INFO (lhs)->expr = simplified;
  3250. VN_INFO (lhs)->has_constants = true;
  3251. changed = set_ssa_val_to (lhs, simplified);
  3252. if (gimple_vdef (stmt))
  3253. changed |= set_ssa_val_to (gimple_vdef (stmt),
  3254. SSA_VAL (gimple_vuse (stmt)));
  3255. goto done;
  3256. }
  3257. else if (simplified
  3258. && TREE_CODE (simplified) == SSA_NAME)
  3259. {
  3260. changed = visit_copy (lhs, simplified);
  3261. if (gimple_vdef (stmt))
  3262. changed |= set_ssa_val_to (gimple_vdef (stmt),
  3263. SSA_VAL (gimple_vuse (stmt)));
  3264. goto done;
  3265. }
  3266. else
  3267. {
  3268. if (stmt_has_constants (stmt))
  3269. VN_INFO (lhs)->has_constants = true;
  3270. else
  3271. {
  3272. /* We reset expr and constantness here because we may
  3273. have been value numbering optimistically, and
  3274. iterating. They may become non-constant in this case,
  3275. even if they were optimistically constant. */
  3276. VN_INFO (lhs)->has_constants = false;
  3277. VN_INFO (lhs)->expr = NULL_TREE;
  3278. }
  3279. if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
  3280. {
  3281. changed = defs_to_varying (stmt);
  3282. goto done;
  3283. }
  3284. }
  3285. }
  3286. if (!gimple_call_internal_p (stmt)
  3287. && (/* Calls to the same function with the same vuse
  3288. and the same operands do not necessarily return the same
  3289. value, unless they're pure or const. */
  3290. gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
  3291. /* If calls have a vdef, subsequent calls won't have
  3292. the same incoming vuse. So, if 2 calls with vdef have the
  3293. same vuse, we know they're not subsequent.
We can value number two non-subsequent calls to the same
function with the same vuse and the same operands as the
same, because there is no code in the program that can
compare the two values... */
  3298. || (gimple_vdef (stmt)
/* ... unless the call returns a pointer which does
   not alias with anything else, in which case the
   information that the values are distinct is encoded
   in the IL. */
  3303. && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
  3304. /* Only perform the following when being called from PRE
  3305. which embeds tail merging. */
  3306. && default_vn_walk_kind == VN_WALK)))
  3307. changed = visit_reference_op_call (lhs, call_stmt);
  3308. else
  3309. changed = defs_to_varying (stmt);
  3310. }
  3311. else
  3312. changed = defs_to_varying (stmt);
  3313. }
  3314. done:
  3315. return changed;
  3316. }
/* Compare two operands by reverse postorder index. */
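/* The resulting order: names without a defining statement come first,
   then names whose definition has no basic block; within one block PHI
   results precede other definitions, which are ordered by statement uid;
   across blocks the RPO number of the defining block decides. */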
  3318. static int
  3319. compare_ops (const void *pa, const void *pb)
  3320. {
  3321. const tree opa = *((const tree *)pa);
  3322. const tree opb = *((const tree *)pb);
  3323. gimple opstmta = SSA_NAME_DEF_STMT (opa);
  3324. gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  3325. basic_block bba;
  3326. basic_block bbb;
  3327. if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
  3328. return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  3329. else if (gimple_nop_p (opstmta))
  3330. return -1;
  3331. else if (gimple_nop_p (opstmtb))
  3332. return 1;
  3333. bba = gimple_bb (opstmta);
  3334. bbb = gimple_bb (opstmtb);
  3335. if (!bba && !bbb)
  3336. return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  3337. else if (!bba)
  3338. return -1;
  3339. else if (!bbb)
  3340. return 1;
  3341. if (bba == bbb)
  3342. {
  3343. if (gimple_code (opstmta) == GIMPLE_PHI
  3344. && gimple_code (opstmtb) == GIMPLE_PHI)
  3345. return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  3346. else if (gimple_code (opstmta) == GIMPLE_PHI)
  3347. return -1;
  3348. else if (gimple_code (opstmtb) == GIMPLE_PHI)
  3349. return 1;
  3350. else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
  3351. return gimple_uid (opstmta) - gimple_uid (opstmtb);
  3352. else
  3353. return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  3354. }
  3355. return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
  3356. }
  3357. /* Sort an array containing members of a strongly connected component
  3358. SCC so that the members are ordered by RPO number.
  3359. This means that when the sort is complete, iterating through the
  3360. array will give you the members in RPO order. */
  3361. static void
  3362. sort_scc (vec<tree> scc)
  3363. {
  3364. scc.qsort (compare_ops);
  3365. }
/* Insert the no longer used nary ONARY into the hash table INFO. */
  3367. static void
  3368. copy_nary (vn_nary_op_t onary, vn_tables_t info)
  3369. {
  3370. size_t size = sizeof_vn_nary_op (onary->length);
  3371. vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
  3372. &info->nary_obstack);
  3373. memcpy (nary, onary, size);
  3374. vn_nary_op_insert_into (nary, info->nary, false);
  3375. }
/* Insert the no longer used phi OPHI into the hash table INFO. */
  3377. static void
  3378. copy_phi (vn_phi_t ophi, vn_tables_t info)
  3379. {
  3380. vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  3381. vn_phi_s **slot;
  3382. memcpy (phi, ophi, sizeof (*phi));
  3383. ophi->phiargs.create (0);
  3384. slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  3385. gcc_assert (!*slot);
  3386. *slot = phi;
  3387. }
/* Insert the no longer used reference OREF into the hash table INFO. */
  3389. static void
  3390. copy_reference (vn_reference_t oref, vn_tables_t info)
  3391. {
  3392. vn_reference_t ref;
  3393. vn_reference_s **slot;
  3394. ref = (vn_reference_t) pool_alloc (info->references_pool);
  3395. memcpy (ref, oref, sizeof (*ref));
  3396. oref->operands.create (0);
  3397. slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  3398. if (*slot)
  3399. free_reference (*slot);
  3400. *slot = ref;
  3401. }
  3402. /* Process a strongly connected component in the SSA graph. */
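/* The SCC is iterated with the optimistic table until no value number
   changes anymore; only then are the surviving entries copied into the
   valid table, so equivalences assumed optimistically but later
   disproved never leak out of the iteration. */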
  3403. static void
  3404. process_scc (vec<tree> scc)
  3405. {
  3406. tree var;
  3407. unsigned int i;
  3408. unsigned int iterations = 0;
  3409. bool changed = true;
  3410. vn_nary_op_iterator_type hin;
  3411. vn_phi_iterator_type hip;
  3412. vn_reference_iterator_type hir;
  3413. vn_nary_op_t nary;
  3414. vn_phi_t phi;
  3415. vn_reference_t ref;
  3416. /* If the SCC has a single member, just visit it. */
  3417. if (scc.length () == 1)
  3418. {
  3419. tree use = scc[0];
  3420. if (VN_INFO (use)->use_processed)
  3421. return;
  3422. /* We need to make sure it doesn't form a cycle itself, which can
  3423. happen for self-referential PHI nodes. In that case we would
  3424. end up inserting an expression with VN_TOP operands into the
  3425. valid table which makes us derive bogus equivalences later.
  3426. The cheapest way to check this is to assume it for all PHI nodes. */
  3427. if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
  3428. /* Fallthru to iteration. */ ;
  3429. else
  3430. {
  3431. visit_use (use);
  3432. return;
  3433. }
  3434. }
  3435. if (dump_file && (dump_flags & TDF_DETAILS))
  3436. print_scc (dump_file, scc);
  3437. /* Iterate over the SCC with the optimistic table until it stops
  3438. changing. */
  3439. current_info = optimistic_info;
  3440. while (changed)
  3441. {
  3442. changed = false;
  3443. iterations++;
  3444. if (dump_file && (dump_flags & TDF_DETAILS))
  3445. fprintf (dump_file, "Starting iteration %d\n", iterations);
  3446. /* As we are value-numbering optimistically we have to
  3447. clear the expression tables and the simplified expressions
  3448. in each iteration until we converge. */
  3449. optimistic_info->nary->empty ();
  3450. optimistic_info->phis->empty ();
  3451. optimistic_info->references->empty ();
  3452. obstack_free (&optimistic_info->nary_obstack, NULL);
  3453. gcc_obstack_init (&optimistic_info->nary_obstack);
  3454. empty_alloc_pool (optimistic_info->phis_pool);
  3455. empty_alloc_pool (optimistic_info->references_pool);
  3456. FOR_EACH_VEC_ELT (scc, i, var)
  3457. VN_INFO (var)->expr = NULL_TREE;
  3458. FOR_EACH_VEC_ELT (scc, i, var)
  3459. changed |= visit_use (var);
  3460. }
  3461. if (dump_file && (dump_flags & TDF_DETAILS))
  3462. fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  3463. statistics_histogram_event (cfun, "SCC iterations", iterations);
  3464. /* Finally, copy the contents of the no longer used optimistic
  3465. table to the valid table. */
  3466. FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
  3467. copy_nary (nary, valid_info);
  3468. FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
  3469. copy_phi (phi, valid_info);
  3470. FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
  3471. ref, vn_reference_t, hir)
  3472. copy_reference (ref, valid_info);
  3473. current_info = valid_info;
  3474. }
  3475. /* Pop the components of the found SCC for NAME off the SCC stack
  3476. and process them. Returns true if all went well, false if
  3477. we run into resource limits. */
  3478. static bool
  3479. extract_and_process_scc_for_name (tree name)
  3480. {
  3481. auto_vec<tree> scc;
  3482. tree x;
  3483. /* Found an SCC, pop the components off the SCC stack and
  3484. process them. */
  3485. do
  3486. {
  3487. x = sccstack.pop ();
  3488. VN_INFO (x)->on_sccstack = false;
  3489. scc.safe_push (x);
  3490. } while (x != name);
/* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
  3492. if (scc.length ()
  3493. > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
  3494. {
  3495. if (dump_file)
  3496. fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
  3497. "SCC size %u exceeding %u\n", scc.length (),
  3498. (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
  3499. return false;
  3500. }
  3501. if (scc.length () > 1)
  3502. sort_scc (scc);
  3503. process_scc (scc);
  3504. return true;
  3505. }
  3506. /* Depth first search on NAME to discover and process SCC's in the SSA
  3507. graph.
  3508. Execution of this algorithm relies on the fact that the SCC's are
  3509. popped off the stack in topological order.
  3510. Returns true if successful, false if we stopped processing SCC's due
  3511. to resource constraints. */
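/* This is Tarjan's SCC algorithm made iterative by keeping an explicit
   stack of names and operand iterators: "low" tracks the smallest dfsnum
   reachable from NAME through the uses walked so far, and once
   low == dfsnum after all uses are visited, NAME roots an SCC which is
   popped off sccstack and processed. */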
  3512. static bool
  3513. DFS (tree name)
  3514. {
  3515. vec<ssa_op_iter> itervec = vNULL;
  3516. vec<tree> namevec = vNULL;
  3517. use_operand_p usep = NULL;
  3518. gimple defstmt;
  3519. tree use;
  3520. ssa_op_iter iter;
  3521. start_over:
  3522. /* SCC info */
  3523. VN_INFO (name)->dfsnum = next_dfs_num++;
  3524. VN_INFO (name)->visited = true;
  3525. VN_INFO (name)->low = VN_INFO (name)->dfsnum;
  3526. sccstack.safe_push (name);
  3527. VN_INFO (name)->on_sccstack = true;
  3528. defstmt = SSA_NAME_DEF_STMT (name);
  3529. /* Recursively DFS on our operands, looking for SCC's. */
  3530. if (!gimple_nop_p (defstmt))
  3531. {
  3532. /* Push a new iterator. */
  3533. if (gphi *phi = dyn_cast <gphi *> (defstmt))
  3534. usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
  3535. else
  3536. usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
  3537. }
  3538. else
  3539. clear_and_done_ssa_iter (&iter);
  3540. while (1)
  3541. {
  3542. /* If we are done processing uses of a name, go up the stack
  3543. of iterators and process SCCs as we found them. */
  3544. if (op_iter_done (&iter))
  3545. {
  3546. /* See if we found an SCC. */
  3547. if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
  3548. if (!extract_and_process_scc_for_name (name))
  3549. {
  3550. namevec.release ();
  3551. itervec.release ();
  3552. return false;
  3553. }
  3554. /* Check if we are done. */
  3555. if (namevec.is_empty ())
  3556. {
  3557. namevec.release ();
  3558. itervec.release ();
  3559. return true;
  3560. }
  3561. /* Restore the last use walker and continue walking there. */
  3562. use = name;
  3563. name = namevec.pop ();
  3564. memcpy (&iter, &itervec.last (),
  3565. sizeof (ssa_op_iter));
  3566. itervec.pop ();
  3567. goto continue_walking;
  3568. }
  3569. use = USE_FROM_PTR (usep);
  3570. /* Since we handle phi nodes, we will sometimes get
  3571. invariants in the use expression. */
  3572. if (TREE_CODE (use) == SSA_NAME)
  3573. {
  3574. if (! (VN_INFO (use)->visited))
  3575. {
  3576. /* Recurse by pushing the current use walking state on
  3577. the stack and starting over. */
  3578. itervec.safe_push (iter);
  3579. namevec.safe_push (name);
  3580. name = use;
  3581. goto start_over;
  3582. continue_walking:
  3583. VN_INFO (name)->low = MIN (VN_INFO (name)->low,
  3584. VN_INFO (use)->low);
  3585. }
  3586. if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
  3587. && VN_INFO (use)->on_sccstack)
  3588. {
  3589. VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
  3590. VN_INFO (name)->low);
  3591. }
  3592. }
  3593. usep = op_iter_next_use (&iter);
  3594. }
  3595. }
  3596. /* Allocate a value number table. */
  3597. static void
  3598. allocate_vn_table (vn_tables_t table)
  3599. {
  3600. table->phis = new vn_phi_table_type (23);
  3601. table->nary = new vn_nary_op_table_type (23);
  3602. table->references = new vn_reference_table_type (23);
  3603. gcc_obstack_init (&table->nary_obstack);
  3604. table->phis_pool = create_alloc_pool ("VN phis",
  3605. sizeof (struct vn_phi_s),
  3606. 30);
  3607. table->references_pool = create_alloc_pool ("VN references",
  3608. sizeof (struct vn_reference_s),
  3609. 30);
  3610. }
  3611. /* Free a value number table. */
  3612. static void
  3613. free_vn_table (vn_tables_t table)
  3614. {
  3615. delete table->phis;
  3616. table->phis = NULL;
  3617. delete table->nary;
  3618. table->nary = NULL;
  3619. delete table->references;
  3620. table->references = NULL;
  3621. obstack_free (&table->nary_obstack, NULL);
  3622. free_alloc_pool (table->phis_pool);
  3623. free_alloc_pool (table->references_pool);
  3624. }
  3625. static void
  3626. init_scc_vn (void)
  3627. {
  3628. size_t i;
  3629. int j;
  3630. int *rpo_numbers_temp;
  3631. calculate_dominance_info (CDI_DOMINATORS);
  3632. sccstack.create (0);
  3633. constant_to_value_id = new hash_table<vn_constant_hasher> (23);
  3634. constant_value_ids = BITMAP_ALLOC (NULL);
  3635. next_dfs_num = 1;
  3636. next_value_id = 1;
  3637. vn_ssa_aux_table.create (num_ssa_names + 1);
/* The create call doesn't actually grow the vector to the right size;
   it just preallocates the space to do so, hence the explicit grow below. */
  3640. vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  3641. gcc_obstack_init (&vn_ssa_aux_obstack);
  3642. shared_lookup_phiargs.create (0);
  3643. shared_lookup_references.create (0);
  3644. rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  3645. rpo_numbers_temp =
  3646. XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  3647. pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
/* rpo_numbers_temp is filled in RPO order: rpo_numbers_temp[i] = bb->index
   means that the i'th block in RPO order is bb.  We want to map block
   indices to RPO numbers, so we need to invert this array. */
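/* For example, if rpo_numbers_temp were {2, 4, 3}, the loop below sets
   rpo_numbers[2] = 0, rpo_numbers[4] = 1 and rpo_numbers[3] = 2. */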
  3651. for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
  3652. rpo_numbers[rpo_numbers_temp[j]] = j;
  3653. XDELETE (rpo_numbers_temp);
  3654. VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
  3655. /* Create the VN_INFO structures, and initialize value numbers to
  3656. TOP. */
  3657. for (i = 0; i < num_ssa_names; i++)
  3658. {
  3659. tree name = ssa_name (i);
  3660. if (name)
  3661. {
  3662. VN_INFO_GET (name)->valnum = VN_TOP;
  3663. VN_INFO (name)->expr = NULL_TREE;
  3664. VN_INFO (name)->value_id = 0;
  3665. }
  3666. }
  3667. renumber_gimple_stmt_uids ();
  3668. /* Create the valid and optimistic value numbering tables. */
  3669. valid_info = XCNEW (struct vn_tables_s);
  3670. allocate_vn_table (valid_info);
  3671. optimistic_info = XCNEW (struct vn_tables_s);
  3672. allocate_vn_table (optimistic_info);
  3673. }
  3674. void
  3675. free_scc_vn (void)
  3676. {
  3677. size_t i;
  3678. delete constant_to_value_id;
  3679. constant_to_value_id = NULL;
  3680. BITMAP_FREE (constant_value_ids);
  3681. shared_lookup_phiargs.release ();
  3682. shared_lookup_references.release ();
  3683. XDELETEVEC (rpo_numbers);
  3684. for (i = 0; i < num_ssa_names; i++)
  3685. {
  3686. tree name = ssa_name (i);
  3687. if (name
  3688. && VN_INFO (name)->needs_insertion)
  3689. release_ssa_name (name);
  3690. }
  3691. obstack_free (&vn_ssa_aux_obstack, NULL);
  3692. vn_ssa_aux_table.release ();
  3693. sccstack.release ();
  3694. free_vn_table (valid_info);
  3695. XDELETE (valid_info);
  3696. free_vn_table (optimistic_info);
  3697. XDELETE (optimistic_info);
  3698. }
  3699. /* Set *ID according to RESULT. */
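/* An SSA name result reuses its value id, an invariant gets (or
   allocates) the shared constant value id, and anything else receives
   a fresh value id. */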
  3700. static void
  3701. set_value_id_for_result (tree result, unsigned int *id)
  3702. {
  3703. if (result && TREE_CODE (result) == SSA_NAME)
  3704. *id = VN_INFO (result)->value_id;
  3705. else if (result && is_gimple_min_invariant (result))
  3706. *id = get_or_alloc_constant_value_id (result);
  3707. else
  3708. *id = get_next_value_id ();
  3709. }
  3710. /* Set the value ids in the valid hash tables. */
  3711. static void
  3712. set_hashtable_value_ids (void)
  3713. {
  3714. vn_nary_op_iterator_type hin;
  3715. vn_phi_iterator_type hip;
  3716. vn_reference_iterator_type hir;
  3717. vn_nary_op_t vno;
  3718. vn_reference_t vr;
  3719. vn_phi_t vp;
  3720. /* Now set the value ids of the things we had put in the hash
  3721. table. */
  3722. FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
  3723. set_value_id_for_result (vno->result, &vno->value_id);
  3724. FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
  3725. set_value_id_for_result (vp->result, &vp->value_id);
  3726. FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
  3727. hir)
  3728. set_value_id_for_result (vr->result, &vr->value_id);
  3729. }
  3730. class cond_dom_walker : public dom_walker
  3731. {
  3732. public:
  3733. cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
  3734. virtual void before_dom_children (basic_block);
  3735. bool fail;
  3736. };
  3737. void
  3738. cond_dom_walker::before_dom_children (basic_block bb)
  3739. {
  3740. edge e;
  3741. edge_iterator ei;
  3742. if (fail)
  3743. return;
/* If any of the predecessor edges that do not come from blocks dominated
   by us are still marked as possibly executable, consider this block
   reachable. */
  3747. bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  3748. FOR_EACH_EDGE (e, ei, bb->preds)
  3749. if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
  3750. reachable |= (e->flags & EDGE_EXECUTABLE);
  3751. /* If the block is not reachable all outgoing edges are not
  3752. executable. */
  3753. if (!reachable)
  3754. {
  3755. if (dump_file && (dump_flags & TDF_DETAILS))
  3756. fprintf (dump_file, "Marking all outgoing edges of unreachable "
  3757. "BB %d as not executable\n", bb->index);
  3758. FOR_EACH_EDGE (e, ei, bb->succs)
  3759. e->flags &= ~EDGE_EXECUTABLE;
  3760. return;
  3761. }
  3762. gimple stmt = last_stmt (bb);
  3763. if (!stmt)
  3764. return;
  3765. enum gimple_code code = gimple_code (stmt);
  3766. if (code != GIMPLE_COND
  3767. && code != GIMPLE_SWITCH
  3768. && code != GIMPLE_GOTO)
  3769. return;
  3770. if (dump_file && (dump_flags & TDF_DETAILS))
  3771. {
  3772. fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
  3773. bb->index);
  3774. print_gimple_stmt (dump_file, stmt, 0, 0);
  3775. }
/* Value-number the last stmt's SSA uses. */
  3777. ssa_op_iter i;
  3778. tree op;
  3779. FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
  3780. if (VN_INFO (op)->visited == false
  3781. && !DFS (op))
  3782. {
  3783. fail = true;
  3784. return;
  3785. }
  3786. /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
  3787. if value-numbering can prove they are not reachable. Handling
  3788. computed gotos is also possible. */
  3789. tree val;
  3790. switch (code)
  3791. {
  3792. case GIMPLE_COND:
  3793. {
  3794. tree lhs = gimple_cond_lhs (stmt);
  3795. tree rhs = gimple_cond_rhs (stmt);
  3796. /* Work hard in computing the condition and take into account
  3797. the valueization of the defining stmt. */
  3798. if (TREE_CODE (lhs) == SSA_NAME)
  3799. lhs = vn_get_expr_for (lhs);
  3800. if (TREE_CODE (rhs) == SSA_NAME)
  3801. rhs = vn_get_expr_for (rhs);
  3802. val = fold_binary (gimple_cond_code (stmt),
  3803. boolean_type_node, lhs, rhs);
  3804. break;
  3805. }
  3806. case GIMPLE_SWITCH:
  3807. val = gimple_switch_index (as_a <gswitch *> (stmt));
  3808. break;
  3809. case GIMPLE_GOTO:
  3810. val = gimple_goto_dest (stmt);
  3811. break;
  3812. default:
  3813. gcc_unreachable ();
  3814. }
  3815. if (!val)
  3816. return;
  3817. edge taken = find_taken_edge (bb, vn_valueize (val));
  3818. if (!taken)
  3819. return;
  3820. if (dump_file && (dump_flags & TDF_DETAILS))
  3821. fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
  3822. "not executable\n", bb->index, bb->index, taken->dest->index);
  3823. FOR_EACH_EDGE (e, ei, bb->succs)
  3824. if (e != taken)
  3825. e->flags &= ~EDGE_EXECUTABLE;
  3826. }
  3827. /* Do SCCVN. Returns true if it finished, false if we bailed out
  3828. due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
  3829. how we use the alias oracle walking during the VN process. */
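/* In outline: default definitions of parameters are pre-visited, all
   edges start out as possibly executable, a dominator walk value-numbers
   the controlling statements and prunes edges proven not executable,
   the remaining SSA names are processed SCC by SCC via DFS, and finally
   value ids are assigned and propagated into the hash tables. */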
  3830. bool
  3831. run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
  3832. {
  3833. basic_block bb;
  3834. size_t i;
  3835. tree param;
  3836. default_vn_walk_kind = default_vn_walk_kind_;
  3837. init_scc_vn ();
  3838. current_info = valid_info;
  3839. for (param = DECL_ARGUMENTS (current_function_decl);
  3840. param;
  3841. param = DECL_CHAIN (param))
  3842. {
  3843. tree def = ssa_default_def (cfun, param);
  3844. if (def)
  3845. {
  3846. VN_INFO (def)->visited = true;
  3847. VN_INFO (def)->valnum = def;
  3848. }
  3849. }
  3850. /* Mark all edges as possibly executable. */
  3851. FOR_ALL_BB_FN (bb, cfun)
  3852. {
  3853. edge_iterator ei;
  3854. edge e;
  3855. FOR_EACH_EDGE (e, ei, bb->succs)
  3856. e->flags |= EDGE_EXECUTABLE;
  3857. }
/* Walk all blocks in dominator order, value-numbering the last stmt's
   SSA uses and deciding which outgoing edges are not executable. */
  3860. cond_dom_walker walker;
  3861. walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  3862. if (walker.fail)
  3863. {
  3864. free_scc_vn ();
  3865. return false;
  3866. }
  3867. /* Value-number remaining SSA names. */
  3868. for (i = 1; i < num_ssa_names; ++i)
  3869. {
  3870. tree name = ssa_name (i);
  3871. if (name
  3872. && VN_INFO (name)->visited == false
  3873. && !has_zero_uses (name))
  3874. if (!DFS (name))
  3875. {
  3876. free_scc_vn ();
  3877. return false;
  3878. }
  3879. }
  3880. /* Initialize the value ids. */
  3881. for (i = 1; i < num_ssa_names; ++i)
  3882. {
  3883. tree name = ssa_name (i);
  3884. vn_ssa_aux_t info;
  3885. if (!name)
  3886. continue;
  3887. info = VN_INFO (name);
  3888. if (info->valnum == name
  3889. || info->valnum == VN_TOP)
  3890. info->value_id = get_next_value_id ();
  3891. else if (is_gimple_min_invariant (info->valnum))
  3892. info->value_id = get_or_alloc_constant_value_id (info->valnum);
  3893. }
/* Propagate value ids: a name whose value is another SSA name
   inherits that name's value id. */
  3895. for (i = 1; i < num_ssa_names; ++i)
  3896. {
  3897. tree name = ssa_name (i);
  3898. vn_ssa_aux_t info;
  3899. if (!name)
  3900. continue;
  3901. info = VN_INFO (name);
  3902. if (TREE_CODE (info->valnum) == SSA_NAME
  3903. && info->valnum != name
  3904. && info->value_id != VN_INFO (info->valnum)->value_id)
  3905. info->value_id = VN_INFO (info->valnum)->value_id;
  3906. }
  3907. set_hashtable_value_ids ();
  3908. if (dump_file && (dump_flags & TDF_DETAILS))
  3909. {
  3910. fprintf (dump_file, "Value numbers:\n");
  3911. for (i = 0; i < num_ssa_names; i++)
  3912. {
  3913. tree name = ssa_name (i);
  3914. if (name
  3915. && VN_INFO (name)->visited
  3916. && SSA_VAL (name) != name)
  3917. {
  3918. print_generic_expr (dump_file, name, 0);
  3919. fprintf (dump_file, " = ");
  3920. print_generic_expr (dump_file, SSA_VAL (name), 0);
  3921. fprintf (dump_file, "\n");
  3922. }
  3923. }
  3924. }
  3925. return true;
  3926. }
  3927. /* Return the maximum value id we have ever seen. */
  3928. unsigned int
  3929. get_max_value_id (void)
  3930. {
  3931. return next_value_id;
  3932. }
  3933. /* Return the next unique value id. */
  3934. unsigned int
  3935. get_next_value_id (void)
  3936. {
  3937. return next_value_id++;
  3938. }
  3939. /* Compare two expressions E1 and E2 and return true if they are equal. */
  3940. bool
  3941. expressions_equal_p (tree e1, tree e2)
  3942. {
  3943. /* The obvious case. */
  3944. if (e1 == e2)
  3945. return true;
  3946. /* If only one of them is null, they cannot be equal. */
  3947. if (!e1 || !e2)
  3948. return false;
  3949. /* Now perform the actual comparison. */
  3950. if (TREE_CODE (e1) == TREE_CODE (e2)
  3951. && operand_equal_p (e1, e2, OEP_PURE_SAME))
  3952. return true;
  3953. return false;
  3954. }
  3955. /* Return true if the nary operation NARY may trap. This is a copy
  3956. of stmt_could_throw_1_p adjusted to the SCCVN IL. */
  3957. bool
  3958. vn_nary_may_trap (vn_nary_op_t nary)
  3959. {
  3960. tree type;
  3961. tree rhs2 = NULL_TREE;
  3962. bool honor_nans = false;
  3963. bool honor_snans = false;
  3964. bool fp_operation = false;
  3965. bool honor_trapv = false;
  3966. bool handled, ret;
  3967. unsigned i;
  3968. if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
  3969. || TREE_CODE_CLASS (nary->opcode) == tcc_unary
  3970. || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
  3971. {
  3972. type = nary->type;
  3973. fp_operation = FLOAT_TYPE_P (type);
  3974. if (fp_operation)
  3975. {
  3976. honor_nans = flag_trapping_math && !flag_finite_math_only;
  3977. honor_snans = flag_signaling_nans != 0;
  3978. }
  3979. else if (INTEGRAL_TYPE_P (type)
  3980. && TYPE_OVERFLOW_TRAPS (type))
  3981. honor_trapv = true;
  3982. }
  3983. if (nary->length >= 2)
  3984. rhs2 = nary->op[1];
  3985. ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
  3986. honor_trapv,
  3987. honor_nans, honor_snans, rhs2,
  3988. &handled);
  3989. if (handled
  3990. && ret)
  3991. return true;
  3992. for (i = 0; i < nary->length; ++i)
  3993. if (tree_could_trap_p (nary->op[i]))
  3994. return true;
  3995. return false;
  3996. }