df-problems.c

  1. /* Standard problems for dataflow support routines.
  2. Copyright (C) 1999-2015 Free Software Foundation, Inc.
  3. Originally contributed by Michael P. Hayes
  4. (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
  5. Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
  6. and Kenneth Zadeck (zadeck@naturalbridge.com).
  7. This file is part of GCC.
  8. GCC is free software; you can redistribute it and/or modify it under
  9. the terms of the GNU General Public License as published by the Free
  10. Software Foundation; either version 3, or (at your option) any later
  11. version.
  12. GCC is distributed in the hope that it will be useful, but WITHOUT ANY
  13. WARRANTY; without even the implied warranty of MERCHANTABILITY or
  14. FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
  15. for more details.
  16. You should have received a copy of the GNU General Public License
  17. along with GCC; see the file COPYING3. If not see
  18. <http://www.gnu.org/licenses/>. */
  19. #include "config.h"
  20. #include "system.h"
  21. #include "coretypes.h"
  22. #include "tm.h"
  23. #include "rtl.h"
  24. #include "tm_p.h"
  25. #include "insn-config.h"
  26. #include "recog.h"
  27. #include "hashtab.h"
  28. #include "hash-set.h"
  29. #include "vec.h"
  30. #include "machmode.h"
  31. #include "hard-reg-set.h"
  32. #include "input.h"
  33. #include "function.h"
  34. #include "regs.h"
  35. #include "alloc-pool.h"
  36. #include "flags.h"
  37. #include "predict.h"
  38. #include "dominance.h"
  39. #include "cfg.h"
  40. #include "cfganal.h"
  41. #include "basic-block.h"
  42. #include "sbitmap.h"
  43. #include "bitmap.h"
  44. #include "target.h"
  45. #include "timevar.h"
  46. #include "df.h"
  47. #include "except.h"
  48. #include "dce.h"
  49. #include "valtrack.h"
  50. #include "dumpfile.h"
  51. #include "rtl-iter.h"
  52. /* Note that turning REG_DEAD_DEBUGGING on will cause
  53. gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
  54. addresses in the dumps. */
  55. #define REG_DEAD_DEBUGGING 0
  56. #define DF_SPARSE_THRESHOLD 32
  57. static bitmap_head seen_in_block;
  58. static bitmap_head seen_in_insn;
  59. /*----------------------------------------------------------------------------
  60. Utility functions.
  61. ----------------------------------------------------------------------------*/
  62. /* Generic versions to get the void* version of the block info. Only
  63. used inside the problem instance vectors. */
  64. /* Dump a def-use or use-def chain for REF to FILE. */
  65. void
  66. df_chain_dump (struct df_link *link, FILE *file)
  67. {
  68. fprintf (file, "{ ");
  69. for (; link; link = link->next)
  70. {
  71. fprintf (file, "%c%d(bb %d insn %d) ",
  72. DF_REF_REG_DEF_P (link->ref)
  73. ? 'd'
  74. : (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
  75. DF_REF_ID (link->ref),
  76. DF_REF_BBNO (link->ref),
  77. DF_REF_IS_ARTIFICIAL (link->ref)
  78. ? -1 : DF_REF_INSN_UID (link->ref));
  79. }
  80. fprintf (file, "}");
  81. }
  82. /* Print some basic block info as part of df_dump. */
  83. void
  84. df_print_bb_index (basic_block bb, FILE *file)
  85. {
  86. edge e;
  87. edge_iterator ei;
  88. fprintf (file, "\n( ");
  89. FOR_EACH_EDGE (e, ei, bb->preds)
  90. {
  91. basic_block pred = e->src;
  92. fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
  93. }
  94. fprintf (file, ")->[%d]->( ", bb->index);
  95. FOR_EACH_EDGE (e, ei, bb->succs)
  96. {
  97. basic_block succ = e->dest;
  98. fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
  99. }
  100. fprintf (file, ")\n");
  101. }
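/* For reference, the two dump helpers above produce output of this shape
   (the values are made up for illustration):

     df_chain_dump:      { d4(bb 2 insn 17) u9(bb 3 insn 21) }
     df_print_bb_index:  ( 0 3(EH) )->[4]->( 5 )

   where 'd' marks a def, 'u' a plain use, 'e' a use that only appears in
   a note, the number after the letter is the DF_REF_ID, and an artificial
   ref is printed with insn -1.  */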
  102. /*----------------------------------------------------------------------------
  103. REACHING DEFINITIONS
  104. Find the locations in the function where each definition site for a
  105. pseudo reaches. In and out bitvectors are built for each basic
  106. block. The id field in the ref is used to index into these sets.
  107. See df.h for details.
  108. If the DF_RD_PRUNE_DEAD_DEFS changeable flag is set, only DEFs reaching
  109. existing uses are included in the global reaching DEFs set, or in other
  110. words only DEFs that are still live. This is a kind of pruned version
  111. of the traditional reaching definitions problem that is much less
  112. complex to compute and produces enough information to compute UD-chains.
  113. In this context, live must be interpreted in the DF_LR sense: Uses that
  114. are upward exposed but may not be initialized on all paths through the
  115. CFG. For a USE that is not reached by a DEF on all paths, we still want
  116. to make those DEFs that do reach the USE visible, and pruning based on
  117. DF_LIVE would make that impossible.
  118. ----------------------------------------------------------------------------*/
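/* Stated as the usual dataflow equations (a sketch; the code below adds
   the sparse/dense kill representation described next), with the bits
   indexed by the DF_REF_ID of each def:

     GEN (bb)  = defs in BB that reach the end of BB
     KILL (bb) = all defs of each reg that BB fully redefines
     IN (bb)   = union over predecessors P of OUT (P)
     OUT (bb)  = GEN (bb) | (IN (bb) & ~KILL (bb))

   which is what df_rd_confluence_n and df_rd_transfer_function compute.  */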
  119. /* This problem plays a large number of games for the sake of
  120. efficiency.
  121. 1) The order of the bits in the bitvectors. After the scanning
  122. phase, all of the defs are sorted. All of the defs for the reg 0
  123. are first, followed by all defs for reg 1 and so on.
  124. 2) There are two kill sets, one used if the number of defs is less
  125. than or equal to DF_SPARSE_THRESHOLD and another if the number of defs is
  126. greater.
  127. <= : Data is built directly in the kill set.
  128. > : One level of indirection is used to keep from generating long
  129. strings of 1 bits in the kill sets. Bitvectors that are indexed
  130. by the regnum are used to represent that there is a killing def
  131. for the register. The confluence and transfer functions use
  132. these along with the bitmap_clear_range call to remove ranges of
  133. bits without actually generating a knockout vector.
  134. Both the kill/sparse_kill pair and the dense_invalidated_by_call/
  135. sparse_invalidated_by_call pair play this game. */
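/* Concretely: because the defs are sorted by regno, all defs of register R
   occupy the contiguous id range

     [DF_DEFS_BEGIN (R), DF_DEFS_BEGIN (R) + DF_DEFS_COUNT (R) - 1]

   so a "sparse" kill of R can be applied with a single

     bitmap_clear_range (set, DF_DEFS_BEGIN (R), DF_DEFS_COUNT (R));

   instead of materializing DF_DEFS_COUNT (R) individual kill bits.  */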
  136. /* Private data used to compute the solution for this problem. These
  137. data structures are not accessible outside of this module. */
  138. struct df_rd_problem_data
  139. {
  140. /* The set of regs (those with many defs) whose defs are invalidated by a call. */
  141. bitmap_head sparse_invalidated_by_call;
  142. /* The set of def ids invalidated by a call, for regs with few defs. */
  143. bitmap_head dense_invalidated_by_call;
  144. /* An obstack for the bitmaps we need for this problem. */
  145. bitmap_obstack rd_bitmaps;
  146. };
  147. /* Free basic block info. */
  148. static void
  149. df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
  150. void *vbb_info)
  151. {
  152. struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
  153. if (bb_info)
  154. {
  155. bitmap_clear (&bb_info->kill);
  156. bitmap_clear (&bb_info->sparse_kill);
  157. bitmap_clear (&bb_info->gen);
  158. bitmap_clear (&bb_info->in);
  159. bitmap_clear (&bb_info->out);
  160. }
  161. }
  162. /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
  163. not touched unless the block is new. */
  164. static void
  165. df_rd_alloc (bitmap all_blocks)
  166. {
  167. unsigned int bb_index;
  168. bitmap_iterator bi;
  169. struct df_rd_problem_data *problem_data;
  170. if (df_rd->problem_data)
  171. {
  172. problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
  173. bitmap_clear (&problem_data->sparse_invalidated_by_call);
  174. bitmap_clear (&problem_data->dense_invalidated_by_call);
  175. }
  176. else
  177. {
  178. problem_data = XNEW (struct df_rd_problem_data);
  179. df_rd->problem_data = problem_data;
  180. bitmap_obstack_initialize (&problem_data->rd_bitmaps);
  181. bitmap_initialize (&problem_data->sparse_invalidated_by_call,
  182. &problem_data->rd_bitmaps);
  183. bitmap_initialize (&problem_data->dense_invalidated_by_call,
  184. &problem_data->rd_bitmaps);
  185. }
  186. df_grow_bb_info (df_rd);
  187. /* Because of the clustering of all use sites for the same pseudo,
  188. we have to process all of the blocks before doing the analysis. */
  189. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  190. {
  191. struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
  192. /* When bitmaps are already initialized, just clear them. */
  193. if (bb_info->kill.obstack)
  194. {
  195. bitmap_clear (&bb_info->kill);
  196. bitmap_clear (&bb_info->sparse_kill);
  197. bitmap_clear (&bb_info->gen);
  198. }
  199. else
  200. {
  201. bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps);
  202. bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps);
  203. bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps);
  204. bitmap_initialize (&bb_info->in, &problem_data->rd_bitmaps);
  205. bitmap_initialize (&bb_info->out, &problem_data->rd_bitmaps);
  206. }
  207. }
  208. df_rd->optional_p = true;
  209. }
  210. /* Add the effect of the top artificial defs of BB to the reaching definitions
  211. bitmap LOCAL_RD. */
  212. void
  213. df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
  214. {
  215. int bb_index = bb->index;
  216. df_ref def;
  217. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  218. if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
  219. {
  220. unsigned int dregno = DF_REF_REGNO (def);
  221. if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
  222. bitmap_clear_range (local_rd,
  223. DF_DEFS_BEGIN (dregno),
  224. DF_DEFS_COUNT (dregno));
  225. bitmap_set_bit (local_rd, DF_REF_ID (def));
  226. }
  227. }
  228. /* Add the effect of the defs of INSN to the reaching definitions bitmap
  229. LOCAL_RD. */
  230. void
  231. df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
  232. bitmap local_rd)
  233. {
  234. df_ref def;
  235. FOR_EACH_INSN_DEF (def, insn)
  236. {
  237. unsigned int dregno = DF_REF_REGNO (def);
  238. if ((!(df->changeable_flags & DF_NO_HARD_REGS))
  239. || (dregno >= FIRST_PSEUDO_REGISTER))
  240. {
  241. if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
  242. bitmap_clear_range (local_rd,
  243. DF_DEFS_BEGIN (dregno),
  244. DF_DEFS_COUNT (dregno));
  245. if (!(DF_REF_FLAGS (def)
  246. & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
  247. bitmap_set_bit (local_rd, DF_REF_ID (def));
  248. }
  249. }
  250. }
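/* Illustrative sketch (not part of the build; the function name is
   hypothetical): how a client can combine the two simulate routines above
   to track, insn by insn, which defs reach each point of a block once the
   RD problem has been solved.  */
#if 0
static void
example_walk_reaching_defs (basic_block bb)
{
  bitmap_head local_rd;
  rtx_insn *insn;
  bitmap_initialize (&local_rd, &df_bitmap_obstack);
  /* Start from the defs reaching the top of the block...  */
  bitmap_copy (&local_rd, &df_rd_get_bb_info (bb->index)->in);
  /* ...and fold in the artificial defs that are logically at the top.  */
  df_rd_simulate_artificial_defs_at_top (bb, &local_rd);
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        /* Here LOCAL_RD holds the DF_REF_IDs of the defs that reach INSN;
           bitmap_bit_p (&local_rd, DF_REF_ID (some_def)) tests whether a
           particular def is among them.  */
        df_rd_simulate_one_insn (bb, insn, &local_rd);
      }
  bitmap_clear (&local_rd);
}
#endif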
  251. /* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
  252. more complicated than just simulating, because we must produce the
  253. gen and kill sets and hence deal with the two possible representations
  254. of kill sets. */
  255. static void
  256. df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
  257. df_ref def,
  258. int top_flag)
  259. {
  260. for (; def; def = DF_REF_NEXT_LOC (def))
  261. {
  262. if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
  263. {
  264. unsigned int regno = DF_REF_REGNO (def);
  265. unsigned int begin = DF_DEFS_BEGIN (regno);
  266. unsigned int n_defs = DF_DEFS_COUNT (regno);
  267. if ((!(df->changeable_flags & DF_NO_HARD_REGS))
  268. || (regno >= FIRST_PSEUDO_REGISTER))
  269. {
  270. /* Only the last def(s) for a regno in the block have any
  271. effect. */
  272. if (!bitmap_bit_p (&seen_in_block, regno))
  273. {
  274. /* The first def for regno in insn gets to knock out the
  275. defs from other instructions. */
  276. if ((!bitmap_bit_p (&seen_in_insn, regno))
  277. /* If the def is to only part of the reg, it does
  278. not kill the other defs that reach here. */
  279. && (!(DF_REF_FLAGS (def) &
  280. (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
  281. {
  282. if (n_defs > DF_SPARSE_THRESHOLD)
  283. {
  284. bitmap_set_bit (&bb_info->sparse_kill, regno);
  285. bitmap_clear_range (&bb_info->gen, begin, n_defs);
  286. }
  287. else
  288. {
  289. bitmap_set_range (&bb_info->kill, begin, n_defs);
  290. bitmap_clear_range (&bb_info->gen, begin, n_defs);
  291. }
  292. }
  293. bitmap_set_bit (&seen_in_insn, regno);
  294. /* All defs for regno in the instruction may be put into
  295. the gen set. */
  296. if (!(DF_REF_FLAGS (def)
  297. & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
  298. bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
  299. }
  300. }
  301. }
  302. }
  303. }
  304. /* Compute local reaching def info for basic block BB. */
  305. static void
  306. df_rd_bb_local_compute (unsigned int bb_index)
  307. {
  308. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  309. struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
  310. rtx_insn *insn;
  311. bitmap_clear (&seen_in_block);
  312. bitmap_clear (&seen_in_insn);
  313. /* Artificials are only hard regs. */
  314. if (!(df->changeable_flags & DF_NO_HARD_REGS))
  315. df_rd_bb_local_compute_process_def (bb_info,
  316. df_get_artificial_defs (bb_index),
  317. 0);
  318. FOR_BB_INSNS_REVERSE (bb, insn)
  319. {
  320. unsigned int uid = INSN_UID (insn);
  321. if (!INSN_P (insn))
  322. continue;
  323. df_rd_bb_local_compute_process_def (bb_info,
  324. DF_INSN_UID_DEFS (uid), 0);
  325. /* This complex dance with the two bitmaps is required because
  326. instructions can assign twice to the same pseudo. This
  327. generally happens with calls that will have one def for the
  328. result and another def for the clobber. If only one vector
  329. is used and the clobber goes first, the result will be
  330. lost. */
  331. bitmap_ior_into (&seen_in_block, &seen_in_insn);
  332. bitmap_clear (&seen_in_insn);
  333. }
  334. /* Process the artificial defs at the top of the block last since we
  335. are going backwards through the block and these are logically at
  336. the start. */
  337. if (!(df->changeable_flags & DF_NO_HARD_REGS))
  338. df_rd_bb_local_compute_process_def (bb_info,
  339. df_get_artificial_defs (bb_index),
  340. DF_REF_AT_TOP);
  341. }
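/* To make the two-bitmap dance above concrete (illustrative): a call insn
   may carry one def of a register R for its result and a second,
   clobbering def of R.  SEEN_IN_INSN lets the insn's first def do the
   killing while the result def still reaches the gen set; only once the
   whole insn has been processed is R merged into SEEN_IN_BLOCK, so defs
   of R in earlier insns (processed later, since the walk is in reverse)
   are then ignored.  */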
  342. /* Compute local reaching def info for each basic block within BLOCKS. */
  343. static void
  344. df_rd_local_compute (bitmap all_blocks)
  345. {
  346. unsigned int bb_index;
  347. bitmap_iterator bi;
  348. unsigned int regno;
  349. struct df_rd_problem_data *problem_data
  350. = (struct df_rd_problem_data *) df_rd->problem_data;
  351. bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
  352. bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
  353. bitmap_initialize (&seen_in_block, &df_bitmap_obstack);
  354. bitmap_initialize (&seen_in_insn, &df_bitmap_obstack);
  355. df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
  356. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  357. {
  358. df_rd_bb_local_compute (bb_index);
  359. }
  360. /* Set up the knockout bit vectors to be applied across EH_EDGES. */
  361. EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
  362. {
  363. if (! HARD_REGISTER_NUM_P (regno)
  364. || !(df->changeable_flags & DF_NO_HARD_REGS))
  365. {
  366. if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
  367. bitmap_set_bit (sparse_invalidated, regno);
  368. else
  369. bitmap_set_range (dense_invalidated,
  370. DF_DEFS_BEGIN (regno),
  371. DF_DEFS_COUNT (regno));
  372. }
  373. }
  374. bitmap_clear (&seen_in_block);
  375. bitmap_clear (&seen_in_insn);
  376. }
  377. /* Initialize the solution bit vectors for problem. */
  378. static void
  379. df_rd_init_solution (bitmap all_blocks)
  380. {
  381. unsigned int bb_index;
  382. bitmap_iterator bi;
  383. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  384. {
  385. struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
  386. bitmap_copy (&bb_info->out, &bb_info->gen);
  387. bitmap_clear (&bb_info->in);
  388. }
  389. }
  390. /* The IN set of the target gets the OR of the OUT set of the source. */
  391. static bool
  392. df_rd_confluence_n (edge e)
  393. {
  394. bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in;
  395. bitmap op2 = &df_rd_get_bb_info (e->src->index)->out;
  396. bool changed = false;
  397. if (e->flags & EDGE_FAKE)
  398. return false;
  399. if (e->flags & EDGE_EH)
  400. {
  401. struct df_rd_problem_data *problem_data
  402. = (struct df_rd_problem_data *) df_rd->problem_data;
  403. bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
  404. bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
  405. bitmap_iterator bi;
  406. unsigned int regno;
  407. bitmap_head tmp;
  408. bitmap_initialize (&tmp, &df_bitmap_obstack);
  409. bitmap_and_compl (&tmp, op2, dense_invalidated);
  410. EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
  411. {
  412. bitmap_clear_range (&tmp,
  413. DF_DEFS_BEGIN (regno),
  414. DF_DEFS_COUNT (regno));
  415. }
  416. changed |= bitmap_ior_into (op1, &tmp);
  417. bitmap_clear (&tmp);
  418. return changed;
  419. }
  420. else
  421. return bitmap_ior_into (op1, op2);
  422. }
  423. /* Transfer function. */
  424. static bool
  425. df_rd_transfer_function (int bb_index)
  426. {
  427. struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
  428. unsigned int regno;
  429. bitmap_iterator bi;
  430. bitmap in = &bb_info->in;
  431. bitmap out = &bb_info->out;
  432. bitmap gen = &bb_info->gen;
  433. bitmap kill = &bb_info->kill;
  434. bitmap sparse_kill = &bb_info->sparse_kill;
  435. bool changed = false;
  436. if (bitmap_empty_p (sparse_kill))
  437. changed = bitmap_ior_and_compl (out, gen, in, kill);
  438. else
  439. {
  440. struct df_rd_problem_data *problem_data;
  441. bitmap_head tmp;
  442. /* Note that TMP is _not_ a temporary bitmap if we end up replacing
  443. OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
  444. problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
  445. bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
  446. bitmap_and_compl (&tmp, in, kill);
  447. EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
  448. {
  449. bitmap_clear_range (&tmp,
  450. DF_DEFS_BEGIN (regno),
  451. DF_DEFS_COUNT (regno));
  452. }
  453. bitmap_ior_into (&tmp, gen);
  454. changed = !bitmap_equal_p (&tmp, out);
  455. if (changed)
  456. {
  457. bitmap_clear (out);
  458. bb_info->out = tmp;
  459. }
  460. else
  461. bitmap_clear (&tmp);
  462. }
  463. if (df->changeable_flags & DF_RD_PRUNE_DEAD_DEFS)
  464. {
  465. /* Create a mask of DEFs for all registers live at the end of this
  466. basic block, and mask out DEFs of registers that are not live.
  467. Computing the mask looks costly, but the benefit of the pruning
  468. outweighs the cost. */
  469. struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
  470. bitmap regs_live_out = &df_lr_get_bb_info (bb_index)->out;
  471. bitmap live_defs = BITMAP_ALLOC (&df_bitmap_obstack);
  472. unsigned int regno;
  473. bitmap_iterator bi;
  474. EXECUTE_IF_SET_IN_BITMAP (regs_live_out, 0, regno, bi)
  475. bitmap_set_range (live_defs,
  476. DF_DEFS_BEGIN (regno),
  477. DF_DEFS_COUNT (regno));
  478. changed |= bitmap_and_into (&bb_info->out, live_defs);
  479. BITMAP_FREE (live_defs);
  480. }
  481. return changed;
  482. }
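/* In other words, under DF_RD_PRUNE_DEAD_DEFS the transfer function
   finishes with (a sketch)

     OUT (bb) &= union over R in DF_LR OUT (bb) of the def ids of R

   so defs of registers that are not live out of the block in the DF_LR
   sense never enter the global solution.  */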
  483. /* Free all storage associated with the problem. */
  484. static void
  485. df_rd_free (void)
  486. {
  487. struct df_rd_problem_data *problem_data
  488. = (struct df_rd_problem_data *) df_rd->problem_data;
  489. if (problem_data)
  490. {
  491. bitmap_obstack_release (&problem_data->rd_bitmaps);
  492. df_rd->block_info_size = 0;
  493. free (df_rd->block_info);
  494. df_rd->block_info = NULL;
  495. free (df_rd->problem_data);
  496. }
  497. free (df_rd);
  498. }
  499. /* Debugging info. */
  500. static void
  501. df_rd_start_dump (FILE *file)
  502. {
  503. struct df_rd_problem_data *problem_data
  504. = (struct df_rd_problem_data *) df_rd->problem_data;
  505. unsigned int m = DF_REG_SIZE (df);
  506. unsigned int regno;
  507. if (!df_rd->block_info)
  508. return;
  509. fprintf (file, ";; Reaching defs:\n");
  510. fprintf (file, ";; sparse invalidated \t");
  511. dump_bitmap (file, &problem_data->sparse_invalidated_by_call);
  512. fprintf (file, ";; dense invalidated \t");
  513. dump_bitmap (file, &problem_data->dense_invalidated_by_call);
  514. fprintf (file, ";; reg->defs[] map:\t");
  515. for (regno = 0; regno < m; regno++)
  516. if (DF_DEFS_COUNT (regno))
  517. fprintf (file, "%d[%d,%d] ", regno,
  518. DF_DEFS_BEGIN (regno),
  519. DF_DEFS_BEGIN (regno) + DF_DEFS_COUNT (regno) - 1);
  520. fprintf (file, "\n");
  521. }
  522. static void
  523. df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
  524. {
  525. bitmap_head tmp;
  526. unsigned int regno;
  527. unsigned int m = DF_REG_SIZE (df);
  528. bool first_reg = true;
  529. fprintf (file, "%s\t(%d) ", prefix, (int) bitmap_count_bits (defs_set));
  530. bitmap_initialize (&tmp, &df_bitmap_obstack);
  531. for (regno = 0; regno < m; regno++)
  532. {
  533. if (HARD_REGISTER_NUM_P (regno)
  534. && (df->changeable_flags & DF_NO_HARD_REGS))
  535. continue;
  536. bitmap_set_range (&tmp, DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno));
  537. bitmap_and_into (&tmp, defs_set);
  538. if (! bitmap_empty_p (&tmp))
  539. {
  540. bitmap_iterator bi;
  541. unsigned int ix;
  542. bool first_def = true;
  543. if (! first_reg)
  544. fprintf (file, ",");
  545. first_reg = false;
  546. fprintf (file, "%u[", regno);
  547. EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, ix, bi)
  548. {
  549. fprintf (file, "%s%u", first_def ? "" : ",", ix);
  550. first_def = false;
  551. }
  552. fprintf (file, "]");
  553. }
  554. bitmap_clear (&tmp);
  555. }
  556. fprintf (file, "\n");
  557. bitmap_clear (&tmp);
  558. }
  559. /* Debugging info at top of bb. */
  560. static void
  561. df_rd_top_dump (basic_block bb, FILE *file)
  562. {
  563. struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
  564. if (!bb_info)
  565. return;
  566. df_rd_dump_defs_set (&bb_info->in, ";; rd in ", file);
  567. df_rd_dump_defs_set (&bb_info->gen, ";; rd gen ", file);
  568. df_rd_dump_defs_set (&bb_info->kill, ";; rd kill", file);
  569. }
  570. /* Debugging info at bottom of bb. */
  571. static void
  572. df_rd_bottom_dump (basic_block bb, FILE *file)
  573. {
  574. struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
  575. if (!bb_info)
  576. return;
  577. df_rd_dump_defs_set (&bb_info->out, ";; rd out ", file);
  578. }
  579. /* All of the information associated with every instance of the problem. */
  580. static struct df_problem problem_RD =
  581. {
  582. DF_RD, /* Problem id. */
  583. DF_FORWARD, /* Direction. */
  584. df_rd_alloc, /* Allocate the problem specific data. */
  585. NULL, /* Reset global information. */
  586. df_rd_free_bb_info, /* Free basic block info. */
  587. df_rd_local_compute, /* Local compute function. */
  588. df_rd_init_solution, /* Init the solution specific data. */
  589. df_worklist_dataflow, /* Worklist solver. */
  590. NULL, /* Confluence operator 0. */
  591. df_rd_confluence_n, /* Confluence operator n. */
  592. df_rd_transfer_function, /* Transfer function. */
  593. NULL, /* Finalize function. */
  594. df_rd_free, /* Free all of the problem information. */
  595. df_rd_free, /* Remove this problem from the stack of dataflow problems. */
  596. df_rd_start_dump, /* Debugging. */
  597. df_rd_top_dump, /* Debugging start block. */
  598. df_rd_bottom_dump, /* Debugging end block. */
  599. NULL, /* Debugging start insn. */
  600. NULL, /* Debugging end insn. */
  601. NULL, /* Incremental solution verify start. */
  602. NULL, /* Incremental solution verify end. */
  603. NULL, /* Dependent problem. */
  604. sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
  605. TV_DF_RD, /* Timing variable. */
  606. true /* Reset blocks on dropping out of blocks_to_analyze. */
  607. };
  608. /* Create a new RD instance and add it to the existing instance
  609. of DF. */
  610. void
  611. df_rd_add_problem (void)
  612. {
  613. df_add_problem (&problem_RD);
  614. }
  615. /*----------------------------------------------------------------------------
  616. LIVE REGISTERS
  617. Find the locations in the function where any use of a pseudo can
  618. reach in the backwards direction. In and out bitvectors are built
  619. for each basic block. The regno is used to index into these sets.
  620. See df.h for details.
  621. ----------------------------------------------------------------------------*/
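/* These are the textbook backward liveness equations (a sketch; see
   df_lr_confluence_n and df_lr_transfer_function below):

     USE (bb) = regs used in BB before any def of them in BB
     DEF (bb) = regs defined in BB
     OUT (bb) = union over successors S of IN (S)
     IN (bb)  = USE (bb) | (OUT (bb) & ~DEF (bb))

   with the refinements that call-clobbered regs are filtered out across
   EH edges and df->hardware_regs_used is always added in.  */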
  622. /* Private data used to verify the solution for this problem. */
  623. struct df_lr_problem_data
  624. {
  625. bitmap_head *in;
  626. bitmap_head *out;
  627. /* An obstack for the bitmaps we need for this problem. */
  628. bitmap_obstack lr_bitmaps;
  629. };
  630. /* Free basic block info. */
  631. static void
  632. df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
  633. void *vbb_info)
  634. {
  635. struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
  636. if (bb_info)
  637. {
  638. bitmap_clear (&bb_info->use);
  639. bitmap_clear (&bb_info->def);
  640. bitmap_clear (&bb_info->in);
  641. bitmap_clear (&bb_info->out);
  642. }
  643. }
  644. /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
  645. not touched unless the block is new. */
  646. static void
  647. df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
  648. {
  649. unsigned int bb_index;
  650. bitmap_iterator bi;
  651. struct df_lr_problem_data *problem_data;
  652. df_grow_bb_info (df_lr);
  653. if (df_lr->problem_data)
  654. problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
  655. else
  656. {
  657. problem_data = XNEW (struct df_lr_problem_data);
  658. df_lr->problem_data = problem_data;
  659. problem_data->out = NULL;
  660. problem_data->in = NULL;
  661. bitmap_obstack_initialize (&problem_data->lr_bitmaps);
  662. }
  663. EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
  664. {
  665. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
  666. /* When bitmaps are already initialized, just clear them. */
  667. if (bb_info->use.obstack)
  668. {
  669. bitmap_clear (&bb_info->def);
  670. bitmap_clear (&bb_info->use);
  671. }
  672. else
  673. {
  674. bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps);
  675. bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps);
  676. bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps);
  677. bitmap_initialize (&bb_info->out, &problem_data->lr_bitmaps);
  678. }
  679. }
  680. df_lr->optional_p = false;
  681. }
  682. /* Reset the global solution for recalculation. */
  683. static void
  684. df_lr_reset (bitmap all_blocks)
  685. {
  686. unsigned int bb_index;
  687. bitmap_iterator bi;
  688. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  689. {
  690. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
  691. gcc_assert (bb_info);
  692. bitmap_clear (&bb_info->in);
  693. bitmap_clear (&bb_info->out);
  694. }
  695. }
  696. /* Compute local live register info for basic block BB. */
  697. static void
  698. df_lr_bb_local_compute (unsigned int bb_index)
  699. {
  700. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  701. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
  702. rtx_insn *insn;
  703. df_ref def, use;
  704. /* Process the registers set in an exception handler. */
  705. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  706. if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
  707. {
  708. unsigned int dregno = DF_REF_REGNO (def);
  709. bitmap_set_bit (&bb_info->def, dregno);
  710. bitmap_clear_bit (&bb_info->use, dregno);
  711. }
  712. /* Process the hardware registers that are always live. */
  713. FOR_EACH_ARTIFICIAL_USE (use, bb_index)
  714. /* Add use to set of uses in this BB. */
  715. if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
  716. bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
  717. FOR_BB_INSNS_REVERSE (bb, insn)
  718. {
  719. if (!NONDEBUG_INSN_P (insn))
  720. continue;
  721. df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  722. FOR_EACH_INSN_INFO_DEF (def, insn_info)
  723. /* If the def is to only part of the reg, it does
  724. not kill the other defs that reach here. */
  725. if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
  726. {
  727. unsigned int dregno = DF_REF_REGNO (def);
  728. bitmap_set_bit (&bb_info->def, dregno);
  729. bitmap_clear_bit (&bb_info->use, dregno);
  730. }
  731. FOR_EACH_INSN_INFO_USE (use, insn_info)
  732. /* Add use to set of uses in this BB. */
  733. bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
  734. }
  735. /* Process the registers set in an exception handler or the hard
  736. frame pointer if this block is the target of a non local
  737. goto. */
  738. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  739. if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
  740. {
  741. unsigned int dregno = DF_REF_REGNO (def);
  742. bitmap_set_bit (&bb_info->def, dregno);
  743. bitmap_clear_bit (&bb_info->use, dregno);
  744. }
  745. #ifdef EH_USES
  746. /* Process the uses that are live into an exception handler. */
  747. FOR_EACH_ARTIFICIAL_USE (use, bb_index)
  748. /* Add use to set of uses in this BB. */
  749. if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
  750. bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
  751. #endif
  752. /* If the df_live problem is not defined, such as at -O0 and -O1, we
  753. still need to keep the luids up to date. This is normally done
  754. in the df_live problem since this problem has a forwards
  755. scan. */
  756. if (!df_live)
  757. df_recompute_luids (bb);
  758. }
  759. /* Compute local live register info for each basic block within BLOCKS. */
  760. static void
  761. df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
  762. {
  763. unsigned int bb_index, i;
  764. bitmap_iterator bi;
  765. bitmap_clear (&df->hardware_regs_used);
  766. /* The all-important stack pointer must always be live. */
  767. bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
  768. /* Global regs are always live, too. */
  769. for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
  770. if (global_regs[i])
  771. bitmap_set_bit (&df->hardware_regs_used, i);
  772. /* Before reload, there are a few registers that must be forced
  773. live everywhere -- which might not already be the case for
  774. blocks within infinite loops. */
  775. if (!reload_completed)
  776. {
  777. unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
  778. /* Any reference to any pseudo before reload is a potential
  779. reference of the frame pointer. */
  780. bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
  781. #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
  782. /* Pseudos with argument area equivalences may require
  783. reloading via the argument pointer. */
  784. if (fixed_regs[ARG_POINTER_REGNUM])
  785. bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
  786. #endif
  787. /* Any constant, or pseudo with constant equivalences, may
  788. require reloading from memory using the pic register. */
  789. if (pic_offset_table_regnum != INVALID_REGNUM
  790. && fixed_regs[pic_offset_table_regnum])
  791. bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
  792. }
  793. EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
  794. {
  795. if (bb_index == EXIT_BLOCK)
  796. {
  797. /* The exit block is special for this problem and its bits are
  798. computed from thin air. */
  799. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
  800. bitmap_copy (&bb_info->use, df->exit_block_uses);
  801. }
  802. else
  803. df_lr_bb_local_compute (bb_index);
  804. }
  805. bitmap_clear (df_lr->out_of_date_transfer_functions);
  806. }
  807. /* Initialize the solution vectors. */
  808. static void
  809. df_lr_init (bitmap all_blocks)
  810. {
  811. unsigned int bb_index;
  812. bitmap_iterator bi;
  813. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  814. {
  815. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
  816. bitmap_copy (&bb_info->in, &bb_info->use);
  817. bitmap_clear (&bb_info->out);
  818. }
  819. }
  820. /* Confluence function that processes infinite loops. This might be a
  821. noreturn function that throws. And even if it isn't, getting the
  822. unwind info right helps debugging. */
  823. static void
  824. df_lr_confluence_0 (basic_block bb)
  825. {
  826. bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
  827. if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
  828. bitmap_copy (op1, &df->hardware_regs_used);
  829. }
  830. /* Confluence function that ignores fake edges. */
  831. static bool
  832. df_lr_confluence_n (edge e)
  833. {
  834. bitmap op1 = &df_lr_get_bb_info (e->src->index)->out;
  835. bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in;
  836. bool changed = false;
  837. /* Call-clobbered registers die across exception and call edges. */
  838. /* ??? Abnormal call edges ignored for the moment, as this gets
  839. confused by sibling call edges, which crashes reg-stack. */
  840. if (e->flags & EDGE_EH)
  841. changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
  842. else
  843. changed = bitmap_ior_into (op1, op2);
  844. changed |= bitmap_ior_into (op1, &df->hardware_regs_used);
  845. return changed;
  846. }
  847. /* Transfer function. */
  848. static bool
  849. df_lr_transfer_function (int bb_index)
  850. {
  851. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
  852. bitmap in = &bb_info->in;
  853. bitmap out = &bb_info->out;
  854. bitmap use = &bb_info->use;
  855. bitmap def = &bb_info->def;
  856. return bitmap_ior_and_compl (in, use, out, def);
  857. }
  858. /* Run the fast dce as a side effect of building LR. */
  859. static void
  860. df_lr_finalize (bitmap all_blocks)
  861. {
  862. df_lr->solutions_dirty = false;
  863. if (df->changeable_flags & DF_LR_RUN_DCE)
  864. {
  865. run_fast_df_dce ();
  866. /* If dce deletes some instructions, we need to recompute the lr
  867. solution before proceeding further. The problem is that fast
  868. dce is a pessimistic dataflow algorithm. In the case where
  869. it deletes a statement S inside of a loop, the uses inside of
  870. S may not be deleted from the dataflow solution because they
  871. were carried around the loop. While it is conservatively
  872. correct to leave these extra bits, the standards of df
  873. require that we maintain the best possible (least fixed
  874. point) solution. The only way to do that is to redo the
  875. iteration from the beginning. See PR35805 for an
  876. example. */
  877. if (df_lr->solutions_dirty)
  878. {
  879. df_clear_flags (DF_LR_RUN_DCE);
  880. df_lr_alloc (all_blocks);
  881. df_lr_local_compute (all_blocks);
  882. df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
  883. df_lr_finalize (all_blocks);
  884. df_set_flags (DF_LR_RUN_DCE);
  885. }
  886. }
  887. }
  888. /* Free all storage associated with the problem. */
  889. static void
  890. df_lr_free (void)
  891. {
  892. struct df_lr_problem_data *problem_data
  893. = (struct df_lr_problem_data *) df_lr->problem_data;
  894. if (df_lr->block_info)
  895. {
  896. df_lr->block_info_size = 0;
  897. free (df_lr->block_info);
  898. df_lr->block_info = NULL;
  899. bitmap_obstack_release (&problem_data->lr_bitmaps);
  900. free (df_lr->problem_data);
  901. df_lr->problem_data = NULL;
  902. }
  903. BITMAP_FREE (df_lr->out_of_date_transfer_functions);
  904. free (df_lr);
  905. }
  906. /* Debugging info at top of bb. */
  907. static void
  908. df_lr_top_dump (basic_block bb, FILE *file)
  909. {
  910. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
  911. struct df_lr_problem_data *problem_data;
  912. if (!bb_info)
  913. return;
  914. fprintf (file, ";; lr in \t");
  915. df_print_regset (file, &bb_info->in);
  916. if (df_lr->problem_data)
  917. {
  918. problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
  919. if (problem_data->in)
  920. {
  921. fprintf (file, ";; old in \t");
  922. df_print_regset (file, &problem_data->in[bb->index]);
  923. }
  924. }
  925. fprintf (file, ";; lr use \t");
  926. df_print_regset (file, &bb_info->use);
  927. fprintf (file, ";; lr def \t");
  928. df_print_regset (file, &bb_info->def);
  929. }
  930. /* Debugging info at bottom of bb. */
  931. static void
  932. df_lr_bottom_dump (basic_block bb, FILE *file)
  933. {
  934. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
  935. struct df_lr_problem_data *problem_data;
  936. if (!bb_info)
  937. return;
  938. fprintf (file, ";; lr out \t");
  939. df_print_regset (file, &bb_info->out);
  940. if (df_lr->problem_data)
  941. {
  942. problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
  943. if (problem_data->out)
  944. {
  945. fprintf (file, ";; old out \t");
  946. df_print_regset (file, &problem_data->out[bb->index]);
  947. }
  948. }
  949. }
  950. /* Build the data structure to verify that the solution to the dataflow
  951. equations is not dirty. */
  952. static void
  953. df_lr_verify_solution_start (void)
  954. {
  955. basic_block bb;
  956. struct df_lr_problem_data *problem_data;
  957. if (df_lr->solutions_dirty)
  958. return;
  959. /* Set it true so that the solution is recomputed. */
  960. df_lr->solutions_dirty = true;
  961. problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
  962. problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  963. problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  964. FOR_ALL_BB_FN (bb, cfun)
  965. {
  966. bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps);
  967. bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps);
  968. bitmap_copy (&problem_data->in[bb->index], DF_LR_IN (bb));
  969. bitmap_copy (&problem_data->out[bb->index], DF_LR_OUT (bb));
  970. }
  971. }
  972. /* Compare the saved data structure and the new solution to the dataflow
  973. equations. */
  974. static void
  975. df_lr_verify_solution_end (void)
  976. {
  977. struct df_lr_problem_data *problem_data;
  978. basic_block bb;
  979. problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
  980. if (!problem_data->out)
  981. return;
  982. if (df_lr->solutions_dirty)
  983. /* Do not check if the solution is still dirty. See the comment
  984. in df_lr_finalize for details. */
  985. df_lr->solutions_dirty = false;
  986. else
  987. FOR_ALL_BB_FN (bb, cfun)
  988. {
  989. if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
  990. || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
  991. {
  992. /*df_dump (stderr);*/
  993. gcc_unreachable ();
  994. }
  995. }
  996. /* Cannot delete them immediately because you may want to dump them
  997. if the comparison fails. */
  998. FOR_ALL_BB_FN (bb, cfun)
  999. {
  1000. bitmap_clear (&problem_data->in[bb->index]);
  1001. bitmap_clear (&problem_data->out[bb->index]);
  1002. }
  1003. free (problem_data->in);
  1004. free (problem_data->out);
  1005. problem_data->in = NULL;
  1006. problem_data->out = NULL;
  1007. }
  1008. /* All of the information associated with every instance of the problem. */
  1009. static struct df_problem problem_LR =
  1010. {
  1011. DF_LR, /* Problem id. */
  1012. DF_BACKWARD, /* Direction. */
  1013. df_lr_alloc, /* Allocate the problem specific data. */
  1014. df_lr_reset, /* Reset global information. */
  1015. df_lr_free_bb_info, /* Free basic block info. */
  1016. df_lr_local_compute, /* Local compute function. */
  1017. df_lr_init, /* Init the solution specific data. */
  1018. df_worklist_dataflow, /* Worklist solver. */
  1019. df_lr_confluence_0, /* Confluence operator 0. */
  1020. df_lr_confluence_n, /* Confluence operator n. */
  1021. df_lr_transfer_function, /* Transfer function. */
  1022. df_lr_finalize, /* Finalize function. */
  1023. df_lr_free, /* Free all of the problem information. */
  1024. NULL, /* Remove this problem from the stack of dataflow problems. */
  1025. NULL, /* Debugging. */
  1026. df_lr_top_dump, /* Debugging start block. */
  1027. df_lr_bottom_dump, /* Debugging end block. */
  1028. NULL, /* Debugging start insn. */
  1029. NULL, /* Debugging end insn. */
  1030. df_lr_verify_solution_start,/* Incremental solution verify start. */
  1031. df_lr_verify_solution_end, /* Incremental solution verify end. */
  1032. NULL, /* Dependent problem. */
  1033. sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
  1034. TV_DF_LR, /* Timing variable. */
  1035. false /* Reset blocks on dropping out of blocks_to_analyze. */
  1036. };
1037. /* Create a new DATAFLOW instance and add it to an existing instance
1038. of DF. The solution is then accessed through the DF_LR_IN and
1039. DF_LR_OUT macros once the analysis has been run. */
  1040. void
  1041. df_lr_add_problem (void)
  1042. {
  1043. df_add_problem (&problem_LR);
  1044. /* These will be initialized when df_scan_blocks processes each
  1045. block. */
  1046. df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
  1047. }
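/* A minimal usage sketch (illustrative; BB and REGNO are assumed to come
   from the caller): once df_analyze () has computed the LR solution, a
   client can test liveness at a block boundary with

     bitmap live_out = DF_LR_OUT (bb);
     if (bitmap_bit_p (live_out, regno))
       ...   (regno is live on exit from bb)

   DF_LR_IN (bb) gives the corresponding set at block entry.  */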
  1048. /* Verify that all of the lr related info is consistent and
  1049. correct. */
  1050. void
  1051. df_lr_verify_transfer_functions (void)
  1052. {
  1053. basic_block bb;
  1054. bitmap_head saved_def;
  1055. bitmap_head saved_use;
  1056. bitmap_head all_blocks;
  1057. if (!df)
  1058. return;
  1059. bitmap_initialize (&saved_def, &bitmap_default_obstack);
  1060. bitmap_initialize (&saved_use, &bitmap_default_obstack);
  1061. bitmap_initialize (&all_blocks, &bitmap_default_obstack);
  1062. FOR_ALL_BB_FN (bb, cfun)
  1063. {
  1064. struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
  1065. bitmap_set_bit (&all_blocks, bb->index);
  1066. if (bb_info)
  1067. {
  1068. /* Make a copy of the transfer functions and then compute
  1069. new ones to see if the transfer functions have
  1070. changed. */
  1071. if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
  1072. bb->index))
  1073. {
  1074. bitmap_copy (&saved_def, &bb_info->def);
  1075. bitmap_copy (&saved_use, &bb_info->use);
  1076. bitmap_clear (&bb_info->def);
  1077. bitmap_clear (&bb_info->use);
  1078. df_lr_bb_local_compute (bb->index);
  1079. gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
  1080. gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
  1081. }
  1082. }
  1083. else
  1084. {
1085. /* If we do not have basic block info, the block must be in
1086. the list of dirty blocks or else someone has added a
1087. block behind our back. */
  1088. gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
  1089. bb->index));
  1090. }
  1091. /* Make sure no one created a block without following
  1092. procedures. */
  1093. gcc_assert (df_scan_get_bb_info (bb->index));
  1094. }
  1095. /* Make sure there are no dirty bits in blocks that have been deleted. */
  1096. gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
  1097. &all_blocks));
  1098. bitmap_clear (&saved_def);
  1099. bitmap_clear (&saved_use);
  1100. bitmap_clear (&all_blocks);
  1101. }
  1102. /*----------------------------------------------------------------------------
  1103. LIVE AND MUST-INITIALIZED REGISTERS.
  1104. This problem first computes the IN and OUT bitvectors for the
1105. must-initialized registers problem, which is a forward problem.
  1106. It gives the set of registers for which we MUST have an available
  1107. definition on any path from the entry block to the entry/exit of
  1108. a basic block. Sets generate a definition, while clobbers kill
  1109. a definition.
  1110. In and out bitvectors are built for each basic block and are indexed by
1111. regnum (see df.h for details). The in and out bitvectors in struct
1112. df_live_bb_info actually refer to the must-initialized problem.
  1113. Then, the in and out sets for the LIVE problem itself are computed.
  1114. These are the logical AND of the IN and OUT sets from the LR problem
  1115. and the must-initialized problem.
  1116. ----------------------------------------------------------------------------*/
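/* Restated compactly (a sketch of the equations implemented below, using
   LR_IN/LR_OUT for the solution of the LR problem):

     OUT (bb) = (GEN (bb) & LR_OUT (bb))
                | ((IN (bb) & LR_IN (bb)) & ~KILL (bb))
     IN (bb)  = union of OUT (pred) over the non-fake incoming edges

   and once the fixpoint is reached, df_live_finalize computes

     LIVE_IN (bb)  = IN (bb)  & LR_IN (bb)
     LIVE_OUT (bb) = OUT (bb) & LR_OUT (bb).  */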
  1117. /* Private data used to verify the solution for this problem. */
  1118. struct df_live_problem_data
  1119. {
  1120. bitmap_head *in;
  1121. bitmap_head *out;
  1122. /* An obstack for the bitmaps we need for this problem. */
  1123. bitmap_obstack live_bitmaps;
  1124. };
  1125. /* Scratch var used by transfer functions. This is used to implement
  1126. an optimization to reduce the amount of space used to compute the
  1127. combined lr and live analysis. */
  1128. static bitmap_head df_live_scratch;
  1129. /* Free basic block info. */
  1130. static void
  1131. df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
  1132. void *vbb_info)
  1133. {
  1134. struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
  1135. if (bb_info)
  1136. {
  1137. bitmap_clear (&bb_info->gen);
  1138. bitmap_clear (&bb_info->kill);
  1139. bitmap_clear (&bb_info->in);
  1140. bitmap_clear (&bb_info->out);
  1141. }
  1142. }
  1143. /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
  1144. not touched unless the block is new. */
  1145. static void
  1146. df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
  1147. {
  1148. unsigned int bb_index;
  1149. bitmap_iterator bi;
  1150. struct df_live_problem_data *problem_data;
  1151. if (df_live->problem_data)
  1152. problem_data = (struct df_live_problem_data *) df_live->problem_data;
  1153. else
  1154. {
  1155. problem_data = XNEW (struct df_live_problem_data);
  1156. df_live->problem_data = problem_data;
  1157. problem_data->out = NULL;
  1158. problem_data->in = NULL;
  1159. bitmap_obstack_initialize (&problem_data->live_bitmaps);
  1160. bitmap_initialize (&df_live_scratch, &problem_data->live_bitmaps);
  1161. }
  1162. df_grow_bb_info (df_live);
  1163. EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
  1164. {
  1165. struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
  1166. /* When bitmaps are already initialized, just clear them. */
  1167. if (bb_info->kill.obstack)
  1168. {
  1169. bitmap_clear (&bb_info->kill);
  1170. bitmap_clear (&bb_info->gen);
  1171. }
  1172. else
  1173. {
  1174. bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps);
  1175. bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps);
  1176. bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps);
  1177. bitmap_initialize (&bb_info->out, &problem_data->live_bitmaps);
  1178. }
  1179. }
  1180. df_live->optional_p = (optimize <= 1);
  1181. }
  1182. /* Reset the global solution for recalculation. */
  1183. static void
  1184. df_live_reset (bitmap all_blocks)
  1185. {
  1186. unsigned int bb_index;
  1187. bitmap_iterator bi;
  1188. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  1189. {
  1190. struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
  1191. gcc_assert (bb_info);
  1192. bitmap_clear (&bb_info->in);
  1193. bitmap_clear (&bb_info->out);
  1194. }
  1195. }
1196. /* Compute the local must-initialized register info (gen/kill) for basic block BB. */
  1197. static void
  1198. df_live_bb_local_compute (unsigned int bb_index)
  1199. {
  1200. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  1201. struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
  1202. rtx_insn *insn;
  1203. df_ref def;
  1204. int luid = 0;
  1205. FOR_BB_INSNS (bb, insn)
  1206. {
  1207. unsigned int uid = INSN_UID (insn);
  1208. struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
  1209. /* Inserting labels does not always trigger the incremental
  1210. rescanning. */
  1211. if (!insn_info)
  1212. {
  1213. gcc_assert (!INSN_P (insn));
  1214. insn_info = df_insn_create_insn_record (insn);
  1215. }
  1216. DF_INSN_INFO_LUID (insn_info) = luid;
  1217. if (!INSN_P (insn))
  1218. continue;
  1219. luid++;
  1220. FOR_EACH_INSN_INFO_DEF (def, insn_info)
  1221. {
  1222. unsigned int regno = DF_REF_REGNO (def);
  1223. if (DF_REF_FLAGS_IS_SET (def,
  1224. DF_REF_PARTIAL | DF_REF_CONDITIONAL))
1225. /* All partial or conditional defs
1226. seen are included in the gen set. */
  1227. bitmap_set_bit (&bb_info->gen, regno);
  1228. else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
  1229. /* Only must clobbers for the entire reg destroy the
  1230. value. */
  1231. bitmap_set_bit (&bb_info->kill, regno);
  1232. else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
  1233. bitmap_set_bit (&bb_info->gen, regno);
  1234. }
  1235. }
  1236. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  1237. bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
  1238. }
1239. /* Compute the local must-initialized register info for all dirty blocks. */
  1240. static void
  1241. df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
  1242. {
  1243. unsigned int bb_index;
  1244. bitmap_iterator bi;
  1245. df_grow_insn_info ();
  1246. EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
  1247. 0, bb_index, bi)
  1248. {
  1249. df_live_bb_local_compute (bb_index);
  1250. }
  1251. bitmap_clear (df_live->out_of_date_transfer_functions);
  1252. }
  1253. /* Initialize the solution vectors. */
  1254. static void
  1255. df_live_init (bitmap all_blocks)
  1256. {
  1257. unsigned int bb_index;
  1258. bitmap_iterator bi;
  1259. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  1260. {
  1261. struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
  1262. struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
1263. /* No register may reach a location where it is not used. Thus
1264. we trim the must-initialized result to the places where it is used. */
  1265. bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
  1266. bitmap_clear (&bb_info->in);
  1267. }
  1268. }
  1269. /* Forward confluence function that ignores fake edges. */
  1270. static bool
  1271. df_live_confluence_n (edge e)
  1272. {
  1273. bitmap op1 = &df_live_get_bb_info (e->dest->index)->in;
  1274. bitmap op2 = &df_live_get_bb_info (e->src->index)->out;
  1275. if (e->flags & EDGE_FAKE)
  1276. return false;
  1277. return bitmap_ior_into (op1, op2);
  1278. }
  1279. /* Transfer function for the forwards must-initialized problem. */
  1280. static bool
  1281. df_live_transfer_function (int bb_index)
  1282. {
  1283. struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
  1284. struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
  1285. bitmap in = &bb_info->in;
  1286. bitmap out = &bb_info->out;
  1287. bitmap gen = &bb_info->gen;
  1288. bitmap kill = &bb_info->kill;
  1289. /* We need to use a scratch set here so that the value returned from this
  1290. function invocation properly reflects whether the sets changed in a
  1291. significant way; i.e. not just because the lr set was anded in. */
  1292. bitmap_and (&df_live_scratch, gen, &bb_lr_info->out);
1293. /* No register may reach a location where it is not used. Thus
1294. we trim the must-initialized result to the places where it is used. */
  1295. bitmap_and_into (in, &bb_lr_info->in);
  1296. return bitmap_ior_and_compl (out, &df_live_scratch, in, kill);
  1297. }
  1298. /* And the LR info with the must-initialized registers, to produce the LIVE info. */
  1299. static void
  1300. df_live_finalize (bitmap all_blocks)
  1301. {
  1302. if (df_live->solutions_dirty)
  1303. {
  1304. bitmap_iterator bi;
  1305. unsigned int bb_index;
  1306. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  1307. {
  1308. struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
  1309. struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
1310. /* No register may reach a location where it is not used. Thus
1311. we trim the must-initialized result to the places where it is used. */
  1312. bitmap_and_into (&bb_live_info->in, &bb_lr_info->in);
  1313. bitmap_and_into (&bb_live_info->out, &bb_lr_info->out);
  1314. }
  1315. df_live->solutions_dirty = false;
  1316. }
  1317. }
  1318. /* Free all storage associated with the problem. */
  1319. static void
  1320. df_live_free (void)
  1321. {
  1322. struct df_live_problem_data *problem_data
  1323. = (struct df_live_problem_data *) df_live->problem_data;
  1324. if (df_live->block_info)
  1325. {
  1326. df_live->block_info_size = 0;
  1327. free (df_live->block_info);
  1328. df_live->block_info = NULL;
  1329. bitmap_clear (&df_live_scratch);
  1330. bitmap_obstack_release (&problem_data->live_bitmaps);
  1331. free (problem_data);
  1332. df_live->problem_data = NULL;
  1333. }
  1334. BITMAP_FREE (df_live->out_of_date_transfer_functions);
  1335. free (df_live);
  1336. }
  1337. /* Debugging info at top of bb. */
  1338. static void
  1339. df_live_top_dump (basic_block bb, FILE *file)
  1340. {
  1341. struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
  1342. struct df_live_problem_data *problem_data;
  1343. if (!bb_info)
  1344. return;
  1345. fprintf (file, ";; live in \t");
  1346. df_print_regset (file, &bb_info->in);
  1347. if (df_live->problem_data)
  1348. {
  1349. problem_data = (struct df_live_problem_data *)df_live->problem_data;
  1350. if (problem_data->in)
  1351. {
  1352. fprintf (file, ";; old in \t");
  1353. df_print_regset (file, &problem_data->in[bb->index]);
  1354. }
  1355. }
  1356. fprintf (file, ";; live gen \t");
  1357. df_print_regset (file, &bb_info->gen);
  1358. fprintf (file, ";; live kill\t");
  1359. df_print_regset (file, &bb_info->kill);
  1360. }
  1361. /* Debugging info at bottom of bb. */
  1362. static void
  1363. df_live_bottom_dump (basic_block bb, FILE *file)
  1364. {
  1365. struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
  1366. struct df_live_problem_data *problem_data;
  1367. if (!bb_info)
  1368. return;
  1369. fprintf (file, ";; live out \t");
  1370. df_print_regset (file, &bb_info->out);
  1371. if (df_live->problem_data)
  1372. {
  1373. problem_data = (struct df_live_problem_data *)df_live->problem_data;
  1374. if (problem_data->out)
  1375. {
  1376. fprintf (file, ";; old out \t");
  1377. df_print_regset (file, &problem_data->out[bb->index]);
  1378. }
  1379. }
  1380. }
  1381. /* Build the datastructure to verify that the solution to the dataflow
  1382. equations is not dirty. */
  1383. static void
  1384. df_live_verify_solution_start (void)
  1385. {
  1386. basic_block bb;
  1387. struct df_live_problem_data *problem_data;
  1388. if (df_live->solutions_dirty)
  1389. return;
  1390. /* Set it true so that the solution is recomputed. */
  1391. df_live->solutions_dirty = true;
  1392. problem_data = (struct df_live_problem_data *)df_live->problem_data;
  1393. problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  1394. problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  1395. FOR_ALL_BB_FN (bb, cfun)
  1396. {
  1397. bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps);
  1398. bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps);
  1399. bitmap_copy (&problem_data->in[bb->index], DF_LIVE_IN (bb));
  1400. bitmap_copy (&problem_data->out[bb->index], DF_LIVE_OUT (bb));
  1401. }
  1402. }
  1403. /* Compare the saved datastructure and the new solution to the dataflow
  1404. equations. */
  1405. static void
  1406. df_live_verify_solution_end (void)
  1407. {
  1408. struct df_live_problem_data *problem_data;
  1409. basic_block bb;
  1410. problem_data = (struct df_live_problem_data *)df_live->problem_data;
  1411. if (!problem_data->out)
  1412. return;
  1413. FOR_ALL_BB_FN (bb, cfun)
  1414. {
  1415. if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
  1416. || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
  1417. {
  1418. /*df_dump (stderr);*/
  1419. gcc_unreachable ();
  1420. }
  1421. }
  1422. /* Cannot delete them immediately because you may want to dump them
  1423. if the comparison fails. */
  1424. FOR_ALL_BB_FN (bb, cfun)
  1425. {
  1426. bitmap_clear (&problem_data->in[bb->index]);
  1427. bitmap_clear (&problem_data->out[bb->index]);
  1428. }
  1429. free (problem_data->in);
  1430. free (problem_data->out);
  1431. free (problem_data);
  1432. df_live->problem_data = NULL;
  1433. }
  1434. /* All of the information associated with every instance of the problem. */
  1435. static struct df_problem problem_LIVE =
  1436. {
  1437. DF_LIVE, /* Problem id. */
  1438. DF_FORWARD, /* Direction. */
  1439. df_live_alloc, /* Allocate the problem specific data. */
  1440. df_live_reset, /* Reset global information. */
  1441. df_live_free_bb_info, /* Free basic block info. */
  1442. df_live_local_compute, /* Local compute function. */
  1443. df_live_init, /* Init the solution specific data. */
  1444. df_worklist_dataflow, /* Worklist solver. */
  1445. NULL, /* Confluence operator 0. */
  1446. df_live_confluence_n, /* Confluence operator n. */
  1447. df_live_transfer_function, /* Transfer function. */
  1448. df_live_finalize, /* Finalize function. */
  1449. df_live_free, /* Free all of the problem information. */
  1450. df_live_free, /* Remove this problem from the stack of dataflow problems. */
  1451. NULL, /* Debugging. */
  1452. df_live_top_dump, /* Debugging start block. */
  1453. df_live_bottom_dump, /* Debugging end block. */
  1454. NULL, /* Debugging start insn. */
  1455. NULL, /* Debugging end insn. */
  1456. df_live_verify_solution_start,/* Incremental solution verify start. */
  1457. df_live_verify_solution_end, /* Incremental solution verify end. */
  1458. &problem_LR, /* Dependent problem. */
  1459. sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
  1460. TV_DF_LIVE, /* Timing variable. */
  1461. false /* Reset blocks on dropping out of blocks_to_analyze. */
  1462. };
1463. /* Create a new DATAFLOW instance and add it to an existing instance
1464. of DF. The solution is then accessed through the DF_LIVE_IN and
1465. DF_LIVE_OUT macros once the analysis has been run. */
  1466. void
  1467. df_live_add_problem (void)
  1468. {
  1469. df_add_problem (&problem_LIVE);
  1470. /* These will be initialized when df_scan_blocks processes each
  1471. block. */
  1472. df_live->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
  1473. }
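/* A minimal usage sketch (illustrative): a pass that needs the combined
   information calls

     df_live_add_problem ();
     df_analyze ();

   and then reads DF_LIVE_IN (bb) / DF_LIVE_OUT (bb), which contain only
   the registers that are both live according to LR and must-initialized
   at the corresponding block boundary.  */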
  1474. /* Set all of the blocks as dirty. This needs to be done if this
  1475. problem is added after all of the insns have been scanned. */
  1476. void
  1477. df_live_set_all_dirty (void)
  1478. {
  1479. basic_block bb;
  1480. FOR_ALL_BB_FN (bb, cfun)
  1481. bitmap_set_bit (df_live->out_of_date_transfer_functions,
  1482. bb->index);
  1483. }
1484. /* Verify that all of the live related info is consistent and
1485. correct. */
  1486. void
  1487. df_live_verify_transfer_functions (void)
  1488. {
  1489. basic_block bb;
  1490. bitmap_head saved_gen;
  1491. bitmap_head saved_kill;
  1492. bitmap_head all_blocks;
  1493. if (!df)
  1494. return;
  1495. bitmap_initialize (&saved_gen, &bitmap_default_obstack);
  1496. bitmap_initialize (&saved_kill, &bitmap_default_obstack);
  1497. bitmap_initialize (&all_blocks, &bitmap_default_obstack);
  1498. df_grow_insn_info ();
  1499. FOR_ALL_BB_FN (bb, cfun)
  1500. {
  1501. struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
  1502. bitmap_set_bit (&all_blocks, bb->index);
  1503. if (bb_info)
  1504. {
  1505. /* Make a copy of the transfer functions and then compute
  1506. new ones to see if the transfer functions have
  1507. changed. */
  1508. if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
  1509. bb->index))
  1510. {
  1511. bitmap_copy (&saved_gen, &bb_info->gen);
  1512. bitmap_copy (&saved_kill, &bb_info->kill);
  1513. bitmap_clear (&bb_info->gen);
  1514. bitmap_clear (&bb_info->kill);
  1515. df_live_bb_local_compute (bb->index);
  1516. gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
  1517. gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
  1518. }
  1519. }
  1520. else
  1521. {
1522. /* If we do not have basic block info, the block must be in
1523. the list of dirty blocks or else someone has added a
1524. block behind our back. */
  1525. gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
  1526. bb->index));
  1527. }
  1528. /* Make sure no one created a block without following
  1529. procedures. */
  1530. gcc_assert (df_scan_get_bb_info (bb->index));
  1531. }
  1532. /* Make sure there are no dirty bits in blocks that have been deleted. */
  1533. gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
  1534. &all_blocks));
  1535. bitmap_clear (&saved_gen);
  1536. bitmap_clear (&saved_kill);
  1537. bitmap_clear (&all_blocks);
  1538. }
  1539. /*----------------------------------------------------------------------------
  1540. CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
  1541. Link either the defs to the uses and / or the uses to the defs.
  1542. These problems are set up like the other dataflow problems so that
  1543. they nicely fit into the framework. They are much simpler and only
  1544. involve a single traversal of instructions and an examination of
  1545. the reaching defs information (the dependent problem).
  1546. ----------------------------------------------------------------------------*/
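/* A minimal sketch of how a client walks the chains (USE here stands for
   any df_ref obtained from an insn): with DF_UD_CHAIN enabled,

     df_chain_add_problem (DF_UD_CHAIN);
     df_analyze ();
     ...
     for (struct df_link *link = DF_REF_CHAIN (use); link; link = link->next)
       {
         df_ref def = link->ref;
         ...   (def is a definition that may reach this use)
       }

   With DF_DU_CHAIN the same walk from a def yields the uses it reaches.  */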
  1547. #define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
  1548. /* Create a du or ud chain from SRC to DST and link it into SRC. */
  1549. struct df_link *
  1550. df_chain_create (df_ref src, df_ref dst)
  1551. {
  1552. struct df_link *head = DF_REF_CHAIN (src);
  1553. struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool);
  1554. DF_REF_CHAIN (src) = link;
  1555. link->next = head;
  1556. link->ref = dst;
  1557. return link;
  1558. }
  1559. /* Delete any du or ud chains that start at REF and point to
  1560. TARGET. */
  1561. static void
  1562. df_chain_unlink_1 (df_ref ref, df_ref target)
  1563. {
  1564. struct df_link *chain = DF_REF_CHAIN (ref);
  1565. struct df_link *prev = NULL;
  1566. while (chain)
  1567. {
  1568. if (chain->ref == target)
  1569. {
  1570. if (prev)
  1571. prev->next = chain->next;
  1572. else
  1573. DF_REF_CHAIN (ref) = chain->next;
  1574. pool_free (df_chain->block_pool, chain);
  1575. return;
  1576. }
  1577. prev = chain;
  1578. chain = chain->next;
  1579. }
  1580. }
1581. /* Delete the du or ud chains that leave from or point to REF. */
  1582. void
  1583. df_chain_unlink (df_ref ref)
  1584. {
  1585. struct df_link *chain = DF_REF_CHAIN (ref);
  1586. while (chain)
  1587. {
  1588. struct df_link *next = chain->next;
  1589. /* Delete the other side if it exists. */
  1590. df_chain_unlink_1 (chain->ref, ref);
  1591. pool_free (df_chain->block_pool, chain);
  1592. chain = next;
  1593. }
  1594. DF_REF_CHAIN (ref) = NULL;
  1595. }
  1596. /* Copy the du or ud chain starting at FROM_REF and attach it to
  1597. TO_REF. */
  1598. void
  1599. df_chain_copy (df_ref to_ref,
  1600. struct df_link *from_ref)
  1601. {
  1602. while (from_ref)
  1603. {
  1604. df_chain_create (to_ref, from_ref->ref);
  1605. from_ref = from_ref->next;
  1606. }
  1607. }
  1608. /* Remove this problem from the stack of dataflow problems. */
  1609. static void
  1610. df_chain_remove_problem (void)
  1611. {
  1612. bitmap_iterator bi;
  1613. unsigned int bb_index;
  1614. /* Wholesale destruction of the old chains. */
  1615. if (df_chain->block_pool)
  1616. free_alloc_pool (df_chain->block_pool);
  1617. EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
  1618. {
  1619. rtx_insn *insn;
  1620. df_ref def, use;
  1621. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  1622. if (df_chain_problem_p (DF_DU_CHAIN))
  1623. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  1624. DF_REF_CHAIN (def) = NULL;
  1625. if (df_chain_problem_p (DF_UD_CHAIN))
  1626. FOR_EACH_ARTIFICIAL_USE (use, bb_index)
  1627. DF_REF_CHAIN (use) = NULL;
  1628. FOR_BB_INSNS (bb, insn)
  1629. if (INSN_P (insn))
  1630. {
  1631. df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  1632. if (df_chain_problem_p (DF_DU_CHAIN))
  1633. FOR_EACH_INSN_INFO_DEF (def, insn_info)
  1634. DF_REF_CHAIN (def) = NULL;
  1635. if (df_chain_problem_p (DF_UD_CHAIN))
  1636. {
  1637. FOR_EACH_INSN_INFO_USE (use, insn_info)
  1638. DF_REF_CHAIN (use) = NULL;
  1639. FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
  1640. DF_REF_CHAIN (use) = NULL;
  1641. }
  1642. }
  1643. }
  1644. bitmap_clear (df_chain->out_of_date_transfer_functions);
  1645. df_chain->block_pool = NULL;
  1646. }
  1647. /* Remove the chain problem completely. */
  1648. static void
  1649. df_chain_fully_remove_problem (void)
  1650. {
  1651. df_chain_remove_problem ();
  1652. BITMAP_FREE (df_chain->out_of_date_transfer_functions);
  1653. free (df_chain);
  1654. }
  1655. /* Create def-use or use-def chains. */
  1656. static void
  1657. df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
  1658. {
  1659. df_chain_remove_problem ();
  1660. df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
  1661. sizeof (struct df_link), 50);
  1662. df_chain->optional_p = true;
  1663. }
  1664. /* Reset all of the chains when the set of basic blocks changes. */
  1665. static void
  1666. df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
  1667. {
  1668. df_chain_remove_problem ();
  1669. }
  1670. /* Create the chains for a list of USEs. */
  1671. static void
  1672. df_chain_create_bb_process_use (bitmap local_rd,
  1673. df_ref use,
  1674. int top_flag)
  1675. {
  1676. bitmap_iterator bi;
  1677. unsigned int def_index;
  1678. for (; use; use = DF_REF_NEXT_LOC (use))
  1679. {
  1680. unsigned int uregno = DF_REF_REGNO (use);
  1681. if ((!(df->changeable_flags & DF_NO_HARD_REGS))
  1682. || (uregno >= FIRST_PSEUDO_REGISTER))
  1683. {
  1684. /* Do not want to go through this for an uninitialized var. */
  1685. int count = DF_DEFS_COUNT (uregno);
  1686. if (count)
  1687. {
  1688. if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
  1689. {
  1690. unsigned int first_index = DF_DEFS_BEGIN (uregno);
  1691. unsigned int last_index = first_index + count - 1;
  1692. EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
  1693. {
  1694. df_ref def;
  1695. if (def_index > last_index)
  1696. break;
  1697. def = DF_DEFS_GET (def_index);
  1698. if (df_chain_problem_p (DF_DU_CHAIN))
  1699. df_chain_create (def, use);
  1700. if (df_chain_problem_p (DF_UD_CHAIN))
  1701. df_chain_create (use, def);
  1702. }
  1703. }
  1704. }
  1705. }
  1706. }
  1707. }
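/* For instance (a sketch of the indexing used above): if pseudo 57 has
   three defs in the function, their ids occupy the contiguous range
   [DF_DEFS_BEGIN (57), DF_DEFS_BEGIN (57) + 2] in the reaching-definitions
   bitmaps, so the loop only has to scan that window of LOCAL_RD to find
   the defs of register 57 that reach the use.  */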
  1708. /* Create chains from reaching defs bitmaps for basic block BB. */
  1709. static void
  1710. df_chain_create_bb (unsigned int bb_index)
  1711. {
  1712. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  1713. struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
  1714. rtx_insn *insn;
  1715. bitmap_head cpy;
  1716. bitmap_initialize (&cpy, &bitmap_default_obstack);
  1717. bitmap_copy (&cpy, &bb_info->in);
  1718. bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
  1719. /* Since we are going forwards, process the artificial uses first
  1720. then the artificial defs second. */
  1721. #ifdef EH_USES
  1722. /* Create the chains for the artificial uses from the EH_USES at the
  1723. beginning of the block. */
  1724. /* Artificials are only hard regs. */
  1725. if (!(df->changeable_flags & DF_NO_HARD_REGS))
  1726. df_chain_create_bb_process_use (&cpy,
  1727. df_get_artificial_uses (bb->index),
  1728. DF_REF_AT_TOP);
  1729. #endif
  1730. df_rd_simulate_artificial_defs_at_top (bb, &cpy);
  1731. /* Process the regular instructions next. */
  1732. FOR_BB_INSNS (bb, insn)
  1733. if (INSN_P (insn))
  1734. {
  1735. unsigned int uid = INSN_UID (insn);
  1736. /* First scan the uses and link them up with the defs that remain
  1737. in the cpy vector. */
  1738. df_chain_create_bb_process_use (&cpy, DF_INSN_UID_USES (uid), 0);
  1739. if (df->changeable_flags & DF_EQ_NOTES)
  1740. df_chain_create_bb_process_use (&cpy, DF_INSN_UID_EQ_USES (uid), 0);
  1741. /* Since we are going forwards, process the defs second. */
  1742. df_rd_simulate_one_insn (bb, insn, &cpy);
  1743. }
  1744. /* Create the chains for the artificial uses of the hard registers
  1745. at the end of the block. */
  1746. if (!(df->changeable_flags & DF_NO_HARD_REGS))
  1747. df_chain_create_bb_process_use (&cpy,
  1748. df_get_artificial_uses (bb->index),
  1749. 0);
  1750. bitmap_clear (&cpy);
  1751. }
1752. /* Create def-use and/or use-def chains from the reaching-defs bitmaps
1753. for the basic blocks in BLOCKS. */
  1754. static void
  1755. df_chain_finalize (bitmap all_blocks)
  1756. {
  1757. unsigned int bb_index;
  1758. bitmap_iterator bi;
  1759. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  1760. {
  1761. df_chain_create_bb (bb_index);
  1762. }
  1763. }
  1764. /* Free all storage associated with the problem. */
  1765. static void
  1766. df_chain_free (void)
  1767. {
  1768. free_alloc_pool (df_chain->block_pool);
  1769. BITMAP_FREE (df_chain->out_of_date_transfer_functions);
  1770. free (df_chain);
  1771. }
  1772. /* Debugging info. */
  1773. static void
  1774. df_chain_bb_dump (basic_block bb, FILE *file, bool top)
  1775. {
  1776. /* Artificials are only hard regs. */
  1777. if (df->changeable_flags & DF_NO_HARD_REGS)
  1778. return;
  1779. if (df_chain_problem_p (DF_UD_CHAIN))
  1780. {
  1781. df_ref use;
  1782. fprintf (file,
  1783. ";; UD chains for artificial uses at %s\n",
  1784. top ? "top" : "bottom");
  1785. FOR_EACH_ARTIFICIAL_USE (use, bb->index)
  1786. if ((top && (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
  1787. || (!top && !(DF_REF_FLAGS (use) & DF_REF_AT_TOP)))
  1788. {
  1789. fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
  1790. df_chain_dump (DF_REF_CHAIN (use), file);
  1791. fprintf (file, "\n");
  1792. }
  1793. }
  1794. if (df_chain_problem_p (DF_DU_CHAIN))
  1795. {
  1796. df_ref def;
  1797. fprintf (file,
  1798. ";; DU chains for artificial defs at %s\n",
  1799. top ? "top" : "bottom");
  1800. FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
  1801. if ((top && (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
  1802. || (!top && !(DF_REF_FLAGS (def) & DF_REF_AT_TOP)))
  1803. {
  1804. fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
  1805. df_chain_dump (DF_REF_CHAIN (def), file);
  1806. fprintf (file, "\n");
  1807. }
  1808. }
  1809. }
  1810. static void
  1811. df_chain_top_dump (basic_block bb, FILE *file)
  1812. {
  1813. df_chain_bb_dump (bb, file, /*top=*/true);
  1814. }
  1815. static void
  1816. df_chain_bottom_dump (basic_block bb, FILE *file)
  1817. {
  1818. df_chain_bb_dump (bb, file, /*top=*/false);
  1819. }
  1820. static void
  1821. df_chain_insn_top_dump (const rtx_insn *insn, FILE *file)
  1822. {
  1823. if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn))
  1824. {
  1825. struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  1826. df_ref use;
  1827. fprintf (file, ";; UD chains for insn luid %d uid %d\n",
  1828. DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
  1829. FOR_EACH_INSN_INFO_USE (use, insn_info)
  1830. if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
  1831. || !(df->changeable_flags & DF_NO_HARD_REGS))
  1832. {
  1833. fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
  1834. if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
  1835. fprintf (file, "read/write ");
  1836. df_chain_dump (DF_REF_CHAIN (use), file);
  1837. fprintf (file, "\n");
  1838. }
  1839. FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
  1840. if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
  1841. || !(df->changeable_flags & DF_NO_HARD_REGS))
  1842. {
  1843. fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
  1844. df_chain_dump (DF_REF_CHAIN (use), file);
  1845. fprintf (file, "\n");
  1846. }
  1847. }
  1848. }
  1849. static void
  1850. df_chain_insn_bottom_dump (const rtx_insn *insn, FILE *file)
  1851. {
  1852. if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn))
  1853. {
  1854. struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  1855. df_ref def;
  1856. fprintf (file, ";; DU chains for insn luid %d uid %d\n",
  1857. DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
  1858. FOR_EACH_INSN_INFO_DEF (def, insn_info)
  1859. if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
  1860. || !(df->changeable_flags & DF_NO_HARD_REGS))
  1861. {
  1862. fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
  1863. if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
  1864. fprintf (file, "read/write ");
  1865. df_chain_dump (DF_REF_CHAIN (def), file);
  1866. fprintf (file, "\n");
  1867. }
  1868. fprintf (file, "\n");
  1869. }
  1870. }
  1871. static struct df_problem problem_CHAIN =
  1872. {
  1873. DF_CHAIN, /* Problem id. */
  1874. DF_NONE, /* Direction. */
  1875. df_chain_alloc, /* Allocate the problem specific data. */
  1876. df_chain_reset, /* Reset global information. */
  1877. NULL, /* Free basic block info. */
  1878. NULL, /* Local compute function. */
  1879. NULL, /* Init the solution specific data. */
  1880. NULL, /* Iterative solver. */
  1881. NULL, /* Confluence operator 0. */
  1882. NULL, /* Confluence operator n. */
  1883. NULL, /* Transfer function. */
  1884. df_chain_finalize, /* Finalize function. */
  1885. df_chain_free, /* Free all of the problem information. */
  1886. df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
  1887. NULL, /* Debugging. */
  1888. df_chain_top_dump, /* Debugging start block. */
  1889. df_chain_bottom_dump, /* Debugging end block. */
  1890. df_chain_insn_top_dump, /* Debugging start insn. */
  1891. df_chain_insn_bottom_dump, /* Debugging end insn. */
  1892. NULL, /* Incremental solution verify start. */
  1893. NULL, /* Incremental solution verify end. */
  1894. &problem_RD, /* Dependent problem. */
  1895. sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
  1896. TV_DF_CHAIN, /* Timing variable. */
  1897. false /* Reset blocks on dropping out of blocks_to_analyze. */
  1898. };
1899. /* Create a new DATAFLOW instance and add it to an existing instance
1900. of DF. The chains themselves are then reached through the
1901. DF_REF_CHAIN macro on individual defs and uses. */
  1902. void
  1903. df_chain_add_problem (unsigned int chain_flags)
  1904. {
  1905. df_add_problem (&problem_CHAIN);
  1906. df_chain->local_flags = chain_flags;
  1907. df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
  1908. }
  1909. #undef df_chain_problem_p
  1910. /*----------------------------------------------------------------------------
  1911. WORD LEVEL LIVE REGISTERS
  1912. Find the locations in the function where any use of a pseudo can
  1913. reach in the backwards direction. In and out bitvectors are built
  1914. for each basic block. We only track pseudo registers that have a
  1915. size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and
  1916. contain two bits corresponding to each of the subwords.
  1917. ----------------------------------------------------------------------------*/
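/* For example (illustrative only): on a target with 32-bit words, a
   DImode pseudo with regno 100 is tracked by two bits, bit 200 for the
   low word and bit 201 for the high word.  Hard registers and pseudos
   of any other size are not tracked; df_word_lr_mark_ref returns true
   for them so that callers treat such references conservatively.  */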
  1918. /* Private data used to verify the solution for this problem. */
  1919. struct df_word_lr_problem_data
  1920. {
  1921. /* An obstack for the bitmaps we need for this problem. */
  1922. bitmap_obstack word_lr_bitmaps;
  1923. };
  1924. /* Free basic block info. */
  1925. static void
  1926. df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
  1927. void *vbb_info)
  1928. {
  1929. struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
  1930. if (bb_info)
  1931. {
  1932. bitmap_clear (&bb_info->use);
  1933. bitmap_clear (&bb_info->def);
  1934. bitmap_clear (&bb_info->in);
  1935. bitmap_clear (&bb_info->out);
  1936. }
  1937. }
  1938. /* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are
  1939. not touched unless the block is new. */
  1940. static void
  1941. df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
  1942. {
  1943. unsigned int bb_index;
  1944. bitmap_iterator bi;
  1945. basic_block bb;
  1946. struct df_word_lr_problem_data *problem_data
  1947. = XNEW (struct df_word_lr_problem_data);
  1948. df_word_lr->problem_data = problem_data;
  1949. df_grow_bb_info (df_word_lr);
  1950. /* Create the mapping from regnos to slots. This does not change
  1951. unless the problem is destroyed and recreated. In particular, if
  1952. we end up deleting the only insn that used a subreg, we do not
  1953. want to redo the mapping because this would invalidate everything
  1954. else. */
  1955. bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
  1956. FOR_EACH_BB_FN (bb, cfun)
  1957. bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
  1958. bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
  1959. bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
  1960. EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
  1961. {
  1962. struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
  1963. /* When bitmaps are already initialized, just clear them. */
  1964. if (bb_info->use.obstack)
  1965. {
  1966. bitmap_clear (&bb_info->def);
  1967. bitmap_clear (&bb_info->use);
  1968. }
  1969. else
  1970. {
  1971. bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps);
  1972. bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps);
  1973. bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps);
  1974. bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps);
  1975. }
  1976. }
  1977. df_word_lr->optional_p = true;
  1978. }
  1979. /* Reset the global solution for recalculation. */
  1980. static void
  1981. df_word_lr_reset (bitmap all_blocks)
  1982. {
  1983. unsigned int bb_index;
  1984. bitmap_iterator bi;
  1985. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  1986. {
  1987. struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
  1988. gcc_assert (bb_info);
  1989. bitmap_clear (&bb_info->in);
  1990. bitmap_clear (&bb_info->out);
  1991. }
  1992. }
  1993. /* Examine REF, and if it is for a reg we're interested in, set or
  1994. clear the bits corresponding to its subwords from the bitmap
  1995. according to IS_SET. LIVE is the bitmap we should update. We do
  1996. not track hard regs or pseudos of any size other than 2 *
  1997. UNITS_PER_WORD.
  1998. We return true if we changed the bitmap, or if we encountered a register
  1999. we're not tracking. */
  2000. bool
  2001. df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
  2002. {
  2003. rtx orig_reg = DF_REF_REG (ref);
  2004. rtx reg = orig_reg;
  2005. machine_mode reg_mode;
  2006. unsigned regno;
  2007. /* Left at -1 for whole accesses. */
  2008. int which_subword = -1;
  2009. bool changed = false;
  2010. if (GET_CODE (reg) == SUBREG)
  2011. reg = SUBREG_REG (orig_reg);
  2012. regno = REGNO (reg);
  2013. reg_mode = GET_MODE (reg);
  2014. if (regno < FIRST_PSEUDO_REGISTER
  2015. || GET_MODE_SIZE (reg_mode) != 2 * UNITS_PER_WORD)
  2016. return true;
  2017. if (GET_CODE (orig_reg) == SUBREG
  2018. && df_read_modify_subreg_p (orig_reg))
  2019. {
  2020. gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL));
  2021. if (subreg_lowpart_p (orig_reg))
  2022. which_subword = 0;
  2023. else
  2024. which_subword = 1;
  2025. }
  2026. if (is_set)
  2027. {
  2028. if (which_subword != 1)
  2029. changed |= bitmap_set_bit (live, regno * 2);
  2030. if (which_subword != 0)
  2031. changed |= bitmap_set_bit (live, regno * 2 + 1);
  2032. }
  2033. else
  2034. {
  2035. if (which_subword != 1)
  2036. changed |= bitmap_clear_bit (live, regno * 2);
  2037. if (which_subword != 0)
  2038. changed |= bitmap_clear_bit (live, regno * 2 + 1);
  2039. }
  2040. return changed;
  2041. }
  2042. /* Compute local live register info for basic block BB. */
  2043. static void
  2044. df_word_lr_bb_local_compute (unsigned int bb_index)
  2045. {
  2046. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  2047. struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
  2048. rtx_insn *insn;
  2049. df_ref def, use;
  2050. /* Ensure that artificial refs don't contain references to pseudos. */
  2051. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  2052. gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
  2053. FOR_EACH_ARTIFICIAL_USE (use, bb_index)
  2054. gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
  2055. FOR_BB_INSNS_REVERSE (bb, insn)
  2056. {
  2057. if (!NONDEBUG_INSN_P (insn))
  2058. continue;
  2059. df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  2060. FOR_EACH_INSN_INFO_DEF (def, insn_info)
  2061. /* If the def is to only part of the reg, it does
  2062. not kill the other defs that reach here. */
  2063. if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
  2064. {
  2065. df_word_lr_mark_ref (def, true, &bb_info->def);
  2066. df_word_lr_mark_ref (def, false, &bb_info->use);
  2067. }
  2068. FOR_EACH_INSN_INFO_USE (use, insn_info)
  2069. df_word_lr_mark_ref (use, true, &bb_info->use);
  2070. }
  2071. }
  2072. /* Compute local live register info for each basic block within BLOCKS. */
  2073. static void
  2074. df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
  2075. {
  2076. unsigned int bb_index;
  2077. bitmap_iterator bi;
  2078. EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
  2079. {
  2080. if (bb_index == EXIT_BLOCK)
  2081. {
  2082. unsigned regno;
  2083. bitmap_iterator bi;
  2084. EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER,
  2085. regno, bi)
  2086. gcc_unreachable ();
  2087. }
  2088. else
  2089. df_word_lr_bb_local_compute (bb_index);
  2090. }
  2091. bitmap_clear (df_word_lr->out_of_date_transfer_functions);
  2092. }
  2093. /* Initialize the solution vectors. */
  2094. static void
  2095. df_word_lr_init (bitmap all_blocks)
  2096. {
  2097. unsigned int bb_index;
  2098. bitmap_iterator bi;
  2099. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  2100. {
  2101. struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
  2102. bitmap_copy (&bb_info->in, &bb_info->use);
  2103. bitmap_clear (&bb_info->out);
  2104. }
  2105. }
2106. /* Confluence function: OUT (src) |= IN (dest). */
  2107. static bool
  2108. df_word_lr_confluence_n (edge e)
  2109. {
  2110. bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out;
  2111. bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in;
  2112. return bitmap_ior_into (op1, op2);
  2113. }
2114. /* Transfer function: IN = USE | (OUT & ~DEF). */
  2115. static bool
  2116. df_word_lr_transfer_function (int bb_index)
  2117. {
  2118. struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
  2119. bitmap in = &bb_info->in;
  2120. bitmap out = &bb_info->out;
  2121. bitmap use = &bb_info->use;
  2122. bitmap def = &bb_info->def;
  2123. return bitmap_ior_and_compl (in, use, out, def);
  2124. }
  2125. /* Free all storage associated with the problem. */
  2126. static void
  2127. df_word_lr_free (void)
  2128. {
  2129. struct df_word_lr_problem_data *problem_data
  2130. = (struct df_word_lr_problem_data *)df_word_lr->problem_data;
  2131. if (df_word_lr->block_info)
  2132. {
  2133. df_word_lr->block_info_size = 0;
  2134. free (df_word_lr->block_info);
  2135. df_word_lr->block_info = NULL;
  2136. }
  2137. BITMAP_FREE (df_word_lr->out_of_date_transfer_functions);
  2138. bitmap_obstack_release (&problem_data->word_lr_bitmaps);
  2139. free (problem_data);
  2140. free (df_word_lr);
  2141. }
  2142. /* Debugging info at top of bb. */
  2143. static void
  2144. df_word_lr_top_dump (basic_block bb, FILE *file)
  2145. {
  2146. struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
  2147. if (!bb_info)
  2148. return;
  2149. fprintf (file, ";; blr in \t");
  2150. df_print_word_regset (file, &bb_info->in);
  2151. fprintf (file, ";; blr use \t");
  2152. df_print_word_regset (file, &bb_info->use);
  2153. fprintf (file, ";; blr def \t");
  2154. df_print_word_regset (file, &bb_info->def);
  2155. }
  2156. /* Debugging info at bottom of bb. */
  2157. static void
  2158. df_word_lr_bottom_dump (basic_block bb, FILE *file)
  2159. {
  2160. struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
  2161. if (!bb_info)
  2162. return;
  2163. fprintf (file, ";; blr out \t");
  2164. df_print_word_regset (file, &bb_info->out);
  2165. }
  2166. /* All of the information associated with every instance of the problem. */
  2167. static struct df_problem problem_WORD_LR =
  2168. {
  2169. DF_WORD_LR, /* Problem id. */
  2170. DF_BACKWARD, /* Direction. */
  2171. df_word_lr_alloc, /* Allocate the problem specific data. */
  2172. df_word_lr_reset, /* Reset global information. */
  2173. df_word_lr_free_bb_info, /* Free basic block info. */
  2174. df_word_lr_local_compute, /* Local compute function. */
  2175. df_word_lr_init, /* Init the solution specific data. */
  2176. df_worklist_dataflow, /* Worklist solver. */
  2177. NULL, /* Confluence operator 0. */
  2178. df_word_lr_confluence_n, /* Confluence operator n. */
  2179. df_word_lr_transfer_function, /* Transfer function. */
  2180. NULL, /* Finalize function. */
  2181. df_word_lr_free, /* Free all of the problem information. */
  2182. df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */
  2183. NULL, /* Debugging. */
  2184. df_word_lr_top_dump, /* Debugging start block. */
  2185. df_word_lr_bottom_dump, /* Debugging end block. */
  2186. NULL, /* Debugging start insn. */
  2187. NULL, /* Debugging end insn. */
  2188. NULL, /* Incremental solution verify start. */
  2189. NULL, /* Incremental solution verify end. */
  2190. NULL, /* Dependent problem. */
  2191. sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
  2192. TV_DF_WORD_LR, /* Timing variable. */
  2193. false /* Reset blocks on dropping out of blocks_to_analyze. */
  2194. };
  2195. /* Create a new DATAFLOW instance and add it to an existing instance
  2196. of DF. The returned structure is what is used to get at the
  2197. solution. */
  2198. void
  2199. df_word_lr_add_problem (void)
  2200. {
  2201. df_add_problem (&problem_WORD_LR);
  2202. /* These will be initialized when df_scan_blocks processes each
  2203. block. */
  2204. df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
  2205. }
  2206. /* Simulate the effects of the defs of INSN on LIVE. Return true if we changed
  2207. any bits, which is used by the caller to determine whether a set is
  2208. necessary. We also return true if there are other reasons not to delete
  2209. an insn. */
  2210. bool
  2211. df_word_lr_simulate_defs (rtx_insn *insn, bitmap live)
  2212. {
  2213. bool changed = false;
  2214. df_ref def;
  2215. FOR_EACH_INSN_DEF (def, insn)
  2216. if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
  2217. changed = true;
  2218. else
  2219. changed |= df_word_lr_mark_ref (def, false, live);
  2220. return changed;
  2221. }
  2222. /* Simulate the effects of the uses of INSN on LIVE. */
  2223. void
  2224. df_word_lr_simulate_uses (rtx_insn *insn, bitmap live)
  2225. {
  2226. df_ref use;
  2227. FOR_EACH_INSN_USE (use, insn)
  2228. df_word_lr_mark_ref (use, true, live);
  2229. }
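/* A minimal sketch of the intended use of the two simulate functions
   above (in the style of word-level dead code elimination; BB and INSN
   are assumed to come from the caller):

     bitmap_head live;
     bitmap_initialize (&live, &bitmap_default_obstack);
     bitmap_copy (&live, DF_WORD_LR_OUT (bb));
     FOR_BB_INSNS_REVERSE (bb, insn)
       if (NONDEBUG_INSN_P (insn))
         {
           bool needed = df_word_lr_simulate_defs (insn, &live);
           if (needed)
             df_word_lr_simulate_uses (insn, &live);
           ...   (otherwise both words of every reg set here are dead)
         }
     bitmap_clear (&live);
*/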
  2230. /*----------------------------------------------------------------------------
  2231. This problem computes REG_DEAD and REG_UNUSED notes.
  2232. ----------------------------------------------------------------------------*/
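/* For example: given an insn

     (set (reg 120) (plus (reg 121) (reg 122)))

   reg 121 and reg 122 each get a REG_DEAD note when they are not live
   after the insn, and reg 120 gets a REG_UNUSED note when the value just
   computed is never used afterwards.  */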
  2233. static void
  2234. df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
  2235. {
  2236. df_note->optional_p = true;
  2237. }
  2238. /* This is only used if REG_DEAD_DEBUGGING is in effect. */
  2239. static void
  2240. df_print_note (const char *prefix, rtx_insn *insn, rtx note)
  2241. {
  2242. if (dump_file)
  2243. {
  2244. fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
  2245. print_rtl (dump_file, note);
  2246. fprintf (dump_file, "\n");
  2247. }
  2248. }
  2249. /* After reg-stack, the x86 floating point stack regs are difficult to
  2250. analyze because of all of the pushes, pops and rotations. Thus, we
  2251. just leave the notes alone. */
  2252. #ifdef STACK_REGS
  2253. static inline bool
  2254. df_ignore_stack_reg (int regno)
  2255. {
  2256. return regstack_completed
  2257. && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
  2258. }
  2259. #else
  2260. static inline bool
  2261. df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
  2262. {
  2263. return false;
  2264. }
  2265. #endif
  2266. /* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. */
  2267. static void
  2268. df_remove_dead_and_unused_notes (rtx_insn *insn)
  2269. {
  2270. rtx *pprev = &REG_NOTES (insn);
  2271. rtx link = *pprev;
  2272. while (link)
  2273. {
  2274. switch (REG_NOTE_KIND (link))
  2275. {
  2276. case REG_DEAD:
2277. /* After reg-stack, we need to ignore any dead notes
2278. for the stack registers. */
  2279. if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
  2280. {
  2281. pprev = &XEXP (link, 1);
  2282. link = *pprev;
  2283. }
  2284. else
  2285. {
  2286. rtx next = XEXP (link, 1);
  2287. if (REG_DEAD_DEBUGGING)
  2288. df_print_note ("deleting: ", insn, link);
  2289. free_EXPR_LIST_node (link);
  2290. *pprev = link = next;
  2291. }
  2292. break;
  2293. case REG_UNUSED:
  2294. /* After reg-stack, we need to ignore any unused notes
  2295. for the stack registers. */
  2296. if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
  2297. {
  2298. pprev = &XEXP (link, 1);
  2299. link = *pprev;
  2300. }
  2301. else
  2302. {
  2303. rtx next = XEXP (link, 1);
  2304. if (REG_DEAD_DEBUGGING)
  2305. df_print_note ("deleting: ", insn, link);
  2306. free_EXPR_LIST_node (link);
  2307. *pprev = link = next;
  2308. }
  2309. break;
  2310. default:
  2311. pprev = &XEXP (link, 1);
  2312. link = *pprev;
  2313. break;
  2314. }
  2315. }
  2316. }
  2317. /* Remove REG_EQUAL/REG_EQUIV notes referring to dead pseudos using LIVE
  2318. as the bitmap of currently live registers. */
  2319. static void
  2320. df_remove_dead_eq_notes (rtx_insn *insn, bitmap live)
  2321. {
  2322. rtx *pprev = &REG_NOTES (insn);
  2323. rtx link = *pprev;
  2324. while (link)
  2325. {
  2326. switch (REG_NOTE_KIND (link))
  2327. {
  2328. case REG_EQUAL:
  2329. case REG_EQUIV:
  2330. {
  2331. /* Remove the notes that refer to dead registers. As we have at most
  2332. one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
  2333. so we need to purge the complete EQ_USES vector when removing
  2334. the note using df_notes_rescan. */
  2335. df_ref use;
  2336. bool deleted = false;
  2337. FOR_EACH_INSN_EQ_USE (use, insn)
  2338. if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
  2339. && DF_REF_LOC (use)
  2340. && (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
  2341. && !bitmap_bit_p (live, DF_REF_REGNO (use))
  2342. && loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
  2343. {
  2344. deleted = true;
  2345. break;
  2346. }
  2347. if (deleted)
  2348. {
  2349. rtx next;
  2350. if (REG_DEAD_DEBUGGING)
  2351. df_print_note ("deleting: ", insn, link);
  2352. next = XEXP (link, 1);
  2353. free_EXPR_LIST_node (link);
  2354. *pprev = link = next;
  2355. df_notes_rescan (insn);
  2356. }
  2357. else
  2358. {
  2359. pprev = &XEXP (link, 1);
  2360. link = *pprev;
  2361. }
  2362. break;
  2363. }
  2364. default:
  2365. pprev = &XEXP (link, 1);
  2366. link = *pprev;
  2367. break;
  2368. }
  2369. }
  2370. }
  2371. /* Set a NOTE_TYPE note for REG in INSN. */
  2372. static inline void
  2373. df_set_note (enum reg_note note_type, rtx insn, rtx reg)
  2374. {
  2375. gcc_checking_assert (!DEBUG_INSN_P (insn));
  2376. add_reg_note (insn, note_type, reg);
  2377. }
  2378. /* A subroutine of df_set_unused_notes_for_mw, with a selection of its
  2379. arguments. Return true if the register value described by MWS's
  2380. mw_reg is known to be completely unused, and if mw_reg can therefore
  2381. be used in a REG_UNUSED note. */
  2382. static bool
  2383. df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
  2384. bitmap live, bitmap artificial_uses)
  2385. {
  2386. unsigned int r;
  2387. /* If MWS describes a partial reference, create REG_UNUSED notes for
  2388. individual hard registers. */
  2389. if (mws->flags & DF_REF_PARTIAL)
  2390. return false;
  2391. /* Likewise if some part of the register is used. */
  2392. for (r = mws->start_regno; r <= mws->end_regno; r++)
  2393. if (bitmap_bit_p (live, r)
  2394. || bitmap_bit_p (artificial_uses, r))
  2395. return false;
  2396. gcc_assert (REG_P (mws->mw_reg));
  2397. return true;
  2398. }
  2399. /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
  2400. based on the bits in LIVE. Do not generate notes for registers in
  2401. artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
  2402. not generated if the reg is both read and written by the
  2403. instruction.
  2404. */
  2405. static void
  2406. df_set_unused_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws,
  2407. bitmap live, bitmap do_not_gen,
  2408. bitmap artificial_uses,
  2409. struct dead_debug_local *debug)
  2410. {
  2411. unsigned int r;
  2412. if (REG_DEAD_DEBUGGING && dump_file)
  2413. fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
  2414. mws->start_regno, mws->end_regno);
  2415. if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
  2416. {
  2417. unsigned int regno = mws->start_regno;
  2418. df_set_note (REG_UNUSED, insn, mws->mw_reg);
  2419. dead_debug_insert_temp (debug, regno, insn, DEBUG_TEMP_AFTER_WITH_REG);
  2420. if (REG_DEAD_DEBUGGING)
  2421. df_print_note ("adding 1: ", insn, REG_NOTES (insn));
  2422. bitmap_set_bit (do_not_gen, regno);
  2423. /* Only do this if the value is totally dead. */
  2424. }
  2425. else
  2426. for (r = mws->start_regno; r <= mws->end_regno; r++)
  2427. {
  2428. if (!bitmap_bit_p (live, r)
  2429. && !bitmap_bit_p (artificial_uses, r))
  2430. {
  2431. df_set_note (REG_UNUSED, insn, regno_reg_rtx[r]);
  2432. dead_debug_insert_temp (debug, r, insn, DEBUG_TEMP_AFTER_WITH_REG);
  2433. if (REG_DEAD_DEBUGGING)
  2434. df_print_note ("adding 2: ", insn, REG_NOTES (insn));
  2435. }
  2436. bitmap_set_bit (do_not_gen, r);
  2437. }
  2438. }
  2439. /* A subroutine of df_set_dead_notes_for_mw, with a selection of its
  2440. arguments. Return true if the register value described by MWS's
  2441. mw_reg is known to be completely dead, and if mw_reg can therefore
  2442. be used in a REG_DEAD note. */
  2443. static bool
  2444. df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws,
  2445. bitmap live, bitmap artificial_uses,
  2446. bitmap do_not_gen)
  2447. {
  2448. unsigned int r;
  2449. /* If MWS describes a partial reference, create REG_DEAD notes for
  2450. individual hard registers. */
  2451. if (mws->flags & DF_REF_PARTIAL)
  2452. return false;
  2453. /* Likewise if some part of the register is not dead. */
  2454. for (r = mws->start_regno; r <= mws->end_regno; r++)
  2455. if (bitmap_bit_p (live, r)
  2456. || bitmap_bit_p (artificial_uses, r)
  2457. || bitmap_bit_p (do_not_gen, r))
  2458. return false;
  2459. gcc_assert (REG_P (mws->mw_reg));
  2460. return true;
  2461. }
  2462. /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
  2463. on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
  2464. from being set if the instruction both reads and writes the
  2465. register. */
  2466. static void
  2467. df_set_dead_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws,
  2468. bitmap live, bitmap do_not_gen,
  2469. bitmap artificial_uses, bool *added_notes_p)
  2470. {
  2471. unsigned int r;
  2472. bool is_debug = *added_notes_p;
  2473. *added_notes_p = false;
  2474. if (REG_DEAD_DEBUGGING && dump_file)
  2475. {
  2476. fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
  2477. mws->start_regno, mws->end_regno);
  2478. df_print_regset (dump_file, do_not_gen);
  2479. fprintf (dump_file, " live =");
  2480. df_print_regset (dump_file, live);
  2481. fprintf (dump_file, " artificial uses =");
  2482. df_print_regset (dump_file, artificial_uses);
  2483. }
  2484. if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen))
  2485. {
  2486. if (is_debug)
  2487. {
  2488. *added_notes_p = true;
  2489. return;
  2490. }
  2491. /* Add a dead note for the entire multi word register. */
  2492. df_set_note (REG_DEAD, insn, mws->mw_reg);
  2493. if (REG_DEAD_DEBUGGING)
  2494. df_print_note ("adding 1: ", insn, REG_NOTES (insn));
  2495. }
  2496. else
  2497. {
  2498. for (r = mws->start_regno; r <= mws->end_regno; r++)
  2499. if (!bitmap_bit_p (live, r)
  2500. && !bitmap_bit_p (artificial_uses, r)
  2501. && !bitmap_bit_p (do_not_gen, r))
  2502. {
  2503. if (is_debug)
  2504. {
  2505. *added_notes_p = true;
  2506. return;
  2507. }
  2508. df_set_note (REG_DEAD, insn, regno_reg_rtx[r]);
  2509. if (REG_DEAD_DEBUGGING)
  2510. df_print_note ("adding 2: ", insn, REG_NOTES (insn));
  2511. }
  2512. }
  2513. return;
  2514. }
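/* A worked example of the note placement above (illustrative; the register
   numbers are hypothetical).  Suppose (reg:DI 2) occupies hard registers 2
   and 3, and an insn contains a non-partial multiword use of it.  If neither
   register 2 nor 3 is live after the insn (and neither is in DO_NOT_GEN or
   ARTIFICIAL_USES), df_whole_mw_reg_dead_p returns true and a single
   REG_DEAD note is attached for (reg:DI 2).  If only register 3 is dead,
   a whole-register note would be wrong, so df_set_dead_notes_for_mw falls
   back to a note for the individual hard register, REG_DEAD
   (regno_reg_rtx[3]).  */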
/* Create a REG_UNUSED note if necessary for DEF in INSN, based on
   LIVE.  Do not generate notes for registers in ARTIFICIAL_USES.  */
  2517. static void
  2518. df_create_unused_note (rtx_insn *insn, df_ref def,
  2519. bitmap live, bitmap artificial_uses,
  2520. struct dead_debug_local *debug)
  2521. {
  2522. unsigned int dregno = DF_REF_REGNO (def);
  2523. if (REG_DEAD_DEBUGGING && dump_file)
  2524. {
  2525. fprintf (dump_file, " regular looking at def ");
  2526. df_ref_debug (def, dump_file);
  2527. }
  2528. if (!((DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
  2529. || bitmap_bit_p (live, dregno)
  2530. || bitmap_bit_p (artificial_uses, dregno)
  2531. || df_ignore_stack_reg (dregno)))
  2532. {
  2533. rtx reg = (DF_REF_LOC (def))
  2534. ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
  2535. df_set_note (REG_UNUSED, insn, reg);
  2536. dead_debug_insert_temp (debug, dregno, insn, DEBUG_TEMP_AFTER_WITH_REG);
  2537. if (REG_DEAD_DEBUGGING)
  2538. df_print_note ("adding 3: ", insn, REG_NOTES (insn));
  2539. }
  2540. return;
  2541. }
/* Recompute the REG_DEAD and REG_UNUSED notes for the insns in basic
   block BB.  LIVE, DO_NOT_GEN and ARTIFICIAL_USES are scratch bitmaps
   used here.  */
  2545. static void
  2546. df_note_bb_compute (unsigned int bb_index,
  2547. bitmap live, bitmap do_not_gen, bitmap artificial_uses)
  2548. {
  2549. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  2550. rtx_insn *insn;
  2551. df_ref def, use;
  2552. struct dead_debug_local debug;
  2553. dead_debug_local_init (&debug, NULL, NULL);
  2554. bitmap_copy (live, df_get_live_out (bb));
  2555. bitmap_clear (artificial_uses);
  2556. if (REG_DEAD_DEBUGGING && dump_file)
  2557. {
  2558. fprintf (dump_file, "live at bottom ");
  2559. df_print_regset (dump_file, live);
  2560. }
  2561. /* Process the artificial defs and uses at the bottom of the block
  2562. to begin processing. */
  2563. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  2564. {
  2565. if (REG_DEAD_DEBUGGING && dump_file)
  2566. fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
  2567. if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
  2568. bitmap_clear_bit (live, DF_REF_REGNO (def));
  2569. }
  2570. FOR_EACH_ARTIFICIAL_USE (use, bb_index)
  2571. if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
  2572. {
  2573. unsigned int regno = DF_REF_REGNO (use);
  2574. bitmap_set_bit (live, regno);
  2575. /* Notes are not generated for any of the artificial registers
  2576. at the bottom of the block. */
  2577. bitmap_set_bit (artificial_uses, regno);
  2578. }
  2579. if (REG_DEAD_DEBUGGING && dump_file)
  2580. {
  2581. fprintf (dump_file, "live before artificials out ");
  2582. df_print_regset (dump_file, live);
  2583. }
  2584. FOR_BB_INSNS_REVERSE (bb, insn)
  2585. {
  2586. df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  2587. df_mw_hardreg *mw;
  2588. int debug_insn;
  2589. if (!INSN_P (insn))
  2590. continue;
  2591. debug_insn = DEBUG_INSN_P (insn);
  2592. bitmap_clear (do_not_gen);
  2593. df_remove_dead_and_unused_notes (insn);
  2594. /* Process the defs. */
  2595. if (CALL_P (insn))
  2596. {
  2597. if (REG_DEAD_DEBUGGING && dump_file)
  2598. {
  2599. fprintf (dump_file, "processing call %d\n live =",
  2600. INSN_UID (insn));
  2601. df_print_regset (dump_file, live);
  2602. }
  2603. /* We only care about real sets for calls. Clobbers cannot
  2604. be depended on to really die. */
  2605. FOR_EACH_INSN_INFO_MW (mw, insn_info)
  2606. if ((DF_MWS_REG_DEF_P (mw))
  2607. && !df_ignore_stack_reg (mw->start_regno))
  2608. df_set_unused_notes_for_mw (insn, mw, live, do_not_gen,
  2609. artificial_uses, &debug);
  2610. /* All of the defs except the return value are some sort of
  2611. clobber. This code is for the return. */
  2612. FOR_EACH_INSN_INFO_DEF (def, insn_info)
  2613. {
  2614. unsigned int dregno = DF_REF_REGNO (def);
  2615. if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
  2616. {
  2617. df_create_unused_note (insn,
  2618. def, live, artificial_uses, &debug);
  2619. bitmap_set_bit (do_not_gen, dregno);
  2620. }
  2621. if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
  2622. bitmap_clear_bit (live, dregno);
  2623. }
  2624. }
  2625. else
  2626. {
  2627. /* Regular insn. */
  2628. FOR_EACH_INSN_INFO_MW (mw, insn_info)
  2629. if (DF_MWS_REG_DEF_P (mw))
  2630. df_set_unused_notes_for_mw (insn, mw, live, do_not_gen,
  2631. artificial_uses, &debug);
  2632. FOR_EACH_INSN_INFO_DEF (def, insn_info)
  2633. {
  2634. unsigned int dregno = DF_REF_REGNO (def);
  2635. df_create_unused_note (insn,
  2636. def, live, artificial_uses, &debug);
  2637. if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
  2638. bitmap_set_bit (do_not_gen, dregno);
  2639. if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
  2640. bitmap_clear_bit (live, dregno);
  2641. }
  2642. }
  2643. /* Process the uses. */
  2644. FOR_EACH_INSN_INFO_MW (mw, insn_info)
  2645. if (DF_MWS_REG_USE_P (mw)
  2646. && !df_ignore_stack_reg (mw->start_regno))
  2647. {
  2648. bool really_add_notes = debug_insn != 0;
  2649. df_set_dead_notes_for_mw (insn, mw, live, do_not_gen,
  2650. artificial_uses,
  2651. &really_add_notes);
  2652. if (really_add_notes)
  2653. debug_insn = -1;
  2654. }
  2655. FOR_EACH_INSN_INFO_USE (use, insn_info)
  2656. {
  2657. unsigned int uregno = DF_REF_REGNO (use);
  2658. if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
  2659. {
  2660. fprintf (dump_file, " regular looking at use ");
  2661. df_ref_debug (use, dump_file);
  2662. }
  2663. if (!bitmap_bit_p (live, uregno))
  2664. {
  2665. if (debug_insn)
  2666. {
  2667. if (debug_insn > 0)
  2668. {
  2669. /* We won't add REG_UNUSED or REG_DEAD notes for
  2670. these, so we don't have to mess with them in
  2671. debug insns either. */
  2672. if (!bitmap_bit_p (artificial_uses, uregno)
  2673. && !df_ignore_stack_reg (uregno))
  2674. dead_debug_add (&debug, use, uregno);
  2675. continue;
  2676. }
  2677. break;
  2678. }
  2679. else
  2680. dead_debug_insert_temp (&debug, uregno, insn,
  2681. DEBUG_TEMP_BEFORE_WITH_REG);
  2682. if ( (!(DF_REF_FLAGS (use)
  2683. & (DF_REF_MW_HARDREG | DF_REF_READ_WRITE)))
  2684. && (!bitmap_bit_p (do_not_gen, uregno))
  2685. && (!bitmap_bit_p (artificial_uses, uregno))
  2686. && (!df_ignore_stack_reg (uregno)))
  2687. {
  2688. rtx reg = (DF_REF_LOC (use))
  2689. ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
  2690. df_set_note (REG_DEAD, insn, reg);
  2691. if (REG_DEAD_DEBUGGING)
  2692. df_print_note ("adding 4: ", insn, REG_NOTES (insn));
  2693. }
  2694. /* This register is now live. */
  2695. bitmap_set_bit (live, uregno);
  2696. }
  2697. }
  2698. df_remove_dead_eq_notes (insn, live);
  2699. if (debug_insn == -1)
  2700. {
  2701. /* ??? We could probably do better here, replacing dead
  2702. registers with their definitions. */
  2703. INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
  2704. df_insn_rescan_debug_internal (insn);
  2705. }
  2706. }
  2707. dead_debug_local_finish (&debug, NULL);
  2708. }
/* Recompute the REG_DEAD and REG_UNUSED notes for all blocks in ALL_BLOCKS.  */
  2710. static void
  2711. df_note_compute (bitmap all_blocks)
  2712. {
  2713. unsigned int bb_index;
  2714. bitmap_iterator bi;
  2715. bitmap_head live, do_not_gen, artificial_uses;
  2716. bitmap_initialize (&live, &df_bitmap_obstack);
  2717. bitmap_initialize (&do_not_gen, &df_bitmap_obstack);
  2718. bitmap_initialize (&artificial_uses, &df_bitmap_obstack);
  2719. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  2720. {
  2721. /* ??? Unlike fast DCE, we don't use global_debug for uses of dead
  2722. pseudos in debug insns because we don't always (re)visit blocks
  2723. with death points after visiting dead uses. Even changing this
  2724. loop to postorder would still leave room for visiting a death
  2725. point before visiting a subsequent debug use. */
  2726. df_note_bb_compute (bb_index, &live, &do_not_gen, &artificial_uses);
  2727. }
  2728. bitmap_clear (&live);
  2729. bitmap_clear (&do_not_gen);
  2730. bitmap_clear (&artificial_uses);
  2731. }
  2732. /* Free all storage associated with the problem. */
  2733. static void
  2734. df_note_free (void)
  2735. {
  2736. free (df_note);
  2737. }
/* All of the information associated with every instance of the problem.  */
  2739. static struct df_problem problem_NOTE =
  2740. {
  2741. DF_NOTE, /* Problem id. */
  2742. DF_NONE, /* Direction. */
  2743. df_note_alloc, /* Allocate the problem specific data. */
  2744. NULL, /* Reset global information. */
  2745. NULL, /* Free basic block info. */
  2746. df_note_compute, /* Local compute function. */
  2747. NULL, /* Init the solution specific data. */
  2748. NULL, /* Iterative solver. */
  2749. NULL, /* Confluence operator 0. */
  2750. NULL, /* Confluence operator n. */
  2751. NULL, /* Transfer function. */
  2752. NULL, /* Finalize function. */
  2753. df_note_free, /* Free all of the problem information. */
  2754. df_note_free, /* Remove this problem from the stack of dataflow problems. */
  2755. NULL, /* Debugging. */
  2756. NULL, /* Debugging start block. */
  2757. NULL, /* Debugging end block. */
  2758. NULL, /* Debugging start insn. */
  2759. NULL, /* Debugging end insn. */
  2760. NULL, /* Incremental solution verify start. */
  2761. NULL, /* Incremental solution verify end. */
  2762. &problem_LR, /* Dependent problem. */
  2763. sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
  2764. TV_DF_NOTE, /* Timing variable. */
  2765. false /* Reset blocks on dropping out of blocks_to_analyze. */
  2766. };
/* Create a new DATAFLOW instance for the NOTE problem and add it to the
   existing instance of DF.  */
  2770. void
  2771. df_note_add_problem (void)
  2772. {
  2773. df_add_problem (&problem_NOTE);
  2774. }
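/* A minimal usage sketch (illustrative): a pass that needs the REG_DEAD and
   REG_UNUSED notes brought up to date typically does

       df_note_add_problem ();
       df_analyze ();

   after which the notes on the insns in the analyzed blocks reflect the
   current liveness information.  */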
  2775. /*----------------------------------------------------------------------------
  2776. Functions for simulating the effects of single insns.
  2777. You can either simulate in the forwards direction, starting from
  2778. the top of a block or the backwards direction from the end of the
  2779. block. If you go backwards, defs are examined first to clear bits,
  2780. then uses are examined to set bits. If you go forwards, defs are
  2781. examined first to set bits, then REG_DEAD and REG_UNUSED notes
  2782. are examined to clear bits. In either case, the result of examining
  2783. a def can be undone (respectively by a use or a REG_UNUSED note).
  2784. If you start at the top of the block, use one of DF_LIVE_IN or
  2785. DF_LR_IN. If you start at the bottom of the block use one of
  2786. DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
  2787. THEY WILL BE DESTROYED.
  2788. ----------------------------------------------------------------------------*/
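/* For example (illustrative): since the simulation routines below modify the
   set in place, a caller copies the problem's bitmap first instead of
   passing it directly:

       bitmap_head live;
       bitmap_initialize (&live, &bitmap_default_obstack);
       bitmap_copy (&live, df_get_live_out (bb));
       ... simulate over BB using &live ...
       bitmap_clear (&live);  */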
  2789. /* Find the set of DEFs for INSN. */
  2790. void
  2791. df_simulate_find_defs (rtx_insn *insn, bitmap defs)
  2792. {
  2793. df_ref def;
  2794. FOR_EACH_INSN_DEF (def, insn)
  2795. bitmap_set_bit (defs, DF_REF_REGNO (def));
  2796. }
  2797. /* Find the set of uses for INSN. This includes partial defs. */
  2798. static void
  2799. df_simulate_find_uses (rtx_insn *insn, bitmap uses)
  2800. {
  2801. df_ref def, use;
  2802. struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  2803. FOR_EACH_INSN_INFO_DEF (def, insn_info)
  2804. if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
  2805. bitmap_set_bit (uses, DF_REF_REGNO (def));
  2806. FOR_EACH_INSN_INFO_USE (use, insn_info)
  2807. bitmap_set_bit (uses, DF_REF_REGNO (use));
  2808. }
  2809. /* Find the set of real DEFs, which are not clobbers, for INSN. */
  2810. void
  2811. df_simulate_find_noclobber_defs (rtx_insn *insn, bitmap defs)
  2812. {
  2813. df_ref def;
  2814. FOR_EACH_INSN_DEF (def, insn)
  2815. if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
  2816. bitmap_set_bit (defs, DF_REF_REGNO (def));
  2817. }
  2818. /* Simulate the effects of the defs of INSN on LIVE. */
  2819. void
  2820. df_simulate_defs (rtx_insn *insn, bitmap live)
  2821. {
  2822. df_ref def;
  2823. FOR_EACH_INSN_DEF (def, insn)
  2824. {
  2825. unsigned int dregno = DF_REF_REGNO (def);
  2826. /* If the def is to only part of the reg, it does
  2827. not kill the other defs that reach here. */
  2828. if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
  2829. bitmap_clear_bit (live, dregno);
  2830. }
  2831. }
  2832. /* Simulate the effects of the uses of INSN on LIVE. */
  2833. void
  2834. df_simulate_uses (rtx_insn *insn, bitmap live)
  2835. {
  2836. df_ref use;
  2837. if (DEBUG_INSN_P (insn))
  2838. return;
  2839. FOR_EACH_INSN_USE (use, insn)
  2840. /* Add use to set of uses in this BB. */
  2841. bitmap_set_bit (live, DF_REF_REGNO (use));
  2842. }
  2843. /* Add back the always live regs in BB to LIVE. */
  2844. static inline void
  2845. df_simulate_fixup_sets (basic_block bb, bitmap live)
  2846. {
  /* These regs are considered always live so if they end up dying
     because of some def, we need to bring them back again.  */
  2849. if (bb_has_eh_pred (bb))
  2850. bitmap_ior_into (live, &df->eh_block_artificial_uses);
  2851. else
  2852. bitmap_ior_into (live, &df->regular_block_artificial_uses);
  2853. }
/*----------------------------------------------------------------------------
   The following three functions are used only for BACKWARDS scanning:
   i.e. they process the defs before the uses.

   df_simulate_initialize_backwards should be called first with a
   bitvector copied from DF_LIVE_OUT or DF_LR_OUT.  Then
   df_simulate_one_insn_backwards should be called for each insn in
   the block, starting with the last one.  Finally,
   df_simulate_finalize_backwards can be called to get a new value
   of the sets at the top of the block (this is rarely used).
----------------------------------------------------------------------------*/
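/* A minimal sketch of the backwards protocol described above (illustrative
   only; this helper is not used anywhere in df).  It leaves in LIVE the
   registers live at the top of BB.  */

static void ATTRIBUTE_UNUSED
df_simulate_block_backwards_example (basic_block bb, bitmap live)
{
  rtx_insn *insn;

  /* Start from a copy of the live-out set; the simulation destroys it.  */
  bitmap_copy (live, df_get_live_out (bb));
  df_simulate_initialize_backwards (bb, live);
  FOR_BB_INSNS_REVERSE (bb, insn)
    df_simulate_one_insn_backwards (bb, insn, live);
  df_simulate_finalize_backwards (bb, live);
}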
  2864. /* Apply the artificial uses and defs at the end of BB in a backwards
  2865. direction. */
  2866. void
  2867. df_simulate_initialize_backwards (basic_block bb, bitmap live)
  2868. {
  2869. df_ref def, use;
  2870. int bb_index = bb->index;
  2871. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  2872. if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
  2873. bitmap_clear_bit (live, DF_REF_REGNO (def));
  2874. FOR_EACH_ARTIFICIAL_USE (use, bb_index)
  2875. if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
  2876. bitmap_set_bit (live, DF_REF_REGNO (use));
  2877. }
  2878. /* Simulate the backwards effects of INSN on the bitmap LIVE. */
  2879. void
  2880. df_simulate_one_insn_backwards (basic_block bb, rtx_insn *insn, bitmap live)
  2881. {
  2882. if (!NONDEBUG_INSN_P (insn))
  2883. return;
  2884. df_simulate_defs (insn, live);
  2885. df_simulate_uses (insn, live);
  2886. df_simulate_fixup_sets (bb, live);
  2887. }
  2888. /* Apply the artificial uses and defs at the top of BB in a backwards
  2889. direction. */
  2890. void
  2891. df_simulate_finalize_backwards (basic_block bb, bitmap live)
  2892. {
  2893. df_ref def;
  2894. #ifdef EH_USES
  2895. df_ref use;
  2896. #endif
  2897. int bb_index = bb->index;
  2898. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  2899. if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
  2900. bitmap_clear_bit (live, DF_REF_REGNO (def));
  2901. #ifdef EH_USES
  2902. FOR_EACH_ARTIFICIAL_USE (use, bb_index)
  2903. if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
  2904. bitmap_set_bit (live, DF_REF_REGNO (use));
  2905. #endif
  2906. }
/*----------------------------------------------------------------------------
   The following two functions are used only for FORWARDS scanning:
   i.e. they process the defs and the REG_DEAD and REG_UNUSED notes.
   Thus it is important to add the DF_NOTE problem to the stack of
   problems computed before using these functions.

   df_simulate_initialize_forwards should be called first with a
   bitvector copied from DF_LIVE_IN or DF_LR_IN.  Then
   df_simulate_one_insn_forwards should be called for each insn in
   the block, starting with the first one.
----------------------------------------------------------------------------*/
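/* A minimal sketch of the forwards protocol described above (illustrative
   only; this helper is not used anywhere in df).  It assumes the DF_NOTE
   problem has already been computed, so that the REG_DEAD and REG_UNUSED
   notes consulted by df_simulate_one_insn_forwards are current.  */

static void ATTRIBUTE_UNUSED
df_simulate_block_forwards_example (basic_block bb, bitmap live)
{
  rtx_insn *insn;

  /* Start from a copy of the live-in set; the simulation destroys it.  */
  bitmap_copy (live, df_get_live_in (bb));
  df_simulate_initialize_forwards (bb, live);
  FOR_BB_INSNS (bb, insn)
    df_simulate_one_insn_forwards (bb, insn, live);
}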
  2917. /* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or
  2918. DF_LR_IN for basic block BB, for forward scanning by marking artificial
  2919. defs live. */
  2920. void
  2921. df_simulate_initialize_forwards (basic_block bb, bitmap live)
  2922. {
  2923. df_ref def;
  2924. int bb_index = bb->index;
  2925. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  2926. if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
  2927. bitmap_set_bit (live, DF_REF_REGNO (def));
  2928. }
  2929. /* Simulate the forwards effects of INSN on the bitmap LIVE. */
  2930. void
  2931. df_simulate_one_insn_forwards (basic_block bb, rtx_insn *insn, bitmap live)
  2932. {
  2933. rtx link;
  2934. if (! INSN_P (insn))
  2935. return;
  2936. /* Make sure that DF_NOTE really is an active df problem. */
  2937. gcc_assert (df_note);
  /* Note that this is the opposite of how the problem is defined, because
     in the LR problem defs _kill_ liveness.  However, they do so backwards,
     while here the scan is performed forwards!  So, first assume that the
     def is live, and if this is not true REG_UNUSED notes will rectify the
     situation.  */
  2943. df_simulate_find_noclobber_defs (insn, live);
  2944. /* Clear all of the registers that go dead. */
  2945. for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
  2946. {
  2947. switch (REG_NOTE_KIND (link))
  2948. {
  2949. case REG_DEAD:
  2950. case REG_UNUSED:
  2951. {
  2952. rtx reg = XEXP (link, 0);
  2953. int regno = REGNO (reg);
  2954. if (HARD_REGISTER_NUM_P (regno))
  2955. bitmap_clear_range (live, regno,
  2956. hard_regno_nregs[regno][GET_MODE (reg)]);
  2957. else
  2958. bitmap_clear_bit (live, regno);
  2959. }
  2960. break;
  2961. default:
  2962. break;
  2963. }
  2964. }
  2965. df_simulate_fixup_sets (bb, live);
  2966. }
  2967. /* Used by the next two functions to encode information about the
  2968. memory references we found. */
  2969. #define MEMREF_NORMAL 1
  2970. #define MEMREF_VOLATILE 2
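/* For example, an insn whose pattern contains both a plain MEM read and a
   volatile MEM yields MEMREF_NORMAL | MEMREF_VOLATILE from find_memory
   below, while an insn that only reads a MEM_READONLY_P location yields 0.  */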
  2971. /* Return an OR of MEMREF_NORMAL or MEMREF_VOLATILE for the MEMs in X. */
  2972. static int
  2973. find_memory (rtx insn)
  2974. {
  2975. int flags = 0;
  2976. subrtx_iterator::array_type array;
  2977. FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
  2978. {
  2979. const_rtx x = *iter;
  2980. if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
  2981. flags |= MEMREF_VOLATILE;
  2982. else if (MEM_P (x))
  2983. {
  2984. if (MEM_VOLATILE_P (x))
  2985. flags |= MEMREF_VOLATILE;
  2986. else if (!MEM_READONLY_P (x))
  2987. flags |= MEMREF_NORMAL;
  2988. }
  2989. }
  2990. return flags;
  2991. }
  2992. /* A subroutine of can_move_insns_across_p called through note_stores.
  2993. DATA points to an integer in which we set either the bit for
  2994. MEMREF_NORMAL or the bit for MEMREF_VOLATILE if we find a MEM
  2995. of either kind. */
  2996. static void
  2997. find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
  2998. void *data ATTRIBUTE_UNUSED)
  2999. {
  3000. int *pflags = (int *)data;
  3001. if (GET_CODE (x) == SUBREG)
  3002. x = XEXP (x, 0);
  /* Treat stores to SP as stores to memory; this will prevent problems
     when there are references to the stack frame.  */
  3005. if (x == stack_pointer_rtx)
  3006. *pflags |= MEMREF_VOLATILE;
  3007. if (!MEM_P (x))
  3008. return;
  3009. *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL;
  3010. }
  3011. /* Scan BB backwards, using df_simulate functions to keep track of
  3012. lifetimes, up to insn POINT. The result is stored in LIVE. */
  3013. void
  3014. simulate_backwards_to_point (basic_block bb, regset live, rtx point)
  3015. {
  3016. rtx_insn *insn;
  3017. bitmap_copy (live, df_get_live_out (bb));
  3018. df_simulate_initialize_backwards (bb, live);
  3019. /* Scan and update life information until we reach the point we're
  3020. interested in. */
  3021. for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn))
  3022. df_simulate_one_insn_backwards (bb, insn, live);
  3023. }
  3024. /* Return true if it is safe to move a group of insns, described by
  3025. the range FROM to TO, backwards across another group of insns,
  3026. described by ACROSS_FROM to ACROSS_TO. It is assumed that there
  3027. are no insns between ACROSS_TO and FROM, but they may be in
  3028. different basic blocks; MERGE_BB is the block from which the
  3029. insns will be moved. The caller must pass in a regset MERGE_LIVE
  3030. which specifies the registers live after TO.
  3031. This function may be called in one of two cases: either we try to
  3032. move identical instructions from all successor blocks into their
  3033. predecessor, or we try to move from only one successor block. If
  3034. OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with
  3035. the second case. It should contain a set of registers live at the
  3036. end of ACROSS_TO which must not be clobbered by moving the insns.
  3037. In that case, we're also more careful about moving memory references
  3038. and trapping insns.
  3039. We return false if it is not safe to move the entire group, but it
  3040. may still be possible to move a subgroup. PMOVE_UPTO, if nonnull,
  3041. is set to point at the last moveable insn in such a case. */
  3042. bool
  3043. can_move_insns_across (rtx_insn *from, rtx_insn *to,
  3044. rtx_insn *across_from, rtx_insn *across_to,
  3045. basic_block merge_bb, regset merge_live,
  3046. regset other_branch_live, rtx_insn **pmove_upto)
  3047. {
  3048. rtx_insn *insn, *next, *max_to;
  3049. bitmap merge_set, merge_use, local_merge_live;
  3050. bitmap test_set, test_use;
  3051. unsigned i, fail = 0;
  3052. bitmap_iterator bi;
  3053. int memrefs_in_across = 0;
  3054. int mem_sets_in_across = 0;
  3055. bool trapping_insns_in_across = false;
  3056. if (pmove_upto != NULL)
  3057. *pmove_upto = NULL;
  3058. /* Find real bounds, ignoring debug insns. */
  3059. while (!NONDEBUG_INSN_P (from) && from != to)
  3060. from = NEXT_INSN (from);
  3061. while (!NONDEBUG_INSN_P (to) && from != to)
  3062. to = PREV_INSN (to);
  3063. for (insn = across_to; ; insn = next)
  3064. {
  3065. if (CALL_P (insn))
  3066. {
  3067. if (RTL_CONST_OR_PURE_CALL_P (insn))
  3068. /* Pure functions can read from memory. Const functions can
  3069. read from arguments that the ABI has forced onto the stack.
  3070. Neither sort of read can be volatile. */
  3071. memrefs_in_across |= MEMREF_NORMAL;
  3072. else
  3073. {
  3074. memrefs_in_across |= MEMREF_VOLATILE;
  3075. mem_sets_in_across |= MEMREF_VOLATILE;
  3076. }
  3077. }
  3078. if (NONDEBUG_INSN_P (insn))
  3079. {
  3080. if (volatile_insn_p (PATTERN (insn)))
  3081. return false;
  3082. memrefs_in_across |= find_memory (insn);
  3083. note_stores (PATTERN (insn), find_memory_stores,
  3084. &mem_sets_in_across);
  3085. /* This is used just to find sets of the stack pointer. */
  3086. memrefs_in_across |= mem_sets_in_across;
  3087. trapping_insns_in_across |= may_trap_p (PATTERN (insn));
  3088. }
  3089. next = PREV_INSN (insn);
  3090. if (insn == across_from)
  3091. break;
  3092. }
  /* Collect:
     MERGE_SET = set of registers set in MERGE_BB
     MERGE_USE = set of registers used in MERGE_BB and live at its top
     MERGE_LIVE = set of registers live at the point inside the MERGE
     range that we've reached during scanning
     TEST_SET = set of registers set between ACROSS_FROM and ACROSS_TO.
     TEST_USE = set of registers used between ACROSS_FROM and ACROSS_TO,
     and live before ACROSS_FROM.  */
  3101. merge_set = BITMAP_ALLOC (&reg_obstack);
  3102. merge_use = BITMAP_ALLOC (&reg_obstack);
  3103. local_merge_live = BITMAP_ALLOC (&reg_obstack);
  3104. test_set = BITMAP_ALLOC (&reg_obstack);
  3105. test_use = BITMAP_ALLOC (&reg_obstack);
  3106. /* Compute the set of registers set and used in the ACROSS range. */
  3107. if (other_branch_live != NULL)
  3108. bitmap_copy (test_use, other_branch_live);
  3109. df_simulate_initialize_backwards (merge_bb, test_use);
  3110. for (insn = across_to; ; insn = next)
  3111. {
  3112. if (NONDEBUG_INSN_P (insn))
  3113. {
  3114. df_simulate_find_defs (insn, test_set);
  3115. df_simulate_defs (insn, test_use);
  3116. df_simulate_uses (insn, test_use);
  3117. }
  3118. next = PREV_INSN (insn);
  3119. if (insn == across_from)
  3120. break;
  3121. }
  /* Compute an upper bound for the number of insns moved, by finding
     the first insn in MERGE that sets a register in TEST_USE, or uses
     a register in TEST_SET.  We also check for calls, trapping operations,
     and memory references.  */
  3126. max_to = NULL;
  3127. for (insn = from; ; insn = next)
  3128. {
  3129. if (CALL_P (insn))
  3130. break;
  3131. if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
  3132. break;
  3133. if (NONDEBUG_INSN_P (insn))
  3134. {
  3135. if (may_trap_or_fault_p (PATTERN (insn))
  3136. && (trapping_insns_in_across
  3137. || other_branch_live != NULL
  3138. || volatile_insn_p (PATTERN (insn))))
  3139. break;
  3140. /* We cannot move memory stores past each other, or move memory
  3141. reads past stores, at least not without tracking them and
  3142. calling true_dependence on every pair.
  3143. If there is no other branch and no memory references or
  3144. sets in the ACROSS range, we can move memory references
  3145. freely, even volatile ones.
  3146. Otherwise, the rules are as follows: volatile memory
  3147. references and stores can't be moved at all, and any type
  3148. of memory reference can't be moved if there are volatile
  3149. accesses or stores in the ACROSS range. That leaves
  3150. normal reads, which can be moved, as the trapping case is
  3151. dealt with elsewhere. */
  3152. if (other_branch_live != NULL || memrefs_in_across != 0)
  3153. {
  3154. int mem_ref_flags = 0;
  3155. int mem_set_flags = 0;
  3156. note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
  3157. mem_ref_flags = find_memory (insn);
  3158. /* Catch sets of the stack pointer. */
  3159. mem_ref_flags |= mem_set_flags;
  3160. if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE)
  3161. break;
  3162. if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0)
  3163. break;
  3164. if (mem_set_flags != 0
  3165. || (mem_sets_in_across != 0 && mem_ref_flags != 0))
  3166. break;
  3167. }
  3168. df_simulate_find_uses (insn, merge_use);
  3169. /* We're only interested in uses which use a value live at
  3170. the top, not one previously set in this block. */
  3171. bitmap_and_compl_into (merge_use, merge_set);
  3172. df_simulate_find_defs (insn, merge_set);
  3173. if (bitmap_intersect_p (merge_set, test_use)
  3174. || bitmap_intersect_p (merge_use, test_set))
  3175. break;
  3176. #ifdef HAVE_cc0
  3177. if (!sets_cc0_p (insn))
  3178. #endif
  3179. max_to = insn;
  3180. }
  3181. next = NEXT_INSN (insn);
  3182. if (insn == to)
  3183. break;
  3184. }
  3185. if (max_to != to)
  3186. fail = 1;
  3187. if (max_to == NULL_RTX || (fail && pmove_upto == NULL))
  3188. goto out;
  3189. /* Now, lower this upper bound by also taking into account that
  3190. a range of insns moved across ACROSS must not leave a register
  3191. live at the end that will be clobbered in ACROSS. We need to
  3192. find a point where TEST_SET & LIVE == 0.
  3193. Insns in the MERGE range that set registers which are also set
  3194. in the ACROSS range may still be moved as long as we also move
  3195. later insns which use the results of the set, and make the
  3196. register dead again. This is verified by the condition stated
  3197. above. We only need to test it for registers that are set in
  3198. the moved region.
  3199. MERGE_LIVE is provided by the caller and holds live registers after
  3200. TO. */
  3201. bitmap_copy (local_merge_live, merge_live);
  3202. for (insn = to; insn != max_to; insn = PREV_INSN (insn))
  3203. df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live);
  3204. /* We're not interested in registers that aren't set in the moved
  3205. region at all. */
  3206. bitmap_and_into (local_merge_live, merge_set);
  3207. for (;;)
  3208. {
  3209. if (NONDEBUG_INSN_P (insn))
  3210. {
  3211. if (!bitmap_intersect_p (test_set, local_merge_live)
  3212. #ifdef HAVE_cc0
  3213. && !sets_cc0_p (insn)
  3214. #endif
  3215. )
  3216. {
  3217. max_to = insn;
  3218. break;
  3219. }
  3220. df_simulate_one_insn_backwards (merge_bb, insn,
  3221. local_merge_live);
  3222. }
  3223. if (insn == from)
  3224. {
  3225. fail = 1;
  3226. goto out;
  3227. }
  3228. insn = PREV_INSN (insn);
  3229. }
  3230. if (max_to != to)
  3231. fail = 1;
  3232. if (pmove_upto)
  3233. *pmove_upto = max_to;
  3234. /* For small register class machines, don't lengthen lifetimes of
  3235. hard registers before reload. */
  3236. if (! reload_completed
  3237. && targetm.small_register_classes_for_mode_p (VOIDmode))
  3238. {
  3239. EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
  3240. {
  3241. if (i < FIRST_PSEUDO_REGISTER
  3242. && ! fixed_regs[i]
  3243. && ! global_regs[i])
  3244. {
  3245. fail = 1;
  3246. break;
  3247. }
  3248. }
  3249. }
  3250. out:
  3251. BITMAP_FREE (merge_set);
  3252. BITMAP_FREE (merge_use);
  3253. BITMAP_FREE (local_merge_live);
  3254. BITMAP_FREE (test_set);
  3255. BITMAP_FREE (test_use);
  3256. return !fail;
  3257. }
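/* A minimal caller sketch for the function above (hypothetical; the real
   callers of can_move_insns_across live outside this file).  It assumes TO
   is the last insn of MERGE_BB, so that the registers live after TO are
   MERGE_BB's live-out set.  */

static bool ATTRIBUTE_UNUSED
df_example_try_move_across (rtx_insn *from, rtx_insn *to,
                            rtx_insn *across_from, rtx_insn *across_to,
                            basic_block merge_bb)
{
  rtx_insn *move_upto;
  bitmap_head live;
  bool ok;

  bitmap_initialize (&live, &reg_obstack);
  bitmap_copy (&live, df_get_live_out (merge_bb));
  ok = can_move_insns_across (from, to, across_from, across_to,
                              merge_bb, &live, NULL, &move_upto);
  /* If the whole group cannot be moved, MOVE_UPTO (when non-NULL) bounds
     the subgroup FROM..MOVE_UPTO that still can be.  */
  bitmap_clear (&live);
  return ok;
}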
  3258. /*----------------------------------------------------------------------------
  3259. MULTIPLE DEFINITIONS
  3260. Find the locations in the function reached by multiple definition sites
  3261. for a live pseudo. In and out bitvectors are built for each basic
  3262. block. They are restricted for efficiency to live registers.
  3263. The gen and kill sets for the problem are obvious. Together they
  3264. include all defined registers in a basic block; the gen set includes
  3265. registers where a partial or conditional or may-clobber definition is
  3266. last in the BB, while the kill set includes registers with a complete
  3267. definition coming last. However, the computation of the dataflow
  3268. itself is interesting.
  3269. The idea behind it comes from SSA form's iterated dominance frontier
  3270. criterion for inserting PHI functions. Just like in that case, we can use
  3271. the dominance frontier to find places where multiple definitions meet;
  3272. a register X defined in a basic block BB1 has multiple definitions in
  3273. basic blocks in BB1's dominance frontier.
  3274. So, the in-set of a basic block BB2 is not just the union of the
  3275. out-sets of BB2's predecessors, but includes some more bits that come
  3276. from the basic blocks of whose dominance frontier BB2 is part (BB1 in
  3277. the previous paragraph). I called this set the init-set of BB2.
  3278. (Note: I actually use the kill-set only to build the init-set.
  3279. gen bits are anyway propagated from BB1 to BB2 by dataflow).
  3280. For example, if you have
  3281. BB1 : r10 = 0
  3282. r11 = 0
  3283. if <...> goto BB2 else goto BB3;
  3284. BB2 : r10 = 1
  3285. r12 = 1
  3286. goto BB3;
  3287. BB3 :
  3288. you have BB3 in BB2's dominance frontier but not in BB1's, so that the
  3289. init-set of BB3 includes r10 and r12, but not r11. Note that we do
  3290. not need to iterate the dominance frontier, because we do not insert
  3291. anything like PHI functions there! Instead, dataflow will take care of
  3292. propagating the information to BB3's successors.
  3293. ---------------------------------------------------------------------------*/
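/* In terms of the sets computed below, the problem solved here is,
   restricting everything to live registers (sketch only; EH edges
   additionally mask out regs_invalidated_by_call_regset in the
   confluence step):

       init(BB) = union over BB1 with BB in DF(BB1) of
                    (kill(BB1) intersect live-in(BB))
       in(BB)   = init(BB) union (union over predecessors P of out(P))
       out(BB)  = (gen(BB) intersect live-out(BB))
                    union ((in(BB) intersect live-in(BB)) minus kill(BB))

   as implemented by df_md_local_compute, df_md_confluence_0/_n and
   df_md_transfer_function.  */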
  3294. /* Private data used to verify the solution for this problem. */
  3295. struct df_md_problem_data
  3296. {
  3297. /* An obstack for the bitmaps we need for this problem. */
  3298. bitmap_obstack md_bitmaps;
  3299. };
  3300. /* Scratch var used by transfer functions. This is used to do md analysis
  3301. only for live registers. */
  3302. static bitmap_head df_md_scratch;
  3303. static void
  3304. df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
  3305. void *vbb_info)
  3306. {
  3307. struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
  3308. if (bb_info)
  3309. {
  3310. bitmap_clear (&bb_info->kill);
  3311. bitmap_clear (&bb_info->gen);
  3312. bitmap_clear (&bb_info->init);
  3313. bitmap_clear (&bb_info->in);
  3314. bitmap_clear (&bb_info->out);
  3315. }
  3316. }
  3317. /* Allocate or reset bitmaps for DF_MD. The solution bits are
  3318. not touched unless the block is new. */
  3319. static void
  3320. df_md_alloc (bitmap all_blocks)
  3321. {
  3322. unsigned int bb_index;
  3323. bitmap_iterator bi;
  3324. struct df_md_problem_data *problem_data;
  3325. df_grow_bb_info (df_md);
  3326. if (df_md->problem_data)
  3327. problem_data = (struct df_md_problem_data *) df_md->problem_data;
  3328. else
  3329. {
  3330. problem_data = XNEW (struct df_md_problem_data);
  3331. df_md->problem_data = problem_data;
  3332. bitmap_obstack_initialize (&problem_data->md_bitmaps);
  3333. }
  3334. bitmap_initialize (&df_md_scratch, &problem_data->md_bitmaps);
  3335. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  3336. {
  3337. struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
  3338. /* When bitmaps are already initialized, just clear them. */
  3339. if (bb_info->init.obstack)
  3340. {
  3341. bitmap_clear (&bb_info->init);
  3342. bitmap_clear (&bb_info->gen);
  3343. bitmap_clear (&bb_info->kill);
  3344. bitmap_clear (&bb_info->in);
  3345. bitmap_clear (&bb_info->out);
  3346. }
  3347. else
  3348. {
  3349. bitmap_initialize (&bb_info->init, &problem_data->md_bitmaps);
  3350. bitmap_initialize (&bb_info->gen, &problem_data->md_bitmaps);
  3351. bitmap_initialize (&bb_info->kill, &problem_data->md_bitmaps);
  3352. bitmap_initialize (&bb_info->in, &problem_data->md_bitmaps);
  3353. bitmap_initialize (&bb_info->out, &problem_data->md_bitmaps);
  3354. }
  3355. }
  3356. df_md->optional_p = true;
  3357. }
  3358. /* Add the effect of the top artificial defs of BB to the multiple definitions
  3359. bitmap LOCAL_MD. */
  3360. void
  3361. df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
  3362. {
  3363. int bb_index = bb->index;
  3364. df_ref def;
  3365. FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
  3366. if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
  3367. {
  3368. unsigned int dregno = DF_REF_REGNO (def);
  3369. if (DF_REF_FLAGS (def)
  3370. & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
  3371. bitmap_set_bit (local_md, dregno);
  3372. else
  3373. bitmap_clear_bit (local_md, dregno);
  3374. }
  3375. }
/* Add the effect of the defs of INSN to the multiple definitions bitmap
   LOCAL_MD.  */
  3378. void
  3379. df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
  3380. bitmap local_md)
  3381. {
  3382. df_ref def;
  3383. FOR_EACH_INSN_DEF (def, insn)
  3384. {
  3385. unsigned int dregno = DF_REF_REGNO (def);
  3386. if ((!(df->changeable_flags & DF_NO_HARD_REGS))
  3387. || (dregno >= FIRST_PSEUDO_REGISTER))
  3388. {
          /* LOCAL_MD is indexed by register number, like the gen and
             kill sets of this problem.  */
          if (DF_REF_FLAGS (def)
              & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
            bitmap_set_bit (local_md, dregno);
          else
            bitmap_clear_bit (local_md, dregno);
  3394. }
  3395. }
  3396. }
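/* A minimal sketch of how the two simulation hooks above are meant to be
   used (illustrative only; this helper is not used anywhere in df).  After
   each insn, LOCAL_MD holds the live registers that may be reached by more
   than one definition at that point.  */

static void ATTRIBUTE_UNUSED
df_md_simulate_block_example (basic_block bb, bitmap local_md)
{
  rtx_insn *insn;

  bitmap_copy (local_md, &df_md_get_bb_info (bb->index)->in);
  df_md_simulate_artificial_defs_at_top (bb, local_md);
  FOR_BB_INSNS (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      df_md_simulate_one_insn (bb, insn, local_md);
}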
  3397. static void
  3398. df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
  3399. df_ref def,
  3400. int top_flag)
  3401. {
  3402. bitmap_clear (&seen_in_insn);
  3403. for (; def; def = DF_REF_NEXT_LOC (def))
  3404. {
  3405. unsigned int dregno = DF_REF_REGNO (def);
  3406. if (((!(df->changeable_flags & DF_NO_HARD_REGS))
  3407. || (dregno >= FIRST_PSEUDO_REGISTER))
  3408. && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
  3409. {
  3410. if (!bitmap_bit_p (&seen_in_insn, dregno))
  3411. {
  3412. if (DF_REF_FLAGS (def)
  3413. & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
  3414. {
  3415. bitmap_set_bit (&bb_info->gen, dregno);
  3416. bitmap_clear_bit (&bb_info->kill, dregno);
  3417. }
  3418. else
  3419. {
  3420. /* When we find a clobber and a regular def,
  3421. make sure the regular def wins. */
  3422. bitmap_set_bit (&seen_in_insn, dregno);
  3423. bitmap_set_bit (&bb_info->kill, dregno);
  3424. bitmap_clear_bit (&bb_info->gen, dregno);
  3425. }
  3426. }
  3427. }
  3428. }
  3429. }
  3430. /* Compute local multiple def info for basic block BB. */
  3431. static void
  3432. df_md_bb_local_compute (unsigned int bb_index)
  3433. {
  3434. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  3435. struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
  3436. rtx_insn *insn;
  3437. /* Artificials are only hard regs. */
  3438. if (!(df->changeable_flags & DF_NO_HARD_REGS))
  3439. df_md_bb_local_compute_process_def (bb_info,
  3440. df_get_artificial_defs (bb_index),
  3441. DF_REF_AT_TOP);
  3442. FOR_BB_INSNS (bb, insn)
  3443. {
  3444. unsigned int uid = INSN_UID (insn);
  3445. if (!INSN_P (insn))
  3446. continue;
  3447. df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0);
  3448. }
  3449. if (!(df->changeable_flags & DF_NO_HARD_REGS))
  3450. df_md_bb_local_compute_process_def (bb_info,
  3451. df_get_artificial_defs (bb_index),
  3452. 0);
  3453. }
  3454. /* Compute local reaching def info for each basic block within BLOCKS. */
  3455. static void
  3456. df_md_local_compute (bitmap all_blocks)
  3457. {
  3458. unsigned int bb_index, df_bb_index;
  3459. bitmap_iterator bi1, bi2;
  3460. basic_block bb;
  3461. bitmap_head *frontiers;
  3462. bitmap_initialize (&seen_in_insn, &bitmap_default_obstack);
  3463. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
  3464. {
  3465. df_md_bb_local_compute (bb_index);
  3466. }
  3467. bitmap_clear (&seen_in_insn);
  3468. frontiers = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  3469. FOR_ALL_BB_FN (bb, cfun)
  3470. bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
  3471. compute_dominance_frontiers (frontiers);
  3472. /* Add each basic block's kills to the nodes in the frontier of the BB. */
  3473. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1)
  3474. {
  3475. bitmap kill = &df_md_get_bb_info (bb_index)->kill;
  3476. EXECUTE_IF_SET_IN_BITMAP (&frontiers[bb_index], 0, df_bb_index, bi2)
  3477. {
  3478. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, df_bb_index);
  3479. if (bitmap_bit_p (all_blocks, df_bb_index))
  3480. bitmap_ior_and_into (&df_md_get_bb_info (df_bb_index)->init, kill,
  3481. df_get_live_in (bb));
  3482. }
  3483. }
  3484. FOR_ALL_BB_FN (bb, cfun)
  3485. bitmap_clear (&frontiers[bb->index]);
  3486. free (frontiers);
  3487. }
  3488. /* Reset the global solution for recalculation. */
  3489. static void
  3490. df_md_reset (bitmap all_blocks)
  3491. {
  3492. unsigned int bb_index;
  3493. bitmap_iterator bi;
  3494. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  3495. {
  3496. struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
  3497. gcc_assert (bb_info);
  3498. bitmap_clear (&bb_info->in);
  3499. bitmap_clear (&bb_info->out);
  3500. }
  3501. }
  3502. static bool
  3503. df_md_transfer_function (int bb_index)
  3504. {
  3505. basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  3506. struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
  3507. bitmap in = &bb_info->in;
  3508. bitmap out = &bb_info->out;
  3509. bitmap gen = &bb_info->gen;
  3510. bitmap kill = &bb_info->kill;
  3511. /* We need to use a scratch set here so that the value returned from this
  3512. function invocation properly reflects whether the sets changed in a
  3513. significant way; i.e. not just because the live set was anded in. */
  3514. bitmap_and (&df_md_scratch, gen, df_get_live_out (bb));
  3515. /* Multiple definitions of a register are not relevant if it is not
  3516. live. Thus we trim the result to the places where it is live. */
  3517. bitmap_and_into (in, df_get_live_in (bb));
  3518. return bitmap_ior_and_compl (out, &df_md_scratch, in, kill);
  3519. }
  3520. /* Initialize the solution bit vectors for problem. */
  3521. static void
  3522. df_md_init (bitmap all_blocks)
  3523. {
  3524. unsigned int bb_index;
  3525. bitmap_iterator bi;
  3526. EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
  3527. {
  3528. struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
  3529. bitmap_copy (&bb_info->in, &bb_info->init);
  3530. df_md_transfer_function (bb_index);
  3531. }
  3532. }
  3533. static void
  3534. df_md_confluence_0 (basic_block bb)
  3535. {
  3536. struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
  3537. bitmap_copy (&bb_info->in, &bb_info->init);
  3538. }
/* The IN set of the target gets the OR of the OUT set of the source.  */
  3540. static bool
  3541. df_md_confluence_n (edge e)
  3542. {
  3543. bitmap op1 = &df_md_get_bb_info (e->dest->index)->in;
  3544. bitmap op2 = &df_md_get_bb_info (e->src->index)->out;
  3545. if (e->flags & EDGE_FAKE)
  3546. return false;
  3547. if (e->flags & EDGE_EH)
  3548. return bitmap_ior_and_compl_into (op1, op2,
  3549. regs_invalidated_by_call_regset);
  3550. else
  3551. return bitmap_ior_into (op1, op2);
  3552. }
  3553. /* Free all storage associated with the problem. */
  3554. static void
  3555. df_md_free (void)
  3556. {
  3557. struct df_md_problem_data *problem_data
  3558. = (struct df_md_problem_data *) df_md->problem_data;
  3559. bitmap_obstack_release (&problem_data->md_bitmaps);
  3560. free (problem_data);
  3561. df_md->problem_data = NULL;
  3562. df_md->block_info_size = 0;
  3563. free (df_md->block_info);
  3564. df_md->block_info = NULL;
  3565. free (df_md);
  3566. }
  3567. /* Debugging info at top of bb. */
  3568. static void
  3569. df_md_top_dump (basic_block bb, FILE *file)
  3570. {
  3571. struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
  3572. if (!bb_info)
  3573. return;
  3574. fprintf (file, ";; md in \t");
  3575. df_print_regset (file, &bb_info->in);
  3576. fprintf (file, ";; md init \t");
  3577. df_print_regset (file, &bb_info->init);
  3578. fprintf (file, ";; md gen \t");
  3579. df_print_regset (file, &bb_info->gen);
  3580. fprintf (file, ";; md kill \t");
  3581. df_print_regset (file, &bb_info->kill);
  3582. }
  3583. /* Debugging info at bottom of bb. */
  3584. static void
  3585. df_md_bottom_dump (basic_block bb, FILE *file)
  3586. {
  3587. struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
  3588. if (!bb_info)
  3589. return;
  3590. fprintf (file, ";; md out \t");
  3591. df_print_regset (file, &bb_info->out);
  3592. }
  3593. static struct df_problem problem_MD =
  3594. {
  3595. DF_MD, /* Problem id. */
  3596. DF_FORWARD, /* Direction. */
  3597. df_md_alloc, /* Allocate the problem specific data. */
  3598. df_md_reset, /* Reset global information. */
  3599. df_md_free_bb_info, /* Free basic block info. */
  3600. df_md_local_compute, /* Local compute function. */
  3601. df_md_init, /* Init the solution specific data. */
  3602. df_worklist_dataflow, /* Worklist solver. */
  3603. df_md_confluence_0, /* Confluence operator 0. */
  3604. df_md_confluence_n, /* Confluence operator n. */
  3605. df_md_transfer_function, /* Transfer function. */
  3606. NULL, /* Finalize function. */
  3607. df_md_free, /* Free all of the problem information. */
  3608. df_md_free, /* Remove this problem from the stack of dataflow problems. */
  3609. NULL, /* Debugging. */
  3610. df_md_top_dump, /* Debugging start block. */
  3611. df_md_bottom_dump, /* Debugging end block. */
  3612. NULL, /* Debugging start insn. */
  3613. NULL, /* Debugging end insn. */
  3614. NULL, /* Incremental solution verify start. */
  3615. NULL, /* Incremental solution verify end. */
  3616. NULL, /* Dependent problem. */
  3617. sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */
  3618. TV_DF_MD, /* Timing variable. */
  3619. false /* Reset blocks on dropping out of blocks_to_analyze. */
  3620. };
  3621. /* Create a new MD instance and add it to the existing instance
  3622. of DF. */
  3623. void
  3624. df_md_add_problem (void)
  3625. {
  3626. df_add_problem (&problem_MD);
  3627. }