tree-vrp.c 303 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
022210223102241022510226102271022810229102301023110232102331023410235102361023710238102391024010241102421024310244102451024610247102481024910250102511025210253102541025510256102571025810259102601026110262102631026410265102661026710268102691027010271102721027310274102751027610277102781027910280102811028210283102841028510286102871028810289102901029110292102931029410295102961029710298102991030010301103021030310304103051030610307103081030910310103111031210313103141031510316103171031810319103201032110322103231032410325103261032710328103291033010331103321033310334103351033610337103381033910340103411034210343103441034510346103471034810349103501035110352103531035410355103561035710358103591036010361103621036310364103651036610367103681036910370103711037210373103741037510376103771037810379103801038110382103831038410385103861038710388103891039010391103921039310394103951039610397103981039910400104011040210403104041040510406104071040810409104101041110412104131041410415104161041710418104191042010421104221042310424104251042610427104281042910430104311043210433104341043510436104371043810439104401044110442104431044410445104461044710448104491045010451104521045310454104551045610457104581045910460104611046210463104641046510466
  1. /* Support routines for Value Range Propagation (VRP).
  2. Copyright (C) 2005-2015 Free Software Foundation, Inc.
  3. Contributed by Diego Novillo <dnovillo@redhat.com>.
  4. This file is part of GCC.
  5. GCC is free software; you can redistribute it and/or modify
  6. it under the terms of the GNU General Public License as published by
  7. the Free Software Foundation; either version 3, or (at your option)
  8. any later version.
  9. GCC is distributed in the hope that it will be useful,
  10. but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  12. GNU General Public License for more details.
  13. You should have received a copy of the GNU General Public License
  14. along with GCC; see the file COPYING3. If not see
  15. <http://www.gnu.org/licenses/>. */
  16. #include "config.h"
  17. #include "system.h"
  18. #include "coretypes.h"
  19. #include "tm.h"
  20. #include "flags.h"
  21. #include "hash-set.h"
  22. #include "machmode.h"
  23. #include "vec.h"
  24. #include "double-int.h"
  25. #include "input.h"
  26. #include "alias.h"
  27. #include "symtab.h"
  28. #include "wide-int.h"
  29. #include "inchash.h"
  30. #include "tree.h"
  31. #include "fold-const.h"
  32. #include "stor-layout.h"
  33. #include "calls.h"
  34. #include "predict.h"
  35. #include "hard-reg-set.h"
  36. #include "function.h"
  37. #include "dominance.h"
  38. #include "cfg.h"
  39. #include "cfganal.h"
  40. #include "basic-block.h"
  41. #include "tree-ssa-alias.h"
  42. #include "internal-fn.h"
  43. #include "gimple-fold.h"
  44. #include "tree-eh.h"
  45. #include "gimple-expr.h"
  46. #include "is-a.h"
  47. #include "gimple.h"
  48. #include "gimple-iterator.h"
  49. #include "gimple-walk.h"
  50. #include "gimple-ssa.h"
  51. #include "tree-cfg.h"
  52. #include "tree-phinodes.h"
  53. #include "ssa-iterators.h"
  54. #include "stringpool.h"
  55. #include "tree-ssanames.h"
  56. #include "tree-ssa-loop-manip.h"
  57. #include "tree-ssa-loop-niter.h"
  58. #include "tree-ssa-loop.h"
  59. #include "tree-into-ssa.h"
  60. #include "tree-ssa.h"
  61. #include "tree-pass.h"
  62. #include "tree-dump.h"
  63. #include "gimple-pretty-print.h"
  64. #include "diagnostic-core.h"
  65. #include "intl.h"
  66. #include "cfgloop.h"
  67. #include "tree-scalar-evolution.h"
  68. #include "tree-ssa-propagate.h"
  69. #include "tree-chrec.h"
  70. #include "tree-ssa-threadupdate.h"
  71. #include "hashtab.h"
  72. #include "rtl.h"
  73. #include "statistics.h"
  74. #include "real.h"
  75. #include "fixed-value.h"
  76. #include "insn-config.h"
  77. #include "expmed.h"
  78. #include "dojump.h"
  79. #include "explow.h"
  80. #include "emit-rtl.h"
  81. #include "varasm.h"
  82. #include "stmt.h"
  83. #include "expr.h"
  84. #include "insn-codes.h"
  85. #include "optabs.h"
  86. #include "tree-ssa-threadedge.h"
/* Range of values that can be associated with an SSA_NAME after VRP
   has executed.  */
struct value_range_d
{
  /* Lattice value represented by this range.  */
  enum value_range_type type;

  /* Minimum and maximum values represented by this range.  These
     values should be interpreted as follows:

	- If TYPE is VR_UNDEFINED or VR_VARYING then MIN and MAX must
	  be NULL.

	- If TYPE == VR_RANGE then MIN holds the minimum value and
	  MAX holds the maximum value of the range [MIN, MAX].

	- If TYPE == ANTI_RANGE the variable is known to NOT
	  take any values in the range [MIN, MAX].  */
  tree min;
  tree max;

  /* Set of SSA names whose value ranges are equivalent to this one.
     This set is only valid when TYPE is VR_RANGE or VR_ANTI_RANGE.  */
  bitmap equiv;
};

typedef struct value_range_d value_range_t;

/* Initializer for a value_range_t: the UNDEFINED lattice bottom with
   no bounds and no equivalence set.  */
#define VR_INITIALIZER { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL }
/* Set of SSA names found live during the RPO traversal of the function
   for still active basic-blocks.  Indexed by basic-block index; each
   element is an sbitmap over SSA name versions.  */
static sbitmap *live;
  112. /* Return true if the SSA name NAME is live on the edge E. */
  113. static bool
  114. live_on_edge (edge e, tree name)
  115. {
  116. return (live[e->dest->index]
  117. && bitmap_bit_p (live[e->dest->index], SSA_NAME_VERSION (name)));
  118. }
/* Local functions.  Forward declarations for helpers defined later in
   this file.  */
static int compare_values (tree val1, tree val2);
static int compare_values_warnv (tree val1, tree val2, bool *);
static void vrp_meet (value_range_t *, value_range_t *);
static void vrp_intersect_ranges (value_range_t *, value_range_t *);
static tree vrp_evaluate_conditional_warnv_with_ops (enum tree_code,
						     tree, tree, bool, bool *,
						     bool *);
/* Location information for ASSERT_EXPRs.  Each instance of this
   structure describes an ASSERT_EXPR for an SSA name.  Since a single
   SSA name may have more than one assertion associated with it, these
   locations are kept in a linked list attached to the corresponding
   SSA name.  */
struct assert_locus_d
{
  /* Basic block where the assertion would be inserted.  */
  basic_block bb;

  /* Some assertions need to be inserted on an edge (e.g., assertions
     generated by COND_EXPRs).  In those cases, BB will be NULL.  */
  edge e;

  /* Pointer to the statement that generated this assertion.  */
  gimple_stmt_iterator si;

  /* Predicate code for the ASSERT_EXPR.  Must be COMPARISON_CLASS_P.  */
  enum tree_code comp_code;

  /* Value being compared against.  */
  tree val;

  /* Expression to compare.  */
  tree expr;

  /* Next node in the linked list.  */
  struct assert_locus_d *next;
};

typedef struct assert_locus_d *assert_locus_t;
/* If bit I is present, it means that SSA name N_i has a list of
   assertions that should be inserted in the IL.  */
static bitmap need_assert_for;

/* Array of locations lists where to insert assertions.  ASSERTS_FOR[I]
   holds a list of ASSERT_LOCUS_T nodes that describe where
   ASSERT_EXPRs for SSA name N_I should be inserted.  */
static assert_locus_t *asserts_for;

/* Value range array.  After propagation, VR_VALUE[I] holds the range
   of values that SSA name N_I may take.  NUM_VR_VALUES is the size of
   the array; VALUES_PROPAGATED is set once propagation has finished.  */
static unsigned num_vr_values;
static value_range_t **vr_value;
static bool values_propagated;

/* For a PHI node which sets SSA name N_I, VR_COUNTS[I] holds the
   number of executable edges we saw the last time we visited the
   node.  */
static int *vr_phi_edge_counts;

/* A deferred update of a GIMPLE_SWITCH statement: STMT's case labels
   are to be replaced by the label vector VEC.  */
typedef struct {
  gswitch *stmt;
  tree vec;
} switch_update;

/* Edges and switch statements queued for modification after
   propagation completes.  */
static vec<edge> to_remove_edges;
static vec<switch_update> to_update_switch_stmts;
  173. /* Return the maximum value for TYPE. */
  174. static inline tree
  175. vrp_val_max (const_tree type)
  176. {
  177. if (!INTEGRAL_TYPE_P (type))
  178. return NULL_TREE;
  179. return TYPE_MAX_VALUE (type);
  180. }
  181. /* Return the minimum value for TYPE. */
  182. static inline tree
  183. vrp_val_min (const_tree type)
  184. {
  185. if (!INTEGRAL_TYPE_P (type))
  186. return NULL_TREE;
  187. return TYPE_MIN_VALUE (type);
  188. }
  189. /* Return whether VAL is equal to the maximum value of its type. This
  190. will be true for a positive overflow infinity. We can't do a
  191. simple equality comparison with TYPE_MAX_VALUE because C typedefs
  192. and Ada subtypes can produce types whose TYPE_MAX_VALUE is not ==
  193. to the integer constant with the same value in the type. */
  194. static inline bool
  195. vrp_val_is_max (const_tree val)
  196. {
  197. tree type_max = vrp_val_max (TREE_TYPE (val));
  198. return (val == type_max
  199. || (type_max != NULL_TREE
  200. && operand_equal_p (val, type_max, 0)));
  201. }
  202. /* Return whether VAL is equal to the minimum value of its type. This
  203. will be true for a negative overflow infinity. */
  204. static inline bool
  205. vrp_val_is_min (const_tree val)
  206. {
  207. tree type_min = vrp_val_min (TREE_TYPE (val));
  208. return (val == type_min
  209. || (type_min != NULL_TREE
  210. && operand_equal_p (val, type_min, 0)));
  211. }
  212. /* Return whether TYPE should use an overflow infinity distinct from
  213. TYPE_{MIN,MAX}_VALUE. We use an overflow infinity value to
  214. represent a signed overflow during VRP computations. An infinity
  215. is distinct from a half-range, which will go from some number to
  216. TYPE_{MIN,MAX}_VALUE. */
  217. static inline bool
  218. needs_overflow_infinity (const_tree type)
  219. {
  220. return INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type);
  221. }
  222. /* Return whether TYPE can support our overflow infinity
  223. representation: we use the TREE_OVERFLOW flag, which only exists
  224. for constants. If TYPE doesn't support this, we don't optimize
  225. cases which would require signed overflow--we drop them to
  226. VARYING. */
  227. static inline bool
  228. supports_overflow_infinity (const_tree type)
  229. {
  230. tree min = vrp_val_min (type), max = vrp_val_max (type);
  231. #ifdef ENABLE_CHECKING
  232. gcc_assert (needs_overflow_infinity (type));
  233. #endif
  234. return (min != NULL_TREE
  235. && CONSTANT_CLASS_P (min)
  236. && max != NULL_TREE
  237. && CONSTANT_CLASS_P (max));
  238. }
  239. /* VAL is the maximum or minimum value of a type. Return a
  240. corresponding overflow infinity. */
  241. static inline tree
  242. make_overflow_infinity (tree val)
  243. {
  244. gcc_checking_assert (val != NULL_TREE && CONSTANT_CLASS_P (val));
  245. val = copy_node (val);
  246. TREE_OVERFLOW (val) = 1;
  247. return val;
  248. }
  249. /* Return a negative overflow infinity for TYPE. */
  250. static inline tree
  251. negative_overflow_infinity (tree type)
  252. {
  253. gcc_checking_assert (supports_overflow_infinity (type));
  254. return make_overflow_infinity (vrp_val_min (type));
  255. }
  256. /* Return a positive overflow infinity for TYPE. */
  257. static inline tree
  258. positive_overflow_infinity (tree type)
  259. {
  260. gcc_checking_assert (supports_overflow_infinity (type));
  261. return make_overflow_infinity (vrp_val_max (type));
  262. }
  263. /* Return whether VAL is a negative overflow infinity. */
  264. static inline bool
  265. is_negative_overflow_infinity (const_tree val)
  266. {
  267. return (TREE_OVERFLOW_P (val)
  268. && needs_overflow_infinity (TREE_TYPE (val))
  269. && vrp_val_is_min (val));
  270. }
  271. /* Return whether VAL is a positive overflow infinity. */
  272. static inline bool
  273. is_positive_overflow_infinity (const_tree val)
  274. {
  275. return (TREE_OVERFLOW_P (val)
  276. && needs_overflow_infinity (TREE_TYPE (val))
  277. && vrp_val_is_max (val));
  278. }
  279. /* Return whether VAL is a positive or negative overflow infinity. */
  280. static inline bool
  281. is_overflow_infinity (const_tree val)
  282. {
  283. return (TREE_OVERFLOW_P (val)
  284. && needs_overflow_infinity (TREE_TYPE (val))
  285. && (vrp_val_is_min (val) || vrp_val_is_max (val)));
  286. }
  287. /* Return whether STMT has a constant rhs that is_overflow_infinity. */
  288. static inline bool
  289. stmt_overflow_infinity (gimple stmt)
  290. {
  291. if (is_gimple_assign (stmt)
  292. && get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) ==
  293. GIMPLE_SINGLE_RHS)
  294. return is_overflow_infinity (gimple_assign_rhs1 (stmt));
  295. return false;
  296. }
  297. /* If VAL is now an overflow infinity, return VAL. Otherwise, return
  298. the same value with TREE_OVERFLOW clear. This can be used to avoid
  299. confusing a regular value with an overflow value. */
  300. static inline tree
  301. avoid_overflow_infinity (tree val)
  302. {
  303. if (!is_overflow_infinity (val))
  304. return val;
  305. if (vrp_val_is_max (val))
  306. return vrp_val_max (TREE_TYPE (val));
  307. else
  308. {
  309. gcc_checking_assert (vrp_val_is_min (val));
  310. return vrp_val_min (TREE_TYPE (val));
  311. }
  312. }
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  ARG must be a pointer-typed
   PARM_DECL of the current function.  */

static bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (arg)));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  fntype = TREE_TYPE (current_function_decl);
  /* Walk the attribute chain; lookup_attribute advances ATTRS to the
     next "nonnull" occurrence, and the loop header then steps past it
     so repeated "nonnull" attributes are all examined.  */
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature
	 (1-based, per the attribute's convention).  */
      for (arg_num = 1, t = DECL_ARGUMENTS (current_function_decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
  352. /* Set value range VR to VR_UNDEFINED. */
  353. static inline void
  354. set_value_range_to_undefined (value_range_t *vr)
  355. {
  356. vr->type = VR_UNDEFINED;
  357. vr->min = vr->max = NULL_TREE;
  358. if (vr->equiv)
  359. bitmap_clear (vr->equiv);
  360. }
  361. /* Set value range VR to VR_VARYING. */
  362. static inline void
  363. set_value_range_to_varying (value_range_t *vr)
  364. {
  365. vr->type = VR_VARYING;
  366. vr->min = vr->max = NULL_TREE;
  367. if (vr->equiv)
  368. bitmap_clear (vr->equiv);
  369. }
/* Set value range VR to {T, MIN, MAX, EQUIV}.  */

static void
set_value_range (value_range_t *vr, enum value_range_type t, tree min,
		 tree max, bitmap equiv)
{
#if defined ENABLE_CHECKING
  /* Check the validity of the range.  */
  if (t == VR_RANGE || t == VR_ANTI_RANGE)
    {
      int cmp;

      gcc_assert (min && max);

      /* TREE_OVERFLOW is only allowed on bounds that are our overflow
	 infinity markers.  */
      gcc_assert ((!TREE_OVERFLOW_P (min) || is_overflow_infinity (min))
		  && (!TREE_OVERFLOW_P (max) || is_overflow_infinity (max)));

      /* An anti-range covering the whole integral type would be empty.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (min)) && t == VR_ANTI_RANGE)
	gcc_assert (!vrp_val_is_min (min) || !vrp_val_is_max (max));

      /* MIN must not compare greater than MAX (-2 means "don't know").  */
      cmp = compare_values (min, max);
      gcc_assert (cmp == 0 || cmp == -1 || cmp == -2);

      /* At most one bound may be an overflow infinity.  */
      if (needs_overflow_infinity (TREE_TYPE (min)))
	gcc_assert (!is_overflow_infinity (min)
		    || !is_overflow_infinity (max));
    }

  if (t == VR_UNDEFINED || t == VR_VARYING)
    gcc_assert (min == NULL_TREE && max == NULL_TREE);

  if (t == VR_UNDEFINED || t == VR_VARYING)
    gcc_assert (equiv == NULL || bitmap_empty_p (equiv));
#endif

  vr->type = t;
  vr->min = min;
  vr->max = max;

  /* Since updating the equivalence set involves deep copying the
     bitmaps, only do it if absolutely necessary.

     Allocate VR->EQUIV lazily: only when a non-NULL EQUIV is being
     installed into a range that has none yet.  */
  if (vr->equiv == NULL
      && equiv != NULL)
    vr->equiv = BITMAP_ALLOC (NULL);

  /* If EQUIV aliases VR->EQUIV the set is already correct; otherwise
     copy or clear.  (When both are NULL the branch is skipped, so
     VR->EQUIV is never dereferenced while NULL.)  */
  if (equiv != vr->equiv)
    {
      if (equiv && !bitmap_empty_p (equiv))
	bitmap_copy (vr->equiv, equiv);
      else
	bitmap_clear (vr->equiv);
    }
}
/* Set value range VR to the canonical form of {T, MIN, MAX, EQUIV}.
   This means adjusting T, MIN and MAX representing the case of a
   wrapping range with MAX < MIN covering [MIN, type_max] U [type_min, MAX]
   as anti-range ~[MAX+1, MIN-1].  Likewise for wrapping anti-ranges.
   In corner cases where MAX+1 or MIN-1 wraps this will fall back
   to varying.
   This routine exists to ease canonicalization in the case where we
   extract ranges from var + CST op limit.  */

static void
set_and_canonicalize_value_range (value_range_t *vr, enum value_range_type t,
				  tree min, tree max, bitmap equiv)
{
  /* Use the canonical setters for VR_UNDEFINED and VR_VARYING.  */
  if (t == VR_UNDEFINED)
    {
      set_value_range_to_undefined (vr);
      return;
    }
  else if (t == VR_VARYING)
    {
      set_value_range_to_varying (vr);
      return;
    }

  /* Nothing to canonicalize for symbolic ranges.  */
  if (TREE_CODE (min) != INTEGER_CST
      || TREE_CODE (max) != INTEGER_CST)
    {
      set_value_range (vr, t, min, max, equiv);
      return;
    }

  /* Wrong order for min and max, to swap them and the VR type we need
     to adjust them.  */
  if (tree_int_cst_lt (max, min))
    {
      tree one, tmp;

      /* For one bit precision if max < min, then the swapped
	 range covers all values, so for VR_RANGE it is varying and
	 for VR_ANTI_RANGE empty range, so drop to varying as well.  */
      if (TYPE_PRECISION (TREE_TYPE (min)) == 1)
	{
	  set_value_range_to_varying (vr);
	  return;
	}

      /* Swap the bounds as [MAX+1, MIN-1] and flip the range kind.  */
      one = build_int_cst (TREE_TYPE (min), 1);
      tmp = int_const_binop (PLUS_EXPR, max, one);
      max = int_const_binop (MINUS_EXPR, min, one);
      min = tmp;

      /* There's one corner case, if we had [C+1, C] before we now have
	 that again.  But this represents an empty value range, so drop
	 to varying in this case.  */
      if (tree_int_cst_lt (max, min))
	{
	  set_value_range_to_varying (vr);
	  return;
	}

      t = t == VR_RANGE ? VR_ANTI_RANGE : VR_RANGE;
    }

  /* Anti-ranges that can be represented as ranges should be so.  */
  if (t == VR_ANTI_RANGE)
    {
      bool is_min = vrp_val_is_min (min);
      bool is_max = vrp_val_is_max (max);

      if (is_min && is_max)
	{
	  /* We cannot deal with empty ranges, drop to varying.
	     ??? This could be VR_UNDEFINED instead.  */
	  set_value_range_to_varying (vr);
	  return;
	}
      else if (TYPE_PRECISION (TREE_TYPE (min)) == 1
	       && (is_min || is_max))
	{
	  /* Non-empty boolean ranges can always be represented
	     as a singleton range.  */
	  if (is_min)
	    min = max = vrp_val_max (TREE_TYPE (min));
	  else
	    min = max = vrp_val_min (TREE_TYPE (min));
	  t = VR_RANGE;
	}
      else if (is_min
	       /* As a special exception preserve non-null ranges.  */
	       && !(TYPE_UNSIGNED (TREE_TYPE (min))
		    && integer_zerop (max)))
	{
	  /* ~[TYPE_MIN, MAX] becomes [MAX+1, TYPE_MAX].  */
	  tree one = build_int_cst (TREE_TYPE (max), 1);
	  min = int_const_binop (PLUS_EXPR, max, one);
	  max = vrp_val_max (TREE_TYPE (max));
	  t = VR_RANGE;
	}
      else if (is_max)
	{
	  /* ~[MIN, TYPE_MAX] becomes [TYPE_MIN, MIN-1].  */
	  tree one = build_int_cst (TREE_TYPE (min), 1);
	  max = int_const_binop (MINUS_EXPR, min, one);
	  min = vrp_val_min (TREE_TYPE (min));
	  t = VR_RANGE;
	}
    }

  /* Drop [-INF(OVF), +INF(OVF)] to varying.  */
  if (needs_overflow_infinity (TREE_TYPE (min))
      && is_overflow_infinity (min)
      && is_overflow_infinity (max))
    {
      set_value_range_to_varying (vr);
      return;
    }

  set_value_range (vr, t, min, max, equiv);
}
/* Copy value range FROM into value range TO.  Delegates to
   set_value_range so the equivalence bitmap is deep-copied, not
   aliased.  */

static inline void
copy_value_range (value_range_t *to, value_range_t *from)
{
  set_value_range (to, from->type, from->min, from->max, from->equiv);
}
  526. /* Set value range VR to a single value. This function is only called
  527. with values we get from statements, and exists to clear the
  528. TREE_OVERFLOW flag so that we don't think we have an overflow
  529. infinity when we shouldn't. */
  530. static inline void
  531. set_value_range_to_value (value_range_t *vr, tree val, bitmap equiv)
  532. {
  533. gcc_assert (is_gimple_min_invariant (val));
  534. if (TREE_OVERFLOW_P (val))
  535. val = drop_tree_overflow (val);
  536. set_value_range (vr, VR_RANGE, val, val, equiv);
  537. }
  538. /* Set value range VR to a non-negative range of type TYPE.
  539. OVERFLOW_INFINITY indicates whether to use an overflow infinity
  540. rather than TYPE_MAX_VALUE; this should be true if we determine
  541. that the range is nonnegative based on the assumption that signed
  542. overflow does not occur. */
  543. static inline void
  544. set_value_range_to_nonnegative (value_range_t *vr, tree type,
  545. bool overflow_infinity)
  546. {
  547. tree zero;
  548. if (overflow_infinity && !supports_overflow_infinity (type))
  549. {
  550. set_value_range_to_varying (vr);
  551. return;
  552. }
  553. zero = build_int_cst (type, 0);
  554. set_value_range (vr, VR_RANGE, zero,
  555. (overflow_infinity
  556. ? positive_overflow_infinity (type)
  557. : TYPE_MAX_VALUE (type)),
  558. vr->equiv);
  559. }
  560. /* Set value range VR to a non-NULL range of type TYPE. */
  561. static inline void
  562. set_value_range_to_nonnull (value_range_t *vr, tree type)
  563. {
  564. tree zero = build_int_cst (type, 0);
  565. set_value_range (vr, VR_ANTI_RANGE, zero, zero, vr->equiv);
  566. }
  567. /* Set value range VR to a NULL range of type TYPE. */
  568. static inline void
  569. set_value_range_to_null (value_range_t *vr, tree type)
  570. {
  571. set_value_range_to_value (vr, build_int_cst (type, 0), vr->equiv);
  572. }
  573. /* Set value range VR to a range of a truthvalue of type TYPE. */
  574. static inline void
  575. set_value_range_to_truthvalue (value_range_t *vr, tree type)
  576. {
  577. if (TYPE_PRECISION (type) == 1)
  578. set_value_range_to_varying (vr);
  579. else
  580. set_value_range (vr, VR_RANGE,
  581. build_int_cst (type, 0), build_int_cst (type, 1),
  582. vr->equiv);
  583. }
/* If abs (min) < abs (max), set VR to [-max, max], if
   abs (min) >= abs (max), set VR to [-min, min].  MIN and MAX must be
   INTEGER_CSTs of a signed integral type.  */

static void
abs_extent_range (value_range_t *vr, tree min, tree max)
{
  int cmp;

  gcc_assert (TREE_CODE (min) == INTEGER_CST);
  gcc_assert (TREE_CODE (max) == INTEGER_CST);
  gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (min)));
  gcc_assert (!TYPE_UNSIGNED (TREE_TYPE (min)));
  min = fold_unary (ABS_EXPR, TREE_TYPE (min), min);
  max = fold_unary (ABS_EXPR, TREE_TYPE (max), max);
  /* If taking the absolute value overflowed (e.g. abs of the type's
     minimum), the extent cannot be represented; drop to varying.  */
  if (TREE_OVERFLOW (min) || TREE_OVERFLOW (max))
    {
      set_value_range_to_varying (vr);
      return;
    }
  cmp = compare_values (min, max);
  if (cmp == -1)
    /* abs (min) < abs (max): extent is [-max, max].  */
    min = fold_unary (NEGATE_EXPR, TREE_TYPE (min), max);
  else if (cmp == 0 || cmp == 1)
    {
      /* abs (min) >= abs (max): extent is [-min, min].  */
      max = min;
      min = fold_unary (NEGATE_EXPR, TREE_TYPE (min), min);
    }
  else
    {
      /* compare_values could not order the bounds; give up.  */
      set_value_range_to_varying (vr);
      return;
    }
  set_and_canonicalize_value_range (vr, VR_RANGE, min, max, NULL);
}
/* Return value range information for VAR.

   If we have no values ranges recorded (ie, VRP is not running), then
   return NULL.  Otherwise create an empty range if none existed for VAR.  */

static value_range_t *
get_value_range (const_tree var)
{
  /* Shared read-only VARYING range handed out when we must not (or
     cannot) allocate a fresh per-name range.  */
  static const struct value_range_d vr_const_varying
    = { VR_VARYING, NULL_TREE, NULL_TREE, NULL };
  value_range_t *vr;
  tree sym;
  unsigned ver = SSA_NAME_VERSION (var);

  /* If we have no recorded ranges, then return NULL.  */
  if (! vr_value)
    return NULL;

  /* If we query the range for a new SSA name return an unmodifiable VARYING.
     We should get here at most from the substitute-and-fold stage which
     will never try to change values.  */
  if (ver >= num_vr_values)
    return CONST_CAST (value_range_t *, &vr_const_varying);

  vr = vr_value[ver];
  if (vr)
    return vr;

  /* After propagation finished do not allocate new value-ranges.  */
  if (values_propagated)
    return CONST_CAST (value_range_t *, &vr_const_varying);

  /* Create a default value range.  XCNEW zero-fills, so the range
     starts out as VR_UNDEFINED.  */
  vr_value[ver] = vr = XCNEW (value_range_t);

  /* Defer allocating the equivalence set.  */
  vr->equiv = NULL;

  /* If VAR is a default definition of a parameter, the variable can
     take any value in VAR's type.  */
  if (SSA_NAME_IS_DEFAULT_DEF (var))
    {
      sym = SSA_NAME_VAR (var);
      if (TREE_CODE (sym) == PARM_DECL)
	{
	  /* Try to use the "nonnull" attribute to create ~[0, 0]
	     anti-ranges for pointers.  Note that this is only valid with
	     default definitions of PARM_DECLs.  */
	  if (POINTER_TYPE_P (TREE_TYPE (sym))
	      && nonnull_arg_p (sym))
	    set_value_range_to_nonnull (vr, TREE_TYPE (sym));
	  else
	    set_value_range_to_varying (vr);
	}
      else if (TREE_CODE (sym) == RESULT_DECL
	       && DECL_BY_REFERENCE (sym))
	/* The pointer backing a by-reference result is never NULL.  */
	set_value_range_to_nonnull (vr, TREE_TYPE (sym));
    }

  return vr;
}
  667. /* Return true, if VAL1 and VAL2 are equal values for VRP purposes. */
  668. static inline bool
  669. vrp_operand_equal_p (const_tree val1, const_tree val2)
  670. {
  671. if (val1 == val2)
  672. return true;
  673. if (!val1 || !val2 || !operand_equal_p (val1, val2, 0))
  674. return false;
  675. return is_overflow_infinity (val1) == is_overflow_infinity (val2);
  676. }
  677. /* Return true, if the bitmaps B1 and B2 are equal. */
  678. static inline bool
  679. vrp_bitmap_equal_p (const_bitmap b1, const_bitmap b2)
  680. {
  681. return (b1 == b2
  682. || ((!b1 || bitmap_empty_p (b1))
  683. && (!b2 || bitmap_empty_p (b2)))
  684. || (b1 && b2
  685. && bitmap_equal_p (b1, b2)));
  686. }
/* Update the value range and equivalence set for variable VAR to
   NEW_VR.  Return true if NEW_VR is different from VAR's previous
   value.

   NOTE: This function assumes that NEW_VR is a temporary value range
   object created for the sole purpose of updating VAR's range.  The
   storage used by the equivalence set from NEW_VR will be freed by
   this function.  Do not call update_value_range when NEW_VR
   is the range object associated with another SSA name.  */

static inline bool
update_value_range (const_tree var, value_range_t *new_vr)
{
  value_range_t *old_vr;
  bool is_new;

  /* If there is a value-range on the SSA name from earlier analysis
     factor that in.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
    {
      wide_int min, max;
      value_range_type rtype = get_range_info (var, &min, &max);
      if (rtype == VR_RANGE || rtype == VR_ANTI_RANGE)
	{
	  /* Intersect NEW_VR with the previously recorded range; the
	     temporary carries no equivalences of its own.  */
	  value_range_d nr;
	  nr.type = rtype;
	  nr.min = wide_int_to_tree (TREE_TYPE (var), min);
	  nr.max = wide_int_to_tree (TREE_TYPE (var), max);
	  nr.equiv = NULL;
	  vrp_intersect_ranges (new_vr, &nr);
	}
    }

  /* Update the value range, if necessary.  */
  old_vr = get_value_range (var);
  is_new = old_vr->type != new_vr->type
	   || !vrp_operand_equal_p (old_vr->min, new_vr->min)
	   || !vrp_operand_equal_p (old_vr->max, new_vr->max)
	   || !vrp_bitmap_equal_p (old_vr->equiv, new_vr->equiv);

  if (is_new)
    {
      /* Do not allow transitions up the lattice.  The following
	 is slightly more awkward than just new_vr->type < old_vr->type
	 because VR_RANGE and VR_ANTI_RANGE need to be considered
	 the same.  We may not have is_new when transitioning to
	 UNDEFINED.  If old_vr->type is VARYING, we shouldn't be
	 called.  */
      if (new_vr->type == VR_UNDEFINED)
	{
	  /* Going back to UNDEFINED would move up the lattice; drop
	     to VARYING instead and report a change.  */
	  BITMAP_FREE (new_vr->equiv);
	  set_value_range_to_varying (old_vr);
	  set_value_range_to_varying (new_vr);
	  return true;
	}
      else
	set_value_range (old_vr, new_vr->type, new_vr->min, new_vr->max,
			 new_vr->equiv);
    }

  /* NEW_VR is a throwaway object; release its equivalence set (the
     recorded range made its own use of it via set_value_range).  */
  BITMAP_FREE (new_vr->equiv);

  return is_new;
}
  744. /* Add VAR and VAR's equivalence set to EQUIV. This is the central
  745. point where equivalence processing can be turned on/off. */
  746. static void
  747. add_equivalence (bitmap *equiv, const_tree var)
  748. {
  749. unsigned ver = SSA_NAME_VERSION (var);
  750. value_range_t *vr = vr_value[ver];
  751. if (*equiv == NULL)
  752. *equiv = BITMAP_ALLOC (NULL);
  753. bitmap_set_bit (*equiv, ver);
  754. if (vr && vr->equiv)
  755. bitmap_ior_into (*equiv, vr->equiv);
  756. }
  757. /* Return true if VR is ~[0, 0]. */
  758. static inline bool
  759. range_is_nonnull (value_range_t *vr)
  760. {
  761. return vr->type == VR_ANTI_RANGE
  762. && integer_zerop (vr->min)
  763. && integer_zerop (vr->max);
  764. }
  765. /* Return true if VR is [0, 0]. */
  766. static inline bool
  767. range_is_null (value_range_t *vr)
  768. {
  769. return vr->type == VR_RANGE
  770. && integer_zerop (vr->min)
  771. && integer_zerop (vr->max);
  772. }
  773. /* Return true if max and min of VR are INTEGER_CST. It's not necessary
  774. a singleton. */
  775. static inline bool
  776. range_int_cst_p (value_range_t *vr)
  777. {
  778. return (vr->type == VR_RANGE
  779. && TREE_CODE (vr->max) == INTEGER_CST
  780. && TREE_CODE (vr->min) == INTEGER_CST);
  781. }
  782. /* Return true if VR is a INTEGER_CST singleton. */
  783. static inline bool
  784. range_int_cst_singleton_p (value_range_t *vr)
  785. {
  786. return (range_int_cst_p (vr)
  787. && !is_overflow_infinity (vr->min)
  788. && !is_overflow_infinity (vr->max)
  789. && tree_int_cst_equal (vr->min, vr->max));
  790. }
  791. /* Return true if value range VR involves at least one symbol. */
  792. static inline bool
  793. symbolic_range_p (value_range_t *vr)
  794. {
  795. return (!is_gimple_min_invariant (vr->min)
  796. || !is_gimple_min_invariant (vr->max));
  797. }
/* Return the single symbol (an SSA_NAME) contained in T if any, or NULL_TREE
   otherwise.  We only handle additive operations and set NEG to true if the
   symbol is negated and INV to the invariant part, if any.  */

static tree
get_single_symbol (tree t, bool *neg, tree *inv)
{
  bool neg_;
  tree inv_;

  if (TREE_CODE (t) == PLUS_EXPR
      || TREE_CODE (t) == POINTER_PLUS_EXPR
      || TREE_CODE (t) == MINUS_EXPR)
    {
      /* Peel off the invariant operand; when the invariant is the
	 first operand of a MINUS the remaining symbol is negated.  */
      if (is_gimple_min_invariant (TREE_OPERAND (t, 0)))
	{
	  neg_ = (TREE_CODE (t) == MINUS_EXPR);
	  inv_ = TREE_OPERAND (t, 0);
	  t = TREE_OPERAND (t, 1);
	}
      else if (is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	{
	  neg_ = false;
	  inv_ = TREE_OPERAND (t, 1);
	  t = TREE_OPERAND (t, 0);
	}
      else
	/* Neither operand is invariant, so T contains more than one
	   symbol.  */
	return NULL_TREE;
    }
  else
    {
      neg_ = false;
      inv_ = NULL_TREE;
    }

  /* Strip an explicit negation, flipping NEG_.  */
  if (TREE_CODE (t) == NEGATE_EXPR)
    {
      t = TREE_OPERAND (t, 0);
      neg_ = !neg_;
    }

  if (TREE_CODE (t) != SSA_NAME)
    return NULL_TREE;

  /* Only write the out-parameters on success.  */
  *neg = neg_;
  *inv = inv_;
  return t;
}
  841. /* The reverse operation: build a symbolic expression with TYPE
  842. from symbol SYM, negated according to NEG, and invariant INV. */
  843. static tree
  844. build_symbolic_expr (tree type, tree sym, bool neg, tree inv)
  845. {
  846. const bool pointer_p = POINTER_TYPE_P (type);
  847. tree t = sym;
  848. if (neg)
  849. t = build1 (NEGATE_EXPR, type, t);
  850. if (integer_zerop (inv))
  851. return t;
  852. return build2 (pointer_p ? POINTER_PLUS_EXPR : PLUS_EXPR, type, t, inv);
  853. }
  854. /* Return true if value range VR involves exactly one symbol SYM. */
  855. static bool
  856. symbolic_range_based_on_p (value_range_t *vr, const_tree sym)
  857. {
  858. bool neg, min_has_symbol, max_has_symbol;
  859. tree inv;
  860. if (is_gimple_min_invariant (vr->min))
  861. min_has_symbol = false;
  862. else if (get_single_symbol (vr->min, &neg, &inv) == sym)
  863. min_has_symbol = true;
  864. else
  865. return false;
  866. if (is_gimple_min_invariant (vr->max))
  867. max_has_symbol = false;
  868. else if (get_single_symbol (vr->max, &neg, &inv) == sym)
  869. max_has_symbol = true;
  870. else
  871. return false;
  872. return (min_has_symbol || max_has_symbol);
  873. }
  874. /* Return true if value range VR uses an overflow infinity. */
  875. static inline bool
  876. overflow_infinity_range_p (value_range_t *vr)
  877. {
  878. return (vr->type == VR_RANGE
  879. && (is_overflow_infinity (vr->min)
  880. || is_overflow_infinity (vr->max)));
  881. }
  882. /* Return false if we can not make a valid comparison based on VR;
  883. this will be the case if it uses an overflow infinity and overflow
  884. is not undefined (i.e., -fno-strict-overflow is in effect).
  885. Otherwise return true, and set *STRICT_OVERFLOW_P to true if VR
  886. uses an overflow infinity. */
  887. static bool
  888. usable_range_p (value_range_t *vr, bool *strict_overflow_p)
  889. {
  890. gcc_assert (vr->type == VR_RANGE);
  891. if (is_overflow_infinity (vr->min))
  892. {
  893. *strict_overflow_p = true;
  894. if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (vr->min)))
  895. return false;
  896. }
  897. if (is_overflow_infinity (vr->max))
  898. {
  899. *strict_overflow_p = true;
  900. if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (vr->max)))
  901. return false;
  902. }
  903. return true;
  904. }
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* Dispatch on the shape of the RHS to the matching fold-const
     query.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p);
    case GIMPLE_TERNARY_RHS:
      /* No fold-const helper handles ternary RHSes; answer
	 conservatively.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p);
    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    default:
      gcc_unreachable ();
    }
}
  937. /* Return true if return value of call STMT is know to be non-negative.
  938. If the return value is based on the assumption that signed overflow is
  939. undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
  940. *STRICT_OVERFLOW_P.*/
  941. static bool
  942. gimple_call_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
  943. {
  944. tree arg0 = gimple_call_num_args (stmt) > 0 ?
  945. gimple_call_arg (stmt, 0) : NULL_TREE;
  946. tree arg1 = gimple_call_num_args (stmt) > 1 ?
  947. gimple_call_arg (stmt, 1) : NULL_TREE;
  948. return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
  949. gimple_call_fndecl (stmt),
  950. arg0,
  951. arg1,
  952. strict_overflow_p);
  953. }
  954. /* Return true if STMT is know to to compute a non-negative value.
  955. If the return value is based on the assumption that signed overflow is
  956. undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
  957. *STRICT_OVERFLOW_P.*/
  958. static bool
  959. gimple_stmt_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
  960. {
  961. switch (gimple_code (stmt))
  962. {
  963. case GIMPLE_ASSIGN:
  964. return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p);
  965. case GIMPLE_CALL:
  966. return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p);
  967. default:
  968. gcc_unreachable ();
  969. }
  970. }
/* Return true if the result of assignment STMT is known to be non-zero.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
gimple_assign_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* Dispatch on the shape of the RHS to the matching fold-const
     query.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonzero_warnv_p (gimple_assign_rhs_code (stmt),
					 gimple_expr_type (stmt),
					 gimple_assign_rhs1 (stmt),
					 strict_overflow_p);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonzero_warnv_p (gimple_assign_rhs_code (stmt),
					  gimple_expr_type (stmt),
					  gimple_assign_rhs1 (stmt),
					  gimple_assign_rhs2 (stmt),
					  strict_overflow_p);
    case GIMPLE_TERNARY_RHS:
      /* No fold-const helper handles ternary RHSes; answer
	 conservatively.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonzero_warnv_p (gimple_assign_rhs1 (stmt),
					  strict_overflow_p);
    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    default:
      gcc_unreachable ();
    }
}
/* Return true if STMT is known to compute a non-zero value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
gimple_stmt_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonzero_warnv_p (stmt, strict_overflow_p);
    case GIMPLE_CALL:
      {
	tree fndecl = gimple_call_fndecl (stmt);
	if (!fndecl) return false;
	/* A throwing (non-nothrow) operator new is treated as returning
	   non-NULL, unless -fcheck-new is in effect or null-pointer
	   check deletion is disabled.  */
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	/* Likewise honor the returns_nonnull function attribute.  */
	if (flag_delete_null_pointer_checks &&
	    lookup_attribute ("returns_nonnull",
			      TYPE_ATTRIBUTES (gimple_call_fntype (stmt))))
	  return true;
	/* Finally, alloca-like calls count as non-zero.  */
	return gimple_alloca_call_p (stmt);
      }
    default:
      gcc_unreachable ();
    }
}
  1032. /* Like tree_expr_nonzero_warnv_p, but this function uses value ranges
  1033. obtained so far. */
  1034. static bool
  1035. vrp_stmt_computes_nonzero (gimple stmt, bool *strict_overflow_p)
  1036. {
  1037. if (gimple_stmt_nonzero_warnv_p (stmt, strict_overflow_p))
  1038. return true;
  1039. /* If we have an expression of the form &X->a, then the expression
  1040. is nonnull if X is nonnull. */
  1041. if (is_gimple_assign (stmt)
  1042. && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
  1043. {
  1044. tree expr = gimple_assign_rhs1 (stmt);
  1045. tree base = get_base_address (TREE_OPERAND (expr, 0));
  1046. if (base != NULL_TREE
  1047. && TREE_CODE (base) == MEM_REF
  1048. && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
  1049. {
  1050. value_range_t *vr = get_value_range (TREE_OPERAND (base, 0));
  1051. if (range_is_nonnull (vr))
  1052. return true;
  1053. }
  1054. }
  1055. return false;
  1056. }
  1057. /* Returns true if EXPR is a valid value (as expected by compare_values) --
  1058. a gimple invariant, or SSA_NAME +- CST. */
  1059. static bool
  1060. valid_value_p (tree expr)
  1061. {
  1062. if (TREE_CODE (expr) == SSA_NAME)
  1063. return true;
  1064. if (TREE_CODE (expr) == PLUS_EXPR
  1065. || TREE_CODE (expr) == MINUS_EXPR)
  1066. return (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
  1067. && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST);
  1068. return is_gimple_min_invariant (expr);
  1069. }
/* Return
	 1 if VAL < VAL2
	 0 if !(VAL < VAL2)
	-2 if those are incomparable.  */

static inline int
operand_less_p (tree val, tree val2)
{
  /* LT is folded faster than GE and others.  Inline the common case.  */
  if (TREE_CODE (val) == INTEGER_CST && TREE_CODE (val2) == INTEGER_CST)
    return tree_int_cst_lt (val, val2);
  else
    {
      tree tcmp;

      /* Suppress overflow warnings that folding the comparison might
	 trigger.  */
      fold_defer_overflow_warnings ();

      tcmp = fold_binary_to_constant (LT_EXPR, boolean_type_node, val, val2);

      fold_undefer_and_ignore_overflow_warnings ();

      if (!tcmp
	  || TREE_CODE (tcmp) != INTEGER_CST)
	return -2;

      if (!integer_zerop (tcmp))
	return 1;
    }

  /* val >= val2, not considering overflow infinity.  */
  /* Refine the answer for overflow infinities: a negative infinity is
     below everything but another negative infinity, and a positive
     infinity is above everything but another positive infinity.  */
  if (is_negative_overflow_infinity (val))
    return is_negative_overflow_infinity (val2) ? 0 : 1;
  else if (is_positive_overflow_infinity (val2))
    return is_positive_overflow_infinity (val) ? 0 : 1;

  return 0;
}
/* Compare two values VAL1 and VAL2.  Return

	-2 if VAL1 and VAL2 cannot be compared at compile-time,
	-1 if VAL1 < VAL2,
	 0 if VAL1 == VAL2,
	+1 if VAL1 > VAL2, and
	+2 if VAL1 != VAL2

   This is similar to tree_int_cst_compare but supports pointer values
   and values that cannot be compared at compile time.

   If STRICT_OVERFLOW_P is not NULL, then set *STRICT_OVERFLOW_P to
   true if the return value is only valid if we assume that signed
   overflow is undefined.  */

static int
compare_values_warnv (tree val1, tree val2, bool *strict_overflow_p)
{
  if (val1 == val2)
    return 0;

  /* Below we rely on the fact that VAL1 and VAL2 are both pointers or
     both integers.  */
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (val1))
	      == POINTER_TYPE_P (TREE_TYPE (val2)));

  /* Convert the two values into the same type.  This is needed because
     sizetype causes sign extension even for unsigned types.  */
  val2 = fold_convert (TREE_TYPE (val1), val2);
  STRIP_USELESS_TYPE_CONVERSION (val2);

  /* Symbolic comparison: both values are of the form
     '[-]NAME [+- CST]' over the same SSA name.  */
  if ((TREE_CODE (val1) == SSA_NAME
       || (TREE_CODE (val1) == NEGATE_EXPR
	   && TREE_CODE (TREE_OPERAND (val1, 0)) == SSA_NAME)
       || TREE_CODE (val1) == PLUS_EXPR
       || TREE_CODE (val1) == MINUS_EXPR)
      && (TREE_CODE (val2) == SSA_NAME
	  || (TREE_CODE (val2) == NEGATE_EXPR
	      && TREE_CODE (TREE_OPERAND (val2, 0)) == SSA_NAME)
	  || TREE_CODE (val2) == PLUS_EXPR
	  || TREE_CODE (val2) == MINUS_EXPR))
    {
      tree n1, c1, n2, c2;
      enum tree_code code1, code2;

      /* If VAL1 and VAL2 are of the form '[-]NAME [+-] CST' or 'NAME',
	 return -1 or +1 accordingly.  If VAL1 and VAL2 don't use the
	 same name, return -2.  */
      if (TREE_CODE (val1) == SSA_NAME || TREE_CODE (val1) == NEGATE_EXPR)
	{
	  code1 = SSA_NAME;
	  n1 = val1;
	  c1 = NULL_TREE;
	}
      else
	{
	  code1 = TREE_CODE (val1);
	  n1 = TREE_OPERAND (val1, 0);
	  c1 = TREE_OPERAND (val1, 1);
	  /* Canonicalize a negative constant into the opposite
	     operation with a positive one.  */
	  if (tree_int_cst_sgn (c1) == -1)
	    {
	      if (is_negative_overflow_infinity (c1))
		return -2;
	      c1 = fold_unary_to_constant (NEGATE_EXPR, TREE_TYPE (c1), c1);
	      if (!c1)
		return -2;
	      code1 = code1 == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR;
	    }
	}

      if (TREE_CODE (val2) == SSA_NAME || TREE_CODE (val2) == NEGATE_EXPR)
	{
	  code2 = SSA_NAME;
	  n2 = val2;
	  c2 = NULL_TREE;
	}
      else
	{
	  code2 = TREE_CODE (val2);
	  n2 = TREE_OPERAND (val2, 0);
	  c2 = TREE_OPERAND (val2, 1);
	  /* Canonicalize a negative constant into the opposite
	     operation with a positive one.  */
	  if (tree_int_cst_sgn (c2) == -1)
	    {
	      if (is_negative_overflow_infinity (c2))
		return -2;
	      c2 = fold_unary_to_constant (NEGATE_EXPR, TREE_TYPE (c2), c2);
	      if (!c2)
		return -2;
	      code2 = code2 == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR;
	    }
	}

      /* Both values must use the same name.  */
      if (TREE_CODE (n1) == NEGATE_EXPR && TREE_CODE (n2) == NEGATE_EXPR)
	{
	  n1 = TREE_OPERAND (n1, 0);
	  n2 = TREE_OPERAND (n2, 0);
	}
      if (n1 != n2)
	return -2;

      if (code1 == SSA_NAME && code2 == SSA_NAME)
	/* NAME == NAME  */
	return 0;

      /* If overflow is defined we cannot simplify more.  */
      if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (val1)))
	return -2;

      if (strict_overflow_p != NULL
	  /* Symbolic range building sets TREE_NO_WARNING to declare
	     that overflow doesn't happen.  */
	  && (code1 == SSA_NAME || !TREE_NO_WARNING (val1))
	  && (code2 == SSA_NAME || !TREE_NO_WARNING (val2)))
	*strict_overflow_p = true;

      if (code1 == SSA_NAME)
	{
	  if (code2 == PLUS_EXPR)
	    /* NAME < NAME + CST  */
	    return -1;
	  else if (code2 == MINUS_EXPR)
	    /* NAME > NAME - CST  */
	    return 1;
	}
      else if (code1 == PLUS_EXPR)
	{
	  if (code2 == SSA_NAME)
	    /* NAME + CST > NAME  */
	    return 1;
	  else if (code2 == PLUS_EXPR)
	    /* NAME + CST1 > NAME + CST2, if CST1 > CST2  */
	    return compare_values_warnv (c1, c2, strict_overflow_p);
	  else if (code2 == MINUS_EXPR)
	    /* NAME + CST1 > NAME - CST2  */
	    return 1;
	}
      else if (code1 == MINUS_EXPR)
	{
	  if (code2 == SSA_NAME)
	    /* NAME - CST < NAME  */
	    return -1;
	  else if (code2 == PLUS_EXPR)
	    /* NAME - CST1 < NAME + CST2  */
	    return -1;
	  else if (code2 == MINUS_EXPR)
	    /* NAME - CST1 > NAME - CST2, if CST1 < CST2.  Notice that
	       C1 and C2 are swapped in the call to compare_values.  */
	    return compare_values_warnv (c2, c1, strict_overflow_p);
	}

      gcc_unreachable ();
    }

  /* We cannot compare non-constants.  */
  if (!is_gimple_min_invariant (val1) || !is_gimple_min_invariant (val2))
    return -2;

  if (!POINTER_TYPE_P (TREE_TYPE (val1)))
    {
      /* We cannot compare overflowed values, except for overflow
	 infinities.  */
      if (TREE_OVERFLOW (val1) || TREE_OVERFLOW (val2))
	{
	  if (strict_overflow_p != NULL)
	    *strict_overflow_p = true;
	  if (is_negative_overflow_infinity (val1))
	    return is_negative_overflow_infinity (val2) ? 0 : -1;
	  else if (is_negative_overflow_infinity (val2))
	    return 1;
	  else if (is_positive_overflow_infinity (val1))
	    return is_positive_overflow_infinity (val2) ? 0 : 1;
	  else if (is_positive_overflow_infinity (val2))
	    return -1;
	  return -2;
	}

      return tree_int_cst_compare (val1, val2);
    }
  else
    {
      tree t;

      /* First see if VAL1 and VAL2 are not the same.  */
      if (val1 == val2 || operand_equal_p (val1, val2, 0))
	return 0;

      /* If VAL1 is a lower address than VAL2, return -1.  */
      if (operand_less_p (val1, val2) == 1)
	return -1;

      /* If VAL1 is a higher address than VAL2, return +1.  */
      if (operand_less_p (val2, val1) == 1)
	return 1;

      /* If VAL1 is different than VAL2, return +2.
	 For integer constants we either have already returned -1 or 1
	 or they are equivalent.  We still might succeed in proving
	 something about non-trivial operands.  */
      if (TREE_CODE (val1) != INTEGER_CST
	  || TREE_CODE (val2) != INTEGER_CST)
	{
	  t = fold_binary_to_constant (NE_EXPR, boolean_type_node, val1, val2);
	  if (t && integer_onep (t))
	    return 2;
	}

      return -2;
    }
}
  1284. /* Compare values like compare_values_warnv, but treat comparisons of
  1285. nonconstants which rely on undefined overflow as incomparable. */
  1286. static int
  1287. compare_values (tree val1, tree val2)
  1288. {
  1289. bool sop;
  1290. int ret;
  1291. sop = false;
  1292. ret = compare_values_warnv (val1, val2, &sop);
  1293. if (sop
  1294. && (!is_gimple_min_invariant (val1) || !is_gimple_min_invariant (val2)))
  1295. ret = -2;
  1296. return ret;
  1297. }
  1298. /* Return 1 if VAL is inside value range MIN <= VAL <= MAX,
  1299. 0 if VAL is not inside [MIN, MAX],
  1300. -2 if we cannot tell either way.
  1301. Benchmark compile/20001226-1.c compilation time after changing this
  1302. function. */
  1303. static inline int
  1304. value_inside_range (tree val, tree min, tree max)
  1305. {
  1306. int cmp1, cmp2;
  1307. cmp1 = operand_less_p (val, min);
  1308. if (cmp1 == -2)
  1309. return -2;
  1310. if (cmp1 == 1)
  1311. return 0;
  1312. cmp2 = operand_less_p (max, val);
  1313. if (cmp2 == -2)
  1314. return -2;
  1315. return !cmp2;
  1316. }
  1317. /* Return true if value ranges VR0 and VR1 have a non-empty
  1318. intersection.
  1319. Benchmark compile/20001226-1.c compilation time after changing this
  1320. function.
  1321. */
  1322. static inline bool
  1323. value_ranges_intersect_p (value_range_t *vr0, value_range_t *vr1)
  1324. {
  1325. /* The value ranges do not intersect if the maximum of the first range is
  1326. less than the minimum of the second range or vice versa.
  1327. When those relations are unknown, we can't do any better. */
  1328. if (operand_less_p (vr0->max, vr1->min) != 0)
  1329. return false;
  1330. if (operand_less_p (vr1->max, vr0->min) != 0)
  1331. return false;
  1332. return true;
  1333. }
  1334. /* Return 1 if [MIN, MAX] includes the value zero, 0 if it does not
  1335. include the value zero, -2 if we cannot tell. */
  1336. static inline int
  1337. range_includes_zero_p (tree min, tree max)
  1338. {
  1339. tree zero = build_int_cst (TREE_TYPE (min), 0);
  1340. return value_inside_range (zero, min, max);
  1341. }
  1342. /* Return true if *VR is know to only contain nonnegative values. */
  1343. static inline bool
  1344. value_range_nonnegative_p (value_range_t *vr)
  1345. {
  1346. /* Testing for VR_ANTI_RANGE is not useful here as any anti-range
  1347. which would return a useful value should be encoded as a
  1348. VR_RANGE. */
  1349. if (vr->type == VR_RANGE)
  1350. {
  1351. int result = compare_values (vr->min, integer_zero_node);
  1352. return (result == 0 || result == 1);
  1353. }
  1354. return false;
  1355. }
  1356. /* If *VR has a value rante that is a single constant value return that,
  1357. otherwise return NULL_TREE. */
  1358. static tree
  1359. value_range_constant_singleton (value_range_t *vr)
  1360. {
  1361. if (vr->type == VR_RANGE
  1362. && operand_equal_p (vr->min, vr->max, 0)
  1363. && is_gimple_min_invariant (vr->min))
  1364. return vr->min;
  1365. return NULL_TREE;
  1366. }
  1367. /* If OP has a value range with a single constant value return that,
  1368. otherwise return NULL_TREE. This returns OP itself if OP is a
  1369. constant. */
  1370. static tree
  1371. op_with_constant_singleton_value_range (tree op)
  1372. {
  1373. if (is_gimple_min_invariant (op))
  1374. return op;
  1375. if (TREE_CODE (op) != SSA_NAME)
  1376. return NULL_TREE;
  1377. return value_range_constant_singleton (get_value_range (op));
  1378. }
  1379. /* Return true if op is in a boolean [0, 1] value-range. */
  1380. static bool
  1381. op_with_boolean_value_range_p (tree op)
  1382. {
  1383. value_range_t *vr;
  1384. if (TYPE_PRECISION (TREE_TYPE (op)) == 1)
  1385. return true;
  1386. if (integer_zerop (op)
  1387. || integer_onep (op))
  1388. return true;
  1389. if (TREE_CODE (op) != SSA_NAME)
  1390. return false;
  1391. vr = get_value_range (op);
  1392. return (vr->type == VR_RANGE
  1393. && integer_zerop (vr->min)
  1394. && integer_onep (vr->max));
  1395. }
  1396. /* Extract value range information from an ASSERT_EXPR EXPR and store
  1397. it in *VR_P. */
  1398. static void
  1399. extract_range_from_assert (value_range_t *vr_p, tree expr)
  1400. {
  1401. tree var, cond, limit, min, max, type;
  1402. value_range_t *limit_vr;
  1403. enum tree_code cond_code;
  1404. var = ASSERT_EXPR_VAR (expr);
  1405. cond = ASSERT_EXPR_COND (expr);
  1406. gcc_assert (COMPARISON_CLASS_P (cond));
  1407. /* Find VAR in the ASSERT_EXPR conditional. */
  1408. if (var == TREE_OPERAND (cond, 0)
  1409. || TREE_CODE (TREE_OPERAND (cond, 0)) == PLUS_EXPR
  1410. || TREE_CODE (TREE_OPERAND (cond, 0)) == NOP_EXPR)
  1411. {
  1412. /* If the predicate is of the form VAR COMP LIMIT, then we just
  1413. take LIMIT from the RHS and use the same comparison code. */
  1414. cond_code = TREE_CODE (cond);
  1415. limit = TREE_OPERAND (cond, 1);
  1416. cond = TREE_OPERAND (cond, 0);
  1417. }
  1418. else
  1419. {
  1420. /* If the predicate is of the form LIMIT COMP VAR, then we need
  1421. to flip around the comparison code to create the proper range
  1422. for VAR. */
  1423. cond_code = swap_tree_comparison (TREE_CODE (cond));
  1424. limit = TREE_OPERAND (cond, 0);
  1425. cond = TREE_OPERAND (cond, 1);
  1426. }
  1427. limit = avoid_overflow_infinity (limit);
  1428. type = TREE_TYPE (var);
  1429. gcc_assert (limit != var);
  1430. /* For pointer arithmetic, we only keep track of pointer equality
  1431. and inequality. */
  1432. if (POINTER_TYPE_P (type) && cond_code != NE_EXPR && cond_code != EQ_EXPR)
  1433. {
  1434. set_value_range_to_varying (vr_p);
  1435. return;
  1436. }
  1437. /* If LIMIT is another SSA name and LIMIT has a range of its own,
  1438. try to use LIMIT's range to avoid creating symbolic ranges
  1439. unnecessarily. */
  1440. limit_vr = (TREE_CODE (limit) == SSA_NAME) ? get_value_range (limit) : NULL;
  1441. /* LIMIT's range is only interesting if it has any useful information. */
  1442. if (limit_vr
  1443. && (limit_vr->type == VR_UNDEFINED
  1444. || limit_vr->type == VR_VARYING
  1445. || symbolic_range_p (limit_vr)))
  1446. limit_vr = NULL;
  1447. /* Initially, the new range has the same set of equivalences of
  1448. VAR's range. This will be revised before returning the final
  1449. value. Since assertions may be chained via mutually exclusive
  1450. predicates, we will need to trim the set of equivalences before
  1451. we are done. */
  1452. gcc_assert (vr_p->equiv == NULL);
  1453. add_equivalence (&vr_p->equiv, var);
  1454. /* Extract a new range based on the asserted comparison for VAR and
  1455. LIMIT's value range. Notice that if LIMIT has an anti-range, we
  1456. will only use it for equality comparisons (EQ_EXPR). For any
  1457. other kind of assertion, we cannot derive a range from LIMIT's
  1458. anti-range that can be used to describe the new range. For
  1459. instance, ASSERT_EXPR <x_2, x_2 <= b_4>. If b_4 is ~[2, 10],
  1460. then b_4 takes on the ranges [-INF, 1] and [11, +INF]. There is
  1461. no single range for x_2 that could describe LE_EXPR, so we might
  1462. as well build the range [b_4, +INF] for it.
  1463. One special case we handle is extracting a range from a
  1464. range test encoded as (unsigned)var + CST <= limit. */
  1465. if (TREE_CODE (cond) == NOP_EXPR
  1466. || TREE_CODE (cond) == PLUS_EXPR)
  1467. {
  1468. if (TREE_CODE (cond) == PLUS_EXPR)
  1469. {
  1470. min = fold_build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (cond, 1)),
  1471. TREE_OPERAND (cond, 1));
  1472. max = int_const_binop (PLUS_EXPR, limit, min);
  1473. cond = TREE_OPERAND (cond, 0);
  1474. }
  1475. else
  1476. {
  1477. min = build_int_cst (TREE_TYPE (var), 0);
  1478. max = limit;
  1479. }
  1480. /* Make sure to not set TREE_OVERFLOW on the final type
  1481. conversion. We are willingly interpreting large positive
  1482. unsigned values as negative signed values here. */
  1483. min = force_fit_type (TREE_TYPE (var), wi::to_widest (min), 0, false);
  1484. max = force_fit_type (TREE_TYPE (var), wi::to_widest (max), 0, false);
  1485. /* We can transform a max, min range to an anti-range or
  1486. vice-versa. Use set_and_canonicalize_value_range which does
  1487. this for us. */
  1488. if (cond_code == LE_EXPR)
  1489. set_and_canonicalize_value_range (vr_p, VR_RANGE,
  1490. min, max, vr_p->equiv);
  1491. else if (cond_code == GT_EXPR)
  1492. set_and_canonicalize_value_range (vr_p, VR_ANTI_RANGE,
  1493. min, max, vr_p->equiv);
  1494. else
  1495. gcc_unreachable ();
  1496. }
  1497. else if (cond_code == EQ_EXPR)
  1498. {
  1499. enum value_range_type range_type;
  1500. if (limit_vr)
  1501. {
  1502. range_type = limit_vr->type;
  1503. min = limit_vr->min;
  1504. max = limit_vr->max;
  1505. }
  1506. else
  1507. {
  1508. range_type = VR_RANGE;
  1509. min = limit;
  1510. max = limit;
  1511. }
  1512. set_value_range (vr_p, range_type, min, max, vr_p->equiv);
  1513. /* When asserting the equality VAR == LIMIT and LIMIT is another
  1514. SSA name, the new range will also inherit the equivalence set
  1515. from LIMIT. */
  1516. if (TREE_CODE (limit) == SSA_NAME)
  1517. add_equivalence (&vr_p->equiv, limit);
  1518. }
  1519. else if (cond_code == NE_EXPR)
  1520. {
  1521. /* As described above, when LIMIT's range is an anti-range and
  1522. this assertion is an inequality (NE_EXPR), then we cannot
  1523. derive anything from the anti-range. For instance, if
  1524. LIMIT's range was ~[0, 0], the assertion 'VAR != LIMIT' does
  1525. not imply that VAR's range is [0, 0]. So, in the case of
  1526. anti-ranges, we just assert the inequality using LIMIT and
  1527. not its anti-range.
  1528. If LIMIT_VR is a range, we can only use it to build a new
  1529. anti-range if LIMIT_VR is a single-valued range. For
  1530. instance, if LIMIT_VR is [0, 1], the predicate
  1531. VAR != [0, 1] does not mean that VAR's range is ~[0, 1].
  1532. Rather, it means that for value 0 VAR should be ~[0, 0]
  1533. and for value 1, VAR should be ~[1, 1]. We cannot
  1534. represent these ranges.
  1535. The only situation in which we can build a valid
  1536. anti-range is when LIMIT_VR is a single-valued range
  1537. (i.e., LIMIT_VR->MIN == LIMIT_VR->MAX). In that case,
  1538. build the anti-range ~[LIMIT_VR->MIN, LIMIT_VR->MAX]. */
  1539. if (limit_vr
  1540. && limit_vr->type == VR_RANGE
  1541. && compare_values (limit_vr->min, limit_vr->max) == 0)
  1542. {
  1543. min = limit_vr->min;
  1544. max = limit_vr->max;
  1545. }
  1546. else
  1547. {
  1548. /* In any other case, we cannot use LIMIT's range to build a
  1549. valid anti-range. */
  1550. min = max = limit;
  1551. }
  1552. /* If MIN and MAX cover the whole range for their type, then
  1553. just use the original LIMIT. */
  1554. if (INTEGRAL_TYPE_P (type)
  1555. && vrp_val_is_min (min)
  1556. && vrp_val_is_max (max))
  1557. min = max = limit;
  1558. set_and_canonicalize_value_range (vr_p, VR_ANTI_RANGE,
  1559. min, max, vr_p->equiv);
  1560. }
  1561. else if (cond_code == LE_EXPR || cond_code == LT_EXPR)
  1562. {
  1563. min = TYPE_MIN_VALUE (type);
  1564. if (limit_vr == NULL || limit_vr->type == VR_ANTI_RANGE)
  1565. max = limit;
  1566. else
  1567. {
  1568. /* If LIMIT_VR is of the form [N1, N2], we need to build the
  1569. range [MIN, N2] for LE_EXPR and [MIN, N2 - 1] for
  1570. LT_EXPR. */
  1571. max = limit_vr->max;
  1572. }
  1573. /* If the maximum value forces us to be out of bounds, simply punt.
  1574. It would be pointless to try and do anything more since this
  1575. all should be optimized away above us. */
  1576. if ((cond_code == LT_EXPR
  1577. && compare_values (max, min) == 0)
  1578. || is_overflow_infinity (max))
  1579. set_value_range_to_varying (vr_p);
  1580. else
  1581. {
  1582. /* For LT_EXPR, we create the range [MIN, MAX - 1]. */
  1583. if (cond_code == LT_EXPR)
  1584. {
  1585. if (TYPE_PRECISION (TREE_TYPE (max)) == 1
  1586. && !TYPE_UNSIGNED (TREE_TYPE (max)))
  1587. max = fold_build2 (PLUS_EXPR, TREE_TYPE (max), max,
  1588. build_int_cst (TREE_TYPE (max), -1));
  1589. else
  1590. max = fold_build2 (MINUS_EXPR, TREE_TYPE (max), max,
  1591. build_int_cst (TREE_TYPE (max), 1));
  1592. if (EXPR_P (max))
  1593. TREE_NO_WARNING (max) = 1;
  1594. }
  1595. set_value_range (vr_p, VR_RANGE, min, max, vr_p->equiv);
  1596. }
  1597. }
  1598. else if (cond_code == GE_EXPR || cond_code == GT_EXPR)
  1599. {
  1600. max = TYPE_MAX_VALUE (type);
  1601. if (limit_vr == NULL || limit_vr->type == VR_ANTI_RANGE)
  1602. min = limit;
  1603. else
  1604. {
  1605. /* If LIMIT_VR is of the form [N1, N2], we need to build the
  1606. range [N1, MAX] for GE_EXPR and [N1 + 1, MAX] for
  1607. GT_EXPR. */
  1608. min = limit_vr->min;
  1609. }
  1610. /* If the minimum value forces us to be out of bounds, simply punt.
  1611. It would be pointless to try and do anything more since this
  1612. all should be optimized away above us. */
  1613. if ((cond_code == GT_EXPR
  1614. && compare_values (min, max) == 0)
  1615. || is_overflow_infinity (min))
  1616. set_value_range_to_varying (vr_p);
  1617. else
  1618. {
  1619. /* For GT_EXPR, we create the range [MIN + 1, MAX]. */
  1620. if (cond_code == GT_EXPR)
  1621. {
  1622. if (TYPE_PRECISION (TREE_TYPE (min)) == 1
  1623. && !TYPE_UNSIGNED (TREE_TYPE (min)))
  1624. min = fold_build2 (MINUS_EXPR, TREE_TYPE (min), min,
  1625. build_int_cst (TREE_TYPE (min), -1));
  1626. else
  1627. min = fold_build2 (PLUS_EXPR, TREE_TYPE (min), min,
  1628. build_int_cst (TREE_TYPE (min), 1));
  1629. if (EXPR_P (min))
  1630. TREE_NO_WARNING (min) = 1;
  1631. }
  1632. set_value_range (vr_p, VR_RANGE, min, max, vr_p->equiv);
  1633. }
  1634. }
  1635. else
  1636. gcc_unreachable ();
  1637. /* Finally intersect the new range with what we already know about var. */
  1638. vrp_intersect_ranges (vr_p, get_value_range (var));
  1639. }
  1640. /* Extract range information from SSA name VAR and store it in VR. If
  1641. VAR has an interesting range, use it. Otherwise, create the
  1642. range [VAR, VAR] and return it. This is useful in situations where
  1643. we may have conditionals testing values of VARYING names. For
  1644. instance,
  1645. x_3 = y_5;
  1646. if (x_3 > y_5)
  1647. ...
  1648. Even if y_5 is deemed VARYING, we can determine that x_3 > y_5 is
  1649. always false. */
  1650. static void
  1651. extract_range_from_ssa_name (value_range_t *vr, tree var)
  1652. {
  1653. value_range_t *var_vr = get_value_range (var);
  1654. if (var_vr->type != VR_VARYING)
  1655. copy_value_range (vr, var_vr);
  1656. else
  1657. set_value_range (vr, VR_RANGE, var, var, NULL);
  1658. add_equivalence (&vr->equiv, var);
  1659. }
/* Wrapper around int_const_binop.  If the operation overflows and we
   are not using wrapping arithmetic, then adjust the result to be
   -INF or +INF depending on CODE, VAL1 and VAL2.  This can return
   NULL_TREE if we need to use an overflow infinity representation but
   the type does not support it.  */

static tree
vrp_int_const_binop (enum tree_code code, tree val1, tree val2)
{
  tree res;

  res = int_const_binop (code, val1, val2);

  /* If we are using unsigned arithmetic, operate symbolically
     on -INF and +INF as int_const_binop only handles signed overflow.  */
  if (TYPE_UNSIGNED (TREE_TYPE (val1)))
    {
      int checkz = compare_values (res, val1);
      bool overflow = false;

      /* Ensure that res = val1 [+*] val2 >= val1
         or that res = val1 - val2 <= val1.
	 If either monotonicity property fails, the unsigned
	 computation wrapped around.  */
      if ((code == PLUS_EXPR
	   && !(checkz == 1 || checkz == 0))
	  || (code == MINUS_EXPR
	      && !(checkz == 0 || checkz == -1)))
	{
	  overflow = true;
	}
      /* Checking for multiplication overflow is done by dividing the
	 output of the multiplication by the first input of the
	 multiplication.  If the result of that division operation is
	 not equal to the second input of the multiplication, then the
	 multiplication overflowed.  (VAL1 == 0 is excluded since the
	 division would be undefined and 0 * VAL2 cannot overflow.)  */
      else if (code == MULT_EXPR && !integer_zerop (val1))
	{
	  tree tmp = int_const_binop (TRUNC_DIV_EXPR,
				      res,
				      val1);
	  int check = compare_values (tmp, val2);

	  if (check != 0)
	    overflow = true;
	}

      if (overflow)
	{
	  /* Mark the wrapped result as overflown; copy first so we do
	     not clobber a possibly shared INTEGER_CST node.  */
	  res = copy_node (res);
	  TREE_OVERFLOW (res) = 1;
	}

    }
  else if (TYPE_OVERFLOW_WRAPS (TREE_TYPE (val1)))
    /* If the signed operation wraps then int_const_binop has done
       everything we want.  */
    ;
  /* Signed division of -1/0 overflows and by the time it gets here
     returns NULL_TREE.  */
  else if (!res)
    return NULL_TREE;
  else if ((TREE_OVERFLOW (res)
	    && !TREE_OVERFLOW (val1)
	    && !TREE_OVERFLOW (val2))
	   || is_overflow_infinity (val1)
	   || is_overflow_infinity (val2))
    {
      /* If the operation overflowed but neither VAL1 nor VAL2 are
	 overflown, return -INF or +INF depending on the operation
	 and the combination of signs of the operands.  */
      int sgn1 = tree_int_cst_sgn (val1);
      int sgn2 = tree_int_cst_sgn (val2);

      if (needs_overflow_infinity (TREE_TYPE (res))
	  && !supports_overflow_infinity (TREE_TYPE (res)))
	return NULL_TREE;

      /* We have to punt on adding infinities of different signs,
	 since we can't tell what the sign of the result should be.
	 Likewise for subtracting infinities of the same sign.  */
      if (((code == PLUS_EXPR && sgn1 != sgn2)
	   || (code == MINUS_EXPR && sgn1 == sgn2))
	  && is_overflow_infinity (val1)
	  && is_overflow_infinity (val2))
	return NULL_TREE;

      /* Don't try to handle division or shifting of infinities.  */
      if ((code == TRUNC_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == ROUND_DIV_EXPR
	   || code == RSHIFT_EXPR)
	  && (is_overflow_infinity (val1)
	      || is_overflow_infinity (val2)))
	return NULL_TREE;

      /* Notice that we only need to handle the restricted set of
	 operations handled by extract_range_from_binary_expr.
	 Among them, only multiplication, addition and subtraction
	 can yield overflow without overflown operands because we
	 are working with integral types only... except in the
	 case VAL1 = -INF and VAL2 = -1 which overflows to +INF
	 for division too.  */

      /* For multiplication, the sign of the overflow is given
	 by the comparison of the signs of the operands.  */
      if ((code == MULT_EXPR && sgn1 == sgn2)
          /* For addition, the operands must be of the same sign
	     to yield an overflow.  Its sign is therefore that
	     of one of the operands, for example the first.  For
	     infinite operands X + -INF is negative, not positive.  */
	  || (code == PLUS_EXPR
	      && (sgn1 >= 0
		  ? !is_negative_overflow_infinity (val2)
		  : is_positive_overflow_infinity (val2)))
	  /* For subtraction, non-infinite operands must be of
	     different signs to yield an overflow.  Its sign is
	     therefore that of the first operand or the opposite of
	     that of the second operand.  A first operand of 0 counts
	     as positive here, for the corner case 0 - (-INF), which
	     overflows, but must yield +INF.  For infinite operands 0
	     - INF is negative, not positive.  */
	  || (code == MINUS_EXPR
	      && (sgn1 >= 0
		  ? !is_positive_overflow_infinity (val2)
		  : is_negative_overflow_infinity (val2)))
	  /* We only get in here with positive shift count, so the
	     overflow direction is the same as the sign of val1.
	     Actually rshift does not overflow at all, but we only
	     handle the case of shifting overflowed -INF and +INF.  */
	  || (code == RSHIFT_EXPR
	      && sgn1 >= 0)
	  /* For division, the only case is -INF / -1 = +INF.  */
	  || code == TRUNC_DIV_EXPR
	  || code == FLOOR_DIV_EXPR
	  || code == CEIL_DIV_EXPR
	  || code == EXACT_DIV_EXPR
	  || code == ROUND_DIV_EXPR)
	return (needs_overflow_infinity (TREE_TYPE (res))
		? positive_overflow_infinity (TREE_TYPE (res))
		: TYPE_MAX_VALUE (TREE_TYPE (res)));
      else
	return (needs_overflow_infinity (TREE_TYPE (res))
		? negative_overflow_infinity (TREE_TYPE (res))
		: TYPE_MIN_VALUE (TREE_TYPE (res)));
    }

  return res;
}
  1796. /* For range VR compute two wide_int bitmasks. In *MAY_BE_NONZERO
  1797. bitmask if some bit is unset, it means for all numbers in the range
  1798. the bit is 0, otherwise it might be 0 or 1. In *MUST_BE_NONZERO
  1799. bitmask if some bit is set, it means for all numbers in the range
  1800. the bit is 1, otherwise it might be 0 or 1. */
  1801. static bool
  1802. zero_nonzero_bits_from_vr (const tree expr_type,
  1803. value_range_t *vr,
  1804. wide_int *may_be_nonzero,
  1805. wide_int *must_be_nonzero)
  1806. {
  1807. *may_be_nonzero = wi::minus_one (TYPE_PRECISION (expr_type));
  1808. *must_be_nonzero = wi::zero (TYPE_PRECISION (expr_type));
  1809. if (!range_int_cst_p (vr)
  1810. || is_overflow_infinity (vr->min)
  1811. || is_overflow_infinity (vr->max))
  1812. return false;
  1813. if (range_int_cst_singleton_p (vr))
  1814. {
  1815. *may_be_nonzero = vr->min;
  1816. *must_be_nonzero = *may_be_nonzero;
  1817. }
  1818. else if (tree_int_cst_sgn (vr->min) >= 0
  1819. || tree_int_cst_sgn (vr->max) < 0)
  1820. {
  1821. wide_int xor_mask = wi::bit_xor (vr->min, vr->max);
  1822. *may_be_nonzero = wi::bit_or (vr->min, vr->max);
  1823. *must_be_nonzero = wi::bit_and (vr->min, vr->max);
  1824. if (xor_mask != 0)
  1825. {
  1826. wide_int mask = wi::mask (wi::floor_log2 (xor_mask), false,
  1827. may_be_nonzero->get_precision ());
  1828. *may_be_nonzero = *may_be_nonzero | mask;
  1829. *must_be_nonzero = must_be_nonzero->and_not (mask);
  1830. }
  1831. }
  1832. return true;
  1833. }
  1834. /* Create two value-ranges in *VR0 and *VR1 from the anti-range *AR
  1835. so that *VR0 U *VR1 == *AR. Returns true if that is possible,
  1836. false otherwise. If *AR can be represented with a single range
  1837. *VR1 will be VR_UNDEFINED. */
  1838. static bool
  1839. ranges_from_anti_range (value_range_t *ar,
  1840. value_range_t *vr0, value_range_t *vr1)
  1841. {
  1842. tree type = TREE_TYPE (ar->min);
  1843. vr0->type = VR_UNDEFINED;
  1844. vr1->type = VR_UNDEFINED;
  1845. if (ar->type != VR_ANTI_RANGE
  1846. || TREE_CODE (ar->min) != INTEGER_CST
  1847. || TREE_CODE (ar->max) != INTEGER_CST
  1848. || !vrp_val_min (type)
  1849. || !vrp_val_max (type))
  1850. return false;
  1851. if (!vrp_val_is_min (ar->min))
  1852. {
  1853. vr0->type = VR_RANGE;
  1854. vr0->min = vrp_val_min (type);
  1855. vr0->max = wide_int_to_tree (type, wi::sub (ar->min, 1));
  1856. }
  1857. if (!vrp_val_is_max (ar->max))
  1858. {
  1859. vr1->type = VR_RANGE;
  1860. vr1->min = wide_int_to_tree (type, wi::add (ar->max, 1));
  1861. vr1->max = vrp_val_max (type);
  1862. }
  1863. if (vr0->type == VR_UNDEFINED)
  1864. {
  1865. *vr0 = *vr1;
  1866. vr1->type = VR_UNDEFINED;
  1867. }
  1868. return vr0->type != VR_UNDEFINED;
  1869. }
/* Helper to extract a value-range *VR for a multiplicative operation
   *VR0 CODE *VR1.  Both input ranges must be constant ranges of the
   same kind (VR_RANGE, or VR_ANTI_RANGE for MULT_EXPR only); the
   result may come out VARYING when any endpoint combination
   overflows in an unrepresentable way.  */

static void
extract_range_from_multiplicative_op_1 (value_range_t *vr,
					enum tree_code code,
					value_range_t *vr0, value_range_t *vr1)
{
  enum value_range_type type;
  tree val[4];
  size_t i;
  tree min, max;
  bool sop;
  int cmp;

  /* Multiplications, divisions and shifts are a bit tricky to handle,
     depending on the mix of signs we have in the two ranges, we
     need to operate on different values to get the minimum and
     maximum values for the new range.  One approach is to figure
     out all the variations of range combinations and do the
     operations.

     However, this involves several calls to compare_values and it
     is pretty convoluted.  It's simpler to do the 4 operations
     (MIN0 OP MIN1, MIN0 OP MAX1, MAX0 OP MIN1 and
     MAX0 OP MAX1) and then figure the smallest and largest values to form
     the new range.  */
  gcc_assert (code == MULT_EXPR
	      || code == TRUNC_DIV_EXPR
	      || code == FLOOR_DIV_EXPR
	      || code == CEIL_DIV_EXPR
	      || code == EXACT_DIV_EXPR
	      || code == ROUND_DIV_EXPR
	      || code == RSHIFT_EXPR
	      || code == LSHIFT_EXPR);
  gcc_assert ((vr0->type == VR_RANGE
	       || (code == MULT_EXPR && vr0->type == VR_ANTI_RANGE))
	      && vr0->type == vr1->type);

  type = vr0->type;

  /* Compute the 4 cross operations.  A NULL_TREE result from
     vrp_int_const_binop means the overflow could not be represented;
     SOP records that and forces a VARYING result below.  Redundant
     combinations (when an input range is a singleton) are skipped by
     storing NULL_TREE without setting SOP.  */
  sop = false;
  val[0] = vrp_int_const_binop (code, vr0->min, vr1->min);
  if (val[0] == NULL_TREE)
    sop = true;

  if (vr1->max == vr1->min)
    val[1] = NULL_TREE;
  else
    {
      val[1] = vrp_int_const_binop (code, vr0->min, vr1->max);
      if (val[1] == NULL_TREE)
	sop = true;
    }

  if (vr0->max == vr0->min)
    val[2] = NULL_TREE;
  else
    {
      val[2] = vrp_int_const_binop (code, vr0->max, vr1->min);
      if (val[2] == NULL_TREE)
	sop = true;
    }

  if (vr0->min == vr0->max || vr1->min == vr1->max)
    val[3] = NULL_TREE;
  else
    {
      val[3] = vrp_int_const_binop (code, vr0->max, vr1->max);
      if (val[3] == NULL_TREE)
	sop = true;
    }

  if (sop)
    {
      set_value_range_to_varying (vr);
      return;
    }

  /* Set MIN to the minimum of VAL[i] and MAX to the maximum
     of VAL[i].  */
  min = val[0];
  max = val[0];
  for (i = 1; i < 4; i++)
    {
      /* Stop early if the running MIN/MAX already turned invalid or
	 overflowed (without being an overflow infinity).  */
      if (!is_gimple_min_invariant (min)
	  || (TREE_OVERFLOW (min) && !is_overflow_infinity (min))
	  || !is_gimple_min_invariant (max)
	  || (TREE_OVERFLOW (max) && !is_overflow_infinity (max)))
	break;

      if (val[i])
	{
	  if (!is_gimple_min_invariant (val[i])
	      || (TREE_OVERFLOW (val[i])
		  && !is_overflow_infinity (val[i])))
	    {
	      /* If we found an overflowed value, set MIN and MAX
		 to it so that we set the resulting range to
		 VARYING.  */
	      min = max = val[i];
	      break;
	    }

	  if (compare_values (val[i], min) == -1)
	    min = val[i];

	  if (compare_values (val[i], max) == 1)
	    max = val[i];
	}
    }

  /* If either MIN or MAX overflowed, then set the resulting range to
     VARYING.  But we do accept an overflow infinity
     representation.  */
  if (min == NULL_TREE
      || !is_gimple_min_invariant (min)
      || (TREE_OVERFLOW (min) && !is_overflow_infinity (min))
      || max == NULL_TREE
      || !is_gimple_min_invariant (max)
      || (TREE_OVERFLOW (max) && !is_overflow_infinity (max)))
    {
      set_value_range_to_varying (vr);
      return;
    }

  /* We punt if:
     1) [-INF, +INF]
     2) [-INF, +-INF(OVF)]
     3) [+-INF(OVF), +INF]
     4) [+-INF(OVF), +-INF(OVF)]
     We learn nothing when we have INF and INF(OVF) on both sides.
     Note that we do accept [-INF, -INF] and [+INF, +INF] without
     overflow.  */
  if ((vrp_val_is_min (min) || is_overflow_infinity (min))
      && (vrp_val_is_max (max) || is_overflow_infinity (max)))
    {
      set_value_range_to_varying (vr);
      return;
    }

  cmp = compare_values (min, max);
  if (cmp == -2 || cmp == 1)
    {
      /* If the new range has its limits swapped around (MIN > MAX),
	 then the operation caused one of them to wrap around, mark
	 the new range VARYING.  */
      set_value_range_to_varying (vr);
    }
  else
    set_value_range (vr, type, min, max, NULL);
}
  2007. /* Extract range information from a binary operation CODE based on
  2008. the ranges of each of its operands *VR0 and *VR1 with resulting
  2009. type EXPR_TYPE. The resulting range is stored in *VR. */
  2010. static void
  2011. extract_range_from_binary_expr_1 (value_range_t *vr,
  2012. enum tree_code code, tree expr_type,
  2013. value_range_t *vr0_, value_range_t *vr1_)
  2014. {
  2015. value_range_t vr0 = *vr0_, vr1 = *vr1_;
  2016. value_range_t vrtem0 = VR_INITIALIZER, vrtem1 = VR_INITIALIZER;
  2017. enum value_range_type type;
  2018. tree min = NULL_TREE, max = NULL_TREE;
  2019. int cmp;
  2020. if (!INTEGRAL_TYPE_P (expr_type)
  2021. && !POINTER_TYPE_P (expr_type))
  2022. {
  2023. set_value_range_to_varying (vr);
  2024. return;
  2025. }
  2026. /* Not all binary expressions can be applied to ranges in a
  2027. meaningful way. Handle only arithmetic operations. */
  2028. if (code != PLUS_EXPR
  2029. && code != MINUS_EXPR
  2030. && code != POINTER_PLUS_EXPR
  2031. && code != MULT_EXPR
  2032. && code != TRUNC_DIV_EXPR
  2033. && code != FLOOR_DIV_EXPR
  2034. && code != CEIL_DIV_EXPR
  2035. && code != EXACT_DIV_EXPR
  2036. && code != ROUND_DIV_EXPR
  2037. && code != TRUNC_MOD_EXPR
  2038. && code != RSHIFT_EXPR
  2039. && code != LSHIFT_EXPR
  2040. && code != MIN_EXPR
  2041. && code != MAX_EXPR
  2042. && code != BIT_AND_EXPR
  2043. && code != BIT_IOR_EXPR
  2044. && code != BIT_XOR_EXPR)
  2045. {
  2046. set_value_range_to_varying (vr);
  2047. return;
  2048. }
  2049. /* If both ranges are UNDEFINED, so is the result. */
  2050. if (vr0.type == VR_UNDEFINED && vr1.type == VR_UNDEFINED)
  2051. {
  2052. set_value_range_to_undefined (vr);
  2053. return;
  2054. }
  2055. /* If one of the ranges is UNDEFINED drop it to VARYING for the following
  2056. code. At some point we may want to special-case operations that
  2057. have UNDEFINED result for all or some value-ranges of the not UNDEFINED
  2058. operand. */
  2059. else if (vr0.type == VR_UNDEFINED)
  2060. set_value_range_to_varying (&vr0);
  2061. else if (vr1.type == VR_UNDEFINED)
  2062. set_value_range_to_varying (&vr1);
  2063. /* Now canonicalize anti-ranges to ranges when they are not symbolic
  2064. and express ~[] op X as ([]' op X) U ([]'' op X). */
  2065. if (vr0.type == VR_ANTI_RANGE
  2066. && ranges_from_anti_range (&vr0, &vrtem0, &vrtem1))
  2067. {
  2068. extract_range_from_binary_expr_1 (vr, code, expr_type, &vrtem0, vr1_);
  2069. if (vrtem1.type != VR_UNDEFINED)
  2070. {
  2071. value_range_t vrres = VR_INITIALIZER;
  2072. extract_range_from_binary_expr_1 (&vrres, code, expr_type,
  2073. &vrtem1, vr1_);
  2074. vrp_meet (vr, &vrres);
  2075. }
  2076. return;
  2077. }
  2078. /* Likewise for X op ~[]. */
  2079. if (vr1.type == VR_ANTI_RANGE
  2080. && ranges_from_anti_range (&vr1, &vrtem0, &vrtem1))
  2081. {
  2082. extract_range_from_binary_expr_1 (vr, code, expr_type, vr0_, &vrtem0);
  2083. if (vrtem1.type != VR_UNDEFINED)
  2084. {
  2085. value_range_t vrres = VR_INITIALIZER;
  2086. extract_range_from_binary_expr_1 (&vrres, code, expr_type,
  2087. vr0_, &vrtem1);
  2088. vrp_meet (vr, &vrres);
  2089. }
  2090. return;
  2091. }
  2092. /* The type of the resulting value range defaults to VR0.TYPE. */
  2093. type = vr0.type;
  2094. /* Refuse to operate on VARYING ranges, ranges of different kinds
  2095. and symbolic ranges. As an exception, we allow BIT_{AND,IOR}
  2096. because we may be able to derive a useful range even if one of
  2097. the operands is VR_VARYING or symbolic range. Similarly for
  2098. divisions, MIN/MAX and PLUS/MINUS.
  2099. TODO, we may be able to derive anti-ranges in some cases. */
  2100. if (code != BIT_AND_EXPR
  2101. && code != BIT_IOR_EXPR
  2102. && code != TRUNC_DIV_EXPR
  2103. && code != FLOOR_DIV_EXPR
  2104. && code != CEIL_DIV_EXPR
  2105. && code != EXACT_DIV_EXPR
  2106. && code != ROUND_DIV_EXPR
  2107. && code != TRUNC_MOD_EXPR
  2108. && code != MIN_EXPR
  2109. && code != MAX_EXPR
  2110. && code != PLUS_EXPR
  2111. && code != MINUS_EXPR
  2112. && code != RSHIFT_EXPR
  2113. && (vr0.type == VR_VARYING
  2114. || vr1.type == VR_VARYING
  2115. || vr0.type != vr1.type
  2116. || symbolic_range_p (&vr0)
  2117. || symbolic_range_p (&vr1)))
  2118. {
  2119. set_value_range_to_varying (vr);
  2120. return;
  2121. }
  2122. /* Now evaluate the expression to determine the new range. */
  2123. if (POINTER_TYPE_P (expr_type))
  2124. {
  2125. if (code == MIN_EXPR || code == MAX_EXPR)
  2126. {
  2127. /* For MIN/MAX expressions with pointers, we only care about
  2128. nullness, if both are non null, then the result is nonnull.
  2129. If both are null, then the result is null. Otherwise they
  2130. are varying. */
  2131. if (range_is_nonnull (&vr0) && range_is_nonnull (&vr1))
  2132. set_value_range_to_nonnull (vr, expr_type);
  2133. else if (range_is_null (&vr0) && range_is_null (&vr1))
  2134. set_value_range_to_null (vr, expr_type);
  2135. else
  2136. set_value_range_to_varying (vr);
  2137. }
  2138. else if (code == POINTER_PLUS_EXPR)
  2139. {
  2140. /* For pointer types, we are really only interested in asserting
  2141. whether the expression evaluates to non-NULL. */
  2142. if (range_is_nonnull (&vr0) || range_is_nonnull (&vr1))
  2143. set_value_range_to_nonnull (vr, expr_type);
  2144. else if (range_is_null (&vr0) && range_is_null (&vr1))
  2145. set_value_range_to_null (vr, expr_type);
  2146. else
  2147. set_value_range_to_varying (vr);
  2148. }
  2149. else if (code == BIT_AND_EXPR)
  2150. {
  2151. /* For pointer types, we are really only interested in asserting
  2152. whether the expression evaluates to non-NULL. */
  2153. if (range_is_nonnull (&vr0) && range_is_nonnull (&vr1))
  2154. set_value_range_to_nonnull (vr, expr_type);
  2155. else if (range_is_null (&vr0) || range_is_null (&vr1))
  2156. set_value_range_to_null (vr, expr_type);
  2157. else
  2158. set_value_range_to_varying (vr);
  2159. }
  2160. else
  2161. set_value_range_to_varying (vr);
  2162. return;
  2163. }
  2164. /* For integer ranges, apply the operation to each end of the
  2165. range and see what we end up with. */
  2166. if (code == PLUS_EXPR || code == MINUS_EXPR)
  2167. {
  2168. const bool minus_p = (code == MINUS_EXPR);
  2169. tree min_op0 = vr0.min;
  2170. tree min_op1 = minus_p ? vr1.max : vr1.min;
  2171. tree max_op0 = vr0.max;
  2172. tree max_op1 = minus_p ? vr1.min : vr1.max;
  2173. tree sym_min_op0 = NULL_TREE;
  2174. tree sym_min_op1 = NULL_TREE;
  2175. tree sym_max_op0 = NULL_TREE;
  2176. tree sym_max_op1 = NULL_TREE;
  2177. bool neg_min_op0, neg_min_op1, neg_max_op0, neg_max_op1;
  2178. /* If we have a PLUS or MINUS with two VR_RANGEs, either constant or
  2179. single-symbolic ranges, try to compute the precise resulting range,
  2180. but only if we know that this resulting range will also be constant
  2181. or single-symbolic. */
  2182. if (vr0.type == VR_RANGE && vr1.type == VR_RANGE
  2183. && (TREE_CODE (min_op0) == INTEGER_CST
  2184. || (sym_min_op0
  2185. = get_single_symbol (min_op0, &neg_min_op0, &min_op0)))
  2186. && (TREE_CODE (min_op1) == INTEGER_CST
  2187. || (sym_min_op1
  2188. = get_single_symbol (min_op1, &neg_min_op1, &min_op1)))
  2189. && (!(sym_min_op0 && sym_min_op1)
  2190. || (sym_min_op0 == sym_min_op1
  2191. && neg_min_op0 == (minus_p ? neg_min_op1 : !neg_min_op1)))
  2192. && (TREE_CODE (max_op0) == INTEGER_CST
  2193. || (sym_max_op0
  2194. = get_single_symbol (max_op0, &neg_max_op0, &max_op0)))
  2195. && (TREE_CODE (max_op1) == INTEGER_CST
  2196. || (sym_max_op1
  2197. = get_single_symbol (max_op1, &neg_max_op1, &max_op1)))
  2198. && (!(sym_max_op0 && sym_max_op1)
  2199. || (sym_max_op0 == sym_max_op1
  2200. && neg_max_op0 == (minus_p ? neg_max_op1 : !neg_max_op1))))
  2201. {
  2202. const signop sgn = TYPE_SIGN (expr_type);
  2203. const unsigned int prec = TYPE_PRECISION (expr_type);
  2204. wide_int type_min, type_max, wmin, wmax;
  2205. int min_ovf = 0;
  2206. int max_ovf = 0;
  2207. /* Get the lower and upper bounds of the type. */
  2208. if (TYPE_OVERFLOW_WRAPS (expr_type))
  2209. {
  2210. type_min = wi::min_value (prec, sgn);
  2211. type_max = wi::max_value (prec, sgn);
  2212. }
  2213. else
  2214. {
  2215. type_min = vrp_val_min (expr_type);
  2216. type_max = vrp_val_max (expr_type);
  2217. }
  2218. /* Combine the lower bounds, if any. */
  2219. if (min_op0 && min_op1)
  2220. {
  2221. if (minus_p)
  2222. {
  2223. wmin = wi::sub (min_op0, min_op1);
  2224. /* Check for overflow. */
  2225. if (wi::cmp (0, min_op1, sgn)
  2226. != wi::cmp (wmin, min_op0, sgn))
  2227. min_ovf = wi::cmp (min_op0, min_op1, sgn);
  2228. }
  2229. else
  2230. {
  2231. wmin = wi::add (min_op0, min_op1);
  2232. /* Check for overflow. */
  2233. if (wi::cmp (min_op1, 0, sgn)
  2234. != wi::cmp (wmin, min_op0, sgn))
  2235. min_ovf = wi::cmp (min_op0, wmin, sgn);
  2236. }
  2237. }
  2238. else if (min_op0)
  2239. wmin = min_op0;
  2240. else if (min_op1)
  2241. wmin = minus_p ? wi::neg (min_op1) : min_op1;
  2242. else
  2243. wmin = wi::shwi (0, prec);
  2244. /* Combine the upper bounds, if any. */
  2245. if (max_op0 && max_op1)
  2246. {
  2247. if (minus_p)
  2248. {
  2249. wmax = wi::sub (max_op0, max_op1);
  2250. /* Check for overflow. */
  2251. if (wi::cmp (0, max_op1, sgn)
  2252. != wi::cmp (wmax, max_op0, sgn))
  2253. max_ovf = wi::cmp (max_op0, max_op1, sgn);
  2254. }
  2255. else
  2256. {
  2257. wmax = wi::add (max_op0, max_op1);
  2258. if (wi::cmp (max_op1, 0, sgn)
  2259. != wi::cmp (wmax, max_op0, sgn))
  2260. max_ovf = wi::cmp (max_op0, wmax, sgn);
  2261. }
  2262. }
  2263. else if (max_op0)
  2264. wmax = max_op0;
  2265. else if (max_op1)
  2266. wmax = minus_p ? wi::neg (max_op1) : max_op1;
  2267. else
  2268. wmax = wi::shwi (0, prec);
  2269. /* Check for type overflow. */
  2270. if (min_ovf == 0)
  2271. {
  2272. if (wi::cmp (wmin, type_min, sgn) == -1)
  2273. min_ovf = -1;
  2274. else if (wi::cmp (wmin, type_max, sgn) == 1)
  2275. min_ovf = 1;
  2276. }
  2277. if (max_ovf == 0)
  2278. {
  2279. if (wi::cmp (wmax, type_min, sgn) == -1)
  2280. max_ovf = -1;
  2281. else if (wi::cmp (wmax, type_max, sgn) == 1)
  2282. max_ovf = 1;
  2283. }
  2284. /* If we have overflow for the constant part and the resulting
  2285. range will be symbolic, drop to VR_VARYING. */
  2286. if ((min_ovf && sym_min_op0 != sym_min_op1)
  2287. || (max_ovf && sym_max_op0 != sym_max_op1))
  2288. {
  2289. set_value_range_to_varying (vr);
  2290. return;
  2291. }
  2292. if (TYPE_OVERFLOW_WRAPS (expr_type))
  2293. {
  2294. /* If overflow wraps, truncate the values and adjust the
  2295. range kind and bounds appropriately. */
  2296. wide_int tmin = wide_int::from (wmin, prec, sgn);
  2297. wide_int tmax = wide_int::from (wmax, prec, sgn);
  2298. if (min_ovf == max_ovf)
  2299. {
  2300. /* No overflow or both overflow or underflow. The
  2301. range kind stays VR_RANGE. */
  2302. min = wide_int_to_tree (expr_type, tmin);
  2303. max = wide_int_to_tree (expr_type, tmax);
  2304. }
  2305. else if (min_ovf == -1 && max_ovf == 1)
  2306. {
  2307. /* Underflow and overflow, drop to VR_VARYING. */
  2308. set_value_range_to_varying (vr);
  2309. return;
  2310. }
  2311. else
  2312. {
  2313. /* Min underflow or max overflow. The range kind
  2314. changes to VR_ANTI_RANGE. */
  2315. bool covers = false;
  2316. wide_int tem = tmin;
  2317. gcc_assert ((min_ovf == -1 && max_ovf == 0)
  2318. || (max_ovf == 1 && min_ovf == 0));
  2319. type = VR_ANTI_RANGE;
  2320. tmin = tmax + 1;
  2321. if (wi::cmp (tmin, tmax, sgn) < 0)
  2322. covers = true;
  2323. tmax = tem - 1;
  2324. if (wi::cmp (tmax, tem, sgn) > 0)
  2325. covers = true;
  2326. /* If the anti-range would cover nothing, drop to varying.
  2327. Likewise if the anti-range bounds are outside of the
  2328. types values. */
  2329. if (covers || wi::cmp (tmin, tmax, sgn) > 0)
  2330. {
  2331. set_value_range_to_varying (vr);
  2332. return;
  2333. }
  2334. min = wide_int_to_tree (expr_type, tmin);
  2335. max = wide_int_to_tree (expr_type, tmax);
  2336. }
  2337. }
  2338. else
  2339. {
  2340. /* If overflow does not wrap, saturate to the types min/max
  2341. value. */
  2342. if (min_ovf == -1)
  2343. {
  2344. if (needs_overflow_infinity (expr_type)
  2345. && supports_overflow_infinity (expr_type))
  2346. min = negative_overflow_infinity (expr_type);
  2347. else
  2348. min = wide_int_to_tree (expr_type, type_min);
  2349. }
  2350. else if (min_ovf == 1)
  2351. {
  2352. if (needs_overflow_infinity (expr_type)
  2353. && supports_overflow_infinity (expr_type))
  2354. min = positive_overflow_infinity (expr_type);
  2355. else
  2356. min = wide_int_to_tree (expr_type, type_max);
  2357. }
  2358. else
  2359. min = wide_int_to_tree (expr_type, wmin);
  2360. if (max_ovf == -1)
  2361. {
  2362. if (needs_overflow_infinity (expr_type)
  2363. && supports_overflow_infinity (expr_type))
  2364. max = negative_overflow_infinity (expr_type);
  2365. else
  2366. max = wide_int_to_tree (expr_type, type_min);
  2367. }
  2368. else if (max_ovf == 1)
  2369. {
  2370. if (needs_overflow_infinity (expr_type)
  2371. && supports_overflow_infinity (expr_type))
  2372. max = positive_overflow_infinity (expr_type);
  2373. else
  2374. max = wide_int_to_tree (expr_type, type_max);
  2375. }
  2376. else
  2377. max = wide_int_to_tree (expr_type, wmax);
  2378. }
  2379. if (needs_overflow_infinity (expr_type)
  2380. && supports_overflow_infinity (expr_type))
  2381. {
  2382. if ((min_op0 && is_negative_overflow_infinity (min_op0))
  2383. || (min_op1
  2384. && (minus_p
  2385. ? is_positive_overflow_infinity (min_op1)
  2386. : is_negative_overflow_infinity (min_op1))))
  2387. min = negative_overflow_infinity (expr_type);
  2388. if ((max_op0 && is_positive_overflow_infinity (max_op0))
  2389. || (max_op1
  2390. && (minus_p
  2391. ? is_negative_overflow_infinity (max_op1)
  2392. : is_positive_overflow_infinity (max_op1))))
  2393. max = positive_overflow_infinity (expr_type);
  2394. }
  2395. /* If the result lower bound is constant, we're done;
  2396. otherwise, build the symbolic lower bound. */
  2397. if (sym_min_op0 == sym_min_op1)
  2398. ;
  2399. else if (sym_min_op0)
  2400. min = build_symbolic_expr (expr_type, sym_min_op0,
  2401. neg_min_op0, min);
  2402. else if (sym_min_op1)
  2403. min = build_symbolic_expr (expr_type, sym_min_op1,
  2404. neg_min_op1 ^ minus_p, min);
  2405. /* Likewise for the upper bound. */
  2406. if (sym_max_op0 == sym_max_op1)
  2407. ;
  2408. else if (sym_max_op0)
  2409. max = build_symbolic_expr (expr_type, sym_max_op0,
  2410. neg_max_op0, max);
  2411. else if (sym_max_op1)
  2412. max = build_symbolic_expr (expr_type, sym_max_op1,
  2413. neg_max_op1 ^ minus_p, max);
  2414. }
  2415. else
  2416. {
  2417. /* For other cases, for example if we have a PLUS_EXPR with two
  2418. VR_ANTI_RANGEs, drop to VR_VARYING. It would take more effort
  2419. to compute a precise range for such a case.
  2420. ??? General even mixed range kind operations can be expressed
  2421. by for example transforming ~[3, 5] + [1, 2] to range-only
  2422. operations and a union primitive:
  2423. [-INF, 2] + [1, 2] U [5, +INF] + [1, 2]
  2424. [-INF+1, 4] U [6, +INF(OVF)]
  2425. though usually the union is not exactly representable with
  2426. a single range or anti-range as the above is
  2427. [-INF+1, +INF(OVF)] intersected with ~[5, 5]
  2428. but one could use a scheme similar to equivalences for this. */
  2429. set_value_range_to_varying (vr);
  2430. return;
  2431. }
  2432. }
  2433. else if (code == MIN_EXPR
  2434. || code == MAX_EXPR)
  2435. {
  2436. if (vr0.type == VR_RANGE
  2437. && !symbolic_range_p (&vr0))
  2438. {
  2439. type = VR_RANGE;
  2440. if (vr1.type == VR_RANGE
  2441. && !symbolic_range_p (&vr1))
  2442. {
  2443. /* For operations that make the resulting range directly
  2444. proportional to the original ranges, apply the operation to
  2445. the same end of each range. */
  2446. min = vrp_int_const_binop (code, vr0.min, vr1.min);
  2447. max = vrp_int_const_binop (code, vr0.max, vr1.max);
  2448. }
  2449. else if (code == MIN_EXPR)
  2450. {
  2451. min = vrp_val_min (expr_type);
  2452. max = vr0.max;
  2453. }
  2454. else if (code == MAX_EXPR)
  2455. {
  2456. min = vr0.min;
  2457. max = vrp_val_max (expr_type);
  2458. }
  2459. }
  2460. else if (vr1.type == VR_RANGE
  2461. && !symbolic_range_p (&vr1))
  2462. {
  2463. type = VR_RANGE;
  2464. if (code == MIN_EXPR)
  2465. {
  2466. min = vrp_val_min (expr_type);
  2467. max = vr1.max;
  2468. }
  2469. else if (code == MAX_EXPR)
  2470. {
  2471. min = vr1.min;
  2472. max = vrp_val_max (expr_type);
  2473. }
  2474. }
  2475. else
  2476. {
  2477. set_value_range_to_varying (vr);
  2478. return;
  2479. }
  2480. }
  2481. else if (code == MULT_EXPR)
  2482. {
  2483. /* Fancy code so that with unsigned, [-3,-1]*[-3,-1] does not
  2484. drop to varying. This test requires 2*prec bits if both
  2485. operands are signed and 2*prec + 2 bits if either is not. */
  2486. signop sign = TYPE_SIGN (expr_type);
  2487. unsigned int prec = TYPE_PRECISION (expr_type);
  2488. if (range_int_cst_p (&vr0)
  2489. && range_int_cst_p (&vr1)
  2490. && TYPE_OVERFLOW_WRAPS (expr_type))
  2491. {
  2492. typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) vrp_int;
  2493. typedef generic_wide_int
  2494. <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> > vrp_int_cst;
  2495. vrp_int sizem1 = wi::mask <vrp_int> (prec, false);
  2496. vrp_int size = sizem1 + 1;
  2497. /* Extend the values using the sign of the result to PREC2.
  2498. From here on out, everthing is just signed math no matter
  2499. what the input types were. */
  2500. vrp_int min0 = vrp_int_cst (vr0.min);
  2501. vrp_int max0 = vrp_int_cst (vr0.max);
  2502. vrp_int min1 = vrp_int_cst (vr1.min);
  2503. vrp_int max1 = vrp_int_cst (vr1.max);
  2504. /* Canonicalize the intervals. */
  2505. if (sign == UNSIGNED)
  2506. {
  2507. if (wi::ltu_p (size, min0 + max0))
  2508. {
  2509. min0 -= size;
  2510. max0 -= size;
  2511. }
  2512. if (wi::ltu_p (size, min1 + max1))
  2513. {
  2514. min1 -= size;
  2515. max1 -= size;
  2516. }
  2517. }
  2518. vrp_int prod0 = min0 * min1;
  2519. vrp_int prod1 = min0 * max1;
  2520. vrp_int prod2 = max0 * min1;
  2521. vrp_int prod3 = max0 * max1;
  2522. /* Sort the 4 products so that min is in prod0 and max is in
  2523. prod3. */
  2524. /* min0min1 > max0max1 */
  2525. if (wi::gts_p (prod0, prod3))
  2526. {
  2527. vrp_int tmp = prod3;
  2528. prod3 = prod0;
  2529. prod0 = tmp;
  2530. }
  2531. /* min0max1 > max0min1 */
  2532. if (wi::gts_p (prod1, prod2))
  2533. {
  2534. vrp_int tmp = prod2;
  2535. prod2 = prod1;
  2536. prod1 = tmp;
  2537. }
  2538. if (wi::gts_p (prod0, prod1))
  2539. {
  2540. vrp_int tmp = prod1;
  2541. prod1 = prod0;
  2542. prod0 = tmp;
  2543. }
  2544. if (wi::gts_p (prod2, prod3))
  2545. {
  2546. vrp_int tmp = prod3;
  2547. prod3 = prod2;
  2548. prod2 = tmp;
  2549. }
  2550. /* diff = max - min. */
  2551. prod2 = prod3 - prod0;
  2552. if (wi::geu_p (prod2, sizem1))
  2553. {
  2554. /* the range covers all values. */
  2555. set_value_range_to_varying (vr);
  2556. return;
  2557. }
  2558. /* The following should handle the wrapping and selecting
  2559. VR_ANTI_RANGE for us. */
  2560. min = wide_int_to_tree (expr_type, prod0);
  2561. max = wide_int_to_tree (expr_type, prod3);
  2562. set_and_canonicalize_value_range (vr, VR_RANGE, min, max, NULL);
  2563. return;
  2564. }
  2565. /* If we have an unsigned MULT_EXPR with two VR_ANTI_RANGEs,
  2566. drop to VR_VARYING. It would take more effort to compute a
  2567. precise range for such a case. For example, if we have
  2568. op0 == 65536 and op1 == 65536 with their ranges both being
  2569. ~[0,0] on a 32-bit machine, we would have op0 * op1 == 0, so
  2570. we cannot claim that the product is in ~[0,0]. Note that we
  2571. are guaranteed to have vr0.type == vr1.type at this
  2572. point. */
  2573. if (vr0.type == VR_ANTI_RANGE
  2574. && !TYPE_OVERFLOW_UNDEFINED (expr_type))
  2575. {
  2576. set_value_range_to_varying (vr);
  2577. return;
  2578. }
  2579. extract_range_from_multiplicative_op_1 (vr, code, &vr0, &vr1);
  2580. return;
  2581. }
  2582. else if (code == RSHIFT_EXPR
  2583. || code == LSHIFT_EXPR)
  2584. {
  2585. /* If we have a RSHIFT_EXPR with any shift values outside [0..prec-1],
  2586. then drop to VR_VARYING. Outside of this range we get undefined
  2587. behavior from the shift operation. We cannot even trust
  2588. SHIFT_COUNT_TRUNCATED at this stage, because that applies to rtl
  2589. shifts, and the operation at the tree level may be widened. */
  2590. if (range_int_cst_p (&vr1)
  2591. && compare_tree_int (vr1.min, 0) >= 0
  2592. && compare_tree_int (vr1.max, TYPE_PRECISION (expr_type)) == -1)
  2593. {
  2594. if (code == RSHIFT_EXPR)
  2595. {
  2596. /* Even if vr0 is VARYING or otherwise not usable, we can derive
  2597. useful ranges just from the shift count. E.g.
  2598. x >> 63 for signed 64-bit x is always [-1, 0]. */
  2599. if (vr0.type != VR_RANGE || symbolic_range_p (&vr0))
  2600. {
  2601. vr0.type = type = VR_RANGE;
  2602. vr0.min = vrp_val_min (expr_type);
  2603. vr0.max = vrp_val_max (expr_type);
  2604. }
  2605. extract_range_from_multiplicative_op_1 (vr, code, &vr0, &vr1);
  2606. return;
  2607. }
  2608. /* We can map lshifts by constants to MULT_EXPR handling. */
  2609. else if (code == LSHIFT_EXPR
  2610. && range_int_cst_singleton_p (&vr1))
  2611. {
  2612. bool saved_flag_wrapv;
  2613. value_range_t vr1p = VR_INITIALIZER;
  2614. vr1p.type = VR_RANGE;
  2615. vr1p.min = (wide_int_to_tree
  2616. (expr_type,
  2617. wi::set_bit_in_zero (tree_to_shwi (vr1.min),
  2618. TYPE_PRECISION (expr_type))));
  2619. vr1p.max = vr1p.min;
  2620. /* We have to use a wrapping multiply though as signed overflow
  2621. on lshifts is implementation defined in C89. */
  2622. saved_flag_wrapv = flag_wrapv;
  2623. flag_wrapv = 1;
  2624. extract_range_from_binary_expr_1 (vr, MULT_EXPR, expr_type,
  2625. &vr0, &vr1p);
  2626. flag_wrapv = saved_flag_wrapv;
  2627. return;
  2628. }
  2629. else if (code == LSHIFT_EXPR
  2630. && range_int_cst_p (&vr0))
  2631. {
  2632. int prec = TYPE_PRECISION (expr_type);
  2633. int overflow_pos = prec;
  2634. int bound_shift;
  2635. wide_int low_bound, high_bound;
  2636. bool uns = TYPE_UNSIGNED (expr_type);
  2637. bool in_bounds = false;
  2638. if (!uns)
  2639. overflow_pos -= 1;
  2640. bound_shift = overflow_pos - tree_to_shwi (vr1.max);
  2641. /* If bound_shift == HOST_BITS_PER_WIDE_INT, the llshift can
  2642. overflow. However, for that to happen, vr1.max needs to be
  2643. zero, which means vr1 is a singleton range of zero, which
  2644. means it should be handled by the previous LSHIFT_EXPR
  2645. if-clause. */
  2646. wide_int bound = wi::set_bit_in_zero (bound_shift, prec);
  2647. wide_int complement = ~(bound - 1);
  2648. if (uns)
  2649. {
  2650. low_bound = bound;
  2651. high_bound = complement;
  2652. if (wi::ltu_p (vr0.max, low_bound))
  2653. {
  2654. /* [5, 6] << [1, 2] == [10, 24]. */
  2655. /* We're shifting out only zeroes, the value increases
  2656. monotonically. */
  2657. in_bounds = true;
  2658. }
  2659. else if (wi::ltu_p (high_bound, vr0.min))
  2660. {
  2661. /* [0xffffff00, 0xffffffff] << [1, 2]
  2662. == [0xfffffc00, 0xfffffffe]. */
  2663. /* We're shifting out only ones, the value decreases
  2664. monotonically. */
  2665. in_bounds = true;
  2666. }
  2667. }
  2668. else
  2669. {
  2670. /* [-1, 1] << [1, 2] == [-4, 4]. */
  2671. low_bound = complement;
  2672. high_bound = bound;
  2673. if (wi::lts_p (vr0.max, high_bound)
  2674. && wi::lts_p (low_bound, vr0.min))
  2675. {
  2676. /* For non-negative numbers, we're shifting out only
  2677. zeroes, the value increases monotonically.
  2678. For negative numbers, we're shifting out only ones, the
  2679. value decreases monotomically. */
  2680. in_bounds = true;
  2681. }
  2682. }
  2683. if (in_bounds)
  2684. {
  2685. extract_range_from_multiplicative_op_1 (vr, code, &vr0, &vr1);
  2686. return;
  2687. }
  2688. }
  2689. }
  2690. set_value_range_to_varying (vr);
  2691. return;
  2692. }
  2693. else if (code == TRUNC_DIV_EXPR
  2694. || code == FLOOR_DIV_EXPR
  2695. || code == CEIL_DIV_EXPR
  2696. || code == EXACT_DIV_EXPR
  2697. || code == ROUND_DIV_EXPR)
  2698. {
  2699. if (vr0.type != VR_RANGE || symbolic_range_p (&vr0))
  2700. {
  2701. /* For division, if op1 has VR_RANGE but op0 does not, something
  2702. can be deduced just from that range. Say [min, max] / [4, max]
  2703. gives [min / 4, max / 4] range. */
  2704. if (vr1.type == VR_RANGE
  2705. && !symbolic_range_p (&vr1)
  2706. && range_includes_zero_p (vr1.min, vr1.max) == 0)
  2707. {
  2708. vr0.type = type = VR_RANGE;
  2709. vr0.min = vrp_val_min (expr_type);
  2710. vr0.max = vrp_val_max (expr_type);
  2711. }
  2712. else
  2713. {
  2714. set_value_range_to_varying (vr);
  2715. return;
  2716. }
  2717. }
  2718. /* For divisions, if flag_non_call_exceptions is true, we must
  2719. not eliminate a division by zero. */
  2720. if (cfun->can_throw_non_call_exceptions
  2721. && (vr1.type != VR_RANGE
  2722. || range_includes_zero_p (vr1.min, vr1.max) != 0))
  2723. {
  2724. set_value_range_to_varying (vr);
  2725. return;
  2726. }
  2727. /* For divisions, if op0 is VR_RANGE, we can deduce a range
  2728. even if op1 is VR_VARYING, VR_ANTI_RANGE, symbolic or can
  2729. include 0. */
  2730. if (vr0.type == VR_RANGE
  2731. && (vr1.type != VR_RANGE
  2732. || range_includes_zero_p (vr1.min, vr1.max) != 0))
  2733. {
  2734. tree zero = build_int_cst (TREE_TYPE (vr0.min), 0);
  2735. int cmp;
  2736. min = NULL_TREE;
  2737. max = NULL_TREE;
  2738. if (TYPE_UNSIGNED (expr_type)
  2739. || value_range_nonnegative_p (&vr1))
  2740. {
  2741. /* For unsigned division or when divisor is known
  2742. to be non-negative, the range has to cover
  2743. all numbers from 0 to max for positive max
  2744. and all numbers from min to 0 for negative min. */
  2745. cmp = compare_values (vr0.max, zero);
  2746. if (cmp == -1)
  2747. max = zero;
  2748. else if (cmp == 0 || cmp == 1)
  2749. max = vr0.max;
  2750. else
  2751. type = VR_VARYING;
  2752. cmp = compare_values (vr0.min, zero);
  2753. if (cmp == 1)
  2754. min = zero;
  2755. else if (cmp == 0 || cmp == -1)
  2756. min = vr0.min;
  2757. else
  2758. type = VR_VARYING;
  2759. }
  2760. else
  2761. {
  2762. /* Otherwise the range is -max .. max or min .. -min
  2763. depending on which bound is bigger in absolute value,
  2764. as the division can change the sign. */
  2765. abs_extent_range (vr, vr0.min, vr0.max);
  2766. return;
  2767. }
  2768. if (type == VR_VARYING)
  2769. {
  2770. set_value_range_to_varying (vr);
  2771. return;
  2772. }
  2773. }
  2774. else
  2775. {
  2776. extract_range_from_multiplicative_op_1 (vr, code, &vr0, &vr1);
  2777. return;
  2778. }
  2779. }
  2780. else if (code == TRUNC_MOD_EXPR)
  2781. {
  2782. if (vr1.type != VR_RANGE
  2783. || range_includes_zero_p (vr1.min, vr1.max) != 0
  2784. || vrp_val_is_min (vr1.min))
  2785. {
  2786. set_value_range_to_varying (vr);
  2787. return;
  2788. }
  2789. type = VR_RANGE;
  2790. /* Compute MAX <|vr1.min|, |vr1.max|> - 1. */
  2791. max = fold_unary_to_constant (ABS_EXPR, expr_type, vr1.min);
  2792. if (tree_int_cst_lt (max, vr1.max))
  2793. max = vr1.max;
  2794. max = int_const_binop (MINUS_EXPR, max, build_int_cst (TREE_TYPE (max), 1));
  2795. /* If the dividend is non-negative the modulus will be
  2796. non-negative as well. */
  2797. if (TYPE_UNSIGNED (expr_type)
  2798. || value_range_nonnegative_p (&vr0))
  2799. min = build_int_cst (TREE_TYPE (max), 0);
  2800. else
  2801. min = fold_unary_to_constant (NEGATE_EXPR, expr_type, max);
  2802. }
  2803. else if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR)
  2804. {
  2805. bool int_cst_range0, int_cst_range1;
  2806. wide_int may_be_nonzero0, may_be_nonzero1;
  2807. wide_int must_be_nonzero0, must_be_nonzero1;
  2808. int_cst_range0 = zero_nonzero_bits_from_vr (expr_type, &vr0,
  2809. &may_be_nonzero0,
  2810. &must_be_nonzero0);
  2811. int_cst_range1 = zero_nonzero_bits_from_vr (expr_type, &vr1,
  2812. &may_be_nonzero1,
  2813. &must_be_nonzero1);
  2814. type = VR_RANGE;
  2815. if (code == BIT_AND_EXPR)
  2816. {
  2817. min = wide_int_to_tree (expr_type,
  2818. must_be_nonzero0 & must_be_nonzero1);
  2819. wide_int wmax = may_be_nonzero0 & may_be_nonzero1;
  2820. /* If both input ranges contain only negative values we can
  2821. truncate the result range maximum to the minimum of the
  2822. input range maxima. */
  2823. if (int_cst_range0 && int_cst_range1
  2824. && tree_int_cst_sgn (vr0.max) < 0
  2825. && tree_int_cst_sgn (vr1.max) < 0)
  2826. {
  2827. wmax = wi::min (wmax, vr0.max, TYPE_SIGN (expr_type));
  2828. wmax = wi::min (wmax, vr1.max, TYPE_SIGN (expr_type));
  2829. }
  2830. /* If either input range contains only non-negative values
  2831. we can truncate the result range maximum to the respective
  2832. maximum of the input range. */
  2833. if (int_cst_range0 && tree_int_cst_sgn (vr0.min) >= 0)
  2834. wmax = wi::min (wmax, vr0.max, TYPE_SIGN (expr_type));
  2835. if (int_cst_range1 && tree_int_cst_sgn (vr1.min) >= 0)
  2836. wmax = wi::min (wmax, vr1.max, TYPE_SIGN (expr_type));
  2837. max = wide_int_to_tree (expr_type, wmax);
  2838. }
  2839. else if (code == BIT_IOR_EXPR)
  2840. {
  2841. max = wide_int_to_tree (expr_type,
  2842. may_be_nonzero0 | may_be_nonzero1);
  2843. wide_int wmin = must_be_nonzero0 | must_be_nonzero1;
  2844. /* If the input ranges contain only positive values we can
  2845. truncate the minimum of the result range to the maximum
  2846. of the input range minima. */
  2847. if (int_cst_range0 && int_cst_range1
  2848. && tree_int_cst_sgn (vr0.min) >= 0
  2849. && tree_int_cst_sgn (vr1.min) >= 0)
  2850. {
  2851. wmin = wi::max (wmin, vr0.min, TYPE_SIGN (expr_type));
  2852. wmin = wi::max (wmin, vr1.min, TYPE_SIGN (expr_type));
  2853. }
  2854. /* If either input range contains only negative values
  2855. we can truncate the minimum of the result range to the
  2856. respective minimum range. */
  2857. if (int_cst_range0 && tree_int_cst_sgn (vr0.max) < 0)
  2858. wmin = wi::max (wmin, vr0.min, TYPE_SIGN (expr_type));
  2859. if (int_cst_range1 && tree_int_cst_sgn (vr1.max) < 0)
  2860. wmin = wi::max (wmin, vr1.min, TYPE_SIGN (expr_type));
  2861. min = wide_int_to_tree (expr_type, wmin);
  2862. }
  2863. else if (code == BIT_XOR_EXPR)
  2864. {
  2865. wide_int result_zero_bits = ((must_be_nonzero0 & must_be_nonzero1)
  2866. | ~(may_be_nonzero0 | may_be_nonzero1));
  2867. wide_int result_one_bits
  2868. = (must_be_nonzero0.and_not (may_be_nonzero1)
  2869. | must_be_nonzero1.and_not (may_be_nonzero0));
  2870. max = wide_int_to_tree (expr_type, ~result_zero_bits);
  2871. min = wide_int_to_tree (expr_type, result_one_bits);
  2872. /* If the range has all positive or all negative values the
  2873. result is better than VARYING. */
  2874. if (tree_int_cst_sgn (min) < 0
  2875. || tree_int_cst_sgn (max) >= 0)
  2876. ;
  2877. else
  2878. max = min = NULL_TREE;
  2879. }
  2880. }
  2881. else
  2882. gcc_unreachable ();
  2883. /* If either MIN or MAX overflowed, then set the resulting range to
  2884. VARYING. But we do accept an overflow infinity representation. */
  2885. if (min == NULL_TREE
  2886. || (TREE_OVERFLOW_P (min) && !is_overflow_infinity (min))
  2887. || max == NULL_TREE
  2888. || (TREE_OVERFLOW_P (max) && !is_overflow_infinity (max)))
  2889. {
  2890. set_value_range_to_varying (vr);
  2891. return;
  2892. }
  2893. /* We punt if:
  2894. 1) [-INF, +INF]
  2895. 2) [-INF, +-INF(OVF)]
  2896. 3) [+-INF(OVF), +INF]
  2897. 4) [+-INF(OVF), +-INF(OVF)]
  2898. We learn nothing when we have INF and INF(OVF) on both sides.
  2899. Note that we do accept [-INF, -INF] and [+INF, +INF] without
  2900. overflow. */
  2901. if ((vrp_val_is_min (min) || is_overflow_infinity (min))
  2902. && (vrp_val_is_max (max) || is_overflow_infinity (max)))
  2903. {
  2904. set_value_range_to_varying (vr);
  2905. return;
  2906. }
  2907. cmp = compare_values (min, max);
  2908. if (cmp == -2 || cmp == 1)
  2909. {
  2910. /* If the new range has its limits swapped around (MIN > MAX),
  2911. then the operation caused one of them to wrap around, mark
  2912. the new range VARYING. */
  2913. set_value_range_to_varying (vr);
  2914. }
  2915. else
  2916. set_value_range (vr, type, min, max, NULL);
  2917. }
  2918. /* Extract range information from a binary expression OP0 CODE OP1 based on
  2919. the ranges of each of its operands with resulting type EXPR_TYPE.
  2920. The resulting range is stored in *VR. */
  2921. static void
  2922. extract_range_from_binary_expr (value_range_t *vr,
  2923. enum tree_code code,
  2924. tree expr_type, tree op0, tree op1)
  2925. {
  2926. value_range_t vr0 = VR_INITIALIZER;
  2927. value_range_t vr1 = VR_INITIALIZER;
  2928. /* Get value ranges for each operand. For constant operands, create
  2929. a new value range with the operand to simplify processing. */
  2930. if (TREE_CODE (op0) == SSA_NAME)
  2931. vr0 = *(get_value_range (op0));
  2932. else if (is_gimple_min_invariant (op0))
  2933. set_value_range_to_value (&vr0, op0, NULL);
  2934. else
  2935. set_value_range_to_varying (&vr0);
  2936. if (TREE_CODE (op1) == SSA_NAME)
  2937. vr1 = *(get_value_range (op1));
  2938. else if (is_gimple_min_invariant (op1))
  2939. set_value_range_to_value (&vr1, op1, NULL);
  2940. else
  2941. set_value_range_to_varying (&vr1);
  2942. extract_range_from_binary_expr_1 (vr, code, expr_type, &vr0, &vr1);
  2943. /* Try harder for PLUS and MINUS if the range of one operand is symbolic
  2944. and based on the other operand, for example if it was deduced from a
  2945. symbolic comparison. When a bound of the range of the first operand
  2946. is invariant, we set the corresponding bound of the new range to INF
  2947. in order to avoid recursing on the range of the second operand. */
  2948. if (vr->type == VR_VARYING
  2949. && (code == PLUS_EXPR || code == MINUS_EXPR)
  2950. && TREE_CODE (op1) == SSA_NAME
  2951. && vr0.type == VR_RANGE
  2952. && symbolic_range_based_on_p (&vr0, op1))
  2953. {
  2954. const bool minus_p = (code == MINUS_EXPR);
  2955. value_range_t n_vr1 = VR_INITIALIZER;
  2956. /* Try with VR0 and [-INF, OP1]. */
  2957. if (is_gimple_min_invariant (minus_p ? vr0.max : vr0.min))
  2958. set_value_range (&n_vr1, VR_RANGE, vrp_val_min (expr_type), op1, NULL);
  2959. /* Try with VR0 and [OP1, +INF]. */
  2960. else if (is_gimple_min_invariant (minus_p ? vr0.min : vr0.max))
  2961. set_value_range (&n_vr1, VR_RANGE, op1, vrp_val_max (expr_type), NULL);
  2962. /* Try with VR0 and [OP1, OP1]. */
  2963. else
  2964. set_value_range (&n_vr1, VR_RANGE, op1, op1, NULL);
  2965. extract_range_from_binary_expr_1 (vr, code, expr_type, &vr0, &n_vr1);
  2966. }
  2967. if (vr->type == VR_VARYING
  2968. && (code == PLUS_EXPR || code == MINUS_EXPR)
  2969. && TREE_CODE (op0) == SSA_NAME
  2970. && vr1.type == VR_RANGE
  2971. && symbolic_range_based_on_p (&vr1, op0))
  2972. {
  2973. const bool minus_p = (code == MINUS_EXPR);
  2974. value_range_t n_vr0 = VR_INITIALIZER;
  2975. /* Try with [-INF, OP0] and VR1. */
  2976. if (is_gimple_min_invariant (minus_p ? vr1.max : vr1.min))
  2977. set_value_range (&n_vr0, VR_RANGE, vrp_val_min (expr_type), op0, NULL);
  2978. /* Try with [OP0, +INF] and VR1. */
  2979. else if (is_gimple_min_invariant (minus_p ? vr1.min : vr1.max))
  2980. set_value_range (&n_vr0, VR_RANGE, op0, vrp_val_max (expr_type), NULL);
  2981. /* Try with [OP0, OP0] and VR1. */
  2982. else
  2983. set_value_range (&n_vr0, VR_RANGE, op0, op0, NULL);
  2984. extract_range_from_binary_expr_1 (vr, code, expr_type, &n_vr0, &vr1);
  2985. }
  2986. }
  2987. /* Extract range information from a unary operation CODE based on
  2988. the range of its operand *VR0 with type OP0_TYPE with resulting type TYPE.
2989. The resulting range is stored in *VR. */
  2990. static void
  2991. extract_range_from_unary_expr_1 (value_range_t *vr,
  2992. enum tree_code code, tree type,
  2993. value_range_t *vr0_, tree op0_type)
  2994. {
  2995. value_range_t vr0 = *vr0_, vrtem0 = VR_INITIALIZER, vrtem1 = VR_INITIALIZER;
  2996. /* VRP only operates on integral and pointer types. */
  2997. if (!(INTEGRAL_TYPE_P (op0_type)
  2998. || POINTER_TYPE_P (op0_type))
  2999. || !(INTEGRAL_TYPE_P (type)
  3000. || POINTER_TYPE_P (type)))
  3001. {
  3002. set_value_range_to_varying (vr);
  3003. return;
  3004. }
  3005. /* If VR0 is UNDEFINED, so is the result. */
  3006. if (vr0.type == VR_UNDEFINED)
  3007. {
  3008. set_value_range_to_undefined (vr);
  3009. return;
  3010. }
  3011. /* Handle operations that we express in terms of others. */
  3012. if (code == PAREN_EXPR || code == OBJ_TYPE_REF)
  3013. {
  3014. /* PAREN_EXPR and OBJ_TYPE_REF are simple copies. */
  3015. copy_value_range (vr, &vr0);
  3016. return;
  3017. }
  3018. else if (code == NEGATE_EXPR)
  3019. {
  3020. /* -X is simply 0 - X, so re-use existing code that also handles
  3021. anti-ranges fine. */
  3022. value_range_t zero = VR_INITIALIZER;
  3023. set_value_range_to_value (&zero, build_int_cst (type, 0), NULL);
  3024. extract_range_from_binary_expr_1 (vr, MINUS_EXPR, type, &zero, &vr0);
  3025. return;
  3026. }
  3027. else if (code == BIT_NOT_EXPR)
  3028. {
  3029. /* ~X is simply -1 - X, so re-use existing code that also handles
  3030. anti-ranges fine. */
  3031. value_range_t minusone = VR_INITIALIZER;
  3032. set_value_range_to_value (&minusone, build_int_cst (type, -1), NULL);
  3033. extract_range_from_binary_expr_1 (vr, MINUS_EXPR,
  3034. type, &minusone, &vr0);
  3035. return;
  3036. }
  3037. /* Now canonicalize anti-ranges to ranges when they are not symbolic
  3038. and express op ~[] as (op []') U (op []''). */
  3039. if (vr0.type == VR_ANTI_RANGE
  3040. && ranges_from_anti_range (&vr0, &vrtem0, &vrtem1))
  3041. {
  3042. extract_range_from_unary_expr_1 (vr, code, type, &vrtem0, op0_type);
  3043. if (vrtem1.type != VR_UNDEFINED)
  3044. {
  3045. value_range_t vrres = VR_INITIALIZER;
  3046. extract_range_from_unary_expr_1 (&vrres, code, type,
  3047. &vrtem1, op0_type);
  3048. vrp_meet (vr, &vrres);
  3049. }
  3050. return;
  3051. }
  3052. if (CONVERT_EXPR_CODE_P (code))
  3053. {
  3054. tree inner_type = op0_type;
  3055. tree outer_type = type;
  3056. /* If the expression evaluates to a pointer, we are only interested in
  3057. determining if it evaluates to NULL [0, 0] or non-NULL (~[0, 0]). */
  3058. if (POINTER_TYPE_P (type))
  3059. {
  3060. if (range_is_nonnull (&vr0))
  3061. set_value_range_to_nonnull (vr, type);
  3062. else if (range_is_null (&vr0))
  3063. set_value_range_to_null (vr, type);
  3064. else
  3065. set_value_range_to_varying (vr);
  3066. return;
  3067. }
  3068. /* If VR0 is varying and we increase the type precision, assume
  3069. a full range for the following transformation. */
  3070. if (vr0.type == VR_VARYING
  3071. && INTEGRAL_TYPE_P (inner_type)
  3072. && TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type))
  3073. {
  3074. vr0.type = VR_RANGE;
  3075. vr0.min = TYPE_MIN_VALUE (inner_type);
  3076. vr0.max = TYPE_MAX_VALUE (inner_type);
  3077. }
  3078. /* If VR0 is a constant range or anti-range and the conversion is
  3079. not truncating we can convert the min and max values and
  3080. canonicalize the resulting range. Otherwise we can do the
  3081. conversion if the size of the range is less than what the
  3082. precision of the target type can represent and the range is
  3083. not an anti-range. */
  3084. if ((vr0.type == VR_RANGE
  3085. || vr0.type == VR_ANTI_RANGE)
  3086. && TREE_CODE (vr0.min) == INTEGER_CST
  3087. && TREE_CODE (vr0.max) == INTEGER_CST
  3088. && (!is_overflow_infinity (vr0.min)
  3089. || (vr0.type == VR_RANGE
  3090. && TYPE_PRECISION (outer_type) > TYPE_PRECISION (inner_type)
  3091. && needs_overflow_infinity (outer_type)
  3092. && supports_overflow_infinity (outer_type)))
  3093. && (!is_overflow_infinity (vr0.max)
  3094. || (vr0.type == VR_RANGE
  3095. && TYPE_PRECISION (outer_type) > TYPE_PRECISION (inner_type)
  3096. && needs_overflow_infinity (outer_type)
  3097. && supports_overflow_infinity (outer_type)))
  3098. && (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
  3099. || (vr0.type == VR_RANGE
  3100. && integer_zerop (int_const_binop (RSHIFT_EXPR,
  3101. int_const_binop (MINUS_EXPR, vr0.max, vr0.min),
  3102. size_int (TYPE_PRECISION (outer_type)))))))
  3103. {
  3104. tree new_min, new_max;
  3105. if (is_overflow_infinity (vr0.min))
  3106. new_min = negative_overflow_infinity (outer_type);
  3107. else
  3108. new_min = force_fit_type (outer_type, wi::to_widest (vr0.min),
  3109. 0, false);
  3110. if (is_overflow_infinity (vr0.max))
  3111. new_max = positive_overflow_infinity (outer_type);
  3112. else
  3113. new_max = force_fit_type (outer_type, wi::to_widest (vr0.max),
  3114. 0, false);
  3115. set_and_canonicalize_value_range (vr, vr0.type,
  3116. new_min, new_max, NULL);
  3117. return;
  3118. }
  3119. set_value_range_to_varying (vr);
  3120. return;
  3121. }
  3122. else if (code == ABS_EXPR)
  3123. {
  3124. tree min, max;
  3125. int cmp;
  3126. /* Pass through vr0 in the easy cases. */
  3127. if (TYPE_UNSIGNED (type)
  3128. || value_range_nonnegative_p (&vr0))
  3129. {
  3130. copy_value_range (vr, &vr0);
  3131. return;
  3132. }
  3133. /* For the remaining varying or symbolic ranges we can't do anything
  3134. useful. */
  3135. if (vr0.type == VR_VARYING
  3136. || symbolic_range_p (&vr0))
  3137. {
  3138. set_value_range_to_varying (vr);
  3139. return;
  3140. }
  3141. /* -TYPE_MIN_VALUE = TYPE_MIN_VALUE with flag_wrapv so we can't get a
  3142. useful range. */
  3143. if (!TYPE_OVERFLOW_UNDEFINED (type)
  3144. && ((vr0.type == VR_RANGE
  3145. && vrp_val_is_min (vr0.min))
  3146. || (vr0.type == VR_ANTI_RANGE
  3147. && !vrp_val_is_min (vr0.min))))
  3148. {
  3149. set_value_range_to_varying (vr);
  3150. return;
  3151. }
  3152. /* ABS_EXPR may flip the range around, if the original range
  3153. included negative values. */
  3154. if (is_overflow_infinity (vr0.min))
  3155. min = positive_overflow_infinity (type);
  3156. else if (!vrp_val_is_min (vr0.min))
  3157. min = fold_unary_to_constant (code, type, vr0.min);
  3158. else if (!needs_overflow_infinity (type))
  3159. min = TYPE_MAX_VALUE (type);
  3160. else if (supports_overflow_infinity (type))
  3161. min = positive_overflow_infinity (type);
  3162. else
  3163. {
  3164. set_value_range_to_varying (vr);
  3165. return;
  3166. }
  3167. if (is_overflow_infinity (vr0.max))
  3168. max = positive_overflow_infinity (type);
  3169. else if (!vrp_val_is_min (vr0.max))
  3170. max = fold_unary_to_constant (code, type, vr0.max);
  3171. else if (!needs_overflow_infinity (type))
  3172. max = TYPE_MAX_VALUE (type);
  3173. else if (supports_overflow_infinity (type)
  3174. /* We shouldn't generate [+INF, +INF] as set_value_range
  3175. doesn't like this and ICEs. */
  3176. && !is_positive_overflow_infinity (min))
  3177. max = positive_overflow_infinity (type);
  3178. else
  3179. {
  3180. set_value_range_to_varying (vr);
  3181. return;
  3182. }
  3183. cmp = compare_values (min, max);
  3184. /* If a VR_ANTI_RANGEs contains zero, then we have
  3185. ~[-INF, min(MIN, MAX)]. */
  3186. if (vr0.type == VR_ANTI_RANGE)
  3187. {
  3188. if (range_includes_zero_p (vr0.min, vr0.max) == 1)
  3189. {
  3190. /* Take the lower of the two values. */
  3191. if (cmp != 1)
  3192. max = min;
  3193. /* Create ~[-INF, min (abs(MIN), abs(MAX))]
  3194. or ~[-INF + 1, min (abs(MIN), abs(MAX))] when
  3195. flag_wrapv is set and the original anti-range doesn't include
  3196. TYPE_MIN_VALUE, remember -TYPE_MIN_VALUE = TYPE_MIN_VALUE. */
  3197. if (TYPE_OVERFLOW_WRAPS (type))
  3198. {
  3199. tree type_min_value = TYPE_MIN_VALUE (type);
  3200. min = (vr0.min != type_min_value
  3201. ? int_const_binop (PLUS_EXPR, type_min_value,
  3202. build_int_cst (TREE_TYPE (type_min_value), 1))
  3203. : type_min_value);
  3204. }
  3205. else
  3206. {
  3207. if (overflow_infinity_range_p (&vr0))
  3208. min = negative_overflow_infinity (type);
  3209. else
  3210. min = TYPE_MIN_VALUE (type);
  3211. }
  3212. }
  3213. else
  3214. {
  3215. /* All else has failed, so create the range [0, INF], even for
  3216. flag_wrapv since TYPE_MIN_VALUE is in the original
  3217. anti-range. */
  3218. vr0.type = VR_RANGE;
  3219. min = build_int_cst (type, 0);
  3220. if (needs_overflow_infinity (type))
  3221. {
  3222. if (supports_overflow_infinity (type))
  3223. max = positive_overflow_infinity (type);
  3224. else
  3225. {
  3226. set_value_range_to_varying (vr);
  3227. return;
  3228. }
  3229. }
  3230. else
  3231. max = TYPE_MAX_VALUE (type);
  3232. }
  3233. }
  3234. /* If the range contains zero then we know that the minimum value in the
  3235. range will be zero. */
  3236. else if (range_includes_zero_p (vr0.min, vr0.max) == 1)
  3237. {
  3238. if (cmp == 1)
  3239. max = min;
  3240. min = build_int_cst (type, 0);
  3241. }
  3242. else
  3243. {
  3244. /* If the range was reversed, swap MIN and MAX. */
  3245. if (cmp == 1)
  3246. {
  3247. tree t = min;
  3248. min = max;
  3249. max = t;
  3250. }
  3251. }
  3252. cmp = compare_values (min, max);
  3253. if (cmp == -2 || cmp == 1)
  3254. {
  3255. /* If the new range has its limits swapped around (MIN > MAX),
  3256. then the operation caused one of them to wrap around, mark
  3257. the new range VARYING. */
  3258. set_value_range_to_varying (vr);
  3259. }
  3260. else
  3261. set_value_range (vr, vr0.type, min, max, NULL);
  3262. return;
  3263. }
  3264. /* For unhandled operations fall back to varying. */
  3265. set_value_range_to_varying (vr);
  3266. return;
  3267. }
  3268. /* Extract range information from a unary expression CODE OP0 based on
  3269. the range of its operand with resulting type TYPE.
  3270. The resulting range is stored in *VR. */
  3271. static void
  3272. extract_range_from_unary_expr (value_range_t *vr, enum tree_code code,
  3273. tree type, tree op0)
  3274. {
  3275. value_range_t vr0 = VR_INITIALIZER;
  3276. /* Get value ranges for the operand. For constant operands, create
  3277. a new value range with the operand to simplify processing. */
  3278. if (TREE_CODE (op0) == SSA_NAME)
  3279. vr0 = *(get_value_range (op0));
  3280. else if (is_gimple_min_invariant (op0))
  3281. set_value_range_to_value (&vr0, op0, NULL);
  3282. else
  3283. set_value_range_to_varying (&vr0);
  3284. extract_range_from_unary_expr_1 (vr, code, type, &vr0, TREE_TYPE (op0));
  3285. }
  3286. /* Extract range information from a conditional expression STMT based on
  3287. the ranges of each of its operands and the expression code. */
  3288. static void
  3289. extract_range_from_cond_expr (value_range_t *vr, gassign *stmt)
  3290. {
  3291. tree op0, op1;
  3292. value_range_t vr0 = VR_INITIALIZER;
  3293. value_range_t vr1 = VR_INITIALIZER;
  3294. /* Get value ranges for each operand. For constant operands, create
  3295. a new value range with the operand to simplify processing. */
  3296. op0 = gimple_assign_rhs2 (stmt);
  3297. if (TREE_CODE (op0) == SSA_NAME)
  3298. vr0 = *(get_value_range (op0));
  3299. else if (is_gimple_min_invariant (op0))
  3300. set_value_range_to_value (&vr0, op0, NULL);
  3301. else
  3302. set_value_range_to_varying (&vr0);
  3303. op1 = gimple_assign_rhs3 (stmt);
  3304. if (TREE_CODE (op1) == SSA_NAME)
  3305. vr1 = *(get_value_range (op1));
  3306. else if (is_gimple_min_invariant (op1))
  3307. set_value_range_to_value (&vr1, op1, NULL);
  3308. else
  3309. set_value_range_to_varying (&vr1);
  3310. /* The resulting value range is the union of the operand ranges */
  3311. copy_value_range (vr, &vr0);
  3312. vrp_meet (vr, &vr1);
  3313. }
  3314. /* Extract range information from a comparison expression EXPR based
  3315. on the range of its operand and the expression code. */
  3316. static void
  3317. extract_range_from_comparison (value_range_t *vr, enum tree_code code,
  3318. tree type, tree op0, tree op1)
  3319. {
  3320. bool sop = false;
  3321. tree val;
  3322. val = vrp_evaluate_conditional_warnv_with_ops (code, op0, op1, false, &sop,
  3323. NULL);
  3324. /* A disadvantage of using a special infinity as an overflow
  3325. representation is that we lose the ability to record overflow
  3326. when we don't have an infinity. So we have to ignore a result
  3327. which relies on overflow. */
  3328. if (val && !is_overflow_infinity (val) && !sop)
  3329. {
  3330. /* Since this expression was found on the RHS of an assignment,
  3331. its type may be different from _Bool. Convert VAL to EXPR's
  3332. type. */
  3333. val = fold_convert (type, val);
  3334. if (is_gimple_min_invariant (val))
  3335. set_value_range_to_value (vr, val, vr->equiv);
  3336. else
  3337. set_value_range (vr, VR_RANGE, val, val, vr->equiv);
  3338. }
  3339. else
  3340. /* The result of a comparison is always true or false. */
  3341. set_value_range_to_truthvalue (vr, type);
  3342. }
/* Helper function for simplify_internal_call_using_ranges and
   extract_range_basic.  Return true if OP0 SUBCODE OP1 for
   SUBCODE {PLUS,MINUS,MULT}_EXPR is known to never overflow or
   always overflow.  Set *OVF to true if it is known to always
   overflow.  */

static bool
check_for_binary_op_overflow (enum tree_code subcode, tree type,
			      tree op0, tree op1, bool *ovf)
{
  value_range_t vr0 = VR_INITIALIZER;
  value_range_t vr1 = VR_INITIALIZER;

  /* Fetch ranges for the operands: lattice value for SSA names,
     singleton range for constants, varying otherwise.  */
  if (TREE_CODE (op0) == SSA_NAME)
    vr0 = *get_value_range (op0);
  else if (TREE_CODE (op0) == INTEGER_CST)
    set_value_range_to_value (&vr0, op0, NULL);
  else
    set_value_range_to_varying (&vr0);

  if (TREE_CODE (op1) == SSA_NAME)
    vr1 = *get_value_range (op1);
  else if (TREE_CODE (op1) == INTEGER_CST)
    set_value_range_to_value (&vr1, op1, NULL);
  else
    set_value_range_to_varying (&vr1);

  /* If a range is not a pair of usable integer constants, fall back
     conservatively to the full range of the operand's type.  */
  if (!range_int_cst_p (&vr0)
      || TREE_OVERFLOW (vr0.min)
      || TREE_OVERFLOW (vr0.max))
    {
      vr0.min = vrp_val_min (TREE_TYPE (op0));
      vr0.max = vrp_val_max (TREE_TYPE (op0));
    }
  if (!range_int_cst_p (&vr1)
      || TREE_OVERFLOW (vr1.min)
      || TREE_OVERFLOW (vr1.max))
    {
      vr1.min = vrp_val_min (TREE_TYPE (op1));
      vr1.max = vrp_val_max (TREE_TYPE (op1));
    }

  /* Probe the extreme corners of the operand ranges.  For MINUS_EXPR
     the result is smallest at (vr0.min, vr1.max) and largest at
     (vr0.max, vr1.min), hence the swapped vr1 bounds; for PLUS/MULT
     the like bounds pair together.  If the two probes disagree on
     whether they overflow, we can't conclude anything.  */
  *ovf = arith_overflowed_p (subcode, type, vr0.min,
			     subcode == MINUS_EXPR ? vr1.max : vr1.min);
  if (arith_overflowed_p (subcode, type, vr0.max,
			  subcode == MINUS_EXPR ? vr1.min : vr1.max) != *ovf)
    return false;
  /* Multiplication is not monotonic in either operand, so the extreme
     values can occur at any of the four corners; check the other two
     as well.  */
  if (subcode == MULT_EXPR)
    {
      if (arith_overflowed_p (subcode, type, vr0.min, vr1.max) != *ovf
	  || arith_overflowed_p (subcode, type, vr0.max, vr1.min) != *ovf)
	return false;
    }
  if (*ovf)
    {
      /* So far we found that there is an overflow on the boundaries.
	 That doesn't prove that there is an overflow even for all values
	 in between the boundaries.  For that compute widest_int range
	 of the result and see if it doesn't overlap the range of
	 type.  */
      widest_int wmin, wmax;
      widest_int w[4];
      int i;
      w[0] = wi::to_widest (vr0.min);
      w[1] = wi::to_widest (vr0.max);
      w[2] = wi::to_widest (vr1.min);
      w[3] = wi::to_widest (vr1.max);
      /* Evaluate the operation in infinite precision at all four
	 corner combinations (i&1 selects the vr0 bound, (i&2)/2 the
	 vr1 bound) and track the overall min/max.  */
      for (i = 0; i < 4; i++)
	{
	  widest_int wt;
	  switch (subcode)
	    {
	    case PLUS_EXPR:
	      wt = wi::add (w[i & 1], w[2 + (i & 2) / 2]);
	      break;
	    case MINUS_EXPR:
	      wt = wi::sub (w[i & 1], w[2 + (i & 2) / 2]);
	      break;
	    case MULT_EXPR:
	      wt = wi::mul (w[i & 1], w[2 + (i & 2) / 2]);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  if (i == 0)
	    {
	      wmin = wt;
	      wmax = wt;
	    }
	  else
	    {
	      wmin = wi::smin (wmin, wt);
	      wmax = wi::smax (wmax, wt);
	    }
	}
      /* The result of op0 CODE op1 is known to be in range
	 [wmin, wmax].  */
      widest_int wtmin = wi::to_widest (vrp_val_min (type));
      widest_int wtmax = wi::to_widest (vrp_val_max (type));
      /* If all values in [wmin, wmax] are smaller than
	 [wtmin, wtmax] or all are larger than [wtmin, wtmax],
	 the arithmetic operation will always overflow.  */
      if (wi::lts_p (wmax, wtmin) || wi::gts_p (wmin, wtmax))
	return true;
      /* Otherwise some intermediate values may fit in TYPE; we can't
	 claim "always overflows".  */
      return false;
    }
  /* Neither boundary probe overflowed: the operation never
     overflows.  */
  return true;
}
/* Try to derive a nonnegative or nonzero range out of STMT relying
   primarily on generic routines in fold in conjunction with range data.
   Also handles a few builtins (ffs/popcount/parity/clz/ctz/clrsb),
   the UBSAN_CHECK_* internal functions, and REAL/IMAGPART extraction
   from {ADD,SUB,MUL}_OVERFLOW results.  Store the result in *VR */

static void
extract_range_basic (value_range_t *vr, gimple stmt)
{
  bool sop = false;
  tree type = gimple_expr_type (stmt);

  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      tree fndecl = gimple_call_fndecl (stmt), arg;
      /* MINI/MAXI accumulate the result range for the bit builtins;
	 mini == -2 is a sentinel meaning "give up".  */
      int mini, maxi, zerov = 0, prec;
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_CONSTANT_P:
	  /* If the call is __builtin_constant_p and the argument is a
	     function parameter resolve it to false.  This avoids bogus
	     array bound warnings.
	     ??? We could do this as early as inlining is finished.  */
	  arg = gimple_call_arg (stmt, 0);
	  if (TREE_CODE (arg) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (arg)
	      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
	    {
	      set_value_range_to_null (vr, type);
	      return;
	    }
	  break;
	  /* Both __builtin_ffs* and __builtin_popcount return
	     [0, prec].  */
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  arg = gimple_call_arg (stmt, 0);
	  prec = TYPE_PRECISION (TREE_TYPE (arg));
	  mini = 0;
	  maxi = prec;
	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      value_range_t *vr0 = get_value_range (arg);
	      /* If arg is non-zero, then ffs or popcount
		 are non-zero.  */
	      if (((vr0->type == VR_RANGE
		    && range_includes_zero_p (vr0->min, vr0->max) == 0)
		   || (vr0->type == VR_ANTI_RANGE
		       && range_includes_zero_p (vr0->min, vr0->max) == 1))
		  && !is_overflow_infinity (vr0->min)
		  && !is_overflow_infinity (vr0->max))
		mini = 1;
	      /* If some high bits are known to be zero,
		 we can decrease the maximum.  */
	      if (vr0->type == VR_RANGE
		  && TREE_CODE (vr0->max) == INTEGER_CST
		  && !operand_less_p (vr0->min,
				      build_zero_cst (TREE_TYPE (vr0->min)))
		  && !is_overflow_infinity (vr0->max))
		maxi = tree_floor_log2 (vr0->max) + 1;
	    }
	  goto bitop_builtin;
	  /* __builtin_parity* returns [0, 1].  */
	CASE_INT_FN (BUILT_IN_PARITY):
	  mini = 0;
	  maxi = 1;
	  goto bitop_builtin;
	  /* __builtin_c[lt]z* return [0, prec-1], except for
	     when the argument is 0, but that is undefined behavior.
	     On many targets where the CLZ RTL or optab value is defined
	     for 0 the value is prec, so include that in the range
	     by default.  */
	CASE_INT_FN (BUILT_IN_CLZ):
	  arg = gimple_call_arg (stmt, 0);
	  prec = TYPE_PRECISION (TREE_TYPE (arg));
	  mini = 0;
	  maxi = prec;
	  if (optab_handler (clz_optab, TYPE_MODE (TREE_TYPE (arg)))
	      != CODE_FOR_nothing
	      && CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (TREE_TYPE (arg)),
					    zerov)
	      /* Handle only the single common value.  */
	      && zerov != prec)
	    /* Magic value to give up, unless vr0 proves
	       arg is non-zero.  */
	    mini = -2;
	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      value_range_t *vr0 = get_value_range (arg);
	      /* From clz of VR_RANGE minimum we can compute
		 result maximum.  */
	      if (vr0->type == VR_RANGE
		  && TREE_CODE (vr0->min) == INTEGER_CST
		  && !is_overflow_infinity (vr0->min))
		{
		  maxi = prec - 1 - tree_floor_log2 (vr0->min);
		  /* maxi != prec means the minimum is non-zero, so the
		     undefined/target-defined zero case cannot occur.  */
		  if (maxi != prec)
		    mini = 0;
		}
	      else if (vr0->type == VR_ANTI_RANGE
		       && integer_zerop (vr0->min)
		       && !is_overflow_infinity (vr0->min))
		{
		  /* Anti-range starting at zero excludes zero, so the
		     full defined result range applies.  */
		  maxi = prec - 1;
		  mini = 0;
		}
	      if (mini == -2)
		break;
	      /* From clz of VR_RANGE maximum we can compute
		 result minimum.  */
	      if (vr0->type == VR_RANGE
		  && TREE_CODE (vr0->max) == INTEGER_CST
		  && !is_overflow_infinity (vr0->max))
		{
		  mini = prec - 1 - tree_floor_log2 (vr0->max);
		  /* mini == prec would mean the argument is known zero
		     (undefined); give up.  */
		  if (mini == prec)
		    break;
		}
	    }
	  if (mini == -2)
	    break;
	  goto bitop_builtin;
	  /* __builtin_ctz* return [0, prec-1], except for
	     when the argument is 0, but that is undefined behavior.
	     If there is a ctz optab for this mode and
	     CTZ_DEFINED_VALUE_AT_ZERO, include that in the range,
	     otherwise just assume 0 won't be seen.  */
	CASE_INT_FN (BUILT_IN_CTZ):
	  arg = gimple_call_arg (stmt, 0);
	  prec = TYPE_PRECISION (TREE_TYPE (arg));
	  mini = 0;
	  maxi = prec - 1;
	  if (optab_handler (ctz_optab, TYPE_MODE (TREE_TYPE (arg)))
	      != CODE_FOR_nothing
	      && CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (TREE_TYPE (arg)),
					    zerov))
	    {
	      /* Handle only the two common values.  */
	      if (zerov == -1)
		mini = -1;
	      else if (zerov == prec)
		maxi = prec;
	      else
		/* Magic value to give up, unless vr0 proves
		   arg is non-zero.  */
		mini = -2;
	    }
	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      value_range_t *vr0 = get_value_range (arg);
	      /* If arg is non-zero, then use [0, prec - 1].  */
	      if (((vr0->type == VR_RANGE
		    && integer_nonzerop (vr0->min))
		   || (vr0->type == VR_ANTI_RANGE
		       && integer_zerop (vr0->min)))
		  && !is_overflow_infinity (vr0->min))
		{
		  mini = 0;
		  maxi = prec - 1;
		}
	      /* If some high bits are known to be zero,
		 we can decrease the result maximum.  */
	      if (vr0->type == VR_RANGE
		  && TREE_CODE (vr0->max) == INTEGER_CST
		  && !is_overflow_infinity (vr0->max))
		{
		  maxi = tree_floor_log2 (vr0->max);
		  /* For vr0 [0, 0] give up.  */
		  if (maxi == -1)
		    break;
		}
	    }
	  if (mini == -2)
	    break;
	  goto bitop_builtin;
	  /* __builtin_clrsb* returns [0, prec-1].  */
	CASE_INT_FN (BUILT_IN_CLRSB):
	  arg = gimple_call_arg (stmt, 0);
	  prec = TYPE_PRECISION (TREE_TYPE (arg));
	  mini = 0;
	  maxi = prec - 1;
	  goto bitop_builtin;
	bitop_builtin:
	  /* Common exit for all bit builtins: publish [mini, maxi].  */
	  set_value_range (vr, VR_RANGE, build_int_cst (type, mini),
			   build_int_cst (type, maxi), NULL);
	  return;
	default:
	  break;
	}
    }
  else if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      /* Map the UBSAN checked-arithmetic internal functions to the
	 corresponding tree arithmetic code.  */
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  bool saved_flag_wrapv = flag_wrapv;
	  /* Pretend the arithmetics is wrapping.  If there is
	     any overflow, we'll complain, but will actually do
	     wrapping operation.  */
	  flag_wrapv = 1;
	  extract_range_from_binary_expr (vr, subcode, type,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1));
	  flag_wrapv = saved_flag_wrapv;

	  /* If for both arguments vrp_valueize returned non-NULL,
	     this should have been already folded and if not, it
	     wasn't folded because of overflow.  Avoid removing the
	     UBSAN_CHECK_* calls in that case.  */
	  if (vr->type == VR_RANGE
	      && (vr->min == vr->max
		  || operand_equal_p (vr->min, vr->max, 0)))
	    set_value_range_to_varying (vr);
	  return;
	}
    }
  /* Handle extraction of the two results (result of arithmetics and
     a flag whether arithmetics overflowed) from {ADD,SUB,MUL}_OVERFLOW
     internal function.  */
  else if (is_gimple_assign (stmt)
	   && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
	       || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
	   && INTEGRAL_TYPE_P (type))
    {
      enum tree_code code = gimple_assign_rhs_code (stmt);
      tree op = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (op) == code && TREE_CODE (TREE_OPERAND (op, 0)) == SSA_NAME)
	{
	  gimple g = SSA_NAME_DEF_STMT (TREE_OPERAND (op, 0));
	  if (is_gimple_call (g) && gimple_call_internal_p (g))
	    {
	      enum tree_code subcode = ERROR_MARK;
	      switch (gimple_call_internal_fn (g))
		{
		case IFN_ADD_OVERFLOW:
		  subcode = PLUS_EXPR;
		  break;
		case IFN_SUB_OVERFLOW:
		  subcode = MINUS_EXPR;
		  break;
		case IFN_MUL_OVERFLOW:
		  subcode = MULT_EXPR;
		  break;
		default:
		  break;
		}
	      if (subcode != ERROR_MARK)
		{
		  tree op0 = gimple_call_arg (g, 0);
		  tree op1 = gimple_call_arg (g, 1);
		  if (code == IMAGPART_EXPR)
		    {
		      /* IMAGPART is the overflow flag: 0/1, or exactly
			 OVF when overflow is decidable.  */
		      bool ovf = false;
		      if (check_for_binary_op_overflow (subcode, type,
							op0, op1, &ovf))
			set_value_range_to_value (vr,
						  build_int_cst (type, ovf),
						  NULL);
		      else
			set_value_range (vr, VR_RANGE, build_int_cst (type, 0),
					 build_int_cst (type, 1), NULL);
		    }
		  else if (types_compatible_p (type, TREE_TYPE (op0))
			   && types_compatible_p (type, TREE_TYPE (op1)))
		    {
		      bool saved_flag_wrapv = flag_wrapv;
		      /* Pretend the arithmetics is wrapping.  If there is
			 any overflow, IMAGPART_EXPR will be set.  */
		      flag_wrapv = 1;
		      extract_range_from_binary_expr (vr, subcode, type,
						      op0, op1);
		      flag_wrapv = saved_flag_wrapv;
		    }
		  else
		    {
		      /* Operand types differ from the result type;
			 convert the operand ranges to TYPE first, then
			 combine them.  */
		      value_range_t vr0 = VR_INITIALIZER;
		      value_range_t vr1 = VR_INITIALIZER;
		      bool saved_flag_wrapv = flag_wrapv;
		      /* Pretend the arithmetics is wrapping.  If there is
			 any overflow, IMAGPART_EXPR will be set.  */
		      flag_wrapv = 1;
		      extract_range_from_unary_expr (&vr0, NOP_EXPR,
						     type, op0);
		      extract_range_from_unary_expr (&vr1, NOP_EXPR,
						     type, op1);
		      extract_range_from_binary_expr_1 (vr, subcode, type,
							&vr0, &vr1);
		      flag_wrapv = saved_flag_wrapv;
		    }
		  return;
		}
	    }
	}
    }
  /* Fallback: ask fold whether the statement is nonnegative or
     nonzero and record that much.  */
  if (INTEGRAL_TYPE_P (type)
      && gimple_stmt_nonnegative_warnv_p (stmt, &sop))
    set_value_range_to_nonnegative (vr, type,
				    sop || stmt_overflow_infinity (stmt));
  else if (vrp_stmt_computes_nonzero (stmt, &sop)
	   && !sop)
    set_value_range_to_nonnull (vr, type);
  else
    set_value_range_to_varying (vr);
}
  3759. /* Try to compute a useful range out of assignment STMT and store it
  3760. in *VR. */
  3761. static void
  3762. extract_range_from_assignment (value_range_t *vr, gassign *stmt)
  3763. {
  3764. enum tree_code code = gimple_assign_rhs_code (stmt);
  3765. if (code == ASSERT_EXPR)
  3766. extract_range_from_assert (vr, gimple_assign_rhs1 (stmt));
  3767. else if (code == SSA_NAME)
  3768. extract_range_from_ssa_name (vr, gimple_assign_rhs1 (stmt));
  3769. else if (TREE_CODE_CLASS (code) == tcc_binary)
  3770. extract_range_from_binary_expr (vr, gimple_assign_rhs_code (stmt),
  3771. gimple_expr_type (stmt),
  3772. gimple_assign_rhs1 (stmt),
  3773. gimple_assign_rhs2 (stmt));
  3774. else if (TREE_CODE_CLASS (code) == tcc_unary)
  3775. extract_range_from_unary_expr (vr, gimple_assign_rhs_code (stmt),
  3776. gimple_expr_type (stmt),
  3777. gimple_assign_rhs1 (stmt));
  3778. else if (code == COND_EXPR)
  3779. extract_range_from_cond_expr (vr, stmt);
  3780. else if (TREE_CODE_CLASS (code) == tcc_comparison)
  3781. extract_range_from_comparison (vr, gimple_assign_rhs_code (stmt),
  3782. gimple_expr_type (stmt),
  3783. gimple_assign_rhs1 (stmt),
  3784. gimple_assign_rhs2 (stmt));
  3785. else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
  3786. && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
  3787. set_value_range_to_value (vr, gimple_assign_rhs1 (stmt), NULL);
  3788. else
  3789. set_value_range_to_varying (vr);
  3790. if (vr->type == VR_VARYING)
  3791. extract_range_basic (vr, stmt);
  3792. }
/* Given a range VR, a LOOP and a variable VAR, determine whether it
   would be profitable to adjust VR using scalar evolution information
   for VAR.  If so, update VR with the new limits.  */

static void
adjust_range_with_scev (value_range_t *vr, struct loop *loop,
			gimple stmt, tree var)
{
  tree init, step, chrec, tmin, tmax, min, max, type, tem;
  enum ev_direction dir;

  /* TODO.  Don't adjust anti-ranges.  An anti-range may provide
     better opportunities than a regular range, but I'm not sure.  */
  if (vr->type == VR_ANTI_RANGE)
    return;

  chrec = instantiate_parameters (loop, analyze_scalar_evolution (loop, var));

  /* Like in PR19590, scev can return a constant function.  */
  if (is_gimple_min_invariant (chrec))
    {
      set_value_range_to_value (vr, chrec, vr->equiv);
      return;
    }

  if (TREE_CODE (chrec) != POLYNOMIAL_CHREC)
    return;

  init = initial_condition_in_loop_num (chrec, loop->num);
  /* If the initial value is known to be a single constant through its
     value range, use that constant instead.  */
  tem = op_with_constant_singleton_value_range (init);
  if (tem)
    init = tem;
  step = evolution_part_in_loop_num (chrec, loop->num);
  /* Likewise for the step.  */
  tem = op_with_constant_singleton_value_range (step);
  if (tem)
    step = tem;

  /* If STEP is symbolic, we can't know whether INIT will be the
     minimum or maximum value in the range.  Also, unless INIT is
     a simple expression, compare_values and possibly other functions
     in tree-vrp won't be able to handle it.  */
  if (step == NULL_TREE
      || !is_gimple_min_invariant (step)
      || !valid_value_p (init))
    return;

  dir = scev_direction (chrec);
  if (/* Do not adjust ranges if we do not know whether the iv increases
	 or decreases,  ... */
      dir == EV_DIR_UNKNOWN
      /* ... or if it may wrap.  */
      || scev_probably_wraps_p (init, step, stmt, get_chrec_loop (chrec),
				true))
    return;

  /* We use TYPE_MIN_VALUE and TYPE_MAX_VALUE here instead of
     negative_overflow_infinity and positive_overflow_infinity,
     because we have concluded that the loop probably does not
     wrap.  */

  type = TREE_TYPE (var);
  if (POINTER_TYPE_P (type) || !TYPE_MIN_VALUE (type))
    tmin = lower_bound_in_type (type, type);
  else
    tmin = TYPE_MIN_VALUE (type);
  if (POINTER_TYPE_P (type) || !TYPE_MAX_VALUE (type))
    tmax = upper_bound_in_type (type, type);
  else
    tmax = TYPE_MAX_VALUE (type);

  /* Try to use estimated number of iterations for the loop to constrain the
     final value in the evolution.  */
  if (TREE_CODE (step) == INTEGER_CST
      && is_gimple_val (init)
      && (TREE_CODE (init) != SSA_NAME
	  || get_value_range (init)->type == VR_RANGE))
    {
      widest_int nit;

      /* We are only entering here for loop header PHI nodes, so using
	 the number of latch executions is the correct thing to use.  */
      if (max_loop_iterations (loop, &nit))
	{
	  value_range_t maxvr = VR_INITIALIZER;
	  signop sgn = TYPE_SIGN (TREE_TYPE (step));
	  bool overflow;

	  /* Total evolution over the loop is STEP * NIT; compute it in
	     widest_int so overflow is detectable.  */
	  widest_int wtmp = wi::mul (wi::to_widest (step), nit, sgn,
				     &overflow);
	  /* If the multiplication overflowed we can't do a meaningful
	     adjustment.  Likewise if the result doesn't fit in the type
	     of the induction variable.  For a signed type we have to
	     check whether the result has the expected signedness which
	     is that of the step as number of iterations is unsigned.  */
	  if (!overflow
	      && wi::fits_to_tree_p (wtmp, TREE_TYPE (init))
	      && (sgn == UNSIGNED
		  || wi::gts_p (wtmp, 0) == wi::gts_p (step, 0)))
	    {
	      tem = wide_int_to_tree (TREE_TYPE (init), wtmp);
	      extract_range_from_binary_expr (&maxvr, PLUS_EXPR,
					      TREE_TYPE (init), init, tem);
	      /* Likewise if the addition did.  */
	      if (maxvr.type == VR_RANGE)
		{
		  tmin = maxvr.min;
		  tmax = maxvr.max;
		}
	    }
	}
    }

  if (vr->type == VR_VARYING || vr->type == VR_UNDEFINED)
    {
      min = tmin;
      max = tmax;

      /* For VARYING or UNDEFINED ranges, just about anything we get
	 from scalar evolutions should be better.  */

      if (dir == EV_DIR_DECREASES)
	max = init;
      else
	min = init;
    }
  else if (vr->type == VR_RANGE)
    {
      min = vr->min;
      max = vr->max;

      if (dir == EV_DIR_DECREASES)
	{
	  /* INIT is the maximum value.  If INIT is lower than VR->MAX
	     but no smaller than VR->MIN, set VR->MAX to INIT.  */
	  if (compare_values (init, max) == -1)
	    max = init;

	  /* According to the loop information, the variable does not
	     overflow.  If we think it does, probably because of an
	     overflow due to arithmetic on a different INF value,
	     reset now.  */
	  if (is_negative_overflow_infinity (min)
	      || compare_values (min, tmin) == -1)
	    min = tmin;

	}
      else
	{
	  /* If INIT is bigger than VR->MIN, set VR->MIN to INIT.  */
	  if (compare_values (init, min) == 1)
	    min = init;

	  if (is_positive_overflow_infinity (max)
	      || compare_values (tmax, max) == -1)
	    max = tmax;
	}
    }
  else
    return;

  /* If we just created an invalid range with the minimum
     greater than the maximum, we fail conservatively.
     This should happen only in unreachable
     parts of code, or for invalid programs.  */
  if (compare_values (min, max) == 1
      || (is_negative_overflow_infinity (min)
	  && is_positive_overflow_infinity (max)))
    return;

  set_value_range (vr, VR_RANGE, min, max, vr->equiv);
}
  3942. /* Given two numeric value ranges VR0, VR1 and a comparison code COMP:
  3943. - Return BOOLEAN_TRUE_NODE if VR0 COMP VR1 always returns true for
  3944. all the values in the ranges.
  3945. - Return BOOLEAN_FALSE_NODE if the comparison always returns false.
  3946. - Return NULL_TREE if it is not always possible to determine the
  3947. value of the comparison.
  3948. Also set *STRICT_OVERFLOW_P to indicate whether a range with an
  3949. overflow infinity was used in the test. */
  3950. static tree
  3951. compare_ranges (enum tree_code comp, value_range_t *vr0, value_range_t *vr1,
  3952. bool *strict_overflow_p)
  3953. {
  3954. /* VARYING or UNDEFINED ranges cannot be compared. */
  3955. if (vr0->type == VR_VARYING
  3956. || vr0->type == VR_UNDEFINED
  3957. || vr1->type == VR_VARYING
  3958. || vr1->type == VR_UNDEFINED)
  3959. return NULL_TREE;
  3960. /* Anti-ranges need to be handled separately. */
  3961. if (vr0->type == VR_ANTI_RANGE || vr1->type == VR_ANTI_RANGE)
  3962. {
  3963. /* If both are anti-ranges, then we cannot compute any
  3964. comparison. */
  3965. if (vr0->type == VR_ANTI_RANGE && vr1->type == VR_ANTI_RANGE)
  3966. return NULL_TREE;
  3967. /* These comparisons are never statically computable. */
  3968. if (comp == GT_EXPR
  3969. || comp == GE_EXPR
  3970. || comp == LT_EXPR
  3971. || comp == LE_EXPR)
  3972. return NULL_TREE;
  3973. /* Equality can be computed only between a range and an
  3974. anti-range. ~[VAL1, VAL2] == [VAL1, VAL2] is always false. */
  3975. if (vr0->type == VR_RANGE)
  3976. {
  3977. /* To simplify processing, make VR0 the anti-range. */
  3978. value_range_t *tmp = vr0;
  3979. vr0 = vr1;
  3980. vr1 = tmp;
  3981. }
  3982. gcc_assert (comp == NE_EXPR || comp == EQ_EXPR);
  3983. if (compare_values_warnv (vr0->min, vr1->min, strict_overflow_p) == 0
  3984. && compare_values_warnv (vr0->max, vr1->max, strict_overflow_p) == 0)
  3985. return (comp == NE_EXPR) ? boolean_true_node : boolean_false_node;
  3986. return NULL_TREE;
  3987. }
  3988. if (!usable_range_p (vr0, strict_overflow_p)
  3989. || !usable_range_p (vr1, strict_overflow_p))
  3990. return NULL_TREE;
  3991. /* Simplify processing. If COMP is GT_EXPR or GE_EXPR, switch the
  3992. operands around and change the comparison code. */
  3993. if (comp == GT_EXPR || comp == GE_EXPR)
  3994. {
  3995. value_range_t *tmp;
  3996. comp = (comp == GT_EXPR) ? LT_EXPR : LE_EXPR;
  3997. tmp = vr0;
  3998. vr0 = vr1;
  3999. vr1 = tmp;
  4000. }
  4001. if (comp == EQ_EXPR)
  4002. {
  4003. /* Equality may only be computed if both ranges represent
  4004. exactly one value. */
  4005. if (compare_values_warnv (vr0->min, vr0->max, strict_overflow_p) == 0
  4006. && compare_values_warnv (vr1->min, vr1->max, strict_overflow_p) == 0)
  4007. {
  4008. int cmp_min = compare_values_warnv (vr0->min, vr1->min,
  4009. strict_overflow_p);
  4010. int cmp_max = compare_values_warnv (vr0->max, vr1->max,
  4011. strict_overflow_p);
  4012. if (cmp_min == 0 && cmp_max == 0)
  4013. return boolean_true_node;
  4014. else if (cmp_min != -2 && cmp_max != -2)
  4015. return boolean_false_node;
  4016. }
  4017. /* If [V0_MIN, V1_MAX] < [V1_MIN, V1_MAX] then V0 != V1. */
  4018. else if (compare_values_warnv (vr0->min, vr1->max,
  4019. strict_overflow_p) == 1
  4020. || compare_values_warnv (vr1->min, vr0->max,
  4021. strict_overflow_p) == 1)
  4022. return boolean_false_node;
  4023. return NULL_TREE;
  4024. }
  4025. else if (comp == NE_EXPR)
  4026. {
  4027. int cmp1, cmp2;
  4028. /* If VR0 is completely to the left or completely to the right
  4029. of VR1, they are always different. Notice that we need to
  4030. make sure that both comparisons yield similar results to
  4031. avoid comparing values that cannot be compared at
  4032. compile-time. */
  4033. cmp1 = compare_values_warnv (vr0->max, vr1->min, strict_overflow_p);
  4034. cmp2 = compare_values_warnv (vr0->min, vr1->max, strict_overflow_p);
  4035. if ((cmp1 == -1 && cmp2 == -1) || (cmp1 == 1 && cmp2 == 1))
  4036. return boolean_true_node;
  4037. /* If VR0 and VR1 represent a single value and are identical,
  4038. return false. */
  4039. else if (compare_values_warnv (vr0->min, vr0->max,
  4040. strict_overflow_p) == 0
  4041. && compare_values_warnv (vr1->min, vr1->max,
  4042. strict_overflow_p) == 0
  4043. && compare_values_warnv (vr0->min, vr1->min,
  4044. strict_overflow_p) == 0
  4045. && compare_values_warnv (vr0->max, vr1->max,
  4046. strict_overflow_p) == 0)
  4047. return boolean_false_node;
  4048. /* Otherwise, they may or may not be different. */
  4049. else
  4050. return NULL_TREE;
  4051. }
  4052. else if (comp == LT_EXPR || comp == LE_EXPR)
  4053. {
  4054. int tst;
  4055. /* If VR0 is to the left of VR1, return true. */
  4056. tst = compare_values_warnv (vr0->max, vr1->min, strict_overflow_p);
  4057. if ((comp == LT_EXPR && tst == -1)
  4058. || (comp == LE_EXPR && (tst == -1 || tst == 0)))
  4059. {
  4060. if (overflow_infinity_range_p (vr0)
  4061. || overflow_infinity_range_p (vr1))
  4062. *strict_overflow_p = true;
  4063. return boolean_true_node;
  4064. }
  4065. /* If VR0 is to the right of VR1, return false. */
  4066. tst = compare_values_warnv (vr0->min, vr1->max, strict_overflow_p);
  4067. if ((comp == LT_EXPR && (tst == 0 || tst == 1))
  4068. || (comp == LE_EXPR && tst == 1))
  4069. {
  4070. if (overflow_infinity_range_p (vr0)
  4071. || overflow_infinity_range_p (vr1))
  4072. *strict_overflow_p = true;
  4073. return boolean_false_node;
  4074. }
  4075. /* Otherwise, we don't know. */
  4076. return NULL_TREE;
  4077. }
  4078. gcc_unreachable ();
  4079. }
/* Given a value range VR, a value VAL and a comparison code COMP, return
   BOOLEAN_TRUE_NODE if VR COMP VAL always returns true for all the
   values in VR.  Return BOOLEAN_FALSE_NODE if the comparison
   always returns false.  Return NULL_TREE if it is not always
   possible to determine the value of the comparison.  Also set
   *STRICT_OVERFLOW_P to indicate whether a range with an overflow
   infinity was used in the test.

   NOTE(review): judging from the uses below, compare_values_warnv
   returns -1/0/1 for less/equal/greater, -2 when the values cannot be
   compared at compile time, and 2 for "known unequal" -- confirm
   against its definition.  */

static tree
compare_range_with_value (enum tree_code comp, value_range_t *vr, tree val,
			  bool *strict_overflow_p)
{
  /* Nothing can be concluded from a varying or undefined range.  */
  if (vr->type == VR_VARYING || vr->type == VR_UNDEFINED)
    return NULL_TREE;

  /* Anti-ranges need to be handled separately.  */
  if (vr->type == VR_ANTI_RANGE)
    {
      /* For anti-ranges, the only predicates that we can compute at
	 compile time are equality and inequality.  */
      if (comp == GT_EXPR
	  || comp == GE_EXPR
	  || comp == LT_EXPR
	  || comp == LE_EXPR)
	return NULL_TREE;

      /* ~[VAL_1, VAL_2] OP VAL is known if VAL_1 <= VAL <= VAL_2.  */
      if (value_inside_range (val, vr->min, vr->max) == 1)
	return (comp == NE_EXPR) ? boolean_true_node : boolean_false_node;

      return NULL_TREE;
    }

  /* Ranges built from overflow infinities may not be usable here;
     usable_range_p also records that fact in *STRICT_OVERFLOW_P.  */
  if (!usable_range_p (vr, strict_overflow_p))
    return NULL_TREE;

  if (comp == EQ_EXPR)
    {
      /* EQ_EXPR may only be computed if VR represents exactly
	 one value.  */
      if (compare_values_warnv (vr->min, vr->max, strict_overflow_p) == 0)
	{
	  int cmp = compare_values_warnv (vr->min, val, strict_overflow_p);
	  if (cmp == 0)
	    return boolean_true_node;
	  else if (cmp == -1 || cmp == 1 || cmp == 2)
	    return boolean_false_node;
	}
      /* Otherwise, if VAL falls entirely outside [MIN, MAX] the
	 equality is known false.  */
      else if (compare_values_warnv (val, vr->min, strict_overflow_p) == -1
	       || compare_values_warnv (vr->max, val, strict_overflow_p) == -1)
	return boolean_false_node;

      return NULL_TREE;
    }
  else if (comp == NE_EXPR)
    {
      /* If VAL is not inside VR, then they are always different.  */
      if (compare_values_warnv (vr->max, val, strict_overflow_p) == -1
	  || compare_values_warnv (vr->min, val, strict_overflow_p) == 1)
	return boolean_true_node;

      /* If VR represents exactly one value equal to VAL, then return
	 false.  */
      if (compare_values_warnv (vr->min, vr->max, strict_overflow_p) == 0
	  && compare_values_warnv (vr->min, val, strict_overflow_p) == 0)
	return boolean_false_node;

      /* Otherwise, they may or may not be different.  */
      return NULL_TREE;
    }
  else if (comp == LT_EXPR || comp == LE_EXPR)
    {
      int tst;

      /* If VR is to the left of VAL, return true.  */
      tst = compare_values_warnv (vr->max, val, strict_overflow_p);
      if ((comp == LT_EXPR && tst == -1)
	  || (comp == LE_EXPR && (tst == -1 || tst == 0)))
	{
	  /* Record that an overflow-infinity bound participated in
	     this result.  */
	  if (overflow_infinity_range_p (vr))
	    *strict_overflow_p = true;
	  return boolean_true_node;
	}

      /* If VR is to the right of VAL, return false.  */
      tst = compare_values_warnv (vr->min, val, strict_overflow_p);
      if ((comp == LT_EXPR && (tst == 0 || tst == 1))
	  || (comp == LE_EXPR && tst == 1))
	{
	  if (overflow_infinity_range_p (vr))
	    *strict_overflow_p = true;
	  return boolean_false_node;
	}

      /* Otherwise, we don't know.  */
      return NULL_TREE;
    }
  else if (comp == GT_EXPR || comp == GE_EXPR)
    {
      int tst;

      /* If VR is to the right of VAL, return true.  */
      tst = compare_values_warnv (vr->min, val, strict_overflow_p);
      if ((comp == GT_EXPR && tst == 1)
	  || (comp == GE_EXPR && (tst == 0 || tst == 1)))
	{
	  if (overflow_infinity_range_p (vr))
	    *strict_overflow_p = true;
	  return boolean_true_node;
	}

      /* If VR is to the left of VAL, return false.  */
      tst = compare_values_warnv (vr->max, val, strict_overflow_p);
      if ((comp == GT_EXPR && (tst == -1 || tst == 0))
	  || (comp == GE_EXPR && tst == -1))
	{
	  if (overflow_infinity_range_p (vr))
	    *strict_overflow_p = true;
	  return boolean_false_node;
	}

      /* Otherwise, we don't know.  */
      return NULL_TREE;
    }

  gcc_unreachable ();
}
  4191. /* Debugging dumps. */
  4192. void dump_value_range (FILE *, value_range_t *);
  4193. void debug_value_range (value_range_t *);
  4194. void dump_all_value_ranges (FILE *);
  4195. void debug_all_value_ranges (void);
  4196. void dump_vr_equiv (FILE *, bitmap);
  4197. void debug_vr_equiv (bitmap);
  4198. /* Dump value range VR to FILE. */
  4199. void
  4200. dump_value_range (FILE *file, value_range_t *vr)
  4201. {
  4202. if (vr == NULL)
  4203. fprintf (file, "[]");
  4204. else if (vr->type == VR_UNDEFINED)
  4205. fprintf (file, "UNDEFINED");
  4206. else if (vr->type == VR_RANGE || vr->type == VR_ANTI_RANGE)
  4207. {
  4208. tree type = TREE_TYPE (vr->min);
  4209. fprintf (file, "%s[", (vr->type == VR_ANTI_RANGE) ? "~" : "");
  4210. if (is_negative_overflow_infinity (vr->min))
  4211. fprintf (file, "-INF(OVF)");
  4212. else if (INTEGRAL_TYPE_P (type)
  4213. && !TYPE_UNSIGNED (type)
  4214. && vrp_val_is_min (vr->min))
  4215. fprintf (file, "-INF");
  4216. else
  4217. print_generic_expr (file, vr->min, 0);
  4218. fprintf (file, ", ");
  4219. if (is_positive_overflow_infinity (vr->max))
  4220. fprintf (file, "+INF(OVF)");
  4221. else if (INTEGRAL_TYPE_P (type)
  4222. && vrp_val_is_max (vr->max))
  4223. fprintf (file, "+INF");
  4224. else
  4225. print_generic_expr (file, vr->max, 0);
  4226. fprintf (file, "]");
  4227. if (vr->equiv)
  4228. {
  4229. bitmap_iterator bi;
  4230. unsigned i, c = 0;
  4231. fprintf (file, " EQUIVALENCES: { ");
  4232. EXECUTE_IF_SET_IN_BITMAP (vr->equiv, 0, i, bi)
  4233. {
  4234. print_generic_expr (file, ssa_name (i), 0);
  4235. fprintf (file, " ");
  4236. c++;
  4237. }
  4238. fprintf (file, "} (%u elements)", c);
  4239. }
  4240. }
  4241. else if (vr->type == VR_VARYING)
  4242. fprintf (file, "VARYING");
  4243. else
  4244. fprintf (file, "INVALID RANGE");
  4245. }
/* Dump value range VR to stderr.  DEBUG_FUNCTION presumably keeps
   this callable from a debugger -- NOTE(review): confirm the macro's
   definition.  */

DEBUG_FUNCTION void
debug_value_range (value_range_t *vr)
{
  dump_value_range (stderr, vr);
  fprintf (stderr, "\n");
}
  4253. /* Dump value ranges of all SSA_NAMEs to FILE. */
  4254. void
  4255. dump_all_value_ranges (FILE *file)
  4256. {
  4257. size_t i;
  4258. for (i = 0; i < num_vr_values; i++)
  4259. {
  4260. if (vr_value[i])
  4261. {
  4262. print_generic_expr (file, ssa_name (i), 0);
  4263. fprintf (file, ": ");
  4264. dump_value_range (file, vr_value[i]);
  4265. fprintf (file, "\n");
  4266. }
  4267. }
  4268. fprintf (file, "\n");
  4269. }
/* Dump all value ranges to stderr.  Debugger convenience wrapper
   around dump_all_value_ranges.  */

DEBUG_FUNCTION void
debug_all_value_ranges (void)
{
  dump_all_value_ranges (stderr);
}
/* Given a COND_EXPR COND of the form 'V OP W', and an SSA name V,
   create a new SSA name N and return the assertion assignment
   'N = ASSERT_EXPR <V, V OP W>'.  */

static gimple
build_assert_expr_for (tree cond, tree v)
{
  tree a;
  gassign *assertion;

  gcc_assert (TREE_CODE (v) == SSA_NAME
	      && COMPARISON_CLASS_P (cond));

  /* Build the ASSERT_EXPR tree; its lhs is left NULL_TREE here and is
     filled in by create_new_def_for below.  */
  a = build2 (ASSERT_EXPR, TREE_TYPE (v), v, cond);
  assertion = gimple_build_assign (NULL_TREE, a);

  /* The new ASSERT_EXPR creates a new SSA name that replaces the
     operand of the ASSERT_EXPR.  Create it so the new name and the old one
     are registered in the replacement table so that we can fix the SSA web
     after adding all the ASSERT_EXPRs.  */
  create_new_def_for (v, assertion, NULL);

  return assertion;
}
/* Return true if STMT is a GIMPLE_COND whose predicate involves
   floating point values (i.e. the LHS of the condition has floating
   point type).  The original comment said "Return false", which
   contradicts the code below.  */

static inline bool
fp_predicate (gimple stmt)
{
  GIMPLE_CHECK (stmt, GIMPLE_COND);

  return FLOAT_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)));
}
/* If the range of values taken by OP can be inferred after STMT executes,
   return the comparison code (COMP_CODE_P) and value (VAL_P) that
   describes the inferred range.  Return true if a range could be
   inferred.  On failure, *VAL_P is NULL_TREE and *COMP_CODE_P is
   ERROR_MARK.  */

static bool
infer_value_range (gimple stmt, tree op, enum tree_code *comp_code_p, tree *val_p)
{
  *val_p = NULL_TREE;
  *comp_code_p = ERROR_MARK;

  /* Do not attempt to infer anything in names that flow through
     abnormal edges.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
    return false;

  /* Similarly, don't infer anything from statements that may throw
     exceptions.  ??? Relax this requirement?  */
  if (stmt_could_throw_p (stmt))
    return false;

  /* If STMT is the last statement of a basic block with no normal
     successors, there is no point inferring anything about any of its
     operands.  We would not be able to find a proper insertion point
     for the assertion, anyway.  */
  if (stmt_ends_bb_p (stmt))
    {
      edge_iterator ei;
      edge e;

      /* Look for at least one non-abnormal successor.  NOTE(review):
	 the E == NULL check relies on FOR_EACH_EDGE leaving E NULL
	 when the loop runs off the end without breaking -- confirm
	 against the iterator's definition.  */
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
	if (!(e->flags & EDGE_ABNORMAL))
	  break;
      if (e == NULL)
	return false;
    }

  /* The only range currently inferred: if STMT implies OP is non-null,
     report OP != 0.  */
  if (infer_nonnull_range (stmt, op, true, true))
    {
      *val_p = build_int_cst (TREE_TYPE (op), 0);
      *comp_code_p = NE_EXPR;
      return true;
    }

  return false;
}
  4342. void dump_asserts_for (FILE *, tree);
  4343. void debug_asserts_for (tree);
  4344. void dump_all_asserts (FILE *);
  4345. void debug_all_asserts (void);
  4346. /* Dump all the registered assertions for NAME to FILE. */
  4347. void
  4348. dump_asserts_for (FILE *file, tree name)
  4349. {
  4350. assert_locus_t loc;
  4351. fprintf (file, "Assertions to be inserted for ");
  4352. print_generic_expr (file, name, 0);
  4353. fprintf (file, "\n");
  4354. loc = asserts_for[SSA_NAME_VERSION (name)];
  4355. while (loc)
  4356. {
  4357. fprintf (file, "\t");
  4358. print_gimple_stmt (file, gsi_stmt (loc->si), 0, 0);
  4359. fprintf (file, "\n\tBB #%d", loc->bb->index);
  4360. if (loc->e)
  4361. {
  4362. fprintf (file, "\n\tEDGE %d->%d", loc->e->src->index,
  4363. loc->e->dest->index);
  4364. dump_edge_info (file, loc->e, dump_flags, 0);
  4365. }
  4366. fprintf (file, "\n\tPREDICATE: ");
  4367. print_generic_expr (file, name, 0);
  4368. fprintf (file, " %s ", get_tree_code_name (loc->comp_code));
  4369. print_generic_expr (file, loc->val, 0);
  4370. fprintf (file, "\n\n");
  4371. loc = loc->next;
  4372. }
  4373. fprintf (file, "\n");
  4374. }
/* Dump all the registered assertions for NAME to stderr.  Debugger
   convenience wrapper around dump_asserts_for.  */

DEBUG_FUNCTION void
debug_asserts_for (tree name)
{
  dump_asserts_for (stderr, name);
}
/* Dump all the registered assertions for all the names to FILE.  */

void
dump_all_asserts (FILE *file)
{
  unsigned i;
  bitmap_iterator bi;

  fprintf (file, "\nASSERT_EXPRs to be inserted\n\n");
  /* NEED_ASSERT_FOR has a bit set for each SSA name version with at
     least one registered assertion (see register_new_assert_for).  */
  EXECUTE_IF_SET_IN_BITMAP (need_assert_for, 0, i, bi)
    dump_asserts_for (file, ssa_name (i));
  fprintf (file, "\n");
}
/* Dump all the registered assertions for all the names to stderr.
   Debugger convenience wrapper around dump_all_asserts.  */

DEBUG_FUNCTION void
debug_all_asserts (void)
{
  dump_all_asserts (stderr);
}
/* If NAME doesn't have an ASSERT_EXPR registered for asserting
   'EXPR COMP_CODE VAL' at a location that dominates block BB or
   E->DEST, then register this location as a possible insertion point
   for ASSERT_EXPR <NAME, EXPR COMP_CODE VAL>.

   BB, E and SI provide the exact insertion point for the new
   ASSERT_EXPR.  If BB is NULL, then the ASSERT_EXPR is to be inserted
   on edge E.  Otherwise, if E is NULL, the ASSERT_EXPR is inserted on
   BB.  If SI points to a COND_EXPR or a SWITCH_EXPR statement, then E
   must not be NULL.  */

static void
register_new_assert_for (tree name, tree expr,
			 enum tree_code comp_code,
			 tree val,
			 basic_block bb,
			 edge e,
			 gimple_stmt_iterator si)
{
  assert_locus_t n, loc, last_loc;
  basic_block dest_bb;

  /* Exactly one of BB and E must be given (both may not be NULL per
     the contract above; the assert only enforces that both are not
     non-NULL simultaneously).  */
  gcc_checking_assert (bb == NULL || e == NULL);

  if (e == NULL)
    gcc_checking_assert (gimple_code (gsi_stmt (si)) != GIMPLE_COND
			 && gimple_code (gsi_stmt (si)) != GIMPLE_SWITCH);

  /* Never build an assert comparing against an integer constant with
     TREE_OVERFLOW set.  This confuses our undefined overflow warning
     machinery.  */
  if (TREE_OVERFLOW_P (val))
    val = drop_tree_overflow (val);

  /* The new assertion A will be inserted at BB or E.  We need to
     determine if the new location is dominated by a previously
     registered location for A.  If we are doing an edge insertion,
     assume that A will be inserted at E->DEST.  Note that this is not
     necessarily true.

     If E is a critical edge, it will be split.  But even if E is
     split, the new block will dominate the same set of blocks that
     E->DEST dominates.

     The reverse, however, is not true, blocks dominated by E->DEST
     will not be dominated by the new block created to split E.  So,
     if the insertion location is on a critical edge, we will not use
     the new location to move another assertion previously registered
     at a block dominated by E->DEST.  */
  dest_bb = (bb) ? bb : e->dest;

  /* If NAME already has an ASSERT_EXPR registered for COMP_CODE and
     VAL at a block dominating DEST_BB, then we don't need to insert a new
     one.  Similarly, if the same assertion already exists at a block
     dominated by DEST_BB and the new location is not on a critical
     edge, then update the existing location for the assertion (i.e.,
     move the assertion up in the dominance tree).

     Note, this is implemented as a simple linked list because there
     should not be more than a handful of assertions registered per
     name.  If this becomes a performance problem, a table hashed by
     COMP_CODE and VAL could be implemented.  */
  loc = asserts_for[SSA_NAME_VERSION (name)];
  last_loc = loc;
  while (loc)
    {
      /* Look for an existing entry asserting the same predicate on
	 the same expression.  Pointer equality is checked first as a
	 cheap shortcut before the structural comparison.  */
      if (loc->comp_code == comp_code
	  && (loc->val == val
	      || operand_equal_p (loc->val, val, 0))
	  && (loc->expr == expr
	      || operand_equal_p (loc->expr, expr, 0)))
	{
	  /* If E is not a critical edge and DEST_BB
	     dominates the existing location for the assertion, move
	     the assertion up in the dominance tree by updating its
	     location information.  */
	  if ((e == NULL || !EDGE_CRITICAL_P (e))
	      && dominated_by_p (CDI_DOMINATORS, loc->bb, dest_bb))
	    {
	      loc->bb = dest_bb;
	      loc->e = e;
	      loc->si = si;
	      return;
	    }
	}

      /* Update the last node of the list and move to the next one.  */
      last_loc = loc;
      loc = loc->next;
    }

  /* If we didn't find an assertion already registered for
     NAME COMP_CODE VAL, add a new one at the end of the list of
     assertions associated with NAME.  */
  n = XNEW (struct assert_locus_d);
  n->bb = dest_bb;
  n->e = e;
  n->si = si;
  n->comp_code = comp_code;
  n->val = val;
  n->expr = expr;
  n->next = NULL;

  if (last_loc)
    last_loc->next = n;
  else
    asserts_for[SSA_NAME_VERSION (name)] = n;

  /* Record that NAME has at least one assertion pending; consumed by
     dump_all_asserts and the insertion phase.  */
  bitmap_set_bit (need_assert_for, SSA_NAME_VERSION (name));
}
  4494. /* (COND_OP0 COND_CODE COND_OP1) is a predicate which uses NAME.
  4495. Extract a suitable test code and value and store them into *CODE_P and
  4496. *VAL_P so the predicate is normalized to NAME *CODE_P *VAL_P.
  4497. If no extraction was possible, return FALSE, otherwise return TRUE.
  4498. If INVERT is true, then we invert the result stored into *CODE_P. */
  4499. static bool
  4500. extract_code_and_val_from_cond_with_ops (tree name, enum tree_code cond_code,
  4501. tree cond_op0, tree cond_op1,
  4502. bool invert, enum tree_code *code_p,
  4503. tree *val_p)
  4504. {
  4505. enum tree_code comp_code;
  4506. tree val;
  4507. /* Otherwise, we have a comparison of the form NAME COMP VAL
  4508. or VAL COMP NAME. */
  4509. if (name == cond_op1)
  4510. {
  4511. /* If the predicate is of the form VAL COMP NAME, flip
  4512. COMP around because we need to register NAME as the
  4513. first operand in the predicate. */
  4514. comp_code = swap_tree_comparison (cond_code);
  4515. val = cond_op0;
  4516. }
  4517. else
  4518. {
  4519. /* The comparison is of the form NAME COMP VAL, so the
  4520. comparison code remains unchanged. */
  4521. comp_code = cond_code;
  4522. val = cond_op1;
  4523. }
  4524. /* Invert the comparison code as necessary. */
  4525. if (invert)
  4526. comp_code = invert_tree_comparison (comp_code, 0);
  4527. /* VRP does not handle float types. */
  4528. if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)))
  4529. return false;
  4530. /* Do not register always-false predicates.
  4531. FIXME: this works around a limitation in fold() when dealing with
  4532. enumerations. Given 'enum { N1, N2 } x;', fold will not
  4533. fold 'if (x > N2)' to 'if (0)'. */
  4534. if ((comp_code == GT_EXPR || comp_code == LT_EXPR)
  4535. && INTEGRAL_TYPE_P (TREE_TYPE (val)))
  4536. {
  4537. tree min = TYPE_MIN_VALUE (TREE_TYPE (val));
  4538. tree max = TYPE_MAX_VALUE (TREE_TYPE (val));
  4539. if (comp_code == GT_EXPR
  4540. && (!max
  4541. || compare_values (val, max) == 0))
  4542. return false;
  4543. if (comp_code == LT_EXPR
  4544. && (!min
  4545. || compare_values (val, min) == 0))
  4546. return false;
  4547. }
  4548. *code_p = comp_code;
  4549. *val_p = val;
  4550. return true;
  4551. }
  4552. /* Find out smallest RES where RES > VAL && (RES & MASK) == RES, if any
  4553. (otherwise return VAL). VAL and MASK must be zero-extended for
  4554. precision PREC. If SGNBIT is non-zero, first xor VAL with SGNBIT
  4555. (to transform signed values into unsigned) and at the end xor
  4556. SGNBIT back. */
  4557. static wide_int
  4558. masked_increment (const wide_int &val_in, const wide_int &mask,
  4559. const wide_int &sgnbit, unsigned int prec)
  4560. {
  4561. wide_int bit = wi::one (prec), res;
  4562. unsigned int i;
  4563. wide_int val = val_in ^ sgnbit;
  4564. for (i = 0; i < prec; i++, bit += bit)
  4565. {
  4566. res = mask;
  4567. if ((res & bit) == 0)
  4568. continue;
  4569. res = bit - 1;
  4570. res = (val + bit).and_not (res);
  4571. res &= mask;
  4572. if (wi::gtu_p (res, val))
  4573. return res ^ sgnbit;
  4574. }
  4575. return val ^ sgnbit;
  4576. }
  4577. /* Try to register an edge assertion for SSA name NAME on edge E for
  4578. the condition COND contributing to the conditional jump pointed to by BSI.
  4579. Invert the condition COND if INVERT is true. */
  4580. static void
  4581. register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
  4582. enum tree_code cond_code,
  4583. tree cond_op0, tree cond_op1, bool invert)
  4584. {
  4585. tree val;
  4586. enum tree_code comp_code;
  4587. if (!extract_code_and_val_from_cond_with_ops (name, cond_code,
  4588. cond_op0,
  4589. cond_op1,
  4590. invert, &comp_code, &val))
  4591. return;
  4592. /* Only register an ASSERT_EXPR if NAME was found in the sub-graph
  4593. reachable from E. */
  4594. if (live_on_edge (e, name)
  4595. && !has_single_use (name))
  4596. register_new_assert_for (name, name, comp_code, val, NULL, e, bsi);
  4597. /* In the case of NAME <= CST and NAME being defined as
  4598. NAME = (unsigned) NAME2 + CST2 we can assert NAME2 >= -CST2
  4599. and NAME2 <= CST - CST2. We can do the same for NAME > CST.
  4600. This catches range and anti-range tests. */
  4601. if ((comp_code == LE_EXPR
  4602. || comp_code == GT_EXPR)
  4603. && TREE_CODE (val) == INTEGER_CST
  4604. && TYPE_UNSIGNED (TREE_TYPE (val)))
  4605. {
  4606. gimple def_stmt = SSA_NAME_DEF_STMT (name);
  4607. tree cst2 = NULL_TREE, name2 = NULL_TREE, name3 = NULL_TREE;
  4608. /* Extract CST2 from the (optional) addition. */
  4609. if (is_gimple_assign (def_stmt)
  4610. && gimple_assign_rhs_code (def_stmt) == PLUS_EXPR)
  4611. {
  4612. name2 = gimple_assign_rhs1 (def_stmt);
  4613. cst2 = gimple_assign_rhs2 (def_stmt);
  4614. if (TREE_CODE (name2) == SSA_NAME
  4615. && TREE_CODE (cst2) == INTEGER_CST)
  4616. def_stmt = SSA_NAME_DEF_STMT (name2);
  4617. }
  4618. /* Extract NAME2 from the (optional) sign-changing cast. */
  4619. if (gimple_assign_cast_p (def_stmt))
  4620. {
  4621. if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
  4622. && ! TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
  4623. && (TYPE_PRECISION (gimple_expr_type (def_stmt))
  4624. == TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))))
  4625. name3 = gimple_assign_rhs1 (def_stmt);
  4626. }
  4627. /* If name3 is used later, create an ASSERT_EXPR for it. */
  4628. if (name3 != NULL_TREE
  4629. && TREE_CODE (name3) == SSA_NAME
  4630. && (cst2 == NULL_TREE
  4631. || TREE_CODE (cst2) == INTEGER_CST)
  4632. && INTEGRAL_TYPE_P (TREE_TYPE (name3))
  4633. && live_on_edge (e, name3)
  4634. && !has_single_use (name3))
  4635. {
  4636. tree tmp;
  4637. /* Build an expression for the range test. */
  4638. tmp = build1 (NOP_EXPR, TREE_TYPE (name), name3);
  4639. if (cst2 != NULL_TREE)
  4640. tmp = build2 (PLUS_EXPR, TREE_TYPE (name), tmp, cst2);
  4641. if (dump_file)
  4642. {
  4643. fprintf (dump_file, "Adding assert for ");
  4644. print_generic_expr (dump_file, name3, 0);
  4645. fprintf (dump_file, " from ");
  4646. print_generic_expr (dump_file, tmp, 0);
  4647. fprintf (dump_file, "\n");
  4648. }
  4649. register_new_assert_for (name3, tmp, comp_code, val, NULL, e, bsi);
  4650. }
  4651. /* If name2 is used later, create an ASSERT_EXPR for it. */
  4652. if (name2 != NULL_TREE
  4653. && TREE_CODE (name2) == SSA_NAME
  4654. && TREE_CODE (cst2) == INTEGER_CST
  4655. && INTEGRAL_TYPE_P (TREE_TYPE (name2))
  4656. && live_on_edge (e, name2)
  4657. && !has_single_use (name2))
  4658. {
  4659. tree tmp;
  4660. /* Build an expression for the range test. */
  4661. tmp = name2;
  4662. if (TREE_TYPE (name) != TREE_TYPE (name2))
  4663. tmp = build1 (NOP_EXPR, TREE_TYPE (name), tmp);
  4664. if (cst2 != NULL_TREE)
  4665. tmp = build2 (PLUS_EXPR, TREE_TYPE (name), tmp, cst2);
  4666. if (dump_file)
  4667. {
  4668. fprintf (dump_file, "Adding assert for ");
  4669. print_generic_expr (dump_file, name2, 0);
  4670. fprintf (dump_file, " from ");
  4671. print_generic_expr (dump_file, tmp, 0);
  4672. fprintf (dump_file, "\n");
  4673. }
  4674. register_new_assert_for (name2, tmp, comp_code, val, NULL, e, bsi);
  4675. }
  4676. }
  4677. /* In the case of post-in/decrement tests like if (i++) ... and uses
  4678. of the in/decremented value on the edge the extra name we want to
  4679. assert for is not on the def chain of the name compared. Instead
  4680. it is in the set of use stmts. */
  4681. if ((comp_code == NE_EXPR
  4682. || comp_code == EQ_EXPR)
  4683. && TREE_CODE (val) == INTEGER_CST)
  4684. {
  4685. imm_use_iterator ui;
  4686. gimple use_stmt;
  4687. FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
  4688. {
  4689. /* Cut off to use-stmts that are in the predecessor. */
  4690. if (gimple_bb (use_stmt) != e->src)
  4691. continue;
  4692. if (!is_gimple_assign (use_stmt))
  4693. continue;
  4694. enum tree_code code = gimple_assign_rhs_code (use_stmt);
  4695. if (code != PLUS_EXPR
  4696. && code != MINUS_EXPR)
  4697. continue;
  4698. tree cst = gimple_assign_rhs2 (use_stmt);
  4699. if (TREE_CODE (cst) != INTEGER_CST)
  4700. continue;
  4701. tree name2 = gimple_assign_lhs (use_stmt);
  4702. if (live_on_edge (e, name2))
  4703. {
  4704. cst = int_const_binop (code, val, cst);
  4705. register_new_assert_for (name2, name2, comp_code, cst,
  4706. NULL, e, bsi);
  4707. }
  4708. }
  4709. }
  4710. if (TREE_CODE_CLASS (comp_code) == tcc_comparison
  4711. && TREE_CODE (val) == INTEGER_CST)
  4712. {
  4713. gimple def_stmt = SSA_NAME_DEF_STMT (name);
  4714. tree name2 = NULL_TREE, names[2], cst2 = NULL_TREE;
  4715. tree val2 = NULL_TREE;
  4716. unsigned int prec = TYPE_PRECISION (TREE_TYPE (val));
  4717. wide_int mask = wi::zero (prec);
  4718. unsigned int nprec = prec;
  4719. enum tree_code rhs_code = ERROR_MARK;
  4720. if (is_gimple_assign (def_stmt))
  4721. rhs_code = gimple_assign_rhs_code (def_stmt);
  4722. /* Add asserts for NAME cmp CST and NAME being defined
  4723. as NAME = (int) NAME2. */
  4724. if (!TYPE_UNSIGNED (TREE_TYPE (val))
  4725. && (comp_code == LE_EXPR || comp_code == LT_EXPR
  4726. || comp_code == GT_EXPR || comp_code == GE_EXPR)
  4727. && gimple_assign_cast_p (def_stmt))
  4728. {
  4729. name2 = gimple_assign_rhs1 (def_stmt);
  4730. if (CONVERT_EXPR_CODE_P (rhs_code)
  4731. && INTEGRAL_TYPE_P (TREE_TYPE (name2))
  4732. && TYPE_UNSIGNED (TREE_TYPE (name2))
  4733. && prec == TYPE_PRECISION (TREE_TYPE (name2))
  4734. && (comp_code == LE_EXPR || comp_code == GT_EXPR
  4735. || !tree_int_cst_equal (val,
  4736. TYPE_MIN_VALUE (TREE_TYPE (val))))
  4737. && live_on_edge (e, name2)
  4738. && !has_single_use (name2))
  4739. {
  4740. tree tmp, cst;
  4741. enum tree_code new_comp_code = comp_code;
  4742. cst = fold_convert (TREE_TYPE (name2),
  4743. TYPE_MIN_VALUE (TREE_TYPE (val)));
  4744. /* Build an expression for the range test. */
  4745. tmp = build2 (PLUS_EXPR, TREE_TYPE (name2), name2, cst);
  4746. cst = fold_build2 (PLUS_EXPR, TREE_TYPE (name2), cst,
  4747. fold_convert (TREE_TYPE (name2), val));
  4748. if (comp_code == LT_EXPR || comp_code == GE_EXPR)
  4749. {
  4750. new_comp_code = comp_code == LT_EXPR ? LE_EXPR : GT_EXPR;
  4751. cst = fold_build2 (MINUS_EXPR, TREE_TYPE (name2), cst,
  4752. build_int_cst (TREE_TYPE (name2), 1));
  4753. }
  4754. if (dump_file)
  4755. {
  4756. fprintf (dump_file, "Adding assert for ");
  4757. print_generic_expr (dump_file, name2, 0);
  4758. fprintf (dump_file, " from ");
  4759. print_generic_expr (dump_file, tmp, 0);
  4760. fprintf (dump_file, "\n");
  4761. }
  4762. register_new_assert_for (name2, tmp, new_comp_code, cst, NULL,
  4763. e, bsi);
  4764. }
  4765. }
  4766. /* Add asserts for NAME cmp CST and NAME being defined as
  4767. NAME = NAME2 >> CST2.
  4768. Extract CST2 from the right shift. */
  4769. if (rhs_code == RSHIFT_EXPR)
  4770. {
  4771. name2 = gimple_assign_rhs1 (def_stmt);
  4772. cst2 = gimple_assign_rhs2 (def_stmt);
  4773. if (TREE_CODE (name2) == SSA_NAME
  4774. && tree_fits_uhwi_p (cst2)
  4775. && INTEGRAL_TYPE_P (TREE_TYPE (name2))
  4776. && IN_RANGE (tree_to_uhwi (cst2), 1, prec - 1)
  4777. && prec == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (val)))
  4778. && live_on_edge (e, name2)
  4779. && !has_single_use (name2))
  4780. {
  4781. mask = wi::mask (tree_to_uhwi (cst2), false, prec);
  4782. val2 = fold_binary (LSHIFT_EXPR, TREE_TYPE (val), val, cst2);
  4783. }
  4784. }
  4785. if (val2 != NULL_TREE
  4786. && TREE_CODE (val2) == INTEGER_CST
  4787. && simple_cst_equal (fold_build2 (RSHIFT_EXPR,
  4788. TREE_TYPE (val),
  4789. val2, cst2), val))
  4790. {
  4791. enum tree_code new_comp_code = comp_code;
  4792. tree tmp, new_val;
  4793. tmp = name2;
  4794. if (comp_code == EQ_EXPR || comp_code == NE_EXPR)
  4795. {
  4796. if (!TYPE_UNSIGNED (TREE_TYPE (val)))
  4797. {
  4798. tree type = build_nonstandard_integer_type (prec, 1);
  4799. tmp = build1 (NOP_EXPR, type, name2);
  4800. val2 = fold_convert (type, val2);
  4801. }
  4802. tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (tmp), tmp, val2);
  4803. new_val = wide_int_to_tree (TREE_TYPE (tmp), mask);
  4804. new_comp_code = comp_code == EQ_EXPR ? LE_EXPR : GT_EXPR;
  4805. }
  4806. else if (comp_code == LT_EXPR || comp_code == GE_EXPR)
  4807. {
  4808. wide_int minval
  4809. = wi::min_value (prec, TYPE_SIGN (TREE_TYPE (val)));
  4810. new_val = val2;
  4811. if (minval == new_val)
  4812. new_val = NULL_TREE;
  4813. }
  4814. else
  4815. {
  4816. wide_int maxval
  4817. = wi::max_value (prec, TYPE_SIGN (TREE_TYPE (val)));
  4818. mask |= val2;
  4819. if (mask == maxval)
  4820. new_val = NULL_TREE;
  4821. else
  4822. new_val = wide_int_to_tree (TREE_TYPE (val2), mask);
  4823. }
  4824. if (new_val)
  4825. {
  4826. if (dump_file)
  4827. {
  4828. fprintf (dump_file, "Adding assert for ");
  4829. print_generic_expr (dump_file, name2, 0);
  4830. fprintf (dump_file, " from ");
  4831. print_generic_expr (dump_file, tmp, 0);
  4832. fprintf (dump_file, "\n");
  4833. }
  4834. register_new_assert_for (name2, tmp, new_comp_code, new_val,
  4835. NULL, e, bsi);
  4836. }
  4837. }
  4838. /* Add asserts for NAME cmp CST and NAME being defined as
  4839. NAME = NAME2 & CST2.
  4840. Extract CST2 from the and.
  4841. Also handle
  4842. NAME = (unsigned) NAME2;
  4843. casts where NAME's type is unsigned and has smaller precision
  4844. than NAME2's type as if it was NAME = NAME2 & MASK. */
  4845. names[0] = NULL_TREE;
  4846. names[1] = NULL_TREE;
  4847. cst2 = NULL_TREE;
  4848. if (rhs_code == BIT_AND_EXPR
  4849. || (CONVERT_EXPR_CODE_P (rhs_code)
  4850. && TREE_CODE (TREE_TYPE (val)) == INTEGER_TYPE
  4851. && TYPE_UNSIGNED (TREE_TYPE (val))
  4852. && TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
  4853. > prec))
  4854. {
  4855. name2 = gimple_assign_rhs1 (def_stmt);
  4856. if (rhs_code == BIT_AND_EXPR)
  4857. cst2 = gimple_assign_rhs2 (def_stmt);
  4858. else
  4859. {
  4860. cst2 = TYPE_MAX_VALUE (TREE_TYPE (val));
  4861. nprec = TYPE_PRECISION (TREE_TYPE (name2));
  4862. }
  4863. if (TREE_CODE (name2) == SSA_NAME
  4864. && INTEGRAL_TYPE_P (TREE_TYPE (name2))
  4865. && TREE_CODE (cst2) == INTEGER_CST
  4866. && !integer_zerop (cst2)
  4867. && (nprec > 1
  4868. || TYPE_UNSIGNED (TREE_TYPE (val))))
  4869. {
  4870. gimple def_stmt2 = SSA_NAME_DEF_STMT (name2);
  4871. if (gimple_assign_cast_p (def_stmt2))
  4872. {
  4873. names[1] = gimple_assign_rhs1 (def_stmt2);
  4874. if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt2))
  4875. || !INTEGRAL_TYPE_P (TREE_TYPE (names[1]))
  4876. || (TYPE_PRECISION (TREE_TYPE (name2))
  4877. != TYPE_PRECISION (TREE_TYPE (names[1])))
  4878. || !live_on_edge (e, names[1])
  4879. || has_single_use (names[1]))
  4880. names[1] = NULL_TREE;
  4881. }
  4882. if (live_on_edge (e, name2)
  4883. && !has_single_use (name2))
  4884. names[0] = name2;
  4885. }
  4886. }
  4887. if (names[0] || names[1])
  4888. {
  4889. wide_int minv, maxv, valv, cst2v;
  4890. wide_int tem, sgnbit;
  4891. bool valid_p = false, valn, cst2n;
  4892. enum tree_code ccode = comp_code;
  4893. valv = wide_int::from (val, nprec, UNSIGNED);
  4894. cst2v = wide_int::from (cst2, nprec, UNSIGNED);
  4895. valn = wi::neg_p (valv, TYPE_SIGN (TREE_TYPE (val)));
  4896. cst2n = wi::neg_p (cst2v, TYPE_SIGN (TREE_TYPE (val)));
  4897. /* If CST2 doesn't have most significant bit set,
  4898. but VAL is negative, we have comparison like
  4899. if ((x & 0x123) > -4) (always true). Just give up. */
  4900. if (!cst2n && valn)
  4901. ccode = ERROR_MARK;
  4902. if (cst2n)
  4903. sgnbit = wi::set_bit_in_zero (nprec - 1, nprec);
  4904. else
  4905. sgnbit = wi::zero (nprec);
  4906. minv = valv & cst2v;
  4907. switch (ccode)
  4908. {
  4909. case EQ_EXPR:
  4910. /* Minimum unsigned value for equality is VAL & CST2
  4911. (should be equal to VAL, otherwise we probably should
  4912. have folded the comparison into false) and
  4913. maximum unsigned value is VAL | ~CST2. */
  4914. maxv = valv | ~cst2v;
  4915. valid_p = true;
  4916. break;
  4917. case NE_EXPR:
  4918. tem = valv | ~cst2v;
  4919. /* If VAL is 0, handle (X & CST2) != 0 as (X & CST2) > 0U. */
  4920. if (valv == 0)
  4921. {
  4922. cst2n = false;
  4923. sgnbit = wi::zero (nprec);
  4924. goto gt_expr;
  4925. }
  4926. /* If (VAL | ~CST2) is all ones, handle it as
  4927. (X & CST2) < VAL. */
  4928. if (tem == -1)
  4929. {
  4930. cst2n = false;
  4931. valn = false;
  4932. sgnbit = wi::zero (nprec);
  4933. goto lt_expr;
  4934. }
  4935. if (!cst2n && wi::neg_p (cst2v))
  4936. sgnbit = wi::set_bit_in_zero (nprec - 1, nprec);
  4937. if (sgnbit != 0)
  4938. {
  4939. if (valv == sgnbit)
  4940. {
  4941. cst2n = true;
  4942. valn = true;
  4943. goto gt_expr;
  4944. }
  4945. if (tem == wi::mask (nprec - 1, false, nprec))
  4946. {
  4947. cst2n = true;
  4948. goto lt_expr;
  4949. }
  4950. if (!cst2n)
  4951. sgnbit = wi::zero (nprec);
  4952. }
  4953. break;
  4954. case GE_EXPR:
  4955. /* Minimum unsigned value for >= if (VAL & CST2) == VAL
  4956. is VAL and maximum unsigned value is ~0. For signed
  4957. comparison, if CST2 doesn't have most significant bit
  4958. set, handle it similarly. If CST2 has MSB set,
  4959. the minimum is the same, and maximum is ~0U/2. */
  4960. if (minv != valv)
  4961. {
  4962. /* If (VAL & CST2) != VAL, X & CST2 can't be equal to
  4963. VAL. */
  4964. minv = masked_increment (valv, cst2v, sgnbit, nprec);
  4965. if (minv == valv)
  4966. break;
  4967. }
  4968. maxv = wi::mask (nprec - (cst2n ? 1 : 0), false, nprec);
  4969. valid_p = true;
  4970. break;
  4971. case GT_EXPR:
  4972. gt_expr:
  4973. /* Find out smallest MINV where MINV > VAL
  4974. && (MINV & CST2) == MINV, if any. If VAL is signed and
  4975. CST2 has MSB set, compute it biased by 1 << (nprec - 1). */
  4976. minv = masked_increment (valv, cst2v, sgnbit, nprec);
  4977. if (minv == valv)
  4978. break;
  4979. maxv = wi::mask (nprec - (cst2n ? 1 : 0), false, nprec);
  4980. valid_p = true;
  4981. break;
  4982. case LE_EXPR:
  4983. /* Minimum unsigned value for <= is 0 and maximum
  4984. unsigned value is VAL | ~CST2 if (VAL & CST2) == VAL.
  4985. Otherwise, find smallest VAL2 where VAL2 > VAL
  4986. && (VAL2 & CST2) == VAL2 and use (VAL2 - 1) | ~CST2
  4987. as maximum.
  4988. For signed comparison, if CST2 doesn't have most
  4989. significant bit set, handle it similarly. If CST2 has
  4990. MSB set, the maximum is the same and minimum is INT_MIN. */
  4991. if (minv == valv)
  4992. maxv = valv;
  4993. else
  4994. {
  4995. maxv = masked_increment (valv, cst2v, sgnbit, nprec);
  4996. if (maxv == valv)
  4997. break;
  4998. maxv -= 1;
  4999. }
  5000. maxv |= ~cst2v;
  5001. minv = sgnbit;
  5002. valid_p = true;
  5003. break;
  5004. case LT_EXPR:
  5005. lt_expr:
  5006. /* Minimum unsigned value for < is 0 and maximum
  5007. unsigned value is (VAL-1) | ~CST2 if (VAL & CST2) == VAL.
  5008. Otherwise, find smallest VAL2 where VAL2 > VAL
  5009. && (VAL2 & CST2) == VAL2 and use (VAL2 - 1) | ~CST2
  5010. as maximum.
  5011. For signed comparison, if CST2 doesn't have most
  5012. significant bit set, handle it similarly. If CST2 has
  5013. MSB set, the maximum is the same and minimum is INT_MIN. */
  5014. if (minv == valv)
  5015. {
  5016. if (valv == sgnbit)
  5017. break;
  5018. maxv = valv;
  5019. }
  5020. else
  5021. {
  5022. maxv = masked_increment (valv, cst2v, sgnbit, nprec);
  5023. if (maxv == valv)
  5024. break;
  5025. }
  5026. maxv -= 1;
  5027. maxv |= ~cst2v;
  5028. minv = sgnbit;
  5029. valid_p = true;
  5030. break;
  5031. default:
  5032. break;
  5033. }
  5034. if (valid_p
  5035. && (maxv - minv) != -1)
  5036. {
  5037. tree tmp, new_val, type;
  5038. int i;
  5039. for (i = 0; i < 2; i++)
  5040. if (names[i])
  5041. {
  5042. wide_int maxv2 = maxv;
  5043. tmp = names[i];
  5044. type = TREE_TYPE (names[i]);
  5045. if (!TYPE_UNSIGNED (type))
  5046. {
  5047. type = build_nonstandard_integer_type (nprec, 1);
  5048. tmp = build1 (NOP_EXPR, type, names[i]);
  5049. }
  5050. if (minv != 0)
  5051. {
  5052. tmp = build2 (PLUS_EXPR, type, tmp,
  5053. wide_int_to_tree (type, -minv));
  5054. maxv2 = maxv - minv;
  5055. }
  5056. new_val = wide_int_to_tree (type, maxv2);
  5057. if (dump_file)
  5058. {
  5059. fprintf (dump_file, "Adding assert for ");
  5060. print_generic_expr (dump_file, names[i], 0);
  5061. fprintf (dump_file, " from ");
  5062. print_generic_expr (dump_file, tmp, 0);
  5063. fprintf (dump_file, "\n");
  5064. }
  5065. register_new_assert_for (names[i], tmp, LE_EXPR,
  5066. new_val, NULL, e, bsi);
  5067. }
  5068. }
  5069. }
  5070. }
  5071. }
  5072. /* OP is an operand of a truth value expression which is known to have
  5073. a particular value. Register any asserts for OP and for any
  5074. operands in OP's defining statement.
  5075. If CODE is EQ_EXPR, then we want to register OP is zero (false),
  5076. if CODE is NE_EXPR, then we want to register OP is nonzero (true). */
  5077. static void
  5078. register_edge_assert_for_1 (tree op, enum tree_code code,
  5079. edge e, gimple_stmt_iterator bsi)
  5080. {
  5081. gimple op_def;
  5082. tree val;
  5083. enum tree_code rhs_code;
  5084. /* We only care about SSA_NAMEs. */
  5085. if (TREE_CODE (op) != SSA_NAME)
  5086. return;
  5087. /* We know that OP will have a zero or nonzero value. If OP is used
  5088. more than once go ahead and register an assert for OP. */
  5089. if (live_on_edge (e, op)
  5090. && !has_single_use (op))
  5091. {
  5092. val = build_int_cst (TREE_TYPE (op), 0);
  5093. register_new_assert_for (op, op, code, val, NULL, e, bsi);
  5094. }
  5095. /* Now look at how OP is set. If it's set from a comparison,
  5096. a truth operation or some bit operations, then we may be able
  5097. to register information about the operands of that assignment. */
  5098. op_def = SSA_NAME_DEF_STMT (op);
  5099. if (gimple_code (op_def) != GIMPLE_ASSIGN)
  5100. return;
  5101. rhs_code = gimple_assign_rhs_code (op_def);
  5102. if (TREE_CODE_CLASS (rhs_code) == tcc_comparison)
  5103. {
  5104. bool invert = (code == EQ_EXPR ? true : false);
  5105. tree op0 = gimple_assign_rhs1 (op_def);
  5106. tree op1 = gimple_assign_rhs2 (op_def);
  5107. if (TREE_CODE (op0) == SSA_NAME)
  5108. register_edge_assert_for_2 (op0, e, bsi, rhs_code, op0, op1, invert);
  5109. if (TREE_CODE (op1) == SSA_NAME)
  5110. register_edge_assert_for_2 (op1, e, bsi, rhs_code, op0, op1, invert);
  5111. }
  5112. else if ((code == NE_EXPR
  5113. && gimple_assign_rhs_code (op_def) == BIT_AND_EXPR)
  5114. || (code == EQ_EXPR
  5115. && gimple_assign_rhs_code (op_def) == BIT_IOR_EXPR))
  5116. {
  5117. /* Recurse on each operand. */
  5118. tree op0 = gimple_assign_rhs1 (op_def);
  5119. tree op1 = gimple_assign_rhs2 (op_def);
  5120. if (TREE_CODE (op0) == SSA_NAME
  5121. && has_single_use (op0))
  5122. register_edge_assert_for_1 (op0, code, e, bsi);
  5123. if (TREE_CODE (op1) == SSA_NAME
  5124. && has_single_use (op1))
  5125. register_edge_assert_for_1 (op1, code, e, bsi);
  5126. }
  5127. else if (gimple_assign_rhs_code (op_def) == BIT_NOT_EXPR
  5128. && TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (op_def))) == 1)
  5129. {
  5130. /* Recurse, flipping CODE. */
  5131. code = invert_tree_comparison (code, false);
  5132. register_edge_assert_for_1 (gimple_assign_rhs1 (op_def), code, e, bsi);
  5133. }
  5134. else if (gimple_assign_rhs_code (op_def) == SSA_NAME)
  5135. {
  5136. /* Recurse through the copy. */
  5137. register_edge_assert_for_1 (gimple_assign_rhs1 (op_def), code, e, bsi);
  5138. }
  5139. else if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (op_def)))
  5140. {
  5141. /* Recurse through the type conversion, unless it is a narrowing
  5142. conversion or conversion from non-integral type. */
  5143. tree rhs = gimple_assign_rhs1 (op_def);
  5144. if (INTEGRAL_TYPE_P (TREE_TYPE (rhs))
  5145. && (TYPE_PRECISION (TREE_TYPE (rhs))
  5146. <= TYPE_PRECISION (TREE_TYPE (op))))
  5147. register_edge_assert_for_1 (rhs, code, e, bsi);
  5148. }
  5149. }
/* Try to register an edge assertion for SSA name NAME on edge E for
   the condition COND contributing to the conditional jump pointed to by
   SI.  The condition is COND_OP0 COND_CODE COND_OP1; E may be the
   then or the else edge (distinguished via EDGE_FALSE_VALUE).  */

static void
register_edge_assert_for (tree name, edge e, gimple_stmt_iterator si,
			  enum tree_code cond_code, tree cond_op0,
			  tree cond_op1)
{
  tree val;
  enum tree_code comp_code;
  bool is_else_edge = (e->flags & EDGE_FALSE_VALUE) != 0;

  /* Do not attempt to infer anything in names that flow through
     abnormal edges.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
    return;

  /* Reduce the condition to the form "NAME COMP_CODE VAL" as it holds
     on this edge; give up if no such form can be derived.  */
  if (!extract_code_and_val_from_cond_with_ops (name, cond_code,
						cond_op0, cond_op1,
						is_else_edge,
						&comp_code, &val))
    return;

  /* Register ASSERT_EXPRs for name.  */
  register_edge_assert_for_2 (name, e, si, cond_code, cond_op0,
			      cond_op1, is_else_edge);

  /* If COND is effectively an equality test of an SSA_NAME against
     the value zero or one, then we may be able to assert values
     for SSA_NAMEs which flow into COND.  */

  /* In the case of NAME == 1 or NAME != 0, for BIT_AND_EXPR defining
     statement of NAME we can assert both operands of the BIT_AND_EXPR
     have nonzero value.  */
  if (((comp_code == EQ_EXPR && integer_onep (val))
       || (comp_code == NE_EXPR && integer_zerop (val))))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (name);

      if (is_gimple_assign (def_stmt)
	  && gimple_assign_rhs_code (def_stmt) == BIT_AND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (def_stmt);
	  tree op1 = gimple_assign_rhs2 (def_stmt);
	  register_edge_assert_for_1 (op0, NE_EXPR, e, si);
	  register_edge_assert_for_1 (op1, NE_EXPR, e, si);
	}
    }

  /* In the case of NAME == 0 or NAME != 1, for BIT_IOR_EXPR defining
     statement of NAME we can assert both operands of the BIT_IOR_EXPR
     have zero value.  */
  if (((comp_code == EQ_EXPR && integer_zerop (val))
       || (comp_code == NE_EXPR && integer_onep (val))))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (name);

      /* For BIT_IOR_EXPR only if NAME == 0 both operands have
	 necessarily zero value, or if type-precision is one.
	 (NAME != 1 only implies NAME == 0 for one-bit types.)  */
      if (is_gimple_assign (def_stmt)
	  && (gimple_assign_rhs_code (def_stmt) == BIT_IOR_EXPR
	      && (TYPE_PRECISION (TREE_TYPE (name)) == 1
		  || comp_code == EQ_EXPR)))
	{
	  tree op0 = gimple_assign_rhs1 (def_stmt);
	  tree op1 = gimple_assign_rhs2 (def_stmt);
	  register_edge_assert_for_1 (op0, EQ_EXPR, e, si);
	  register_edge_assert_for_1 (op1, EQ_EXPR, e, si);
	}
    }
}
  5213. /* Determine whether the outgoing edges of BB should receive an
  5214. ASSERT_EXPR for each of the operands of BB's LAST statement.
  5215. The last statement of BB must be a COND_EXPR.
  5216. If any of the sub-graphs rooted at BB have an interesting use of
  5217. the predicate operands, an assert location node is added to the
  5218. list of assertions for the corresponding operands. */
  5219. static void
  5220. find_conditional_asserts (basic_block bb, gcond *last)
  5221. {
  5222. gimple_stmt_iterator bsi;
  5223. tree op;
  5224. edge_iterator ei;
  5225. edge e;
  5226. ssa_op_iter iter;
  5227. bsi = gsi_for_stmt (last);
  5228. /* Look for uses of the operands in each of the sub-graphs
  5229. rooted at BB. We need to check each of the outgoing edges
  5230. separately, so that we know what kind of ASSERT_EXPR to
  5231. insert. */
  5232. FOR_EACH_EDGE (e, ei, bb->succs)
  5233. {
  5234. if (e->dest == bb)
  5235. continue;
  5236. /* Register the necessary assertions for each operand in the
  5237. conditional predicate. */
  5238. FOR_EACH_SSA_TREE_OPERAND (op, last, iter, SSA_OP_USE)
  5239. register_edge_assert_for (op, e, bsi,
  5240. gimple_cond_code (last),
  5241. gimple_cond_lhs (last),
  5242. gimple_cond_rhs (last));
  5243. }
  5244. }
/* Helper record for find_switch_asserts: a switch case label paired
   with the basic block its label transfers control to, so labels can
   be sorted and grouped by destination.  */

struct case_info
{
  tree expr;		/* The CASE_LABEL_EXPR for this case.  */
  basic_block bb;	/* Destination block of the case label.  */
};
  5250. /* Compare two case labels sorting first by the destination bb index
  5251. and then by the case value. */
  5252. static int
  5253. compare_case_labels (const void *p1, const void *p2)
  5254. {
  5255. const struct case_info *ci1 = (const struct case_info *) p1;
  5256. const struct case_info *ci2 = (const struct case_info *) p2;
  5257. int idx1 = ci1->bb->index;
  5258. int idx2 = ci2->bb->index;
  5259. if (idx1 < idx2)
  5260. return -1;
  5261. else if (idx1 == idx2)
  5262. {
  5263. /* Make sure the default label is first in a group. */
  5264. if (!CASE_LOW (ci1->expr))
  5265. return -1;
  5266. else if (!CASE_LOW (ci2->expr))
  5267. return 1;
  5268. else
  5269. return tree_int_cst_compare (CASE_LOW (ci1->expr),
  5270. CASE_LOW (ci2->expr));
  5271. }
  5272. else
  5273. return 1;
  5274. }
/* Determine whether the outgoing edges of BB should receive an
   ASSERT_EXPR for each of the operands of BB's LAST statement.
   The last statement of BB must be a SWITCH_EXPR.

   If any of the sub-graphs rooted at BB have an interesting use of
   the predicate operands, an assert location node is added to the
   list of assertions for the corresponding operands.  */

static void
find_switch_asserts (basic_block bb, gswitch *last)
{
  gimple_stmt_iterator bsi;
  tree op;
  edge e;
  struct case_info *ci;
  size_t n = gimple_switch_num_labels (last);
#if GCC_VERSION >= 4000
  unsigned int idx;
#else
  /* Work around GCC 3.4 bug (PR 37086).  */
  volatile unsigned int idx;
#endif

  bsi = gsi_for_stmt (last);
  op = gimple_switch_index (last);
  /* Only an SSA name switch index can carry assertions.  */
  if (TREE_CODE (op) != SSA_NAME)
    return;

  /* Build a vector of case labels sorted by destination label.  */
  ci = XNEWVEC (struct case_info, n);
  for (idx = 0; idx < n; ++idx)
    {
      ci[idx].expr = gimple_switch_label (last, idx);
      ci[idx].bb = label_to_block (CASE_LABEL (ci[idx].expr));
    }
  qsort (ci, n, sizeof (struct case_info), compare_case_labels);

  for (idx = 0; idx < n; ++idx)
    {
      tree min, max;
      tree cl = ci[idx].expr;
      basic_block cbb = ci[idx].bb;

      min = CASE_LOW (cl);
      max = CASE_HIGH (cl);

      /* If there are multiple case labels with the same destination
	 we need to combine them to a single value range for the edge.
	 The sort above placed them adjacently, in increasing value
	 order, so the combined range is [MIN of first, MAX of last].  */
      if (idx + 1 < n && cbb == ci[idx + 1].bb)
	{
	  /* Skip labels until the last of the group.  */
	  do {
	    ++idx;
	  } while (idx < n && cbb == ci[idx].bb);
	  --idx;

	  /* Pick up the maximum of the case label range.  */
	  if (CASE_HIGH (ci[idx].expr))
	    max = CASE_HIGH (ci[idx].expr);
	  else
	    max = CASE_LOW (ci[idx].expr);
	}

      /* Nothing to do if the range includes the default label until we
	 can register anti-ranges.  */
      if (min == NULL_TREE)
	continue;

      /* Find the edge to register the assert expr on.  */
      e = find_edge (bb, cbb);

      /* Register the necessary assertions for the operand in the
	 SWITCH_EXPR.  A single-value label asserts OP == MIN; a range
	 asserts MIN <= OP (here) and OP <= MAX (below).  */
      register_edge_assert_for (op, e, bsi,
				max ? GE_EXPR : EQ_EXPR,
				op, fold_convert (TREE_TYPE (op), min));
      if (max)
	register_edge_assert_for (op, e, bsi, LE_EXPR, op,
				  fold_convert (TREE_TYPE (op), max));
    }

  XDELETEVEC (ci);
}
/* Traverse all the statements in block BB looking for statements that
   may generate useful assertions for the SSA names in their operand.
   If a statement produces a useful assertion A for name N_i, then the
   list of assertions already generated for N_i is scanned to
   determine if A is actually needed.

   If N_i already had the assertion A at a location dominating the
   current location, then nothing needs to be done.  Otherwise, the
   new location for A is recorded instead.

   1- For every statement S in BB, all the variables used by S are
      added to bitmap FOUND_IN_SUBGRAPH.

   2- If statement S uses an operand N in a way that exposes a known
      value range for N, then if N was not already generated by an
      ASSERT_EXPR, create a new assert location for N.  For instance,
      if N is a pointer and the statement dereferences it, we can
      assume that N is not NULL.

   3- COND_EXPRs are a special case of #2.  We can derive range
      information from the predicate but need to insert different
      ASSERT_EXPRs for each of the sub-graphs rooted at the
      conditional block.  If the last statement of BB is a conditional
      expression of the form 'X op Y', then

      a) Remove X and Y from the set FOUND_IN_SUBGRAPH.

      b) If the conditional is the only entry point to the sub-graph
	 corresponding to the THEN_CLAUSE, recurse into it.  On
	 return, if X and/or Y are marked in FOUND_IN_SUBGRAPH, then
	 an ASSERT_EXPR is added for the corresponding variable.

      c) Repeat step (b) on the ELSE_CLAUSE.

      d) Mark X and Y in FOUND_IN_SUBGRAPH.

      For instance,

	    if (a == 9)
	      b = a;
	    else
	      b = c + 1;

      In this case, an assertion on the THEN clause is useful to
      determine that 'a' is always 9 on that edge.  However, an assertion
      on the ELSE clause would be unnecessary.

   4- If BB does not end in a conditional expression, then we recurse
      into BB's dominator children.

   At the end of the recursive traversal, every SSA name will have a
   list of locations where ASSERT_EXPRs should be added.  When a new
   location for name N is found, it is registered by calling
   register_new_assert_for.  That function keeps track of all the
   registered assertions to prevent adding unnecessary assertions.
   For instance, if a pointer P_4 is dereferenced more than once in a
   dominator tree, only the location dominating all the dereference of
   P_4 will receive an ASSERT_EXPR.

   LIVE is the set of SSA name versions live after BB; it is updated
   in place to reflect liveness on entry to BB.  */

static void
find_assert_locations_1 (basic_block bb, sbitmap live)
{
  gimple last;

  last = last_stmt (bb);

  /* If BB's last statement is a conditional statement involving integer
     operands, determine if we need to add ASSERT_EXPRs.  */
  if (last
      && gimple_code (last) == GIMPLE_COND
      && !fp_predicate (last)
      && !ZERO_SSA_OPERANDS (last, SSA_OP_USE))
    find_conditional_asserts (bb, as_a <gcond *> (last));

  /* If BB's last statement is a switch statement involving integer
     operands, determine if we need to add ASSERT_EXPRs.  */
  if (last
      && gimple_code (last) == GIMPLE_SWITCH
      && !ZERO_SSA_OPERANDS (last, SSA_OP_USE))
    find_switch_asserts (bb, as_a <gswitch *> (last));

  /* Traverse all the statements in BB marking used names and looking
     for statements that may infer assertions for their used operands.
     The walk is backwards so that LIVE describes liveness after each
     statement as it is visited.  */
  for (gimple_stmt_iterator si = gsi_last_bb (bb); !gsi_end_p (si);
       gsi_prev (&si))
    {
      gimple stmt;
      tree op;
      ssa_op_iter i;

      stmt = gsi_stmt (si);

      /* Debug statements must not influence code generation.  */
      if (is_gimple_debug (stmt))
	continue;

      /* See if we can derive an assertion for any of STMT's operands.  */
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
	{
	  tree value;
	  enum tree_code comp_code;

	  /* If op is not live beyond this stmt, do not bother to insert
	     asserts for it.  */
	  if (!bitmap_bit_p (live, SSA_NAME_VERSION (op)))
	    continue;

	  /* If OP is used in such a way that we can infer a value
	     range for it, and we don't find a previous assertion for
	     it, create a new assertion location node for OP.  */
	  if (infer_value_range (stmt, op, &comp_code, &value))
	    {
	      /* If we are able to infer a nonzero value range for OP,
		 then walk backwards through the use-def chain to see if OP
		 was set via a typecast.

		 If so, then we can also infer a nonzero value range
		 for the operand of the NOP_EXPR.  */
	      if (comp_code == NE_EXPR && integer_zerop (value))
		{
		  tree t = op;
		  gimple def_stmt = SSA_NAME_DEF_STMT (t);

		  while (is_gimple_assign (def_stmt)
			 && CONVERT_EXPR_CODE_P
			     (gimple_assign_rhs_code (def_stmt))
			 && TREE_CODE
			     (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
			 && POINTER_TYPE_P
			     (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
		    {
		      t = gimple_assign_rhs1 (def_stmt);
		      def_stmt = SSA_NAME_DEF_STMT (t);

		      /* Note we want to register the assert for the
			 operand of the NOP_EXPR after SI, not after the
			 conversion.  */
		      if (! has_single_use (t))
			register_new_assert_for (t, t, comp_code, value,
						 bb, NULL, si);
		    }
		}

	      register_new_assert_for (op, op, comp_code, value, bb, NULL, si);
	    }
	}

      /* Update live: uses become live before this statement, the
	 definition does not.  */
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
	bitmap_set_bit (live, SSA_NAME_VERSION (op));
      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_DEF)
	bitmap_clear_bit (live, SSA_NAME_VERSION (op));
    }

  /* Traverse all PHI nodes in BB, updating live.  */
  for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
       gsi_next (&si))
    {
      use_operand_p arg_p;
      ssa_op_iter i;
      gphi *phi = si.phi ();
      tree res = gimple_phi_result (phi);

      if (virtual_operand_p (res))
	continue;

      FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
	{
	  tree arg = USE_FROM_PTR (arg_p);
	  if (TREE_CODE (arg) == SSA_NAME)
	    bitmap_set_bit (live, SSA_NAME_VERSION (arg));
	}

      bitmap_clear_bit (live, SSA_NAME_VERSION (res));
    }
}
/* Do an RPO walk over the function computing SSA name liveness
   on-the-fly and deciding on assert expressions to insert.  Blocks
   are visited in reverse RPO (roughly post-order) so that a block's
   liveness is known before it is merged into its predecessors.  */

static void
find_assert_locations (void)
{
  int *rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int *bb_rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int *last_rpo = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  int rpo_cnt, i;

  /* LIVE maps each block index to the set of SSA name versions live
     on exit from it; bitmaps are allocated lazily and freed as soon
     as no unvisited predecessor still needs them.  */
  live = XCNEWVEC (sbitmap, last_basic_block_for_fn (cfun));
  rpo_cnt = pre_and_rev_post_order_compute (NULL, rpo, false);
  /* BB_RPO is the inverse mapping: block index -> RPO position.  */
  for (i = 0; i < rpo_cnt; ++i)
    bb_rpo[rpo[i]] = i;

  /* Pre-seed loop latch liveness from loop header PHI nodes.  Due to
     the order we compute liveness and insert asserts we otherwise
     fail to insert asserts into the loop latch.  */
  loop_p loop;
  FOR_EACH_LOOP (loop, 0)
    {
      i = loop->latch->index;
      unsigned int j = single_succ_edge (loop->latch)->dest_idx;
      for (gphi_iterator gsi = gsi_start_phis (loop->header);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;
	  tree arg = gimple_phi_arg_def (phi, j);
	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      if (live[i] == NULL)
		{
		  live[i] = sbitmap_alloc (num_ssa_names);
		  bitmap_clear (live[i]);
		}
	      bitmap_set_bit (live[i], SSA_NAME_VERSION (arg));
	    }
	}
    }

  for (i = rpo_cnt - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
      edge e;
      edge_iterator ei;

      if (!live[rpo[i]])
	{
	  live[rpo[i]] = sbitmap_alloc (num_ssa_names);
	  bitmap_clear (live[rpo[i]]);
	}

      /* Process BB and update the live information with uses in
	 this block.  */
      find_assert_locations_1 (bb, live[rpo[i]]);

      /* Merge liveness into the predecessor blocks and free it.
	 Back edges (and the entry block) are skipped.  */
      if (!bitmap_empty_p (live[rpo[i]]))
	{
	  int pred_rpo = i;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      int pred = e->src->index;
	      if ((e->flags & EDGE_DFS_BACK) || pred == ENTRY_BLOCK)
		continue;

	      if (!live[pred])
		{
		  live[pred] = sbitmap_alloc (num_ssa_names);
		  bitmap_clear (live[pred]);
		}
	      bitmap_ior (live[pred], live[pred], live[rpo[i]]);

	      if (bb_rpo[pred] < pred_rpo)
		pred_rpo = bb_rpo[pred];
	    }

	  /* Record the RPO number of the last visited block that needs
	     live information from this block.  */
	  last_rpo[rpo[i]] = pred_rpo;
	}
      else
	{
	  sbitmap_free (live[rpo[i]]);
	  live[rpo[i]] = NULL;
	}

      /* We can free all successors live bitmaps if all their
	 predecessors have been visited already.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (last_rpo[e->dest->index] == i
	    && live[e->dest->index])
	  {
	    sbitmap_free (live[e->dest->index]);
	    live[e->dest->index] = NULL;
	  }
    }

  XDELETEVEC (rpo);
  XDELETEVEC (bb_rpo);
  XDELETEVEC (last_rpo);
  /* Release any bitmaps still alive (e.g. for unreachable blocks).  */
  for (i = 0; i < last_basic_block_for_fn (cfun); ++i)
    if (live[i])
      sbitmap_free (live[i]);
  XDELETEVEC (live);
}
  5586. /* Create an ASSERT_EXPR for NAME and insert it in the location
  5587. indicated by LOC. Return true if we made any edge insertions. */
  5588. static bool
  5589. process_assert_insertions_for (tree name, assert_locus_t loc)
  5590. {
  5591. /* Build the comparison expression NAME_i COMP_CODE VAL. */
  5592. gimple stmt;
  5593. tree cond;
  5594. gimple assert_stmt;
  5595. edge_iterator ei;
  5596. edge e;
  5597. /* If we have X <=> X do not insert an assert expr for that. */
  5598. if (loc->expr == loc->val)
  5599. return false;
  5600. cond = build2 (loc->comp_code, boolean_type_node, loc->expr, loc->val);
  5601. assert_stmt = build_assert_expr_for (cond, name);
  5602. if (loc->e)
  5603. {
  5604. /* We have been asked to insert the assertion on an edge. This
  5605. is used only by COND_EXPR and SWITCH_EXPR assertions. */
  5606. gcc_checking_assert (gimple_code (gsi_stmt (loc->si)) == GIMPLE_COND
  5607. || (gimple_code (gsi_stmt (loc->si))
  5608. == GIMPLE_SWITCH));
  5609. gsi_insert_on_edge (loc->e, assert_stmt);
  5610. return true;
  5611. }
  5612. /* Otherwise, we can insert right after LOC->SI iff the
  5613. statement must not be the last statement in the block. */
  5614. stmt = gsi_stmt (loc->si);
  5615. if (!stmt_ends_bb_p (stmt))
  5616. {
  5617. gsi_insert_after (&loc->si, assert_stmt, GSI_SAME_STMT);
  5618. return false;
  5619. }
  5620. /* If STMT must be the last statement in BB, we can only insert new
  5621. assertions on the non-abnormal edge out of BB. Note that since
  5622. STMT is not control flow, there may only be one non-abnormal edge
  5623. out of BB. */
  5624. FOR_EACH_EDGE (e, ei, loc->bb->succs)
  5625. if (!(e->flags & EDGE_ABNORMAL))
  5626. {
  5627. gsi_insert_on_edge (e, assert_stmt);
  5628. return true;
  5629. }
  5630. gcc_unreachable ();
  5631. }
  5632. /* Process all the insertions registered for every name N_i registered
  5633. in NEED_ASSERT_FOR. The list of assertions to be inserted are
  5634. found in ASSERTS_FOR[i]. */
  5635. static void
  5636. process_assert_insertions (void)
  5637. {
  5638. unsigned i;
  5639. bitmap_iterator bi;
  5640. bool update_edges_p = false;
  5641. int num_asserts = 0;
  5642. if (dump_file && (dump_flags & TDF_DETAILS))
  5643. dump_all_asserts (dump_file);
  5644. EXECUTE_IF_SET_IN_BITMAP (need_assert_for, 0, i, bi)
  5645. {
  5646. assert_locus_t loc = asserts_for[i];
  5647. gcc_assert (loc);
  5648. while (loc)
  5649. {
  5650. assert_locus_t next = loc->next;
  5651. update_edges_p |= process_assert_insertions_for (ssa_name (i), loc);
  5652. free (loc);
  5653. loc = next;
  5654. num_asserts++;
  5655. }
  5656. }
  5657. if (update_edges_p)
  5658. gsi_commit_edge_inserts ();
  5659. statistics_counter_event (cfun, "Number of ASSERT_EXPR expressions inserted",
  5660. num_asserts);
  5661. }
/* Traverse the flowgraph looking for conditional jumps to insert range
   expressions.  These range expressions are meant to provide information
   to optimizations that need to reason in terms of value ranges.  They
   will not be expanded into RTL.  For instance, given:

   x = ...
   y = ...
   if (x < y)
     y = x - 2;
   else
     x = y + 3;

   this pass will transform the code into:

   x = ...
   y = ...
   if (x < y)
    {
      x = ASSERT_EXPR <x, x < y>
      y = x - 2
    }
   else
    {
      y = ASSERT_EXPR <y, x >= y>
      x = y + 3
    }

   The idea is that once copy and constant propagation have run, other
   optimizations will be able to determine what ranges of values can 'x'
   take in different paths of the code, simply by checking the reaching
   definition of 'x'.  */

static void
insert_range_assertions (void)
{
  /* Global tables consumed by find_assert_locations and
     process_assert_insertions.  */
  need_assert_for = BITMAP_ALLOC (NULL);
  asserts_for = XCNEWVEC (assert_locus_t, num_ssa_names);

  calculate_dominance_info (CDI_DOMINATORS);

  find_assert_locations ();
  if (!bitmap_empty_p (need_assert_for))
    {
      process_assert_insertions ();
      /* Inserting ASSERT_EXPRs creates new definitions; bring the
	 function back into valid SSA form.  */
      update_ssa (TODO_update_ssa_no_phi);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSSA form after inserting ASSERT_EXPRs\n");
      dump_function_to_file (current_function_decl, dump_file, dump_flags);
    }

  free (asserts_for);
  BITMAP_FREE (need_assert_for);
}
/* Checks one ARRAY_REF in REF, located at LOCATION.  Ignores flexible arrays
   and "struct" hacks.  If VRP can determine that the
   array subscript is a constant, check if it is outside valid
   range.  If the array subscript is a RANGE, warn if it is
   non-overlapping with valid range.
   IGNORE_OFF_BY_ONE is true if the ARRAY_REF is inside a ADDR_EXPR.  */

static void
check_array_ref (location_t location, tree ref, bool ignore_off_by_one)
{
  value_range_t* vr = NULL;
  tree low_sub, up_sub;
  tree low_bound, up_bound, up_bound_p1;
  tree base;

  /* Warn at most once per tree node.  */
  if (TREE_NO_WARNING (ref))
    return;

  low_sub = up_sub = TREE_OPERAND (ref, 1);
  up_bound = array_ref_up_bound (ref);

  /* Can not check flexible arrays.  */
  if (!up_bound
      || TREE_CODE (up_bound) != INTEGER_CST)
    return;

  /* Accesses to trailing arrays via pointers may access storage
     beyond the types array bounds.  */
  base = get_base_address (ref);
  if ((warn_array_bounds < 2)
      && base && TREE_CODE (base) == MEM_REF)
    {
      tree cref, next = NULL_TREE;

      if (TREE_CODE (TREE_OPERAND (ref, 0)) != COMPONENT_REF)
	return;

      cref = TREE_OPERAND (ref, 0);
      /* Scan the remainder of the field chain: NEXT ends up as the
	 next FIELD_DECL after the accessed one, or NULL_TREE if the
	 accessed field is the last one (or the container is a union,
	 in which case the loop never runs and NEXT stays NULL).  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (cref, 0))) == RECORD_TYPE)
	for (next = DECL_CHAIN (TREE_OPERAND (cref, 1));
	     next && TREE_CODE (next) != FIELD_DECL;
	     next = DECL_CHAIN (next))
	  ;

      /* If this is the last field in a struct type or a field in a
	 union type do not warn.  */
      if (!next)
	return;
    }

  low_bound = array_ref_low_bound (ref);
  up_bound_p1 = int_const_binop (PLUS_EXPR, up_bound,
				 build_int_cst (TREE_TYPE (up_bound), 1));

  if (TREE_CODE (low_sub) == SSA_NAME)
    {
      vr = get_value_range (low_sub);
      if (vr->type == VR_RANGE || vr->type == VR_ANTI_RANGE)
        {
	  /* For a plain range substitute the extremes so the checks
	     below only trigger when the whole range is out of bounds:
	     the below-bounds check sees the range maximum (LOW_SUB)
	     and the above-bounds check sees the range minimum
	     (UP_SUB).  For an anti-range the roles are reversed.  */
          low_sub = vr->type == VR_RANGE ? vr->max : vr->min;
          up_sub = vr->type == VR_RANGE ? vr->min : vr->max;
        }
    }

  if (vr && vr->type == VR_ANTI_RANGE)
    {
      /* An anti-range ~[low_sub, up_sub] guarantees an out-of-bounds
	 access only when the excluded interval covers the entire
	 valid index range.  */
      if (TREE_CODE (up_sub) == INTEGER_CST
          && tree_int_cst_lt (up_bound, up_sub)
          && TREE_CODE (low_sub) == INTEGER_CST
          && tree_int_cst_lt (low_sub, low_bound))
	{
	  warning_at (location, OPT_Warray_bounds,
		      "array subscript is outside array bounds");
	  TREE_NO_WARNING (ref) = 1;
	}
    }
  /* Above-bounds check.  When IGNORE_OFF_BY_ONE (&a[i] contexts),
     the one-past-the-end subscript UP_BOUND + 1 is tolerated.  */
  else if (TREE_CODE (up_sub) == INTEGER_CST
	   && (ignore_off_by_one
	       ? (tree_int_cst_lt (up_bound, up_sub)
		  && !tree_int_cst_equal (up_bound_p1, up_sub))
	       : (tree_int_cst_lt (up_bound, up_sub)
		  || tree_int_cst_equal (up_bound_p1, up_sub))))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Array bound warning for ");
	  dump_generic_expr (MSG_NOTE, TDF_SLIM, ref);
	  fprintf (dump_file, "\n");
	}
      warning_at (location, OPT_Warray_bounds,
		  "array subscript is above array bounds");
      TREE_NO_WARNING (ref) = 1;
    }
  /* Below-bounds check.  */
  else if (TREE_CODE (low_sub) == INTEGER_CST
           && tree_int_cst_lt (low_sub, low_bound))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Array bound warning for ");
	  dump_generic_expr (MSG_NOTE, TDF_SLIM, ref);
	  fprintf (dump_file, "\n");
	}
      warning_at (location, OPT_Warray_bounds,
		  "array subscript is below array bounds");
      TREE_NO_WARNING (ref) = 1;
    }
}
/* Searches if the expr T, located at LOCATION computes
   address of an ARRAY_REF, and call check_array_ref on it.  */

static void
search_for_addr_array (tree t, location_t location)
{
  /* Follow chains of plain SSA copies back to the defining
     expression.  */
  while (TREE_CODE (t) == SSA_NAME)
    {
      gimple g = SSA_NAME_DEF_STMT (t);

      if (gimple_code (g) != GIMPLE_ASSIGN)
	return;

      if (get_gimple_rhs_class (gimple_assign_rhs_code (g))
	  != GIMPLE_SINGLE_RHS)
	return;

      t = gimple_assign_rhs1 (g);
    }


  /* We are only interested in addresses of ARRAY_REF's.  */
  if (TREE_CODE (t) != ADDR_EXPR)
    return;

  /* Check each ARRAY_REFs in the reference chain.  Since T is an
     address, one-past-the-end subscripts are permitted.  */
  do
    {
      if (TREE_CODE (t) == ARRAY_REF)
	check_array_ref (location, t, true /*ignore_off_by_one*/);

      t = TREE_OPERAND (t, 0);
    }
  while (handled_component_p (t));

  /* Also handle &MEM[&array + CST]: a constant-offset access into a
     one-dimensional array of known bounds and element size.  */
  if (TREE_CODE (t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
      && !TREE_NO_WARNING (t))
    {
      tree tem = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
      tree low_bound, up_bound, el_sz;
      offset_int idx;
      if (TREE_CODE (TREE_TYPE (tem)) != ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (TREE_TYPE (tem))) == ARRAY_TYPE
	  || !TYPE_DOMAIN (TREE_TYPE (tem)))
	return;

      low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (tem)));
      up_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (tem)));
      el_sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (tem)));
      if (!low_bound
	  || TREE_CODE (low_bound) != INTEGER_CST
	  || !up_bound
	  || TREE_CODE (up_bound) != INTEGER_CST
	  || !el_sz
	  || TREE_CODE (el_sz) != INTEGER_CST)
	return;

      /* Convert the byte offset into an element index.  */
      idx = mem_ref_offset (t);
      idx = wi::sdiv_trunc (idx, wi::to_offset (el_sz));
      if (wi::lts_p (idx, 0))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Array bound warning for ");
	      dump_generic_expr (MSG_NOTE, TDF_SLIM, t);
	      fprintf (dump_file, "\n");
	    }
	  warning_at (location, OPT_Warray_bounds,
		      "array subscript is below array bounds");
	  TREE_NO_WARNING (t) = 1;
	}
      /* Compare against up_bound - low_bound + 1 rather than the last
	 valid index: as this is an address computation, the
	 one-past-the-end element is not warned about.  */
      else if (wi::gts_p (idx, (wi::to_offset (up_bound)
				- wi::to_offset (low_bound) + 1)))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Array bound warning for ");
	      dump_generic_expr (MSG_NOTE, TDF_SLIM, t);
	      fprintf (dump_file, "\n");
	    }
	  warning_at (location, OPT_Warray_bounds,
		      "array subscript is above array bounds");
	  TREE_NO_WARNING (t) = 1;
	}
    }
}
  5881. /* walk_tree() callback that checks if *TP is
  5882. an ARRAY_REF inside an ADDR_EXPR (in which an array
  5883. subscript one outside the valid range is allowed). Call
  5884. check_array_ref for each ARRAY_REF found. The location is
  5885. passed in DATA. */
  5886. static tree
  5887. check_array_bounds (tree *tp, int *walk_subtree, void *data)
  5888. {
  5889. tree t = *tp;
  5890. struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  5891. location_t location;
  5892. if (EXPR_HAS_LOCATION (t))
  5893. location = EXPR_LOCATION (t);
  5894. else
  5895. {
  5896. location_t *locp = (location_t *) wi->info;
  5897. location = *locp;
  5898. }
  5899. *walk_subtree = TRUE;
  5900. if (TREE_CODE (t) == ARRAY_REF)
  5901. check_array_ref (location, t, false /*ignore_off_by_one*/);
  5902. if (TREE_CODE (t) == MEM_REF
  5903. || (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0)))
  5904. search_for_addr_array (TREE_OPERAND (t, 0), location);
  5905. if (TREE_CODE (t) == ADDR_EXPR)
  5906. *walk_subtree = FALSE;
  5907. return NULL_TREE;
  5908. }
  5909. /* Walk over all statements of all reachable BBs and call check_array_bounds
  5910. on them. */
  5911. static void
  5912. check_all_array_refs (void)
  5913. {
  5914. basic_block bb;
  5915. gimple_stmt_iterator si;
  5916. FOR_EACH_BB_FN (bb, cfun)
  5917. {
  5918. edge_iterator ei;
  5919. edge e;
  5920. bool executable = false;
  5921. /* Skip blocks that were found to be unreachable. */
  5922. FOR_EACH_EDGE (e, ei, bb->preds)
  5923. executable |= !!(e->flags & EDGE_EXECUTABLE);
  5924. if (!executable)
  5925. continue;
  5926. for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
  5927. {
  5928. gimple stmt = gsi_stmt (si);
  5929. struct walk_stmt_info wi;
  5930. if (!gimple_has_location (stmt))
  5931. continue;
  5932. if (is_gimple_call (stmt))
  5933. {
  5934. size_t i;
  5935. size_t n = gimple_call_num_args (stmt);
  5936. for (i = 0; i < n; i++)
  5937. {
  5938. tree arg = gimple_call_arg (stmt, i);
  5939. search_for_addr_array (arg, gimple_location (stmt));
  5940. }
  5941. }
  5942. else
  5943. {
  5944. memset (&wi, 0, sizeof (wi));
  5945. wi.info = CONST_CAST (void *, (const void *)
  5946. gimple_location_ptr (stmt));
  5947. walk_gimple_op (gsi_stmt (si),
  5948. check_array_bounds,
  5949. &wi);
  5950. }
  5951. }
  5952. }
  5953. }
  5954. /* Return true if all imm uses of VAR are either in STMT, or
  5955. feed (optionally through a chain of single imm uses) GIMPLE_COND
  5956. in basic block COND_BB. */
  5957. static bool
  5958. all_imm_uses_in_stmt_or_feed_cond (tree var, gimple stmt, basic_block cond_bb)
  5959. {
  5960. use_operand_p use_p, use2_p;
  5961. imm_use_iterator iter;
  5962. FOR_EACH_IMM_USE_FAST (use_p, iter, var)
  5963. if (USE_STMT (use_p) != stmt)
  5964. {
  5965. gimple use_stmt = USE_STMT (use_p), use_stmt2;
  5966. if (is_gimple_debug (use_stmt))
  5967. continue;
  5968. while (is_gimple_assign (use_stmt)
  5969. && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
  5970. && single_imm_use (gimple_assign_lhs (use_stmt),
  5971. &use2_p, &use_stmt2))
  5972. use_stmt = use_stmt2;
  5973. if (gimple_code (use_stmt) != GIMPLE_COND
  5974. || gimple_bb (use_stmt) != cond_bb)
  5975. return false;
  5976. }
  5977. return true;
  5978. }
/* Handle
   _4 = x_3 & 31;
   if (_4 != 0)
     goto <bb 6>;
   else
     goto <bb 7>;
   <bb 6>:
   __builtin_unreachable ();
   <bb 7>:
   x_5 = ASSERT_EXPR <x_3, ...>;
   If x_3 has no other immediate uses (checked by caller),
   var is the x_3 var from ASSERT_EXPR, we can clear low 5 bits
   from the non-zero bitmask.  */

static void
maybe_set_nonzero_bits (basic_block bb, tree var)
{
  edge e = single_pred_edge (bb);
  basic_block cond_bb = e->src;
  gimple stmt = last_stmt (cond_bb);
  tree cst;

  /* The predecessor must end in a GIMPLE_COND comparing an SSA name
     against zero.  Reaching BB means the (VAR & CST) == 0 branch was
     taken: that corresponds to EQ_EXPR on a true edge and NE_EXPR on
     a false edge.  */
  if (stmt == NULL
      || gimple_code (stmt) != GIMPLE_COND
      || gimple_cond_code (stmt) != ((e->flags & EDGE_TRUE_VALUE)
				     ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (stmt)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (stmt)))
    return;

  /* The compared name must come from VAR & INTEGER_CST.  */
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
    return;
  if (gimple_assign_rhs1 (stmt) != var)
    {
      gimple stmt2;

      /* Also accept a precision-preserving conversion of VAR as the
	 first operand of the BIT_AND_EXPR.  */
      if (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME)
	return;
      stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (!gimple_assign_cast_p (stmt2)
	  || gimple_assign_rhs1 (stmt2) != var
	  || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt2))
	  || (TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt)))
	      != TYPE_PRECISION (TREE_TYPE (var))))
	return;
    }
  /* All bits set in CST are known to be zero in VAR; clear them in
     its non-zero-bits mask.  */
  cst = gimple_assign_rhs2 (stmt);
  set_nonzero_bits (var, wi::bit_and_not (get_nonzero_bits (var), cst));
}
/* Convert range assertion expressions into the implied copies and
   copy propagate away the copies.  Doing the trivial copy propagation
   here avoids the need to run the full copy propagation pass after
   VRP.

   FIXME, this will eventually lead to copy propagation removing the
   names that had useful range information attached to them.  For
   instance, if we had the assertion N_i = ASSERT_EXPR <N_j, N_j > 3>,
   then N_i will have the range [3, +INF].

   However, by converting the assertion into the implied copy
   operation N_i = N_j, we will then copy-propagate N_j into the uses
   of N_i and lose the range information.  We may want to hold on to
   ASSERT_EXPRs a little while longer as the ranges could be used in
   things like jump threading.

   The problem with keeping ASSERT_EXPRs around is that passes after
   VRP need to handle them appropriately.

   Another approach would be to make the range information a first
   class property of the SSA_NAME so that it can be queried from
   any pass.  This is made somewhat more complex by the need for
   multiple ranges to be associated with one SSA_NAME.  */

static void
remove_range_assertions (void)
{
  basic_block bb;
  gimple_stmt_iterator si;
  /* 1 if looking at ASSERT_EXPRs immediately at the beginning of
     a basic block preceeded by GIMPLE_COND branching to it and
     __builtin_trap, -1 if not yet checked, 0 otherwise.  */
  int is_unreachable;

  /* Note that the BSI iterator bump happens at the bottom of the
     loop and no bump is necessary if we're removing the statement
     referenced by the current BSI.  */
  FOR_EACH_BB_FN (bb, cfun)
    for (si = gsi_after_labels (bb), is_unreachable = -1; !gsi_end_p (si);)
      {
	gimple stmt = gsi_stmt (si);
	gimple use_stmt;

	if (is_gimple_assign (stmt)
	    && gimple_assign_rhs_code (stmt) == ASSERT_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree rhs = gimple_assign_rhs1 (stmt);
	    tree var;
	    tree cond = fold (ASSERT_EXPR_COND (rhs));
	    use_operand_p use_p;
	    imm_use_iterator iter;

	    /* An assertion whose condition folds to false would mean
	       the assertion can never hold.  */
	    gcc_assert (cond != boolean_false_node);

	    var = ASSERT_EXPR_VAR (rhs);
	    gcc_assert (TREE_CODE (var) == SSA_NAME);

	    if (!POINTER_TYPE_P (TREE_TYPE (lhs))
		&& SSA_NAME_RANGE_INFO (lhs))
	      {
		/* Lazily compute (and cache for the rest of this BB)
		   whether BB is the fallthru of an "else
		   __builtin_unreachable ()" condition.  */
		if (is_unreachable == -1)
		  {
		    is_unreachable = 0;
		    if (single_pred_p (bb)
			&& assert_unreachable_fallthru_edge_p
						      (single_pred_edge (bb)))
		      is_unreachable = 1;
		  }
		/* Handle
		   if (x_7 >= 10 && x_7 < 20)
		     __builtin_unreachable ();
		   x_8 = ASSERT_EXPR <x_7, ...>;
		   if the only uses of x_7 are in the ASSERT_EXPR and
		   in the condition.  In that case, we can copy the
		   range info from x_8 computed in this pass also
		   for x_7.  */
		if (is_unreachable
		    && all_imm_uses_in_stmt_or_feed_cond (var, stmt,
							  single_pred (bb)))
		  {
		    set_range_info (var, SSA_NAME_RANGE_TYPE (lhs),
				    SSA_NAME_RANGE_INFO (lhs)->get_min (),
				    SSA_NAME_RANGE_INFO (lhs)->get_max ());
		    maybe_set_nonzero_bits (bb, var);
		  }
	      }

	    /* Propagate the RHS into every use of the LHS.  */
	    FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, var);

	    /* And finally, remove the copy, it is not needed.  */
	    gsi_remove (&si, true);
	    release_defs (stmt);
	  }
	else
	  {
	    /* Any non-debug statement after the leading labels means
	       later ASSERT_EXPRs in this BB no longer directly follow
	       the unreachable-guarded condition.  */
	    if (!is_gimple_debug (gsi_stmt (si)))
	      is_unreachable = 0;
	    gsi_next (&si);
	  }
      }
}
  6120. /* Return true if STMT is interesting for VRP. */
  6121. static bool
  6122. stmt_interesting_for_vrp (gimple stmt)
  6123. {
  6124. if (gimple_code (stmt) == GIMPLE_PHI)
  6125. {
  6126. tree res = gimple_phi_result (stmt);
  6127. return (!virtual_operand_p (res)
  6128. && (INTEGRAL_TYPE_P (TREE_TYPE (res))
  6129. || POINTER_TYPE_P (TREE_TYPE (res))));
  6130. }
  6131. else if (is_gimple_assign (stmt) || is_gimple_call (stmt))
  6132. {
  6133. tree lhs = gimple_get_lhs (stmt);
  6134. /* In general, assignments with virtual operands are not useful
  6135. for deriving ranges, with the obvious exception of calls to
  6136. builtin functions. */
  6137. if (lhs && TREE_CODE (lhs) == SSA_NAME
  6138. && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
  6139. || POINTER_TYPE_P (TREE_TYPE (lhs)))
  6140. && (is_gimple_call (stmt)
  6141. || !gimple_vuse (stmt)))
  6142. return true;
  6143. else if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
  6144. switch (gimple_call_internal_fn (stmt))
  6145. {
  6146. case IFN_ADD_OVERFLOW:
  6147. case IFN_SUB_OVERFLOW:
  6148. case IFN_MUL_OVERFLOW:
  6149. /* These internal calls return _Complex integer type,
  6150. but are interesting to VRP nevertheless. */
  6151. if (lhs && TREE_CODE (lhs) == SSA_NAME)
  6152. return true;
  6153. break;
  6154. default:
  6155. break;
  6156. }
  6157. }
  6158. else if (gimple_code (stmt) == GIMPLE_COND
  6159. || gimple_code (stmt) == GIMPLE_SWITCH)
  6160. return true;
  6161. return false;
  6162. }
  6163. /* Initialize local data structures for VRP. */
  6164. static void
  6165. vrp_initialize (void)
  6166. {
  6167. basic_block bb;
  6168. values_propagated = false;
  6169. num_vr_values = num_ssa_names;
  6170. vr_value = XCNEWVEC (value_range_t *, num_vr_values);
  6171. vr_phi_edge_counts = XCNEWVEC (int, num_ssa_names);
  6172. FOR_EACH_BB_FN (bb, cfun)
  6173. {
  6174. for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
  6175. gsi_next (&si))
  6176. {
  6177. gphi *phi = si.phi ();
  6178. if (!stmt_interesting_for_vrp (phi))
  6179. {
  6180. tree lhs = PHI_RESULT (phi);
  6181. set_value_range_to_varying (get_value_range (lhs));
  6182. prop_set_simulate_again (phi, false);
  6183. }
  6184. else
  6185. prop_set_simulate_again (phi, true);
  6186. }
  6187. for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
  6188. gsi_next (&si))
  6189. {
  6190. gimple stmt = gsi_stmt (si);
  6191. /* If the statement is a control insn, then we do not
  6192. want to avoid simulating the statement once. Failure
  6193. to do so means that those edges will never get added. */
  6194. if (stmt_ends_bb_p (stmt))
  6195. prop_set_simulate_again (stmt, true);
  6196. else if (!stmt_interesting_for_vrp (stmt))
  6197. {
  6198. ssa_op_iter i;
  6199. tree def;
  6200. FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
  6201. set_value_range_to_varying (get_value_range (def));
  6202. prop_set_simulate_again (stmt, false);
  6203. }
  6204. else
  6205. prop_set_simulate_again (stmt, true);
  6206. }
  6207. }
  6208. }
  6209. /* Return the singleton value-range for NAME or NAME. */
  6210. static inline tree
  6211. vrp_valueize (tree name)
  6212. {
  6213. if (TREE_CODE (name) == SSA_NAME)
  6214. {
  6215. value_range_t *vr = get_value_range (name);
  6216. if (vr->type == VR_RANGE
  6217. && (vr->min == vr->max
  6218. || operand_equal_p (vr->min, vr->max, 0)))
  6219. return vr->min;
  6220. }
  6221. return name;
  6222. }
  6223. /* Return the singleton value-range for NAME if that is a constant
  6224. but signal to not follow SSA edges. */
  6225. static inline tree
  6226. vrp_valueize_1 (tree name)
  6227. {
  6228. if (TREE_CODE (name) == SSA_NAME)
  6229. {
  6230. /* If the definition may be simulated again we cannot follow
  6231. this SSA edge as the SSA propagator does not necessarily
  6232. re-visit the use. */
  6233. gimple def_stmt = SSA_NAME_DEF_STMT (name);
  6234. if (!gimple_nop_p (def_stmt)
  6235. && prop_simulate_again_p (def_stmt))
  6236. return NULL_TREE;
  6237. value_range_t *vr = get_value_range (name);
  6238. if (range_int_cst_singleton_p (vr))
  6239. return vr->min;
  6240. }
  6241. return name;
  6242. }
/* Visit assignment STMT.  If it produces an interesting range, record
   the SSA name in *OUTPUT_P.  */

static enum ssa_prop_result
vrp_visit_assignment_or_call (gimple stmt, tree *output_p)
{
  tree def, lhs;
  ssa_op_iter iter;
  enum gimple_code code = gimple_code (stmt);
  lhs = gimple_get_lhs (stmt);

  /* We only keep track of ranges in integral and pointer types.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	   /* It is valid to have NULL MIN/MAX values on a type.  See
	      build_range_type.  */
	   && TYPE_MIN_VALUE (TREE_TYPE (lhs))
	   && TYPE_MAX_VALUE (TREE_TYPE (lhs)))
	  || POINTER_TYPE_P (TREE_TYPE (lhs))))
    {
      value_range_t new_vr = VR_INITIALIZER;

      /* Try folding the statement to a constant first.  */
      tree tem = gimple_fold_stmt_to_constant_1 (stmt, vrp_valueize,
						 vrp_valueize_1);
      if (tem && is_gimple_min_invariant (tem))
	set_value_range_to_value (&new_vr, tem, NULL);
      /* Then dispatch to value-range extracting functions.  */
      else if (code == GIMPLE_CALL)
	extract_range_basic (&new_vr, stmt);
      else
	extract_range_from_assignment (&new_vr, as_a <gassign *> (stmt));

      /* Report LHS to the propagator only if its range actually
	 changed.  */
      if (update_value_range (lhs, &new_vr))
	{
	  *output_p = lhs;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found new range for ");
	      print_generic_expr (dump_file, lhs, 0);
	      fprintf (dump_file, ": ");
	      dump_value_range (dump_file, &new_vr);
	      fprintf (dump_file, "\n");
	    }

	  if (new_vr.type == VR_VARYING)
	    return SSA_PROP_VARYING;

	  return SSA_PROP_INTERESTING;
	}

      return SSA_PROP_NOT_INTERESTING;
    }
  else if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_MUL_OVERFLOW:
	/* These internal calls return _Complex integer type,
	   which VRP does not track, but the immediate uses
	   thereof might be interesting.  */
	if (lhs && TREE_CODE (lhs) == SSA_NAME)
	  {
	    imm_use_iterator iter;
	    use_operand_p use_p;
	    enum ssa_prop_result res = SSA_PROP_VARYING;

	    /* The _Complex result itself is always VARYING.  */
	    set_value_range_to_varying (get_value_range (lhs));

	    FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
	      {
		gimple use_stmt = USE_STMT (use_p);
		if (!is_gimple_assign (use_stmt))
		  continue;
		enum tree_code rhs_code = gimple_assign_rhs_code (use_stmt);
		if (rhs_code != REALPART_EXPR && rhs_code != IMAGPART_EXPR)
		  continue;
		tree rhs1 = gimple_assign_rhs1 (use_stmt);
		tree use_lhs = gimple_assign_lhs (use_stmt);
		if (TREE_CODE (rhs1) != rhs_code
		    || TREE_OPERAND (rhs1, 0) != lhs
		    || TREE_CODE (use_lhs) != SSA_NAME
		    || !stmt_interesting_for_vrp (use_stmt)
		    || (!INTEGRAL_TYPE_P (TREE_TYPE (use_lhs))
			|| !TYPE_MIN_VALUE (TREE_TYPE (use_lhs))
			|| !TYPE_MAX_VALUE (TREE_TYPE (use_lhs))))
		  continue;

		/* If there is a change in the value range for any of the
		   REALPART_EXPR/IMAGPART_EXPR immediate uses, return
		   SSA_PROP_INTERESTING.  If there are any REALPART_EXPR
		   or IMAGPART_EXPR immediate uses, but none of them have
		   a change in their value ranges, return
		   SSA_PROP_NOT_INTERESTING.  If there are no
		   {REAL,IMAG}PART_EXPR uses at all,
		   return SSA_PROP_VARYING.  */
		value_range_t new_vr = VR_INITIALIZER;
		extract_range_basic (&new_vr, use_stmt);
		value_range_t *old_vr = get_value_range (use_lhs);
		if (old_vr->type != new_vr.type
		    || !vrp_operand_equal_p (old_vr->min, new_vr.min)
		    || !vrp_operand_equal_p (old_vr->max, new_vr.max)
		    || !vrp_bitmap_equal_p (old_vr->equiv, new_vr.equiv))
		  res = SSA_PROP_INTERESTING;
		else
		  res = SSA_PROP_NOT_INTERESTING;
		BITMAP_FREE (new_vr.equiv);
		if (res == SSA_PROP_INTERESTING)
		  {
		    *output_p = lhs;
		    return res;
		  }
	      }

	    return res;
	  }
	break;
      default:
	break;
      }

  /* Every other statement produces no useful ranges.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    set_value_range_to_varying (get_value_range (def));

  return SSA_PROP_VARYING;
}
  6358. /* Helper that gets the value range of the SSA_NAME with version I
  6359. or a symbolic range containing the SSA_NAME only if the value range
  6360. is varying or undefined. */
  6361. static inline value_range_t
  6362. get_vr_for_comparison (int i)
  6363. {
  6364. value_range_t vr = *get_value_range (ssa_name (i));
  6365. /* If name N_i does not have a valid range, use N_i as its own
  6366. range. This allows us to compare against names that may
  6367. have N_i in their ranges. */
  6368. if (vr.type == VR_VARYING || vr.type == VR_UNDEFINED)
  6369. {
  6370. vr.type = VR_RANGE;
  6371. vr.min = ssa_name (i);
  6372. vr.max = ssa_name (i);
  6373. }
  6374. return vr;
  6375. }
/* Compare all the value ranges for names equivalent to VAR with VAL
   using comparison code COMP.  Return the same value returned by
   compare_range_with_value, including the setting of
   *STRICT_OVERFLOW_P.  */

static tree
compare_name_with_value (enum tree_code comp, tree var, tree val,
			 bool *strict_overflow_p)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap e;
  tree retval, t;
  int used_strict_overflow;
  bool sop;
  value_range_t equiv_vr;

  /* Get the set of equivalences for VAR.  */
  e = get_value_range (var)->equiv;

  /* Start at -1.  Set it to 0 if we do a comparison without relying
     on overflow, or 1 if all comparisons rely on overflow.  */
  used_strict_overflow = -1;

  /* Compare vars' value range with val.  */
  equiv_vr = get_vr_for_comparison (SSA_NAME_VERSION (var));
  sop = false;
  retval = compare_range_with_value (comp, &equiv_vr, val, &sop);
  if (retval)
    used_strict_overflow = sop ? 1 : 0;

  /* If the equiv set is empty we have done all work we need to do.  */
  if (e == NULL)
    {
      if (retval
	  && used_strict_overflow > 0)
	*strict_overflow_p = true;
      return retval;
    }

  /* Otherwise compare VAL against the range of every equivalent name
     as well; all of them must agree on the answer.  */
  EXECUTE_IF_SET_IN_BITMAP (e, 0, i, bi)
    {
      equiv_vr = get_vr_for_comparison (i);
      sop = false;
      t = compare_range_with_value (comp, &equiv_vr, val, &sop);
      if (t)
	{
	  /* If we get different answers from different members
	     of the equivalence set this check must be in a dead
	     code region.  Folding it to a trap representation
	     would be correct here.  For now just return don't-know.  */
	  if (retval != NULL
	      && t != retval)
	    {
	      retval = NULL_TREE;
	      break;
	    }
	  retval = t;

	  if (!sop)
	    used_strict_overflow = 0;
	  else if (used_strict_overflow < 0)
	    used_strict_overflow = 1;
	}
    }

  /* Signal strict-overflow reliance only when every deciding
     comparison relied on it.  */
  if (retval
      && used_strict_overflow > 0)
    *strict_overflow_p = true;

  return retval;
}
/* Given a comparison code COMP and names N1 and N2, compare all the
   ranges equivalent to N1 against all the ranges equivalent to N2
   to determine the value of N1 COMP N2.  Return the same value
   returned by compare_ranges.  Set *STRICT_OVERFLOW_P to indicate
   whether we relied on an overflow infinity in the comparison.  */

static tree
compare_names (enum tree_code comp, tree n1, tree n2,
	       bool *strict_overflow_p)
{
  tree t, retval;
  bitmap e1, e2;
  bitmap_iterator bi1, bi2;
  unsigned i1, i2;
  int used_strict_overflow;
  /* Lazily-created scratch bitmaps, reused across calls, standing in
     for a missing equivalence set.  They are kept empty except for
     the N1/N2 bits temporarily set below.  */
  static bitmap_obstack *s_obstack = NULL;
  static bitmap s_e1 = NULL, s_e2 = NULL;

  /* Compare the ranges of every name equivalent to N1 against the
     ranges of every name equivalent to N2.  */
  e1 = get_value_range (n1)->equiv;
  e2 = get_value_range (n2)->equiv;

  /* Use the fake bitmaps if e1 or e2 are not available.  */
  if (s_obstack == NULL)
    {
      s_obstack = XNEW (bitmap_obstack);
      bitmap_obstack_initialize (s_obstack);
      s_e1 = BITMAP_ALLOC (s_obstack);
      s_e2 = BITMAP_ALLOC (s_obstack);
    }
  if (e1 == NULL)
    e1 = s_e1;
  if (e2 == NULL)
    e2 = s_e2;

  /* Add N1 and N2 to their own set of equivalences to avoid
     duplicating the body of the loop just to check N1 and N2
     ranges.  NOTE: these bits must be cleared again on every exit
     path below, since E1/E2 may be the shared value-range bitmaps.  */
  bitmap_set_bit (e1, SSA_NAME_VERSION (n1));
  bitmap_set_bit (e2, SSA_NAME_VERSION (n2));

  /* If the equivalence sets have a common intersection, then the two
     names can be compared without checking their ranges.  */
  if (bitmap_intersect_p (e1, e2))
    {
      bitmap_clear_bit (e1, SSA_NAME_VERSION (n1));
      bitmap_clear_bit (e2, SSA_NAME_VERSION (n2));

      return (comp == EQ_EXPR || comp == GE_EXPR || comp == LE_EXPR)
	     ? boolean_true_node
	     : boolean_false_node;
    }

  /* Start at -1.  Set it to 0 if we do a comparison without relying
     on overflow, or 1 if all comparisons rely on overflow.  */
  used_strict_overflow = -1;

  /* Otherwise, compare all the equivalent ranges.  First, add N1 and
     N2 to their own set of equivalences to avoid duplicating the body
     of the loop just to check N1 and N2 ranges.  */
  EXECUTE_IF_SET_IN_BITMAP (e1, 0, i1, bi1)
    {
      value_range_t vr1 = get_vr_for_comparison (i1);

      t = retval = NULL_TREE;
      EXECUTE_IF_SET_IN_BITMAP (e2, 0, i2, bi2)
	{
	  bool sop = false;

	  value_range_t vr2 = get_vr_for_comparison (i2);

	  t = compare_ranges (comp, &vr1, &vr2, &sop);
	  if (t)
	    {
	      /* If we get different answers from different members
		 of the equivalence set this check must be in a dead
		 code region.  Folding it to a trap representation
		 would be correct here.  For now just return don't-know.  */
	      if (retval != NULL
		  && t != retval)
		{
		  bitmap_clear_bit (e1, SSA_NAME_VERSION (n1));
		  bitmap_clear_bit (e2, SSA_NAME_VERSION (n2));
		  return NULL_TREE;
		}
	      retval = t;

	      if (!sop)
		used_strict_overflow = 0;
	      else if (used_strict_overflow < 0)
		used_strict_overflow = 1;
	    }
	}

      /* A decisive answer for this member of E1 against all of E2
	 settles the whole comparison.  */
      if (retval)
	{
	  bitmap_clear_bit (e1, SSA_NAME_VERSION (n1));
	  bitmap_clear_bit (e2, SSA_NAME_VERSION (n2));
	  if (used_strict_overflow > 0)
	    *strict_overflow_p = true;
	  return retval;
	}
    }

  /* None of the equivalent ranges are useful in computing this
     comparison.  */
  bitmap_clear_bit (e1, SSA_NAME_VERSION (n1));
  bitmap_clear_bit (e2, SSA_NAME_VERSION (n2));
  return NULL_TREE;
}
  6536. /* Helper function for vrp_evaluate_conditional_warnv. */
  6537. static tree
  6538. vrp_evaluate_conditional_warnv_with_ops_using_ranges (enum tree_code code,
  6539. tree op0, tree op1,
  6540. bool * strict_overflow_p)
  6541. {
  6542. value_range_t *vr0, *vr1;
  6543. vr0 = (TREE_CODE (op0) == SSA_NAME) ? get_value_range (op0) : NULL;
  6544. vr1 = (TREE_CODE (op1) == SSA_NAME) ? get_value_range (op1) : NULL;
  6545. tree res = NULL_TREE;
  6546. if (vr0 && vr1)
  6547. res = compare_ranges (code, vr0, vr1, strict_overflow_p);
  6548. if (!res && vr0)
  6549. res = compare_range_with_value (code, vr0, op1, strict_overflow_p);
  6550. if (!res && vr1)
  6551. res = (compare_range_with_value
  6552. (swap_tree_comparison (code), vr1, op0, strict_overflow_p));
  6553. return res;
  6554. }
  6555. /* Helper function for vrp_evaluate_conditional_warnv. */
  6556. static tree
  6557. vrp_evaluate_conditional_warnv_with_ops (enum tree_code code, tree op0,
  6558. tree op1, bool use_equiv_p,
  6559. bool *strict_overflow_p, bool *only_ranges)
  6560. {
  6561. tree ret;
  6562. if (only_ranges)
  6563. *only_ranges = true;
  6564. /* We only deal with integral and pointer types. */
  6565. if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
  6566. && !POINTER_TYPE_P (TREE_TYPE (op0)))
  6567. return NULL_TREE;
  6568. if (use_equiv_p)
  6569. {
  6570. if (only_ranges
  6571. && (ret = vrp_evaluate_conditional_warnv_with_ops_using_ranges
  6572. (code, op0, op1, strict_overflow_p)))
  6573. return ret;
  6574. *only_ranges = false;
  6575. if (TREE_CODE (op0) == SSA_NAME && TREE_CODE (op1) == SSA_NAME)
  6576. return compare_names (code, op0, op1, strict_overflow_p);
  6577. else if (TREE_CODE (op0) == SSA_NAME)
  6578. return compare_name_with_value (code, op0, op1, strict_overflow_p);
  6579. else if (TREE_CODE (op1) == SSA_NAME)
  6580. return (compare_name_with_value
  6581. (swap_tree_comparison (code), op1, op0, strict_overflow_p));
  6582. }
  6583. else
  6584. return vrp_evaluate_conditional_warnv_with_ops_using_ranges (code, op0, op1,
  6585. strict_overflow_p);
  6586. return NULL_TREE;
  6587. }
/* Given (CODE OP0 OP1) within STMT, try to simplify it based on value range
   information.  Return NULL if the conditional can not be evaluated.
   The ranges of all the names equivalent with the operands in COND
   will be used when trying to compute the value.  If the result is
   based on undefined signed overflow, issue a warning if
   appropriate.  */
static tree
vrp_evaluate_conditional (enum tree_code code, tree op0, tree op1, gimple stmt)
{
  bool sop;
  tree ret;
  bool only_ranges;

  /* Some passes and foldings leak constants with overflow flag set
     into the IL.  Avoid doing wrong things with these and bail out.  */
  if ((TREE_CODE (op0) == INTEGER_CST
       && TREE_OVERFLOW (op0))
      || (TREE_CODE (op1) == INTEGER_CST
	  && TREE_OVERFLOW (op1)))
    return NULL_TREE;

  sop = false;
  ret = vrp_evaluate_conditional_warnv_with_ops (code, op0, op1, true, &sop,
						 &only_ranges);

  if (ret && sop)
    {
      enum warn_strict_overflow_code wc;
      const char* warnmsg;

      /* The folding relied on undefined signed overflow; pick the
	 warning text depending on whether we folded all the way to a
	 constant or merely simplified the comparison.  */
      if (is_gimple_min_invariant (ret))
	{
	  wc = WARN_STRICT_OVERFLOW_CONDITIONAL;
	  warnmsg = G_("assuming signed overflow does not occur when "
		       "simplifying conditional to constant");
	}
      else
	{
	  wc = WARN_STRICT_OVERFLOW_COMPARISON;
	  warnmsg = G_("assuming signed overflow does not occur when "
		       "simplifying conditional");
	}

      if (issue_strict_overflow_warning (wc))
	{
	  location_t location;

	  /* Fall back to the global input location when the statement
	     carries none.  */
	  if (!gimple_has_location (stmt))
	    location = input_location;
	  else
	    location = gimple_location (stmt);
	  warning_at (location, OPT_Wstrict_overflow, "%s", warnmsg);
	}
    }

  if (warn_type_limits
      && ret && only_ranges
      && TREE_CODE_CLASS (code) == tcc_comparison
      && TREE_CODE (op0) == SSA_NAME)
    {
      /* If the comparison is being folded and the operand on the LHS
	 is being compared against a constant value that is outside of
	 the natural range of OP0's type, then the predicate will
	 always fold regardless of the value of OP0.  If -Wtype-limits
	 was specified, emit a warning.  */
      tree type = TREE_TYPE (op0);
      value_range_t *vr0 = get_value_range (op0);

      /* A [TYPE_MIN, TYPE_MAX] range on OP0 means any integral value is
	 possible, so a folded comparison against an invariant can only
	 be due to the limited range of the type.  */
      if (vr0->type == VR_RANGE
	  && INTEGRAL_TYPE_P (type)
	  && vrp_val_is_min (vr0->min)
	  && vrp_val_is_max (vr0->max)
	  && is_gimple_min_invariant (op1))
	{
	  location_t location;

	  if (!gimple_has_location (stmt))
	    location = input_location;
	  else
	    location = gimple_location (stmt);

	  warning_at (location, OPT_Wtype_limits,
		      integer_zerop (ret)
		      ? G_("comparison always false "
			   "due to limited range of data type")
		      : G_("comparison always true "
			   "due to limited range of data type"));
	}
    }

  return ret;
}
/* Visit conditional statement STMT.  If we can determine which edge
   will be taken out of STMT's basic block, record it in
   *TAKEN_EDGE_P and return SSA_PROP_INTERESTING.  Otherwise, return
   SSA_PROP_VARYING.  */
static enum ssa_prop_result
vrp_visit_cond_stmt (gcond *stmt, edge *taken_edge_p)
{
  tree val;
  bool sop;

  *taken_edge_p = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      tree use;
      ssa_op_iter i;

      fprintf (dump_file, "\nVisiting conditional with predicate: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
      fprintf (dump_file, "\nWith known ranges\n");

      /* Dump the recorded range of every SSA name used by the
	 predicate.  */
      FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
	{
	  fprintf (dump_file, "\t");
	  print_generic_expr (dump_file, use, 0);
	  fprintf (dump_file, ": ");
	  dump_value_range (dump_file, vr_value[SSA_NAME_VERSION (use)]);
	}

      fprintf (dump_file, "\n");
    }

  /* Compute the value of the predicate COND by checking the known
     ranges of each of its operands.

     Note that we cannot evaluate all the equivalent ranges here
     because those ranges may not yet be final and with the current
     propagation strategy, we cannot determine when the value ranges
     of the names in the equivalence set have changed.

     For instance, given the following code fragment

        i_5 = PHI <8, i_13>
	...
	i_14 = ASSERT_EXPR <i_5, i_5 != 0>
	if (i_14 == 1)
	  ...

     Assume that on the first visit to i_14, i_5 has the temporary
     range [8, 8] because the second argument to the PHI function is
     not yet executable.  We derive the range ~[0, 0] for i_14 and the
     equivalence set { i_5 }.  So, when we visit 'if (i_14 == 1)' for
     the first time, since i_14 is equivalent to the range [8, 8], we
     determine that the predicate is always false.

     On the next round of propagation, i_13 is determined to be
     VARYING, which causes i_5 to drop down to VARYING.  So, another
     visit to i_14 is scheduled.  In this second visit, we compute the
     exact same range and equivalence set for i_14, namely ~[0, 0] and
     { i_5 }.  But we did not have the previous range for i_5
     registered, so vrp_visit_assignment thinks that the range for
     i_14 has not changed.  Therefore, the predicate 'if (i_14 == 1)'
     is not visited again, which stops propagation from visiting
     statements in the THEN clause of that if().

     To properly fix this we would need to keep the previous range
     value for the names in the equivalence set.  This way we would've
     discovered that from one visit to the other i_5 changed from
     range [8, 8] to VR_VARYING.

     However, fixing this apparent limitation may not be worth the
     additional checking.  Testing on several code bases (GCC, DLV,
     MICO, TRAMP3D and SPEC2000) showed that doing this results in
     4 more predicates folded in SPEC.  */
  sop = false;
  /* Hence USE_EQUIV_P is false here: only plain operand ranges are
     consulted, never the equivalence sets.  */
  val = vrp_evaluate_conditional_warnv_with_ops (gimple_cond_code (stmt),
						 gimple_cond_lhs (stmt),
						 gimple_cond_rhs (stmt),
						 false, &sop, NULL);
  if (val)
    {
      if (!sop)
	*taken_edge_p = find_taken_edge (gimple_bb (stmt), val);
      else
	{
	  /* During propagation we must not fold based on undefined
	     signed overflow; discard the answer.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "\nIgnoring predicate evaluation because "
		     "it assumes that signed overflow is undefined");
	  val = NULL_TREE;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nPredicate evaluates to: ");
      if (val == NULL_TREE)
	fprintf (dump_file, "DON'T KNOW\n");
      else
	print_generic_stmt (dump_file, val, 0);
    }

  return (*taken_edge_p) ? SSA_PROP_INTERESTING : SSA_PROP_VARYING;
}
  6758. /* Searches the case label vector VEC for the index *IDX of the CASE_LABEL
  6759. that includes the value VAL. The search is restricted to the range
  6760. [START_IDX, n - 1] where n is the size of VEC.
  6761. If there is a CASE_LABEL for VAL, its index is placed in IDX and true is
  6762. returned.
  6763. If there is no CASE_LABEL for VAL and there is one that is larger than VAL,
  6764. it is placed in IDX and false is returned.
  6765. If VAL is larger than any CASE_LABEL, n is placed on IDX and false is
  6766. returned. */
  6767. static bool
  6768. find_case_label_index (gswitch *stmt, size_t start_idx, tree val, size_t *idx)
  6769. {
  6770. size_t n = gimple_switch_num_labels (stmt);
  6771. size_t low, high;
  6772. /* Find case label for minimum of the value range or the next one.
  6773. At each iteration we are searching in [low, high - 1]. */
  6774. for (low = start_idx, high = n; high != low; )
  6775. {
  6776. tree t;
  6777. int cmp;
  6778. /* Note that i != high, so we never ask for n. */
  6779. size_t i = (high + low) / 2;
  6780. t = gimple_switch_label (stmt, i);
  6781. /* Cache the result of comparing CASE_LOW and val. */
  6782. cmp = tree_int_cst_compare (CASE_LOW (t), val);
  6783. if (cmp == 0)
  6784. {
  6785. /* Ranges cannot be empty. */
  6786. *idx = i;
  6787. return true;
  6788. }
  6789. else if (cmp > 0)
  6790. high = i;
  6791. else
  6792. {
  6793. low = i + 1;
  6794. if (CASE_HIGH (t) != NULL
  6795. && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
  6796. {
  6797. *idx = i;
  6798. return true;
  6799. }
  6800. }
  6801. }
  6802. *idx = high;
  6803. return false;
  6804. }
/* Searches the case label vector VEC for the range of CASE_LABELs that is used
   for values between MIN and MAX.  The first index is placed in MIN_IDX.  The
   last index is placed in MAX_IDX.  If the range of CASE_LABELs is empty
   then MAX_IDX < MIN_IDX.
   Returns true if the default label is not needed.  */
static bool
find_case_label_range (gswitch *stmt, tree min, tree max, size_t *min_idx,
		       size_t *max_idx)
{
  size_t i, j;
  /* Look up the labels covering MIN and MAX; a failed lookup means
     that bound falls through to the default label.  */
  bool min_take_default = !find_case_label_index (stmt, 1, min, &i);
  bool max_take_default = !find_case_label_index (stmt, i, max, &j);

  if (i == j
      && min_take_default
      && max_take_default)
    {
      /* Only the default case label reached.
	 Return an empty range.  */
      *min_idx = 1;
      *max_idx = 0;
      return false;
    }
  else
    {
      bool take_default = min_take_default || max_take_default;
      tree low, high;
      size_t k;

      /* When MAX missed, J points one past the last label in range.  */
      if (max_take_default)
	j--;

      /* If the case label range is continuous, we do not need
	 the default case label.  Verify that.  */
      high = CASE_LOW (gimple_switch_label (stmt, i));
      if (CASE_HIGH (gimple_switch_label (stmt, i)))
	high = CASE_HIGH (gimple_switch_label (stmt, i));
      for (k = i + 1; k <= j; ++k)
	{
	  low = CASE_LOW (gimple_switch_label (stmt, k));
	  /* Adjacent labels must differ by exactly one for the span to
	     be gap-free.  */
	  if (!integer_onep (int_const_binop (MINUS_EXPR, low, high)))
	    {
	      take_default = true;
	      break;
	    }
	  high = low;
	  if (CASE_HIGH (gimple_switch_label (stmt, k)))
	    high = CASE_HIGH (gimple_switch_label (stmt, k));
	}

      *min_idx = i;
      *max_idx = j;
      return !take_default;
    }
}
/* Searches the case label vector VEC for the ranges of CASE_LABELs that are
   used in range VR.  The indices are placed in MIN_IDX1, MAX_IDX, MIN_IDX2 and
   MAX_IDX2.  If the ranges of CASE_LABELs are empty then MAX_IDX1 < MIN_IDX1.
   Returns true if the default label is not needed.  */
static bool
find_case_label_ranges (gswitch *stmt, value_range_t *vr, size_t *min_idx1,
			size_t *max_idx1, size_t *min_idx2,
			size_t *max_idx2)
{
  size_t i, j, k, l;
  unsigned int n = gimple_switch_num_labels (stmt);
  bool take_default;
  tree case_low, case_high;
  tree min = vr->min, max = vr->max;

  gcc_checking_assert (vr->type == VR_RANGE || vr->type == VR_ANTI_RANGE);

  take_default = !find_case_label_range (stmt, min, max, &i, &j);

  /* Set second range to empty.  */
  *min_idx2 = 1;
  *max_idx2 = 0;

  if (vr->type == VR_RANGE)
    {
      /* A plain range maps to a single contiguous run of labels; the
	 second output range stays empty.  */
      *min_idx1 = i;
      *max_idx1 = j;
      return !take_default;
    }

  /* VR is an anti-range.  Start from all case labels and carve out the
     labels fully covered by [MIN, MAX] below.  */
  /* Set first range to all case labels.  */
  *min_idx1 = 1;
  *max_idx1 = n - 1;

  if (i > j)
    return false;

  /* Make sure all the values of case labels [i , j] are contained in
     range [MIN, MAX].  */
  case_low = CASE_LOW (gimple_switch_label (stmt, i));
  case_high = CASE_HIGH (gimple_switch_label (stmt, j));
  if (tree_int_cst_compare (case_low, min) < 0)
    i += 1;
  if (case_high != NULL_TREE
      && tree_int_cst_compare (max, case_high) < 0)
    j -= 1;

  if (i > j)
    return false;

  /* If the range spans case labels [i, j], the corresponding anti-range spans
     the labels [1, i - 1] and [j + 1, n - 1].  */
  k = j + 1;
  l = n - 1;
  if (k > l)
    {
      k = 1;
      l = 0;
    }

  j = i - 1;
  i = 1;
  if (i > j)
    {
      /* The left part is empty; promote the right part to the first
	 output range.  */
      i = k;
      j = l;
      k = 1;
      l = 0;
    }

  *min_idx1 = i;
  *max_idx1 = j;
  *min_idx2 = k;
  *max_idx2 = l;
  return false;
}
/* Visit switch statement STMT.  If we can determine which edge
   will be taken out of STMT's basic block, record it in
   *TAKEN_EDGE_P and return SSA_PROP_INTERESTING.  Otherwise, return
   SSA_PROP_VARYING.  */
static enum ssa_prop_result
vrp_visit_switch_stmt (gswitch *stmt, edge *taken_edge_p)
{
  tree op, val;
  value_range_t *vr;
  size_t i = 0, j = 0, k, l;
  bool take_default;

  *taken_edge_p = NULL;
  op = gimple_switch_index (stmt);
  /* Only an SSA name index can have a recorded value range.  */
  if (TREE_CODE (op) != SSA_NAME)
    return SSA_PROP_VARYING;

  vr = get_value_range (op);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting switch expression with operand ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " with known range ");
      dump_value_range (dump_file, vr);
      fprintf (dump_file, "\n");
    }

  /* Without a usable numeric (anti-)range we cannot narrow down the
     destination.  */
  if ((vr->type != VR_RANGE
       && vr->type != VR_ANTI_RANGE)
      || symbolic_range_p (vr))
    return SSA_PROP_VARYING;

  /* Find the single edge that is taken from the switch expression.  */
  take_default = !find_case_label_ranges (stmt, vr, &i, &j, &k, &l);

  /* Check if the range spans no CASE_LABEL. If so, we only reach the default
     label */
  if (j < i)
    {
      gcc_assert (take_default);
      val = gimple_switch_default_label (stmt);
    }
  else
    {
      /* Check if labels with index i to j and maybe the default label
	 are all reaching the same label.  */
      val = gimple_switch_label (stmt, i);
      if (take_default
	  && CASE_LABEL (gimple_switch_default_label (stmt))
	  != CASE_LABEL (val))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  not a single destination for this "
		     "range\n");
	  return SSA_PROP_VARYING;
	}
      for (++i; i <= j; ++i)
	{
	  if (CASE_LABEL (gimple_switch_label (stmt, i)) != CASE_LABEL (val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  not a single destination for this "
			 "range\n");
	      return SSA_PROP_VARYING;
	    }
	}
      /* [k, l] is the second label range produced for anti-ranges; it
	 must reach the same destination too.  */
      for (; k <= l; ++k)
	{
	  if (CASE_LABEL (gimple_switch_label (stmt, k)) != CASE_LABEL (val))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  not a single destination for this "
			 "range\n");
	      return SSA_PROP_VARYING;
	    }
	}
    }

  *taken_edge_p = find_edge (gimple_bb (stmt),
			     label_to_block (CASE_LABEL (val)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  will take edge to ");
      print_generic_stmt (dump_file, CASE_LABEL (val), 0);
    }
  return SSA_PROP_INTERESTING;
}
  7002. /* Evaluate statement STMT. If the statement produces a useful range,
  7003. return SSA_PROP_INTERESTING and record the SSA name with the
  7004. interesting range into *OUTPUT_P.
  7005. If STMT is a conditional branch and we can determine its truth
  7006. value, the taken edge is recorded in *TAKEN_EDGE_P.
  7007. If STMT produces a varying value, return SSA_PROP_VARYING. */
  7008. static enum ssa_prop_result
  7009. vrp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
  7010. {
  7011. tree def;
  7012. ssa_op_iter iter;
  7013. if (dump_file && (dump_flags & TDF_DETAILS))
  7014. {
  7015. fprintf (dump_file, "\nVisiting statement:\n");
  7016. print_gimple_stmt (dump_file, stmt, 0, dump_flags);
  7017. }
  7018. if (!stmt_interesting_for_vrp (stmt))
  7019. gcc_assert (stmt_ends_bb_p (stmt));
  7020. else if (is_gimple_assign (stmt) || is_gimple_call (stmt))
  7021. return vrp_visit_assignment_or_call (stmt, output_p);
  7022. else if (gimple_code (stmt) == GIMPLE_COND)
  7023. return vrp_visit_cond_stmt (as_a <gcond *> (stmt), taken_edge_p);
  7024. else if (gimple_code (stmt) == GIMPLE_SWITCH)
  7025. return vrp_visit_switch_stmt (as_a <gswitch *> (stmt), taken_edge_p);
  7026. /* All other statements produce nothing of interest for VRP, so mark
  7027. their outputs varying and prevent further simulation. */
  7028. FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
  7029. set_value_range_to_varying (get_value_range (def));
  7030. return SSA_PROP_VARYING;
  7031. }
  7032. /* Union the two value-ranges { *VR0TYPE, *VR0MIN, *VR0MAX } and
  7033. { VR1TYPE, VR0MIN, VR0MAX } and store the result
  7034. in { *VR0TYPE, *VR0MIN, *VR0MAX }. This may not be the smallest
  7035. possible such range. The resulting range is not canonicalized. */
  7036. static void
  7037. union_ranges (enum value_range_type *vr0type,
  7038. tree *vr0min, tree *vr0max,
  7039. enum value_range_type vr1type,
  7040. tree vr1min, tree vr1max)
  7041. {
  7042. bool mineq = operand_equal_p (*vr0min, vr1min, 0);
  7043. bool maxeq = operand_equal_p (*vr0max, vr1max, 0);
  7044. /* [] is vr0, () is vr1 in the following classification comments. */
  7045. if (mineq && maxeq)
  7046. {
  7047. /* [( )] */
  7048. if (*vr0type == vr1type)
  7049. /* Nothing to do for equal ranges. */
  7050. ;
  7051. else if ((*vr0type == VR_RANGE
  7052. && vr1type == VR_ANTI_RANGE)
  7053. || (*vr0type == VR_ANTI_RANGE
  7054. && vr1type == VR_RANGE))
  7055. {
  7056. /* For anti-range with range union the result is varying. */
  7057. goto give_up;
  7058. }
  7059. else
  7060. gcc_unreachable ();
  7061. }
  7062. else if (operand_less_p (*vr0max, vr1min) == 1
  7063. || operand_less_p (vr1max, *vr0min) == 1)
  7064. {
  7065. /* [ ] ( ) or ( ) [ ]
  7066. If the ranges have an empty intersection, result of the union
  7067. operation is the anti-range or if both are anti-ranges
  7068. it covers all. */
  7069. if (*vr0type == VR_ANTI_RANGE
  7070. && vr1type == VR_ANTI_RANGE)
  7071. goto give_up;
  7072. else if (*vr0type == VR_ANTI_RANGE
  7073. && vr1type == VR_RANGE)
  7074. ;
  7075. else if (*vr0type == VR_RANGE
  7076. && vr1type == VR_ANTI_RANGE)
  7077. {
  7078. *vr0type = vr1type;
  7079. *vr0min = vr1min;
  7080. *vr0max = vr1max;
  7081. }
  7082. else if (*vr0type == VR_RANGE
  7083. && vr1type == VR_RANGE)
  7084. {
  7085. /* The result is the convex hull of both ranges. */
  7086. if (operand_less_p (*vr0max, vr1min) == 1)
  7087. {
  7088. /* If the result can be an anti-range, create one. */
  7089. if (TREE_CODE (*vr0max) == INTEGER_CST
  7090. && TREE_CODE (vr1min) == INTEGER_CST
  7091. && vrp_val_is_min (*vr0min)
  7092. && vrp_val_is_max (vr1max))
  7093. {
  7094. tree min = int_const_binop (PLUS_EXPR,
  7095. *vr0max,
  7096. build_int_cst (TREE_TYPE (*vr0max), 1));
  7097. tree max = int_const_binop (MINUS_EXPR,
  7098. vr1min,
  7099. build_int_cst (TREE_TYPE (vr1min), 1));
  7100. if (!operand_less_p (max, min))
  7101. {
  7102. *vr0type = VR_ANTI_RANGE;
  7103. *vr0min = min;
  7104. *vr0max = max;
  7105. }
  7106. else
  7107. *vr0max = vr1max;
  7108. }
  7109. else
  7110. *vr0max = vr1max;
  7111. }
  7112. else
  7113. {
  7114. /* If the result can be an anti-range, create one. */
  7115. if (TREE_CODE (vr1max) == INTEGER_CST
  7116. && TREE_CODE (*vr0min) == INTEGER_CST
  7117. && vrp_val_is_min (vr1min)
  7118. && vrp_val_is_max (*vr0max))
  7119. {
  7120. tree min = int_const_binop (PLUS_EXPR,
  7121. vr1max,
  7122. build_int_cst (TREE_TYPE (vr1max), 1));
  7123. tree max = int_const_binop (MINUS_EXPR,
  7124. *vr0min,
  7125. build_int_cst (TREE_TYPE (*vr0min), 1));
  7126. if (!operand_less_p (max, min))
  7127. {
  7128. *vr0type = VR_ANTI_RANGE;
  7129. *vr0min = min;
  7130. *vr0max = max;
  7131. }
  7132. else
  7133. *vr0min = vr1min;
  7134. }
  7135. else
  7136. *vr0min = vr1min;
  7137. }
  7138. }
  7139. else
  7140. gcc_unreachable ();
  7141. }
  7142. else if ((maxeq || operand_less_p (vr1max, *vr0max) == 1)
  7143. && (mineq || operand_less_p (*vr0min, vr1min) == 1))
  7144. {
  7145. /* [ ( ) ] or [( ) ] or [ ( )] */
  7146. if (*vr0type == VR_RANGE
  7147. && vr1type == VR_RANGE)
  7148. ;
  7149. else if (*vr0type == VR_ANTI_RANGE
  7150. && vr1type == VR_ANTI_RANGE)
  7151. {
  7152. *vr0type = vr1type;
  7153. *vr0min = vr1min;
  7154. *vr0max = vr1max;
  7155. }
  7156. else if (*vr0type == VR_ANTI_RANGE
  7157. && vr1type == VR_RANGE)
  7158. {
  7159. /* Arbitrarily choose the right or left gap. */
  7160. if (!mineq && TREE_CODE (vr1min) == INTEGER_CST)
  7161. *vr0max = int_const_binop (MINUS_EXPR, vr1min,
  7162. build_int_cst (TREE_TYPE (vr1min), 1));
  7163. else if (!maxeq && TREE_CODE (vr1max) == INTEGER_CST)
  7164. *vr0min = int_const_binop (PLUS_EXPR, vr1max,
  7165. build_int_cst (TREE_TYPE (vr1max), 1));
  7166. else
  7167. goto give_up;
  7168. }
  7169. else if (*vr0type == VR_RANGE
  7170. && vr1type == VR_ANTI_RANGE)
  7171. /* The result covers everything. */
  7172. goto give_up;
  7173. else
  7174. gcc_unreachable ();
  7175. }
  7176. else if ((maxeq || operand_less_p (*vr0max, vr1max) == 1)
  7177. && (mineq || operand_less_p (vr1min, *vr0min) == 1))
  7178. {
  7179. /* ( [ ] ) or ([ ] ) or ( [ ]) */
  7180. if (*vr0type == VR_RANGE
  7181. && vr1type == VR_RANGE)
  7182. {
  7183. *vr0type = vr1type;
  7184. *vr0min = vr1min;
  7185. *vr0max = vr1max;
  7186. }
  7187. else if (*vr0type == VR_ANTI_RANGE
  7188. && vr1type == VR_ANTI_RANGE)
  7189. ;
  7190. else if (*vr0type == VR_RANGE
  7191. && vr1type == VR_ANTI_RANGE)
  7192. {
  7193. *vr0type = VR_ANTI_RANGE;
  7194. if (!mineq && TREE_CODE (*vr0min) == INTEGER_CST)
  7195. {
  7196. *vr0max = int_const_binop (MINUS_EXPR, *vr0min,
  7197. build_int_cst (TREE_TYPE (*vr0min), 1));
  7198. *vr0min = vr1min;
  7199. }
  7200. else if (!maxeq && TREE_CODE (*vr0max) == INTEGER_CST)
  7201. {
  7202. *vr0min = int_const_binop (PLUS_EXPR, *vr0max,
  7203. build_int_cst (TREE_TYPE (*vr0max), 1));
  7204. *vr0max = vr1max;
  7205. }
  7206. else
  7207. goto give_up;
  7208. }
  7209. else if (*vr0type == VR_ANTI_RANGE
  7210. && vr1type == VR_RANGE)
  7211. /* The result covers everything. */
  7212. goto give_up;
  7213. else
  7214. gcc_unreachable ();
  7215. }
  7216. else if ((operand_less_p (vr1min, *vr0max) == 1
  7217. || operand_equal_p (vr1min, *vr0max, 0))
  7218. && operand_less_p (*vr0min, vr1min) == 1
  7219. && operand_less_p (*vr0max, vr1max) == 1)
  7220. {
  7221. /* [ ( ] ) or [ ]( ) */
  7222. if (*vr0type == VR_RANGE
  7223. && vr1type == VR_RANGE)
  7224. *vr0max = vr1max;
  7225. else if (*vr0type == VR_ANTI_RANGE
  7226. && vr1type == VR_ANTI_RANGE)
  7227. *vr0min = vr1min;
  7228. else if (*vr0type == VR_ANTI_RANGE
  7229. && vr1type == VR_RANGE)
  7230. {
  7231. if (TREE_CODE (vr1min) == INTEGER_CST)
  7232. *vr0max = int_const_binop (MINUS_EXPR, vr1min,
  7233. build_int_cst (TREE_TYPE (vr1min), 1));
  7234. else
  7235. goto give_up;
  7236. }
  7237. else if (*vr0type == VR_RANGE
  7238. && vr1type == VR_ANTI_RANGE)
  7239. {
  7240. if (TREE_CODE (*vr0max) == INTEGER_CST)
  7241. {
  7242. *vr0type = vr1type;
  7243. *vr0min = int_const_binop (PLUS_EXPR, *vr0max,
  7244. build_int_cst (TREE_TYPE (*vr0max), 1));
  7245. *vr0max = vr1max;
  7246. }
  7247. else
  7248. goto give_up;
  7249. }
  7250. else
  7251. gcc_unreachable ();
  7252. }
  7253. else if ((operand_less_p (*vr0min, vr1max) == 1
  7254. || operand_equal_p (*vr0min, vr1max, 0))
  7255. && operand_less_p (vr1min, *vr0min) == 1
  7256. && operand_less_p (vr1max, *vr0max) == 1)
  7257. {
  7258. /* ( [ ) ] or ( )[ ] */
  7259. if (*vr0type == VR_RANGE
  7260. && vr1type == VR_RANGE)
  7261. *vr0min = vr1min;
  7262. else if (*vr0type == VR_ANTI_RANGE
  7263. && vr1type == VR_ANTI_RANGE)
  7264. *vr0max = vr1max;
  7265. else if (*vr0type == VR_ANTI_RANGE
  7266. && vr1type == VR_RANGE)
  7267. {
  7268. if (TREE_CODE (vr1max) == INTEGER_CST)
  7269. *vr0min = int_const_binop (PLUS_EXPR, vr1max,
  7270. build_int_cst (TREE_TYPE (vr1max), 1));
  7271. else
  7272. goto give_up;
  7273. }
  7274. else if (*vr0type == VR_RANGE
  7275. && vr1type == VR_ANTI_RANGE)
  7276. {
  7277. if (TREE_CODE (*vr0min) == INTEGER_CST)
  7278. {
  7279. *vr0type = vr1type;
  7280. *vr0min = vr1min;
  7281. *vr0max = int_const_binop (MINUS_EXPR, *vr0min,
  7282. build_int_cst (TREE_TYPE (*vr0min), 1));
  7283. }
  7284. else
  7285. goto give_up;
  7286. }
  7287. else
  7288. gcc_unreachable ();
  7289. }
  7290. else
  7291. goto give_up;
  7292. return;
  7293. give_up:
  7294. *vr0type = VR_VARYING;
  7295. *vr0min = NULL_TREE;
  7296. *vr0max = NULL_TREE;
  7297. }
  7298. /* Intersect the two value-ranges { *VR0TYPE, *VR0MIN, *VR0MAX } and
  7299. { VR1TYPE, VR1MIN, VR1MAX } and store the result
  7300. in { *VR0TYPE, *VR0MIN, *VR0MAX }. This may not be the smallest
  7301. possible such range. The resulting range is not canonicalized. */
  7302. static void
  7303. intersect_ranges (enum value_range_type *vr0type,
  7304. tree *vr0min, tree *vr0max,
  7305. enum value_range_type vr1type,
  7306. tree vr1min, tree vr1max)
  7307. {
  7308. bool mineq = operand_equal_p (*vr0min, vr1min, 0);
  7309. bool maxeq = operand_equal_p (*vr0max, vr1max, 0);
  7310. /* [] is vr0, () is vr1 in the following classification comments. */
  7311. if (mineq && maxeq)
  7312. {
  7313. /* [( )] */
  7314. if (*vr0type == vr1type)
  7315. /* Nothing to do for equal ranges. */
  7316. ;
  7317. else if ((*vr0type == VR_RANGE
  7318. && vr1type == VR_ANTI_RANGE)
  7319. || (*vr0type == VR_ANTI_RANGE
  7320. && vr1type == VR_RANGE))
  7321. {
  7322. /* For anti-range with range intersection the result is empty. */
  7323. *vr0type = VR_UNDEFINED;
  7324. *vr0min = NULL_TREE;
  7325. *vr0max = NULL_TREE;
  7326. }
  7327. else
  7328. gcc_unreachable ();
  7329. }
  7330. else if (operand_less_p (*vr0max, vr1min) == 1
  7331. || operand_less_p (vr1max, *vr0min) == 1)
  7332. {
  7333. /* [ ] ( ) or ( ) [ ]
  7334. If the ranges have an empty intersection, the result of the
  7335. intersect operation is the range for intersecting an
  7336. anti-range with a range or empty when intersecting two ranges. */
  7337. if (*vr0type == VR_RANGE
  7338. && vr1type == VR_ANTI_RANGE)
  7339. ;
  7340. else if (*vr0type == VR_ANTI_RANGE
  7341. && vr1type == VR_RANGE)
  7342. {
  7343. *vr0type = vr1type;
  7344. *vr0min = vr1min;
  7345. *vr0max = vr1max;
  7346. }
  7347. else if (*vr0type == VR_RANGE
  7348. && vr1type == VR_RANGE)
  7349. {
  7350. *vr0type = VR_UNDEFINED;
  7351. *vr0min = NULL_TREE;
  7352. *vr0max = NULL_TREE;
  7353. }
  7354. else if (*vr0type == VR_ANTI_RANGE
  7355. && vr1type == VR_ANTI_RANGE)
  7356. {
  7357. /* If the anti-ranges are adjacent to each other merge them. */
  7358. if (TREE_CODE (*vr0max) == INTEGER_CST
  7359. && TREE_CODE (vr1min) == INTEGER_CST
  7360. && operand_less_p (*vr0max, vr1min) == 1
  7361. && integer_onep (int_const_binop (MINUS_EXPR,
  7362. vr1min, *vr0max)))
  7363. *vr0max = vr1max;
  7364. else if (TREE_CODE (vr1max) == INTEGER_CST
  7365. && TREE_CODE (*vr0min) == INTEGER_CST
  7366. && operand_less_p (vr1max, *vr0min) == 1
  7367. && integer_onep (int_const_binop (MINUS_EXPR,
  7368. *vr0min, vr1max)))
  7369. *vr0min = vr1min;
  7370. /* Else arbitrarily take VR0. */
  7371. }
  7372. }
  7373. else if ((maxeq || operand_less_p (vr1max, *vr0max) == 1)
  7374. && (mineq || operand_less_p (*vr0min, vr1min) == 1))
  7375. {
  7376. /* [ ( ) ] or [( ) ] or [ ( )] */
  7377. if (*vr0type == VR_RANGE
  7378. && vr1type == VR_RANGE)
  7379. {
  7380. /* If both are ranges the result is the inner one. */
  7381. *vr0type = vr1type;
  7382. *vr0min = vr1min;
  7383. *vr0max = vr1max;
  7384. }
  7385. else if (*vr0type == VR_RANGE
  7386. && vr1type == VR_ANTI_RANGE)
  7387. {
  7388. /* Choose the right gap if the left one is empty. */
  7389. if (mineq)
  7390. {
  7391. if (TREE_CODE (vr1max) == INTEGER_CST)
  7392. *vr0min = int_const_binop (PLUS_EXPR, vr1max,
  7393. build_int_cst (TREE_TYPE (vr1max), 1));
  7394. else
  7395. *vr0min = vr1max;
  7396. }
  7397. /* Choose the left gap if the right one is empty. */
  7398. else if (maxeq)
  7399. {
  7400. if (TREE_CODE (vr1min) == INTEGER_CST)
  7401. *vr0max = int_const_binop (MINUS_EXPR, vr1min,
  7402. build_int_cst (TREE_TYPE (vr1min), 1));
  7403. else
  7404. *vr0max = vr1min;
  7405. }
  7406. /* Choose the anti-range if the range is effectively varying. */
  7407. else if (vrp_val_is_min (*vr0min)
  7408. && vrp_val_is_max (*vr0max))
  7409. {
  7410. *vr0type = vr1type;
  7411. *vr0min = vr1min;
  7412. *vr0max = vr1max;
  7413. }
  7414. /* Else choose the range. */
  7415. }
  7416. else if (*vr0type == VR_ANTI_RANGE
  7417. && vr1type == VR_ANTI_RANGE)
  7418. /* If both are anti-ranges the result is the outer one. */
  7419. ;
  7420. else if (*vr0type == VR_ANTI_RANGE
  7421. && vr1type == VR_RANGE)
  7422. {
  7423. /* The intersection is empty. */
  7424. *vr0type = VR_UNDEFINED;
  7425. *vr0min = NULL_TREE;
  7426. *vr0max = NULL_TREE;
  7427. }
  7428. else
  7429. gcc_unreachable ();
  7430. }
  7431. else if ((maxeq || operand_less_p (*vr0max, vr1max) == 1)
  7432. && (mineq || operand_less_p (vr1min, *vr0min) == 1))
  7433. {
  7434. /* ( [ ] ) or ([ ] ) or ( [ ]) */
  7435. if (*vr0type == VR_RANGE
  7436. && vr1type == VR_RANGE)
  7437. /* Choose the inner range. */
  7438. ;
  7439. else if (*vr0type == VR_ANTI_RANGE
  7440. && vr1type == VR_RANGE)
  7441. {
  7442. /* Choose the right gap if the left is empty. */
  7443. if (mineq)
  7444. {
  7445. *vr0type = VR_RANGE;
  7446. if (TREE_CODE (*vr0max) == INTEGER_CST)
  7447. *vr0min = int_const_binop (PLUS_EXPR, *vr0max,
  7448. build_int_cst (TREE_TYPE (*vr0max), 1));
  7449. else
  7450. *vr0min = *vr0max;
  7451. *vr0max = vr1max;
  7452. }
  7453. /* Choose the left gap if the right is empty. */
  7454. else if (maxeq)
  7455. {
  7456. *vr0type = VR_RANGE;
  7457. if (TREE_CODE (*vr0min) == INTEGER_CST)
  7458. *vr0max = int_const_binop (MINUS_EXPR, *vr0min,
  7459. build_int_cst (TREE_TYPE (*vr0min), 1));
  7460. else
  7461. *vr0max = *vr0min;
  7462. *vr0min = vr1min;
  7463. }
  7464. /* Choose the anti-range if the range is effectively varying. */
  7465. else if (vrp_val_is_min (vr1min)
  7466. && vrp_val_is_max (vr1max))
  7467. ;
  7468. /* Else choose the range. */
  7469. else
  7470. {
  7471. *vr0type = vr1type;
  7472. *vr0min = vr1min;
  7473. *vr0max = vr1max;
  7474. }
  7475. }
  7476. else if (*vr0type == VR_ANTI_RANGE
  7477. && vr1type == VR_ANTI_RANGE)
  7478. {
  7479. /* If both are anti-ranges the result is the outer one. */
  7480. *vr0type = vr1type;
  7481. *vr0min = vr1min;
  7482. *vr0max = vr1max;
  7483. }
  7484. else if (vr1type == VR_ANTI_RANGE
  7485. && *vr0type == VR_RANGE)
  7486. {
  7487. /* The intersection is empty. */
  7488. *vr0type = VR_UNDEFINED;
  7489. *vr0min = NULL_TREE;
  7490. *vr0max = NULL_TREE;
  7491. }
  7492. else
  7493. gcc_unreachable ();
  7494. }
  7495. else if ((operand_less_p (vr1min, *vr0max) == 1
  7496. || operand_equal_p (vr1min, *vr0max, 0))
  7497. && operand_less_p (*vr0min, vr1min) == 1)
  7498. {
  7499. /* [ ( ] ) or [ ]( ) */
  7500. if (*vr0type == VR_ANTI_RANGE
  7501. && vr1type == VR_ANTI_RANGE)
  7502. *vr0max = vr1max;
  7503. else if (*vr0type == VR_RANGE
  7504. && vr1type == VR_RANGE)
  7505. *vr0min = vr1min;
  7506. else if (*vr0type == VR_RANGE
  7507. && vr1type == VR_ANTI_RANGE)
  7508. {
  7509. if (TREE_CODE (vr1min) == INTEGER_CST)
  7510. *vr0max = int_const_binop (MINUS_EXPR, vr1min,
  7511. build_int_cst (TREE_TYPE (vr1min), 1));
  7512. else
  7513. *vr0max = vr1min;
  7514. }
  7515. else if (*vr0type == VR_ANTI_RANGE
  7516. && vr1type == VR_RANGE)
  7517. {
  7518. *vr0type = VR_RANGE;
  7519. if (TREE_CODE (*vr0max) == INTEGER_CST)
  7520. *vr0min = int_const_binop (PLUS_EXPR, *vr0max,
  7521. build_int_cst (TREE_TYPE (*vr0max), 1));
  7522. else
  7523. *vr0min = *vr0max;
  7524. *vr0max = vr1max;
  7525. }
  7526. else
  7527. gcc_unreachable ();
  7528. }
  7529. else if ((operand_less_p (*vr0min, vr1max) == 1
  7530. || operand_equal_p (*vr0min, vr1max, 0))
  7531. && operand_less_p (vr1min, *vr0min) == 1)
  7532. {
  7533. /* ( [ ) ] or ( )[ ] */
  7534. if (*vr0type == VR_ANTI_RANGE
  7535. && vr1type == VR_ANTI_RANGE)
  7536. *vr0min = vr1min;
  7537. else if (*vr0type == VR_RANGE
  7538. && vr1type == VR_RANGE)
  7539. *vr0max = vr1max;
  7540. else if (*vr0type == VR_RANGE
  7541. && vr1type == VR_ANTI_RANGE)
  7542. {
  7543. if (TREE_CODE (vr1max) == INTEGER_CST)
  7544. *vr0min = int_const_binop (PLUS_EXPR, vr1max,
  7545. build_int_cst (TREE_TYPE (vr1max), 1));
  7546. else
  7547. *vr0min = vr1max;
  7548. }
  7549. else if (*vr0type == VR_ANTI_RANGE
  7550. && vr1type == VR_RANGE)
  7551. {
  7552. *vr0type = VR_RANGE;
  7553. if (TREE_CODE (*vr0min) == INTEGER_CST)
  7554. *vr0max = int_const_binop (MINUS_EXPR, *vr0min,
  7555. build_int_cst (TREE_TYPE (*vr0min), 1));
  7556. else
  7557. *vr0max = *vr0min;
  7558. *vr0min = vr1min;
  7559. }
  7560. else
  7561. gcc_unreachable ();
  7562. }
  7563. /* As a fallback simply use { *VRTYPE, *VR0MIN, *VR0MAX } as
  7564. result for the intersection. That's always a conservative
  7565. correct estimate. */
  7566. return;
  7567. }
/* Intersect the two value-ranges *VR0 and *VR1 and store the result
   in *VR0.  This may not be the smallest possible such range.  */

static void
vrp_intersect_ranges_1 (value_range_t *vr0, value_range_t *vr1)
{
  value_range_t saved;

  /* If either range is VR_VARYING the other one wins.  */
  if (vr1->type == VR_VARYING)
    return;
  if (vr0->type == VR_VARYING)
    {
      copy_value_range (vr0, vr1);
      return;
    }

  /* When either range is VR_UNDEFINED the resulting range is
     VR_UNDEFINED, too.  */
  if (vr0->type == VR_UNDEFINED)
    return;
  if (vr1->type == VR_UNDEFINED)
    {
      set_value_range_to_undefined (vr0);
      return;
    }

  /* Save the original vr0 so we can return it as conservative intersection
     result when our worker turns things to varying.  */
  saved = *vr0;
  intersect_ranges (&vr0->type, &vr0->min, &vr0->max,
		    vr1->type, vr1->min, vr1->max);
  /* Make sure to canonicalize the result though as the inversion of a
     VR_RANGE can still be a VR_RANGE.  */
  set_and_canonicalize_value_range (vr0, vr0->type,
				    vr0->min, vr0->max, vr0->equiv);
  /* If that failed, use the saved original VR0.  */
  if (vr0->type == VR_VARYING)
    {
      *vr0 = saved;
      return;
    }
  /* If the result is VR_UNDEFINED there is no need to mess with
     the equivalencies.  */
  if (vr0->type == VR_UNDEFINED)
    return;

  /* The resulting set of equivalences for range intersection is the union of
     the two sets.  */
  if (vr0->equiv && vr1->equiv && vr0->equiv != vr1->equiv)
    bitmap_ior_into (vr0->equiv, vr1->equiv);
  else if (vr1->equiv && !vr0->equiv)
    /* NOTE(review): this branch only runs when vr0->equiv is NULL, yet
       bitmap_copy requires an allocated destination bitmap.  It looks
       like vr0->equiv needs to be allocated before the copy — confirm
       against how the rest of the pass allocates equivalence bitmaps.  */
    bitmap_copy (vr0->equiv, vr1->equiv);
}
  7617. static void
  7618. vrp_intersect_ranges (value_range_t *vr0, value_range_t *vr1)
  7619. {
  7620. if (dump_file && (dump_flags & TDF_DETAILS))
  7621. {
  7622. fprintf (dump_file, "Intersecting\n ");
  7623. dump_value_range (dump_file, vr0);
  7624. fprintf (dump_file, "\nand\n ");
  7625. dump_value_range (dump_file, vr1);
  7626. fprintf (dump_file, "\n");
  7627. }
  7628. vrp_intersect_ranges_1 (vr0, vr1);
  7629. if (dump_file && (dump_flags & TDF_DETAILS))
  7630. {
  7631. fprintf (dump_file, "to\n ");
  7632. dump_value_range (dump_file, vr0);
  7633. fprintf (dump_file, "\n");
  7634. }
  7635. }
/* Meet operation for value ranges.  Given two value ranges VR0 and
   VR1, store in VR0 a range that contains both VR0 and VR1.  This
   may not be the smallest possible such range.  */

static void
vrp_meet_1 (value_range_t *vr0, value_range_t *vr1)
{
  value_range_t saved;

  /* VR_UNDEFINED is the neutral element of the meet: the other
     operand wins unchanged.  */
  if (vr0->type == VR_UNDEFINED)
    {
      set_value_range (vr0, vr1->type, vr1->min, vr1->max, vr1->equiv);
      return;
    }

  if (vr1->type == VR_UNDEFINED)
    {
      /* VR0 already has the resulting range.  */
      return;
    }

  /* VR_VARYING absorbs everything.  */
  if (vr0->type == VR_VARYING)
    {
      /* Nothing to do.  VR0 already has the resulting range.  */
      return;
    }

  if (vr1->type == VR_VARYING)
    {
      set_value_range_to_varying (vr0);
      return;
    }

  /* Keep a copy of VR0 so a useful anti-range can still be derived
     from it when the union below degrades to VARYING.  */
  saved = *vr0;
  union_ranges (&vr0->type, &vr0->min, &vr0->max,
		vr1->type, vr1->min, vr1->max);
  if (vr0->type == VR_VARYING)
    {
      /* Failed to find an efficient meet.  Before giving up and setting
	 the result to VARYING, see if we can at least derive a useful
	 anti-range.  FIXME, all this nonsense about distinguishing
	 anti-ranges from ranges is necessary because of the odd
	 semantics of range_includes_zero_p and friends.  */
      if (((saved.type == VR_RANGE
	    && range_includes_zero_p (saved.min, saved.max) == 0)
	   || (saved.type == VR_ANTI_RANGE
	       && range_includes_zero_p (saved.min, saved.max) == 1))
	  && ((vr1->type == VR_RANGE
	       && range_includes_zero_p (vr1->min, vr1->max) == 0)
	      || (vr1->type == VR_ANTI_RANGE
		  && range_includes_zero_p (vr1->min, vr1->max) == 1)))
	{
	  /* Both operands exclude zero, so their union excludes zero
	     as well.  */
	  set_value_range_to_nonnull (vr0, TREE_TYPE (saved.min));

	  /* Since this meet operation did not result from the meeting of
	     two equivalent names, VR0 cannot have any equivalences.  */
	  if (vr0->equiv)
	    bitmap_clear (vr0->equiv);
	  return;
	}

      set_value_range_to_varying (vr0);
      return;
    }
  /* Canonicalize the unioned range; this may itself turn the result
     into VARYING, in which case there is nothing more to do.  */
  set_and_canonicalize_value_range (vr0, vr0->type, vr0->min, vr0->max,
				    vr0->equiv);
  if (vr0->type == VR_VARYING)
    return;

  /* The resulting set of equivalences is always the intersection of
     the two sets.  */
  if (vr0->equiv && vr1->equiv && vr0->equiv != vr1->equiv)
    bitmap_and_into (vr0->equiv, vr1->equiv);
  else if (vr0->equiv && !vr1->equiv)
    bitmap_clear (vr0->equiv);
}
  7703. static void
  7704. vrp_meet (value_range_t *vr0, value_range_t *vr1)
  7705. {
  7706. if (dump_file && (dump_flags & TDF_DETAILS))
  7707. {
  7708. fprintf (dump_file, "Meeting\n ");
  7709. dump_value_range (dump_file, vr0);
  7710. fprintf (dump_file, "\nand\n ");
  7711. dump_value_range (dump_file, vr1);
  7712. fprintf (dump_file, "\n");
  7713. }
  7714. vrp_meet_1 (vr0, vr1);
  7715. if (dump_file && (dump_flags & TDF_DETAILS))
  7716. {
  7717. fprintf (dump_file, "to\n ");
  7718. dump_value_range (dump_file, vr0);
  7719. fprintf (dump_file, "\n");
  7720. }
  7721. }
/* Visit all arguments for PHI node PHI that flow through executable
   edges.  If a valid value range can be derived from all the incoming
   value ranges, set a new range for the LHS of PHI.  Returns the
   propagation status for the SSA propagator (VARYING, INTERESTING or
   NOT_INTERESTING).  */

static enum ssa_prop_result
vrp_visit_phi_node (gphi *phi)
{
  size_t i;
  tree lhs = PHI_RESULT (phi);
  value_range_t *lhs_vr = get_value_range (lhs);
  value_range_t vr_result = VR_INITIALIZER;
  bool first = true;
  int edges, old_edges;
  struct loop *l;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  /* Meet the ranges of all arguments flowing in over executable edges;
     EDGES counts how many such edges we saw.  */
  edges = 0;
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   " Argument #%d (%d -> %d %sexecutable)\n",
		   (int) i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = PHI_ARG_DEF (phi, i);
	  value_range_t vr_arg;

	  ++edges;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      vr_arg = *(get_value_range (arg));
	      /* Do not allow equivalences or symbolic ranges to leak in from
		 backedges.  That creates invalid equivalencies.
		 See PR53465 and PR54767.  */
	      if (e->flags & EDGE_DFS_BACK)
		{
		  if (vr_arg.type == VR_RANGE
		      || vr_arg.type == VR_ANTI_RANGE)
		    {
		      vr_arg.equiv = NULL;
		      if (symbolic_range_p (&vr_arg))
			{
			  vr_arg.type = VR_VARYING;
			  vr_arg.min = NULL_TREE;
			  vr_arg.max = NULL_TREE;
			}
		    }
		}
	      else
		{
		  /* If the non-backedge arguments range is VR_VARYING then
		     we can still try recording a simple equivalence.  */
		  if (vr_arg.type == VR_VARYING)
		    {
		      vr_arg.type = VR_RANGE;
		      vr_arg.min = arg;
		      vr_arg.max = arg;
		      vr_arg.equiv = NULL;
		    }
		}
	    }
	  else
	    {
	      /* A constant argument yields the singleton range [arg, arg];
		 strip any stale overflow flag first.  */
	      if (TREE_OVERFLOW_P (arg))
		arg = drop_tree_overflow (arg);
	      vr_arg.type = VR_RANGE;
	      vr_arg.min = arg;
	      vr_arg.max = arg;
	      vr_arg.equiv = NULL;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      fprintf (dump_file, ": ");
	      dump_value_range (dump_file, &vr_arg);
	      fprintf (dump_file, "\n");
	    }

	  if (first)
	    copy_value_range (&vr_result, &vr_arg);
	  else
	    vrp_meet (&vr_result, &vr_arg);
	  first = false;

	  /* Once VARYING, the meet cannot recover — stop early.  */
	  if (vr_result.type == VR_VARYING)
	    break;
	}
    }

  if (vr_result.type == VR_VARYING)
    goto varying;
  else if (vr_result.type == VR_UNDEFINED)
    goto update_range;

  old_edges = vr_phi_edge_counts[SSA_NAME_VERSION (lhs)];
  vr_phi_edge_counts[SSA_NAME_VERSION (lhs)] = edges;

  /* To prevent infinite iterations in the algorithm, derive ranges
     when the new value is slightly bigger or smaller than the
     previous one.  We don't do this if we have seen a new executable
     edge; this helps us avoid an overflow infinity for conditionals
     which are not in a loop.  If the old value-range was VR_UNDEFINED
     use the updated range and iterate one more time.  */
  if (edges > 0
      && gimple_phi_num_args (phi) > 1
      && edges == old_edges
      && lhs_vr->type != VR_UNDEFINED)
    {
      /* Compare old and new ranges, fall back to varying if the
	 values are not comparable.  */
      int cmp_min = compare_values (lhs_vr->min, vr_result.min);
      if (cmp_min == -2)
	goto varying;
      int cmp_max = compare_values (lhs_vr->max, vr_result.max);
      if (cmp_max == -2)
	goto varying;

      /* For non VR_RANGE or for pointers fall back to varying if
	 the range changed.  */
      if ((lhs_vr->type != VR_RANGE || vr_result.type != VR_RANGE
	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && (cmp_min != 0 || cmp_max != 0))
	goto varying;

      /* If the new minimum is larger than the previous one
	 retain the old value.  If the new minimum value is smaller
	 than the previous one and not -INF go all the way to -INF + 1.
	 In the first case, to avoid infinite bouncing between different
	 minimums, and in the other case to avoid iterating millions of
	 times to reach -INF.  Going to -INF + 1 also lets the following
	 iteration compute whether there will be any overflow, at the
	 expense of one additional iteration.  */
      if (cmp_min < 0)
	vr_result.min = lhs_vr->min;
      else if (cmp_min > 0
	       && !vrp_val_is_min (vr_result.min))
	vr_result.min
	  = int_const_binop (PLUS_EXPR,
			     vrp_val_min (TREE_TYPE (vr_result.min)),
			     build_int_cst (TREE_TYPE (vr_result.min), 1));

      /* Similarly for the maximum value.  */
      if (cmp_max > 0)
	vr_result.max = lhs_vr->max;
      else if (cmp_max < 0
	       && !vrp_val_is_max (vr_result.max))
	vr_result.max
	  = int_const_binop (MINUS_EXPR,
			     vrp_val_max (TREE_TYPE (vr_result.min)),
			     build_int_cst (TREE_TYPE (vr_result.min), 1));

      /* If we dropped either bound to +-INF then if this is a loop
	 PHI node SCEV may know more about its value-range.  */
      if ((cmp_min > 0 || cmp_min < 0
	   || cmp_max < 0 || cmp_max > 0)
	  && (l = loop_containing_stmt (phi))
	  && l->header == gimple_bb (phi))
	adjust_range_with_scev (&vr_result, l, phi, lhs);

      /* If we will end up with a (-INF, +INF) range, set it to
	 VARYING.  Same if the previous max value was invalid for
	 the type and we end up with vr_result.min > vr_result.max.  */
      if ((vrp_val_is_max (vr_result.max)
	   && vrp_val_is_min (vr_result.min))
	  || compare_values (vr_result.min,
			     vr_result.max) > 0)
	goto varying;
    }

  /* If the new range is different than the previous value, keep
     iterating.  */
update_range:
  if (update_value_range (lhs, &vr_result))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found new range for ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, ": ");
	  dump_value_range (dump_file, &vr_result);
	  fprintf (dump_file, "\n");
	}

      if (vr_result.type == VR_VARYING)
	return SSA_PROP_VARYING;

      return SSA_PROP_INTERESTING;
    }

  /* Nothing changed, don't add outgoing edges.  */
  return SSA_PROP_NOT_INTERESTING;

  /* No match found.  Set the LHS to VARYING.  */
varying:
  set_value_range_to_varying (lhs_vr);
  return SSA_PROP_VARYING;
}
  7911. /* Simplify boolean operations if the source is known
  7912. to be already a boolean. */
  7913. static bool
  7914. simplify_truth_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
  7915. {
  7916. enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  7917. tree lhs, op0, op1;
  7918. bool need_conversion;
  7919. /* We handle only !=/== case here. */
  7920. gcc_assert (rhs_code == EQ_EXPR || rhs_code == NE_EXPR);
  7921. op0 = gimple_assign_rhs1 (stmt);
  7922. if (!op_with_boolean_value_range_p (op0))
  7923. return false;
  7924. op1 = gimple_assign_rhs2 (stmt);
  7925. if (!op_with_boolean_value_range_p (op1))
  7926. return false;
  7927. /* Reduce number of cases to handle to NE_EXPR. As there is no
  7928. BIT_XNOR_EXPR we cannot replace A == B with a single statement. */
  7929. if (rhs_code == EQ_EXPR)
  7930. {
  7931. if (TREE_CODE (op1) == INTEGER_CST)
  7932. op1 = int_const_binop (BIT_XOR_EXPR, op1,
  7933. build_int_cst (TREE_TYPE (op1), 1));
  7934. else
  7935. return false;
  7936. }
  7937. lhs = gimple_assign_lhs (stmt);
  7938. need_conversion
  7939. = !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0));
  7940. /* Make sure to not sign-extend a 1-bit 1 when converting the result. */
  7941. if (need_conversion
  7942. && !TYPE_UNSIGNED (TREE_TYPE (op0))
  7943. && TYPE_PRECISION (TREE_TYPE (op0)) == 1
  7944. && TYPE_PRECISION (TREE_TYPE (lhs)) > 1)
  7945. return false;
  7946. /* For A != 0 we can substitute A itself. */
  7947. if (integer_zerop (op1))
  7948. gimple_assign_set_rhs_with_ops (gsi,
  7949. need_conversion
  7950. ? NOP_EXPR : TREE_CODE (op0), op0);
  7951. /* For A != B we substitute A ^ B. Either with conversion. */
  7952. else if (need_conversion)
  7953. {
  7954. tree tem = make_ssa_name (TREE_TYPE (op0));
  7955. gassign *newop
  7956. = gimple_build_assign (tem, BIT_XOR_EXPR, op0, op1);
  7957. gsi_insert_before (gsi, newop, GSI_SAME_STMT);
  7958. gimple_assign_set_rhs_with_ops (gsi, NOP_EXPR, tem);
  7959. }
  7960. /* Or without. */
  7961. else
  7962. gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op0, op1);
  7963. update_stmt (gsi_stmt (*gsi));
  7964. return true;
  7965. }
/* Simplify a division or modulo operator to a right shift or
   bitwise and if the first operand is unsigned or is greater
   than zero and the second operand is an exact power of two.
   For TRUNC_MOD_EXPR op0 % op1 with constant op1, optimize it
   into just op0 if op0's range is known to be a subset of
   [-op1 + 1, op1 - 1] for signed and [0, op1 - 1] for unsigned
   modulo.  Returns true if STMT was changed.  */

static bool
simplify_div_or_mod_using_ranges (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree val = NULL;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  value_range_t *vr = get_value_range (op0);

  /* First try the op0 % op1 -> op0 transform: op1 must be a positive
     constant and op0's range must lie entirely below it.  */
  if (rhs_code == TRUNC_MOD_EXPR
      && TREE_CODE (op1) == INTEGER_CST
      && tree_int_cst_sgn (op1) == 1
      && range_int_cst_p (vr)
      && tree_int_cst_lt (vr->max, op1))
    {
      /* The lower bound must also be inside (-op1, op1); trivially so
	 for unsigned or known non-negative op0, otherwise check
	 -op1 < vr->min.  */
      if (TYPE_UNSIGNED (TREE_TYPE (op0))
	  || tree_int_cst_sgn (vr->min) >= 0
	  || tree_int_cst_lt (fold_unary (NEGATE_EXPR, TREE_TYPE (op1), op1),
			      vr->min))
	{
	  /* If op0 already has the range op0 % op1 has,
	     then TRUNC_MOD_EXPR won't change anything.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple_assign_set_rhs_from_tree (&gsi, op0);
	  update_stmt (stmt);
	  return true;
	}
    }

  /* The shift/mask transforms below require a power-of-two divisor.  */
  if (!integer_pow2p (op1))
    return false;

  /* VAL records whether op0 is known non-negative: trivially true for
     unsigned types, otherwise determined from op0's range.  */
  if (TYPE_UNSIGNED (TREE_TYPE (op0)))
    {
      val = integer_one_node;
    }
  else
    {
      bool sop = false;

      val = compare_range_with_value (GE_EXPR, vr, integer_zero_node, &sop);

      /* SOP means the comparison only holds assuming no signed
	 overflow; warn if the user asked for that.  */
      if (val
	  && sop
	  && integer_onep (val)
	  && issue_strict_overflow_warning (WARN_STRICT_OVERFLOW_MISC))
	{
	  location_t location;

	  if (!gimple_has_location (stmt))
	    location = input_location;
	  else
	    location = gimple_location (stmt);
	  warning_at (location, OPT_Wstrict_overflow,
		      "assuming signed overflow does not occur when "
		      "simplifying %</%> or %<%%%> to %<>>%> or %<&%>");
	}
    }

  if (val && integer_onep (val))
    {
      tree t;

      if (rhs_code == TRUNC_DIV_EXPR)
	{
	  /* op0 / 2^k  ->  op0 >> k.  */
	  t = build_int_cst (integer_type_node, tree_log2 (op1));
	  gimple_assign_set_rhs_code (stmt, RSHIFT_EXPR);
	  gimple_assign_set_rhs1 (stmt, op0);
	  gimple_assign_set_rhs2 (stmt, t);
	}
      else
	{
	  /* op0 % 2^k  ->  op0 & (2^k - 1).  */
	  t = build_int_cst (TREE_TYPE (op1), 1);
	  t = int_const_binop (MINUS_EXPR, op1, t);
	  t = fold_convert (TREE_TYPE (op0), t);

	  gimple_assign_set_rhs_code (stmt, BIT_AND_EXPR);
	  gimple_assign_set_rhs1 (stmt, op0);
	  gimple_assign_set_rhs2 (stmt, t);
	}

      update_stmt (stmt);
      return true;
    }

  return false;
}
/* If the operand to an ABS_EXPR is >= 0, then eliminate the
   ABS_EXPR.  If the operand is <= 0, then simplify the
   ABS_EXPR into a NEGATE_EXPR.  Returns true if STMT was changed.  */

static bool
simplify_abs_using_ranges (gimple stmt)
{
  tree val = NULL;
  tree op = gimple_assign_rhs1 (stmt);
  tree type = TREE_TYPE (op);
  value_range_t *vr = get_value_range (op);

  if (TYPE_UNSIGNED (type))
    {
      /* NOTE(review): VAL is set here, but the transformation code
	 below sits in the else-if arm, so for unsigned types the
	 function always falls through to return false and VAL is
	 dead.  Presumably ABS_EXPR of an unsigned operand does not
	 occur in GIMPLE — confirm, or hoist the transform out.  */
      val = integer_zero_node;
    }
  else if (vr)
    {
      bool sop = false;

      /* Encoding: VAL == 1 means op <= 0 (rewrite to negate),
	 VAL == 0 means op >= 0 (rewrite to a plain copy).  */
      val = compare_range_with_value (LE_EXPR, vr, integer_zero_node, &sop);
      if (!val)
	{
	  /* LE was inconclusive; try GE and invert the answer so VAL
	     keeps the encoding described above.  */
	  sop = false;
	  val = compare_range_with_value (GE_EXPR, vr, integer_zero_node,
					  &sop);

	  if (val)
	    {
	      if (integer_zerop (val))
		val = integer_one_node;
	      else if (integer_onep (val))
		val = integer_zero_node;
	    }
	}

      if (val
	  && (integer_onep (val) || integer_zerop (val)))
	{
	  /* The comparison may only hold assuming no signed overflow;
	     warn if the user asked for that.  */
	  if (sop && issue_strict_overflow_warning (WARN_STRICT_OVERFLOW_MISC))
	    {
	      location_t location;

	      if (!gimple_has_location (stmt))
		location = input_location;
	      else
		location = gimple_location (stmt);
	      warning_at (location, OPT_Wstrict_overflow,
			  "assuming signed overflow does not occur when "
			  "simplifying %<abs (X)%> to %<X%> or %<-X%>");
	    }

	  gimple_assign_set_rhs1 (stmt, op);
	  if (integer_onep (val))
	    gimple_assign_set_rhs_code (stmt, NEGATE_EXPR);
	  else
	    gimple_assign_set_rhs_code (stmt, SSA_NAME);
	  update_stmt (stmt);
	  return true;
	}
    }

  return false;
}
/* Optimize away redundant BIT_AND_EXPR and BIT_IOR_EXPR.
   If all the bits that are being cleared by & are already
   known to be zero from VR, or all the bits that are being
   set by | are already known to be one from VR, the bit
   operation is redundant.  Returns true if STMT was changed.  */

static bool
simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
{
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  /* OP holds the operand that can replace the whole operation, or
     NULL_TREE if no simplification applies.  */
  tree op = NULL_TREE;
  value_range_t vr0 = VR_INITIALIZER;
  value_range_t vr1 = VR_INITIALIZER;
  wide_int may_be_nonzero0, may_be_nonzero1;
  wide_int must_be_nonzero0, must_be_nonzero1;
  wide_int mask;

  /* Obtain a value range for each operand: from the lattice for SSA
     names, a singleton range for invariants, otherwise give up.  */
  if (TREE_CODE (op0) == SSA_NAME)
    vr0 = *(get_value_range (op0));
  else if (is_gimple_min_invariant (op0))
    set_value_range_to_value (&vr0, op0, NULL);
  else
    return false;

  if (TREE_CODE (op1) == SSA_NAME)
    vr1 = *(get_value_range (op1));
  else if (is_gimple_min_invariant (op1))
    set_value_range_to_value (&vr1, op1, NULL);
  else
    return false;

  /* Derive per-operand bit masks from the ranges.  */
  if (!zero_nonzero_bits_from_vr (TREE_TYPE (op0), &vr0, &may_be_nonzero0,
				  &must_be_nonzero0))
    return false;

  if (!zero_nonzero_bits_from_vr (TREE_TYPE (op1), &vr1, &may_be_nonzero1,
				  &must_be_nonzero1))
    return false;

  switch (gimple_assign_rhs_code (stmt))
    {
    case BIT_AND_EXPR:
      /* op0 & op1 == op0 when every possibly-set bit of op0 is
	 certainly set in op1 (no bit of op0 can be cleared).  */
      mask = may_be_nonzero0.and_not (must_be_nonzero1);
      if (mask == 0)
	{
	  op = op0;
	  break;
	}
      /* Symmetrically, op0 & op1 == op1.  */
      mask = may_be_nonzero1.and_not (must_be_nonzero0);
      if (mask == 0)
	{
	  op = op1;
	  break;
	}
      break;
    case BIT_IOR_EXPR:
      /* op0 | op1 == op1 when every possibly-set bit of op0 is
	 certainly already set in op1.  */
      mask = may_be_nonzero0.and_not (must_be_nonzero1);
      if (mask == 0)
	{
	  op = op1;
	  break;
	}
      /* Symmetrically, op0 | op1 == op0.  */
      mask = may_be_nonzero1.and_not (must_be_nonzero0);
      if (mask == 0)
	{
	  op = op0;
	  break;
	}
      break;
    default:
      gcc_unreachable ();
    }

  if (op == NULL_TREE)
    return false;

  /* Replace the bit operation with a plain copy of OP.  */
  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op), op);
  update_stmt (gsi_stmt (*gsi));
  return true;
}
/* We are comparing trees OP0 and OP1 using COND_CODE.  OP0 has
   a known value range VR.

   If there is one and only one value which will satisfy the
   conditional, then return that value.  Else return NULL.

   If signed overflow must be undefined for the value to satisfy
   the conditional, then set *STRICT_OVERFLOW_P to true.  */

static tree
test_for_singularity (enum tree_code cond_code, tree op0,
		      tree op1, value_range_t *vr,
		      bool *strict_overflow_p)
{
  tree min = NULL;
  tree max = NULL;

  /* Extract minimum/maximum values which satisfy the
     conditional as it was written.  */
  if (cond_code == LE_EXPR || cond_code == LT_EXPR)
    {
      /* This should not be negative infinity; there is no overflow
	 here.  */
      min = TYPE_MIN_VALUE (TREE_TYPE (op0));

      max = op1;
      if (cond_code == LT_EXPR && !is_overflow_infinity (max))
	{
	  /* For strict less-than, the satisfying set is [min, op1 - 1].  */
	  tree one = build_int_cst (TREE_TYPE (op0), 1);
	  max = fold_build2 (MINUS_EXPR, TREE_TYPE (op0), max, one);
	  /* Suppress spurious overflow warnings from the folded bound.  */
	  if (EXPR_P (max))
	    TREE_NO_WARNING (max) = 1;
	}
    }
  else if (cond_code == GE_EXPR || cond_code == GT_EXPR)
    {
      /* This should not be positive infinity; there is no overflow
	 here.  */
      max = TYPE_MAX_VALUE (TREE_TYPE (op0));

      min = op1;
      if (cond_code == GT_EXPR && !is_overflow_infinity (min))
	{
	  /* For strict greater-than, the satisfying set is [op1 + 1, max].  */
	  tree one = build_int_cst (TREE_TYPE (op0), 1);
	  min = fold_build2 (PLUS_EXPR, TREE_TYPE (op0), min, one);
	  /* Suppress spurious overflow warnings from the folded bound.  */
	  if (EXPR_P (min))
	    TREE_NO_WARNING (min) = 1;
	}
    }

  /* Now refine the minimum and maximum values using any
     value range information we have for op0.  */
  if (min && max)
    {
      if (compare_values (vr->min, min) == 1)
	min = vr->min;
      if (compare_values (vr->max, max) == -1)
	max = vr->max;

      /* If the new min/max values have converged to a single value,
	 then there is only one value which can satisfy the condition,
	 return that value.  */
      if (operand_equal_p (min, max, 0) && is_gimple_min_invariant (min))
	{
	  /* A bound taken from an overflow infinity only holds when
	     signed overflow is undefined; record that fact.  */
	  if ((cond_code == LE_EXPR || cond_code == LT_EXPR)
	      && is_overflow_infinity (vr->max))
	    *strict_overflow_p = true;
	  if ((cond_code == GE_EXPR || cond_code == GT_EXPR)
	      && is_overflow_infinity (vr->min))
	    *strict_overflow_p = true;

	  return min;
	}
    }
  return NULL;
}
/* Return whether the value range *VR fits in an integer type specified
   by DEST_PRECISION and signedness DEST_SGN.  */

static bool
range_fits_type_p (value_range_t *vr, unsigned dest_precision, signop dest_sgn)
{
  tree src_type;
  unsigned src_precision;
  widest_int tem;
  signop src_sgn;

  /* We can only handle integral and pointer types.  */
  src_type = TREE_TYPE (vr->min);
  if (!INTEGRAL_TYPE_P (src_type)
      && !POINTER_TYPE_P (src_type))
    return false;

  /* An extension is fine unless VR is SIGNED and dest_sgn is UNSIGNED,
     and so is an identity transform.  */
  src_precision = TYPE_PRECISION (TREE_TYPE (vr->min));
  src_sgn = TYPE_SIGN (src_type);
  if ((src_precision < dest_precision
       && !(dest_sgn == UNSIGNED && src_sgn == SIGNED))
      || (src_precision == dest_precision && src_sgn == dest_sgn))
    return true;

  /* Now we can only handle ranges with constant bounds.  */
  if (vr->type != VR_RANGE
      || TREE_CODE (vr->min) != INTEGER_CST
      || TREE_CODE (vr->max) != INTEGER_CST)
    return false;

  /* For sign changes, the MSB of the wide_int has to be clear.
     An unsigned value with its MSB set cannot be represented by
     a signed wide_int, while a negative value cannot be represented
     by an unsigned wide_int.  */
  if (src_sgn != dest_sgn
      && (wi::lts_p (vr->min, 0) || wi::lts_p (vr->max, 0)))
    return false;

  /* Then we can perform the conversion on both ends and compare
     the result for equality.  */
  tem = wi::ext (wi::to_widest (vr->min), dest_precision, dest_sgn);
  if (tem != wi::to_widest (vr->min))
    return false;
  tem = wi::ext (wi::to_widest (vr->max), dest_precision, dest_sgn);
  if (tem != wi::to_widest (vr->max))
    return false;

  return true;
}
  8289. /* Simplify a conditional using a relational operator to an equality
  8290. test if the range information indicates only one value can satisfy
  8291. the original conditional. */
  8292. static bool
  8293. simplify_cond_using_ranges (gcond *stmt)
  8294. {
  8295. tree op0 = gimple_cond_lhs (stmt);
  8296. tree op1 = gimple_cond_rhs (stmt);
  8297. enum tree_code cond_code = gimple_cond_code (stmt);
  8298. if (cond_code != NE_EXPR
  8299. && cond_code != EQ_EXPR
  8300. && TREE_CODE (op0) == SSA_NAME
  8301. && INTEGRAL_TYPE_P (TREE_TYPE (op0))
  8302. && is_gimple_min_invariant (op1))
  8303. {
  8304. value_range_t *vr = get_value_range (op0);
  8305. /* If we have range information for OP0, then we might be
  8306. able to simplify this conditional. */
  8307. if (vr->type == VR_RANGE)
  8308. {
  8309. enum warn_strict_overflow_code wc = WARN_STRICT_OVERFLOW_COMPARISON;
  8310. bool sop = false;
  8311. tree new_tree = test_for_singularity (cond_code, op0, op1, vr, &sop);
  8312. if (new_tree
  8313. && (!sop || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))))
  8314. {
  8315. if (dump_file)
  8316. {
  8317. fprintf (dump_file, "Simplified relational ");
  8318. print_gimple_stmt (dump_file, stmt, 0, 0);
  8319. fprintf (dump_file, " into ");
  8320. }
  8321. gimple_cond_set_code (stmt, EQ_EXPR);
  8322. gimple_cond_set_lhs (stmt, op0);
  8323. gimple_cond_set_rhs (stmt, new_tree);
  8324. update_stmt (stmt);
  8325. if (dump_file)
  8326. {
  8327. print_gimple_stmt (dump_file, stmt, 0, 0);
  8328. fprintf (dump_file, "\n");
  8329. }
  8330. if (sop && issue_strict_overflow_warning (wc))
  8331. {
  8332. location_t location = input_location;
  8333. if (gimple_has_location (stmt))
  8334. location = gimple_location (stmt);
  8335. warning_at (location, OPT_Wstrict_overflow,
  8336. "assuming signed overflow does not occur when "
  8337. "simplifying conditional");
  8338. }
  8339. return true;
  8340. }
  8341. /* Try again after inverting the condition. We only deal
  8342. with integral types here, so no need to worry about
  8343. issues with inverting FP comparisons. */
  8344. sop = false;
  8345. new_tree = test_for_singularity
  8346. (invert_tree_comparison (cond_code, false),
  8347. op0, op1, vr, &sop);
  8348. if (new_tree
  8349. && (!sop || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))))
  8350. {
  8351. if (dump_file)
  8352. {
  8353. fprintf (dump_file, "Simplified relational ");
  8354. print_gimple_stmt (dump_file, stmt, 0, 0);
  8355. fprintf (dump_file, " into ");
  8356. }
  8357. gimple_cond_set_code (stmt, NE_EXPR);
  8358. gimple_cond_set_lhs (stmt, op0);
  8359. gimple_cond_set_rhs (stmt, new_tree);
  8360. update_stmt (stmt);
  8361. if (dump_file)
  8362. {
  8363. print_gimple_stmt (dump_file, stmt, 0, 0);
  8364. fprintf (dump_file, "\n");
  8365. }
  8366. if (sop && issue_strict_overflow_warning (wc))
  8367. {
  8368. location_t location = input_location;
  8369. if (gimple_has_location (stmt))
  8370. location = gimple_location (stmt);
  8371. warning_at (location, OPT_Wstrict_overflow,
  8372. "assuming signed overflow does not occur when "
  8373. "simplifying conditional");
  8374. }
  8375. return true;
  8376. }
  8377. }
  8378. }
  8379. /* If we have a comparison of an SSA_NAME (OP0) against a constant,
  8380. see if OP0 was set by a type conversion where the source of
  8381. the conversion is another SSA_NAME with a range that fits
  8382. into the range of OP0's type.
  8383. If so, the conversion is redundant as the earlier SSA_NAME can be
  8384. used for the comparison directly if we just massage the constant in the
  8385. comparison. */
  8386. if (TREE_CODE (op0) == SSA_NAME
  8387. && TREE_CODE (op1) == INTEGER_CST)
  8388. {
  8389. gimple def_stmt = SSA_NAME_DEF_STMT (op0);
  8390. tree innerop;
  8391. if (!is_gimple_assign (def_stmt)
  8392. || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
  8393. return false;
  8394. innerop = gimple_assign_rhs1 (def_stmt);
  8395. if (TREE_CODE (innerop) == SSA_NAME
  8396. && !POINTER_TYPE_P (TREE_TYPE (innerop)))
  8397. {
  8398. value_range_t *vr = get_value_range (innerop);
  8399. if (range_int_cst_p (vr)
  8400. && range_fits_type_p (vr,
  8401. TYPE_PRECISION (TREE_TYPE (op0)),
  8402. TYPE_SIGN (TREE_TYPE (op0)))
  8403. && int_fits_type_p (op1, TREE_TYPE (innerop))
  8404. /* The range must not have overflowed, or if it did overflow
  8405. we must not be wrapping/trapping overflow and optimizing
  8406. with strict overflow semantics. */
  8407. && ((!is_negative_overflow_infinity (vr->min)
  8408. && !is_positive_overflow_infinity (vr->max))
  8409. || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (innerop))))
  8410. {
  8411. /* If the range overflowed and the user has asked for warnings
  8412. when strict overflow semantics were used to optimize code,
  8413. issue an appropriate warning. */
  8414. if (cond_code != EQ_EXPR && cond_code != NE_EXPR
  8415. && (is_negative_overflow_infinity (vr->min)
  8416. || is_positive_overflow_infinity (vr->max))
  8417. && issue_strict_overflow_warning (WARN_STRICT_OVERFLOW_CONDITIONAL))
  8418. {
  8419. location_t location;
  8420. if (!gimple_has_location (stmt))
  8421. location = input_location;
  8422. else
  8423. location = gimple_location (stmt);
  8424. warning_at (location, OPT_Wstrict_overflow,
  8425. "assuming signed overflow does not occur when "
  8426. "simplifying conditional");
  8427. }
  8428. tree newconst = fold_convert (TREE_TYPE (innerop), op1);
  8429. gimple_cond_set_lhs (stmt, innerop);
  8430. gimple_cond_set_rhs (stmt, newconst);
  8431. return true;
  8432. }
  8433. }
  8434. }
  8435. return false;
  8436. }
/* Simplify a switch statement using the value range of the switch
   argument.

   Labels that the range proves unreachable are dropped: a new case
   label vector is built and queued in TO_UPDATE_SWITCH_STMTS, and the
   now-dead outgoing edges are queued in TO_REMOVE_EDGES.  The actual
   CFG/stmt updates happen later (see execute_vrp), so this function
   always returns false even when it queued work.  */

static bool
simplify_switch_using_ranges (gswitch *stmt)
{
  tree op = gimple_switch_index (stmt);
  value_range_t *vr;
  bool take_default;
  edge e;
  edge_iterator ei;
  /* [i, j] and [k, l] are (up to) two index ranges of still-reachable
     case labels; two ranges are needed because an anti-range can leave
     labels alive on both sides of the excluded interval.  */
  size_t i = 0, j = 0, n, n2;
  tree vec2;
  switch_update su;
  size_t k = 1, l = 0;

  if (TREE_CODE (op) == SSA_NAME)
    {
      vr = get_value_range (op);

      /* We can only handle integer ranges.  */
      if ((vr->type != VR_RANGE
	   && vr->type != VR_ANTI_RANGE)
	  || symbolic_range_p (vr))
	return false;

      /* Find case label for min/max of the value range.  */
      take_default = !find_case_label_ranges (stmt, vr, &i, &j, &k, &l);
    }
  else if (TREE_CODE (op) == INTEGER_CST)
    {
      /* Constant index: at most one case label (or just the default)
	 survives.  */
      take_default = !find_case_label_index (stmt, 1, op, &i);
      if (take_default)
	{
	  /* Empty label range [1, 0] — only the default is taken.  */
	  i = 1;
	  j = 0;
	}
      else
	{
	  j = i;
	}
    }
  else
    return false;

  n = gimple_switch_num_labels (stmt);

  /* Bail out if this is just all edges taken.  */
  if (i == 1
      && j == n - 1
      && take_default)
    return false;

  /* Build a new vector of taken case labels.  */
  vec2 = make_tree_vec (j - i + 1 + l - k + 1 + (int)take_default);
  n2 = 0;

  /* Add the default edge, if necessary.  */
  if (take_default)
    TREE_VEC_ELT (vec2, n2++) = gimple_switch_default_label (stmt);

  for (; i <= j; ++i, ++n2)
    TREE_VEC_ELT (vec2, n2) = gimple_switch_label (stmt, i);

  for (; k <= l; ++k, ++n2)
    TREE_VEC_ELT (vec2, n2) = gimple_switch_label (stmt, k);

  /* Mark needed edges with a sentinel in e->aux so the sweep below can
     tell kept edges from dead ones.  */
  for (i = 0; i < n2; ++i)
    {
      e = find_edge (gimple_bb (stmt),
		     label_to_block (CASE_LABEL (TREE_VEC_ELT (vec2, i))));
      e->aux = (void *)-1;
    }

  /* Queue not needed edges for later removal.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
    {
      if (e->aux == (void *)-1)
	{
	  /* Edge is still reachable: clear the marker and keep it.  */
	  e->aux = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "removing unreachable case label\n");
	}
      to_remove_edges.safe_push (e);
      e->flags &= ~EDGE_EXECUTABLE;
    }

  /* And queue an update for the stmt.  */
  su.stmt = stmt;
  su.vec = vec2;
  to_update_switch_stmts.safe_push (su);
  return false;
}
/* Simplify an integral conversion from an SSA name in STMT.

   STMT is the second of two chained conversions
   inner -> middle -> final.  If, given the value range of INNEROP,
   converting directly inner -> final yields the same result, rewrite
   STMT to use INNEROP and drop the intermediate conversion.  Returns
   true if STMT was changed.  */

static bool
simplify_conversion_using_ranges (gimple stmt)
{
  tree innerop, middleop, finaltype;
  gimple def_stmt;
  value_range_t *innervr;
  signop inner_sgn, middle_sgn, final_sgn;
  unsigned inner_prec, middle_prec, final_prec;
  widest_int innermin, innermed, innermax, middlemin, middlemed, middlemax;

  finaltype = TREE_TYPE (gimple_assign_lhs (stmt));
  if (!INTEGRAL_TYPE_P (finaltype))
    return false;
  middleop = gimple_assign_rhs1 (stmt);
  def_stmt = SSA_NAME_DEF_STMT (middleop);
  if (!is_gimple_assign (def_stmt)
      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    return false;
  innerop = gimple_assign_rhs1 (def_stmt);
  if (TREE_CODE (innerop) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (innerop))
    return false;

  /* Get the value-range of the inner operand.  Only constant-bound
     ranges are handled.  */
  innervr = get_value_range (innerop);
  if (innervr->type != VR_RANGE
      || TREE_CODE (innervr->min) != INTEGER_CST
      || TREE_CODE (innervr->max) != INTEGER_CST)
    return false;

  /* Simulate the conversion chain to check if the result is equal if
     the middle conversion is removed.  */
  innermin = wi::to_widest (innervr->min);
  innermax = wi::to_widest (innervr->max);

  inner_prec = TYPE_PRECISION (TREE_TYPE (innerop));
  middle_prec = TYPE_PRECISION (TREE_TYPE (middleop));
  final_prec = TYPE_PRECISION (finaltype);

  /* If the first conversion is not injective, the second must not
     be widening.  */
  if (wi::gtu_p (innermax - innermin,
		 wi::mask <widest_int> (middle_prec, false))
      && middle_prec < final_prec)
    return false;
  /* We also want a medium value so that we can track the effect that
     narrowing conversions with sign change have.  */
  inner_sgn = TYPE_SIGN (TREE_TYPE (innerop));
  if (inner_sgn == UNSIGNED)
    innermed = wi::shifted_mask <widest_int> (1, inner_prec - 1, false);
  else
    innermed = 0;
  /* Only use the medium value if it actually lies strictly inside
     [innermin, innermax]; otherwise fall back to innermin so the
     check below degenerates harmlessly.  */
  if (wi::cmp (innermin, innermed, inner_sgn) >= 0
      || wi::cmp (innermed, innermax, inner_sgn) >= 0)
    innermed = innermin;

  middle_sgn = TYPE_SIGN (TREE_TYPE (middleop));
  middlemin = wi::ext (innermin, middle_prec, middle_sgn);
  middlemed = wi::ext (innermed, middle_prec, middle_sgn);
  middlemax = wi::ext (innermax, middle_prec, middle_sgn);

  /* Require that the final conversion applied to both the original
     and the intermediate range produces the same result.  */
  final_sgn = TYPE_SIGN (finaltype);
  if (wi::ext (middlemin, final_prec, final_sgn)
      != wi::ext (innermin, final_prec, final_sgn)
      || wi::ext (middlemed, final_prec, final_sgn)
	 != wi::ext (innermed, final_prec, final_sgn)
      || wi::ext (middlemax, final_prec, final_sgn)
	 != wi::ext (innermax, final_prec, final_sgn))
    return false;

  gimple_assign_set_rhs1 (stmt, innerop);
  update_stmt (stmt);
  return true;
}
/* Simplify a conversion from integral SSA name to float in STMT.

   If the known value range of the integer operand fits a (possibly
   narrower, possibly signed) mode for which the target has a direct
   int->float instruction, insert a preliminary integer conversion so
   the float conversion can use that instruction.  Returns true if
   STMT was changed.  */

static bool
simplify_float_conversion_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  value_range_t *vr = get_value_range (rhs1);
  machine_mode fltmode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt)));
  machine_mode mode;
  tree tem;
  gassign *conv;

  /* We can only handle constant ranges.  */
  if (vr->type != VR_RANGE
      || TREE_CODE (vr->min) != INTEGER_CST
      || TREE_CODE (vr->max) != INTEGER_CST)
    return false;

  /* First check if we can use a signed type in place of an unsigned.
     (can_float_p's last argument: 0 = signed conversion.)  */
  if (TYPE_UNSIGNED (TREE_TYPE (rhs1))
      && (can_float_p (fltmode, TYPE_MODE (TREE_TYPE (rhs1)), 0)
	  != CODE_FOR_nothing)
      && range_fits_type_p (vr, TYPE_PRECISION (TREE_TYPE (rhs1)), SIGNED))
    mode = TYPE_MODE (TREE_TYPE (rhs1));
  /* If we can do the conversion in the current input mode do nothing.  */
  else if (can_float_p (fltmode, TYPE_MODE (TREE_TYPE (rhs1)),
			TYPE_UNSIGNED (TREE_TYPE (rhs1))) != CODE_FOR_nothing)
    return false;
  /* Otherwise search for a mode we can use, starting from the narrowest
     integer mode available.  */
  else
    {
      mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
      do
	{
	  /* If we cannot do a signed conversion to float from mode
	     or if the value-range does not fit in the signed type
	     try with a wider mode.  */
	  if (can_float_p (fltmode, mode, 0) != CODE_FOR_nothing
	      && range_fits_type_p (vr, GET_MODE_PRECISION (mode), SIGNED))
	    break;

	  mode = GET_MODE_WIDER_MODE (mode);
	  /* But do not widen the input.  Instead leave that to the
	     optabs expansion code.  */
	  if (GET_MODE_PRECISION (mode) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	    return false;
	}
      while (mode != VOIDmode);
      if (mode == VOIDmode)
	return false;
    }

  /* It works, insert a truncation or sign-change before the
     float conversion.  */
  tem = make_ssa_name (build_nonstandard_integer_type
			 (GET_MODE_PRECISION (mode), 0));
  conv = gimple_build_assign (tem, NOP_EXPR, rhs1);
  gsi_insert_before (gsi, conv, GSI_SAME_STMT);
  gimple_assign_set_rhs1 (stmt, tem);
  update_stmt (stmt);

  return true;
}
/* Simplify an internal fn call using ranges if possible.

   Handles UBSAN_CHECK_{ADD,SUB,MUL} and {ADD,SUB,MUL}_OVERFLOW calls:
   when check_for_binary_op_overflow can decide from the operand ranges
   whether overflow occurs, the call is replaced by plain arithmetic
   (plus, for the *_OVERFLOW variants, a COMPLEX_EXPR carrying the known
   overflow flag).  Returns true if STMT was replaced.  */

static bool
simplify_internal_call_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
{
  enum tree_code subcode;
  bool is_ubsan = false;
  bool ovf = false;

  /* Map the internal function to the tree arithmetic code it checks.  */
  switch (gimple_call_internal_fn (stmt))
    {
    case IFN_UBSAN_CHECK_ADD:
      subcode = PLUS_EXPR;
      is_ubsan = true;
      break;
    case IFN_UBSAN_CHECK_SUB:
      subcode = MINUS_EXPR;
      is_ubsan = true;
      break;
    case IFN_UBSAN_CHECK_MUL:
      subcode = MULT_EXPR;
      is_ubsan = true;
      break;
    case IFN_ADD_OVERFLOW:
      subcode = PLUS_EXPR;
      break;
    case IFN_SUB_OVERFLOW:
      subcode = MINUS_EXPR;
      break;
    case IFN_MUL_OVERFLOW:
      subcode = MULT_EXPR;
      break;
    default:
      return false;
    }

  tree op0 = gimple_call_arg (stmt, 0);
  tree op1 = gimple_call_arg (stmt, 1);
  tree type;
  if (is_ubsan)
    type = TREE_TYPE (op0);
  else if (gimple_call_lhs (stmt) == NULL_TREE)
    return false;
  else
    /* The *_OVERFLOW ifns return a complex value; the element type is
       the arithmetic type.  */
    type = TREE_TYPE (TREE_TYPE (gimple_call_lhs (stmt)));

  /* For ubsan checks we can only simplify when no overflow happens;
     for *_OVERFLOW either outcome is fine as long as it is known.  */
  if (!check_for_binary_op_overflow (subcode, type, op0, op1, &ovf)
      || (is_ubsan && ovf))
    return false;

  gimple g;
  location_t loc = gimple_location (stmt);
  if (is_ubsan)
    g = gimple_build_assign (gimple_call_lhs (stmt), subcode, op0, op1);
  else
    {
      int prec = TYPE_PRECISION (type);
      tree utype = type;
      /* Perform the arithmetic in an unsigned type when overflow is
	 known to happen (or operand types mismatch) to avoid
	 introducing signed-overflow UB.  */
      if (ovf
	  || !useless_type_conversion_p (type, TREE_TYPE (op0))
	  || !useless_type_conversion_p (type, TREE_TYPE (op1)))
	utype = build_nonstandard_integer_type (prec, 1);

      /* Convert both operands to UTYPE, materializing conversions as
	 statements for non-constant operands.  */
      if (TREE_CODE (op0) == INTEGER_CST)
	op0 = fold_convert (utype, op0);
      else if (!useless_type_conversion_p (utype, TREE_TYPE (op0)))
	{
	  g = gimple_build_assign (make_ssa_name (utype), NOP_EXPR, op0);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  op0 = gimple_assign_lhs (g);
	}
      if (TREE_CODE (op1) == INTEGER_CST)
	op1 = fold_convert (utype, op1);
      else if (!useless_type_conversion_p (utype, TREE_TYPE (op1)))
	{
	  g = gimple_build_assign (make_ssa_name (utype), NOP_EXPR, op1);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  op1 = gimple_assign_lhs (g);
	}

      g = gimple_build_assign (make_ssa_name (utype), subcode, op0, op1);
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);

      /* Convert the result back to TYPE if needed.  */
      if (utype != type)
	{
	  g = gimple_build_assign (make_ssa_name (type), NOP_EXPR,
				   gimple_assign_lhs (g));
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}

      /* Rebuild the complex result: (result, known overflow flag).  */
      g = gimple_build_assign (gimple_call_lhs (stmt), COMPLEX_EXPR,
			       gimple_assign_lhs (g),
			       build_int_cst (type, ovf));
    }
  gimple_set_location (g, loc);
  gsi_replace (gsi, g, false);
  return true;
}
  8741. /* Simplify STMT using ranges if possible. */
  8742. static bool
  8743. simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
  8744. {
  8745. gimple stmt = gsi_stmt (*gsi);
  8746. if (is_gimple_assign (stmt))
  8747. {
  8748. enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  8749. tree rhs1 = gimple_assign_rhs1 (stmt);
  8750. switch (rhs_code)
  8751. {
  8752. case EQ_EXPR:
  8753. case NE_EXPR:
  8754. /* Transform EQ_EXPR, NE_EXPR into BIT_XOR_EXPR or identity
  8755. if the RHS is zero or one, and the LHS are known to be boolean
  8756. values. */
  8757. if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
  8758. return simplify_truth_ops_using_ranges (gsi, stmt);
  8759. break;
  8760. /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
  8761. and BIT_AND_EXPR respectively if the first operand is greater
  8762. than zero and the second operand is an exact power of two.
  8763. Also optimize TRUNC_MOD_EXPR away if the second operand is
  8764. constant and the first operand already has the right value
  8765. range. */
  8766. case TRUNC_DIV_EXPR:
  8767. case TRUNC_MOD_EXPR:
  8768. if (TREE_CODE (rhs1) == SSA_NAME
  8769. && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
  8770. return simplify_div_or_mod_using_ranges (stmt);
  8771. break;
  8772. /* Transform ABS (X) into X or -X as appropriate. */
  8773. case ABS_EXPR:
  8774. if (TREE_CODE (rhs1) == SSA_NAME
  8775. && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
  8776. return simplify_abs_using_ranges (stmt);
  8777. break;
  8778. case BIT_AND_EXPR:
  8779. case BIT_IOR_EXPR:
  8780. /* Optimize away BIT_AND_EXPR and BIT_IOR_EXPR
  8781. if all the bits being cleared are already cleared or
  8782. all the bits being set are already set. */
  8783. if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
  8784. return simplify_bit_ops_using_ranges (gsi, stmt);
  8785. break;
  8786. CASE_CONVERT:
  8787. if (TREE_CODE (rhs1) == SSA_NAME
  8788. && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
  8789. return simplify_conversion_using_ranges (stmt);
  8790. break;
  8791. case FLOAT_EXPR:
  8792. if (TREE_CODE (rhs1) == SSA_NAME
  8793. && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
  8794. return simplify_float_conversion_using_ranges (gsi, stmt);
  8795. break;
  8796. default:
  8797. break;
  8798. }
  8799. }
  8800. else if (gimple_code (stmt) == GIMPLE_COND)
  8801. return simplify_cond_using_ranges (as_a <gcond *> (stmt));
  8802. else if (gimple_code (stmt) == GIMPLE_SWITCH)
  8803. return simplify_switch_using_ranges (as_a <gswitch *> (stmt));
  8804. else if (is_gimple_call (stmt)
  8805. && gimple_call_internal_p (stmt))
  8806. return simplify_internal_call_using_ranges (gsi, stmt);
  8807. return false;
  8808. }
  8809. /* If the statement pointed by SI has a predicate whose value can be
  8810. computed using the value range information computed by VRP, compute
  8811. its value and return true. Otherwise, return false. */
  8812. static bool
  8813. fold_predicate_in (gimple_stmt_iterator *si)
  8814. {
  8815. bool assignment_p = false;
  8816. tree val;
  8817. gimple stmt = gsi_stmt (*si);
  8818. if (is_gimple_assign (stmt)
  8819. && TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
  8820. {
  8821. assignment_p = true;
  8822. val = vrp_evaluate_conditional (gimple_assign_rhs_code (stmt),
  8823. gimple_assign_rhs1 (stmt),
  8824. gimple_assign_rhs2 (stmt),
  8825. stmt);
  8826. }
  8827. else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
  8828. val = vrp_evaluate_conditional (gimple_cond_code (cond_stmt),
  8829. gimple_cond_lhs (cond_stmt),
  8830. gimple_cond_rhs (cond_stmt),
  8831. stmt);
  8832. else
  8833. return false;
  8834. if (val)
  8835. {
  8836. if (assignment_p)
  8837. val = fold_convert (gimple_expr_type (stmt), val);
  8838. if (dump_file)
  8839. {
  8840. fprintf (dump_file, "Folding predicate ");
  8841. print_gimple_expr (dump_file, stmt, 0, 0);
  8842. fprintf (dump_file, " to ");
  8843. print_generic_expr (dump_file, val, 0);
  8844. fprintf (dump_file, "\n");
  8845. }
  8846. if (is_gimple_assign (stmt))
  8847. gimple_assign_set_rhs_from_tree (si, val);
  8848. else
  8849. {
  8850. gcc_assert (gimple_code (stmt) == GIMPLE_COND);
  8851. gcond *cond_stmt = as_a <gcond *> (stmt);
  8852. if (integer_zerop (val))
  8853. gimple_cond_make_false (cond_stmt);
  8854. else if (integer_onep (val))
  8855. gimple_cond_make_true (cond_stmt);
  8856. else
  8857. gcc_unreachable ();
  8858. }
  8859. return true;
  8860. }
  8861. return false;
  8862. }
  8863. /* Callback for substitute_and_fold folding the stmt at *SI. */
  8864. static bool
  8865. vrp_fold_stmt (gimple_stmt_iterator *si)
  8866. {
  8867. if (fold_predicate_in (si))
  8868. return true;
  8869. return simplify_stmt_using_ranges (si);
  8870. }
/* Stack of dest,src equivalency pairs that need to be restored after
   each attempt to thread a block's incoming edge to an outgoing edge.
   A NULL entry is used to mark the end of pairs which need to be
   restored.  Allocated in identify_jump_threads and released in
   finalize_jump_threads.  */
static vec<tree> equiv_stack;
  8876. /* A trivial wrapper so that we can present the generic jump threading
  8877. code with a simple API for simplifying statements. STMT is the
  8878. statement we want to simplify, WITHIN_STMT provides the location
  8879. for any overflow warnings. */
  8880. static tree
  8881. simplify_stmt_for_jump_threading (gimple stmt, gimple within_stmt)
  8882. {
  8883. if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
  8884. return vrp_evaluate_conditional (gimple_cond_code (cond_stmt),
  8885. gimple_cond_lhs (cond_stmt),
  8886. gimple_cond_rhs (cond_stmt),
  8887. within_stmt);
  8888. if (gassign *assign_stmt = dyn_cast <gassign *> (stmt))
  8889. {
  8890. value_range_t new_vr = VR_INITIALIZER;
  8891. tree lhs = gimple_assign_lhs (assign_stmt);
  8892. if (TREE_CODE (lhs) == SSA_NAME
  8893. && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
  8894. || POINTER_TYPE_P (TREE_TYPE (lhs))))
  8895. {
  8896. extract_range_from_assignment (&new_vr, assign_stmt);
  8897. if (range_int_cst_singleton_p (&new_vr))
  8898. return new_vr.min;
  8899. }
  8900. }
  8901. return NULL_TREE;
  8902. }
/* Blocks which have more than one predecessor and more than
   one successor present jump threading opportunities, i.e.,
   when the block is reached from a specific predecessor, we
   may be able to determine which of the outgoing edges will
   be traversed.  When this optimization applies, we are able
   to avoid conditionals at runtime and we may expose secondary
   optimization opportunities.

   This routine is effectively a driver for the generic jump
   threading code.  It basically just presents the generic code
   with edges that may be suitable for jump threading.

   Unlike DOM, we do not iterate VRP if jump threading was successful.
   While iterating may expose new opportunities for VRP, it is expected
   those opportunities would be very limited and the compile time cost
   to expose those opportunities would be significant.

   As jump threading opportunities are discovered, they are registered
   for later realization.  */

static void
identify_jump_threads (void)
{
  basic_block bb;
  gcond *dummy;
  int i;
  edge e;

  /* Ugh.  When substituting values earlier in this pass we can
     wipe the dominance information.  So rebuild the dominator
     information as we need it within the jump threading code.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* We do not allow VRP information to be used for jump threading
     across a back edge in the CFG.  Otherwise it becomes too
     difficult to avoid eliminating loop exit tests.  Of course
     EDGE_DFS_BACK is not accurate at this time so we have to
     recompute it.  */
  mark_dfs_back_edges ();

  /* Do not thread across edges we are about to remove.  Just marking
     them as EDGE_DFS_BACK will do.  */
  FOR_EACH_VEC_ELT (to_remove_edges, i, e)
    e->flags |= EDGE_DFS_BACK;

  /* Allocate our unwinder stack to unwind any temporary equivalences
     that might be recorded.  */
  equiv_stack.create (20);

  /* To avoid lots of silly node creation, we create a single
     conditional and just modify it in-place when attempting to
     thread jumps.  */
  dummy = gimple_build_cond (EQ_EXPR,
			     integer_zero_node, integer_zero_node,
			     NULL, NULL);

  /* Walk through all the blocks finding those which present a
     potential jump threading opportunity.  We could set this up
     as a dominator walker and record data during the walk, but
     I doubt it's worth the effort for the classes of jump
     threading opportunities we are trying to identify at this
     point in compilation.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last;

      /* If the generic jump threading code does not find this block
	 interesting, then there is nothing to do.  */
      if (! potentially_threadable_block (bb))
	continue;

      last = last_stmt (bb);

      /* We're basically looking for a switch or any kind of conditional with
	 integral or pointer type arguments.  Note the type of the second
	 argument will be the same as the first argument, so no need to
	 check it explicitly.

	 We also handle the case where there are no statements in the
	 block.  This come up with forwarder blocks that are not
	 optimized away because they lead to a loop header.  But we do
	 want to thread through them as we can sometimes thread to the
	 loop exit which is obviously profitable.  */
      if (!last
	  || gimple_code (last) == GIMPLE_SWITCH
	  || (gimple_code (last) == GIMPLE_COND
	      && TREE_CODE (gimple_cond_lhs (last)) == SSA_NAME
	      && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (last)))
		  || POINTER_TYPE_P (TREE_TYPE (gimple_cond_lhs (last))))
	      && (TREE_CODE (gimple_cond_rhs (last)) == SSA_NAME
		  || is_gimple_min_invariant (gimple_cond_rhs (last)))))
	{
	  edge_iterator ei;

	  /* We've got a block with multiple predecessors and multiple
	     successors which also ends in a suitable conditional or
	     switch statement.  For each predecessor, see if we can thread
	     it to a specific successor.  */
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      /* Do not thread across back edges or abnormal edges
		 in the CFG.  */
	      if (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
		continue;

	      thread_across_edge (dummy, e, true, &equiv_stack,
				  simplify_stmt_for_jump_threading);
	    }
	}
    }

  /* We do not actually update the CFG or SSA graphs at this point as
     ASSERT_EXPRs are still in the IL and cfg cleanup code does not yet
     handle ASSERT_EXPRs gracefully.  */
}
/* We identified all the jump threading opportunities earlier, but could
   not transform the CFG at that time.  This routine transforms the
   CFG and arranges for the dominator tree to be rebuilt if necessary.

   Note the SSA graph update will occur during the normal TODO
   processing by the pass manager.  */

static void
finalize_jump_threads (void)
{
  /* Realize the threads registered by identify_jump_threads, then
     release the equivalence unwinder stack it allocated.  */
  thread_through_all_blocks (false);
  equiv_stack.release ();
}
/* Traverse all the blocks folding conditionals with known ranges.

   Also exports the computed ranges to the SSA names' range info,
   identifies jump threading opportunities, and finally releases all
   VRP data structures.  */

static void
vrp_finalize (void)
{
  size_t i;

  values_propagated = true;

  if (dump_file)
    {
      fprintf (dump_file, "\nValue ranges after VRP:\n\n");
      dump_all_value_ranges (dump_file);
      fprintf (dump_file, "\n");
    }

  substitute_and_fold (op_with_constant_singleton_value_range,
		       vrp_fold_stmt, false);

  if (warn_array_bounds && first_pass_instance)
    check_all_array_refs ();

  /* We must identify jump threading opportunities before we release
     the datastructures built by VRP.  */
  identify_jump_threads ();

  /* Set value range to non pointer SSA_NAMEs.  */
  for (i = 0; i < num_vr_values; i++)
    if (vr_value[i])
      {
	tree name = ssa_name (i);

	/* Skip names without useful info: released names, pointers,
	   and varying/undefined ranges.  */
	if (!name
	    || POINTER_TYPE_P (TREE_TYPE (name))
	    || (vr_value[i]->type == VR_VARYING)
	    || (vr_value[i]->type == VR_UNDEFINED))
	  continue;

	if ((TREE_CODE (vr_value[i]->min) == INTEGER_CST)
	    && (TREE_CODE (vr_value[i]->max) == INTEGER_CST)
	    && (vr_value[i]->type == VR_RANGE
		|| vr_value[i]->type == VR_ANTI_RANGE))
	  set_range_info (name, vr_value[i]->type, vr_value[i]->min,
			  vr_value[i]->max);
      }

  /* Free allocated memory.  */
  for (i = 0; i < num_vr_values; i++)
    if (vr_value[i])
      {
	BITMAP_FREE (vr_value[i]->equiv);
	free (vr_value[i]);
      }

  free (vr_value);
  free (vr_phi_edge_counts);

  /* So that we can distinguish between VRP data being available
     and not available.  */
  vr_value = NULL;
  vr_phi_edge_counts = NULL;
}
  9062. /* Main entry point to VRP (Value Range Propagation). This pass is
  9063. loosely based on J. R. C. Patterson, ``Accurate Static Branch
  9064. Prediction by Value Range Propagation,'' in SIGPLAN Conference on
  9065. Programming Language Design and Implementation, pp. 67-78, 1995.
  9066. Also available at http://citeseer.ist.psu.edu/patterson95accurate.html
  9067. This is essentially an SSA-CCP pass modified to deal with ranges
  9068. instead of constants.
  9069. While propagating ranges, we may find that two or more SSA name
  9070. have equivalent, though distinct ranges. For instance,
  9071. 1 x_9 = p_3->a;
  9072. 2 p_4 = ASSERT_EXPR <p_3, p_3 != 0>
  9073. 3 if (p_4 == q_2)
  9074. 4 p_5 = ASSERT_EXPR <p_4, p_4 == q_2>;
  9075. 5 endif
  9076. 6 if (q_2)
  9077. In the code above, pointer p_5 has range [q_2, q_2], but from the
  9078. code we can also determine that p_5 cannot be NULL and, if q_2 had
  9079. a non-varying range, p_5's range should also be compatible with it.
  9080. These equivalences are created by two expressions: ASSERT_EXPR and
  9081. copy operations. Since p_5 is an assertion on p_4, and p_4 was the
  9082. result of another assertion, then we can use the fact that p_5 and
  9083. p_4 are equivalent when evaluating p_5's range.
  9084. Together with value ranges, we also propagate these equivalences
  9085. between names so that we can take advantage of information from
  9086. multiple ranges when doing final replacement. Note that this
  9087. equivalency relation is transitive but not symmetric.
  9088. In the example above, p_5 is equivalent to p_4, q_2 and p_3, but we
  9089. cannot assert that q_2 is equivalent to p_5 because q_2 may be used
  9090. in contexts where that assertion does not hold (e.g., in line 6).
  9091. TODO, the main difference between this pass and Patterson's is that
  9092. we do not propagate edge probabilities. We only compute whether
  9093. edges can be taken or not. That is, instead of having a spectrum
  9094. of jump probabilities between 0 and 1, we only deal with 0, 1 and
  9095. DON'T KNOW. In the future, it may be worthwhile to propagate
  9096. probabilities to aid branch prediction. */
  9097. static unsigned int
  9098. execute_vrp (void)
  9099. {
  9100. int i;
  9101. edge e;
  9102. switch_update *su;
  9103. loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
  9104. rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
  9105. scev_initialize ();
  9106. /* ??? This ends up using stale EDGE_DFS_BACK for liveness computation.
  9107. Inserting assertions may split edges which will invalidate
  9108. EDGE_DFS_BACK. */
  9109. insert_range_assertions ();
  9110. to_remove_edges.create (10);
  9111. to_update_switch_stmts.create (5);
  9112. threadedge_initialize_values ();
  9113. /* For visiting PHI nodes we need EDGE_DFS_BACK computed. */
  9114. mark_dfs_back_edges ();
  9115. vrp_initialize ();
  9116. ssa_propagate (vrp_visit_stmt, vrp_visit_phi_node);
  9117. vrp_finalize ();
  9118. free_numbers_of_iterations_estimates ();
  9119. /* ASSERT_EXPRs must be removed before finalizing jump threads
  9120. as finalizing jump threads calls the CFG cleanup code which
  9121. does not properly handle ASSERT_EXPRs. */
  9122. remove_range_assertions ();
  9123. /* If we exposed any new variables, go ahead and put them into
  9124. SSA form now, before we handle jump threading. This simplifies
  9125. interactions between rewriting of _DECL nodes into SSA form
  9126. and rewriting SSA_NAME nodes into SSA form after block
  9127. duplication and CFG manipulation. */
  9128. update_ssa (TODO_update_ssa);
  9129. finalize_jump_threads ();
  9130. /* Remove dead edges from SWITCH_EXPR optimization. This leaves the
  9131. CFG in a broken state and requires a cfg_cleanup run. */
  9132. FOR_EACH_VEC_ELT (to_remove_edges, i, e)
  9133. remove_edge (e);
  9134. /* Update SWITCH_EXPR case label vector. */
  9135. FOR_EACH_VEC_ELT (to_update_switch_stmts, i, su)
  9136. {
  9137. size_t j;
  9138. size_t n = TREE_VEC_LENGTH (su->vec);
  9139. tree label;
  9140. gimple_switch_set_num_labels (su->stmt, n);
  9141. for (j = 0; j < n; j++)
  9142. gimple_switch_set_label (su->stmt, j, TREE_VEC_ELT (su->vec, j));
  9143. /* As we may have replaced the default label with a regular one
  9144. make sure to make it a real default label again. This ensures
  9145. optimal expansion. */
  9146. label = gimple_switch_label (su->stmt, 0);
  9147. CASE_LOW (label) = NULL_TREE;
  9148. CASE_HIGH (label) = NULL_TREE;
  9149. }
  9150. if (to_remove_edges.length () > 0)
  9151. {
  9152. free_dominance_info (CDI_DOMINATORS);
  9153. loops_state_set (LOOPS_NEED_FIXUP);
  9154. }
  9155. to_remove_edges.release ();
  9156. to_update_switch_stmts.release ();
  9157. threadedge_finalize_values ();
  9158. scev_finalize ();
  9159. loop_optimizer_finalize ();
  9160. return 0;
  9161. }
  9162. namespace {
  9163. const pass_data pass_data_vrp =
  9164. {
  9165. GIMPLE_PASS, /* type */
  9166. "vrp", /* name */
  9167. OPTGROUP_NONE, /* optinfo_flags */
  9168. TV_TREE_VRP, /* tv_id */
  9169. PROP_ssa, /* properties_required */
  9170. 0, /* properties_provided */
  9171. 0, /* properties_destroyed */
  9172. 0, /* todo_flags_start */
  9173. ( TODO_cleanup_cfg | TODO_update_ssa ), /* todo_flags_finish */
  9174. };
  9175. class pass_vrp : public gimple_opt_pass
  9176. {
  9177. public:
  9178. pass_vrp (gcc::context *ctxt)
  9179. : gimple_opt_pass (pass_data_vrp, ctxt)
  9180. {}
  9181. /* opt_pass methods: */
  9182. opt_pass * clone () { return new pass_vrp (m_ctxt); }
  9183. virtual bool gate (function *) { return flag_tree_vrp != 0; }
  9184. virtual unsigned int execute (function *) { return execute_vrp (); }
  9185. }; // class pass_vrp
  9186. } // anon namespace
  9187. gimple_opt_pass *
  9188. make_pass_vrp (gcc::context *ctxt)
  9189. {
  9190. return new pass_vrp (ctxt);
  9191. }