gimplify.c 285 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572
  1. /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
  2. tree representation into the GIMPLE form.
  3. Copyright (C) 2002-2015 Free Software Foundation, Inc.
  4. Major work done by Sebastian Pop <s.pop@laposte.net>,
  5. Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
  6. This file is part of GCC.
  7. GCC is free software; you can redistribute it and/or modify it under
  8. the terms of the GNU General Public License as published by the Free
  9. Software Foundation; either version 3, or (at your option) any later
  10. version.
  11. GCC is distributed in the hope that it will be useful, but WITHOUT ANY
  12. WARRANTY; without even the implied warranty of MERCHANTABILITY or
  13. FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
  14. for more details.
  15. You should have received a copy of the GNU General Public License
  16. along with GCC; see the file COPYING3. If not see
  17. <http://www.gnu.org/licenses/>. */
  18. #include "config.h"
  19. #include "system.h"
  20. #include "coretypes.h"
  21. #include "hash-set.h"
  22. #include "machmode.h"
  23. #include "vec.h"
  24. #include "double-int.h"
  25. #include "input.h"
  26. #include "alias.h"
  27. #include "symtab.h"
  28. #include "options.h"
  29. #include "wide-int.h"
  30. #include "inchash.h"
  31. #include "tree.h"
  32. #include "fold-const.h"
  33. #include "hashtab.h"
  34. #include "tm.h"
  35. #include "hard-reg-set.h"
  36. #include "function.h"
  37. #include "rtl.h"
  38. #include "flags.h"
  39. #include "statistics.h"
  40. #include "real.h"
  41. #include "fixed-value.h"
  42. #include "insn-config.h"
  43. #include "expmed.h"
  44. #include "dojump.h"
  45. #include "explow.h"
  46. #include "calls.h"
  47. #include "emit-rtl.h"
  48. #include "varasm.h"
  49. #include "stmt.h"
  50. #include "expr.h"
  51. #include "predict.h"
  52. #include "basic-block.h"
  53. #include "tree-ssa-alias.h"
  54. #include "internal-fn.h"
  55. #include "gimple-fold.h"
  56. #include "tree-eh.h"
  57. #include "gimple-expr.h"
  58. #include "is-a.h"
  59. #include "gimple.h"
  60. #include "gimplify.h"
  61. #include "gimple-iterator.h"
  62. #include "stringpool.h"
  63. #include "stor-layout.h"
  64. #include "print-tree.h"
  65. #include "tree-iterator.h"
  66. #include "tree-inline.h"
  67. #include "tree-pretty-print.h"
  68. #include "langhooks.h"
  69. #include "bitmap.h"
  70. #include "gimple-ssa.h"
  71. #include "hash-map.h"
  72. #include "plugin-api.h"
  73. #include "ipa-ref.h"
  74. #include "cgraph.h"
  75. #include "tree-cfg.h"
  76. #include "tree-ssanames.h"
  77. #include "tree-ssa.h"
  78. #include "diagnostic-core.h"
  79. #include "target.h"
  80. #include "splay-tree.h"
  81. #include "omp-low.h"
  82. #include "gimple-low.h"
  83. #include "cilk.h"
  84. #include "gomp-constants.h"
  85. #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
  86. #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
  87. #include "builtins.h"
/* Per-variable data-sharing flags recorded (OR'ed together) in the
   splay tree of a gimplify_omp_ctx while gimplifying OpenMP regions.
   GOVD_DATA_SHARE_CLASS masks out the data-sharing kinds proper.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kind of OpenMP region being gimplified; stored in
   gimplify_omp_ctx::region_type.  The _COMBINED_ values are the base
   value with the low bit set.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_COMBINED_TEAMS = 9,
  /* Data region.  */
  ORT_TARGET_DATA = 16,
  /* Data region with offloading.  */
  ORT_TARGET = 32
};
/* Gimplify hashtable helper: hash/equality traits for the elt_t
   entries of the formal-temporary table (gimplify_ctx::temp_htab).  */

struct gimplify_hasher : typed_free_remove <elt_t>
{
  typedef elt_t value_type;
  typedef elt_t compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
/* State kept while gimplifying one function.  Contexts nest through
   prev_context; see push_gimplify_context/pop_gimplify_context.  */

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;	/* Enclosing context, or NULL.  */

  vec<gbind *> bind_expr_stack;		/* Stack of open GIMPLE_BINDs.  */
  tree temps;				/* Chain of temporaries created
					   (see gimple_add_tmp_var).  */
  gimple_seq conditional_cleanups;	/* Cleanups collected while inside a
					   COND_EXPR; flushed to the prequeue
					   by gimple_pop_condition.  */
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;			/* COND_EXPR nesting depth.  */
  bool save_stack;
  bool into_ssa;			/* Create SSA names for register-type
					   temporaries.  */
  bool allow_rhs_cond_expr;
  bool in_cleanup_point_expr;
};
/* State for one OpenMP region being gimplified.  Regions nest through
   outer_context; see new_omp_context/delete_omp_context.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;  /* Enclosing region, or NULL.  */
  splay_tree variables;			/* Per-DECL sharing flags
					   (gimplify_omp_var_data), keyed by
					   DECL_UID.  */
  hash_set<tree> *privatized_types;
  location_t location;			/* input_location at region entry.  */
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
};
/* Innermost (current) gimplification context; the stack of contexts is
   linked through gimplify_ctx::prev_context.  */
static struct gimplify_ctx *gimplify_ctxp;
/* Innermost OpenMP region context, linked through outer_context.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
/* Shorter alias name for the above function for use in gimplify.c
   only.  Appends statement GS to *SEQ_P without scanning operands
   (def/use vectors do not exist yet during gimplification).  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
  173. /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
  174. NULL, a new sequence is allocated. This function is
  175. similar to gimple_seq_add_seq, but does not scan the operands.
  176. During gimplification, we need to manipulate statement sequences
  177. before the def/use vectors have been constructed. */
  178. static void
  179. gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
  180. {
  181. gimple_stmt_iterator si;
  182. if (src == NULL)
  183. return;
  184. si = gsi_last (*dst_p);
  185. gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
  186. }
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  While on this free list, contexts are
   chained through their prev_context field (see ctx_alloc/ctx_free).  */

static struct gimplify_ctx *ctx_pool = NULL;
  190. /* Return a gimplify context struct from the pool. */
  191. static inline struct gimplify_ctx *
  192. ctx_alloc (void)
  193. {
  194. struct gimplify_ctx * c = ctx_pool;
  195. if (c)
  196. ctx_pool = c->prev_context;
  197. else
  198. c = XNEW (struct gimplify_ctx);
  199. memset (c, '\0', sizeof (*c));
  200. return c;
  201. }
/* Put gimplify context C back into the pool by linking it at the head
   of the free list; the memory is reused by a later ctx_alloc rather
   than released.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
  209. /* Free allocated ctx stack memory. */
  210. void
  211. free_gimplify_stack (void)
  212. {
  213. struct gimplify_ctx *c;
  214. while ((c = ctx_pool))
  215. {
  216. ctx_pool = c->prev_context;
  217. free (c);
  218. }
  219. }
  220. /* Set up a context for the gimplifier. */
  221. void
  222. push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
  223. {
  224. struct gimplify_ctx *c = ctx_alloc ();
  225. c->prev_context = gimplify_ctxp;
  226. gimplify_ctxp = c;
  227. gimplify_ctxp->into_ssa = in_ssa;
  228. gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
  229. }
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs opened in this context must have been closed.  */
  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();

  gimplify_ctxp = c->prev_context;

  /* Hand the temporaries created in this context to BODY's bind, or
     failing that to the function's local_decls.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* Drop the formal-temporary table before recycling the context.  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
  251. /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
  252. static void
  253. gimple_push_bind_expr (gbind *bind_stmt)
  254. {
  255. gimplify_ctxp->bind_expr_stack.reserve (8);
  256. gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
  257. }
/* Pop the first element off the stack of bindings.  The stack must be
   non-empty (a matching gimple_push_bind_expr must precede this).  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
/* Return the first element of the stack of bindings, i.e. the
   innermost GIMPLE_BIND currently being gimplified.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
/* Return the stack of bindings created during gimplification.  The vec
   is returned by value but shares its storage with the context.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  /* The depth is maintained by gimple_push/pop_condition.  */
  return gimplify_ctxp->conditions > 0;
}
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* When entering from unconditional scope, no conditional cleanups
     may still be pending from an earlier condition.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
  293. /* Note that we've left a COND_EXPR. If we're back at unconditional scope
  294. now, add any conditional cleanups we've seen to the prequeue. */
  295. static void
  296. gimple_pop_condition (gimple_seq *pre_p)
  297. {
  298. int conds = --(gimplify_ctxp->conditions);
  299. gcc_assert (conds >= 0);
  300. if (conds == 0)
  301. {
  302. gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
  303. gimplify_ctxp->conditional_cleanups = NULL;
  304. }
  305. }
  306. /* A stable comparison routine for use with splay trees and DECLs. */
  307. static int
  308. splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
  309. {
  310. tree a = (tree) xa;
  311. tree b = (tree) xb;
  312. return DECL_UID (a) - DECL_UID (b);
  313. }
  314. /* Create a new omp construct that deals with variable remapping. */
  315. static struct gimplify_omp_ctx *
  316. new_omp_context (enum omp_region_type region_type)
  317. {
  318. struct gimplify_omp_ctx *c;
  319. c = XCNEW (struct gimplify_omp_ctx);
  320. c->outer_context = gimplify_omp_ctxp;
  321. c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  322. c->privatized_types = new hash_set<tree>;
  323. c->location = input_location;
  324. c->region_type = region_type;
  325. if ((region_type & ORT_TASK) == 0)
  326. c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  327. else
  328. c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  329. return c;
  330. }
  331. /* Destroy an omp construct that deals with variable remapping. */
  332. static void
  333. delete_omp_context (struct gimplify_omp_ctx *c)
  334. {
  335. splay_tree_delete (c->variables);
  336. delete c->privatized_types;
  337. XDELETE (c);
  338. }
  339. static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
  340. static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
  349. /* Gimplify statement T into sequence *SEQ_P, and return the first
  350. tuple in the sequence of generated tuples for this statement.
  351. Return NULL if gimplifying T produced no tuples. */
  352. static gimple
  353. gimplify_and_return_first (tree t, gimple_seq *seq_p)
  354. {
  355. gimple_stmt_iterator last = gsi_last (*seq_p);
  356. gimplify_and_add (t, seq_p);
  357. if (!gsi_end_p (last))
  358. {
  359. gsi_next (&last);
  360. return gsi_stmt (last);
  361. }
  362. else
  363. return gimple_seq_first_stmt (*seq_p);
  364. }
  365. /* Returns true iff T is a valid RHS for an assignment to an un-renamed
  366. LHS, or for a call argument. */
  367. static bool
  368. is_gimple_mem_rhs (tree t)
  369. {
  370. /* If we're dealing with a renamable type, either source or dest must be
  371. a renamed variable. */
  372. if (is_gimple_reg_type (TREE_TYPE (t)))
  373. return is_gimple_val (t);
  374. else
  375. return is_gimple_val (t) || is_gimple_lvalue (t);
  376. }
  377. /* Return true if T is a CALL_EXPR or an expression that can be
  378. assigned to a temporary. Note that this predicate should only be
  379. used during gimplification. See the rationale for this in
  380. gimplify_modify_expr. */
  381. static bool
  382. is_gimple_reg_rhs_or_call (tree t)
  383. {
  384. return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
  385. || TREE_CODE (t) == CALL_EXPR);
  386. }
  387. /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
  388. this predicate should only be used during gimplification. See the
  389. rationale for this in gimplify_modify_expr. */
  390. static bool
  391. is_gimple_mem_rhs_or_call (tree t)
  392. {
  393. /* If we're dealing with a renamable type, either source or dest must be
  394. a renamed variable. */
  395. if (is_gimple_reg_type (TREE_TYPE (t)))
  396. return is_gimple_val (t);
  397. else
  398. return (is_gimple_val (t) || is_gimple_lvalue (t)
  399. || TREE_CODE (t) == CALL_EXPR);
  400. }
  401. /* Create a temporary with a name derived from VAL. Subroutine of
  402. lookup_tmp_var; nobody else should call this function. */
  403. static inline tree
  404. create_tmp_from_val (tree val)
  405. {
  406. /* Drop all qualifiers and address-space information from the value type. */
  407. tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  408. tree var = create_tmp_var (type, get_name (val));
  409. if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
  410. || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
  411. DECL_GIMPLE_REG_P (var) = 1;
  412. return var;
  413. }
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  Values with side effects must
     also always get a fresh temporary.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Look VAL up in the formal-temporary table, creating the table
	 lazily on first use.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we see this value: create a temporary for it
	     and remember the pairing for later reuse.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary already associated with this value.  */
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
  450. /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
  451. static tree
  452. internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
  453. bool is_formal)
  454. {
  455. tree t, mod;
  456. /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
  457. can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
  458. gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
  459. fb_rvalue);
  460. if (gimplify_ctxp->into_ssa
  461. && is_gimple_reg_type (TREE_TYPE (val)))
  462. t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  463. else
  464. t = lookup_tmp_var (val, is_formal);
  465. mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
  466. SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
  467. /* gimplify_modify_expr might want to reduce this further. */
  468. gimplify_and_add (mod, pre_p);
  469. ggc_free (mod);
  470. return t;
  471. }
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never reused for another occurrence of the same value.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  GS must be a
   GIMPLE_BIND (checked by the as_a cast below).  */

void
declare_vars (tree vars, gimple gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* Reverse the chain; after this, LAST is the tail of TEMPS.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Splice TEMPS in front of the bind's existing vars; LAST,
	     being the tail of TEMPS, gets chained to the old head.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
  527. /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
  528. for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
  529. no such upper bound can be obtained. */
  530. static void
  531. force_constant_size (tree var)
  532. {
  533. /* The only attempt we make is by querying the maximum size of objects
  534. of the variable's type. */
  535. HOST_WIDE_INT max_size;
  536. gcc_assert (TREE_CODE (var) == VAR_DECL);
  537. max_size = max_int_size_in_bytes (TREE_TYPE (var));
  538. gcc_assert (max_size >= 0);
  539. DECL_SIZE_UNIT (var)
  540. = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  541. DECL_SIZE (var)
  542. = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
  543. }
  544. /* Push the temporary variable TMP into the current binding. */
  545. void
  546. gimple_add_tmp_var_fn (struct function *fn, tree tmp)
  547. {
  548. gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
  549. /* Later processing assumes that the object size is constant, which might
  550. not be true at this point. Force the use of a constant upper bound in
  551. this case. */
  552. if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
  553. force_constant_size (tmp);
  554. DECL_CONTEXT (tmp) = fn->decl;
  555. DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
  556. record_vars_into (tmp, fn->decl);
  557. }
/* Push the temporary variable TMP into the current binding.  TMP must
   not yet be chained or seen in a bind expr.  Three cases are handled:
   inside the gimplifier, outside it with a cfun, and nested-function
   bodies with neither.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Gimplifying: chain TMP onto the context's temporaries; they are
	 handed out in pop_gimplify_context.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
  597. /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
  598. nodes that are referenced more than once in GENERIC functions. This is
  599. necessary because gimplification (translation into GIMPLE) is performed
  600. by modifying tree nodes in-place, so gimplication of a shared node in a
  601. first context could generate an invalid GIMPLE form in a second context.
  602. This is achieved with a simple mark/copy/unmark algorithm that walks the
  603. GENERIC representation top-down, marks nodes with TREE_VISITED the first
  604. time it encounters them, duplicates them if they already have TREE_VISITED
  605. set, and finally removes the TREE_VISITED marks it has set.
  606. The algorithm works only at the function level, i.e. it generates a GENERIC
  607. representation of a function with no nodes shared within the function when
  608. passed a GENERIC function (except for nodes that are allowed to be shared).
  609. At the global level, it is also necessary to unshare tree nodes that are
  610. referenced in more than one function, for the same aforementioned reason.
  611. This requires some cooperation from the front-end. There are 2 strategies:
  612. 1. Manual unsharing. The front-end needs to call unshare_expr on every
  613. expression that might end up being shared across functions.
  614. 2. Deep unsharing. This is an extension of regular unsharing. Instead
  615. of calling unshare_expr on expressions that might be shared across
  616. functions, the front-end pre-marks them with TREE_VISITED. This will
  617. ensure that they are unshared on the first reference within functions
  618. when the regular unsharing algorithm runs. The counterpart is that
  619. this algorithm must look deeper than for manual unsharing, which is
  620. specified by LANG_HOOKS_DEEP_UNSHARING.
  621. If there are only few specific cases of node sharing across functions, it is
  622. probably easier for a front-end to unshare the expressions manually. On the
  623. contrary, if the expressions generated at the global level are as widespread
  624. as expressions generated within functions, deep unsharing is very likely the
  625. way to go. */
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* hash_set::add returns whether T was already in the set, so the
	 empty statement (keep walking into T's subtrees) is reached only
	 the first time T is seen with a DATA set.  */
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
  696. /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
  697. copy_if_shared_r callback unmodified. */
  698. static inline void
  699. copy_if_shared (tree *tp, void *data)
  700. {
  701. walk_tree (tp, copy_if_shared_r, data, NULL);
  702. }
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* Unshare the saved body, plus the size expressions of the result
     decl, which can also contain shared trees.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  /* Recurse into any nested functions recorded in the callgraph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
  721. /* Callback for walk_tree to unmark the visited trees rooted at *TP.
  722. Subtrees are walked until the first unvisited node is encountered. */
  723. static tree
  724. unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
  725. {
  726. tree t = *tp;
  727. /* If this node has been visited, unmark it and keep looking. */
  728. if (TREE_VISITED (t))
  729. TREE_VISITED (t) = 0;
  730. /* Otherwise, don't look any deeper. */
  731. else
  732. *walk_subtrees = 0;
  733. return NULL_TREE;
  734. }
  735. /* Unmark the visited trees rooted at *TP. */
  736. static inline void
  737. unmark_visited (tree *tp)
  738. {
  739. walk_tree (tp, unmark_visited_r, NULL, NULL);
  740. }
/* Likewise, but mark all trees as not visited.  Mirrors unshare_body:
   it clears the visited marks set by the copy_if_shared pass on the
   body and result sizes of FNDECL and of any nested functions.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  /* Recurse into any nested functions recorded in the callgraph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}
  753. /* Unconditionally make an unshared copy of EXPR. This is used when using
  754. stored expressions which span multiple functions, such as BINFO_VTABLE,
  755. as the normal unsharing process can't tell that they're shared. */
  756. tree
  757. unshare_expr (tree expr)
  758. {
  759. walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  760. return expr;
  761. }
  762. /* Worker for unshare_expr_without_location. */
  763. static tree
  764. prune_expr_location (tree *tp, int *walk_subtrees, void *)
  765. {
  766. if (EXPR_P (*tp))
  767. SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  768. else
  769. *walk_subtrees = 0;
  770. return NULL_TREE;
  771. }
  772. /* Similar to unshare_expr but also prune all expression locations
  773. from EXPR. */
  774. tree
  775. unshare_expr_without_location (tree expr)
  776. {
  777. walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  778. if (EXPR_P (expr))
  779. walk_tree (&expr, prune_expr_location, NULL, NULL);
  780. return expr;
  781. }
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-null, is an INIT_EXPR/MODIFY_EXPR whose RHS is WRAPPER;
   the assignment is pushed down onto the wrapper's value-producing
   statement instead of creating a fresh temporary.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Each wrapper traversed is
	 voidified in place and marked as having side effects.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement;
		   an empty list ends the search with p == NULL.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value in a fresh
	     temporary named "retval".  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
  868. /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
  869. a temporary through which they communicate. */
  870. static void
  871. build_stack_save_restore (gcall **save, gcall **restore)
  872. {
  873. tree tmp_var;
  874. *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  875. tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  876. gimple_call_set_lhs (*save, tmp_var);
  877. *restore
  878. = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
  879. 1, tmp_var);
  880. }
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  The BIND_EXPR is
   replaced in *EXPR_P by the temporary holding its value (or NULL_TREE
   when void), and the resulting GIMPLE_BIND is appended to PRE_P.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      /* Addressable automatics in a SIMD region must be
		 privatized rather than merely marked local.  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset save_stack so we only add a stack save/restore pair when this
     bind (not an outer one) contains a VLA; restored below.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  if (gimplify_ctxp->save_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);
      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the
	     top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  /* An empty CONSTRUCTOR marked volatile is GIMPLE's clobber
	     representation: it tells later passes the storage is dead.  */
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	}
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      /* Wrap the body in a TRY_FINALLY so the restore/clobbers run on
	 every exit path, with the stack save (if any) ahead of it.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
  1003. /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
  1004. GIMPLE value, it is assigned to a new temporary and the statement is
  1005. re-written to return the temporary.
  1006. PRE_P points to the sequence where side effects that must happen before
  1007. STMT should be stored. */
  1008. static enum gimplify_status
  1009. gimplify_return_expr (tree stmt, gimple_seq *pre_p)
  1010. {
  1011. greturn *ret;
  1012. tree ret_expr = TREE_OPERAND (stmt, 0);
  1013. tree result_decl, result;
  1014. if (ret_expr == error_mark_node)
  1015. return GS_ERROR;
  1016. /* Implicit _Cilk_sync must be inserted right before any return statement
  1017. if there is a _Cilk_spawn in the function. If the user has provided a
  1018. _Cilk_sync, the optimizer should remove this duplicate one. */
  1019. if (fn_contains_cilk_spawn_p (cfun))
  1020. {
  1021. tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
  1022. gimplify_and_add (impl_sync, pre_p);
  1023. }
  1024. if (!ret_expr
  1025. || TREE_CODE (ret_expr) == RESULT_DECL
  1026. || ret_expr == error_mark_node)
  1027. {
  1028. greturn *ret = gimple_build_return (ret_expr);
  1029. gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  1030. gimplify_seq_add_stmt (pre_p, ret);
  1031. return GS_ALL_DONE;
  1032. }
  1033. if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
  1034. result_decl = NULL_TREE;
  1035. else
  1036. {
  1037. result_decl = TREE_OPERAND (ret_expr, 0);
  1038. /* See through a return by reference. */
  1039. if (TREE_CODE (result_decl) == INDIRECT_REF)
  1040. result_decl = TREE_OPERAND (result_decl, 0);
  1041. gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
  1042. || TREE_CODE (ret_expr) == INIT_EXPR)
  1043. && TREE_CODE (result_decl) == RESULT_DECL);
  1044. }
  1045. /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
  1046. Recall that aggregate_value_p is FALSE for any aggregate type that is
  1047. returned in registers. If we're returning values in registers, then
  1048. we don't want to extend the lifetime of the RESULT_DECL, particularly
  1049. across another call. In addition, for those aggregates for which
  1050. hard_function_value generates a PARALLEL, we'll die during normal
  1051. expansion of structure assignments; there's special code in expand_return
  1052. to handle this case that does not exist in expand_expr. */
  1053. if (!result_decl)
  1054. result = NULL_TREE;
  1055. else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
  1056. {
  1057. if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
  1058. {
  1059. if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
  1060. gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
  1061. /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
  1062. should be effectively allocated by the caller, i.e. all calls to
  1063. this function must be subject to the Return Slot Optimization. */
  1064. gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
  1065. gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
  1066. }
  1067. result = result_decl;
  1068. }
  1069. else if (gimplify_ctxp->return_temp)
  1070. result = gimplify_ctxp->return_temp;
  1071. else
  1072. {
  1073. result = create_tmp_reg (TREE_TYPE (result_decl));
  1074. /* ??? With complex control flow (usually involving abnormal edges),
  1075. we can wind up warning about an uninitialized value for this. Due
  1076. to how this variable is constructed and initialized, this is never
  1077. true. Give up and never warn. */
  1078. TREE_NO_WARNING (result) = 1;
  1079. gimplify_ctxp->return_temp = result;
  1080. }
  1081. /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
  1082. Then gimplify the whole thing. */
  1083. if (result != result_decl)
  1084. TREE_OPERAND (ret_expr, 0) = result;
  1085. gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
  1086. ret = gimple_build_return (result);
  1087. gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  1088. gimplify_seq_add_stmt (pre_p, ret);
  1089. return GS_ALL_DONE;
  1090. }
/* Gimplify a variable-length array DECL: gimplify its size expressions,
   give it a DECL_VALUE_EXPR that dereferences a pointer temporary, and
   emit an alloca-with-align call that fills in that pointer.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  /* Keep the pointer visible to the debugger.  */
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The dereference cannot trap: the memory is always allocated.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
  1127. /* A helper function to be called via walk_tree. Mark all labels under *TP
  1128. as being forced. To be called for DECL_INITIAL of static variables. */
  1129. static tree
  1130. force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
  1131. {
  1132. if (TYPE_P (*tp))
  1133. *walk_subtrees = 0;
  1134. if (TREE_CODE (*tp) == LABEL_DECL)
  1135. FORCED_LABEL (*tp) = 1;
  1136. return NULL_TREE;
  1137. }
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  The DECL_EXPR itself is consumed
   (*STMT_P is set to NULL_TREE); generated statements go to SEQ_P.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Treat the decl as a VLA if its size is not a constant, or if
	 generic stack checking forces large objects into VLA-style
	 allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR;
		 the original tree can then be reclaimed.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  /* Save the enclosing loop's exit label and start with no exit label
     of our own; gimplify_exit_expr creates one on demand through
     gimplify_ctxp->exit_label.  */
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  /* The back edge: jump back to the top of the loop.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  /* Only emit the exit label if some EXIT_EXPR in the body asked for it.  */
  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
  1210. /* Gimplify a statement list onto a sequence. These may be created either
  1211. by an enlightened front-end, or by shortcut_cond_expr. */
  1212. static enum gimplify_status
  1213. gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
  1214. {
  1215. tree temp = voidify_wrapper_expr (*expr_p, NULL);
  1216. tree_stmt_iterator i = tsi_start (*expr_p);
  1217. while (!tsi_end_p (i))
  1218. {
  1219. gimplify_stmt (tsi_stmt_ptr (i), pre_p);
  1220. tsi_delink (&i);
  1221. }
  1222. if (temp)
  1223. {
  1224. *expr_p = temp;
  1225. return GS_OK;
  1226. }
  1227. return GS_ALL_DONE;
  1228. }
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  A default case is synthesized (falling through to the
   end of the switch body) when the source had none.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  glabel *new_default;

	  /* Synthesize a default case that simply falls through past
	     the end of the switch body.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
  1281. /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
  1282. static enum gimplify_status
  1283. gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
  1284. {
  1285. struct gimplify_ctx *ctxp;
  1286. glabel *label_stmt;
  1287. /* Invalid programs can play Duff's Device type games with, for example,
  1288. #pragma omp parallel. At least in the C front end, we don't
  1289. detect such invalid branches until after gimplification, in the
  1290. diagnose_omp_blocks pass. */
  1291. for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
  1292. if (ctxp->case_labels.exists ())
  1293. break;
  1294. label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
  1295. ctxp->case_labels.safe_push (*expr_p);
  1296. gimplify_seq_add_stmt (pre_p, label_stmt);
  1297. return GS_ALL_DONE;
  1298. }
  1299. /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
  1300. if necessary. */
  1301. tree
  1302. build_and_jump (tree *label_p)
  1303. {
  1304. if (label_p == NULL)
  1305. /* If there's nowhere to jump, just fall through. */
  1306. return NULL_TREE;
  1307. if (*label_p == NULL_TREE)
  1308. {
  1309. tree label = create_artificial_label (UNKNOWN_LOCATION);
  1310. *label_p = label;
  1311. }
  1312. return build1 (GOTO_EXPR, void_type_node, *label_p);
  1313. }
  1314. /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
  1315. This also involves building a label to jump to and communicating it to
  1316. gimplify_loop_expr through gimplify_ctxp->exit_label. */
  1317. static enum gimplify_status
  1318. gimplify_exit_expr (tree *expr_p)
  1319. {
  1320. tree cond = TREE_OPERAND (*expr_p, 0);
  1321. tree expr;
  1322. expr = build_and_jump (&gimplify_ctxp->exit_label);
  1323. expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  1324. *expr_p = expr;
  1325. return GS_OK;
  1326. }
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral results, get_unwidened finds the narrowest mode in
     which the bit-field can be read; otherwise the field's own type is
     canonical.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.

   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
  1452. /* Nonlocal VLAs seen in the current function. */
  1453. static hash_set<tree> *nonlocal_vlas;
  1454. /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
  1455. static tree nonlocal_vla_vars;
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      /* An error must already have been reported; bail out quietly.  */
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  The conditions identify a VLA declared
	 in an enclosing function (non-constant size, value expr of the
	 form *tmp_var, different function context).  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip enclosing workshare/SIMD contexts; presumably only a
	     surrounding parallel/task/target region matters here —
	     TODO confirm against omp_notice_variable.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  /* Only create the proxy when not inside such an OMP context
	     and when DECL has not been recorded before (hash_set::add
	     returns true if the entry already existed).  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the proxy onto the list emitted for debug info.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      /* Replace the decl by (an unshared copy of) its value expression.  */
      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
  1514. /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
  1515. static void
  1516. recalculate_side_effects (tree t)
  1517. {
  1518. enum tree_code code = TREE_CODE (t);
  1519. int len = TREE_OPERAND_LENGTH (t);
  1520. int i;
  1521. switch (TREE_CODE_CLASS (code))
  1522. {
  1523. case tcc_expression:
  1524. switch (code)
  1525. {
  1526. case INIT_EXPR:
  1527. case MODIFY_EXPR:
  1528. case VA_ARG_EXPR:
  1529. case PREDECREMENT_EXPR:
  1530. case PREINCREMENT_EXPR:
  1531. case POSTDECREMENT_EXPR:
  1532. case POSTINCREMENT_EXPR:
  1533. /* All of these have side-effects, no matter what their
  1534. operands are. */
  1535. return;
  1536. default:
  1537. break;
  1538. }
  1539. /* Fall through. */
  1540. case tcc_comparison: /* a comparison expression */
  1541. case tcc_unary: /* a unary arithmetic expression */
  1542. case tcc_binary: /* a binary arithmetic expression */
  1543. case tcc_reference: /* a reference */
  1544. case tcc_vl_exp: /* a function call */
  1545. TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
  1546. for (i = 0; i < len; ++i)
  1547. {
  1548. tree op = TREE_OPERAND (t, i);
  1549. if (op && TREE_SIDE_EFFECTS (op))
  1550. TREE_SIDE_EFFECTS (t) = 1;
  1551. }
  1552. break;
  1553. case tcc_constant:
  1554. /* No side-effects. */
  1555. return;
  1556. default:
  1557. gcc_unreachable ();
  1558. }
  1559. }
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  Restart the walk at the same
	 position in that case.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  /* Store the variable low bound in operand 2 and
		     gimplify it there.  */
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  Operand 3 stores the size in units of
		 that alignment.  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* If nothing changed we must not report GS_ALL_DONE falsely.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  They are
     collected in a local queue and appended to ORIG_POST_P below.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the pre-modification value into a temporary.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  Decrement is
     expressed by adding the negated offset.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    /* Perform the arithmetic in ARITH_TYPE and convert back to the
       expression's type.  */
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* The store happens in PRE_P; the saved temporary LHS is the
	 value of the whole expression.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
  1815. /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
  1816. static void
  1817. maybe_with_size_expr (tree *expr_p)
  1818. {
  1819. tree expr = *expr_p;
  1820. tree type = TREE_TYPE (expr);
  1821. tree size;
  1822. /* If we've already wrapped this or the type is error_mark_node, we can't do
  1823. anything. */
  1824. if (TREE_CODE (expr) == WITH_SIZE_EXPR
  1825. || type == error_mark_node)
  1826. return;
  1827. /* If the size isn't known or is a constant, we have nothing to do. */
  1828. size = TYPE_SIZE_UNIT (type);
  1829. if (!size || TREE_CODE (size) == INTEGER_CST)
  1830. return;
  1831. /* Otherwise, make a WITH_SIZE_EXPR. */
  1832. size = unshare_expr (size);
  1833. size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  1834. *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
  1835. }
  1836. /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
  1837. Store any side-effects in PRE_P. CALL_LOCATION is the location of
  1838. the CALL_EXPR. */
  1839. enum gimplify_status
  1840. gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
  1841. {
  1842. bool (*test) (tree);
  1843. fallback_t fb;
  1844. /* In general, we allow lvalues for function arguments to avoid
  1845. extra overhead of copying large aggregates out of even larger
  1846. aggregates into temporaries only to copy the temporaries to
  1847. the argument list. Make optimizers happy by pulling out to
  1848. temporaries those types that fit in registers. */
  1849. if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
  1850. test = is_gimple_val, fb = fb_rvalue;
  1851. else
  1852. {
  1853. test = is_gimple_lvalue, fb = fb_either;
  1854. /* Also strip a TARGET_EXPR that would force an extra copy. */
  1855. if (TREE_CODE (*arg_p) == TARGET_EXPR)
  1856. {
  1857. tree init = TARGET_EXPR_INITIAL (*arg_p);
  1858. if (init
  1859. && !VOID_TYPE_P (TREE_TYPE (init)))
  1860. *arg_p = init;
  1861. }
  1862. }
  1863. /* If this is a variable sized type, we must remember the size. */
  1864. maybe_with_size_expr (arg_p);
  1865. /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
  1866. /* Make sure arguments have the same location as the function call
  1867. itself. */
  1868. protected_set_expr_location (*arg_p, call_location);
  1869. /* There is a sequence point before a function call. Side effects in
  1870. the argument list must occur before the actual call. So, when
  1871. gimplifying arguments, force gimplify_expr to use an internal
  1872. post queue which is then appended to the end of PRE_P. */
  1873. return gimplify_expr (arg_p, pre_p, NULL, test, fb);
  1874. }
  1875. /* Don't fold inside offloading or taskreg regions: it can break code by
  1876. adding decl references that weren't in the source. We'll do it during
  1877. omplower pass instead. */
  1878. static bool
  1879. maybe_fold_stmt (gimple_stmt_iterator *gsi)
  1880. {
  1881. struct gimplify_omp_ctx *ctx;
  1882. for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
  1883. if (ctx->region_type == ORT_TARGET
  1884. || (ctx->region_type & (ORT_PARALLEL | ORT_TASK)) != 0)
  1885. return false;
  1886. return fold_stmt (gsi);
  1887. }
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  These have a
     NULL CALL_EXPR_FN and are identified by CALL_EXPR_IFN instead.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  /* NOTE(review): the gimplify_arg return status is ignored
	     here, unlike in the normal-call loop below — confirm that
	     argument gimplification cannot fail for internal fns.  */
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}
      gimple call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}
      case BUILT_IN_LINE:
	{
	  /* Fold __builtin_LINE () to the line of the call site.  */
	  *expr_p = build_int_cst (TREE_TYPE (*expr_p),
				   LOCATION_LINE (EXPR_LOCATION (*expr_p)));
	  return GS_OK;
	}
      case BUILT_IN_FILE:
	{
	  /* Fold __builtin_FILE () to the file of the call site.  */
	  const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
	  *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
	  return GS_OK;
	}
      case BUILT_IN_FUNCTION:
	{
	  /* Fold __builtin_FUNCTION () to the enclosing function name.  */
	  const char *function;
	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
	  *expr_p = build_string_literal (strlen (function) + 1, function);
	  return GS_OK;
	}
      default:
	;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past the named parameters; if P runs out before the
     arguments do, the remaining arguments are unnamed (variadic).  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  /* Rebuild the call without the trailing va_arg_pack arg.  */
	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	/* The callee doesn't use a static chain; drop it.  */
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate.  Emit a conditional jump to
	 either label; build_and_jump creates the label on demand when
	 the pointer target is still NULL.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If a local "fall through" label was created above, emit it after
     the generated jumps.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns the rewritten tree, which may be EXPR itself if no
   rewriting was needed.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Whether each arm contains code worth keeping; an arm without side
     effects can be dropped once control flow is made explicit.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  /* Recurse: the inner 'if (b) then c' may itself contain
	     short-circuit operators.  */
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.
     A NULL label pointer tells shortcut_cond_r "fall through" for that
     direction.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble the rewritten statement list: condition tests, then-arm,
     optional jump over the else, the else label and arm, end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  /* Propagate the location of the last then-arm statement so
	     the jump does not get a bogus location.  */
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Truth operators and comparisons get their type rewritten in place
   (and their operands boolified recursively); anything else is wrapped
   in a conversion to boolean_type_node.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Strip a cast back to the call's own type before
		 testing for a truth value.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Loop annotations wrap a condition: boolify the wrapped
	 expression and give the annotation itself boolean type.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  Returns the
   combined gimplify status of the three operands.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.
     Rewriting them to the non-short-circuit forms is safe here because
     the whole expression is known to be free of side effects.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  /* Gimplify both arms to GIMPLE values; the worst status wins.  */
  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
  2484. /* Return true if evaluating EXPR could trap.
  2485. EXPR is GENERIC, while tree_could_trap_p can be called
  2486. only on GIMPLE. */
  2487. static bool
  2488. generic_expr_could_trap_p (tree expr)
  2489. {
  2490. unsigned i, n;
  2491. if (!expr || is_gimple_val (expr))
  2492. return false;
  2493. if (!EXPR_P (expr) || tree_could_trap_p (expr))
  2494. return true;
  2495. n = TREE_OPERAND_LENGTH (expr);
  2496. for (i = 0; i < n; i++)
  2497. if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
  2498. return true;
  2499. return false;
  2500. }
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

     if (p)			if (p)
       t1 = a;			  a;
     else		or	else
       t1 = b;			  b;
     t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  /* The result is read through the pointer temporary.  */
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  /* If the then-arm is already a goto to a local label, reuse that label
     as the true target of the GIMPLE_COND instead of emitting a new
     label plus a jump to a jump.  */
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  /* Likewise reuse the else-arm's goto destination as the false target.  */
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimplify_seq_add_stmt (&seq, cond_stmt);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
  2705. /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
  2706. to be marked addressable.
  2707. We cannot rely on such an expression being directly markable if a temporary
  2708. has been created by the gimplification. In this case, we create another
  2709. temporary and initialize it with a copy, which will become a store after we
  2710. mark it addressable. This can happen if the front-end passed us something
  2711. that it could not mark addressable yet, like a Fortran pass-by-reference
  2712. parameter (int) floatvar. */
  2713. static void
  2714. prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
  2715. {
  2716. while (handled_component_p (*expr_p))
  2717. expr_p = &TREE_OPERAND (*expr_p, 0);
  2718. if (is_gimple_reg (*expr_p))
  2719. {
  2720. tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
  2721. DECL_GIMPLE_REG_P (var) = 0;
  2722. *expr_p = var;
  2723. }
  2724. }
  2725. /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
  2726. a call to __builtin_memcpy. */
  2727. static enum gimplify_status
  2728. gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
  2729. gimple_seq *seq_p)
  2730. {
  2731. tree t, to, to_ptr, from, from_ptr;
  2732. gcall *gs;
  2733. location_t loc = EXPR_LOCATION (*expr_p);
  2734. to = TREE_OPERAND (*expr_p, 0);
  2735. from = TREE_OPERAND (*expr_p, 1);
  2736. /* Mark the RHS addressable. Beware that it may not be possible to do so
  2737. directly if a temporary has been created by the gimplification. */
  2738. prepare_gimple_addressable (&from, seq_p);
  2739. mark_addressable (from);
  2740. from_ptr = build_fold_addr_expr_loc (loc, from);
  2741. gimplify_arg (&from_ptr, seq_p, loc);
  2742. mark_addressable (to);
  2743. to_ptr = build_fold_addr_expr_loc (loc, to);
  2744. gimplify_arg (&to_ptr, seq_p, loc);
  2745. t = builtin_decl_implicit (BUILT_IN_MEMCPY);
  2746. gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
  2747. if (want_value)
  2748. {
  2749. /* tmp = memcpy() */
  2750. t = create_tmp_var (TREE_TYPE (to_ptr));
  2751. gimple_call_set_lhs (gs, t);
  2752. gimplify_seq_add_stmt (seq_p, gs);
  2753. *expr_p = build_simple_mem_ref (t);
  2754. return GS_ALL_DONE;
  2755. }
  2756. gimplify_seq_add_stmt (seq_p, gs);
  2757. *expr_p = NULL;
  2758. return GS_ALL_DONE;
  2759. }
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  SIZE is the number of
   bytes to clear; if WANT_VALUE the result left in *EXPR_P is a
   dereference of the destination pointer.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* NOTE(review): the sibling gimplify_modify_expr_to_memcpy builds
	 the result with build_simple_mem_ref (a MEM_REF) while this uses
	 a bare INDIRECT_REF -- confirm the asymmetry is intentional.  */
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.
   This struct carries the description of the lhs that the walk
   callback compares each constructor element against.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object, used for conflict tests against
     indirect references found in the constructor.  */
  alias_set_type lhs_alias_set;
};
  2808. static tree
  2809. gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
  2810. {
  2811. struct gimplify_init_ctor_preeval_data *data
  2812. = (struct gimplify_init_ctor_preeval_data *) xdata;
  2813. tree t = *tp;
  2814. /* If we find the base object, obviously we have overlap. */
  2815. if (data->lhs_base_decl == t)
  2816. return t;
  2817. /* If the constructor component is indirect, determine if we have a
  2818. potential overlap with the lhs. The only bits of information we
  2819. have to go on at this point are addressability and alias sets. */
  2820. if ((INDIRECT_REF_P (t)
  2821. || TREE_CODE (t) == MEM_REF)
  2822. && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
  2823. && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
  2824. return t;
  2825. /* If the constructor component is a call, determine if it can hide a
  2826. potential overlap with the lhs through an INDIRECT_REF like above.
  2827. ??? Ugh - this is completely broken. In fact this whole analysis
  2828. doesn't look conservative. */
  2829. if (TREE_CODE (t) == CALL_EXPR)
  2830. {
  2831. tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
  2832. for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
  2833. if (POINTER_TYPE_P (TREE_VALUE (type))
  2834. && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
  2835. && alias_sets_conflict_p (data->lhs_alias_set,
  2836. get_alias_set
  2837. (TREE_TYPE (TREE_VALUE (type)))))
  2838. return t;
  2839. }
  2840. if (IS_TYPE_OR_DECL_P (t))
  2841. *walk_subtrees = 0;
  2842. return NULL;
  2843. }
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  Side-effect statements go to PRE_P/POST_P.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
  2961. /* Return true if FDECL is accessing a field that is zero sized. */
  2962. static bool
  2963. zero_sized_field_decl (const_tree fdecl)
  2964. {
  2965. if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
  2966. && integer_zerop (DECL_SIZE (fdecl)))
  2967. return true;
  2968. return false;
  2969. }
  2970. /* Return true if TYPE is zero sized. */
  2971. static bool
  2972. zero_sized_type (const_tree type)
  2973. {
  2974. if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
  2975. && integer_zerop (TYPE_SIZE (type)))
  2976. return true;
  2977. return false;
  2978. }
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  The generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the whole object was already zeroed, a zero initializer is
	 redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
  3056. /* Return the appropriate RHS predicate for this LHS. */
  3057. gimple_predicate
  3058. rhs_predicate_for (tree lhs)
  3059. {
  3060. if (is_gimple_reg (lhs))
  3061. return is_gimple_reg_rhs_or_call;
  3062. else
  3063. return is_gimple_mem_rhs_or_call;
  3064. }
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   EXPR_P points at the COMPOUND_LITERAL_EXPR; PRE_P receives the
   gimplified DECL_EXPR.  GIMPLE_TEST_F and FALLBACK describe what the
   caller will accept, and decide whether we can substitute the
   initializer directly.  Always returns GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);

  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (which gimplifies the initializer) before the
     current statement, then stand in the decl for the literal.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
  3115. /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
  3116. return a new CONSTRUCTOR if something changed. */
  3117. static tree
  3118. optimize_compound_literals_in_ctor (tree orig_ctor)
  3119. {
  3120. tree ctor = orig_ctor;
  3121. vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  3122. unsigned int idx, num = vec_safe_length (elts);
  3123. for (idx = 0; idx < num; idx++)
  3124. {
  3125. tree value = (*elts)[idx].value;
  3126. tree newval = value;
  3127. if (TREE_CODE (value) == CONSTRUCTOR)
  3128. newval = optimize_compound_literals_in_ctor (value);
  3129. else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
  3130. {
  3131. tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
  3132. tree decl = DECL_EXPR_DECL (decl_s);
  3133. tree init = DECL_INITIAL (decl);
  3134. if (!TREE_ADDRESSABLE (value)
  3135. && !TREE_ADDRESSABLE (decl)
  3136. && init
  3137. && TREE_CODE (init) == CONSTRUCTOR)
  3138. newval = optimize_compound_literals_in_ctor (init);
  3139. }
  3140. if (newval == value)
  3141. continue;
  3142. if (ctor == orig_ctor)
  3143. {
  3144. ctor = copy_node (orig_ctor);
  3145. CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
  3146. elts = CONSTRUCTOR_ELTS (ctor);
  3147. }
  3148. (*elts)[idx].value = newval;
  3149. }
  3150. return ctor;
  3151. }
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first, so OBJECT below is in gimple form.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it is known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a loss to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    /* The assignment is replaced entirely by the static decl.  */
	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		/* Put the initializer in static storage so the RHS
		   becomes a block-copyable reference.  */
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    /* Build the constructor into TEMP, then copy TEMP to the
	       volatile OBJECT in one assignment.  */
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    /* Missing parts default to zero of the component type.  */
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }

	/* A non-register LHS needs the whole CONSTRUCTOR evaluated into
	   a formal temporary first.  */
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gassign *init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper; the actual folding lives in gimple_fold_indirect_ref.  */
  return gimple_fold_indirect_ref (t);
}
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   EXPR_P is the whole assignment; FROM_P/TO_P point at its RHS/LHS
   operands.  WANT_VALUE is true if the caller wants the value of the
   assignment.  Returns GS_UNHANDLED if nothing was done.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    /* Preserve the volatility of the original access:
		       wrap bare decls in a MEM_REF so the flag can be
		       carried over.  */
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Push the assignment into each non-void arm.  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* voidify_wrapper_expr moves the assignment inside WRAP and
	       makes the wrapper itself void.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU -- when the literal cannot be flattened, there is
	     nothing more to simplify here.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
  3735. /* Return true if T looks like a valid GIMPLE statement. */
  3736. static bool
  3737. is_gimple_stmt (tree t)
  3738. {
  3739. const enum tree_code code = TREE_CODE (t);
  3740. switch (code)
  3741. {
  3742. case NOP_EXPR:
  3743. /* The only valid NOP_EXPR is the empty statement. */
  3744. return IS_EMPTY_STMT (t);
  3745. case BIND_EXPR:
  3746. case COND_EXPR:
  3747. /* These are only valid if they're void. */
  3748. return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
  3749. case SWITCH_EXPR:
  3750. case GOTO_EXPR:
  3751. case RETURN_EXPR:
  3752. case LABEL_EXPR:
  3753. case CASE_LABEL_EXPR:
  3754. case TRY_CATCH_EXPR:
  3755. case TRY_FINALLY_EXPR:
  3756. case EH_FILTER_EXPR:
  3757. case CATCH_EXPR:
  3758. case ASM_EXPR:
  3759. case STATEMENT_LIST:
  3760. case OACC_PARALLEL:
  3761. case OACC_KERNELS:
  3762. case OACC_DATA:
  3763. case OACC_HOST_DATA:
  3764. case OACC_DECLARE:
  3765. case OACC_UPDATE:
  3766. case OACC_ENTER_DATA:
  3767. case OACC_EXIT_DATA:
  3768. case OACC_CACHE:
  3769. case OMP_PARALLEL:
  3770. case OMP_FOR:
  3771. case OMP_SIMD:
  3772. case CILK_SIMD:
  3773. case OMP_DISTRIBUTE:
  3774. case OACC_LOOP:
  3775. case OMP_SECTIONS:
  3776. case OMP_SECTION:
  3777. case OMP_SINGLE:
  3778. case OMP_MASTER:
  3779. case OMP_TASKGROUP:
  3780. case OMP_ORDERED:
  3781. case OMP_CRITICAL:
  3782. case OMP_TASK:
  3783. /* These are always void. */
  3784. return true;
  3785. case CALL_EXPR:
  3786. case MODIFY_EXPR:
  3787. case PREDICT_EXPR:
  3788. /* These are valid regardless of their type. */
  3789. return true;
  3790. default:
  3791. return false;
  3792. }
  3793. }
  3794. /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
  3795. a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
  3796. DECL_GIMPLE_REG_P set.
  3797. IMPORTANT NOTE: This promotion is performed by introducing a load of the
  3798. other, unmodified part of the complex object just before the total store.
  3799. As a consequence, if the object is still uninitialized, an undefined value
  3800. will be loaded into a register, which may result in a spurious exception
  3801. if the register is floating-point and the value happens to be a signaling
  3802. NaN for example. Then the fully-fledged complex operations lowering pass
  3803. followed by a DCE pass are necessary in order to fix things up. */
  3804. static enum gimplify_status
  3805. gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
  3806. bool want_value)
  3807. {
  3808. enum tree_code code, ocode;
  3809. tree lhs, rhs, new_rhs, other, realpart, imagpart;
  3810. lhs = TREE_OPERAND (*expr_p, 0);
  3811. rhs = TREE_OPERAND (*expr_p, 1);
  3812. code = TREE_CODE (lhs);
  3813. lhs = TREE_OPERAND (lhs, 0);
  3814. ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  3815. other = build1 (ocode, TREE_TYPE (rhs), lhs);
  3816. TREE_NO_WARNING (other) = 1;
  3817. other = get_formal_tmp_var (other, pre_p);
  3818. realpart = code == REALPART_EXPR ? rhs : other;
  3819. imagpart = code == REALPART_EXPR ? other : rhs;
  3820. if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
  3821. new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  3822. else
  3823. new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
  3824. gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  3825. *expr_p = (want_value) ? rhs : NULL_TREE;
  3826. return GS_ALL_DONE;
  3827. }
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
      *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
      in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      /* A clobber never produces a value and may only target a decl or
	 a memory reference.  */
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the LHS first so its side effects are emitted before the
     RHS is evaluated.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.
     Give the RHS temporary the LHS's name as a debug expr so debug info
     still refers to the user variable.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* For a volatile LHS, do not re-read the destination to produce the
     value of the expression; evaluate the RHS into a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* Turn __builtin_expect into its internal-function form so the
	     branch-probability hint survives into GIMPLE.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      /* A noreturn call never produces a usable value; drop the LHS.  */
      if (!gimple_call_noreturn_p (call_stmt))
	gimple_call_set_lhs (call_stmt, *to_p);
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile LHS, *from_p is the temporary created above;
	 otherwise re-read the (unshared) destination.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
  4025. /* Gimplify a comparison between two variable-sized objects. Do this
  4026. with a call to BUILT_IN_MEMCMP. */
  4027. static enum gimplify_status
  4028. gimplify_variable_sized_compare (tree *expr_p)
  4029. {
  4030. location_t loc = EXPR_LOCATION (*expr_p);
  4031. tree op0 = TREE_OPERAND (*expr_p, 0);
  4032. tree op1 = TREE_OPERAND (*expr_p, 1);
  4033. tree t, arg, dest, src, expr;
  4034. arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  4035. arg = unshare_expr (arg);
  4036. arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  4037. src = build_fold_addr_expr_loc (loc, op1);
  4038. dest = build_fold_addr_expr_loc (loc, op0);
  4039. t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  4040. t = build_call_expr_loc (loc, t, 3, dest, src, arg);
  4041. expr
  4042. = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  4043. SET_EXPR_LOCATION (expr, loc);
  4044. *expr_p = expr;
  4045. return GS_OK;
  4046. }
  4047. /* Gimplify a comparison between two aggregate objects of integral scalar
  4048. mode as a comparison between the bitwise equivalent scalar values. */
  4049. static enum gimplify_status
  4050. gimplify_scalar_mode_aggregate_compare (tree *expr_p)
  4051. {
  4052. location_t loc = EXPR_LOCATION (*expr_p);
  4053. tree op0 = TREE_OPERAND (*expr_p, 0);
  4054. tree op1 = TREE_OPERAND (*expr_p, 1);
  4055. tree type = TREE_TYPE (op0);
  4056. tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
  4057. op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  4058. op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
  4059. *expr_p
  4060. = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
  4061. return GS_OK;
  4062. }
  4063. /* Gimplify an expression sequence. This function gimplifies each
  4064. expression and rewrites the original expression with the last
  4065. expression of the sequence in GIMPLE form.
  4066. PRE_P points to the list where the side effects for all the
  4067. expressions in the sequence will be emitted.
  4068. WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
  4069. static enum gimplify_status
  4070. gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
  4071. {
  4072. tree t = *expr_p;
  4073. do
  4074. {
  4075. tree *sub_p = &TREE_OPERAND (t, 0);
  4076. if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
  4077. gimplify_compound_expr (sub_p, pre_p, false);
  4078. else
  4079. gimplify_stmt (sub_p, pre_p);
  4080. t = TREE_OPERAND (t, 1);
  4081. }
  4082. while (TREE_CODE (t) == COMPOUND_EXPR);
  4083. *expr_p = t;
  4084. if (want_value)
  4085. return GS_OK;
  4086. else
  4087. {
  4088. gimplify_stmt (expr_p, pre_p);
  4089. return GS_ALL_DONE;
  4090. }
  4091. }
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression such as SAVE_EXPRs
	 generated by the Java frontend for class initialization.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  val = NULL;
	}
      else
	/* Evaluate the expression into a temporary; every later
	   occurrence of this SAVE_EXPR will reuse that temporary.  */
	val = get_initialized_tmp_var (val, pre_p, post_p);

      /* Record the result and mark the SAVE_EXPR resolved so it is
	 evaluated exactly once.  */
      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;
  return ret;
}
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
      *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	/* '&*ptr' collapses to (a possibly converted) 'ptr'.  */
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* Rewrite &VIEW_CONVERT_EXPR<T>(x) as (T') &x.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Output constraints are remembered so that matching-operand input
     constraints ("0", "1", ...) can be checked against them below.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: gimplify each output operand as an lvalue and split
     in/out ("+") operands into a separate output and input.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      /* Detach the operand from the chain before pushing it so each
	 vector element is a standalone TREE_LIST node.  */
      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass over the alternatives: compute the length
		     of the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  str = (char *) alloca (len);
		  /* Second pass: emit either the operand number (register
		     alternatives) or the original text (the rest).  */
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prefix the alternative with '=' so it
			 parses as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* No register alternative: reuse the original constraint
	       text, minus the leading '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);
	  free (p);
	  /* The new input reads the same lvalue as the output (unshared
	     so the trees stay independent).  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Second pass: gimplify the input operands.  Note that I keeps
     counting up so diagnostics number operands across outputs+inputs.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Pre/post inc/dec cannot be a directly addressable memory
	     operand; mark the operand as erroneous.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and labels need no gimplification; just detach and collect.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the wrapped body yields a value, TEMP is the temporary that
     receives it; otherwise TEMP is NULL.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  /* Restore the outer context's condition/cleanup state.  */
  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Convert each GIMPLE_WITH_CLEANUP_EXPR marker into a GIMPLE_TRY
     wrapping the statements that follow it in the sequence.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The marker is the last statement: nothing can throw after
		 it, so just inline the cleanup (unless it is EH-only, in
		 which case it can simply be dropped).  */
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the try body for further
		 cleanup markers.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
      gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* 'flag = false' and the cleanup marker go before the conditional
	 context; 'flag = true' goes at the current (conditional) point,
	 right after the initialization it guards.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional case: just emit the cleanup marker here.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build an explicit initialization of the slot and
	     gimplify that; the INIT_EXPR tree node itself becomes garbage
	     afterwards and can be freed eagerly.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once.  */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    /* EH-only cleanups are pushed immediately; they must not be
	       combined with the clobber below, which runs on normal exit.  */
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  /* Run the clobber after any user cleanup.  */
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
  4646. /* Gimplification of expression trees. */
  4647. /* Gimplify an expression which appears at statement context. The
  4648. corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
  4649. NULL, a new sequence is allocated.
  4650. Return true if we actually added a statement to the queue. */
  4651. bool
  4652. gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
  4653. {
  4654. gimple_seq_node last;
  4655. last = gimple_seq_last (*seq_p);
  4656. gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  4657. return last != gimple_seq_last (*seq_p);
  4658. }
  4659. /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
  4660. to CTX. If entries already exist, force them to be some flavor of private.
  4661. If there is no enclosing parallel, do nothing. */
  4662. void
  4663. omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
  4664. {
  4665. splay_tree_node n;
  4666. if (decl == NULL || !DECL_P (decl))
  4667. return;
  4668. do
  4669. {
  4670. n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  4671. if (n != NULL)
  4672. {
  4673. if (n->value & GOVD_SHARED)
  4674. n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
  4675. else if (n->value & GOVD_MAP)
  4676. n->value |= GOVD_MAP_TO_ONLY;
  4677. else
  4678. return;
  4679. }
  4680. else if (ctx->region_type == ORT_TARGET)
  4681. omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
  4682. else if (ctx->region_type != ORT_WORKSHARE
  4683. && ctx->region_type != ORT_SIMD
  4684. && ctx->region_type != ORT_TARGET_DATA)
  4685. omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
  4686. ctx = ctx->outer_context;
  4687. }
  4688. while (ctx);
  4689. }
/* Similarly for each of the type sizes of TYPE.  Recursively walks the
   structure of TYPE and firstprivatizes every size/bound expression found,
   using CTX->privatized_types to visit each main variant only once.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  /* Already handled this main variant in this context.  */
  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: the bounds themselves may be variable.  */
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      /* Recurse into element type and index domain.  */
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	/* Each field's offset and type may carry variable sizes.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  /* In all cases, the overall size expressions of TYPE itself, plus any
     language-specific sizes.  */
  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
/* Add an entry for DECL in the OMP context CTX with FLAGS (a mask of
   GOVD_* bits).  Handles merging with a pre-existing entry, and the extra
   bookkeeping required for variable-sized decls and by-reference
   privatization.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl))
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && n->value != GOVD_ALIGNED)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The VLA decl's DECL_VALUE_EXPR is *ptr; register the
	     underlying pointer decl.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* N is non-NULL here only for a pre-existing GOVD_ALIGNED entry;
     merge rather than insert in that case.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  Always returns false (the decl is never remapped).  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Diagnose use inside any enclosing target region; an inserted
     splay-tree entry suppresses a repeat diagnostic for the same decl.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if (octx->region_type == ORT_TARGET)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    error ("threadprivate variable %qE used in target region",
		   DECL_NAME (decl));
	    error_at (octx->location, "enclosing target region");
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;

  /* Same suppression scheme for the untied-task diagnostic.  */
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  /* Emulated TLS: the value expr's base is the real TLS decl.  */
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (ctx->region_type == ORT_TARGET)
    {
      /* In a target region everything is implicitly mapped.  */
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
	    {
	      error ("%qD referenced in target region does not have "
		     "a mappable type", decl);
	      omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
	    }
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | flags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  n->value |= flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* First sighting of DECL in this context: compute its implicit
	 data-sharing attribute.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_TARGET_DATA)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  /* default(none): any unlisted use is an error; report against
	     the innermost enclosing construct kind.  */
	  if ((ctx->region_type & ORT_PARALLEL) != 0)
	    {
	      error ("%qE not specified in enclosing parallel",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing parallel");
	    }
	  else if ((ctx->region_type & ORT_TASK) != 0)
	    {
	      error ("%qE not specified in enclosing task",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing task");
	    }
	  else if (ctx->region_type & ORT_TEAMS)
	    {
	      error ("%qE not specified in enclosing teams construct",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing teams construct");
	    }
	  else
	    gcc_unreachable ();
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Scan outward: firstprivate if DECL is non-shared anywhere up
	     to (and including) the innermost parallel/teams region.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
		continue;
	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* Reached the outermost context: locals and parms of the
	     current function are firstprivate by default in a task.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real use of a VLA that was so far only LOCAL/unseen: mark its
     pointer replacement variable (from DECL_VALUE_EXPR) as seen too.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  SIMD is 0
   for non-simd loops, 1 for simd, 2 for combined for-simd (the value
   selects which clause combinations are diagnosed).  Returns true if the
   decl's sharing is decided in the innermost scope.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Force the iteration variable private after diagnosing.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* The user explicitly listed the iteration variable in a
	     clause; check that the clause kind is legal for it.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* No entry here: only look through worksharing/simd contexts.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  COPYPRIVATE selects the pessimistic
   assumption for by-reference decls with no entry anywhere.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	return !(is_global_var (decl)
		 /* References might be private, but might be shared too,
		    when checking for copyprivate, assume they might be
		    private, otherwise assume they might be shared.  */
		 || (!copyprivate
		     && lang_hooks.decls.omp_privatize_by_reference (decl)));

      /* Skip over target constructs.  NOTE: this `continue' jumps to
	 the while-condition below, so the walk only proceeds past a
	 target context if its region_type matches that test.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
	continue;

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
      if (n != NULL)
	/* Private iff the entry has no SHARED bit.  */
	return (n->value & GOVD_SHARED) == 0;
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD);
  return false;
}
  5128. /* Return true if the CTX is combined with distribute and thus
  5129. lastprivate can't be supported. */
  5130. static bool
  5131. omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
  5132. {
  5133. do
  5134. {
  5135. if (ctx->outer_context == NULL)
  5136. return false;
  5137. ctx = ctx->outer_context;
  5138. switch (ctx->region_type)
  5139. {
  5140. case ORT_WORKSHARE:
  5141. if (!ctx->combined_loop)
  5142. return false;
  5143. if (ctx->distribute)
  5144. return true;
  5145. break;
  5146. case ORT_COMBINED_PARALLEL:
  5147. break;
  5148. case ORT_COMBINED_TEAMS:
  5149. return true;
  5150. default:
  5151. return false;
  5152. }
  5153. }
  5154. while (1);
  5155. }
  5156. /* Scan the OMP clauses in *LIST_P, installing mappings into a new
  5157. and previous omp contexts. */
  5158. static void
  5159. gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
  5160. enum omp_region_type region_type)
  5161. {
  5162. struct gimplify_omp_ctx *ctx, *outer_ctx;
  5163. tree c;
  5164. ctx = new_omp_context (region_type);
  5165. outer_ctx = ctx->outer_context;
  5166. while ((c = *list_p) != NULL)
  5167. {
  5168. bool remove = false;
  5169. bool notice_outer = true;
  5170. const char *check_non_private = NULL;
  5171. unsigned int flags;
  5172. tree decl;
  5173. switch (OMP_CLAUSE_CODE (c))
  5174. {
  5175. case OMP_CLAUSE_PRIVATE:
  5176. flags = GOVD_PRIVATE | GOVD_EXPLICIT;
  5177. if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
  5178. {
  5179. flags |= GOVD_PRIVATE_OUTER_REF;
  5180. OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
  5181. }
  5182. else
  5183. notice_outer = false;
  5184. goto do_add;
  5185. case OMP_CLAUSE_SHARED:
  5186. flags = GOVD_SHARED | GOVD_EXPLICIT;
  5187. goto do_add;
  5188. case OMP_CLAUSE_FIRSTPRIVATE:
  5189. flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
  5190. check_non_private = "firstprivate";
  5191. goto do_add;
  5192. case OMP_CLAUSE_LASTPRIVATE:
  5193. flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
  5194. check_non_private = "lastprivate";
  5195. decl = OMP_CLAUSE_DECL (c);
  5196. if (omp_no_lastprivate (ctx))
  5197. {
  5198. notice_outer = false;
  5199. flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
  5200. }
  5201. else if (error_operand_p (decl))
  5202. goto do_add;
  5203. else if (outer_ctx
  5204. && outer_ctx->region_type == ORT_COMBINED_PARALLEL
  5205. && splay_tree_lookup (outer_ctx->variables,
  5206. (splay_tree_key) decl) == NULL)
  5207. omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
  5208. else if (outer_ctx
  5209. && outer_ctx->region_type == ORT_WORKSHARE
  5210. && outer_ctx->combined_loop
  5211. && splay_tree_lookup (outer_ctx->variables,
  5212. (splay_tree_key) decl) == NULL
  5213. && !omp_check_private (outer_ctx, decl, false))
  5214. {
  5215. omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
  5216. if (outer_ctx->outer_context
  5217. && (outer_ctx->outer_context->region_type
  5218. == ORT_COMBINED_PARALLEL)
  5219. && splay_tree_lookup (outer_ctx->outer_context->variables,
  5220. (splay_tree_key) decl) == NULL)
  5221. omp_add_variable (outer_ctx->outer_context, decl,
  5222. GOVD_SHARED | GOVD_SEEN);
  5223. }
  5224. goto do_add;
  5225. case OMP_CLAUSE_REDUCTION:
  5226. flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
  5227. check_non_private = "reduction";
  5228. goto do_add;
  5229. case OMP_CLAUSE_LINEAR:
  5230. if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
  5231. is_gimple_val, fb_rvalue) == GS_ERROR)
  5232. {
  5233. remove = true;
  5234. break;
  5235. }
  5236. else
  5237. {
  5238. /* For combined #pragma omp parallel for simd, need to put
  5239. lastprivate and perhaps firstprivate too on the
  5240. parallel. Similarly for #pragma omp for simd. */
  5241. struct gimplify_omp_ctx *octx = outer_ctx;
  5242. decl = NULL_TREE;
  5243. if (omp_no_lastprivate (ctx))
  5244. OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
  5245. do
  5246. {
  5247. if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
  5248. && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
  5249. break;
  5250. decl = OMP_CLAUSE_DECL (c);
  5251. if (error_operand_p (decl))
  5252. {
  5253. decl = NULL_TREE;
  5254. break;
  5255. }
  5256. if (octx
  5257. && octx->region_type == ORT_WORKSHARE
  5258. && octx->combined_loop)
  5259. {
  5260. if (octx->outer_context
  5261. && (octx->outer_context->region_type
  5262. == ORT_COMBINED_PARALLEL
  5263. || (octx->outer_context->region_type
  5264. == ORT_COMBINED_TEAMS)))
  5265. octx = octx->outer_context;
  5266. else if (omp_check_private (octx, decl, false))
  5267. break;
  5268. }
  5269. else
  5270. break;
  5271. gcc_checking_assert (splay_tree_lookup (octx->variables,
  5272. (splay_tree_key)
  5273. decl) == NULL);
  5274. flags = GOVD_SEEN;
  5275. if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
  5276. flags |= GOVD_FIRSTPRIVATE;
  5277. if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
  5278. flags |= GOVD_LASTPRIVATE;
  5279. omp_add_variable (octx, decl, flags);
  5280. if (octx->outer_context == NULL)
  5281. break;
  5282. octx = octx->outer_context;
  5283. }
  5284. while (1);
  5285. if (octx
  5286. && decl
  5287. && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
  5288. || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
  5289. omp_notice_variable (octx, decl, true);
  5290. }
  5291. flags = GOVD_LINEAR | GOVD_EXPLICIT;
  5292. if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
  5293. && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
  5294. {
  5295. notice_outer = false;
  5296. flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
  5297. }
  5298. goto do_add;
  5299. case OMP_CLAUSE_MAP:
  5300. decl = OMP_CLAUSE_DECL (c);
  5301. if (error_operand_p (decl))
  5302. {
  5303. remove = true;
  5304. break;
  5305. }
  5306. if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
  5307. OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
  5308. : TYPE_SIZE_UNIT (TREE_TYPE (decl));
  5309. if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
  5310. NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
  5311. {
  5312. remove = true;
  5313. break;
  5314. }
  5315. if (!DECL_P (decl))
  5316. {
  5317. if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
  5318. NULL, is_gimple_lvalue, fb_lvalue)
  5319. == GS_ERROR)
  5320. {
  5321. remove = true;
  5322. break;
  5323. }
  5324. break;
  5325. }
  5326. flags = GOVD_MAP | GOVD_EXPLICIT;
  5327. goto do_add;
  5328. case OMP_CLAUSE_DEPEND:
  5329. if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
  5330. {
  5331. gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
  5332. NULL, is_gimple_val, fb_rvalue);
  5333. OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
  5334. }
  5335. if (error_operand_p (OMP_CLAUSE_DECL (c)))
  5336. {
  5337. remove = true;
  5338. break;
  5339. }
  5340. OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
  5341. if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
  5342. is_gimple_val, fb_rvalue) == GS_ERROR)
  5343. {
  5344. remove = true;
  5345. break;
  5346. }
  5347. break;
  5348. case OMP_CLAUSE_TO:
  5349. case OMP_CLAUSE_FROM:
  5350. case OMP_CLAUSE__CACHE_:
  5351. decl = OMP_CLAUSE_DECL (c);
  5352. if (error_operand_p (decl))
  5353. {
  5354. remove = true;
  5355. break;
  5356. }
  5357. if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
  5358. OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
  5359. : TYPE_SIZE_UNIT (TREE_TYPE (decl));
  5360. if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
  5361. NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
  5362. {
  5363. remove = true;
  5364. break;
  5365. }
  5366. if (!DECL_P (decl))
  5367. {
  5368. if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
  5369. NULL, is_gimple_lvalue, fb_lvalue)
  5370. == GS_ERROR)
  5371. {
  5372. remove = true;
  5373. break;
  5374. }
  5375. break;
  5376. }
  5377. goto do_notice;
  5378. do_add:
  5379. decl = OMP_CLAUSE_DECL (c);
  5380. if (error_operand_p (decl))
  5381. {
  5382. remove = true;
  5383. break;
  5384. }
  5385. omp_add_variable (ctx, decl, flags);
  5386. if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
  5387. && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
  5388. {
  5389. omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
  5390. GOVD_LOCAL | GOVD_SEEN);
  5391. gimplify_omp_ctxp = ctx;
  5392. push_gimplify_context ();
  5393. OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
  5394. OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
  5395. gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
  5396. &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
  5397. pop_gimplify_context
  5398. (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
  5399. push_gimplify_context ();
  5400. gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
  5401. &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
  5402. pop_gimplify_context
  5403. (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
  5404. OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
  5405. OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
  5406. gimplify_omp_ctxp = outer_ctx;
  5407. }
  5408. else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
  5409. && OMP_CLAUSE_LASTPRIVATE_STMT (c))
  5410. {
  5411. gimplify_omp_ctxp = ctx;
  5412. push_gimplify_context ();
  5413. if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
  5414. {
  5415. tree bind = build3 (BIND_EXPR, void_type_node, NULL,
  5416. NULL, NULL);
  5417. TREE_SIDE_EFFECTS (bind) = 1;
  5418. BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
  5419. OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
  5420. }
  5421. gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
  5422. &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
  5423. pop_gimplify_context
  5424. (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
  5425. OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
  5426. gimplify_omp_ctxp = outer_ctx;
  5427. }
  5428. else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
  5429. && OMP_CLAUSE_LINEAR_STMT (c))
  5430. {
  5431. gimplify_omp_ctxp = ctx;
  5432. push_gimplify_context ();
  5433. if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
  5434. {
  5435. tree bind = build3 (BIND_EXPR, void_type_node, NULL,
  5436. NULL, NULL);
  5437. TREE_SIDE_EFFECTS (bind) = 1;
  5438. BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
  5439. OMP_CLAUSE_LINEAR_STMT (c) = bind;
  5440. }
  5441. gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
  5442. &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
  5443. pop_gimplify_context
  5444. (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
  5445. OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
  5446. gimplify_omp_ctxp = outer_ctx;
  5447. }
  5448. if (notice_outer)
  5449. goto do_notice;
  5450. break;
  5451. case OMP_CLAUSE_COPYIN:
  5452. case OMP_CLAUSE_COPYPRIVATE:
  5453. decl = OMP_CLAUSE_DECL (c);
  5454. if (error_operand_p (decl))
  5455. {
  5456. remove = true;
  5457. break;
  5458. }
  5459. if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
  5460. && !remove
  5461. && !omp_check_private (ctx, decl, true))
  5462. {
  5463. remove = true;
  5464. if (is_global_var (decl))
  5465. {
  5466. if (DECL_THREAD_LOCAL_P (decl))
  5467. remove = false;
  5468. else if (DECL_HAS_VALUE_EXPR_P (decl))
  5469. {
  5470. tree value = get_base_address (DECL_VALUE_EXPR (decl));
  5471. if (value
  5472. && DECL_P (value)
  5473. && DECL_THREAD_LOCAL_P (value))
  5474. remove = false;
  5475. }
  5476. }
  5477. if (remove)
  5478. error_at (OMP_CLAUSE_LOCATION (c),
  5479. "copyprivate variable %qE is not threadprivate"
  5480. " or private in outer context", DECL_NAME (decl));
  5481. }
  5482. do_notice:
  5483. if (outer_ctx)
  5484. omp_notice_variable (outer_ctx, decl, true);
  5485. if (check_non_private
  5486. && region_type == ORT_WORKSHARE
  5487. && omp_check_private (ctx, decl, false))
  5488. {
  5489. error ("%s variable %qE is private in outer context",
  5490. check_non_private, DECL_NAME (decl));
  5491. remove = true;
  5492. }
  5493. break;
  5494. case OMP_CLAUSE_FINAL:
  5495. case OMP_CLAUSE_IF:
  5496. OMP_CLAUSE_OPERAND (c, 0)
  5497. = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
  5498. /* Fall through. */
  5499. case OMP_CLAUSE_SCHEDULE:
  5500. case OMP_CLAUSE_NUM_THREADS:
  5501. case OMP_CLAUSE_NUM_TEAMS:
  5502. case OMP_CLAUSE_THREAD_LIMIT:
  5503. case OMP_CLAUSE_DIST_SCHEDULE:
  5504. case OMP_CLAUSE_DEVICE:
  5505. case OMP_CLAUSE__CILK_FOR_COUNT_:
  5506. case OMP_CLAUSE_ASYNC:
  5507. case OMP_CLAUSE_WAIT:
  5508. case OMP_CLAUSE_NUM_GANGS:
  5509. case OMP_CLAUSE_NUM_WORKERS:
  5510. case OMP_CLAUSE_VECTOR_LENGTH:
  5511. case OMP_CLAUSE_GANG:
  5512. case OMP_CLAUSE_WORKER:
  5513. case OMP_CLAUSE_VECTOR:
  5514. if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
  5515. is_gimple_val, fb_rvalue) == GS_ERROR)
  5516. remove = true;
  5517. if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_GANG
  5518. && gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
  5519. is_gimple_val, fb_rvalue) == GS_ERROR)
  5520. remove = true;
  5521. break;
  5522. case OMP_CLAUSE_DEVICE_RESIDENT:
  5523. case OMP_CLAUSE_USE_DEVICE:
  5524. case OMP_CLAUSE_INDEPENDENT:
  5525. remove = true;
  5526. break;
  5527. case OMP_CLAUSE_NOWAIT:
  5528. case OMP_CLAUSE_ORDERED:
  5529. case OMP_CLAUSE_UNTIED:
  5530. case OMP_CLAUSE_COLLAPSE:
  5531. case OMP_CLAUSE_AUTO:
  5532. case OMP_CLAUSE_SEQ:
  5533. case OMP_CLAUSE_MERGEABLE:
  5534. case OMP_CLAUSE_PROC_BIND:
  5535. case OMP_CLAUSE_SAFELEN:
  5536. break;
  5537. case OMP_CLAUSE_ALIGNED:
  5538. decl = OMP_CLAUSE_DECL (c);
  5539. if (error_operand_p (decl))
  5540. {
  5541. remove = true;
  5542. break;
  5543. }
  5544. if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
  5545. is_gimple_val, fb_rvalue) == GS_ERROR)
  5546. {
  5547. remove = true;
  5548. break;
  5549. }
  5550. if (!is_global_var (decl)
  5551. && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
  5552. omp_add_variable (ctx, decl, GOVD_ALIGNED);
  5553. break;
  5554. case OMP_CLAUSE_DEFAULT:
  5555. ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
  5556. break;
  5557. default:
  5558. gcc_unreachable ();
  5559. }
  5560. if (remove)
  5561. *list_p = OMP_CLAUSE_CHAIN (c);
  5562. else
  5563. list_p = &OMP_CLAUSE_CHAIN (c);
  5564. }
  5565. gimplify_omp_ctxp = ctx;
  5566. }
/* Bundle of the two arguments gimplify_adjust_omp_clauses_1 needs, so
   they can be threaded through splay_tree_foreach's single void * DATA
   pointer.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Head of the clause list being adjusted; implicit clauses are
     prepended here.  */
  tree *list_p;
  /* Statement sequence passed on to the langhook finishing each
     clause.  */
  gimple_seq *pre_p;
};
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  Callback for
   splay_tree_foreach over the context's variable table: N maps a decl
   to its GOVD_* flags, DATA is a gimplify_adjust_omp_clauses_data
   carrying the clause list and pre-statement sequence.  Builds an
   implicit clause for each seen, non-explicit variable.  Always
   returns 0 so the traversal continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses were already handled; GOVD_LOCAL decls belong to
     the body and need no data-sharing clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Variables never referenced in the region get no implicit clause.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the frontend decide whether to emit a debug-only PRIVATE
       clause for this decl.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Map the GOVD_* data-sharing class to the clause code to build.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth an implicit SHARED clause if some
	     enclosing context privatizes (or maps) it; otherwise it is
	     shared anyway and no clause is needed.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    /* ALIGNED on its own produces no data-sharing clause.  */
    return 0;
  else
    gcc_unreachable ();

  /* Prepend the new implicit clause to the list.  */
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_MAP)
    {
      OMP_CLAUSE_SET_MAP_KIND (clause,
			       flags & GOVD_MAP_TO_ONLY
			       ? GOMP_MAP_TO
			       : GOMP_MAP_TOFROM);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: its DECL_VALUE_EXPR is *ptr, so map
	     the pointed-to storage by TYPE_SIZE_UNIT and add a second
	     GOMP_MAP_POINTER clause for the pointer itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Both FIRSTPRIVATE and LASTPRIVATE recorded: emit a companion
	 LASTPRIVATE clause marked as firstprivate too.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Finish the clause with the enclosing context current, then
	 restore ours.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Likewise finish the main clause in the outer context.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  gimplify_omp_ctxp = ctx;
  return 0;
}
  5693. static void
  5694. gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
  5695. {
  5696. struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  5697. tree c, decl;
  5698. while ((c = *list_p) != NULL)
  5699. {
  5700. splay_tree_node n;
  5701. bool remove = false;
  5702. switch (OMP_CLAUSE_CODE (c))
  5703. {
  5704. case OMP_CLAUSE_PRIVATE:
  5705. case OMP_CLAUSE_SHARED:
  5706. case OMP_CLAUSE_FIRSTPRIVATE:
  5707. case OMP_CLAUSE_LINEAR:
  5708. decl = OMP_CLAUSE_DECL (c);
  5709. n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
  5710. remove = !(n->value & GOVD_SEEN);
  5711. if (! remove)
  5712. {
  5713. bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
  5714. if ((n->value & GOVD_DEBUG_PRIVATE)
  5715. || lang_hooks.decls.omp_private_debug_clause (decl, shared))
  5716. {
  5717. gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
  5718. || ((n->value & GOVD_DATA_SHARE_CLASS)
  5719. == GOVD_PRIVATE));
  5720. OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
  5721. OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
  5722. }
  5723. }
  5724. break;
  5725. case OMP_CLAUSE_LASTPRIVATE:
  5726. /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
  5727. accurately reflect the presence of a FIRSTPRIVATE clause. */
  5728. decl = OMP_CLAUSE_DECL (c);
  5729. n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
  5730. OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
  5731. = (n->value & GOVD_FIRSTPRIVATE) != 0;
  5732. if (omp_no_lastprivate (ctx))
  5733. {
  5734. if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
  5735. remove = true;
  5736. else
  5737. OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
  5738. }
  5739. break;
  5740. case OMP_CLAUSE_ALIGNED:
  5741. decl = OMP_CLAUSE_DECL (c);
  5742. if (!is_global_var (decl))
  5743. {
  5744. n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
  5745. remove = n == NULL || !(n->value & GOVD_SEEN);
  5746. if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
  5747. {
  5748. struct gimplify_omp_ctx *octx;
  5749. if (n != NULL
  5750. && (n->value & (GOVD_DATA_SHARE_CLASS
  5751. & ~GOVD_FIRSTPRIVATE)))
  5752. remove = true;
  5753. else
  5754. for (octx = ctx->outer_context; octx;
  5755. octx = octx->outer_context)
  5756. {
  5757. n = splay_tree_lookup (octx->variables,
  5758. (splay_tree_key) decl);
  5759. if (n == NULL)
  5760. continue;
  5761. if (n->value & GOVD_LOCAL)
  5762. break;
  5763. /* We have to avoid assigning a shared variable
  5764. to itself when trying to add
  5765. __builtin_assume_aligned. */
  5766. if (n->value & GOVD_SHARED)
  5767. {
  5768. remove = true;
  5769. break;
  5770. }
  5771. }
  5772. }
  5773. }
  5774. else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
  5775. {
  5776. n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
  5777. if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
  5778. remove = true;
  5779. }
  5780. break;
  5781. case OMP_CLAUSE_MAP:
  5782. decl = OMP_CLAUSE_DECL (c);
  5783. if (!DECL_P (decl))
  5784. break;
  5785. n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
  5786. if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
  5787. remove = true;
  5788. else if (DECL_SIZE (decl)
  5789. && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
  5790. && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER)
  5791. {
  5792. /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
  5793. for these, TREE_CODE (DECL_SIZE (decl)) will always be
  5794. INTEGER_CST. */
  5795. gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
  5796. tree decl2 = DECL_VALUE_EXPR (decl);
  5797. gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
  5798. decl2 = TREE_OPERAND (decl2, 0);
  5799. gcc_assert (DECL_P (decl2));
  5800. tree mem = build_simple_mem_ref (decl2);
  5801. OMP_CLAUSE_DECL (c) = mem;
  5802. OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
  5803. if (ctx->outer_context)
  5804. {
  5805. omp_notice_variable (ctx->outer_context, decl2, true);
  5806. omp_notice_variable (ctx->outer_context,
  5807. OMP_CLAUSE_SIZE (c), true);
  5808. }
  5809. tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
  5810. OMP_CLAUSE_MAP);
  5811. OMP_CLAUSE_DECL (nc) = decl;
  5812. OMP_CLAUSE_SIZE (nc) = size_zero_node;
  5813. OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
  5814. OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
  5815. OMP_CLAUSE_CHAIN (c) = nc;
  5816. c = nc;
  5817. }
  5818. else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
  5819. OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
  5820. break;
  5821. case OMP_CLAUSE_TO:
  5822. case OMP_CLAUSE_FROM:
  5823. case OMP_CLAUSE__CACHE_:
  5824. decl = OMP_CLAUSE_DECL (c);
  5825. if (!DECL_P (decl))
  5826. break;
  5827. if (DECL_SIZE (decl)
  5828. && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
  5829. {
  5830. tree decl2 = DECL_VALUE_EXPR (decl);
  5831. gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
  5832. decl2 = TREE_OPERAND (decl2, 0);
  5833. gcc_assert (DECL_P (decl2));
  5834. tree mem = build_simple_mem_ref (decl2);
  5835. OMP_CLAUSE_DECL (c) = mem;
  5836. OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
  5837. if (ctx->outer_context)
  5838. {
  5839. omp_notice_variable (ctx->outer_context, decl2, true);
  5840. omp_notice_variable (ctx->outer_context,
  5841. OMP_CLAUSE_SIZE (c), true);
  5842. }
  5843. }
  5844. else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
  5845. OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
  5846. break;
  5847. case OMP_CLAUSE_REDUCTION:
  5848. case OMP_CLAUSE_COPYIN:
  5849. case OMP_CLAUSE_COPYPRIVATE:
  5850. case OMP_CLAUSE_IF:
  5851. case OMP_CLAUSE_NUM_THREADS:
  5852. case OMP_CLAUSE_NUM_TEAMS:
  5853. case OMP_CLAUSE_THREAD_LIMIT:
  5854. case OMP_CLAUSE_DIST_SCHEDULE:
  5855. case OMP_CLAUSE_DEVICE:
  5856. case OMP_CLAUSE_SCHEDULE:
  5857. case OMP_CLAUSE_NOWAIT:
  5858. case OMP_CLAUSE_ORDERED:
  5859. case OMP_CLAUSE_DEFAULT:
  5860. case OMP_CLAUSE_UNTIED:
  5861. case OMP_CLAUSE_COLLAPSE:
  5862. case OMP_CLAUSE_FINAL:
  5863. case OMP_CLAUSE_MERGEABLE:
  5864. case OMP_CLAUSE_PROC_BIND:
  5865. case OMP_CLAUSE_SAFELEN:
  5866. case OMP_CLAUSE_DEPEND:
  5867. case OMP_CLAUSE__CILK_FOR_COUNT_:
  5868. case OMP_CLAUSE_ASYNC:
  5869. case OMP_CLAUSE_WAIT:
  5870. case OMP_CLAUSE_DEVICE_RESIDENT:
  5871. case OMP_CLAUSE_USE_DEVICE:
  5872. case OMP_CLAUSE_INDEPENDENT:
  5873. case OMP_CLAUSE_NUM_GANGS:
  5874. case OMP_CLAUSE_NUM_WORKERS:
  5875. case OMP_CLAUSE_VECTOR_LENGTH:
  5876. case OMP_CLAUSE_GANG:
  5877. case OMP_CLAUSE_WORKER:
  5878. case OMP_CLAUSE_VECTOR:
  5879. case OMP_CLAUSE_AUTO:
  5880. case OMP_CLAUSE_SEQ:
  5881. break;
  5882. default:
  5883. gcc_unreachable ();
  5884. }
  5885. if (remove)
  5886. *list_p = OMP_CLAUSE_CHAIN (c);
  5887. else
  5888. list_p = &OMP_CLAUSE_CHAIN (c);
  5889. }
  5890. /* Add in any implicit data sharing. */
  5891. struct gimplify_adjust_omp_clauses_data data;
  5892. data.list_p = list_p;
  5893. data.pre_p = pre_p;
  5894. splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
  5895. gimplify_omp_ctxp = ctx->outer_context;
  5896. delete_omp_context (ctx);
  5897. }
  5898. /* Gimplify OACC_CACHE. */
  5899. static void
  5900. gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
  5901. {
  5902. tree expr = *expr_p;
  5903. gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_WORKSHARE);
  5904. gimplify_adjust_omp_clauses (pre_p, &OACC_CACHE_CLAUSES (expr));
  5905. /* TODO: Do something sensible with this information. */
  5906. *expr_p = NULL_TREE;
  5907. }
  5908. /* Gimplify the contents of an OMP_PARALLEL statement. This involves
  5909. gimplification of the body, as well as scanning the body for used
  5910. variables. We need to do this scan now, because variable-sized
  5911. decls will be decomposed during gimplification. */
  5912. static void
  5913. gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
  5914. {
  5915. tree expr = *expr_p;
  5916. gimple g;
  5917. gimple_seq body = NULL;
  5918. gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
  5919. OMP_PARALLEL_COMBINED (expr)
  5920. ? ORT_COMBINED_PARALLEL
  5921. : ORT_PARALLEL);
  5922. push_gimplify_context ();
  5923. g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
  5924. if (gimple_code (g) == GIMPLE_BIND)
  5925. pop_gimplify_context (g);
  5926. else
  5927. pop_gimplify_context (NULL);
  5928. gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));
  5929. g = gimple_build_omp_parallel (body,
  5930. OMP_PARALLEL_CLAUSES (expr),
  5931. NULL_TREE, NULL_TREE);
  5932. if (OMP_PARALLEL_COMBINED (expr))
  5933. gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
  5934. gimplify_seq_add_stmt (pre_p, g);
  5935. *expr_p = NULL_TREE;
  5936. }
  5937. /* Gimplify the contents of an OMP_TASK statement. This involves
  5938. gimplification of the body, as well as scanning the body for used
  5939. variables. We need to do this scan now, because variable-sized
  5940. decls will be decomposed during gimplification. */
  5941. static void
  5942. gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
  5943. {
  5944. tree expr = *expr_p;
  5945. gimple g;
  5946. gimple_seq body = NULL;
  5947. gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
  5948. find_omp_clause (OMP_TASK_CLAUSES (expr),
  5949. OMP_CLAUSE_UNTIED)
  5950. ? ORT_UNTIED_TASK : ORT_TASK);
  5951. push_gimplify_context ();
  5952. g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
  5953. if (gimple_code (g) == GIMPLE_BIND)
  5954. pop_gimplify_context (g);
  5955. else
  5956. pop_gimplify_context (NULL);
  5957. gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));
  5958. g = gimple_build_omp_task (body,
  5959. OMP_TASK_CLAUSES (expr),
  5960. NULL_TREE, NULL_TREE,
  5961. NULL_TREE, NULL_TREE, NULL_TREE);
  5962. gimplify_seq_add_stmt (pre_p, g);
  5963. *expr_p = NULL_TREE;
  5964. }
  5965. /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
  5966. with non-NULL OMP_FOR_INIT. */
  5967. static tree
  5968. find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
  5969. {
  5970. *walk_subtrees = 0;
  5971. switch (TREE_CODE (*tp))
  5972. {
  5973. case OMP_FOR:
  5974. *walk_subtrees = 1;
  5975. /* FALLTHRU */
  5976. case OMP_SIMD:
  5977. if (OMP_FOR_INIT (*tp) != NULL_TREE)
  5978. return *tp;
  5979. break;
  5980. case BIND_EXPR:
  5981. case STATEMENT_LIST:
  5982. case OMP_PARALLEL:
  5983. *walk_subtrees = 1;
  5984. break;
  5985. default:
  5986. break;
  5987. }
  5988. return NULL_TREE;
  5989. }
  5990. /* Gimplify the gross structure of an OMP_FOR statement. */
  5991. static enum gimplify_status
  5992. gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
  5993. {
  5994. tree for_stmt, orig_for_stmt, decl, var, t;
  5995. enum gimplify_status ret = GS_ALL_DONE;
  5996. enum gimplify_status tret;
  5997. gomp_for *gfor;
  5998. gimple_seq for_body, for_pre_body;
  5999. int i;
  6000. bool simd;
  6001. bitmap has_decl_expr = NULL;
  6002. orig_for_stmt = for_stmt = *expr_p;
  6003. switch (TREE_CODE (for_stmt))
  6004. {
  6005. case OMP_FOR:
  6006. case CILK_FOR:
  6007. case OMP_DISTRIBUTE:
  6008. case OACC_LOOP:
  6009. simd = false;
  6010. break;
  6011. case OMP_SIMD:
  6012. case CILK_SIMD:
  6013. simd = true;
  6014. break;
  6015. default:
  6016. gcc_unreachable ();
  6017. }
  6018. /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
  6019. clause for the IV. */
  6020. if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
  6021. {
  6022. t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
  6023. gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
  6024. decl = TREE_OPERAND (t, 0);
  6025. for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
  6026. if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
  6027. && OMP_CLAUSE_DECL (c) == decl)
  6028. {
  6029. OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
  6030. break;
  6031. }
  6032. }
  6033. gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
  6034. simd ? ORT_SIMD : ORT_WORKSHARE);
  6035. if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
  6036. gimplify_omp_ctxp->distribute = true;
  6037. /* Handle OMP_FOR_INIT. */
  6038. for_pre_body = NULL;
  6039. if (simd && OMP_FOR_PRE_BODY (for_stmt))
  6040. {
  6041. has_decl_expr = BITMAP_ALLOC (NULL);
  6042. if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
  6043. && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
  6044. == VAR_DECL)
  6045. {
  6046. t = OMP_FOR_PRE_BODY (for_stmt);
  6047. bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
  6048. }
  6049. else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
  6050. {
  6051. tree_stmt_iterator si;
  6052. for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
  6053. tsi_next (&si))
  6054. {
  6055. t = tsi_stmt (si);
  6056. if (TREE_CODE (t) == DECL_EXPR
  6057. && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
  6058. bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
  6059. }
  6060. }
  6061. }
  6062. gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  6063. OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
  6064. if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
  6065. {
  6066. gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
  6067. for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
  6068. NULL, NULL);
  6069. gcc_assert (for_stmt != NULL_TREE);
  6070. gimplify_omp_ctxp->combined_loop = true;
  6071. }
  6072. for_body = NULL;
  6073. gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
  6074. == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  6075. gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
  6076. == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  6077. for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
  6078. {
  6079. t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
  6080. gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
  6081. decl = TREE_OPERAND (t, 0);
  6082. gcc_assert (DECL_P (decl));
  6083. gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
  6084. || POINTER_TYPE_P (TREE_TYPE (decl)));
  6085. /* Make sure the iteration variable is private. */
  6086. tree c = NULL_TREE;
  6087. tree c2 = NULL_TREE;
  6088. if (orig_for_stmt != for_stmt)
  6089. /* Do this only on innermost construct for combined ones. */;
  6090. else if (simd)
  6091. {
  6092. splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
  6093. (splay_tree_key)decl);
  6094. omp_is_private (gimplify_omp_ctxp, decl,
  6095. 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
  6096. != 1));
  6097. if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
  6098. omp_notice_variable (gimplify_omp_ctxp, decl, true);
  6099. else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
  6100. {
  6101. c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
  6102. OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
  6103. unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
  6104. if ((has_decl_expr
  6105. && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
  6106. || omp_no_lastprivate (gimplify_omp_ctxp))
  6107. {
  6108. OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
  6109. flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
  6110. }
  6111. OMP_CLAUSE_DECL (c) = decl;
  6112. OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
  6113. OMP_FOR_CLAUSES (for_stmt) = c;
  6114. omp_add_variable (gimplify_omp_ctxp, decl, flags);
  6115. struct gimplify_omp_ctx *outer
  6116. = gimplify_omp_ctxp->outer_context;
  6117. if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
  6118. {
  6119. if (outer->region_type == ORT_WORKSHARE
  6120. && outer->combined_loop)
  6121. {
  6122. if (outer->outer_context
  6123. && (outer->outer_context->region_type
  6124. == ORT_COMBINED_PARALLEL))
  6125. outer = outer->outer_context;
  6126. else if (omp_check_private (outer, decl, false))
  6127. outer = NULL;
  6128. }
  6129. else if (outer->region_type != ORT_COMBINED_PARALLEL)
  6130. outer = NULL;
  6131. if (outer)
  6132. {
  6133. omp_add_variable (outer, decl,
  6134. GOVD_LASTPRIVATE | GOVD_SEEN);
  6135. if (outer->outer_context)
  6136. omp_notice_variable (outer->outer_context, decl, true);
  6137. }
  6138. }
  6139. }
  6140. else
  6141. {
  6142. bool lastprivate
  6143. = (!has_decl_expr
  6144. || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
  6145. && !omp_no_lastprivate (gimplify_omp_ctxp);
  6146. struct gimplify_omp_ctx *outer
  6147. = gimplify_omp_ctxp->outer_context;
  6148. if (outer && lastprivate)
  6149. {
  6150. if (outer->region_type == ORT_WORKSHARE
  6151. && outer->combined_loop)
  6152. {
  6153. if (outer->outer_context
  6154. && (outer->outer_context->region_type
  6155. == ORT_COMBINED_PARALLEL))
  6156. outer = outer->outer_context;
  6157. else if (omp_check_private (outer, decl, false))
  6158. outer = NULL;
  6159. }
  6160. else if (outer->region_type != ORT_COMBINED_PARALLEL)
  6161. outer = NULL;
  6162. if (outer)
  6163. {
  6164. omp_add_variable (outer, decl,
  6165. GOVD_LASTPRIVATE | GOVD_SEEN);
  6166. if (outer->outer_context)
  6167. omp_notice_variable (outer->outer_context, decl, true);
  6168. }
  6169. }
  6170. c = build_omp_clause (input_location,
  6171. lastprivate ? OMP_CLAUSE_LASTPRIVATE
  6172. : OMP_CLAUSE_PRIVATE);
  6173. OMP_CLAUSE_DECL (c) = decl;
  6174. OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
  6175. OMP_FOR_CLAUSES (for_stmt) = c;
  6176. omp_add_variable (gimplify_omp_ctxp, decl,
  6177. (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
  6178. | GOVD_EXPLICIT | GOVD_SEEN);
  6179. c = NULL_TREE;
  6180. }
  6181. }
  6182. else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
  6183. omp_notice_variable (gimplify_omp_ctxp, decl, true);
  6184. else
  6185. omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
  6186. /* If DECL is not a gimple register, create a temporary variable to act
  6187. as an iteration counter. This is valid, since DECL cannot be
  6188. modified in the body of the loop. Similarly for any iteration vars
  6189. in simd with collapse > 1 where the iterator vars must be
  6190. lastprivate. */
  6191. if (orig_for_stmt != for_stmt)
  6192. var = decl;
  6193. else if (!is_gimple_reg (decl)
  6194. || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
  6195. {
  6196. var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
  6197. TREE_OPERAND (t, 0) = var;
  6198. gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
  6199. if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
  6200. {
  6201. c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
  6202. OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
  6203. OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
  6204. OMP_CLAUSE_DECL (c2) = var;
  6205. OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
  6206. OMP_FOR_CLAUSES (for_stmt) = c2;
  6207. omp_add_variable (gimplify_omp_ctxp, var,
  6208. GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
  6209. if (c == NULL_TREE)
  6210. {
  6211. c = c2;
  6212. c2 = NULL_TREE;
  6213. }
  6214. }
  6215. else
  6216. omp_add_variable (gimplify_omp_ctxp, var,
  6217. GOVD_PRIVATE | GOVD_SEEN);
  6218. }
  6219. else
  6220. var = decl;
  6221. tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
  6222. is_gimple_val, fb_rvalue);
  6223. ret = MIN (ret, tret);
  6224. if (ret == GS_ERROR)
  6225. return ret;
  6226. /* Handle OMP_FOR_COND. */
  6227. t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
  6228. gcc_assert (COMPARISON_CLASS_P (t));
  6229. gcc_assert (TREE_OPERAND (t, 0) == decl);
  6230. tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
  6231. is_gimple_val, fb_rvalue);
  6232. ret = MIN (ret, tret);
  6233. /* Handle OMP_FOR_INCR. */
  6234. t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
  6235. switch (TREE_CODE (t))
  6236. {
  6237. case PREINCREMENT_EXPR:
  6238. case POSTINCREMENT_EXPR:
  6239. {
  6240. tree decl = TREE_OPERAND (t, 0);
  6241. /* c_omp_for_incr_canonicalize_ptr() should have been
  6242. called to massage things appropriately. */
  6243. gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
  6244. if (orig_for_stmt != for_stmt)
  6245. break;
  6246. t = build_int_cst (TREE_TYPE (decl), 1);
  6247. if (c)
  6248. OMP_CLAUSE_LINEAR_STEP (c) = t;
  6249. t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
  6250. t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
  6251. TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
  6252. break;
  6253. }
  6254. case PREDECREMENT_EXPR:
  6255. case POSTDECREMENT_EXPR:
  6256. /* c_omp_for_incr_canonicalize_ptr() should have been
  6257. called to massage things appropriately. */
  6258. gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
  6259. if (orig_for_stmt != for_stmt)
  6260. break;
  6261. t = build_int_cst (TREE_TYPE (decl), -1);
  6262. if (c)
  6263. OMP_CLAUSE_LINEAR_STEP (c) = t;
  6264. t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
  6265. t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
  6266. TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
  6267. break;
  6268. case MODIFY_EXPR:
  6269. gcc_assert (TREE_OPERAND (t, 0) == decl);
  6270. TREE_OPERAND (t, 0) = var;
  6271. t = TREE_OPERAND (t, 1);
  6272. switch (TREE_CODE (t))
  6273. {
  6274. case PLUS_EXPR:
  6275. if (TREE_OPERAND (t, 1) == decl)
  6276. {
  6277. TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
  6278. TREE_OPERAND (t, 0) = var;
  6279. break;
  6280. }
  6281. /* Fallthru. */
  6282. case MINUS_EXPR:
  6283. case POINTER_PLUS_EXPR:
  6284. gcc_assert (TREE_OPERAND (t, 0) == decl);
  6285. TREE_OPERAND (t, 0) = var;
  6286. break;
  6287. default:
  6288. gcc_unreachable ();
  6289. }
  6290. tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
  6291. is_gimple_val, fb_rvalue);
  6292. ret = MIN (ret, tret);
  6293. if (c)
  6294. {
  6295. tree step = TREE_OPERAND (t, 1);
  6296. tree stept = TREE_TYPE (decl);
  6297. if (POINTER_TYPE_P (stept))
  6298. stept = sizetype;
  6299. step = fold_convert (stept, step);
  6300. if (TREE_CODE (t) == MINUS_EXPR)
  6301. step = fold_build1 (NEGATE_EXPR, stept, step);
  6302. OMP_CLAUSE_LINEAR_STEP (c) = step;
  6303. if (step != TREE_OPERAND (t, 1))
  6304. {
  6305. tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
  6306. &for_pre_body, NULL,
  6307. is_gimple_val, fb_rvalue);
  6308. ret = MIN (ret, tret);
  6309. }
  6310. }
  6311. break;
  6312. default:
  6313. gcc_unreachable ();
  6314. }
  6315. if (c2)
  6316. {
  6317. gcc_assert (c);
  6318. OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
  6319. }
  6320. if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
  6321. && orig_for_stmt == for_stmt)
  6322. {
  6323. for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
  6324. if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
  6325. && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
  6326. || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
  6327. && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
  6328. && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
  6329. && OMP_CLAUSE_DECL (c) == decl)
  6330. {
  6331. t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
  6332. gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
  6333. gcc_assert (TREE_OPERAND (t, 0) == var);
  6334. t = TREE_OPERAND (t, 1);
  6335. gcc_assert (TREE_CODE (t) == PLUS_EXPR
  6336. || TREE_CODE (t) == MINUS_EXPR
  6337. || TREE_CODE (t) == POINTER_PLUS_EXPR);
  6338. gcc_assert (TREE_OPERAND (t, 0) == var);
  6339. t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
  6340. TREE_OPERAND (t, 1));
  6341. gimple_seq *seq;
  6342. if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
  6343. seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
  6344. else
  6345. seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
  6346. gimplify_assign (decl, t, seq);
  6347. }
  6348. }
  6349. }
  6350. BITMAP_FREE (has_decl_expr);
  6351. gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
  6352. if (orig_for_stmt != for_stmt)
  6353. for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
  6354. {
  6355. t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
  6356. decl = TREE_OPERAND (t, 0);
  6357. var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
  6358. omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
  6359. TREE_OPERAND (t, 0) = var;
  6360. t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
  6361. TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
  6362. TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
  6363. }
  6364. gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));
  6365. int kind;
  6366. switch (TREE_CODE (orig_for_stmt))
  6367. {
  6368. case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
  6369. case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
  6370. case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
  6371. case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
  6372. case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
  6373. case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
  6374. default:
  6375. gcc_unreachable ();
  6376. }
  6377. gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
  6378. TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
  6379. for_pre_body);
  6380. if (orig_for_stmt != for_stmt)
  6381. gimple_omp_for_set_combined_p (gfor, true);
  6382. if (gimplify_omp_ctxp
  6383. && (gimplify_omp_ctxp->combined_loop
  6384. || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
  6385. && gimplify_omp_ctxp->outer_context
  6386. && gimplify_omp_ctxp->outer_context->combined_loop)))
  6387. {
  6388. gimple_omp_for_set_combined_into_p (gfor, true);
  6389. if (gimplify_omp_ctxp->combined_loop)
  6390. gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
  6391. else
  6392. gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
  6393. }
  6394. for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
  6395. {
  6396. t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
  6397. gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
  6398. gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
  6399. t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
  6400. gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
  6401. gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
  6402. t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
  6403. gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
  6404. }
  6405. gimplify_seq_add_stmt (pre_p, gfor);
  6406. if (ret != GS_ALL_DONE)
  6407. return GS_ERROR;
  6408. *expr_p = NULL_TREE;
  6409. return GS_ALL_DONE;
  6410. }
  6411. /* Gimplify the gross structure of several OMP constructs. */
  6412. static void
  6413. gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
  6414. {
  6415. tree expr = *expr_p;
  6416. gimple stmt;
  6417. gimple_seq body = NULL;
  6418. enum omp_region_type ort;
  6419. switch (TREE_CODE (expr))
  6420. {
  6421. case OMP_SECTIONS:
  6422. case OMP_SINGLE:
  6423. ort = ORT_WORKSHARE;
  6424. break;
  6425. case OACC_KERNELS:
  6426. case OACC_PARALLEL:
  6427. case OMP_TARGET:
  6428. ort = ORT_TARGET;
  6429. break;
  6430. case OACC_DATA:
  6431. case OMP_TARGET_DATA:
  6432. ort = ORT_TARGET_DATA;
  6433. break;
  6434. case OMP_TEAMS:
  6435. ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
  6436. break;
  6437. default:
  6438. gcc_unreachable ();
  6439. }
  6440. gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
  6441. if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
  6442. {
  6443. push_gimplify_context ();
  6444. gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
  6445. if (gimple_code (g) == GIMPLE_BIND)
  6446. pop_gimplify_context (g);
  6447. else
  6448. pop_gimplify_context (NULL);
  6449. if (ort == ORT_TARGET_DATA)
  6450. {
  6451. enum built_in_function end_ix;
  6452. switch (TREE_CODE (expr))
  6453. {
  6454. case OACC_DATA:
  6455. end_ix = BUILT_IN_GOACC_DATA_END;
  6456. break;
  6457. case OMP_TARGET_DATA:
  6458. end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
  6459. break;
  6460. default:
  6461. gcc_unreachable ();
  6462. }
  6463. tree fn = builtin_decl_explicit (end_ix);
  6464. g = gimple_build_call (fn, 0);
  6465. gimple_seq cleanup = NULL;
  6466. gimple_seq_add_stmt (&cleanup, g);
  6467. g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
  6468. body = NULL;
  6469. gimple_seq_add_stmt (&body, g);
  6470. }
  6471. }
  6472. else
  6473. gimplify_and_add (OMP_BODY (expr), &body);
  6474. gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));
  6475. switch (TREE_CODE (expr))
  6476. {
  6477. case OACC_DATA:
  6478. stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
  6479. OMP_CLAUSES (expr));
  6480. break;
  6481. case OACC_KERNELS:
  6482. stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
  6483. OMP_CLAUSES (expr));
  6484. break;
  6485. case OACC_PARALLEL:
  6486. stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
  6487. OMP_CLAUSES (expr));
  6488. break;
  6489. case OMP_SECTIONS:
  6490. stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
  6491. break;
  6492. case OMP_SINGLE:
  6493. stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
  6494. break;
  6495. case OMP_TARGET:
  6496. stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
  6497. OMP_CLAUSES (expr));
  6498. break;
  6499. case OMP_TARGET_DATA:
  6500. stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
  6501. OMP_CLAUSES (expr));
  6502. break;
  6503. case OMP_TEAMS:
  6504. stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
  6505. break;
  6506. default:
  6507. gcc_unreachable ();
  6508. }
  6509. gimplify_seq_add_stmt (pre_p, stmt);
  6510. *expr_p = NULL_TREE;
  6511. }
  6512. /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
  6513. target update constructs. */
  6514. static void
  6515. gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
  6516. {
  6517. tree expr = *expr_p, clauses;
  6518. int kind;
  6519. gomp_target *stmt;
  6520. switch (TREE_CODE (expr))
  6521. {
  6522. case OACC_ENTER_DATA:
  6523. clauses = OACC_ENTER_DATA_CLAUSES (expr);
  6524. kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
  6525. break;
  6526. case OACC_EXIT_DATA:
  6527. clauses = OACC_EXIT_DATA_CLAUSES (expr);
  6528. kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
  6529. break;
  6530. case OACC_UPDATE:
  6531. clauses = OACC_UPDATE_CLAUSES (expr);
  6532. kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
  6533. break;
  6534. case OMP_TARGET_UPDATE:
  6535. clauses = OMP_TARGET_UPDATE_CLAUSES (expr);
  6536. kind = GF_OMP_TARGET_KIND_UPDATE;
  6537. break;
  6538. default:
  6539. gcc_unreachable ();
  6540. }
  6541. gimplify_scan_omp_clauses (&clauses, pre_p, ORT_WORKSHARE);
  6542. gimplify_adjust_omp_clauses (pre_p, &clauses);
  6543. stmt = gimple_build_omp_target (NULL, kind, clauses);
  6544. gimplify_seq_add_stmt (pre_p, stmt);
  6545. *expr_p = NULL_TREE;
  6546. }
  6547. /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
  6548. stabilized the lhs of the atomic operation as *ADDR. Return true if
  6549. EXPR is this stabilized form. */
  6550. static bool
  6551. goa_lhs_expr_p (tree expr, tree addr)
  6552. {
  6553. /* Also include casts to other type variants. The C front end is fond
  6554. of adding these for e.g. volatile variables. This is like
  6555. STRIP_TYPE_NOPS but includes the main variant lookup. */
  6556. STRIP_USELESS_TYPE_CONVERSION (expr);
  6557. if (TREE_CODE (expr) == INDIRECT_REF)
  6558. {
  6559. expr = TREE_OPERAND (expr, 0);
  6560. while (expr != addr
  6561. && (CONVERT_EXPR_P (expr)
  6562. || TREE_CODE (expr) == NON_LVALUE_EXPR)
  6563. && TREE_CODE (expr) == TREE_CODE (addr)
  6564. && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
  6565. {
  6566. expr = TREE_OPERAND (expr, 0);
  6567. addr = TREE_OPERAND (addr, 0);
  6568. }
  6569. if (expr == addr)
  6570. return true;
  6571. return (TREE_CODE (addr) == ADDR_EXPR
  6572. && TREE_CODE (expr) == ADDR_EXPR
  6573. && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
  6574. }
  6575. if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
  6576. return true;
  6577. return false;
  6578. }
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the lhs itself: substitute the load
     temporary for it.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Already a gimple value: nothing to stabilize.  */
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      /* Stabilize operand 1 here, then fall through so operand 0 is
	 handled by the unary case.  */
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  /* Binary truth ops: do operand 1, then share the operand-0
	     handling with TRUTH_NOT_EXPR below.  */
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  /* Retry on the last operand of the chain.  */
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs did not occur anywhere in EXPR: evaluate the whole
     expression into a temporary instead.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
/* Gimplify an OMP_ATOMIC statement.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  /* Address of the atomically accessed location, stabilized by the
     front end (see goa_lhs_expr_p).  */
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* An atomic read carries no rhs expression.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* Temporary receiving the atomically loaded value.  */
  tmp_load = create_tmp_reg (type);
  /* Replace appearances of the lhs within RHS by TMP_LOAD and force
     everything else into gimple values; negative result means the
     gimplification of a subexpression failed.  */
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* Emit the atomic load before gimplifying RHS, which may refer to
     TMP_LOAD after the substitution above.  */
  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* For an atomic read, forward the loaded value as the stored one so
     the load/store pair stays matched.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  /* Propagate the seq_cst flag to both halves of the pair.  */
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* Decide which value, if any, the construct yields to the enclosing
     expression, and flag the producing statement accordingly.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      /* Hand back the loaded value.  */
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      /* Hand back the stored value.  */
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      /* Plain atomic update: no value is produced.  */
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
  6692. /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
  6693. body, and adding some EH bits. */
  6694. static enum gimplify_status
  6695. gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
  6696. {
  6697. tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  6698. gimple body_stmt;
  6699. gtransaction *trans_stmt;
  6700. gimple_seq body = NULL;
  6701. int subcode = 0;
  6702. /* Wrap the transaction body in a BIND_EXPR so we have a context
  6703. where to put decls for OMP. */
  6704. if (TREE_CODE (tbody) != BIND_EXPR)
  6705. {
  6706. tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
  6707. TREE_SIDE_EFFECTS (bind) = 1;
  6708. SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
  6709. TRANSACTION_EXPR_BODY (expr) = bind;
  6710. }
  6711. push_gimplify_context ();
  6712. temp = voidify_wrapper_expr (*expr_p, NULL);
  6713. body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  6714. pop_gimplify_context (body_stmt);
  6715. trans_stmt = gimple_build_transaction (body, NULL);
  6716. if (TRANSACTION_EXPR_OUTER (expr))
  6717. subcode = GTMA_IS_OUTER;
  6718. else if (TRANSACTION_EXPR_RELAXED (expr))
  6719. subcode = GTMA_IS_RELAXED;
  6720. gimple_transaction_set_subcode (trans_stmt, subcode);
  6721. gimplify_seq_add_stmt (pre_p, trans_stmt);
  6722. if (temp)
  6723. {
  6724. *expr_p = temp;
  6725. return GS_OK;
  6726. }
  6727. *expr_p = NULL_TREE;
  6728. return GS_ALL_DONE;
  6729. }
  6730. /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
  6731. expression produces a value to be used as an operand inside a GIMPLE
  6732. statement, the value will be stored back in *EXPR_P. This value will
  6733. be a tree of class tcc_declaration, tcc_constant, tcc_reference or
  6734. an SSA_NAME. The corresponding sequence of GIMPLE statements is
  6735. emitted in PRE_P and POST_P.
  6736. Additionally, this process may overwrite parts of the input
  6737. expression during gimplification. Ideally, it should be
  6738. possible to do non-destructive gimplification.
  6739. EXPR_P points to the GENERIC expression to convert to GIMPLE. If
  6740. the expression needs to evaluate to a value to be used as
  6741. an operand in a GIMPLE statement, this value will be stored in
  6742. *EXPR_P on exit. This happens when the caller specifies one
  6743. of fb_lvalue or fb_rvalue fallback flags.
  6744. PRE_P will contain the sequence of GIMPLE statements corresponding
  6745. to the evaluation of EXPR and all the side-effects that must
  6746. be executed before the main expression. On exit, the last
  6747. statement of PRE_P is the core statement being gimplified. For
  6748. instance, when gimplifying 'if (++a)' the last statement in
  6749. PRE_P will be 'if (t.1)' where t.1 is the result of
  6750. pre-incrementing 'a'.
  6751. POST_P will contain the sequence of GIMPLE statements corresponding
  6752. to the evaluation of all the side-effects that must be executed
  6753. after the main expression. If this is NULL, the post
  6754. side-effects are stored at the end of PRE_P.
  6755. The reason why the output is split in two is to handle post
  6756. side-effects explicitly. In some cases, an expression may have
  6757. inner and outer post side-effects which need to be emitted in
  6758. an order different from the one given by the recursive
  6759. traversal. For instance, for the expression (*p--)++ the post
  6760. side-effects of '--' must actually occur *after* the post
  6761. side-effects of '++'. However, gimplification will first visit
  6762. the inner expression, so if a separate POST sequence was not
  6763. used, the resulting sequence would be:
  6764. 1 t.1 = *p
  6765. 2 p = p - 1
  6766. 3 t.2 = t.1 + 1
  6767. 4 *p = t.2
  6768. However, the post-decrement operation in line #2 must not be
  6769. evaluated until after the store to *p at line #4, so the
  6770. correct sequence should be:
  6771. 1 t.1 = *p
  6772. 2 t.2 = t.1 + 1
  6773. 3 *p = t.2
  6774. 4 p = p - 1
  6775. So, by specifying a separate post queue, it is possible
  6776. to emit the post side-effects in the correct order.
  6777. If POST_P is NULL, an internal queue will be used. Before
  6778. returning to the caller, the sequence POST_P is appended to
  6779. the main output sequence PRE_P.
  6780. GIMPLE_TEST_F points to a function that takes a tree T and
  6781. returns nonzero if T is in the GIMPLE form requested by the
  6782. caller. The GIMPLE predicates are in gimple.c.
  6783. FALLBACK tells the function what sort of a temporary we want if
  6784. gimplification cannot produce an expression that complies with
  6785. GIMPLE_TEST_F.
  6786. fb_none means that no temporary should be generated
  6787. fb_rvalue means that an rvalue is OK to generate
  6788. fb_lvalue means that an lvalue is OK to generate
  6789. fb_either means that either is OK, but an lvalue is preferable.
  6790. fb_mayfail means that gimplification may fail (in which case
  6791. GS_ERROR will be returned)
  6792. The return value is either GS_ERROR or GS_ALL_DONE, since this
  6793. function iterates until EXPR is completely gimplified or an error
  6794. occurs. */
  6795. enum gimplify_status
  6796. gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
  6797. bool (*gimple_test_f) (tree), fallback_t fallback)
  6798. {
  6799. tree tmp;
  6800. gimple_seq internal_pre = NULL;
  6801. gimple_seq internal_post = NULL;
  6802. tree save_expr;
  6803. bool is_statement;
  6804. location_t saved_location;
  6805. enum gimplify_status ret;
  6806. gimple_stmt_iterator pre_last_gsi, post_last_gsi;
  6807. save_expr = *expr_p;
  6808. if (save_expr == NULL_TREE)
  6809. return GS_ALL_DONE;
  6810. /* If we are gimplifying a top-level statement, PRE_P must be valid. */
  6811. is_statement = gimple_test_f == is_gimple_stmt;
  6812. if (is_statement)
  6813. gcc_assert (pre_p);
  6814. /* Consistency checks. */
  6815. if (gimple_test_f == is_gimple_reg)
  6816. gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  6817. else if (gimple_test_f == is_gimple_val
  6818. || gimple_test_f == is_gimple_call_addr
  6819. || gimple_test_f == is_gimple_condexpr
  6820. || gimple_test_f == is_gimple_mem_rhs
  6821. || gimple_test_f == is_gimple_mem_rhs_or_call
  6822. || gimple_test_f == is_gimple_reg_rhs
  6823. || gimple_test_f == is_gimple_reg_rhs_or_call
  6824. || gimple_test_f == is_gimple_asm_val
  6825. || gimple_test_f == is_gimple_mem_ref_addr)
  6826. gcc_assert (fallback & fb_rvalue);
  6827. else if (gimple_test_f == is_gimple_min_lval
  6828. || gimple_test_f == is_gimple_lvalue)
  6829. gcc_assert (fallback & fb_lvalue);
  6830. else if (gimple_test_f == is_gimple_addressable)
  6831. gcc_assert (fallback & fb_either);
  6832. else if (gimple_test_f == is_gimple_stmt)
  6833. gcc_assert (fallback == fb_none);
  6834. else
  6835. {
  6836. /* We should have recognized the GIMPLE_TEST_F predicate to
  6837. know what kind of fallback to use in case a temporary is
  6838. needed to hold the value or address of *EXPR_P. */
  6839. gcc_unreachable ();
  6840. }
  6841. /* We used to check the predicate here and return immediately if it
  6842. succeeds. This is wrong; the design is for gimplification to be
  6843. idempotent, and for the predicates to only test for valid forms, not
  6844. whether they are fully simplified. */
  6845. if (pre_p == NULL)
  6846. pre_p = &internal_pre;
  6847. if (post_p == NULL)
  6848. post_p = &internal_post;
  6849. /* Remember the last statements added to PRE_P and POST_P. Every
  6850. new statement added by the gimplification helpers needs to be
  6851. annotated with location information. To centralize the
  6852. responsibility, we remember the last statement that had been
  6853. added to both queues before gimplifying *EXPR_P. If
  6854. gimplification produces new statements in PRE_P and POST_P, those
  6855. statements will be annotated with the same location information
  6856. as *EXPR_P. */
  6857. pre_last_gsi = gsi_last (*pre_p);
  6858. post_last_gsi = gsi_last (*post_p);
  6859. saved_location = input_location;
  6860. if (save_expr != error_mark_node
  6861. && EXPR_HAS_LOCATION (*expr_p))
  6862. input_location = EXPR_LOCATION (*expr_p);
  6863. /* Loop over the specific gimplifiers until the toplevel node
  6864. remains the same. */
  6865. do
  6866. {
  6867. /* Strip away as many useless type conversions as possible
  6868. at the toplevel. */
  6869. STRIP_USELESS_TYPE_CONVERSION (*expr_p);
  6870. /* Remember the expr. */
  6871. save_expr = *expr_p;
  6872. /* Die, die, die, my darling. */
  6873. if (save_expr == error_mark_node
  6874. || (TREE_TYPE (save_expr)
  6875. && TREE_TYPE (save_expr) == error_mark_node))
  6876. {
  6877. ret = GS_ERROR;
  6878. break;
  6879. }
  6880. /* Do any language-specific gimplification. */
  6881. ret = ((enum gimplify_status)
  6882. lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
  6883. if (ret == GS_OK)
  6884. {
  6885. if (*expr_p == NULL_TREE)
  6886. break;
  6887. if (*expr_p != save_expr)
  6888. continue;
  6889. }
  6890. else if (ret != GS_UNHANDLED)
  6891. break;
  6892. /* Make sure that all the cases set 'ret' appropriately. */
  6893. ret = GS_UNHANDLED;
  6894. switch (TREE_CODE (*expr_p))
  6895. {
  6896. /* First deal with the special cases. */
  6897. case POSTINCREMENT_EXPR:
  6898. case POSTDECREMENT_EXPR:
  6899. case PREINCREMENT_EXPR:
  6900. case PREDECREMENT_EXPR:
  6901. ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
  6902. fallback != fb_none,
  6903. TREE_TYPE (*expr_p));
  6904. break;
  6905. case VIEW_CONVERT_EXPR:
  6906. if (is_gimple_reg_type (TREE_TYPE (*expr_p))
  6907. && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
  6908. {
  6909. ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
  6910. post_p, is_gimple_val, fb_rvalue);
  6911. recalculate_side_effects (*expr_p);
  6912. break;
  6913. }
  6914. /* Fallthru. */
  6915. case ARRAY_REF:
  6916. case ARRAY_RANGE_REF:
  6917. case REALPART_EXPR:
  6918. case IMAGPART_EXPR:
  6919. case COMPONENT_REF:
  6920. ret = gimplify_compound_lval (expr_p, pre_p, post_p,
  6921. fallback ? fallback : fb_rvalue);
  6922. break;
  6923. case COND_EXPR:
  6924. ret = gimplify_cond_expr (expr_p, pre_p, fallback);
  6925. /* C99 code may assign to an array in a structure value of a
  6926. conditional expression, and this has undefined behavior
  6927. only on execution, so create a temporary if an lvalue is
  6928. required. */
  6929. if (fallback == fb_lvalue)
  6930. {
  6931. *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
  6932. mark_addressable (*expr_p);
  6933. ret = GS_OK;
  6934. }
  6935. break;
  6936. case CALL_EXPR:
  6937. ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
  6938. /* C99 code may assign to an array in a structure returned
  6939. from a function, and this has undefined behavior only on
  6940. execution, so create a temporary if an lvalue is
  6941. required. */
  6942. if (fallback == fb_lvalue)
  6943. {
  6944. *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
  6945. mark_addressable (*expr_p);
  6946. ret = GS_OK;
  6947. }
  6948. break;
  6949. case TREE_LIST:
  6950. gcc_unreachable ();
  6951. case COMPOUND_EXPR:
  6952. ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
  6953. break;
  6954. case COMPOUND_LITERAL_EXPR:
  6955. ret = gimplify_compound_literal_expr (expr_p, pre_p,
  6956. gimple_test_f, fallback);
  6957. break;
  6958. case MODIFY_EXPR:
  6959. case INIT_EXPR:
  6960. ret = gimplify_modify_expr (expr_p, pre_p, post_p,
  6961. fallback != fb_none);
  6962. break;
  6963. case TRUTH_ANDIF_EXPR:
  6964. case TRUTH_ORIF_EXPR:
  6965. {
  6966. /* Preserve the original type of the expression and the
  6967. source location of the outer expression. */
  6968. tree org_type = TREE_TYPE (*expr_p);
  6969. *expr_p = gimple_boolify (*expr_p);
  6970. *expr_p = build3_loc (input_location, COND_EXPR,
  6971. org_type, *expr_p,
  6972. fold_convert_loc
  6973. (input_location,
  6974. org_type, boolean_true_node),
  6975. fold_convert_loc
  6976. (input_location,
  6977. org_type, boolean_false_node));
  6978. ret = GS_OK;
  6979. break;
  6980. }
  6981. case TRUTH_NOT_EXPR:
  6982. {
  6983. tree type = TREE_TYPE (*expr_p);
  6984. /* The parsers are careful to generate TRUTH_NOT_EXPR
  6985. only with operands that are always zero or one.
  6986. We do not fold here but handle the only interesting case
  6987. manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
  6988. *expr_p = gimple_boolify (*expr_p);
  6989. if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
  6990. *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
  6991. TREE_TYPE (*expr_p),
  6992. TREE_OPERAND (*expr_p, 0));
  6993. else
  6994. *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
  6995. TREE_TYPE (*expr_p),
  6996. TREE_OPERAND (*expr_p, 0),
  6997. build_int_cst (TREE_TYPE (*expr_p), 1));
  6998. if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
  6999. *expr_p = fold_convert_loc (input_location, type, *expr_p);
  7000. ret = GS_OK;
  7001. break;
  7002. }
  7003. case ADDR_EXPR:
  7004. ret = gimplify_addr_expr (expr_p, pre_p, post_p);
  7005. break;
  7006. case ANNOTATE_EXPR:
  7007. {
  7008. tree cond = TREE_OPERAND (*expr_p, 0);
  7009. tree kind = TREE_OPERAND (*expr_p, 1);
  7010. tree type = TREE_TYPE (cond);
  7011. if (!INTEGRAL_TYPE_P (type))
  7012. {
  7013. *expr_p = cond;
  7014. ret = GS_OK;
  7015. break;
  7016. }
  7017. tree tmp = create_tmp_var (type);
  7018. gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
  7019. gcall *call
  7020. = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
  7021. gimple_call_set_lhs (call, tmp);
  7022. gimplify_seq_add_stmt (pre_p, call);
  7023. *expr_p = tmp;
  7024. ret = GS_ALL_DONE;
  7025. break;
  7026. }
  7027. case VA_ARG_EXPR:
  7028. ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
  7029. break;
  7030. CASE_CONVERT:
  7031. if (IS_EMPTY_STMT (*expr_p))
  7032. {
  7033. ret = GS_ALL_DONE;
  7034. break;
  7035. }
  7036. if (VOID_TYPE_P (TREE_TYPE (*expr_p))
  7037. || fallback == fb_none)
  7038. {
  7039. /* Just strip a conversion to void (or in void context) and
  7040. try again. */
  7041. *expr_p = TREE_OPERAND (*expr_p, 0);
  7042. ret = GS_OK;
  7043. break;
  7044. }
  7045. ret = gimplify_conversion (expr_p);
  7046. if (ret == GS_ERROR)
  7047. break;
  7048. if (*expr_p != save_expr)
  7049. break;
  7050. /* FALLTHRU */
  7051. case FIX_TRUNC_EXPR:
  7052. /* unary_expr: ... | '(' cast ')' val | ... */
  7053. ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
  7054. is_gimple_val, fb_rvalue);
  7055. recalculate_side_effects (*expr_p);
  7056. break;
  7057. case INDIRECT_REF:
  7058. {
  7059. bool volatilep = TREE_THIS_VOLATILE (*expr_p);
  7060. bool notrap = TREE_THIS_NOTRAP (*expr_p);
  7061. tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
  7062. *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
  7063. if (*expr_p != save_expr)
  7064. {
  7065. ret = GS_OK;
  7066. break;
  7067. }
  7068. ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
  7069. is_gimple_reg, fb_rvalue);
  7070. if (ret == GS_ERROR)
  7071. break;
  7072. recalculate_side_effects (*expr_p);
  7073. *expr_p = fold_build2_loc (input_location, MEM_REF,
  7074. TREE_TYPE (*expr_p),
  7075. TREE_OPERAND (*expr_p, 0),
  7076. build_int_cst (saved_ptr_type, 0));
  7077. TREE_THIS_VOLATILE (*expr_p) = volatilep;
  7078. TREE_THIS_NOTRAP (*expr_p) = notrap;
  7079. ret = GS_OK;
  7080. break;
  7081. }
  7082. /* We arrive here through the various re-gimplifcation paths. */
  7083. case MEM_REF:
  7084. /* First try re-folding the whole thing. */
  7085. tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
  7086. TREE_OPERAND (*expr_p, 0),
  7087. TREE_OPERAND (*expr_p, 1));
  7088. if (tmp)
  7089. {
  7090. *expr_p = tmp;
  7091. recalculate_side_effects (*expr_p);
  7092. ret = GS_OK;
  7093. break;
  7094. }
  7095. /* Avoid re-gimplifying the address operand if it is already
  7096. in suitable form. Re-gimplifying would mark the address
  7097. operand addressable. Always gimplify when not in SSA form
  7098. as we still may have to gimplify decls with value-exprs. */
  7099. if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
  7100. || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
  7101. {
  7102. ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
  7103. is_gimple_mem_ref_addr, fb_rvalue);
  7104. if (ret == GS_ERROR)
  7105. break;
  7106. }
  7107. recalculate_side_effects (*expr_p);
  7108. ret = GS_ALL_DONE;
  7109. break;
  7110. /* Constants need not be gimplified. */
  7111. case INTEGER_CST:
  7112. case REAL_CST:
  7113. case FIXED_CST:
  7114. case STRING_CST:
  7115. case COMPLEX_CST:
  7116. case VECTOR_CST:
  7117. /* Drop the overflow flag on constants, we do not want
  7118. that in the GIMPLE IL. */
  7119. if (TREE_OVERFLOW_P (*expr_p))
  7120. *expr_p = drop_tree_overflow (*expr_p);
  7121. ret = GS_ALL_DONE;
  7122. break;
  7123. case CONST_DECL:
  7124. /* If we require an lvalue, such as for ADDR_EXPR, retain the
  7125. CONST_DECL node. Otherwise the decl is replaceable by its
  7126. value. */
  7127. /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
  7128. if (fallback & fb_lvalue)
  7129. ret = GS_ALL_DONE;
  7130. else
  7131. {
  7132. *expr_p = DECL_INITIAL (*expr_p);
  7133. ret = GS_OK;
  7134. }
  7135. break;
  7136. case DECL_EXPR:
  7137. ret = gimplify_decl_expr (expr_p, pre_p);
  7138. break;
  7139. case BIND_EXPR:
  7140. ret = gimplify_bind_expr (expr_p, pre_p);
  7141. break;
  7142. case LOOP_EXPR:
  7143. ret = gimplify_loop_expr (expr_p, pre_p);
  7144. break;
  7145. case SWITCH_EXPR:
  7146. ret = gimplify_switch_expr (expr_p, pre_p);
  7147. break;
  7148. case EXIT_EXPR:
  7149. ret = gimplify_exit_expr (expr_p);
  7150. break;
  7151. case GOTO_EXPR:
  7152. /* If the target is not LABEL, then it is a computed jump
  7153. and the target needs to be gimplified. */
  7154. if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
  7155. {
  7156. ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
  7157. NULL, is_gimple_val, fb_rvalue);
  7158. if (ret == GS_ERROR)
  7159. break;
  7160. }
  7161. gimplify_seq_add_stmt (pre_p,
  7162. gimple_build_goto (GOTO_DESTINATION (*expr_p)));
  7163. ret = GS_ALL_DONE;
  7164. break;
  7165. case PREDICT_EXPR:
  7166. gimplify_seq_add_stmt (pre_p,
  7167. gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
  7168. PREDICT_EXPR_OUTCOME (*expr_p)));
  7169. ret = GS_ALL_DONE;
  7170. break;
  7171. case LABEL_EXPR:
  7172. ret = GS_ALL_DONE;
  7173. gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
  7174. == current_function_decl);
  7175. gimplify_seq_add_stmt (pre_p,
  7176. gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
  7177. break;
  7178. case CASE_LABEL_EXPR:
  7179. ret = gimplify_case_label_expr (expr_p, pre_p);
  7180. break;
  7181. case RETURN_EXPR:
  7182. ret = gimplify_return_expr (*expr_p, pre_p);
  7183. break;
  7184. case CONSTRUCTOR:
  7185. /* Don't reduce this in place; let gimplify_init_constructor work its
  7186. magic. Buf if we're just elaborating this for side effects, just
  7187. gimplify any element that has side-effects. */
  7188. if (fallback == fb_none)
  7189. {
  7190. unsigned HOST_WIDE_INT ix;
  7191. tree val;
  7192. tree temp = NULL_TREE;
  7193. FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
  7194. if (TREE_SIDE_EFFECTS (val))
  7195. append_to_statement_list (val, &temp);
  7196. *expr_p = temp;
  7197. ret = temp ? GS_OK : GS_ALL_DONE;
  7198. }
  7199. /* C99 code may assign to an array in a constructed
  7200. structure or union, and this has undefined behavior only
  7201. on execution, so create a temporary if an lvalue is
  7202. required. */
  7203. else if (fallback == fb_lvalue)
  7204. {
  7205. *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
  7206. mark_addressable (*expr_p);
  7207. ret = GS_OK;
  7208. }
  7209. else
  7210. ret = GS_ALL_DONE;
  7211. break;
  7212. /* The following are special cases that are not handled by the
  7213. original GIMPLE grammar. */
  7214. /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
  7215. eliminated. */
  7216. case SAVE_EXPR:
  7217. ret = gimplify_save_expr (expr_p, pre_p, post_p);
  7218. break;
  7219. case BIT_FIELD_REF:
  7220. ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
  7221. post_p, is_gimple_lvalue, fb_either);
  7222. recalculate_side_effects (*expr_p);
  7223. break;
  7224. case TARGET_MEM_REF:
  7225. {
  7226. enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
  7227. if (TMR_BASE (*expr_p))
  7228. r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
  7229. post_p, is_gimple_mem_ref_addr, fb_either);
  7230. if (TMR_INDEX (*expr_p))
  7231. r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
  7232. post_p, is_gimple_val, fb_rvalue);
  7233. if (TMR_INDEX2 (*expr_p))
  7234. r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
  7235. post_p, is_gimple_val, fb_rvalue);
  7236. /* TMR_STEP and TMR_OFFSET are always integer constants. */
  7237. ret = MIN (r0, r1);
  7238. }
  7239. break;
  7240. case NON_LVALUE_EXPR:
  7241. /* This should have been stripped above. */
  7242. gcc_unreachable ();
  7243. case ASM_EXPR:
  7244. ret = gimplify_asm_expr (expr_p, pre_p, post_p);
  7245. break;
  7246. case TRY_FINALLY_EXPR:
  7247. case TRY_CATCH_EXPR:
  7248. {
  7249. gimple_seq eval, cleanup;
  7250. gtry *try_;
  7251. /* Calls to destructors are generated automatically in FINALLY/CATCH
  7252. block. They should have location as UNKNOWN_LOCATION. However,
  7253. gimplify_call_expr will reset these call stmts to input_location
  7254. if it finds stmt's location is unknown. To prevent resetting for
  7255. destructors, we set the input_location to unknown.
  7256. Note that this only affects the destructor calls in FINALLY/CATCH
  7257. block, and will automatically reset to its original value by the
  7258. end of gimplify_expr. */
  7259. input_location = UNKNOWN_LOCATION;
  7260. eval = cleanup = NULL;
  7261. gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
  7262. gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
  7263. /* Don't create bogus GIMPLE_TRY with empty cleanup. */
  7264. if (gimple_seq_empty_p (cleanup))
  7265. {
  7266. gimple_seq_add_seq (pre_p, eval);
  7267. ret = GS_ALL_DONE;
  7268. break;
  7269. }
  7270. try_ = gimple_build_try (eval, cleanup,
  7271. TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
  7272. ? GIMPLE_TRY_FINALLY
  7273. : GIMPLE_TRY_CATCH);
  7274. if (EXPR_HAS_LOCATION (save_expr))
  7275. gimple_set_location (try_, EXPR_LOCATION (save_expr));
  7276. else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
  7277. gimple_set_location (try_, saved_location);
  7278. if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
  7279. gimple_try_set_catch_is_cleanup (try_,
  7280. TRY_CATCH_IS_CLEANUP (*expr_p));
  7281. gimplify_seq_add_stmt (pre_p, try_);
  7282. ret = GS_ALL_DONE;
  7283. break;
  7284. }
  7285. case CLEANUP_POINT_EXPR:
  7286. ret = gimplify_cleanup_point_expr (expr_p, pre_p);
  7287. break;
  7288. case TARGET_EXPR:
  7289. ret = gimplify_target_expr (expr_p, pre_p, post_p);
  7290. break;
  7291. case CATCH_EXPR:
  7292. {
  7293. gimple c;
  7294. gimple_seq handler = NULL;
  7295. gimplify_and_add (CATCH_BODY (*expr_p), &handler);
  7296. c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
  7297. gimplify_seq_add_stmt (pre_p, c);
  7298. ret = GS_ALL_DONE;
  7299. break;
  7300. }
  7301. case EH_FILTER_EXPR:
  7302. {
  7303. gimple ehf;
  7304. gimple_seq failure = NULL;
  7305. gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
  7306. ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
  7307. gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
  7308. gimplify_seq_add_stmt (pre_p, ehf);
  7309. ret = GS_ALL_DONE;
  7310. break;
  7311. }
  7312. case OBJ_TYPE_REF:
  7313. {
  7314. enum gimplify_status r0, r1;
  7315. r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
  7316. post_p, is_gimple_val, fb_rvalue);
  7317. r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
  7318. post_p, is_gimple_val, fb_rvalue);
  7319. TREE_SIDE_EFFECTS (*expr_p) = 0;
  7320. ret = MIN (r0, r1);
  7321. }
  7322. break;
  7323. case LABEL_DECL:
  7324. /* We get here when taking the address of a label. We mark
  7325. the label as "forced"; meaning it can never be removed and
  7326. it is a potential target for any computed goto. */
  7327. FORCED_LABEL (*expr_p) = 1;
  7328. ret = GS_ALL_DONE;
  7329. break;
  7330. case STATEMENT_LIST:
  7331. ret = gimplify_statement_list (expr_p, pre_p);
  7332. break;
  7333. case WITH_SIZE_EXPR:
  7334. {
  7335. gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
  7336. post_p == &internal_post ? NULL : post_p,
  7337. gimple_test_f, fallback);
  7338. gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
  7339. is_gimple_val, fb_rvalue);
  7340. ret = GS_ALL_DONE;
  7341. }
  7342. break;
  7343. case VAR_DECL:
  7344. case PARM_DECL:
  7345. ret = gimplify_var_or_parm_decl (expr_p);
  7346. break;
  7347. case RESULT_DECL:
  7348. /* When within an OMP context, notice uses of variables. */
  7349. if (gimplify_omp_ctxp)
  7350. omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
  7351. ret = GS_ALL_DONE;
  7352. break;
  7353. case SSA_NAME:
  7354. /* Allow callbacks into the gimplifier during optimization. */
  7355. ret = GS_ALL_DONE;
  7356. break;
  7357. case OMP_PARALLEL:
  7358. gimplify_omp_parallel (expr_p, pre_p);
  7359. ret = GS_ALL_DONE;
  7360. break;
  7361. case OMP_TASK:
  7362. gimplify_omp_task (expr_p, pre_p);
  7363. ret = GS_ALL_DONE;
  7364. break;
  7365. case OMP_FOR:
  7366. case OMP_SIMD:
  7367. case CILK_SIMD:
  7368. case CILK_FOR:
  7369. case OMP_DISTRIBUTE:
  7370. case OACC_LOOP:
  7371. ret = gimplify_omp_for (expr_p, pre_p);
  7372. break;
  7373. case OACC_CACHE:
  7374. gimplify_oacc_cache (expr_p, pre_p);
  7375. ret = GS_ALL_DONE;
  7376. break;
  7377. case OACC_HOST_DATA:
  7378. case OACC_DECLARE:
  7379. sorry ("directive not yet implemented");
  7380. ret = GS_ALL_DONE;
  7381. break;
  7382. case OACC_KERNELS:
  7383. if (OACC_KERNELS_COMBINED (*expr_p))
  7384. sorry ("directive not yet implemented");
  7385. else
  7386. gimplify_omp_workshare (expr_p, pre_p);
  7387. ret = GS_ALL_DONE;
  7388. break;
  7389. case OACC_PARALLEL:
  7390. if (OACC_PARALLEL_COMBINED (*expr_p))
  7391. sorry ("directive not yet implemented");
  7392. else
  7393. gimplify_omp_workshare (expr_p, pre_p);
  7394. ret = GS_ALL_DONE;
  7395. break;
  7396. case OACC_DATA:
  7397. case OMP_SECTIONS:
  7398. case OMP_SINGLE:
  7399. case OMP_TARGET:
  7400. case OMP_TARGET_DATA:
  7401. case OMP_TEAMS:
  7402. gimplify_omp_workshare (expr_p, pre_p);
  7403. ret = GS_ALL_DONE;
  7404. break;
  7405. case OACC_ENTER_DATA:
  7406. case OACC_EXIT_DATA:
  7407. case OACC_UPDATE:
  7408. case OMP_TARGET_UPDATE:
  7409. gimplify_omp_target_update (expr_p, pre_p);
  7410. ret = GS_ALL_DONE;
  7411. break;
  7412. case OMP_SECTION:
  7413. case OMP_MASTER:
  7414. case OMP_TASKGROUP:
  7415. case OMP_ORDERED:
  7416. case OMP_CRITICAL:
  7417. {
  7418. gimple_seq body = NULL;
  7419. gimple g;
  7420. gimplify_and_add (OMP_BODY (*expr_p), &body);
  7421. switch (TREE_CODE (*expr_p))
  7422. {
  7423. case OMP_SECTION:
  7424. g = gimple_build_omp_section (body);
  7425. break;
  7426. case OMP_MASTER:
  7427. g = gimple_build_omp_master (body);
  7428. break;
  7429. case OMP_TASKGROUP:
  7430. {
  7431. gimple_seq cleanup = NULL;
  7432. tree fn
  7433. = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
  7434. g = gimple_build_call (fn, 0);
  7435. gimple_seq_add_stmt (&cleanup, g);
  7436. g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
  7437. body = NULL;
  7438. gimple_seq_add_stmt (&body, g);
  7439. g = gimple_build_omp_taskgroup (body);
  7440. }
  7441. break;
  7442. case OMP_ORDERED:
  7443. g = gimple_build_omp_ordered (body);
  7444. break;
  7445. case OMP_CRITICAL:
  7446. g = gimple_build_omp_critical (body,
  7447. OMP_CRITICAL_NAME (*expr_p));
  7448. break;
  7449. default:
  7450. gcc_unreachable ();
  7451. }
  7452. gimplify_seq_add_stmt (pre_p, g);
  7453. ret = GS_ALL_DONE;
  7454. break;
  7455. }
  7456. case OMP_ATOMIC:
  7457. case OMP_ATOMIC_READ:
  7458. case OMP_ATOMIC_CAPTURE_OLD:
  7459. case OMP_ATOMIC_CAPTURE_NEW:
  7460. ret = gimplify_omp_atomic (expr_p, pre_p);
  7461. break;
  7462. case TRANSACTION_EXPR:
  7463. ret = gimplify_transaction (expr_p, pre_p);
  7464. break;
  7465. case TRUTH_AND_EXPR:
  7466. case TRUTH_OR_EXPR:
  7467. case TRUTH_XOR_EXPR:
  7468. {
  7469. tree orig_type = TREE_TYPE (*expr_p);
  7470. tree new_type, xop0, xop1;
  7471. *expr_p = gimple_boolify (*expr_p);
  7472. new_type = TREE_TYPE (*expr_p);
  7473. if (!useless_type_conversion_p (orig_type, new_type))
  7474. {
  7475. *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
  7476. ret = GS_OK;
  7477. break;
  7478. }
  7479. /* Boolified binary truth expressions are semantically equivalent
  7480. to bitwise binary expressions. Canonicalize them to the
  7481. bitwise variant. */
  7482. switch (TREE_CODE (*expr_p))
  7483. {
  7484. case TRUTH_AND_EXPR:
  7485. TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
  7486. break;
  7487. case TRUTH_OR_EXPR:
  7488. TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
  7489. break;
  7490. case TRUTH_XOR_EXPR:
  7491. TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
  7492. break;
  7493. default:
  7494. break;
  7495. }
  7496. /* Now make sure that operands have compatible type to
  7497. expression's new_type. */
  7498. xop0 = TREE_OPERAND (*expr_p, 0);
  7499. xop1 = TREE_OPERAND (*expr_p, 1);
  7500. if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
  7501. TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
  7502. new_type,
  7503. xop0);
  7504. if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
  7505. TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
  7506. new_type,
  7507. xop1);
  7508. /* Continue classified as tcc_binary. */
  7509. goto expr_2;
  7510. }
  7511. case FMA_EXPR:
  7512. case VEC_COND_EXPR:
  7513. case VEC_PERM_EXPR:
  7514. /* Classified as tcc_expression. */
  7515. goto expr_3;
  7516. case POINTER_PLUS_EXPR:
  7517. {
  7518. enum gimplify_status r0, r1;
  7519. r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
  7520. post_p, is_gimple_val, fb_rvalue);
  7521. r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
  7522. post_p, is_gimple_val, fb_rvalue);
  7523. recalculate_side_effects (*expr_p);
  7524. ret = MIN (r0, r1);
  7525. break;
  7526. }
  7527. case CILK_SYNC_STMT:
  7528. {
  7529. if (!fn_contains_cilk_spawn_p (cfun))
  7530. {
  7531. error_at (EXPR_LOCATION (*expr_p),
  7532. "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
  7533. ret = GS_ERROR;
  7534. }
  7535. else
  7536. {
  7537. gimplify_cilk_sync (expr_p, pre_p);
  7538. ret = GS_ALL_DONE;
  7539. }
  7540. break;
  7541. }
  7542. default:
  7543. switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
  7544. {
  7545. case tcc_comparison:
  7546. /* Handle comparison of objects of non scalar mode aggregates
  7547. with a call to memcmp. It would be nice to only have to do
  7548. this for variable-sized objects, but then we'd have to allow
  7549. the same nest of reference nodes we allow for MODIFY_EXPR and
  7550. that's too complex.
  7551. Compare scalar mode aggregates as scalar mode values. Using
  7552. memcmp for them would be very inefficient at best, and is
  7553. plain wrong if bitfields are involved. */
  7554. {
  7555. tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
  7556. /* Vector comparisons need no boolification. */
  7557. if (TREE_CODE (type) == VECTOR_TYPE)
  7558. goto expr_2;
  7559. else if (!AGGREGATE_TYPE_P (type))
  7560. {
  7561. tree org_type = TREE_TYPE (*expr_p);
  7562. *expr_p = gimple_boolify (*expr_p);
  7563. if (!useless_type_conversion_p (org_type,
  7564. TREE_TYPE (*expr_p)))
  7565. {
  7566. *expr_p = fold_convert_loc (input_location,
  7567. org_type, *expr_p);
  7568. ret = GS_OK;
  7569. }
  7570. else
  7571. goto expr_2;
  7572. }
  7573. else if (TYPE_MODE (type) != BLKmode)
  7574. ret = gimplify_scalar_mode_aggregate_compare (expr_p);
  7575. else
  7576. ret = gimplify_variable_sized_compare (expr_p);
  7577. break;
  7578. }
  7579. /* If *EXPR_P does not need to be special-cased, handle it
  7580. according to its class. */
  7581. case tcc_unary:
  7582. ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
  7583. post_p, is_gimple_val, fb_rvalue);
  7584. break;
  7585. case tcc_binary:
  7586. expr_2:
  7587. {
  7588. enum gimplify_status r0, r1;
  7589. r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
  7590. post_p, is_gimple_val, fb_rvalue);
  7591. r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
  7592. post_p, is_gimple_val, fb_rvalue);
  7593. ret = MIN (r0, r1);
  7594. break;
  7595. }
  7596. expr_3:
  7597. {
  7598. enum gimplify_status r0, r1, r2;
  7599. r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
  7600. post_p, is_gimple_val, fb_rvalue);
  7601. r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
  7602. post_p, is_gimple_val, fb_rvalue);
  7603. r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
  7604. post_p, is_gimple_val, fb_rvalue);
  7605. ret = MIN (MIN (r0, r1), r2);
  7606. break;
  7607. }
  7608. case tcc_declaration:
  7609. case tcc_constant:
  7610. ret = GS_ALL_DONE;
  7611. goto dont_recalculate;
  7612. default:
  7613. gcc_unreachable ();
  7614. }
  7615. recalculate_side_effects (*expr_p);
  7616. dont_recalculate:
  7617. break;
  7618. }
  7619. gcc_assert (*expr_p || ret != GS_OK);
  7620. }
  7621. while (ret == GS_OK);
  7622. /* If we encountered an error_mark somewhere nested inside, either
  7623. stub out the statement or propagate the error back out. */
  7624. if (ret == GS_ERROR)
  7625. {
  7626. if (is_statement)
  7627. *expr_p = NULL;
  7628. goto out;
  7629. }
  7630. /* This was only valid as a return value from the langhook, which
  7631. we handled. Make sure it doesn't escape from any other context. */
  7632. gcc_assert (ret != GS_UNHANDLED);
  7633. if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
  7634. {
  7635. /* We aren't looking for a value, and we don't have a valid
  7636. statement. If it doesn't have side-effects, throw it away. */
  7637. if (!TREE_SIDE_EFFECTS (*expr_p))
  7638. *expr_p = NULL;
  7639. else if (!TREE_THIS_VOLATILE (*expr_p))
  7640. {
  7641. /* This is probably a _REF that contains something nested that
  7642. has side effects. Recurse through the operands to find it. */
  7643. enum tree_code code = TREE_CODE (*expr_p);
  7644. switch (code)
  7645. {
  7646. case COMPONENT_REF:
  7647. case REALPART_EXPR:
  7648. case IMAGPART_EXPR:
  7649. case VIEW_CONVERT_EXPR:
  7650. gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
  7651. gimple_test_f, fallback);
  7652. break;
  7653. case ARRAY_REF:
  7654. case ARRAY_RANGE_REF:
  7655. gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
  7656. gimple_test_f, fallback);
  7657. gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
  7658. gimple_test_f, fallback);
  7659. break;
  7660. default:
  7661. /* Anything else with side-effects must be converted to
  7662. a valid statement before we get here. */
  7663. gcc_unreachable ();
  7664. }
  7665. *expr_p = NULL;
  7666. }
  7667. else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
  7668. && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
  7669. {
  7670. /* Historically, the compiler has treated a bare reference
  7671. to a non-BLKmode volatile lvalue as forcing a load. */
  7672. tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
  7673. /* Normally, we do not want to create a temporary for a
  7674. TREE_ADDRESSABLE type because such a type should not be
  7675. copied by bitwise-assignment. However, we make an
  7676. exception here, as all we are doing here is ensuring that
  7677. we read the bytes that make up the type. We use
  7678. create_tmp_var_raw because create_tmp_var will abort when
  7679. given a TREE_ADDRESSABLE type. */
  7680. tree tmp = create_tmp_var_raw (type, "vol");
  7681. gimple_add_tmp_var (tmp);
  7682. gimplify_assign (tmp, *expr_p, pre_p);
  7683. *expr_p = NULL;
  7684. }
  7685. else
  7686. /* We can't do anything useful with a volatile reference to
  7687. an incomplete type, so just throw it away. Likewise for
  7688. a BLKmode type, since any implicit inner load should
  7689. already have been turned into an explicit one by the
  7690. gimplification process. */
  7691. *expr_p = NULL;
  7692. }
  7693. /* If we are gimplifying at the statement level, we're done. Tack
  7694. everything together and return. */
  7695. if (fallback == fb_none || is_statement)
  7696. {
  7697. /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
  7698. it out for GC to reclaim it. */
  7699. *expr_p = NULL_TREE;
  7700. if (!gimple_seq_empty_p (internal_pre)
  7701. || !gimple_seq_empty_p (internal_post))
  7702. {
  7703. gimplify_seq_add_seq (&internal_pre, internal_post);
  7704. gimplify_seq_add_seq (pre_p, internal_pre);
  7705. }
  7706. /* The result of gimplifying *EXPR_P is going to be the last few
  7707. statements in *PRE_P and *POST_P. Add location information
  7708. to all the statements that were added by the gimplification
  7709. helpers. */
  7710. if (!gimple_seq_empty_p (*pre_p))
  7711. annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
  7712. if (!gimple_seq_empty_p (*post_p))
  7713. annotate_all_with_location_after (*post_p, post_last_gsi,
  7714. input_location);
  7715. goto out;
  7716. }
  7717. #ifdef ENABLE_GIMPLE_CHECKING
  7718. if (*expr_p)
  7719. {
  7720. enum tree_code code = TREE_CODE (*expr_p);
  7721. /* These expressions should already be in gimple IR form. */
  7722. gcc_assert (code != MODIFY_EXPR
  7723. && code != ASM_EXPR
  7724. && code != BIND_EXPR
  7725. && code != CATCH_EXPR
  7726. && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
  7727. && code != EH_FILTER_EXPR
  7728. && code != GOTO_EXPR
  7729. && code != LABEL_EXPR
  7730. && code != LOOP_EXPR
  7731. && code != SWITCH_EXPR
  7732. && code != TRY_FINALLY_EXPR
  7733. && code != OACC_PARALLEL
  7734. && code != OACC_KERNELS
  7735. && code != OACC_DATA
  7736. && code != OACC_HOST_DATA
  7737. && code != OACC_DECLARE
  7738. && code != OACC_UPDATE
  7739. && code != OACC_ENTER_DATA
  7740. && code != OACC_EXIT_DATA
  7741. && code != OACC_CACHE
  7742. && code != OMP_CRITICAL
  7743. && code != OMP_FOR
  7744. && code != OACC_LOOP
  7745. && code != OMP_MASTER
  7746. && code != OMP_TASKGROUP
  7747. && code != OMP_ORDERED
  7748. && code != OMP_PARALLEL
  7749. && code != OMP_SECTIONS
  7750. && code != OMP_SECTION
  7751. && code != OMP_SINGLE);
  7752. }
  7753. #endif
  7754. /* Otherwise we're gimplifying a subexpression, so the resulting
  7755. value is interesting. If it's a valid operand that matches
  7756. GIMPLE_TEST_F, we're done. Unless we are handling some
  7757. post-effects internally; if that's the case, we need to copy into
  7758. a temporary before adding the post-effects to POST_P. */
  7759. if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
  7760. goto out;
  7761. /* Otherwise, we need to create a new temporary for the gimplified
  7762. expression. */
  7763. /* We can't return an lvalue if we have an internal postqueue. The
  7764. object the lvalue refers to would (probably) be modified by the
  7765. postqueue; we need to copy the value out first, which means an
  7766. rvalue. */
  7767. if ((fallback & fb_lvalue)
  7768. && gimple_seq_empty_p (internal_post)
  7769. && is_gimple_addressable (*expr_p))
  7770. {
  7771. /* An lvalue will do. Take the address of the expression, store it
  7772. in a temporary, and replace the expression with an INDIRECT_REF of
  7773. that temporary. */
  7774. tmp = build_fold_addr_expr_loc (input_location, *expr_p);
  7775. gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
  7776. *expr_p = build_simple_mem_ref (tmp);
  7777. }
  7778. else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
  7779. {
  7780. /* An rvalue will do. Assign the gimplified expression into a
  7781. new temporary TMP and replace the original expression with
  7782. TMP. First, make sure that the expression has a type so that
  7783. it can be assigned into a temporary. */
  7784. gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
  7785. *expr_p = get_formal_tmp_var (*expr_p, pre_p);
  7786. }
  7787. else
  7788. {
  7789. #ifdef ENABLE_GIMPLE_CHECKING
  7790. if (!(fallback & fb_mayfail))
  7791. {
  7792. fprintf (stderr, "gimplification failed:\n");
  7793. print_generic_expr (stderr, *expr_p, 0);
  7794. debug_tree (*expr_p);
  7795. internal_error ("gimplification failed");
  7796. }
  7797. #endif
  7798. gcc_assert (fallback & fb_mayfail);
  7799. /* If this is an asm statement, and the user asked for the
  7800. impossible, don't die. Fail and let gimplify_asm_expr
  7801. issue an error. */
  7802. ret = GS_ERROR;
  7803. goto out;
  7804. }
  7805. /* Make sure the temporary matches our predicate. */
  7806. gcc_assert ((*gimple_test_f) (*expr_p));
  7807. if (!gimple_seq_empty_p (internal_post))
  7808. {
  7809. annotate_all_with_location (internal_post, input_location);
  7810. gimplify_seq_add_seq (pre_p, internal_post);
  7811. }
  7812. out:
  7813. input_location = saved_location;
  7814. return ret;
  7815. }
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  /* Nothing to do for a missing or erroneous type.  */
  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  The flag is set *before* the recursive
     calls below so that self-referential types terminate.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: the bounds themselves may be non-constant
	 expressions (e.g. the domain of a VLA).  Gimplify them, then
	 share the resulting values with every variant of the type.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Clearing DECL_IGNORED_P on the artificial bound variables
	     keeps them visible to the debugger.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the offset and sizes of each field, then recurse into
	 the field's own type.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We used to recurse on the pointed-to type here, which turned out to
	 be incorrect because its definition might refer to variables not
	 yet initialized at this point if a forward declaration is involved.

	 It was actually useful for anonymous pointed-to types to ensure
	 that the sizes evaluation dominates every possible later use of the
	 values.  Restricting to such types here would be safe since there
	 is no possible forward declaration around, but would introduce an
	 undesirable middle-end semantic to anonymity.  We then defer to
	 front-ends the responsibility of ensuring that the sizes are
	 evaluated both early and late enough, e.g. by attaching artificial
	 type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Finally gimplify the overall size and unit size of TYPE itself and
     propagate them (and the gimplified flag) to all variants.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
  7904. /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
  7905. a size or position, has had all of its SAVE_EXPRs evaluated.
  7906. We add any required statements to *STMT_P. */
  7907. void
  7908. gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
  7909. {
  7910. tree expr = *expr_p;
  7911. /* We don't do anything if the value isn't there, is constant, or contains
  7912. A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
  7913. a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
  7914. will want to replace it with a new variable, but that will cause problems
  7915. if this type is from outside the function. It's OK to have that here. */
  7916. if (is_gimple_sizepos (expr))
  7917. return;
  7918. *expr_p = unshare_expr (expr);
  7919. gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
  7920. }
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* No gimplification context may be active yet; this function owns the
     outermost one and checks again below that it was fully popped.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context ();

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* Bodies marked "omp declare target" are gimplified within a
	 target OMP context.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* A non-null cgn->origin means FNDECL is itself a nested function, so
     only then can it refer to VLA sizes of an enclosing function; only
     then allocate the tracking set.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to hang the bind on.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The tree body has been converted; clear it so it can be reclaimed.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Declare any nonlocal VLA size variables collected during
     gimplification, then discard the tracking set.  */
  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OMP context opened above or left by gimplification.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
  8026. typedef char *char_p; /* For DEF_VEC_P. */
  8027. /* Return whether we should exclude FNDECL from instrumentation. */
  8028. static bool
  8029. flag_instrument_functions_exclude_p (tree fndecl)
  8030. {
  8031. vec<char_p> *v;
  8032. v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  8033. if (v && v->length () > 0)
  8034. {
  8035. const char *name;
  8036. int i;
  8037. char *s;
  8038. name = lang_hooks.decl_printable_name (fndecl, 0);
  8039. FOR_EACH_VEC_ELT (*v, i, s)
  8040. if (strstr (name, s) != NULL)
  8041. return true;
  8042. }
  8043. v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  8044. if (v && v->length () > 0)
  8045. {
  8046. const char *name;
  8047. int i;
  8048. char *s;
  8049. name = DECL_SOURCE_FILE (fndecl);
  8050. FOR_EACH_VEC_ELT (*v, i, s)
  8051. if (strstr (name, s) != NULL)
  8052. return true;
  8053. }
  8054. return false;
  8055. }
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the result decl.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the cleanup (finally) sequence: fetch this frame's return
	 address (__builtin_return_address (0)) and pass it, together with
	 the address of the current function, to the profiling exit hook
	 (BUILT_IN_PROFILE_FUNC_EXIT).  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);

      /* Wrap the existing body so the exit hook runs on every exit.  */
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry sequence: the same return-address fetch followed
	 by the profiling entry hook (BUILT_IN_PROFILE_FUNC_ENTER), then
	 the try/finally-wrapped original body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));

      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread (unless suppressed per-function by the
     no_sanitize_thread attribute), wrap the body in a try/finally that
     calls the internal TSAN_FUNC_EXIT function on every function exit.  */
  if ((flag_sanitize & SANITIZE_THREAD) != 0
      && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The original tree body is dead now; the GIMPLE body installed via
     gimple_set_body is authoritative.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
  8159. /* Return a dummy expression of type TYPE in order to keep going after an
  8160. error. */
  8161. static tree
  8162. dummy_object (tree type)
  8163. {
  8164. tree t = build_int_cst (build_pointer_type (type), 0);
  8165. return build2 (MEM_REF, type, t, t);
  8166. }
  8167. /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
  8168. builtin function, but a very special sort of operator. */
  8169. enum gimplify_status
  8170. gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
  8171. {
  8172. tree promoted_type, have_va_type;
  8173. tree valist = TREE_OPERAND (*expr_p, 0);
  8174. tree type = TREE_TYPE (*expr_p);
  8175. tree t;
  8176. location_t loc = EXPR_LOCATION (*expr_p);
  8177. /* Verify that valist is of the proper type. */
  8178. have_va_type = TREE_TYPE (valist);
  8179. if (have_va_type == error_mark_node)
  8180. return GS_ERROR;
  8181. have_va_type = targetm.canonical_va_list_type (have_va_type);
  8182. if (have_va_type == NULL_TREE)
  8183. {
  8184. error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
  8185. return GS_ERROR;
  8186. }
  8187. /* Generate a diagnostic for requesting data of a type that cannot
  8188. be passed through `...' due to type promotion at the call site. */
  8189. if ((promoted_type = lang_hooks.types.type_promotes_to (type))
  8190. != type)
  8191. {
  8192. static bool gave_help;
  8193. bool warned;
  8194. /* Unfortunately, this is merely undefined, rather than a constraint
  8195. violation, so we cannot make this an error. If this call is never
  8196. executed, the program is still strictly conforming. */
  8197. warned = warning_at (loc, 0,
  8198. "%qT is promoted to %qT when passed through %<...%>",
  8199. type, promoted_type);
  8200. if (!gave_help && warned)
  8201. {
  8202. gave_help = true;
  8203. inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
  8204. promoted_type, type);
  8205. }
  8206. /* We can, however, treat "undefined" any way we please.
  8207. Call abort to encourage the user to fix the program. */
  8208. if (warned)
  8209. inform (loc, "if this code is reached, the program will abort");
  8210. /* Before the abort, allow the evaluation of the va_list
  8211. expression to exit or longjmp. */
  8212. gimplify_and_add (valist, pre_p);
  8213. t = build_call_expr_loc (loc,
  8214. builtin_decl_implicit (BUILT_IN_TRAP), 0);
  8215. gimplify_and_add (t, pre_p);
  8216. /* This is dead code, but go ahead and finish so that the
  8217. mode of the result comes out right. */
  8218. *expr_p = dummy_object (type);
  8219. return GS_ALL_DONE;
  8220. }
  8221. else
  8222. {
  8223. /* Make it easier for the backends by protecting the valist argument
  8224. from multiple evaluations. */
  8225. if (TREE_CODE (have_va_type) == ARRAY_TYPE)
  8226. {
  8227. /* For this case, the backends will be expecting a pointer to
  8228. TREE_TYPE (abi), but it's possible we've
  8229. actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
  8230. So fix it. */
  8231. if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
  8232. {
  8233. tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
  8234. valist = fold_convert_loc (loc, p1,
  8235. build_fold_addr_expr_loc (loc, valist));
  8236. }
  8237. gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
  8238. }
  8239. else
  8240. gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
  8241. if (!targetm.gimplify_va_arg_expr)
  8242. /* FIXME: Once most targets are converted we should merely
  8243. assert this is non-null. */
  8244. return GS_ALL_DONE;
  8245. *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
  8246. return GS_OK;
  8247. }
  8248. }
  8249. /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
  8250. DST/SRC are the destination and source respectively. You can pass
  8251. ungimplified trees in DST or SRC, in which case they will be
  8252. converted to a gimple operand if necessary.
  8253. This function returns the newly created GIMPLE_ASSIGN tuple. */
  8254. gimple
  8255. gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
  8256. {
  8257. tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  8258. gimplify_and_add (t, seq_p);
  8259. ggc_free (t);
  8260. return gimple_seq_last_stmt (*seq_p);
  8261. }
  8262. inline hashval_t
  8263. gimplify_hasher::hash (const value_type *p)
  8264. {
  8265. tree t = p->val;
  8266. return iterative_hash_expr (t, 0);
  8267. }
  8268. inline bool
  8269. gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
  8270. {
  8271. tree t1 = p1->val;
  8272. tree t2 = p2->val;
  8273. enum tree_code code = TREE_CODE (t1);
  8274. if (TREE_CODE (t2) != code
  8275. || TREE_TYPE (t1) != TREE_TYPE (t2))
  8276. return false;
  8277. if (!operand_equal_p (t1, t2, 0))
  8278. return false;
  8279. #ifdef ENABLE_CHECKING
  8280. /* Only allow them to compare equal if they also hash equal; otherwise
  8281. results are nondeterminate, and we fail bootstrap comparison. */
  8282. gcc_assert (hash (p1) == hash (p2));
  8283. #endif
  8284. return true;
  8285. }